diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9b323cd20d..eb9b3a0916 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,7 +6,6 @@ repos: hooks: - id: end-of-file-fixer files: \.py$ - - id: requirements-txt-fixer - id: check-merge-conflict - id: check-case-conflict - id: check-json diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 6ad203d629..191fd99540 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -1,37 +1,23 @@ -# Read the Docs configuration file for Sphinx projects +# .readthedocs.yml +# Read the Docs configuration file # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details - -# Required version: 2 -# Set the OS, Python version and other tools you might need build: - os: ubuntu-22.04 + os: "ubuntu-22.04" tools: python: "3.10" - # You can also specify other tool versions: - # nodejs: "20" - # rust: "1.70" - # golang: "1.20" -# Build documentation in the "docs/" directory with Sphinx +# Build documentation in the docs/ directory with Sphinx sphinx: configuration: docs/conf.py - # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs - # builder: "dirhtml" - # Fail on all warnings to avoid broken references - # fail_on_warning: true + fail_on_warning: false # Optionally build your docs in additional formats such as PDF and ePub -# formats: -# - pdf -# - epub +formats: + - htmlzip -# Optional but recommended, declare the Python requirements required -# to build your documentation -# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html +# Optionally set the version of Python and requirements required to build your docs python: install: - - method: pip - path: . - - requirements: docs/docs-requirements.txt + - requirements: docs/requirements.txt diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 254b64a38f..dd8d206cfc 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,79 +1,2 @@ # Contributing to OpenFL - -We welcome contributions from the community. There are several ways to contribute: -* Improvements in [documentation](https://openfl.readthedocs.io/en/latest/). -* Contributing to OpenFL's code-base: via bug-fixes or feature additions. -* Answering questions on our [discussions page](https://github.com/securefederatedai/openfl/discussions). -* Participating in our [roadmap](https://github.com/securefederatedai/openfl/blob/develop/ROADMAP.md) discussions. - -We have a slack [channel](https://join.slack.com/t/openfl/shared_invite/zt-ovzbohvn-T5fApk05~YS_iZhjJ5yaTw) and we host regular [community meetings](https://github.com/securefederatedai/openfl#support). - - -## How to contribute code -### Step 1. Open an issue - -Before you start making any changes, it is always good to open an [issue](https://github.com/securefederatedai/openfl/issues/new/choose) first (assuming one does not already exist), outlining your proposed changes. We can give you feedback, and potentially validate the proposed changes. - -For minor changes (akin to a documentation or bug fix), proceed to opening a Pull Request (PR) directly. - -### Step 2. Make code changes - -To modify code, you need to fork the repository. Set up a development environment as covered in the section "Setup environment" below. - -### Step 3. Create a Pull Request (PR) - -Once the change is ready, open a PR from your branch in your fork, to the `develop` branch in [securefederatedai/openfl](https://github.com/securefederatedai/openfl). 
OpenFL follows standard recommendations of PR formatting. Find more details [here](https://github.blog/2015-01-21-how-to-write-the-perfect-pull-request/). - -### Step 4. Sign your work - -Signoff your patch commits using your real name. We discourage anonymous contributions. - - Signed-off-by: Joe Smith - -If you set your `user.name` and `user.email` git configs, you can sign your -commits using `git commit --signoff`. - -Your signature [certifies](http://developercertificate.org/) that you wrote the patch, or, you otherwise have the right to pass it on as an open-source patch. - -OpenFL is licensed under the [Apache 2.0 license](https://github.com/securefederatedai/openfl/blob/develop/LICENSE). By contributing to the project, you agree to the license and copyright terms therein and release your contribution under these terms. - -### Step 5. Code review and merge - -Verify that your contribution passes all tests in our CI/CD pipeline. In case of a failure, like shown below, look into the error messages and try to fix them. - -![CI/CD](docs/images/CI_details.png) - -Meanwhile, a reviewer will review the pull request and provide comments. Post few iterations of -reviews and changes (depending on the complexity of the changes), PR will be approved for merge. - -## Setup environment - -We recommend setting up a local dev environment. Clone your forked repo to your local machine and install the dependencies. - -```shell -git clone https://github.com/YOUR_GITHUB_USERNAME/openfl.git -cd openfl -pip install -U pip setuptools wheel -pip install . -pip install -r linters-requirements.txt -``` - -## Code style - -OpenFL uses [ruff](https://github.com/astral-sh/ruff) to lint/format code and [precommit](https://pre-commit.com/) checks. - -Run the following command at the **root** directory of the repo to format your code. - -``` -sh scripts/format.sh -``` -You may need to resolve errors that could not be resolved by autoformatting. To only show lint errors, run `sh scripts/lint.sh` at the **root** directory of the repo. - -### Docstrings -Since docstrings cannot be checked or standardized, if you do write/edit any docstring, make sure to check them manually. OpenFL docstrings should follow the conventions below: - -A **class** or a **function** docstring may contain: -* A one-line description of the class/function. -* Paragraph(s) of detailed information. -* Optional `Examples` section. -* `Args` section for arguments under `__init__()`. +For more information, see [Contributing to OpenFL](https://openfl.readthedocs.io/en/latest/contributing.html). \ No newline at end of file diff --git a/ROADMAP.md b/ROADMAP.md index 84beffb93e..de644142e8 100644 --- a/ROADMAP.md +++ b/ROADMAP.md @@ -1,42 +1,2 @@ # OpenFL Project Roadmap - -This document is intended to give users and contributors an idea of the OpenFL team's current priorities, features we plan to incorporate over the short, medium, and long term, and call out opportunities for the community to get involved. - -## When will this document be updated? -At a minimum once each product release - which we expect to be on quarterly cadence. - -## 1. Features and interfaces - -### 1.1 Decoupling the FL specification interface from the infrastructure -The task runner interface is coupled with the the single experiment aggregator / collaborator infrastructure, and the Interactive API is tied to the director / envoy infrastructure. 
-The Interactive API was originally designed to be a high-level API for OpenFL, but for the cases when more control is required by users, access to lower level interfaces is necessary. -In OpenFL 1.5, we introduced the Workflow API as an experimental feature, which can be used to specify the federated learning flow, independently of the underlying computing infrastructure. The Workflow API facilitates a seamless transition from local simulation to a federated setting. Additionally, this approach offers greater control over the sequence and content of the FL experiment steps, which enables more complex experiments beyond just horizontal FL. Workflow API also provides more granular privacy controls, allowing the model owner to explicitly permit or forbid the transfer of specific attributes over the network. - -### 1.2 Consolidating interfaces -OpenFL has supported multiple ways of running FL experiments for a long time, many of which are not interoperable: TaskRunner API, Workflow API, Python Native API, and Interactive API. The strategic vision is to consolidate OpenFL around the Workflow API, as it focuses on meeting the needs of the data scientist, who is the main user of the framework. Over the upcoming 1.x releases, we plan to gradually deprecate and eliminate the legacy Python Native API and Interactive API. OpenFL 2.0 will be centered around the Workflow API, facilitating a seamless transition from local simulations to distributed FL experiments, and even enabling the setup of permanent federations, which is currently only possible through the Interactive API. - -### 1.3 Component standardization and framework interoperability - -Federated Learning is a [burgeoning space](https://github.com/weimingwill/awesome-federated-learning#frameworks). -Most core FL infrastructure (model weight extraction, network protocols, and serialization designs) must be reimplemented ad hoc by each framework. -This causes community fragmentation and distracts from some of the bigger problems to be solved in federated learning. In the short term, we want to collaborate on standards for FL, - first at the communication and storage layer, and make these components modular across other frameworks. Our aim is also to provide a library for FL algorithms, compression methods, - that can both be applied and interpreted easily. - - ### 1.4 Confidential computing support - Although OpenFL currently relies on Intel® SGX for trusted execution, the long term vision is towards broader confidential computing ecosystem support. This can be achieved by packaging OpenFL workspaces and workflows as Confidential Containers (CoCo), which supports a spectrum of TEE backends, including Intel® SGX and TDX, Arm TrustZone, and AMD SEV. - -## Upcoming OpenFL releases - -### OpenFL 1.7 (Q1 2025) -This release is focused on enabling a great developer experience for OpenFL users: -1. Introducing the [FederatedRuntime](https://openfl.readthedocs.io/en/latest/about/features_index/workflowinterface.html#runtimes-future-plans) for Workflow API, which allows running FL workflows in a distributed setting (after local simulation with the LocalRuntime). -2. Adding support for federated XGBoost in OpenFL. See the example [XGBoost workspace](https://github.com/securefederatedai/openfl/tree/develop/openfl-workspace/xgb_higgs) based on Task Runner API. -3. Revised Task Runner API workspace dockerization process, with TEE-ready containers (using Gramine and Intel® Software Guard Extensions). 
The current release contains an initial set of changes that enable OpenFL compatibility with the broader confidential containers ecosystem. -4. Streamlining the Federated Evaluation experiments with TaskRunner API -5. Migrating a selection of key OpenFL tutorials from Python Native API to Workflow API. Check out the updated [Tutorials folder](https://github.com/securefederatedai/openfl/tree/develop/openfl-tutorials/experimental/workflow) -6. Deprecating the Python Native API -7. Deprecating the Interactive API - -### OpenFL 1.8 (TBA) -Stay tuned for updates soon! \ No newline at end of file +For more information, see [roadmap](https://openfl.readthedocs.io/en/latest/roadmap.html). \ No newline at end of file diff --git a/docs/.gitignore b/docs/.gitignore index 23592ba08e..c6b43d1085 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -1,5 +1,3 @@ -# openfl* -# models* -# data* /_build -**/.ipynb_checkpoints \ No newline at end of file +**/.ipynb_checkpoints +_autosummary \ No newline at end of file diff --git a/docs/Makefile b/docs/Makefile index 4670cae22c..398cd36726 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -1,13 +1,8 @@ -# Copyright (C) 2020-2023 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - - # Minimal makefile for Sphinx documentation -# # You can set these variables from the command line, and also # from the environment for the first two. -SPHINXOPTS ?= -D autodoc_default_options.imported-members=True +SPHINXOPTS ?= -D autodoc_default_options.imported-members=True -D nb_execution_mode=force SPHINXBUILD ?= sphinx-build SOURCEDIR = . BUILDDIR = _build @@ -21,4 +16,4 @@ help: # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/docs/README.md b/docs/README.md index fbab187dc1..bc2190d850 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,29 +1,2 @@ -# How to update the documentation - -We use sphinx to generate the documentation for this project. -The documentation project has been initialized properly and we basically just need to update the actual content. - -Install the openfl package - -```sh -pip install . -``` - -Install requirements for building documentation: - -```sh -pip install -r docs-requirements.txt -``` - - -The Makefile supports many targets. We choose html because we can easily host the documentation on a remote server. Compile the documentation source code: -```sh -make clean -make html -``` - -Open documentation locally: -```sh -cd _build/html -python -m http.server -``` +# Update documentation +For more information, refer [here](https://openfl.readthedocs.io/en/latest/contributing.html#update-documentation). 
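The `.readthedocs.yaml`, `docs/Makefile`, and `docs/README.md` hunks above all reshape the documentation build. A minimal sketch of the corresponding local build follows, assuming the new `docs/requirements.txt` path and the existing `make html` target from this diff; the virtual-environment name is illustrative only.

```sh
# Minimal sketch of a local docs build mirroring the updated Read the Docs setup.
# Paths (docs/requirements.txt, docs/Makefile, _build/html) are taken from this diff;
# the virtual-environment name is illustrative only.
python3 -m venv .venv-docs && source .venv-docs/bin/activate
pip install -r docs/requirements.txt                  # requirements path now used by .readthedocs.yaml
make -C docs html                                     # Makefile forwards -D nb_execution_mode=force to sphinx-build
python -m http.server --directory docs/_build/html    # preview, as the old docs/README.md suggested
```

Whether a local `pip install .` of openfl itself is still needed depends on what `docs/requirements.txt` pulls in; the updated `.readthedocs.yaml` no longer installs the package explicitly.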
diff --git a/docs/_static/css/Intel_One_Mono_Font_Theme.css b/docs/_static/css/Intel_One_Mono_Font_Theme.css
deleted file mode 100644
index 63d179fe23..0000000000
--- a/docs/_static/css/Intel_One_Mono_Font_Theme.css
+++ /dev/null
@@ -1,7820 +0,0 @@
-/* Deleted stylesheet: Read the Docs theme overrides built on the IntelOne Mono and Font Awesome 4.7.0 fonts (7,820 lines) */
- .fa-thermometer-0:before, - .fa-thermometer-empty:before { - content: "" - } - - .fa-shower:before { - content: "" - } - - .fa-bath:before, - .fa-bathtub:before, - .fa-s15:before { - content: "" - } - - .fa-podcast:before { - content: "" - } - - .fa-window-maximize:before { - content: "" - } - - .fa-window-minimize:before { - content: "" - } - - .fa-window-restore:before { - content: "" - } - - .fa-times-rectangle:before, - .fa-window-close:before { - content: "" - } - - .fa-times-rectangle-o:before, - .fa-window-close-o:before { - content: "" - } - - .fa-bandcamp:before { - content: "" - } - - .fa-grav:before { - content: "" - } - - .fa-etsy:before { - content: "" - } - - .fa-imdb:before { - content: "" - } - - .fa-ravelry:before { - content: "" - } - - .fa-eercast:before { - content: "" - } - - .fa-microchip:before { - content: "" - } - - .fa-snowflake-o:before { - content: "" - } - - .fa-superpowers:before { - content: "" - } - - .fa-wpexplorer:before { - content: "" - } - - .fa-meetup:before { - content: "" - } - - .sr-only { - position: absolute; - width: 1px; - height: 1px; - padding: 0; - margin: -1px; - overflow: hidden; - clip: rect(0, 0, 0, 0); - border: 0 - } - - .sr-only-focusable:active, - .sr-only-focusable:focus { - position: static; - width: auto; - height: auto; - margin: 0; - overflow: visible; - clip: auto - } - - .fa, - .icon, - .rst-content .admonition-title, - .rst-content .code-block-caption .headerlink, - .rst-content .eqno .headerlink, - .rst-content code.download span:first-child, - .rst-content dl dt .headerlink, - .rst-content h1 .headerlink, - .rst-content h2 .headerlink, - .rst-content h3 .headerlink, - .rst-content h4 .headerlink, - .rst-content h5 .headerlink, - .rst-content h6 .headerlink, - .rst-content p.caption .headerlink, - .rst-content p .headerlink, - .rst-content table>caption .headerlink, - .rst-content tt.download span:first-child, - .wy-dropdown .caret, - .wy-inline-validate.wy-inline-validate-danger .wy-input-context, - .wy-inline-validate.wy-inline-validate-info .wy-input-context, - .wy-inline-validate.wy-inline-validate-success .wy-input-context, - .wy-inline-validate.wy-inline-validate-warning .wy-input-context, - .wy-menu-vertical li.current>a button.toctree-expand, - .wy-menu-vertical li.on a button.toctree-expand, - .wy-menu-vertical li button.toctree-expand { - font-family: inherit - } - - .fa:before, - .icon:before, - .rst-content .admonition-title:before, - .rst-content .code-block-caption .headerlink:before, - .rst-content .eqno .headerlink:before, - .rst-content code.download span:first-child:before, - .rst-content dl dt .headerlink:before, - .rst-content h1 .headerlink:before, - .rst-content h2 .headerlink:before, - .rst-content h3 .headerlink:before, - .rst-content h4 .headerlink:before, - .rst-content h5 .headerlink:before, - .rst-content h6 .headerlink:before, - .rst-content p.caption .headerlink:before, - .rst-content p .headerlink:before, - .rst-content table>caption .headerlink:before, - .rst-content tt.download span:first-child:before, - .wy-dropdown .caret:before, - .wy-inline-validate.wy-inline-validate-danger .wy-input-context:before, - .wy-inline-validate.wy-inline-validate-info .wy-input-context:before, - .wy-inline-validate.wy-inline-validate-success .wy-input-context:before, - .wy-inline-validate.wy-inline-validate-warning .wy-input-context:before, - .wy-menu-vertical li.current>a button.toctree-expand:before, - .wy-menu-vertical li.on a button.toctree-expand:before, - .wy-menu-vertical li 
button.toctree-expand:before { - font-family: IntelOne Mono, FontAwesome; - display: inline-block; - font-style: normal; - font-weight: 400; - line-height: 1; - text-decoration: inherit - } - - .rst-content .code-block-caption a .headerlink, - .rst-content .eqno a .headerlink, - .rst-content a .admonition-title, - .rst-content code.download a span:first-child, - .rst-content dl dt a .headerlink, - .rst-content h1 a .headerlink, - .rst-content h2 a .headerlink, - .rst-content h3 a .headerlink, - .rst-content h4 a .headerlink, - .rst-content h5 a .headerlink, - .rst-content h6 a .headerlink, - .rst-content p.caption a .headerlink, - .rst-content p a .headerlink, - .rst-content table>caption a .headerlink, - .rst-content tt.download a span:first-child, - .wy-menu-vertical li.current>a button.toctree-expand, - .wy-menu-vertical li.on a button.toctree-expand, - .wy-menu-vertical li a button.toctree-expand, - a .fa, - a .icon, - a .rst-content .admonition-title, - a .rst-content .code-block-caption .headerlink, - a .rst-content .eqno .headerlink, - a .rst-content code.download span:first-child, - a .rst-content dl dt .headerlink, - a .rst-content h1 .headerlink, - a .rst-content h2 .headerlink, - a .rst-content h3 .headerlink, - a .rst-content h4 .headerlink, - a .rst-content h5 .headerlink, - a .rst-content h6 .headerlink, - a .rst-content p.caption .headerlink, - a .rst-content p .headerlink, - a .rst-content table>caption .headerlink, - a .rst-content tt.download span:first-child, - a .wy-menu-vertical li button.toctree-expand { - display: inline-block; - text-decoration: inherit - } - - .btn .fa, - .btn .icon, - .btn .rst-content .admonition-title, - .btn .rst-content .code-block-caption .headerlink, - .btn .rst-content .eqno .headerlink, - .btn .rst-content code.download span:first-child, - .btn .rst-content dl dt .headerlink, - .btn .rst-content h1 .headerlink, - .btn .rst-content h2 .headerlink, - .btn .rst-content h3 .headerlink, - .btn .rst-content h4 .headerlink, - .btn .rst-content h5 .headerlink, - .btn .rst-content h6 .headerlink, - .btn .rst-content p .headerlink, - .btn .rst-content table>caption .headerlink, - .btn .rst-content tt.download span:first-child, - .btn .wy-menu-vertical li.current>a button.toctree-expand, - .btn .wy-menu-vertical li.on a button.toctree-expand, - .btn .wy-menu-vertical li button.toctree-expand, - .nav .fa, - .nav .icon, - .nav .rst-content .admonition-title, - .nav .rst-content .code-block-caption .headerlink, - .nav .rst-content .eqno .headerlink, - .nav .rst-content code.download span:first-child, - .nav .rst-content dl dt .headerlink, - .nav .rst-content h1 .headerlink, - .nav .rst-content h2 .headerlink, - .nav .rst-content h3 .headerlink, - .nav .rst-content h4 .headerlink, - .nav .rst-content h5 .headerlink, - .nav .rst-content h6 .headerlink, - .nav .rst-content p .headerlink, - .nav .rst-content table>caption .headerlink, - .nav .rst-content tt.download span:first-child, - .nav .wy-menu-vertical li.current>a button.toctree-expand, - .nav .wy-menu-vertical li.on a button.toctree-expand, - .nav .wy-menu-vertical li button.toctree-expand, - .rst-content .btn .admonition-title, - .rst-content .code-block-caption .btn .headerlink, - .rst-content .code-block-caption .nav .headerlink, - .rst-content .eqno .btn .headerlink, - .rst-content .eqno .nav .headerlink, - .rst-content .nav .admonition-title, - .rst-content code.download .btn span:first-child, - .rst-content code.download .nav span:first-child, - .rst-content dl dt .btn .headerlink, - 
.rst-content dl dt .nav .headerlink, - .rst-content h1 .btn .headerlink, - .rst-content h1 .nav .headerlink, - .rst-content h2 .btn .headerlink, - .rst-content h2 .nav .headerlink, - .rst-content h3 .btn .headerlink, - .rst-content h3 .nav .headerlink, - .rst-content h4 .btn .headerlink, - .rst-content h4 .nav .headerlink, - .rst-content h5 .btn .headerlink, - .rst-content h5 .nav .headerlink, - .rst-content h6 .btn .headerlink, - .rst-content h6 .nav .headerlink, - .rst-content p .btn .headerlink, - .rst-content p .nav .headerlink, - .rst-content table>caption .btn .headerlink, - .rst-content table>caption .nav .headerlink, - .rst-content tt.download .btn span:first-child, - .rst-content tt.download .nav span:first-child, - .wy-menu-vertical li .btn button.toctree-expand, - .wy-menu-vertical li.current>a .btn button.toctree-expand, - .wy-menu-vertical li.current>a .nav button.toctree-expand, - .wy-menu-vertical li .nav button.toctree-expand, - .wy-menu-vertical li.on a .btn button.toctree-expand, - .wy-menu-vertical li.on a .nav button.toctree-expand { - display: inline - } - - .btn .fa-large.icon, - .btn .fa.fa-large, - .btn .rst-content .code-block-caption .fa-large.headerlink, - .btn .rst-content .eqno .fa-large.headerlink, - .btn .rst-content .fa-large.admonition-title, - .btn .rst-content code.download span.fa-large:first-child, - .btn .rst-content dl dt .fa-large.headerlink, - .btn .rst-content h1 .fa-large.headerlink, - .btn .rst-content h2 .fa-large.headerlink, - .btn .rst-content h3 .fa-large.headerlink, - .btn .rst-content h4 .fa-large.headerlink, - .btn .rst-content h5 .fa-large.headerlink, - .btn .rst-content h6 .fa-large.headerlink, - .btn .rst-content p .fa-large.headerlink, - .btn .rst-content table>caption .fa-large.headerlink, - .btn .rst-content tt.download span.fa-large:first-child, - .btn .wy-menu-vertical li button.fa-large.toctree-expand, - .nav .fa-large.icon, - .nav .fa.fa-large, - .nav .rst-content .code-block-caption .fa-large.headerlink, - .nav .rst-content .eqno .fa-large.headerlink, - .nav .rst-content .fa-large.admonition-title, - .nav .rst-content code.download span.fa-large:first-child, - .nav .rst-content dl dt .fa-large.headerlink, - .nav .rst-content h1 .fa-large.headerlink, - .nav .rst-content h2 .fa-large.headerlink, - .nav .rst-content h3 .fa-large.headerlink, - .nav .rst-content h4 .fa-large.headerlink, - .nav .rst-content h5 .fa-large.headerlink, - .nav .rst-content h6 .fa-large.headerlink, - .nav .rst-content p .fa-large.headerlink, - .nav .rst-content table>caption .fa-large.headerlink, - .nav .rst-content tt.download span.fa-large:first-child, - .nav .wy-menu-vertical li button.fa-large.toctree-expand, - .rst-content .btn .fa-large.admonition-title, - .rst-content .code-block-caption .btn .fa-large.headerlink, - .rst-content .code-block-caption .nav .fa-large.headerlink, - .rst-content .eqno .btn .fa-large.headerlink, - .rst-content .eqno .nav .fa-large.headerlink, - .rst-content .nav .fa-large.admonition-title, - .rst-content code.download .btn span.fa-large:first-child, - .rst-content code.download .nav span.fa-large:first-child, - .rst-content dl dt .btn .fa-large.headerlink, - .rst-content dl dt .nav .fa-large.headerlink, - .rst-content h1 .btn .fa-large.headerlink, - .rst-content h1 .nav .fa-large.headerlink, - .rst-content h2 .btn .fa-large.headerlink, - .rst-content h2 .nav .fa-large.headerlink, - .rst-content h3 .btn .fa-large.headerlink, - .rst-content h3 .nav .fa-large.headerlink, - .rst-content h4 .btn .fa-large.headerlink, - 
.rst-content h4 .nav .fa-large.headerlink, - .rst-content h5 .btn .fa-large.headerlink, - .rst-content h5 .nav .fa-large.headerlink, - .rst-content h6 .btn .fa-large.headerlink, - .rst-content h6 .nav .fa-large.headerlink, - .rst-content p .btn .fa-large.headerlink, - .rst-content p .nav .fa-large.headerlink, - .rst-content table>caption .btn .fa-large.headerlink, - .rst-content table>caption .nav .fa-large.headerlink, - .rst-content tt.download .btn span.fa-large:first-child, - .rst-content tt.download .nav span.fa-large:first-child, - .wy-menu-vertical li .btn button.fa-large.toctree-expand, - .wy-menu-vertical li .nav button.fa-large.toctree-expand { - line-height: .9em - } - - .btn .fa-spin.icon, - .btn .fa.fa-spin, - .btn .rst-content .code-block-caption .fa-spin.headerlink, - .btn .rst-content .eqno .fa-spin.headerlink, - .btn .rst-content .fa-spin.admonition-title, - .btn .rst-content code.download span.fa-spin:first-child, - .btn .rst-content dl dt .fa-spin.headerlink, - .btn .rst-content h1 .fa-spin.headerlink, - .btn .rst-content h2 .fa-spin.headerlink, - .btn .rst-content h3 .fa-spin.headerlink, - .btn .rst-content h4 .fa-spin.headerlink, - .btn .rst-content h5 .fa-spin.headerlink, - .btn .rst-content h6 .fa-spin.headerlink, - .btn .rst-content p .fa-spin.headerlink, - .btn .rst-content table>caption .fa-spin.headerlink, - .btn .rst-content tt.download span.fa-spin:first-child, - .btn .wy-menu-vertical li button.fa-spin.toctree-expand, - .nav .fa-spin.icon, - .nav .fa.fa-spin, - .nav .rst-content .code-block-caption .fa-spin.headerlink, - .nav .rst-content .eqno .fa-spin.headerlink, - .nav .rst-content .fa-spin.admonition-title, - .nav .rst-content code.download span.fa-spin:first-child, - .nav .rst-content dl dt .fa-spin.headerlink, - .nav .rst-content h1 .fa-spin.headerlink, - .nav .rst-content h2 .fa-spin.headerlink, - .nav .rst-content h3 .fa-spin.headerlink, - .nav .rst-content h4 .fa-spin.headerlink, - .nav .rst-content h5 .fa-spin.headerlink, - .nav .rst-content h6 .fa-spin.headerlink, - .nav .rst-content p .fa-spin.headerlink, - .nav .rst-content table>caption .fa-spin.headerlink, - .nav .rst-content tt.download span.fa-spin:first-child, - .nav .wy-menu-vertical li button.fa-spin.toctree-expand, - .rst-content .btn .fa-spin.admonition-title, - .rst-content .code-block-caption .btn .fa-spin.headerlink, - .rst-content .code-block-caption .nav .fa-spin.headerlink, - .rst-content .eqno .btn .fa-spin.headerlink, - .rst-content .eqno .nav .fa-spin.headerlink, - .rst-content .nav .fa-spin.admonition-title, - .rst-content code.download .btn span.fa-spin:first-child, - .rst-content code.download .nav span.fa-spin:first-child, - .rst-content dl dt .btn .fa-spin.headerlink, - .rst-content dl dt .nav .fa-spin.headerlink, - .rst-content h1 .btn .fa-spin.headerlink, - .rst-content h1 .nav .fa-spin.headerlink, - .rst-content h2 .btn .fa-spin.headerlink, - .rst-content h2 .nav .fa-spin.headerlink, - .rst-content h3 .btn .fa-spin.headerlink, - .rst-content h3 .nav .fa-spin.headerlink, - .rst-content h4 .btn .fa-spin.headerlink, - .rst-content h4 .nav .fa-spin.headerlink, - .rst-content h5 .btn .fa-spin.headerlink, - .rst-content h5 .nav .fa-spin.headerlink, - .rst-content h6 .btn .fa-spin.headerlink, - .rst-content h6 .nav .fa-spin.headerlink, - .rst-content p .btn .fa-spin.headerlink, - .rst-content p .nav .fa-spin.headerlink, - .rst-content table>caption .btn .fa-spin.headerlink, - .rst-content table>caption .nav .fa-spin.headerlink, - .rst-content tt.download .btn 
span.fa-spin:first-child, - .rst-content tt.download .nav span.fa-spin:first-child, - .wy-menu-vertical li .btn button.fa-spin.toctree-expand, - .wy-menu-vertical li .nav button.fa-spin.toctree-expand { - display: inline-block - } - - .btn.fa:before, - .btn.icon:before, - .rst-content .btn.admonition-title:before, - .rst-content .code-block-caption .btn.headerlink:before, - .rst-content .eqno .btn.headerlink:before, - .rst-content code.download span.btn:first-child:before, - .rst-content dl dt .btn.headerlink:before, - .rst-content h1 .btn.headerlink:before, - .rst-content h2 .btn.headerlink:before, - .rst-content h3 .btn.headerlink:before, - .rst-content h4 .btn.headerlink:before, - .rst-content h5 .btn.headerlink:before, - .rst-content h6 .btn.headerlink:before, - .rst-content p .btn.headerlink:before, - .rst-content table>caption .btn.headerlink:before, - .rst-content tt.download span.btn:first-child:before, - .wy-menu-vertical li button.btn.toctree-expand:before { - opacity: .5; - -webkit-transition: opacity .05s ease-in; - -moz-transition: opacity .05s ease-in; - transition: opacity .05s ease-in - } - - .btn.fa:hover:before, - .btn.icon:hover:before, - .rst-content .btn.admonition-title:hover:before, - .rst-content .code-block-caption .btn.headerlink:hover:before, - .rst-content .eqno .btn.headerlink:hover:before, - .rst-content code.download span.btn:first-child:hover:before, - .rst-content dl dt .btn.headerlink:hover:before, - .rst-content h1 .btn.headerlink:hover:before, - .rst-content h2 .btn.headerlink:hover:before, - .rst-content h3 .btn.headerlink:hover:before, - .rst-content h4 .btn.headerlink:hover:before, - .rst-content h5 .btn.headerlink:hover:before, - .rst-content h6 .btn.headerlink:hover:before, - .rst-content p .btn.headerlink:hover:before, - .rst-content table>caption .btn.headerlink:hover:before, - .rst-content tt.download span.btn:first-child:hover:before, - .wy-menu-vertical li button.btn.toctree-expand:hover:before { - opacity: 1 - } - - .btn-mini .fa:before, - .btn-mini .icon:before, - .btn-mini .rst-content .admonition-title:before, - .btn-mini .rst-content .code-block-caption .headerlink:before, - .btn-mini .rst-content .eqno .headerlink:before, - .btn-mini .rst-content code.download span:first-child:before, - .btn-mini .rst-content dl dt .headerlink:before, - .btn-mini .rst-content h1 .headerlink:before, - .btn-mini .rst-content h2 .headerlink:before, - .btn-mini .rst-content h3 .headerlink:before, - .btn-mini .rst-content h4 .headerlink:before, - .btn-mini .rst-content h5 .headerlink:before, - .btn-mini .rst-content h6 .headerlink:before, - .btn-mini .rst-content p .headerlink:before, - .btn-mini .rst-content table>caption .headerlink:before, - .btn-mini .rst-content tt.download span:first-child:before, - .btn-mini .wy-menu-vertical li button.toctree-expand:before, - .rst-content .btn-mini .admonition-title:before, - .rst-content .code-block-caption .btn-mini .headerlink:before, - .rst-content .eqno .btn-mini .headerlink:before, - .rst-content code.download .btn-mini span:first-child:before, - .rst-content dl dt .btn-mini .headerlink:before, - .rst-content h1 .btn-mini .headerlink:before, - .rst-content h2 .btn-mini .headerlink:before, - .rst-content h3 .btn-mini .headerlink:before, - .rst-content h4 .btn-mini .headerlink:before, - .rst-content h5 .btn-mini .headerlink:before, - .rst-content h6 .btn-mini .headerlink:before, - .rst-content p .btn-mini .headerlink:before, - .rst-content table>caption .btn-mini .headerlink:before, - .rst-content tt.download 
.btn-mini span:first-child:before, - .wy-menu-vertical li .btn-mini button.toctree-expand:before { - font-size: 14px; - vertical-align: -15% - } - - .rst-content .admonition, - .rst-content .admonition-todo, - .rst-content .attention, - .rst-content .caution, - .rst-content .danger, - .rst-content .error, - .rst-content .hint, - .rst-content .important, - .rst-content .note, - .rst-content .seealso, - .rst-content .tip, - .rst-content .warning, - .wy-alert { - padding: 12px; - line-height: 24px; - margin-bottom: 24px; - background: #e7f2fa - } - - .rst-content .admonition-title, - .wy-alert-title { - font-weight: 700; - display: block; - color: #fff; - background: #6ab0de; - padding: 6px 12px; - margin: -12px -12px 12px - } - - .rst-content .danger, - .rst-content .error, - .rst-content .wy-alert-danger.admonition, - .rst-content .wy-alert-danger.admonition-todo, - .rst-content .wy-alert-danger.attention, - .rst-content .wy-alert-danger.caution, - .rst-content .wy-alert-danger.hint, - .rst-content .wy-alert-danger.important, - .rst-content .wy-alert-danger.note, - .rst-content .wy-alert-danger.seealso, - .rst-content .wy-alert-danger.tip, - .rst-content .wy-alert-danger.warning, - .wy-alert.wy-alert-danger { - background: #fdf3f2 - } - - .rst-content .danger .admonition-title, - .rst-content .danger .wy-alert-title, - .rst-content .error .admonition-title, - .rst-content .error .wy-alert-title, - .rst-content .wy-alert-danger.admonition-todo .admonition-title, - .rst-content .wy-alert-danger.admonition-todo .wy-alert-title, - .rst-content .wy-alert-danger.admonition .admonition-title, - .rst-content .wy-alert-danger.admonition .wy-alert-title, - .rst-content .wy-alert-danger.attention .admonition-title, - .rst-content .wy-alert-danger.attention .wy-alert-title, - .rst-content .wy-alert-danger.caution .admonition-title, - .rst-content .wy-alert-danger.caution .wy-alert-title, - .rst-content .wy-alert-danger.hint .admonition-title, - .rst-content .wy-alert-danger.hint .wy-alert-title, - .rst-content .wy-alert-danger.important .admonition-title, - .rst-content .wy-alert-danger.important .wy-alert-title, - .rst-content .wy-alert-danger.note .admonition-title, - .rst-content .wy-alert-danger.note .wy-alert-title, - .rst-content .wy-alert-danger.seealso .admonition-title, - .rst-content .wy-alert-danger.seealso .wy-alert-title, - .rst-content .wy-alert-danger.tip .admonition-title, - .rst-content .wy-alert-danger.tip .wy-alert-title, - .rst-content .wy-alert-danger.warning .admonition-title, - .rst-content .wy-alert-danger.warning .wy-alert-title, - .rst-content .wy-alert.wy-alert-danger .admonition-title, - .wy-alert.wy-alert-danger .rst-content .admonition-title, - .wy-alert.wy-alert-danger .wy-alert-title { - background: #f29f97 - } - - .rst-content .admonition-todo, - .rst-content .attention, - .rst-content .caution, - .rst-content .warning, - .rst-content .wy-alert-warning.admonition, - .rst-content .wy-alert-warning.danger, - .rst-content .wy-alert-warning.error, - .rst-content .wy-alert-warning.hint, - .rst-content .wy-alert-warning.important, - .rst-content .wy-alert-warning.note, - .rst-content .wy-alert-warning.seealso, - .rst-content .wy-alert-warning.tip, - .wy-alert.wy-alert-warning { - background: #ffedcc - } - - .rst-content .admonition-todo .admonition-title, - .rst-content .admonition-todo .wy-alert-title, - .rst-content .attention .admonition-title, - .rst-content .attention .wy-alert-title, - .rst-content .caution .admonition-title, - .rst-content .caution .wy-alert-title, - 
.rst-content .warning .admonition-title, - .rst-content .warning .wy-alert-title, - .rst-content .wy-alert-warning.admonition .admonition-title, - .rst-content .wy-alert-warning.admonition .wy-alert-title, - .rst-content .wy-alert-warning.danger .admonition-title, - .rst-content .wy-alert-warning.danger .wy-alert-title, - .rst-content .wy-alert-warning.error .admonition-title, - .rst-content .wy-alert-warning.error .wy-alert-title, - .rst-content .wy-alert-warning.hint .admonition-title, - .rst-content .wy-alert-warning.hint .wy-alert-title, - .rst-content .wy-alert-warning.important .admonition-title, - .rst-content .wy-alert-warning.important .wy-alert-title, - .rst-content .wy-alert-warning.note .admonition-title, - .rst-content .wy-alert-warning.note .wy-alert-title, - .rst-content .wy-alert-warning.seealso .admonition-title, - .rst-content .wy-alert-warning.seealso .wy-alert-title, - .rst-content .wy-alert-warning.tip .admonition-title, - .rst-content .wy-alert-warning.tip .wy-alert-title, - .rst-content .wy-alert.wy-alert-warning .admonition-title, - .wy-alert.wy-alert-warning .rst-content .admonition-title, - .wy-alert.wy-alert-warning .wy-alert-title { - background: #f0b37e - } - - .rst-content .note, - .rst-content .seealso, - .rst-content .wy-alert-info.admonition, - .rst-content .wy-alert-info.admonition-todo, - .rst-content .wy-alert-info.attention, - .rst-content .wy-alert-info.caution, - .rst-content .wy-alert-info.danger, - .rst-content .wy-alert-info.error, - .rst-content .wy-alert-info.hint, - .rst-content .wy-alert-info.important, - .rst-content .wy-alert-info.tip, - .rst-content .wy-alert-info.warning, - .wy-alert.wy-alert-info { - background: #e7f2fa - } - - .rst-content .note .admonition-title, - .rst-content .note .wy-alert-title, - .rst-content .seealso .admonition-title, - .rst-content .seealso .wy-alert-title, - .rst-content .wy-alert-info.admonition-todo .admonition-title, - .rst-content .wy-alert-info.admonition-todo .wy-alert-title, - .rst-content .wy-alert-info.admonition .admonition-title, - .rst-content .wy-alert-info.admonition .wy-alert-title, - .rst-content .wy-alert-info.attention .admonition-title, - .rst-content .wy-alert-info.attention .wy-alert-title, - .rst-content .wy-alert-info.caution .admonition-title, - .rst-content .wy-alert-info.caution .wy-alert-title, - .rst-content .wy-alert-info.danger .admonition-title, - .rst-content .wy-alert-info.danger .wy-alert-title, - .rst-content .wy-alert-info.error .admonition-title, - .rst-content .wy-alert-info.error .wy-alert-title, - .rst-content .wy-alert-info.hint .admonition-title, - .rst-content .wy-alert-info.hint .wy-alert-title, - .rst-content .wy-alert-info.important .admonition-title, - .rst-content .wy-alert-info.important .wy-alert-title, - .rst-content .wy-alert-info.tip .admonition-title, - .rst-content .wy-alert-info.tip .wy-alert-title, - .rst-content .wy-alert-info.warning .admonition-title, - .rst-content .wy-alert-info.warning .wy-alert-title, - .rst-content .wy-alert.wy-alert-info .admonition-title, - .wy-alert.wy-alert-info .rst-content .admonition-title, - .wy-alert.wy-alert-info .wy-alert-title { - background: #6ab0de - } - - .rst-content .hint, - .rst-content .important, - .rst-content .tip, - .rst-content .wy-alert-success.admonition, - .rst-content .wy-alert-success.admonition-todo, - .rst-content .wy-alert-success.attention, - .rst-content .wy-alert-success.caution, - .rst-content .wy-alert-success.danger, - .rst-content .wy-alert-success.error, - .rst-content 
.wy-alert-success.note, - .rst-content .wy-alert-success.seealso, - .rst-content .wy-alert-success.warning, - .wy-alert.wy-alert-success { - background: #dbfaf4 - } - - .rst-content .hint .admonition-title, - .rst-content .hint .wy-alert-title, - .rst-content .important .admonition-title, - .rst-content .important .wy-alert-title, - .rst-content .tip .admonition-title, - .rst-content .tip .wy-alert-title, - .rst-content .wy-alert-success.admonition-todo .admonition-title, - .rst-content .wy-alert-success.admonition-todo .wy-alert-title, - .rst-content .wy-alert-success.admonition .admonition-title, - .rst-content .wy-alert-success.admonition .wy-alert-title, - .rst-content .wy-alert-success.attention .admonition-title, - .rst-content .wy-alert-success.attention .wy-alert-title, - .rst-content .wy-alert-success.caution .admonition-title, - .rst-content .wy-alert-success.caution .wy-alert-title, - .rst-content .wy-alert-success.danger .admonition-title, - .rst-content .wy-alert-success.danger .wy-alert-title, - .rst-content .wy-alert-success.error .admonition-title, - .rst-content .wy-alert-success.error .wy-alert-title, - .rst-content .wy-alert-success.note .admonition-title, - .rst-content .wy-alert-success.note .wy-alert-title, - .rst-content .wy-alert-success.seealso .admonition-title, - .rst-content .wy-alert-success.seealso .wy-alert-title, - .rst-content .wy-alert-success.warning .admonition-title, - .rst-content .wy-alert-success.warning .wy-alert-title, - .rst-content .wy-alert.wy-alert-success .admonition-title, - .wy-alert.wy-alert-success .rst-content .admonition-title, - .wy-alert.wy-alert-success .wy-alert-title { - background: #1abc9c - } - - .rst-content .wy-alert-neutral.admonition, - .rst-content .wy-alert-neutral.admonition-todo, - .rst-content .wy-alert-neutral.attention, - .rst-content .wy-alert-neutral.caution, - .rst-content .wy-alert-neutral.danger, - .rst-content .wy-alert-neutral.error, - .rst-content .wy-alert-neutral.hint, - .rst-content .wy-alert-neutral.important, - .rst-content .wy-alert-neutral.note, - .rst-content .wy-alert-neutral.seealso, - .rst-content .wy-alert-neutral.tip, - .rst-content .wy-alert-neutral.warning, - .wy-alert.wy-alert-neutral { - background: #f3f6f6 - } - - .rst-content .wy-alert-neutral.admonition-todo .admonition-title, - .rst-content .wy-alert-neutral.admonition-todo .wy-alert-title, - .rst-content .wy-alert-neutral.admonition .admonition-title, - .rst-content .wy-alert-neutral.admonition .wy-alert-title, - .rst-content .wy-alert-neutral.attention .admonition-title, - .rst-content .wy-alert-neutral.attention .wy-alert-title, - .rst-content .wy-alert-neutral.caution .admonition-title, - .rst-content .wy-alert-neutral.caution .wy-alert-title, - .rst-content .wy-alert-neutral.danger .admonition-title, - .rst-content .wy-alert-neutral.danger .wy-alert-title, - .rst-content .wy-alert-neutral.error .admonition-title, - .rst-content .wy-alert-neutral.error .wy-alert-title, - .rst-content .wy-alert-neutral.hint .admonition-title, - .rst-content .wy-alert-neutral.hint .wy-alert-title, - .rst-content .wy-alert-neutral.important .admonition-title, - .rst-content .wy-alert-neutral.important .wy-alert-title, - .rst-content .wy-alert-neutral.note .admonition-title, - .rst-content .wy-alert-neutral.note .wy-alert-title, - .rst-content .wy-alert-neutral.seealso .admonition-title, - .rst-content .wy-alert-neutral.seealso .wy-alert-title, - .rst-content .wy-alert-neutral.tip .admonition-title, - .rst-content .wy-alert-neutral.tip .wy-alert-title, - 
.rst-content .wy-alert-neutral.warning .admonition-title, - .rst-content .wy-alert-neutral.warning .wy-alert-title, - .rst-content .wy-alert.wy-alert-neutral .admonition-title, - .wy-alert.wy-alert-neutral .rst-content .admonition-title, - .wy-alert.wy-alert-neutral .wy-alert-title { - color: #404040; - background: #e1e4e5 - } - - .rst-content .wy-alert-neutral.admonition-todo a, - .rst-content .wy-alert-neutral.admonition a, - .rst-content .wy-alert-neutral.attention a, - .rst-content .wy-alert-neutral.caution a, - .rst-content .wy-alert-neutral.danger a, - .rst-content .wy-alert-neutral.error a, - .rst-content .wy-alert-neutral.hint a, - .rst-content .wy-alert-neutral.important a, - .rst-content .wy-alert-neutral.note a, - .rst-content .wy-alert-neutral.seealso a, - .rst-content .wy-alert-neutral.tip a, - .rst-content .wy-alert-neutral.warning a, - .wy-alert.wy-alert-neutral a { - color: #2980b9 - } - - .rst-content .admonition-todo p:last-child, - .rst-content .admonition p:last-child, - .rst-content .attention p:last-child, - .rst-content .caution p:last-child, - .rst-content .danger p:last-child, - .rst-content .error p:last-child, - .rst-content .hint p:last-child, - .rst-content .important p:last-child, - .rst-content .note p:last-child, - .rst-content .seealso p:last-child, - .rst-content .tip p:last-child, - .rst-content .warning p:last-child, - .wy-alert p:last-child { - margin-bottom: 0 - } - - .wy-tray-container { - position: fixed; - bottom: 0; - left: 0; - z-index: 600 - } - - .wy-tray-container li { - display: block; - width: 300px; - background: transparent; - color: #fff; - text-align: center; - box-shadow: 0 5px 5px 0 rgba(0, 0, 0, .1); - padding: 0 24px; - min-width: 20%; - opacity: 0; - height: 0; - line-height: 56px; - overflow: hidden; - -webkit-transition: all .3s ease-in; - -moz-transition: all .3s ease-in; - transition: all .3s ease-in - } - - .wy-tray-container li.wy-tray-item-success { - background: #27ae60 - } - - .wy-tray-container li.wy-tray-item-info { - background: #2980b9 - } - - .wy-tray-container li.wy-tray-item-warning { - background: #e67e22 - } - - .wy-tray-container li.wy-tray-item-danger { - background: #e74c3c - } - - .wy-tray-container li.on { - opacity: 1; - height: 56px - } - - @media screen and (max-width:768px) { - .wy-tray-container { - bottom: auto; - top: 0; - width: 100% - } - - .wy-tray-container li { - width: 100% - } - } - - button { - font-size: 100%; - margin: 0; - vertical-align: baseline; - *vertical-align: middle; - cursor: pointer; - line-height: normal; - -webkit-appearance: button; - *overflow: visible - } - - button::-moz-focus-inner, - input::-moz-focus-inner { - border: 0; - padding: 0 - } - - button[disabled] { - cursor: default - } - - .btn { - display: inline-block; - border-radius: 2px; - line-height: normal; - white-space: nowrap; - text-align: center; - cursor: pointer; - font-size: 100%; - padding: 6px 12px 8px; - color: #fff; - border: 1px solid rgba(0, 0, 0, .1); - background-color: #27ae60; - text-decoration: none; - font-weight: 400; - font-family: Lato, proxima-nova, Helvetica Neue, Arial, sans-serif; - box-shadow: inset 0 1px 2px -1px hsla(0, 0%, 100%, .5), inset 0 -2px 0 0 rgba(0, 0, 0, .1); - outline-none: false; - vertical-align: middle; - *display: inline; - zoom: 1; - -webkit-user-drag: none; - -webkit-user-select: none; - -moz-user-select: none; - -ms-user-select: none; - user-select: none; - -webkit-transition: all .1s linear; - -moz-transition: all .1s linear; - transition: all .1s linear - } - - 
.btn-hover { - background: #2e8ece; - color: #fff - } - - .btn:hover { - background: #2cc36b; - color: #fff - } - - .btn:focus { - background: #2cc36b; - outline: 0 - } - - .btn:active { - box-shadow: inset 0 -1px 0 0 rgba(0, 0, 0, .05), inset 0 2px 0 0 rgba(0, 0, 0, .1); - padding: 8px 12px 6px - } - - .btn:visited { - color: #fff - } - - .btn-disabled, - .btn-disabled:active, - .btn-disabled:focus, - .btn-disabled:hover, - .btn:disabled { - background-image: none; - filter: progid:DXImageTransform.Microsoft.gradient(enabled=false); - filter: alpha(opacity=40); - opacity: .4; - cursor: not-allowed; - box-shadow: none - } - - .btn::-moz-focus-inner { - padding: 0; - border: 0 - } - - .btn-small { - font-size: 80% - } - - .btn-info { - background-color: #2980b9 !important - } - - .btn-info:hover { - background-color: #2e8ece !important - } - - .btn-neutral { - background-color: #f3f6f6 !important; - color: #404040 !important - } - - .btn-neutral:hover { - background-color: #e5ebeb !important; - color: #404040 - } - - .btn-neutral:visited { - color: #404040 !important - } - - .btn-success { - background-color: #27ae60 !important - } - - .btn-success:hover { - background-color: #295 !important - } - - .btn-danger { - background-color: #e74c3c !important - } - - .btn-danger:hover { - background-color: #ea6153 !important - } - - .btn-warning { - background-color: #e67e22 !important - } - - .btn-warning:hover { - background-color: #e98b39 !important - } - - .btn-invert { - background-color: #222 - } - - .btn-invert:hover { - background-color: #2f2f2f !important - } - - .btn-link { - background-color: transparent !important; - color: #2980b9; - box-shadow: none; - border-color: transparent !important - } - - .btn-link:active, - .btn-link:hover { - background-color: transparent !important; - color: #409ad5 !important; - box-shadow: none - } - - .btn-link:visited { - color: #9b59b6 - } - - .wy-btn-group .btn, - .wy-control .btn { - vertical-align: middle - } - - .wy-btn-group { - margin-bottom: 24px; - *zoom: 1 - } - - .wy-btn-group:after, - .wy-btn-group:before { - display: table; - content: "" - } - - .wy-btn-group:after { - clear: both - } - - .wy-dropdown { - position: relative; - display: inline-block - } - - .wy-dropdown-active .wy-dropdown-menu { - display: block - } - - .wy-dropdown-menu { - position: absolute; - left: 0; - display: none; - float: left; - top: 100%; - min-width: 100%; - background: #fcfcfc; - z-index: 100; - border: 1px solid #cfd7dd; - box-shadow: 0 2px 2px 0 rgba(0, 0, 0, .1); - padding: 12px - } - - .wy-dropdown-menu>dd>a { - display: block; - clear: both; - color: #404040; - white-space: nowrap; - font-size: 90%; - padding: 0 12px; - cursor: pointer - } - - .wy-dropdown-menu>dd>a:hover { - background: #2980b9; - color: #fff - } - - .wy-dropdown-menu>dd.divider { - border-top: 1px solid #cfd7dd; - margin: 6px 0 - } - - .wy-dropdown-menu>dd.search { - padding-bottom: 12px - } - - .wy-dropdown-menu>dd.search input[type=search] { - width: 100% - } - - .wy-dropdown-menu>dd.call-to-action { - background: #e3e3e3; - text-transform: uppercase; - font-weight: 500; - font-size: 80% - } - - .wy-dropdown-menu>dd.call-to-action:hover { - background: #e3e3e3 - } - - .wy-dropdown-menu>dd.call-to-action .btn { - color: #fff - } - - .wy-dropdown.wy-dropdown-up .wy-dropdown-menu { - bottom: 100%; - top: auto; - left: auto; - right: 0 - } - - .wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu { - background: #fcfcfc; - margin-top: 2px - } - - .wy-dropdown.wy-dropdown-bubble 
.wy-dropdown-menu a { - padding: 6px 12px - } - - .wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a:hover { - background: #2980b9; - color: #fff - } - - .wy-dropdown.wy-dropdown-left .wy-dropdown-menu { - right: 0; - left: auto; - text-align: right - } - - .wy-dropdown-arrow:before { - content: " "; - border-bottom: 5px solid #f5f5f5; - border-left: 5px solid transparent; - border-right: 5px solid transparent; - position: absolute; - display: block; - top: -4px; - left: 50%; - margin-left: -3px - } - - .wy-dropdown-arrow.wy-dropdown-arrow-left:before { - left: 11px - } - - .wy-form-stacked select { - display: block - } - - .wy-form-aligned .wy-help-inline, - .wy-form-aligned input, - .wy-form-aligned label, - .wy-form-aligned select, - .wy-form-aligned textarea { - display: inline-block; - *display: inline; - *zoom: 1; - vertical-align: middle - } - - .wy-form-aligned .wy-control-group>label { - display: inline-block; - vertical-align: middle; - width: 10em; - margin: 6px 12px 0 0; - float: left - } - - .wy-form-aligned .wy-control { - float: left - } - - .wy-form-aligned .wy-control label { - display: block - } - - .wy-form-aligned .wy-control select { - margin-top: 6px - } - - fieldset { - margin: 0 - } - - fieldset, - legend { - border: 0; - padding: 0 - } - - legend { - width: 100%; - white-space: normal; - margin-bottom: 24px; - font-size: 150%; - *margin-left: -7px - } - - label, - legend { - display: block - } - - label { - margin: 0 0 .3125em; - color: #333; - font-size: 90% - } - - input, - select, - textarea { - font-size: 100%; - margin: 0; - vertical-align: baseline; - *vertical-align: middle - } - - .wy-control-group { - margin-bottom: 24px; - max-width: 1200px; - margin-left: auto; - margin-right: auto; - *zoom: 1 - } - - .wy-control-group:after, - .wy-control-group:before { - display: table; - content: "" - } - - .wy-control-group:after { - clear: both - } - - .wy-control-group.wy-control-group-required>label:after { - content: " *"; - color: #e74c3c - } - - .wy-control-group .wy-form-full, - .wy-control-group .wy-form-halves, - .wy-control-group .wy-form-thirds { - padding-bottom: 12px - } - - .wy-control-group .wy-form-full input[type=color], - .wy-control-group .wy-form-full input[type=date], - .wy-control-group .wy-form-full input[type=datetime-local], - .wy-control-group .wy-form-full input[type=datetime], - .wy-control-group .wy-form-full input[type=email], - .wy-control-group .wy-form-full input[type=month], - .wy-control-group .wy-form-full input[type=number], - .wy-control-group .wy-form-full input[type=password], - .wy-control-group .wy-form-full input[type=search], - .wy-control-group .wy-form-full input[type=tel], - .wy-control-group .wy-form-full input[type=text], - .wy-control-group .wy-form-full input[type=time], - .wy-control-group .wy-form-full input[type=url], - .wy-control-group .wy-form-full input[type=week], - .wy-control-group .wy-form-full select, - .wy-control-group .wy-form-halves input[type=color], - .wy-control-group .wy-form-halves input[type=date], - .wy-control-group .wy-form-halves input[type=datetime-local], - .wy-control-group .wy-form-halves input[type=datetime], - .wy-control-group .wy-form-halves input[type=email], - .wy-control-group .wy-form-halves input[type=month], - .wy-control-group .wy-form-halves input[type=number], - .wy-control-group .wy-form-halves input[type=password], - .wy-control-group .wy-form-halves input[type=search], - .wy-control-group .wy-form-halves input[type=tel], - .wy-control-group .wy-form-halves 
input[type=text], - .wy-control-group .wy-form-halves input[type=time], - .wy-control-group .wy-form-halves input[type=url], - .wy-control-group .wy-form-halves input[type=week], - .wy-control-group .wy-form-halves select, - .wy-control-group .wy-form-thirds input[type=color], - .wy-control-group .wy-form-thirds input[type=date], - .wy-control-group .wy-form-thirds input[type=datetime-local], - .wy-control-group .wy-form-thirds input[type=datetime], - .wy-control-group .wy-form-thirds input[type=email], - .wy-control-group .wy-form-thirds input[type=month], - .wy-control-group .wy-form-thirds input[type=number], - .wy-control-group .wy-form-thirds input[type=password], - .wy-control-group .wy-form-thirds input[type=search], - .wy-control-group .wy-form-thirds input[type=tel], - .wy-control-group .wy-form-thirds input[type=text], - .wy-control-group .wy-form-thirds input[type=time], - .wy-control-group .wy-form-thirds input[type=url], - .wy-control-group .wy-form-thirds input[type=week], - .wy-control-group .wy-form-thirds select { - width: 100% - } - - .wy-control-group .wy-form-full { - float: left; - display: block; - width: 100%; - margin-right: 0 - } - - .wy-control-group .wy-form-full:last-child { - margin-right: 0 - } - - .wy-control-group .wy-form-halves { - float: left; - display: block; - margin-right: 2.35765%; - width: 48.82117% - } - - .wy-control-group .wy-form-halves:last-child, - .wy-control-group .wy-form-halves:nth-of-type(2n) { - margin-right: 0 - } - - .wy-control-group .wy-form-halves:nth-of-type(odd) { - clear: left - } - - .wy-control-group .wy-form-thirds { - float: left; - display: block; - margin-right: 2.35765%; - width: 31.76157% - } - - .wy-control-group .wy-form-thirds:last-child, - .wy-control-group .wy-form-thirds:nth-of-type(3n) { - margin-right: 0 - } - - .wy-control-group .wy-form-thirds:nth-of-type(3n+1) { - clear: left - } - - .wy-control-group.wy-control-group-no-input .wy-control, - .wy-control-no-input { - margin: 6px 0 0; - font-size: 90% - } - - .wy-control-no-input { - display: inline-block - } - - .wy-control-group.fluid-input input[type=color], - .wy-control-group.fluid-input input[type=date], - .wy-control-group.fluid-input input[type=datetime-local], - .wy-control-group.fluid-input input[type=datetime], - .wy-control-group.fluid-input input[type=email], - .wy-control-group.fluid-input input[type=month], - .wy-control-group.fluid-input input[type=number], - .wy-control-group.fluid-input input[type=password], - .wy-control-group.fluid-input input[type=search], - .wy-control-group.fluid-input input[type=tel], - .wy-control-group.fluid-input input[type=text], - .wy-control-group.fluid-input input[type=time], - .wy-control-group.fluid-input input[type=url], - .wy-control-group.fluid-input input[type=week] { - width: 100% - } - - .wy-form-message-inline { - padding-left: .3em; - color: #666; - font-size: 90% - } - - .wy-form-message { - display: block; - color: #999; - font-size: 70%; - margin-top: .3125em; - font-style: italic - } - - .wy-form-message p { - font-size: inherit; - font-style: italic; - margin-bottom: 6px - } - - .wy-form-message p:last-child { - margin-bottom: 0 - } - - input { - line-height: normal - } - - input[type=button], - input[type=reset], - input[type=submit] { - -webkit-appearance: button; - cursor: pointer; - font-family: Lato, proxima-nova, Helvetica Neue, Arial, sans-serif; - *overflow: visible - } - - input[type=color], - input[type=date], - input[type=datetime-local], - input[type=datetime], - input[type=email], - 
input[type=month], - input[type=number], - input[type=password], - input[type=search], - input[type=tel], - input[type=text], - input[type=time], - input[type=url], - input[type=week] { - -webkit-appearance: none; - padding: 6px; - display: inline-block; - border: 1px solid #ccc; - font-size: 80%; - font-family: Lato, proxima-nova, Helvetica Neue, Arial, sans-serif; - box-shadow: inset 0 1px 3px #ddd; - border-radius: 0; - -webkit-transition: border .3s linear; - -moz-transition: border .3s linear; - transition: border .3s linear - } - - input[type=datetime-local] { - padding: .34375em .625em - } - - input[disabled] { - cursor: default - } - - input[type=checkbox], - input[type=radio] { - padding: 0; - margin-right: .3125em; - *height: 13px; - *width: 13px - } - - input[type=checkbox], - input[type=radio], - input[type=search] { - -webkit-box-sizing: border-box; - -moz-box-sizing: border-box; - box-sizing: border-box - } - - input[type=search]::-webkit-search-cancel-button, - input[type=search]::-webkit-search-decoration { - -webkit-appearance: none - } - - input[type=color]:focus, - input[type=date]:focus, - input[type=datetime-local]:focus, - input[type=datetime]:focus, - input[type=email]:focus, - input[type=month]:focus, - input[type=number]:focus, - input[type=password]:focus, - input[type=search]:focus, - input[type=tel]:focus, - input[type=text]:focus, - input[type=time]:focus, - input[type=url]:focus, - input[type=week]:focus { - outline: 0; - outline: thin dotted\9; - border-color: #333 - } - - input.no-focus:focus { - border-color: #ccc !important - } - - input[type=checkbox]:focus, - input[type=file]:focus, - input[type=radio]:focus { - outline: thin dotted #333; - outline: 1px auto #129fea - } - - input[type=color][disabled], - input[type=date][disabled], - input[type=datetime-local][disabled], - input[type=datetime][disabled], - input[type=email][disabled], - input[type=month][disabled], - input[type=number][disabled], - input[type=password][disabled], - input[type=search][disabled], - input[type=tel][disabled], - input[type=text][disabled], - input[type=time][disabled], - input[type=url][disabled], - input[type=week][disabled] { - cursor: not-allowed; - background-color: #fafafa - } - - input:focus:invalid, - select:focus:invalid, - textarea:focus:invalid { - color: #e74c3c; - border: 1px solid #e74c3c - } - - input:focus:invalid:focus, - select:focus:invalid:focus, - textarea:focus:invalid:focus { - border-color: #e74c3c - } - - input[type=checkbox]:focus:invalid:focus, - input[type=file]:focus:invalid:focus, - input[type=radio]:focus:invalid:focus { - outline-color: #e74c3c - } - - input.wy-input-large { - padding: 12px; - font-size: 100% - } - - textarea { - overflow: auto; - vertical-align: top; - width: 100%; - font-family: Lato, proxima-nova, Helvetica Neue, Arial, sans-serif - } - - select, - textarea { - padding: .5em .625em; - display: inline-block; - border: 1px solid #ccc; - font-size: 80%; - box-shadow: inset 0 1px 3px #ddd; - -webkit-transition: border .3s linear; - -moz-transition: border .3s linear; - transition: border .3s linear - } - - select { - border: 1px solid #ccc; - background-color: #fff - } - - select[multiple] { - height: auto - } - - select:focus, - textarea:focus { - outline: 0 - } - - input[readonly], - select[disabled], - select[readonly], - textarea[disabled], - textarea[readonly] { - cursor: not-allowed; - background-color: #fafafa - } - - input[type=checkbox][disabled], - input[type=radio][disabled] { - cursor: not-allowed - } - - 
.wy-checkbox, - .wy-radio { - margin: 6px 0; - color: #404040; - display: block - } - - .wy-checkbox input, - .wy-radio input { - vertical-align: baseline - } - - .wy-form-message-inline { - display: inline-block; - *display: inline; - *zoom: 1; - vertical-align: middle - } - - .wy-input-prefix, - .wy-input-suffix { - white-space: nowrap; - padding: 6px - } - - .wy-input-prefix .wy-input-context, - .wy-input-suffix .wy-input-context { - line-height: 27px; - padding: 0 8px; - display: inline-block; - font-size: 80%; - background-color: #f3f6f6; - border: 1px solid #ccc; - color: #999 - } - - .wy-input-suffix .wy-input-context { - border-left: 0 - } - - .wy-input-prefix .wy-input-context { - border-right: 0 - } - - .wy-switch { - position: relative; - display: block; - height: 24px; - margin-top: 12px; - cursor: pointer - } - - .wy-switch:before { - left: 0; - top: 0; - width: 36px; - height: 12px; - background: #ccc - } - - .wy-switch:after, - .wy-switch:before { - position: absolute; - content: ""; - display: block; - border-radius: 4px; - -webkit-transition: all .2s ease-in-out; - -moz-transition: all .2s ease-in-out; - transition: all .2s ease-in-out - } - - .wy-switch:after { - width: 18px; - height: 18px; - background: #999; - left: -3px; - top: -3px - } - - .wy-switch span { - position: absolute; - left: 48px; - display: block; - font-size: 12px; - color: #ccc; - line-height: 1 - } - - .wy-switch.active:before { - background: #1e8449 - } - - .wy-switch.active:after { - left: 24px; - background: #27ae60 - } - - .wy-switch.disabled { - cursor: not-allowed; - opacity: .8 - } - - .wy-control-group.wy-control-group-error .wy-form-message, - .wy-control-group.wy-control-group-error>label { - color: #e74c3c - } - - .wy-control-group.wy-control-group-error input[type=color], - .wy-control-group.wy-control-group-error input[type=date], - .wy-control-group.wy-control-group-error input[type=datetime-local], - .wy-control-group.wy-control-group-error input[type=datetime], - .wy-control-group.wy-control-group-error input[type=email], - .wy-control-group.wy-control-group-error input[type=month], - .wy-control-group.wy-control-group-error input[type=number], - .wy-control-group.wy-control-group-error input[type=password], - .wy-control-group.wy-control-group-error input[type=search], - .wy-control-group.wy-control-group-error input[type=tel], - .wy-control-group.wy-control-group-error input[type=text], - .wy-control-group.wy-control-group-error input[type=time], - .wy-control-group.wy-control-group-error input[type=url], - .wy-control-group.wy-control-group-error input[type=week], - .wy-control-group.wy-control-group-error textarea { - border: 1px solid #e74c3c - } - - .wy-inline-validate { - white-space: nowrap - } - - .wy-inline-validate .wy-input-context { - padding: .5em .625em; - display: inline-block; - font-size: 80% - } - - .wy-inline-validate.wy-inline-validate-success .wy-input-context { - color: #27ae60 - } - - .wy-inline-validate.wy-inline-validate-danger .wy-input-context { - color: #e74c3c - } - - .wy-inline-validate.wy-inline-validate-warning .wy-input-context { - color: #e67e22 - } - - .wy-inline-validate.wy-inline-validate-info .wy-input-context { - color: #2980b9 - } - - .rotate-90 { - -webkit-transform: rotate(90deg); - -moz-transform: rotate(90deg); - -ms-transform: rotate(90deg); - -o-transform: rotate(90deg); - transform: rotate(90deg) - } - - .rotate-180 { - -webkit-transform: rotate(180deg); - -moz-transform: rotate(180deg); - -ms-transform: rotate(180deg); - -o-transform: 
rotate(180deg); - transform: rotate(180deg) - } - - .rotate-270 { - -webkit-transform: rotate(270deg); - -moz-transform: rotate(270deg); - -ms-transform: rotate(270deg); - -o-transform: rotate(270deg); - transform: rotate(270deg) - } - - .mirror { - -webkit-transform: scaleX(-1); - -moz-transform: scaleX(-1); - -ms-transform: scaleX(-1); - -o-transform: scaleX(-1); - transform: scaleX(-1) - } - - .mirror.rotate-90 { - -webkit-transform: scaleX(-1) rotate(90deg); - -moz-transform: scaleX(-1) rotate(90deg); - -ms-transform: scaleX(-1) rotate(90deg); - -o-transform: scaleX(-1) rotate(90deg); - transform: scaleX(-1) rotate(90deg) - } - - .mirror.rotate-180 { - -webkit-transform: scaleX(-1) rotate(180deg); - -moz-transform: scaleX(-1) rotate(180deg); - -ms-transform: scaleX(-1) rotate(180deg); - -o-transform: scaleX(-1) rotate(180deg); - transform: scaleX(-1) rotate(180deg) - } - - .mirror.rotate-270 { - -webkit-transform: scaleX(-1) rotate(270deg); - -moz-transform: scaleX(-1) rotate(270deg); - -ms-transform: scaleX(-1) rotate(270deg); - -o-transform: scaleX(-1) rotate(270deg); - transform: scaleX(-1) rotate(270deg) - } - - @media only screen and (max-width:480px) { - .wy-form button[type=submit] { - margin: .7em 0 0 - } - - .wy-form input[type=color], - .wy-form input[type=date], - .wy-form input[type=datetime-local], - .wy-form input[type=datetime], - .wy-form input[type=email], - .wy-form input[type=month], - .wy-form input[type=number], - .wy-form input[type=password], - .wy-form input[type=search], - .wy-form input[type=tel], - .wy-form input[type=text], - .wy-form input[type=time], - .wy-form input[type=url], - .wy-form input[type=week], - .wy-form label { - margin-bottom: .3em; - display: block - } - - .wy-form input[type=color], - .wy-form input[type=date], - .wy-form input[type=datetime-local], - .wy-form input[type=datetime], - .wy-form input[type=email], - .wy-form input[type=month], - .wy-form input[type=number], - .wy-form input[type=password], - .wy-form input[type=search], - .wy-form input[type=tel], - .wy-form input[type=time], - .wy-form input[type=url], - .wy-form input[type=week] { - margin-bottom: 0 - } - - .wy-form-aligned .wy-control-group label { - margin-bottom: .3em; - text-align: left; - display: block; - width: 100% - } - - .wy-form-aligned .wy-control { - margin: 1.5em 0 0 - } - - .wy-form-message, - .wy-form-message-inline, - .wy-form .wy-help-inline { - display: block; - font-size: 80%; - padding: 6px 0 - } - } - - @media screen and (max-width:768px) { - .tablet-hide { - display: none - } - } - - @media screen and (max-width:480px) { - .mobile-hide { - display: none - } - } - - .float-left { - float: left - } - - .float-right { - float: right - } - - .full-width { - width: 100% - } - - .rst-content table.docutils, - .rst-content table.field-list, - .wy-table { - border-collapse: collapse; - border-spacing: 0; - empty-cells: show; - margin-bottom: 24px - } - - .rst-content table.docutils caption, - .rst-content table.field-list caption, - .wy-table caption { - color: #000; - font: italic 85%/1 arial, sans-serif; - padding: 1em 0; - text-align: center - } - - .rst-content table.docutils td, - .rst-content table.docutils th, - .rst-content table.field-list td, - .rst-content table.field-list th, - .wy-table td, - .wy-table th { - font-size: 90%; - margin: 0; - overflow: visible; - padding: 8px 16px - } - - .rst-content table.docutils td:first-child, - .rst-content table.docutils th:first-child, - .rst-content table.field-list td:first-child, - .rst-content 
table.field-list th:first-child, - .wy-table td:first-child, - .wy-table th:first-child { - border-left-width: 0 - } - - .rst-content table.docutils thead, - .rst-content table.field-list thead, - .wy-table thead { - color: #000; - text-align: left; - vertical-align: bottom; - white-space: nowrap - } - - .rst-content table.docutils thead th, - .rst-content table.field-list thead th, - .wy-table thead th { - font-weight: 700; - border-bottom: 2px solid #e1e4e5 - } - - .rst-content table.docutils td, - .rst-content table.field-list td, - .wy-table td { - background-color: transparent; - vertical-align: middle - } - - .rst-content table.docutils td p, - .rst-content table.field-list td p, - .wy-table td p { - line-height: 18px - } - - .rst-content table.docutils td p:last-child, - .rst-content table.field-list td p:last-child, - .wy-table td p:last-child { - margin-bottom: 0 - } - - .rst-content table.docutils .wy-table-cell-min, - .rst-content table.field-list .wy-table-cell-min, - .wy-table .wy-table-cell-min { - width: 1%; - padding-right: 0 - } - - .rst-content table.docutils .wy-table-cell-min input[type=checkbox], - .rst-content table.field-list .wy-table-cell-min input[type=checkbox], - .wy-table .wy-table-cell-min input[type=checkbox] { - margin: 0 - } - - .wy-table-secondary { - color: grey; - font-size: 90% - } - - .wy-table-tertiary { - color: grey; - font-size: 80% - } - - .rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td, - .wy-table-backed, - .wy-table-odd td, - .wy-table-striped tr:nth-child(2n-1) td { - background-color: #f3f6f6 - } - - .rst-content table.docutils, - .wy-table-bordered-all { - border: 1px solid #e1e4e5 - } - - .rst-content table.docutils td, - .wy-table-bordered-all td { - border-bottom: 1px solid #e1e4e5; - border-left: 1px solid #e1e4e5 - } - - .rst-content table.docutils tbody>tr:last-child td, - .wy-table-bordered-all tbody>tr:last-child td { - border-bottom-width: 0 - } - - .wy-table-bordered { - border: 1px solid #e1e4e5 - } - - .wy-table-bordered-rows td { - border-bottom: 1px solid #e1e4e5 - } - - .wy-table-bordered-rows tbody>tr:last-child td { - border-bottom-width: 0 - } - - .wy-table-horizontal td, - .wy-table-horizontal th { - border-width: 0 0 1px; - border-bottom: 1px solid #e1e4e5 - } - - .wy-table-horizontal tbody>tr:last-child td { - border-bottom-width: 0 - } - - .wy-table-responsive { - margin-bottom: 24px; - max-width: 100%; - overflow: auto - } - - .wy-table-responsive table { - margin-bottom: 0 !important - } - - .wy-table-responsive table td, - .wy-table-responsive table th { - white-space: nowrap - } - - a { - color: #2980b9; - text-decoration: none; - cursor: pointer - } - - a:hover { - color: #3091d1 - } - - a:visited { - color: #9b59b6 - } - - html { - height: 100% - } - - body, - html { - overflow-x: hidden - } - - body { - font-family: Lato, proxima-nova, Helvetica Neue, Arial, sans-serif; - font-weight: 400; - color: #404040; - line-height: 1.5; - min-height: 100%; - background: #edf0f2 - } - - .wy-text-left { - text-align: left - } - - .wy-text-center { - text-align: center - } - - .wy-text-right { - text-align: right - } - - .wy-text-large { - font-size: 120% - } - - .wy-text-normal { - font-size: 100% - } - - .wy-text-small, - small { - font-size: 80% - } - - .wy-text-strike { - text-decoration: line-through - } - - .wy-text-warning { - color: #e67e22 !important - } - - a.wy-text-warning:hover { - color: #eb9950 !important - } - - .wy-text-info { - color: #2980b9 !important - } - - a.wy-text-info:hover { - 
color: #409ad5 !important - } - - .wy-text-success { - color: #27ae60 !important - } - - a.wy-text-success:hover { - color: #36d278 !important - } - - .wy-text-danger { - color: #e74c3c !important - } - - a.wy-text-danger:hover { - color: #ed7669 !important - } - - .wy-text-neutral { - color: #404040 !important - } - - a.wy-text-neutral:hover { - color: #595959 !important - } - - .rst-content .toctree-wrapper>p.caption, - h1, - h2, - h3, - h4, - h5, - h6, - legend { - margin-top: 0; - font-weight: 700; - font-family: "Roboto Slab", ff-tisa-web-pro, Georgia, Arial, sans-serif - } - - p { - line-height: 24px; - font-size: 16px; - margin: 0 0 24px - } - - h1 { - font-size: 175% - } - - .rst-content .toctree-wrapper>p.caption, - h2 { - font-size: 150% - } - - h3 { - font-size: 125% - } - - h4 { - font-size: 115% - } - - h5 { - font-size: 110% - } - - h6 { - font-size: 100% - } - - hr { - display: block; - height: 1px; - border: 0; - border-top: 1px solid #e1e4e5; - margin: 24px 0; - padding: 0 - } - - .rst-content code, - .rst-content tt, - code { - white-space: nowrap; - max-width: 100%; - background: #fff; - border: 1px solid #e1e4e5; - font-size: 75%; - padding: 0 5px; - font-family: IntelOne Mono, SFMono-Regular, Menlo, Monaco, Consolas, Liberation Mono, Courier New, Courier, monospace; - color: #e74c3c; - overflow-x: auto - } - - .rst-content tt.code-large, - code.code-large { - font-size: 90% - } - - .rst-content .section ul, - .rst-content .toctree-wrapper ul, - .rst-content section ul, - .wy-plain-list-disc, - article ul { - list-style: disc; - line-height: 24px; - margin-bottom: 24px - } - - .rst-content .section ul li, - .rst-content .toctree-wrapper ul li, - .rst-content section ul li, - .wy-plain-list-disc li, - article ul li { - list-style: disc; - margin-left: 24px - } - - .rst-content .section ul li p:last-child, - .rst-content .section ul li ul, - .rst-content .toctree-wrapper ul li p:last-child, - .rst-content .toctree-wrapper ul li ul, - .rst-content section ul li p:last-child, - .rst-content section ul li ul, - .wy-plain-list-disc li p:last-child, - .wy-plain-list-disc li ul, - article ul li p:last-child, - article ul li ul { - margin-bottom: 0 - } - - .rst-content .section ul li li, - .rst-content .toctree-wrapper ul li li, - .rst-content section ul li li, - .wy-plain-list-disc li li, - article ul li li { - list-style: circle - } - - .rst-content .section ul li li li, - .rst-content .toctree-wrapper ul li li li, - .rst-content section ul li li li, - .wy-plain-list-disc li li li, - article ul li li li { - list-style: square - } - - .rst-content .section ul li ol li, - .rst-content .toctree-wrapper ul li ol li, - .rst-content section ul li ol li, - .wy-plain-list-disc li ol li, - article ul li ol li { - list-style: decimal - } - - .rst-content .section ol, - .rst-content .section ol.arabic, - .rst-content .toctree-wrapper ol, - .rst-content .toctree-wrapper ol.arabic, - .rst-content section ol, - .rst-content section ol.arabic, - .wy-plain-list-decimal, - article ol { - list-style: decimal; - line-height: 24px; - margin-bottom: 24px - } - - .rst-content .section ol.arabic li, - .rst-content .section ol li, - .rst-content .toctree-wrapper ol.arabic li, - .rst-content .toctree-wrapper ol li, - .rst-content section ol.arabic li, - .rst-content section ol li, - .wy-plain-list-decimal li, - article ol li { - list-style: decimal; - margin-left: 24px - } - - .rst-content .section ol.arabic li ul, - .rst-content .section ol li p:last-child, - .rst-content .section ol li ul, - 
.rst-content .toctree-wrapper ol.arabic li ul, - .rst-content .toctree-wrapper ol li p:last-child, - .rst-content .toctree-wrapper ol li ul, - .rst-content section ol.arabic li ul, - .rst-content section ol li p:last-child, - .rst-content section ol li ul, - .wy-plain-list-decimal li p:last-child, - .wy-plain-list-decimal li ul, - article ol li p:last-child, - article ol li ul { - margin-bottom: 0 - } - - .rst-content .section ol.arabic li ul li, - .rst-content .section ol li ul li, - .rst-content .toctree-wrapper ol.arabic li ul li, - .rst-content .toctree-wrapper ol li ul li, - .rst-content section ol.arabic li ul li, - .rst-content section ol li ul li, - .wy-plain-list-decimal li ul li, - article ol li ul li { - list-style: disc - } - - .wy-breadcrumbs { - *zoom: 1 - } - - .wy-breadcrumbs:after, - .wy-breadcrumbs:before { - display: table; - content: "" - } - - .wy-breadcrumbs:after { - clear: both - } - - .wy-breadcrumbs>li { - display: inline-block; - padding-top: 5px - } - - .wy-breadcrumbs>li.wy-breadcrumbs-aside { - float: right - } - - .rst-content .wy-breadcrumbs>li code, - .rst-content .wy-breadcrumbs>li tt, - .wy-breadcrumbs>li .rst-content tt, - .wy-breadcrumbs>li code { - all: inherit; - color: inherit - } - - .breadcrumb-item:before { - content: "/"; - color: #bbb; - font-size: 13px; - padding: 0 6px 0 3px - } - - .wy-breadcrumbs-extra { - margin-bottom: 0; - color: #b3b3b3; - font-size: 80%; - display: inline-block - } - - @media screen and (max-width:480px) { - - .wy-breadcrumbs-extra, - .wy-breadcrumbs li.wy-breadcrumbs-aside { - display: none - } - } - - @media print { - .wy-breadcrumbs li.wy-breadcrumbs-aside { - display: none - } - } - - html { - font-size: 16px - } - - .wy-affix { - position: fixed; - top: 1.618em - } - - .wy-menu a:hover { - text-decoration: none - } - - .wy-menu-horiz { - *zoom: 1 - } - - .wy-menu-horiz:after, - .wy-menu-horiz:before { - display: table; - content: "" - } - - .wy-menu-horiz:after { - clear: both - } - - .wy-menu-horiz li, - .wy-menu-horiz ul { - display: inline-block - } - - .wy-menu-horiz li:hover { - background: hsla(0, 0%, 100%, .1) - } - - .wy-menu-horiz li.divide-left { - border-left: 1px solid #404040 - } - - .wy-menu-horiz li.divide-right { - border-right: 1px solid #404040 - } - - .wy-menu-horiz a { - height: 32px; - display: inline-block; - line-height: 32px; - padding: 0 16px - } - - .wy-menu-vertical { - width: 300px - } - - .wy-menu-vertical header, - .wy-menu-vertical p.caption { - color: #55a5d9; - height: 32px; - line-height: 32px; - padding: 0 1.618em; - margin: 12px 0 0; - display: block; - font-weight: 700; - text-transform: uppercase; - font-size: 85%; - white-space: nowrap - } - - .wy-menu-vertical ul { - margin-bottom: 0 - } - - .wy-menu-vertical li.divide-top { - border-top: 1px solid #404040 - } - - .wy-menu-vertical li.divide-bottom { - border-bottom: 1px solid #404040 - } - - .wy-menu-vertical li.current { - background: #e3e3e3 - } - - .wy-menu-vertical li.current a { - color: grey; - border-right: 1px solid #c9c9c9; - padding: .4045em 2.427em - } - - .wy-menu-vertical li.current a:hover { - background: #d6d6d6 - } - - .rst-content .wy-menu-vertical li tt, - .wy-menu-vertical li .rst-content tt, - .wy-menu-vertical li code { - border: none; - background: inherit; - color: inherit; - padding-left: 0; - padding-right: 0 - } - - .wy-menu-vertical li button.toctree-expand { - display: block; - float: left; - margin-left: -1.2em; - line-height: 18px; - color: #4d4d4d; - border: none; - background: none; - padding: 
0 - } - - .wy-menu-vertical li.current>a, - .wy-menu-vertical li.on a { - color: #404040; - font-weight: 700; - position: relative; - background: #fcfcfc; - border: none; - padding: .4045em 1.618em - } - - .wy-menu-vertical li.current>a:hover, - .wy-menu-vertical li.on a:hover { - background: #fcfcfc - } - - .wy-menu-vertical li.current>a:hover button.toctree-expand, - .wy-menu-vertical li.on a:hover button.toctree-expand { - color: grey - } - - .wy-menu-vertical li.current>a button.toctree-expand, - .wy-menu-vertical li.on a button.toctree-expand { - display: block; - line-height: 18px; - color: #333 - } - - .wy-menu-vertical li.toctree-l1.current>a { - border-bottom: 1px solid #c9c9c9; - border-top: 1px solid #c9c9c9 - } - - .wy-menu-vertical .toctree-l1.current .toctree-l2>ul, - .wy-menu-vertical .toctree-l2.current .toctree-l3>ul, - .wy-menu-vertical .toctree-l3.current .toctree-l4>ul, - .wy-menu-vertical .toctree-l4.current .toctree-l5>ul, - .wy-menu-vertical .toctree-l5.current .toctree-l6>ul, - .wy-menu-vertical .toctree-l6.current .toctree-l7>ul, - .wy-menu-vertical .toctree-l7.current .toctree-l8>ul, - .wy-menu-vertical .toctree-l8.current .toctree-l9>ul, - .wy-menu-vertical .toctree-l9.current .toctree-l10>ul, - .wy-menu-vertical .toctree-l10.current .toctree-l11>ul { - display: none - } - - .wy-menu-vertical .toctree-l1.current .current.toctree-l2>ul, - .wy-menu-vertical .toctree-l2.current .current.toctree-l3>ul, - .wy-menu-vertical .toctree-l3.current .current.toctree-l4>ul, - .wy-menu-vertical .toctree-l4.current .current.toctree-l5>ul, - .wy-menu-vertical .toctree-l5.current .current.toctree-l6>ul, - .wy-menu-vertical .toctree-l6.current .current.toctree-l7>ul, - .wy-menu-vertical .toctree-l7.current .current.toctree-l8>ul, - .wy-menu-vertical .toctree-l8.current .current.toctree-l9>ul, - .wy-menu-vertical .toctree-l9.current .current.toctree-l10>ul, - .wy-menu-vertical .toctree-l10.current .current.toctree-l11>ul { - display: block - } - - .wy-menu-vertical li.toctree-l3, - .wy-menu-vertical li.toctree-l4 { - font-size: .9em - } - - .wy-menu-vertical li.toctree-l2 a, - .wy-menu-vertical li.toctree-l3 a, - .wy-menu-vertical li.toctree-l4 a, - .wy-menu-vertical li.toctree-l5 a, - .wy-menu-vertical li.toctree-l6 a, - .wy-menu-vertical li.toctree-l7 a, - .wy-menu-vertical li.toctree-l8 a, - .wy-menu-vertical li.toctree-l9 a, - .wy-menu-vertical li.toctree-l10 a { - color: #404040 - } - - .wy-menu-vertical li.toctree-l2 a:hover button.toctree-expand, - .wy-menu-vertical li.toctree-l3 a:hover button.toctree-expand, - .wy-menu-vertical li.toctree-l4 a:hover button.toctree-expand, - .wy-menu-vertical li.toctree-l5 a:hover button.toctree-expand, - .wy-menu-vertical li.toctree-l6 a:hover button.toctree-expand, - .wy-menu-vertical li.toctree-l7 a:hover button.toctree-expand, - .wy-menu-vertical li.toctree-l8 a:hover button.toctree-expand, - .wy-menu-vertical li.toctree-l9 a:hover button.toctree-expand, - .wy-menu-vertical li.toctree-l10 a:hover button.toctree-expand { - color: grey - } - - .wy-menu-vertical li.toctree-l2.current li.toctree-l3>a, - .wy-menu-vertical li.toctree-l3.current li.toctree-l4>a, - .wy-menu-vertical li.toctree-l4.current li.toctree-l5>a, - .wy-menu-vertical li.toctree-l5.current li.toctree-l6>a, - .wy-menu-vertical li.toctree-l6.current li.toctree-l7>a, - .wy-menu-vertical li.toctree-l7.current li.toctree-l8>a, - .wy-menu-vertical li.toctree-l8.current li.toctree-l9>a, - .wy-menu-vertical li.toctree-l9.current li.toctree-l10>a, - .wy-menu-vertical 
li.toctree-l10.current li.toctree-l11>a { - display: block - } - - .wy-menu-vertical li.toctree-l2.current>a { - padding: .4045em 2.427em - } - - .wy-menu-vertical li.toctree-l2.current li.toctree-l3>a { - padding: .4045em 1.618em .4045em 4.045em - } - - .wy-menu-vertical li.toctree-l3.current>a { - padding: .4045em 4.045em - } - - .wy-menu-vertical li.toctree-l3.current li.toctree-l4>a { - padding: .4045em 1.618em .4045em 5.663em - } - - .wy-menu-vertical li.toctree-l4.current>a { - padding: .4045em 5.663em - } - - .wy-menu-vertical li.toctree-l4.current li.toctree-l5>a { - padding: .4045em 1.618em .4045em 7.281em - } - - .wy-menu-vertical li.toctree-l5.current>a { - padding: .4045em 7.281em - } - - .wy-menu-vertical li.toctree-l5.current li.toctree-l6>a { - padding: .4045em 1.618em .4045em 8.899em - } - - .wy-menu-vertical li.toctree-l6.current>a { - padding: .4045em 8.899em - } - - .wy-menu-vertical li.toctree-l6.current li.toctree-l7>a { - padding: .4045em 1.618em .4045em 10.517em - } - - .wy-menu-vertical li.toctree-l7.current>a { - padding: .4045em 10.517em - } - - .wy-menu-vertical li.toctree-l7.current li.toctree-l8>a { - padding: .4045em 1.618em .4045em 12.135em - } - - .wy-menu-vertical li.toctree-l8.current>a { - padding: .4045em 12.135em - } - - .wy-menu-vertical li.toctree-l8.current li.toctree-l9>a { - padding: .4045em 1.618em .4045em 13.753em - } - - .wy-menu-vertical li.toctree-l9.current>a { - padding: .4045em 13.753em - } - - .wy-menu-vertical li.toctree-l9.current li.toctree-l10>a { - padding: .4045em 1.618em .4045em 15.371em - } - - .wy-menu-vertical li.toctree-l10.current>a { - padding: .4045em 15.371em - } - - .wy-menu-vertical li.toctree-l10.current li.toctree-l11>a { - padding: .4045em 1.618em .4045em 16.989em - } - - .wy-menu-vertical li.toctree-l2.current>a, - .wy-menu-vertical li.toctree-l2.current li.toctree-l3>a { - background: #c9c9c9 - } - - .wy-menu-vertical li.toctree-l2 button.toctree-expand { - color: #a3a3a3 - } - - .wy-menu-vertical li.toctree-l3.current>a, - .wy-menu-vertical li.toctree-l3.current li.toctree-l4>a { - background: #bdbdbd - } - - .wy-menu-vertical li.toctree-l3 button.toctree-expand { - color: #969696 - } - - .wy-menu-vertical li.current ul { - display: block - } - - .wy-menu-vertical li ul { - margin-bottom: 0; - display: none - } - - .wy-menu-vertical li ul li a { - margin-bottom: 0; - color: #d9d9d9; - font-weight: 400 - } - - .wy-menu-vertical a { - line-height: 18px; - padding: .4045em 1.618em; - display: block; - position: relative; - font-size: 90%; - color: #d9d9d9 - } - - .wy-menu-vertical a:hover { - background-color: #4e4a4a; - cursor: pointer - } - - .wy-menu-vertical a:hover button.toctree-expand { - color: #d9d9d9 - } - - .wy-menu-vertical a:active { - background-color: #2980b9; - cursor: pointer; - color: #fff - } - - .wy-menu-vertical a:active button.toctree-expand { - color: #fff - } - - .wy-side-nav-search { - display: block; - width: 300px; - padding: .809em; - margin-bottom: .809em; - z-index: 200; - background-color: #2980b9; - text-align: center; - color: #fcfcfc - } - - .wy-side-nav-search input[type=text] { - width: 100%; - border-radius: 50px; - padding: 6px 12px; - border-color: #2472a4 - } - - .wy-side-nav-search img { - display: block; - margin: auto auto .809em; - height: 45px; - width: 45px; - background-color: #2980b9; - padding: 5px; - border-radius: 100% - } - - .wy-side-nav-search .wy-dropdown>a, - .wy-side-nav-search>a { - color: #fcfcfc; - font-size: 100%; - font-weight: 700; - display: inline-block; - 
padding: 4px 6px; - margin-bottom: .809em; - max-width: 100% - } - - .wy-side-nav-search .wy-dropdown>a:hover, - .wy-side-nav-search>a:hover { - background: hsla(0, 0%, 100%, .1) - } - - .wy-side-nav-search .wy-dropdown>a img.logo, - .wy-side-nav-search>a img.logo { - display: block; - margin: 0 auto; - height: auto; - width: auto; - border-radius: 0; - max-width: 100%; - background: transparent - } - - .wy-side-nav-search .wy-dropdown>a.icon img.logo, - .wy-side-nav-search>a.icon img.logo { - margin-top: .85em - } - - .wy-side-nav-search>div.version { - margin-top: -.4045em; - margin-bottom: .809em; - font-weight: 400; - color: hsla(0, 0%, 100%, .3) - } - - .wy-nav .wy-menu-vertical header { - color: #2980b9 - } - - .wy-nav .wy-menu-vertical a { - color: #b3b3b3 - } - - .wy-nav .wy-menu-vertical a:hover { - background-color: #2980b9; - color: #fff - } - - [data-menu-wrap] { - -webkit-transition: all .2s ease-in; - -moz-transition: all .2s ease-in; - transition: all .2s ease-in; - position: absolute; - opacity: 1; - width: 100%; - opacity: 0 - } - - [data-menu-wrap].move-center { - left: 0; - right: auto; - opacity: 1 - } - - [data-menu-wrap].move-left { - right: auto; - left: -100%; - opacity: 0 - } - - [data-menu-wrap].move-right { - right: -100%; - left: auto; - opacity: 0 - } - - .wy-body-for-nav { - background: #fcfcfc - } - - .wy-grid-for-nav { - position: absolute; - width: 100%; - height: 100% - } - - .wy-nav-side { - position: fixed; - top: 0; - bottom: 0; - left: 0; - padding-bottom: 2em; - width: 300px; - overflow-x: hidden; - overflow-y: hidden; - min-height: 100%; - color: #9b9b9b; - background: #343131; - z-index: 200 - } - - .wy-side-scroll { - width: 320px; - position: relative; - overflow-x: hidden; - overflow-y: scroll; - height: 100% - } - - .wy-nav-top { - display: none; - background: #2980b9; - color: #fff; - padding: .4045em .809em; - position: relative; - line-height: 50px; - text-align: center; - font-size: 100%; - *zoom: 1 - } - - .wy-nav-top:after, - .wy-nav-top:before { - display: table; - content: "" - } - - .wy-nav-top:after { - clear: both - } - - .wy-nav-top a { - color: #fff; - font-weight: 700 - } - - .wy-nav-top img { - margin-right: 12px; - height: 45px; - width: 45px; - background-color: #2980b9; - padding: 5px; - border-radius: 100% - } - - .wy-nav-top i { - font-size: 30px; - float: left; - cursor: pointer; - padding-top: inherit - } - - .wy-nav-content-wrap { - margin-left: 300px; - background: #fcfcfc; - min-height: 100% - } - - .wy-nav-content { - padding: 1.618em 3.236em; - height: 100%; - max-width: 800px; - margin: auto - } - - .wy-body-mask { - position: fixed; - width: 100%; - height: 100%; - background: rgba(0, 0, 0, .2); - display: none; - z-index: 499 - } - - .wy-body-mask.on { - display: block - } - - footer { - color: grey - } - - footer p { - margin-bottom: 12px - } - - .rst-content footer span.commit tt, - footer span.commit .rst-content tt, - footer span.commit code { - padding: 0; - font-family: IntelOne Mono, SFMono-Regular, Menlo, Monaco, Consolas, Liberation Mono, Courier New, Courier, monospace; - font-size: 1em; - background: none; - border: none; - color: grey - } - - .rst-footer-buttons { - *zoom: 1 - } - - .rst-footer-buttons:after, - .rst-footer-buttons:before { - width: 100%; - display: table; - content: "" - } - - .rst-footer-buttons:after { - clear: both - } - - .rst-breadcrumbs-buttons { - margin-top: 12px; - *zoom: 1 - } - - .rst-breadcrumbs-buttons:after, - .rst-breadcrumbs-buttons:before { - display: table; - content: 
"" - } - - .rst-breadcrumbs-buttons:after { - clear: both - } - - #search-results .search li { - margin-bottom: 24px; - border-bottom: 1px solid #e1e4e5; - padding-bottom: 24px - } - - #search-results .search li:first-child { - border-top: 1px solid #e1e4e5; - padding-top: 24px - } - - #search-results .search li a { - font-size: 120%; - margin-bottom: 12px; - display: inline-block - } - - #search-results .context { - color: grey; - font-size: 90% - } - - .genindextable li>ul { - margin-left: 24px - } - - @media screen and (max-width:768px) { - .wy-body-for-nav { - background: #fcfcfc - } - - .wy-nav-top { - display: block - } - - .wy-nav-side { - left: -300px - } - - .wy-nav-side.shift { - width: 85%; - left: 0 - } - - .wy-menu.wy-menu-vertical, - .wy-side-nav-search, - .wy-side-scroll { - width: auto - } - - .wy-nav-content-wrap { - margin-left: 0 - } - - .wy-nav-content-wrap .wy-nav-content { - padding: 1.618em - } - - .wy-nav-content-wrap.shift { - position: fixed; - min-width: 100%; - left: 85%; - top: 0; - height: 100%; - overflow: hidden - } - } - - @media screen and (min-width:1100px) { - .wy-nav-content-wrap { - background: rgba(0, 0, 0, .05) - } - - .wy-nav-content { - margin: 0; - background: #fcfcfc - } - } - - @media print { - - .rst-versions, - .wy-nav-side, - footer { - display: none - } - - .wy-nav-content-wrap { - margin-left: 0 - } - } - - .rst-versions { - position: fixed; - bottom: 0; - left: 0; - width: 300px; - color: #fcfcfc; - background: #1f1d1d; - font-family: Lato, proxima-nova, Helvetica Neue, Arial, sans-serif; - z-index: 400 - } - - .rst-versions a { - color: #2980b9; - text-decoration: none - } - - .rst-versions .rst-badge-small { - display: none - } - - .rst-versions .rst-current-version { - padding: 12px; - background-color: #272525; - display: block; - text-align: right; - font-size: 90%; - cursor: pointer; - color: #27ae60; - *zoom: 1 - } - - .rst-versions .rst-current-version:after, - .rst-versions .rst-current-version:before { - display: table; - content: "" - } - - .rst-versions .rst-current-version:after { - clear: both - } - - .rst-content .code-block-caption .rst-versions .rst-current-version .headerlink, - .rst-content .eqno .rst-versions .rst-current-version .headerlink, - .rst-content .rst-versions .rst-current-version .admonition-title, - .rst-content code.download .rst-versions .rst-current-version span:first-child, - .rst-content dl dt .rst-versions .rst-current-version .headerlink, - .rst-content h1 .rst-versions .rst-current-version .headerlink, - .rst-content h2 .rst-versions .rst-current-version .headerlink, - .rst-content h3 .rst-versions .rst-current-version .headerlink, - .rst-content h4 .rst-versions .rst-current-version .headerlink, - .rst-content h5 .rst-versions .rst-current-version .headerlink, - .rst-content h6 .rst-versions .rst-current-version .headerlink, - .rst-content p .rst-versions .rst-current-version .headerlink, - .rst-content table>caption .rst-versions .rst-current-version .headerlink, - .rst-content tt.download .rst-versions .rst-current-version span:first-child, - .rst-versions .rst-current-version .fa, - .rst-versions .rst-current-version .icon, - .rst-versions .rst-current-version .rst-content .admonition-title, - .rst-versions .rst-current-version .rst-content .code-block-caption .headerlink, - .rst-versions .rst-current-version .rst-content .eqno .headerlink, - .rst-versions .rst-current-version .rst-content code.download span:first-child, - .rst-versions .rst-current-version .rst-content dl dt .headerlink, - 
.rst-versions .rst-current-version .rst-content h1 .headerlink, - .rst-versions .rst-current-version .rst-content h2 .headerlink, - .rst-versions .rst-current-version .rst-content h3 .headerlink, - .rst-versions .rst-current-version .rst-content h4 .headerlink, - .rst-versions .rst-current-version .rst-content h5 .headerlink, - .rst-versions .rst-current-version .rst-content h6 .headerlink, - .rst-versions .rst-current-version .rst-content p .headerlink, - .rst-versions .rst-current-version .rst-content table>caption .headerlink, - .rst-versions .rst-current-version .rst-content tt.download span:first-child, - .rst-versions .rst-current-version .wy-menu-vertical li button.toctree-expand, - .wy-menu-vertical li .rst-versions .rst-current-version button.toctree-expand { - color: #fcfcfc - } - - .rst-versions .rst-current-version .fa-book, - .rst-versions .rst-current-version .icon-book { - float: left - } - - .rst-versions .rst-current-version.rst-out-of-date { - background-color: #e74c3c; - color: #fff - } - - .rst-versions .rst-current-version.rst-active-old-version { - background-color: #f1c40f; - color: #000 - } - - .rst-versions.shift-up { - height: auto; - max-height: 100%; - overflow-y: scroll - } - - .rst-versions.shift-up .rst-other-versions { - display: block - } - - .rst-versions .rst-other-versions { - font-size: 90%; - padding: 12px; - color: grey; - display: none - } - - .rst-versions .rst-other-versions hr { - display: block; - height: 1px; - border: 0; - margin: 20px 0; - padding: 0; - border-top: 1px solid #413d3d - } - - .rst-versions .rst-other-versions dd { - display: inline-block; - margin: 0 - } - - .rst-versions .rst-other-versions dd a { - display: inline-block; - padding: 6px; - color: #fcfcfc - } - - .rst-versions.rst-badge { - width: auto; - bottom: 20px; - right: 20px; - left: auto; - border: none; - max-width: 300px; - max-height: 90% - } - - .rst-versions.rst-badge .fa-book, - .rst-versions.rst-badge .icon-book { - float: none; - line-height: 30px - } - - .rst-versions.rst-badge.shift-up .rst-current-version { - text-align: right - } - - .rst-versions.rst-badge.shift-up .rst-current-version .fa-book, - .rst-versions.rst-badge.shift-up .rst-current-version .icon-book { - float: left - } - - .rst-versions.rst-badge>.rst-current-version { - width: auto; - height: 30px; - line-height: 30px; - padding: 0 6px; - display: block; - text-align: center - } - - @media screen and (max-width:768px) { - .rst-versions { - width: 85%; - display: none - } - - .rst-versions.shift { - display: block - } - } - - .rst-content .toctree-wrapper>p.caption, - .rst-content h1, - .rst-content h2, - .rst-content h3, - .rst-content h4, - .rst-content h5, - .rst-content h6 { - margin-bottom: 24px - } - - .rst-content img { - max-width: 100%; - height: auto - } - - .rst-content div.figure, - .rst-content figure { - margin-bottom: 24px - } - - .rst-content div.figure .caption-text, - .rst-content figure .caption-text { - font-style: italic - } - - .rst-content div.figure p:last-child.caption, - .rst-content figure p:last-child.caption { - margin-bottom: 0 - } - - .rst-content div.figure.align-center, - .rst-content figure.align-center { - text-align: center - } - - .rst-content .section>a>img, - .rst-content .section>img, - .rst-content section>a>img, - .rst-content section>img { - margin-bottom: 24px - } - - .rst-content abbr[title] { - text-decoration: none - } - - .rst-content.style-external-links a.reference.external:after { - font-family: FontAwesome; - content: "\f08e"; - color: #b3b3b3; 
- vertical-align: super; - font-size: 60%; - margin: 0 .2em - } - - .rst-content blockquote { - margin-left: 24px; - line-height: 24px; - margin-bottom: 24px - } - - .rst-content pre.literal-block { - white-space: pre; - margin: 0; - padding: 12px; - font-family: IntelOne Mono, SFMono-Regular, Menlo, Monaco, Consolas, Liberation Mono, Courier New, Courier, monospace; - display: block; - overflow: auto - } - - .rst-content div[class^=highlight], - .rst-content pre.literal-block { - border: 1px solid #e1e4e5; - overflow-x: auto; - margin: 1px 0 24px - } - - .rst-content div[class^=highlight] div[class^=highlight], - .rst-content pre.literal-block div[class^=highlight] { - padding: 0; - border: none; - margin: 0 - } - - .rst-content div[class^=highlight] td.code { - width: 100% - } - - .rst-content .linenodiv pre { - border-right: 1px solid #e6e9ea; - margin: 0; - padding: 12px; - font-family: IntelOne Mono, SFMono-Regular, Menlo, Monaco, Consolas, Liberation Mono, Courier New, Courier, monospace; - user-select: none; - pointer-events: none - } - - .rst-content div[class^=highlight] pre { - white-space: pre; - margin: 0; - padding: 12px; - display: block; - overflow: auto - } - - .rst-content div[class^=highlight] pre .hll { - display: block; - margin: 0 -12px; - padding: 0 12px - } - - .rst-content .linenodiv pre, - .rst-content div[class^=highlight] pre, - .rst-content pre.literal-block { - font-family: IntelOne Mono, SFMono-Regular, Menlo, Monaco, Consolas, Liberation Mono, Courier New, Courier, monospace; - font-size: 12px; - line-height: 1.4 - } - - .rst-content div.highlight .gp, - .rst-content div.highlight span.linenos { - user-select: none; - pointer-events: none - } - - .rst-content div.highlight span.linenos { - display: inline-block; - padding-left: 0; - padding-right: 12px; - margin-right: 12px; - border-right: 1px solid #e6e9ea - } - - .rst-content .code-block-caption { - font-style: italic; - font-size: 85%; - line-height: 1; - padding: 1em 0; - text-align: center - } - - @media print { - - .rst-content .codeblock, - .rst-content div[class^=highlight], - .rst-content div[class^=highlight] pre { - white-space: pre-wrap - } - } - - .rst-content .admonition, - .rst-content .admonition-todo, - .rst-content .attention, - .rst-content .caution, - .rst-content .danger, - .rst-content .error, - .rst-content .hint, - .rst-content .important, - .rst-content .note, - .rst-content .seealso, - .rst-content .tip, - .rst-content .warning { - clear: both - } - - .rst-content .admonition-todo .last, - .rst-content .admonition-todo>:last-child, - .rst-content .admonition .last, - .rst-content .admonition>:last-child, - .rst-content .attention .last, - .rst-content .attention>:last-child, - .rst-content .caution .last, - .rst-content .caution>:last-child, - .rst-content .danger .last, - .rst-content .danger>:last-child, - .rst-content .error .last, - .rst-content .error>:last-child, - .rst-content .hint .last, - .rst-content .hint>:last-child, - .rst-content .important .last, - .rst-content .important>:last-child, - .rst-content .note .last, - .rst-content .note>:last-child, - .rst-content .seealso .last, - .rst-content .seealso>:last-child, - .rst-content .tip .last, - .rst-content .tip>:last-child, - .rst-content .warning .last, - .rst-content .warning>:last-child { - margin-bottom: 0 - } - - .rst-content .admonition-title:before { - margin-right: 4px - } - - .rst-content .admonition table { - border-color: rgba(0, 0, 0, .1) - } - - .rst-content .admonition table td, - .rst-content .admonition 
table th { - background: transparent !important; - border-color: rgba(0, 0, 0, .1) !important - } - - .rst-content .section ol.loweralpha, - .rst-content .section ol.loweralpha>li, - .rst-content .toctree-wrapper ol.loweralpha, - .rst-content .toctree-wrapper ol.loweralpha>li, - .rst-content section ol.loweralpha, - .rst-content section ol.loweralpha>li { - list-style: lower-alpha - } - - .rst-content .section ol.upperalpha, - .rst-content .section ol.upperalpha>li, - .rst-content .toctree-wrapper ol.upperalpha, - .rst-content .toctree-wrapper ol.upperalpha>li, - .rst-content section ol.upperalpha, - .rst-content section ol.upperalpha>li { - list-style: upper-alpha - } - - .rst-content .section ol li>*, - .rst-content .section ul li>*, - .rst-content .toctree-wrapper ol li>*, - .rst-content .toctree-wrapper ul li>*, - .rst-content section ol li>*, - .rst-content section ul li>* { - margin-top: 12px; - margin-bottom: 12px - } - - .rst-content .section ol li>:first-child, - .rst-content .section ul li>:first-child, - .rst-content .toctree-wrapper ol li>:first-child, - .rst-content .toctree-wrapper ul li>:first-child, - .rst-content section ol li>:first-child, - .rst-content section ul li>:first-child { - margin-top: 0 - } - - .rst-content .section ol li>p, - .rst-content .section ol li>p:last-child, - .rst-content .section ul li>p, - .rst-content .section ul li>p:last-child, - .rst-content .toctree-wrapper ol li>p, - .rst-content .toctree-wrapper ol li>p:last-child, - .rst-content .toctree-wrapper ul li>p, - .rst-content .toctree-wrapper ul li>p:last-child, - .rst-content section ol li>p, - .rst-content section ol li>p:last-child, - .rst-content section ul li>p, - .rst-content section ul li>p:last-child { - margin-bottom: 12px - } - - .rst-content .section ol li>p:only-child, - .rst-content .section ol li>p:only-child:last-child, - .rst-content .section ul li>p:only-child, - .rst-content .section ul li>p:only-child:last-child, - .rst-content .toctree-wrapper ol li>p:only-child, - .rst-content .toctree-wrapper ol li>p:only-child:last-child, - .rst-content .toctree-wrapper ul li>p:only-child, - .rst-content .toctree-wrapper ul li>p:only-child:last-child, - .rst-content section ol li>p:only-child, - .rst-content section ol li>p:only-child:last-child, - .rst-content section ul li>p:only-child, - .rst-content section ul li>p:only-child:last-child { - margin-bottom: 0 - } - - .rst-content .section ol li>ol, - .rst-content .section ol li>ul, - .rst-content .section ul li>ol, - .rst-content .section ul li>ul, - .rst-content .toctree-wrapper ol li>ol, - .rst-content .toctree-wrapper ol li>ul, - .rst-content .toctree-wrapper ul li>ol, - .rst-content .toctree-wrapper ul li>ul, - .rst-content section ol li>ol, - .rst-content section ol li>ul, - .rst-content section ul li>ol, - .rst-content section ul li>ul { - margin-bottom: 12px - } - - .rst-content .section ol.simple li>*, - .rst-content .section ol.simple li ol, - .rst-content .section ol.simple li ul, - .rst-content .section ul.simple li>*, - .rst-content .section ul.simple li ol, - .rst-content .section ul.simple li ul, - .rst-content .toctree-wrapper ol.simple li>*, - .rst-content .toctree-wrapper ol.simple li ol, - .rst-content .toctree-wrapper ol.simple li ul, - .rst-content .toctree-wrapper ul.simple li>*, - .rst-content .toctree-wrapper ul.simple li ol, - .rst-content .toctree-wrapper ul.simple li ul, - .rst-content section ol.simple li>*, - .rst-content section ol.simple li ol, - .rst-content section ol.simple li ul, - .rst-content section 
ul.simple li>*, - .rst-content section ul.simple li ol, - .rst-content section ul.simple li ul { - margin-top: 0; - margin-bottom: 0 - } - - .rst-content .line-block { - margin-left: 0; - margin-bottom: 24px; - line-height: 24px - } - - .rst-content .line-block .line-block { - margin-left: 24px; - margin-bottom: 0 - } - - .rst-content .topic-title { - font-weight: 700; - margin-bottom: 12px - } - - .rst-content .toc-backref { - color: #404040 - } - - .rst-content .align-right { - float: right; - margin: 0 0 24px 24px - } - - .rst-content .align-left { - float: left; - margin: 0 24px 24px 0 - } - - .rst-content .align-center { - margin: auto - } - - .rst-content .align-center:not(table) { - display: block - } - - .rst-content .code-block-caption .headerlink, - .rst-content .eqno .headerlink, - .rst-content .toctree-wrapper>p.caption .headerlink, - .rst-content dl dt .headerlink, - .rst-content h1 .headerlink, - .rst-content h2 .headerlink, - .rst-content h3 .headerlink, - .rst-content h4 .headerlink, - .rst-content h5 .headerlink, - .rst-content h6 .headerlink, - .rst-content p.caption .headerlink, - .rst-content p .headerlink, - .rst-content table>caption .headerlink { - opacity: 0; - font-size: 14px; - font-family: FontAwesome; - margin-left: .5em - } - - .rst-content .code-block-caption .headerlink:focus, - .rst-content .code-block-caption:hover .headerlink, - .rst-content .eqno .headerlink:focus, - .rst-content .eqno:hover .headerlink, - .rst-content .toctree-wrapper>p.caption .headerlink:focus, - .rst-content .toctree-wrapper>p.caption:hover .headerlink, - .rst-content dl dt .headerlink:focus, - .rst-content dl dt:hover .headerlink, - .rst-content h1 .headerlink:focus, - .rst-content h1:hover .headerlink, - .rst-content h2 .headerlink:focus, - .rst-content h2:hover .headerlink, - .rst-content h3 .headerlink:focus, - .rst-content h3:hover .headerlink, - .rst-content h4 .headerlink:focus, - .rst-content h4:hover .headerlink, - .rst-content h5 .headerlink:focus, - .rst-content h5:hover .headerlink, - .rst-content h6 .headerlink:focus, - .rst-content h6:hover .headerlink, - .rst-content p.caption .headerlink:focus, - .rst-content p.caption:hover .headerlink, - .rst-content p .headerlink:focus, - .rst-content p:hover .headerlink, - .rst-content table>caption .headerlink:focus, - .rst-content table>caption:hover .headerlink { - opacity: 1 - } - - .rst-content p a { - overflow-wrap: anywhere - } - - .rst-content .wy-table td p, - .rst-content .wy-table td ul, - .rst-content .wy-table th p, - .rst-content .wy-table th ul, - .rst-content table.docutils td p, - .rst-content table.docutils td ul, - .rst-content table.docutils th p, - .rst-content table.docutils th ul, - .rst-content table.field-list td p, - .rst-content table.field-list td ul, - .rst-content table.field-list th p, - .rst-content table.field-list th ul { - font-size: inherit - } - - .rst-content .btn:focus { - outline: 2px solid - } - - .rst-content table>caption .headerlink:after { - font-size: 12px - } - - .rst-content .centered { - text-align: center - } - - .rst-content .sidebar { - float: right; - width: 40%; - display: block; - margin: 0 0 24px 24px; - padding: 24px; - background: #f3f6f6; - border: 1px solid #e1e4e5 - } - - .rst-content .sidebar dl, - .rst-content .sidebar p, - .rst-content .sidebar ul { - font-size: 90% - } - - .rst-content .sidebar .last, - .rst-content .sidebar>:last-child { - margin-bottom: 0 - } - - .rst-content .sidebar .sidebar-title { - display: block; - font-family: "Roboto Slab", ff-tisa-web-pro, 
Georgia, Arial, sans-serif; - font-weight: 700; - background: #e1e4e5; - padding: 6px 12px; - margin: -24px -24px 24px; - font-size: 100% - } - - .rst-content .highlighted { - background: #f1c40f; - box-shadow: 0 0 0 2px #f1c40f; - display: inline; - font-weight: 700 - } - - .rst-content .citation-reference, - .rst-content .footnote-reference { - vertical-align: baseline; - position: relative; - top: -.4em; - line-height: 0; - font-size: 90% - } - - .rst-content .citation-reference>span.fn-bracket, - .rst-content .footnote-reference>span.fn-bracket { - display: none - } - - .rst-content .hlist { - width: 100% - } - - .rst-content dl dt span.classifier:before { - content: " : " - } - - .rst-content dl dt span.classifier-delimiter { - display: none !important - } - - html.writer-html4 .rst-content table.docutils.citation, - html.writer-html4 .rst-content table.docutils.footnote { - background: none; - border: none - } - - html.writer-html4 .rst-content table.docutils.citation td, - html.writer-html4 .rst-content table.docutils.citation tr, - html.writer-html4 .rst-content table.docutils.footnote td, - html.writer-html4 .rst-content table.docutils.footnote tr { - border: none; - background-color: transparent !important; - white-space: normal - } - - html.writer-html4 .rst-content table.docutils.citation td.label, - html.writer-html4 .rst-content table.docutils.footnote td.label { - padding-left: 0; - padding-right: 0; - vertical-align: top - } - - html.writer-html5 .rst-content dl.citation, - html.writer-html5 .rst-content dl.field-list, - html.writer-html5 .rst-content dl.footnote { - display: grid; - grid-template-columns: auto minmax(80%, 95%) - } - - html.writer-html5 .rst-content dl.citation>dt, - html.writer-html5 .rst-content dl.field-list>dt, - html.writer-html5 .rst-content dl.footnote>dt { - display: inline-grid; - grid-template-columns: max-content auto - } - - html.writer-html5 .rst-content aside.citation, - html.writer-html5 .rst-content aside.footnote, - html.writer-html5 .rst-content div.citation { - display: grid; - grid-template-columns: auto auto minmax(.65rem, auto) minmax(40%, 95%) - } - - html.writer-html5 .rst-content aside.citation>span.label, - html.writer-html5 .rst-content aside.footnote>span.label, - html.writer-html5 .rst-content div.citation>span.label { - grid-column-start: 1; - grid-column-end: 2 - } - - html.writer-html5 .rst-content aside.citation>span.backrefs, - html.writer-html5 .rst-content aside.footnote>span.backrefs, - html.writer-html5 .rst-content div.citation>span.backrefs { - grid-column-start: 2; - grid-column-end: 3; - grid-row-start: 1; - grid-row-end: 3 - } - - html.writer-html5 .rst-content aside.citation>p, - html.writer-html5 .rst-content aside.footnote>p, - html.writer-html5 .rst-content div.citation>p { - grid-column-start: 4; - grid-column-end: 5 - } - - html.writer-html5 .rst-content dl.citation, - html.writer-html5 .rst-content dl.field-list, - html.writer-html5 .rst-content dl.footnote { - margin-bottom: 24px - } - - html.writer-html5 .rst-content dl.citation>dt, - html.writer-html5 .rst-content dl.field-list>dt, - html.writer-html5 .rst-content dl.footnote>dt { - padding-left: 1rem - } - - html.writer-html5 .rst-content dl.citation>dd, - html.writer-html5 .rst-content dl.citation>dt, - html.writer-html5 .rst-content dl.field-list>dd, - html.writer-html5 .rst-content dl.field-list>dt, - html.writer-html5 .rst-content dl.footnote>dd, - html.writer-html5 .rst-content dl.footnote>dt { - margin-bottom: 0 - } - - html.writer-html5 
.rst-content dl.citation, - html.writer-html5 .rst-content dl.footnote { - font-size: .9rem - } - - html.writer-html5 .rst-content dl.citation>dt, - html.writer-html5 .rst-content dl.footnote>dt { - margin: 0 .5rem .5rem 0; - line-height: 1.2rem; - word-break: break-all; - font-weight: 400 - } - - html.writer-html5 .rst-content dl.citation>dt>span.brackets:before, - html.writer-html5 .rst-content dl.footnote>dt>span.brackets:before { - content: "[" - } - - html.writer-html5 .rst-content dl.citation>dt>span.brackets:after, - html.writer-html5 .rst-content dl.footnote>dt>span.brackets:after { - content: "]" - } - - html.writer-html5 .rst-content dl.citation>dt>span.fn-backref, - html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref { - text-align: left; - font-style: italic; - margin-left: .65rem; - word-break: break-word; - word-spacing: -.1rem; - max-width: 5rem - } - - html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a, - html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a { - word-break: keep-all - } - - html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a:not(:first-child):before, - html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a:not(:first-child):before { - content: " " - } - - html.writer-html5 .rst-content dl.citation>dd, - html.writer-html5 .rst-content dl.footnote>dd { - margin: 0 0 .5rem; - line-height: 1.2rem - } - - html.writer-html5 .rst-content dl.citation>dd p, - html.writer-html5 .rst-content dl.footnote>dd p { - font-size: .9rem - } - - html.writer-html5 .rst-content aside.citation, - html.writer-html5 .rst-content aside.footnote, - html.writer-html5 .rst-content div.citation { - padding-left: 1rem; - padding-right: 1rem; - font-size: .9rem; - line-height: 1.2rem - } - - html.writer-html5 .rst-content aside.citation p, - html.writer-html5 .rst-content aside.footnote p, - html.writer-html5 .rst-content div.citation p { - font-size: .9rem; - line-height: 1.2rem; - margin-bottom: 12px - } - - html.writer-html5 .rst-content aside.citation span.backrefs, - html.writer-html5 .rst-content aside.footnote span.backrefs, - html.writer-html5 .rst-content div.citation span.backrefs { - text-align: left; - font-style: italic; - margin-left: .65rem; - word-break: break-word; - word-spacing: -.1rem; - max-width: 5rem - } - - html.writer-html5 .rst-content aside.citation span.backrefs>a, - html.writer-html5 .rst-content aside.footnote span.backrefs>a, - html.writer-html5 .rst-content div.citation span.backrefs>a { - word-break: keep-all - } - - html.writer-html5 .rst-content aside.citation span.backrefs>a:not(:first-child):before, - html.writer-html5 .rst-content aside.footnote span.backrefs>a:not(:first-child):before, - html.writer-html5 .rst-content div.citation span.backrefs>a:not(:first-child):before { - content: " " - } - - html.writer-html5 .rst-content aside.citation span.label, - html.writer-html5 .rst-content aside.footnote span.label, - html.writer-html5 .rst-content div.citation span.label { - line-height: 1.2rem - } - - html.writer-html5 .rst-content aside.citation-list, - html.writer-html5 .rst-content aside.footnote-list, - html.writer-html5 .rst-content div.citation-list { - margin-bottom: 24px - } - - html.writer-html5 .rst-content dl.option-list kbd { - font-size: .9rem - } - - .rst-content table.docutils.footnote, - html.writer-html4 .rst-content table.docutils.citation, - html.writer-html5 .rst-content aside.footnote, - html.writer-html5 .rst-content aside.footnote-list aside.footnote, - html.writer-html5 
.rst-content div.citation-list>div.citation, - html.writer-html5 .rst-content dl.citation, - html.writer-html5 .rst-content dl.footnote { - color: grey - } - - .rst-content table.docutils.footnote code, - .rst-content table.docutils.footnote tt, - html.writer-html4 .rst-content table.docutils.citation code, - html.writer-html4 .rst-content table.docutils.citation tt, - html.writer-html5 .rst-content aside.footnote-list aside.footnote code, - html.writer-html5 .rst-content aside.footnote-list aside.footnote tt, - html.writer-html5 .rst-content aside.footnote code, - html.writer-html5 .rst-content aside.footnote tt, - html.writer-html5 .rst-content div.citation-list>div.citation code, - html.writer-html5 .rst-content div.citation-list>div.citation tt, - html.writer-html5 .rst-content dl.citation code, - html.writer-html5 .rst-content dl.citation tt, - html.writer-html5 .rst-content dl.footnote code, - html.writer-html5 .rst-content dl.footnote tt { - color: #555 - } - - .rst-content .wy-table-responsive.citation, - .rst-content .wy-table-responsive.footnote { - margin-bottom: 0 - } - - .rst-content .wy-table-responsive.citation+:not(.citation), - .rst-content .wy-table-responsive.footnote+:not(.footnote) { - margin-top: 24px - } - - .rst-content .wy-table-responsive.citation:last-child, - .rst-content .wy-table-responsive.footnote:last-child { - margin-bottom: 24px - } - - .rst-content table.docutils th { - border-color: #e1e4e5 - } - - html.writer-html5 .rst-content table.docutils th { - border: 1px solid #e1e4e5 - } - - html.writer-html5 .rst-content table.docutils td>p, - html.writer-html5 .rst-content table.docutils th>p { - line-height: 1rem; - margin-bottom: 0; - font-size: .9rem - } - - .rst-content table.docutils td .last, - .rst-content table.docutils td .last>:last-child { - margin-bottom: 0 - } - - .rst-content table.field-list, - .rst-content table.field-list td { - border: none - } - - .rst-content table.field-list td p { - line-height: inherit - } - - .rst-content table.field-list td>strong { - display: inline-block - } - - .rst-content table.field-list .field-name { - padding-right: 10px; - text-align: left; - white-space: nowrap - } - - .rst-content table.field-list .field-body { - text-align: left - } - - .rst-content code, - .rst-content tt { - color: #000; - font-family: IntelOne Mono, SFMono-Regular, Menlo, Monaco, Consolas, Liberation Mono, Courier New, Courier, monospace; - padding: 2px 5px - } - - .rst-content code big, - .rst-content code em, - .rst-content tt big, - .rst-content tt em { - font-size: 100% !important; - line-height: normal - } - - .rst-content code.literal, - .rst-content tt.literal { - color: #e74c3c; - white-space: normal - } - - .rst-content code.xref, - .rst-content tt.xref, - a .rst-content code, - a .rst-content tt { - font-weight: 700; - color: #404040; - overflow-wrap: normal - } - - .rst-content kbd, - .rst-content pre, - .rst-content samp { - font-family: IntelOne Mono, SFMono-Regular, Menlo, Monaco, Consolas, Liberation Mono, Courier New, Courier, monospace - } - - .rst-content a code, - .rst-content a tt { - color: #2980b9 - } - - .rst-content dl { - margin-bottom: 24px - } - - .rst-content dl dt { - font-weight: 700; - margin-bottom: 12px - } - - .rst-content dl ol, - .rst-content dl p, - .rst-content dl table, - .rst-content dl ul { - margin-bottom: 12px - } - - .rst-content dl dd { - margin: 0 0 12px 24px; - line-height: 24px - } - - .rst-content dl dd>ol:last-child, - .rst-content dl dd>p:last-child, - .rst-content dl 
dd>table:last-child, - .rst-content dl dd>ul:last-child { - margin-bottom: 0 - } - - html.writer-html4 .rst-content dl:not(.docutils), - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) { - margin-bottom: 24px - } - - html.writer-html4 .rst-content dl:not(.docutils)>dt, - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt { - display: table; - margin: 6px 0; - font-size: 90%; - line-height: normal; - background: #e7f2fa; - color: #2980b9; - border-top: 3px solid #6ab0de; - padding: 6px; - position: relative - } - - html.writer-html4 .rst-content dl:not(.docutils)>dt:before, - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:before { - color: #6ab0de - } - - html.writer-html4 .rst-content dl:not(.docutils)>dt .headerlink, - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink { - color: #404040; - font-size: 100% !important - } - - html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt, - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt { - margin-bottom: 6px; - border: none; - border-left: 3px solid #ccc; - background: #f0f0f0; - color: #555 - } - - html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink, - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink { - color: #404040; - font-size: 100% !important - } - - html.writer-html4 .rst-content dl:not(.docutils)>dt:first-child, - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:first-child { - margin-top: 0 - } - - html.writer-html4 .rst-content dl:not(.docutils) code.descclassname, - html.writer-html4 .rst-content dl:not(.docutils) code.descname, - html.writer-html4 .rst-content dl:not(.docutils) tt.descclassname, - html.writer-html4 .rst-content dl:not(.docutils) tt.descname, - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descclassname, - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname, - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descclassname, - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname { - background-color: transparent; - border: none; - padding: 0; - font-size: 100% !important - } - - html.writer-html4 .rst-content dl:not(.docutils) code.descname, - html.writer-html4 .rst-content dl:not(.docutils) tt.descname, - html.writer-html5 .rst-content 
dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname, - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname { - font-weight: 700 - } - - html.writer-html4 .rst-content dl:not(.docutils) .optional, - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .optional { - display: inline-block; - padding: 0 4px; - color: #000; - font-weight: 700 - } - - html.writer-html4 .rst-content dl:not(.docutils) .property, - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .property { - display: inline-block; - padding-right: 8px; - max-width: 100% - } - - html.writer-html4 .rst-content dl:not(.docutils) .k, - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .k { - font-style: italic - } - - html.writer-html4 .rst-content dl:not(.docutils) .descclassname, - html.writer-html4 .rst-content dl:not(.docutils) .descname, - html.writer-html4 .rst-content dl:not(.docutils) .sig-name, - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descclassname, - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descname, - html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .sig-name { - font-family: IntelOne Mono, SFMono-Regular, Menlo, Monaco, Consolas, Liberation Mono, Courier New, Courier, monospace; - color: #000 - } - - .rst-content .viewcode-back, - .rst-content .viewcode-link { - display: inline-block; - color: #27ae60; - font-size: 80%; - padding-left: 24px - } - - .rst-content .viewcode-back { - display: block; - float: right - } - - .rst-content p.rubric { - margin-bottom: 12px; - font-weight: 700 - } - - .rst-content code.download, - .rst-content tt.download { - background: inherit; - padding: inherit; - font-weight: 400; - font-family: inherit; - font-size: inherit; - color: inherit; - border: inherit; - white-space: inherit - } - - .rst-content code.download span:first-child, - .rst-content tt.download span:first-child { - -webkit-font-smoothing: subpixel-antialiased - } - - .rst-content code.download span:first-child:before, - .rst-content tt.download span:first-child:before { - margin-right: 4px - } - - .rst-content .guilabel { - border: 1px solid #7fbbe3; - background: #e7f2fa; - font-size: 80%; - font-weight: 700; - border-radius: 4px; - padding: 2.4px 6px; - margin: auto 2px - } - - .rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>.kbd, - .rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>kbd { - color: inherit; - font-size: 80%; - background-color: #fff; - border: 1px solid #a6a6a6; - border-radius: 4px; - box-shadow: 0 2px grey; - padding: 2.4px 6px; - margin: auto 0 - } - - .rst-content .versionmodified { - font-style: italic - } - - @media screen and (max-width:480px) { - .rst-content .sidebar { - width: 100% - } - } - - span[id*=MathJax-Span] { - color: #404040 - } - - .math { - text-align: center - } - - @font-face { - font-family: Lato; - src: url(fonts/lato-normal.woff2?bd03a2cc277bbbc338d464e679fe9942) format("woff2"); - 
font-weight: 400; - font-style: normal; - font-display: block - } - - @font-face { - font-family: Lato; - src: url(fonts/lato-bold.woff2?cccb897485813c7c256901dbca54ecf2) format("woff2"); - font-weight: 700; - font-style: normal; - font-display: block - } - - @font-face { - font-family: Lato; - src: url(fonts/lato-bold-italic.woff2?0b6bb6725576b072c5d0b02ecdd1900d) format("woff2"); - font-weight: 700; - font-style: italic; - font-display: block - } - - @font-face { - font-family: Lato; - src: url(fonts/lato-normal-italic.woff2?4eb103b4d12be57cb1d040ed5e162e9d) format("woff2"); - font-weight: 400; - font-style: italic; - font-display: block - } - - @font-face { - font-family: "Roboto Slab"; - font-style: normal; - font-weight: 400; - src: url(fonts/Roboto-Slab-Regular.woff2?7abf5b8d04d26a2cafea937019bca958) format("woff2"); - font-display: block - } - - @font-face { - font-family: "Roboto Slab"; - font-style: normal; - font-weight: 700; - src: url(fonts/Roboto-Slab-Bold.woff2?9984f4a9bda09be08e83f2506954adbe) format("woff2"); - font-display: block - } - - @font-face { - font-family: "IntelOne Mono"; - src: url(fonts/intelone-mono-font-family-bold.woff2) format("woff2"), url(fonts/intelone-mono-font-family-bold.woff) format("woff"); - font-weight: 700; - font-style: normal; - font-display: block - } - - @font-face { - font-family: "IntelOne Mono"; - src: url(fonts/intelone-mono-font-family-italic.woff2) format("woff2"), url(fonts/intelone-mono-font-family-italic.woff) format("woff"); - font-weight: 700; - font-style: italic; - font-display: block - } - - @font-face { - font-family: "IntelOne Mono"; - src: url(fonts/intelone-mono-font-family-italic.woff2) format("woff2"), url(fonts/intelone-mono-font-family-italic.woff) format("woff"); - font-weight: 400; - font-style: italic; - font-display: block - } -} \ No newline at end of file diff --git a/docs/_static/css/accessibility_overrides.css b/docs/_static/css/accessibility_overrides.css deleted file mode 100644 index 43c855f81b..0000000000 --- a/docs/_static/css/accessibility_overrides.css +++ /dev/null @@ -1,204 +0,0 @@ -@layer accessibility{ - - html{ - font-size: 100%; - } - - /* links */ - - .wy-breadcrumbs a{ - color:var(--classic-blue); - } - - [role=main] a, - footer a:not([class*=btn-neutral]){ - color: var(--classic-blue); - text-decoration: underline; - } - - [role=main] a:hover, - footer a:not([class*=btn-neutral]):hover{ - color: var(--cobalt); - } - - .rst-versions.shift-up a{ - color:var(--classic-blue-tint2); - } - - /* read the docs meta information in toggle content */ - - .rst-versions .rst-other-versions{ - color:var(--carbon-tint1); - } - - /* end read the docs meta information in toggle content */ - - /* end links */ - - footer{ - color:#737373; - } - - /* breadcrumb navigation */ - - .icon.icon-home{ - margin-left:-5px; - padding:5px; - } - - /* end breadcrumb navigation */ - - .rst-content .toctree-wrapper>p.caption, - .rst-content h1, - .rst-content h2, - .rst-content h3, - .rst-content h4, - .rst-content h5, - .rst-content h6{ - margin-bottom:1.5rem; - } - - /* search results */ - - #search-results .context{ - color:var(--black); - } - - - /* end search results */ - - /* flow content */ - - p { - line-height: inherit; - font-size: inherit; - } - - button, - input, - select, - textarea, - input[type=button], - input[type=reset], - input[type=submit] { - font-size: inherit; - } - - .rst-content .important, - .rst-content .note, - .rst-content .seealso, - .rst-content .tip, - .rst-content .warning, - .wy-alert { - 
line-height: inherit; - } - - .rst-content .section ul, - .rst-content .toctree-wrapper ul, - .rst-content section ul, - .wy-plain-list-disc, - article ul { - line-height: inherit; - } - - .rst-content .toctree-wrapper ol, - .rst-content .toctree-wrapper ol.arabic, - .rst-content section ol, - .rst-content section ol.arabic, - .wy-plain-list-decimal, - article ol { - line-height: inherit; - } - - .rst-content blockquote { - line-height: inherit; - } - - .rst-content dl dd { - line-height: inherit; - } - - /* end flow content */ - - /* sidebar */ - - .wy-nav-side ::placeholder{ - /* needed for Safari */ - color:#757575; - } - - .wy-menu-vertical header, .wy-menu-vertical p.caption{ - color:var(--classic-blue-tint2); - } - - .wy-menu-vertical li.current>a button.toctree-expand, - .wy-menu-vertical li button.toctree-expand { - color: currentColor; - padding: 7px; - transform: translate(-7px, -7px); - } - - .wy-side-nav-search{ - background: var(--cobalt); - } - - .wy-side-nav-search>div.version{ - color:var(--classic-blue-tint2); - } - - /* end sidebar */ - - - /* notes and similar */ - - .rst-content .note .admonition-title, - .rst-content .note .wy-alert-title, - .rst-content .seealso .admonition-title, - .rst-content .seealso .wy-alert-title, - .rst-content .wy-alert-info.admonition-todo .admonition-title, - .rst-content .wy-alert-info.admonition-todo .wy-alert-title, - .rst-content .wy-alert-info.admonition .admonition-title, - .rst-content .wy-alert-info.admonition .wy-alert-title, - .rst-content .wy-alert-info.attention .admonition-title, - .rst-content .wy-alert-info.attention .wy-alert-title, - .rst-content .wy-alert-info.caution .admonition-title, - .rst-content .wy-alert-info.caution .wy-alert-title, - .rst-content .wy-alert-info.danger .admonition-title, - .rst-content .wy-alert-info.danger .wy-alert-title, - .rst-content .wy-alert-info.error .admonition-title, - .rst-content .wy-alert-info.error .wy-alert-title, - .rst-content .wy-alert-info.hint .admonition-title, - .rst-content .wy-alert-info.hint .wy-alert-title, - .rst-content .wy-alert-info.important .admonition-title, - .rst-content .wy-alert-info.important .wy-alert-title, - .rst-content .wy-alert-info.tip .admonition-title, - .rst-content .wy-alert-info.tip .wy-alert-title, - .rst-content .wy-alert-info.warning .admonition-title, - .rst-content .wy-alert-info.warning .wy-alert-title, - .rst-content .wy-alert.wy-alert-info .admonition-title, - .wy-alert.wy-alert-info .rst-content .admonition-title, - .wy-alert.wy-alert-info .wy-alert-title{ - background: var(--blue-steel-shade1); - } - - /* code and code blocks */ - - .rst-content code.literal, - .rst-content tt.literal{ - color:var(--rust-shade1); - } - - .rst-content code, - .rst-content tt, code{ - font-size:0.875rem; - } - - .rst-content .linenodiv pre, - .rst-content div[class^=highlight] pre, - .rst-content pre.literal-block{ - word-break: break-all; - white-space: pre-wrap; - font-size:0.875rem; - } - -} diff --git a/docs/_static/css/colors.css b/docs/_static/css/colors.css deleted file mode 100644 index 4267889695..0000000000 --- a/docs/_static/css/colors.css +++ /dev/null @@ -1,59 +0,0 @@ - :root{ - --white: #fff; - --black: #000; - - --classic-blue: #0054ae; - --classic-blue-shade1: #00377c; - --classic-blue-shade2: #001e50; - --classic-blue-tint2: #6cc4f5; - --classic-blue-tint1: #0099ec; - - --carbon: #808080; - --carbon-shade1: #525252; - --carbon-shade2: #262626; - --carbon-tint2: #e9e9e9; - --carbon-tint1: #aeaeae; - - --blue-steel: #548fad; - 
--blue-steel-shade1: #41728a; - --blue-steel-shade2: #183544; - --blue-steel-tint2: #b9d6e5; - --blue-steel-tint1: #86b3ca; - - --geode: #8f5da2; - --geode-shade1: #653171; - --geode-tint2: #eec3f7; - --geode-tint1: #cc94da; - - --rust: #e96115; - --rust-shade1: #b24501; - --rust-tint2: #ffc599; - --rust-tint1: #ff8f51; - - --moss: #8bae46; - --moss-shade1: #708541; - --moss-shade2: #515a3d; - --moss-tint2: #d7f3a2; - --moss-tint1: #b1d272; - - --cobalt: #1e2eb8; - --cobalt-shade1: #000f8a; - --cobalt-shade2: #000864; - --cobalt-shade3:#040e35; - --cobalt-tint2: #98a1ff; - --cobalt-tint1: #5b69ff; - - --coral: #ff5662; - --coral-shade1: #c81326; - --coral-tint2: #ffb6b9; - --coral-tint1: #ff848a; - - --daisy: #fec91b; - --daisy-shade1: #edb200; - --daisy-shade2: #c98f00; - --daisy-tint1: #ffe17a; - - --status-error: #ce0000; - --status-success: #008a00; - --status-warning: #ffd100; -} diff --git a/docs/_static/css/custom.css b/docs/_static/css/custom.css deleted file mode 100644 index 098d13475f..0000000000 --- a/docs/_static/css/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -.toctree-expand { - display: none; -} diff --git a/docs/_static/css/fonts/Roboto-Slab-Bold.woff2 b/docs/_static/css/fonts/Roboto-Slab-Bold.woff2 deleted file mode 100644 index 7059e23142..0000000000 Binary files a/docs/_static/css/fonts/Roboto-Slab-Bold.woff2 and /dev/null differ diff --git a/docs/_static/css/fonts/Roboto-Slab-Regular.woff2 b/docs/_static/css/fonts/Roboto-Slab-Regular.woff2 deleted file mode 100644 index f2c76e5bda..0000000000 Binary files a/docs/_static/css/fonts/Roboto-Slab-Regular.woff2 and /dev/null differ diff --git a/docs/_static/css/fonts/intelone-mono-font-family-bold.woff b/docs/_static/css/fonts/intelone-mono-font-family-bold.woff deleted file mode 100644 index d426aa01e4..0000000000 Binary files a/docs/_static/css/fonts/intelone-mono-font-family-bold.woff and /dev/null differ diff --git a/docs/_static/css/fonts/intelone-mono-font-family-bold.woff2 b/docs/_static/css/fonts/intelone-mono-font-family-bold.woff2 deleted file mode 100644 index 76aa8a41ba..0000000000 Binary files a/docs/_static/css/fonts/intelone-mono-font-family-bold.woff2 and /dev/null differ diff --git a/docs/_static/css/fonts/intelone-mono-font-family-italic.woff b/docs/_static/css/fonts/intelone-mono-font-family-italic.woff deleted file mode 100644 index 8cb0afcfe6..0000000000 Binary files a/docs/_static/css/fonts/intelone-mono-font-family-italic.woff and /dev/null differ diff --git a/docs/_static/css/fonts/intelone-mono-font-family-italic.woff2 b/docs/_static/css/fonts/intelone-mono-font-family-italic.woff2 deleted file mode 100644 index 05b2ac271c..0000000000 Binary files a/docs/_static/css/fonts/intelone-mono-font-family-italic.woff2 and /dev/null differ diff --git a/docs/_static/css/fonts/intelone-mono-font-family-regular.ttf b/docs/_static/css/fonts/intelone-mono-font-family-regular.ttf deleted file mode 100644 index 6409c6ab7d..0000000000 Binary files a/docs/_static/css/fonts/intelone-mono-font-family-regular.ttf and /dev/null differ diff --git a/docs/_static/css/fonts/intelone-mono-font-family-regular.woff b/docs/_static/css/fonts/intelone-mono-font-family-regular.woff deleted file mode 100644 index a3f68a3631..0000000000 Binary files a/docs/_static/css/fonts/intelone-mono-font-family-regular.woff and /dev/null differ diff --git a/docs/_static/css/fonts/intelone-mono-font-family-regular.woff2 b/docs/_static/css/fonts/intelone-mono-font-family-regular.woff2 deleted file mode 100644 index 737aaa0150..0000000000 Binary files 
a/docs/_static/css/fonts/intelone-mono-font-family-regular.woff2 and /dev/null differ diff --git a/docs/_static/css/fonts/lato-bold-italic.woff2 b/docs/_static/css/fonts/lato-bold-italic.woff2 deleted file mode 100644 index c4e3d804b5..0000000000 Binary files a/docs/_static/css/fonts/lato-bold-italic.woff2 and /dev/null differ diff --git a/docs/_static/css/fonts/lato-bold.woff2 b/docs/_static/css/fonts/lato-bold.woff2 deleted file mode 100644 index bb195043cf..0000000000 Binary files a/docs/_static/css/fonts/lato-bold.woff2 and /dev/null differ diff --git a/docs/_static/css/fonts/lato-normal-italic.woff2 b/docs/_static/css/fonts/lato-normal-italic.woff2 deleted file mode 100644 index 3404f37e2e..0000000000 Binary files a/docs/_static/css/fonts/lato-normal-italic.woff2 and /dev/null differ diff --git a/docs/_static/css/fonts/lato-normal.woff2 b/docs/_static/css/fonts/lato-normal.woff2 deleted file mode 100644 index 3bf9843328..0000000000 Binary files a/docs/_static/css/fonts/lato-normal.woff2 and /dev/null differ diff --git a/docs/_static/favicon.png b/docs/_static/favicon.png new file mode 100644 index 0000000000..77d8bf4802 Binary files /dev/null and b/docs/_static/favicon.png differ diff --git a/docs/_static/openfl_logo.png b/docs/_static/openfl_logo.png new file mode 100644 index 0000000000..3158a5b5cf Binary files /dev/null and b/docs/_static/openfl_logo.png differ diff --git a/docs/_static/style.css b/docs/_static/style.css new file mode 100644 index 0000000000..4a8c3e5ed1 --- /dev/null +++ b/docs/_static/style.css @@ -0,0 +1,297 @@ +@import url("theme.css"); + +/* Base LP sidebar modifications */ +body:has(.hero) .sidebar-toggle, +body:has(.hero) .bd-sidebar-secondary { + display: none !important; +} + +body:has(.hero) .search-button { + display: flex !important; +} + +body:has(.hero) .primary-toggle { + display: inline-block !important; +} + +body:has(.hero) .prev-next-footer { + display: none; +} + +body:has(.hero) .bd-article-container { + max-width: unset !important; +} + +body:has(.hero) .bd-page-width { + max-width: unset !important; +} + +body:has(.hero) .bd-article { + display: flex; + flex-direction: column; + padding: 0; +} + +body:has(.hero) .bd-container { + flex-direction: column; +} + +@media (min-width: 960px) { + body:has(.hero) .bd-header-article { + justify-content: center; + } + + body:has(.hero) .header-article-items, + body:has(.hero) .bd-article > section { + max-width: 65rem !important; + align-self: center; + width: -moz-available; + width: -webkit-fill-available; + width: fill-available; + } +} + +/* Custom CSS */ + +:root { + --block-bg-opacity: .5; +} + +.bd-main .bd-content .bd-article-container .bd-article:has(.hero) { + padding: 0; +} + +.bd-main .bd-content .bd-article-container .bd-article:has(.hero) > section > * { + padding-inline: 2rem !important; +} + +@media (max-width: 768px) { + .bd-main .bd-content .bd-article-container .bd-article:has(.hero) > section > * { + padding-inline: 1rem !important; + } +} + +.bd-main .bd-content .bd-article-container .bd-article:has(.hero) h1 { + display: none; +} + +.wy-side-nav-search { + background-color: #fff; +} + +.getting-started, +.user-guides, +.installation { + background: #3C4043; + color: white; + height: 170px; + border: none !important; + border-radius: 12px; +} + +.getting-started:hover, +.user-guides:hover, +.installation:hover { + background: #AECBFA; + color: #202124; + transform: unset !important; +} + +.getting-started .sd-card-body, +.user-guides .sd-card-body, +.installation .sd-card-body { + 
display: flex; + align-items: center; + justify-content: center; + font: 500 24px 'Roboto', sans-serif; +} + +.getting-started .sd-card-title, +.user-guides .sd-card-title, +.installation .sd-card-title { + display: flex; + flex-direction: column; + align-items: center; + gap: 12px; +} + +.getting-started svg, +.user-guides svg, +.installation svg { + color: #8AB4F8; +} + +.getting-started:hover svg, +.user-guides:hover svg, +.installation:hover svg { + color: #3C4043; +} + +.bd-main .bd-content .bd-article-container .bd-article:has(.hero) > section > .hero { + padding-inline: 2rem 0 !important; +} + +.hero { + display: grid; + grid: auto-flow / 1fr .6fr; + align-items: center; + background: rgb(32,33,36); + background: linear-gradient(90deg, rgba(32,33,36,1) 0%, rgba(39,45,56,1) 100%); + position: relative; + overflow: hidden; + border-radius: 24px; +} + +.hero > img { + position: absolute; + top: 0; + right: 0; + height: 100%; + background: transparent !important; +} + +.hero-left { + padding-block: 24px; + display: flex; + flex-direction: column; +} + +.hero-left img { + width: 100px; + height: auto; + position: relative; + margin-bottom: 16px; + background: transparent !important; +} + +.hero-left h2 { + font: 500 32px 'Google Sans', 'Roboto', sans-serif; + color: white; + margin-top: 0; +} + +.hero-left p { + font: 400 16px 'Roboto', sans-serif; + color: white; +} + +@media (max-width: 1295px) { + .hero > img { + right: -75px; + } +} + +@media (max-width: 750px) { + .hero { + grid: auto-flow / 1fr; + } + + .hero-left { + padding-right: 2rem; + } + + .hero > img { + display: none; + } +} + +.product-offerings { + margin-block: 32px !important; +} + +.product-offerings .sd-card-title { + font: 400 24px 'Google Sans', 'Roboto', sans-serif; +} + +.color-cards { + background: #E8EAED; + color: #222832; + padding: 48px 12px 0 12px; + margin-bottom: 0 !important; + border-radius: 24px 24px 0 0; +} + +.color-cards > div { + gap: 24px 0; +} + +.color-cards + p { + background: #E8EAED; + padding: 24px 12px 48px 12px; + font-weight: 600; + color: #222832; + border-radius: 0 0 24px 24px; +} + +.color-cards + p > a { + color: #222832; +} + +.color-cards + p > a:hover, +html[data-theme="dark"] .color-cards + p > a:hover { + color: #e89217; +} + +html[data-theme="dark"] .color-cards, +html[data-theme="dark"] .hero, +html[data-theme="dark"] .color-cards + p, +html[data-theme="dark"] .color-cards + p > a { + background: #202124; + color: white; +} + +.ecosystem-grid { + font-size: smaller; +} + +.ecosystem-grid > div { + gap: 20px; +} + +.ecosystem-grid .sd-col { + border: 1px solid #dadce0; + border-radius: 8px; + width: calc(50% - 10px); + padding: 16px; +} + +.ecosystem-grid .sd-col > p { + display: flex; + flex-direction: column; + gap: 10px; +} + +.ecosystem-grid .sd-col > p > svg { + color: #00897B; +} + +.ecosystem-grid ul { + list-style-type: none; + padding-inline-start: 0.5em; +} + +.ecosystem-grid a { + text-decoration: none; +} + +div.red-background pre { + background-color: rgba(244, 204, 204, var(--block-bg-opacity)); +} + +div.green-background pre { + background-color: rgba(204, 244, 204, var(--block-bg-opacity)); +} + +/* Python code block comments */ +html[data-theme="light"] .highlight span.c1 { + color: #fa8d59; +} + +/* Python code traceback and exception */ +html[data-theme="light"] .highlight span.gt { + color: #ff0000; +} + +html[data-theme="light"] .highlight span.gr { + color: #ff0000; +} \ No newline at end of file diff --git a/docs/_templates/custom-class-template.rst 
b/docs/_templates/custom-class-template.rst index d64b80d51c..16ebb2f338 100644 --- a/docs/_templates/custom-class-template.rst +++ b/docs/_templates/custom-class-template.rst @@ -6,18 +6,16 @@ :members: :show-inheritance: :inherited-members: - :special-members: __call__, __add__, __mul__ {% block methods %} + .. automethod:: __init__ + {% if methods %} .. rubric:: {{ _('Methods') }} .. autosummary:: - :nosignatures: {% for item in methods %} - {%- if not item.startswith('_') %} ~{{ name }}.{{ item }} - {%- endif -%} {%- endfor %} {% endif %} {% endblock %} diff --git a/docs/_templates/custom-module-template.rst b/docs/_templates/custom-module-template.rst index ec6b7ab05d..ef2c09a544 100644 --- a/docs/_templates/custom-module-template.rst +++ b/docs/_templates/custom-module-template.rst @@ -1,10 +1,10 @@ {{ fullname | escape | underline}} .. automodule:: {{ fullname }} - :members: + {% block attributes %} {% if attributes %} - .. rubric:: Module attributes + .. rubric:: Module Attributes .. autosummary:: :toctree: @@ -20,7 +20,6 @@ .. autosummary:: :toctree: - :nosignatures: {% for item in functions %} {{ item }} {%- endfor %} @@ -34,7 +33,6 @@ .. autosummary:: :toctree: :template: custom-class-template.rst - :nosignatures: {% for item in classes %} {{ item }} {%- endfor %} @@ -55,6 +53,8 @@ {% block modules %} {% if modules %} +.. rubric:: Modules + .. autosummary:: :toctree: :template: custom-module-template.rst diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html new file mode 100644 index 0000000000..93a7941f9d --- /dev/null +++ b/docs/_templates/layout.html @@ -0,0 +1,2 @@ +{% extends "!layout.html" %} +{% set css_files = css_files + ["_static/style.css"] %} \ No newline at end of file diff --git a/docs/about/blogs_publications.md b/docs/about/blogs_publications.md index 5ef2c27ce8..39b33ba7be 100644 --- a/docs/about/blogs_publications.md +++ b/docs/about/blogs_publications.md @@ -1,5 +1,4 @@ -Blogs & Publications -==================== +# Blogs and Publications * [Federated learning enables big data for rare cancer boundary detection, Dec 2022](https://www.nature.com/articles/s41467-022-33407-5) * [How OpenFL Can Boost Your Federated Learning Project, 2022](https://www.intel.com/content/www/us/en/developer/articles/technical/how-openfl-boost-your-federated-learning-project.html) @@ -8,4 +7,3 @@ Blogs & Publications * [A Path Towards Secure Federated Learning, Apr 2022](https://medium.com/openfl/a-path-towards-secure-federated-learning-c2fb16d5e66e) * [Go Federated with OpenFL: Put your Deep Learning pipeline on Federated rails, Oct 2021](https://towardsdatascience.com/go-federated-with-openfl-8bc145a5ead1) * [Federated learning in medicine: facilitating multi-institutional collaborations without sharing patient data, Jul 2020](https://www.nature.com/articles/s41598-020-69250-1) - diff --git a/docs/about/features.rst b/docs/about/features.rst index d57569484f..bd3e05581d 100644 --- a/docs/about/features.rst +++ b/docs/about/features.rst @@ -1,7 +1,3 @@ -.. # Copyright (C) 2020-2024 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - -========== Features ========== @@ -11,7 +7,7 @@ Features Running a Federation --------------------- -|productName| has multiple options for setting up a federation and running experiments, depending on the users needs. +OpenFL has multiple options for setting up a federation and running experiments, depending on the users needs. Task Runner Define an experiment and distribute it manually. 
All participants can verify model code and FL plan prior to execution. @@ -51,12 +47,12 @@ Aggregation Algorithms FedAvg Paper: `McMahan et al., 2017 `_ - Default aggregation algorithm in |productName|. Multiplies local model weights with relative data size and averages this multiplication result. + Default aggregation algorithm in OpenFL. Multiplies local model weights with relative data size and averages this multiplication result. FedProx Paper: `Li et al., 2020 `_ - FedProx in |productName| is implemented as a custom optimizer for PyTorch/TensorFlow. In order to use FedProx, do the following: + FedProx in OpenFL is implemented as a custom optimizer for PyTorch/TensorFlow. In order to use FedProx, do the following: 1. PyTorch: @@ -74,7 +70,7 @@ FedProx FedOpt Paper: `Reddi et al., 2020 `_ - FedOpt in |productName|: :ref:`adaptive_aggregation_functions` + FedOpt in OpenFL: :ref:`adaptive_aggregation_functions` FedCurv Paper: `Shoham et al., 2019 `_ diff --git a/docs/about/features_index/fed_eval.rst b/docs/about/features_index/fed_eval.rst index e8ec5fbdd7..e35c0f5afa 100644 --- a/docs/about/features_index/fed_eval.rst +++ b/docs/about/features_index/fed_eval.rst @@ -1,7 +1,7 @@ .. # Copyright (C) 2020-2024 Intel Corporation .. # SPDX-License-Identifier: Apache-2.0 -Federated Evaluation with |productName| +Federated Evaluation ======================================= Introduction to Federated Evaluation @@ -9,10 +9,10 @@ Introduction to Federated Evaluation Model evaluation is an essential part of the machine learning development cycle. In a traditional centralized learning system, all evaluation data is collected on a localized server. Because of this, centralized evaluation of machine learning models is a fairly straightforward task. However, in a federated learning system, data is distributed across multiple decentralized devices or nodes. In an effort to preserve the security and privacy of the distributed data, it is infeasible to simply aggregate all the data into a centralized system. Federated evaluation offers a solution by assessing the model at the client side and aggregating the accuracy without ever having to share the data. This is crucial for ensuring the model's effectiveness and reliability in diverse and real-world environments while respecting privacy and data locality -|productName|'s Support for Federated Evaluation +OpenFL's Support for Federated Evaluation ------------------------------------------------- -|productName|, a flexible framework for Federated Learning, has the capability to perform federated evaluation by modifying the federation plan. In this document, we will show how OpenFL can facilitate this process through its task runner API (aggregator-based workflow), where the model evaluation is distributed across various collaborators before being sent to the aggregator. For the task runner API, this involves minor modifications to the ``plan.yaml`` file, which defines the workflow and tasks for the federation. In particular, the federation plan should be defined to run for one forward pass and perform only aggregated model validation +OpenFL, a flexible framework for Federated Learning, has the capability to perform federated evaluation by modifying the federation plan. In this document, we will show how OpenFL can facilitate this process through its task runner API (aggregator-based workflow), where the model evaluation is distributed across various collaborators before being sent to the aggregator. 
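Conceptually, the aggregator only ever receives each collaborator's metric and sample count, never the data itself. A minimal sketch of that aggregation step is shown below; the function and the numbers are hypothetical illustrations, not part of the OpenFL API.

.. code-block:: python

    # Hypothetical sketch: federated evaluation combines per-collaborator
    # validation metrics without moving any local data.
    def aggregate_accuracy(local_results):
        """local_results: list of (num_local_samples, local_accuracy) pairs."""
        total_samples = sum(n for n, _ in local_results)
        return sum(n * acc for n, acc in local_results) / total_samples

    # Three collaborators report accuracy measured on their own datasets.
    print(aggregate_accuracy([(1000, 0.91), (500, 0.88), (250, 0.93)]))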
For the task runner API, this involves minor modifications to the ``plan.yaml`` file, which defines the workflow and tasks for the federation. In particular, the federation plan should be defined to run for one forward pass and perform only aggregated model validation In general pipeline is as follows: @@ -28,7 +28,7 @@ To demonstrate usage of the task runner API (aggregator-based workflow) for fede This script can be directly executed as follows: -.. code-block:: console +.. code-block:: shell $ python test_hello_federation.py --template torch_cnn_mnist_fed_eval diff --git a/docs/about/features_index/interactive.rst b/docs/about/features_index/interactive.rst index ea7654443c..3cf05571d5 100644 --- a/docs/about/features_index/interactive.rst +++ b/docs/about/features_index/interactive.rst @@ -26,7 +26,7 @@ Follow the procedure in the director-based workflow to become familiar with the - *Collaborator manager* is Data owner's representative controlling Envoy. .. note:: - The Open Federated Learning (|productName|) interactive Python API enables the Experiment manager (data scientists) to define and start a federated learning experiment from a single entry point: a Jupyter\*\ notebook or a Python\*\ script. + The Open Federated Learning (OpenFL) interactive Python API enables the Experiment manager (data scientists) to define and start a federated learning experiment from a single entry point: a Jupyter\*\ notebook or a Python\*\ script. See `Interactive Python API (Beta)`_ for details. @@ -66,14 +66,14 @@ On the other hand, if the Director rejects the experiment, the experiment is abo OPTIONAL STEP: Create PKI Certificates Using Step-CA ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The use of mutual Transport Layer Security (mTLS) is recommended for deployments in untrusted environments to establish participant identity and to encrypt communication. You may either import certificates provided by your organization or generate certificates with the :ref:`semi-automatic PKI ` provided by |productName|. +The use of mutual Transport Layer Security (mTLS) is recommended for deployments in untrusted environments to establish participant identity and to encrypt communication. You may either import certificates provided by your organization or generate certificates with the :ref:`semi-automatic PKI ` provided by OpenFL. .. _step0_install_director_prerequisites: -STEP 1: Install Open Federated Learning (|productName|) +STEP 1: Install Open Federated Learning (OpenFL) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Install |productName| in a virtual Python\*\ environment. See :ref:`install_package` for details. +Install OpenFL in a virtual Python\*\ environment. See :ref:`installation` for details. .. _step1_start_the_director: @@ -84,7 +84,7 @@ Start the Director on a node with at least two open ports. See :ref:`openfl_ll_c 1. Create a Director workspace with a default config file. - .. code-block:: console + .. code-block:: shell $ fx director create-workspace -p path/to/director_workspace_dir @@ -98,13 +98,13 @@ Start the Director on a node with at least two open ports. See :ref:`openfl_ll_c If mTLS protection is not set up, run this command. - .. code-block:: console + .. code-block:: shell $ fx director start --disable-tls -c director_config.yaml If you have a federation with PKI certificates, run this command. - .. code-block:: console + .. 
code-block:: shell $ fx director start -c director_config.yaml \ -rc cert/root_ca.crt \ @@ -137,15 +137,15 @@ If any of the Envoys rejects the experiment, a :code:`set_experiment_failed` req OPTIONAL STEP: Sign PKI Certificates (Optional) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The use of mTLS is recommended for deployments in untrusted environments to establish participant identity and to encrypt communication. You may either import certificates provided by your organization or use the :ref:`semi-automatic PKI certificate ` provided by |productName|. +The use of mTLS is recommended for deployments in untrusted environments to establish participant identity and to encrypt communication. You may either import certificates provided by your organization or use the :ref:`semi-automatic PKI certificate ` provided by OpenFL. .. _step0_install_envoy_prerequisites: -STEP 1: Install |productName| +STEP 1: Install OpenFL ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Install |productName| in a Python\*\ virtual environment. See :ref:`install_package` for details. +Install OpenFL in a Python\*\ virtual environment. See :ref:`installation` for details. .. _step1_start_the_envoy: @@ -155,7 +155,7 @@ STEP 2: Start the Envoy 1. Create an Envoy workspace with a default config file and shard descriptor Python\*\ script. - .. code-block:: console + .. code-block:: shell $ fx envoy create-workspace -p path/to/envoy_workspace_dir @@ -179,7 +179,7 @@ STEP 2: Start the Envoy If mTLS protection is not set up, run this command. - .. code-block:: console + .. code-block:: shell ENVOY_NAME=envoy_example_name @@ -192,7 +192,7 @@ STEP 2: Start the Envoy If you have a federation with PKI certificates, run this command. - .. code-block:: console + .. code-block:: shell $ ENVOY_NAME=envoy_example_name @@ -222,7 +222,7 @@ that is allow to communicate with the Director using a gRPC client. Interactive Python API (Beta) ----------------------------- -The Open Federated Learning (|productName|) interactive Python API enables the Experiment manager (data scientists) to define and start a federated learning experiment from a single entry point: a Jupyter\*\ notebook or a Python script. +The Open Federated Learning (OpenFL) interactive Python API enables the Experiment manager (data scientists) to define and start a federated learning experiment from a single entry point: a Jupyter\*\ notebook or a Python script. - `Prerequisites`_ - `Define a Federated Learning Experiment`_ @@ -345,11 +345,11 @@ Instantiate and initialize a model and optimizer in your preferred deep learning from openfl.interface.interactive_api.experiment import ModelInterface MI = ModelInterface(model, optimizer, framework_plugin: str) -The initialized model and optimizer objects should be passed to the :code:`ModelInterface` along with the path to correct Framework Adapter plugin inside the |productName| package +The initialized model and optimizer objects should be passed to the :code:`ModelInterface` along with the path to correct Framework Adapter plugin inside the OpenFL package or from local workspace. .. note:: - The |productName| interactive API supports *TensorFlow* and *PyTorch* frameworks via existing plugins. + The OpenFL interactive API supports *TensorFlow* and *PyTorch* frameworks via existing plugins. User can add support for other deep learning frameworks via the plugin interface and point to your implementation of a :code:`framework_plugin` in :code:`ModelInterface`. @@ -384,7 +384,7 @@ Register an FL task and accompanying information. 
FL tasks return a dictionary object with metrics: :code:`{metric name: metric value for this task}`. .. note:: - The |productName| interactive API currently allows registering only standalone functions defined in the main module or imported from other modules inside the workspace. + The OpenFL interactive API currently allows registering only standalone functions defined in the main module or imported from other modules inside the workspace. The :code:`TaskInterface` class must be instantiated before you can use its methods to register FL tasks. diff --git a/docs/about/features_index/privacy_meter.rst b/docs/about/features_index/privacy_meter.rst index d911eff416..45f4b2d214 100644 --- a/docs/about/features_index/privacy_meter.rst +++ b/docs/about/features_index/privacy_meter.rst @@ -4,13 +4,11 @@ Privacy Meter ============== -On the Integration of Privacy and |productName| ------------------------------------------------ Federated learning (FL) enables parties to learn from each other without sharing their data. In FL, parties share the local update about a global model in each round with a server. The server aggregates the local updates from all parties to produce the next version of the global model, which will be used by all parties as the initialization for training in the next round. Although each party's data remains local, the shared local updates and aggregate global model each round can leak significant information about the private local training datasets. Specifically, the server can infer information about (even potentially reconstruct) the private data from each party based on their shared local update. Even when the server is trusted, collaborating parties of FL can infer other parties' sensitive data based on the updated global model in each round due to the fact that it is influenced by all local model updates. Due to this serious privacy issue, enabling parties to audit their privacy loss becomes a compelling need. -Privacy meter, based on state-of-the-art membership inference attacks, provides a tool to quantitatively audit data privacy in statistical and machine learning algorithms. The objective of a membership inference attack is to determine whether a given data record was in the training dataset of the target model. Measures of success (accuracy, area under the ROC curve, true positive rate at a given false positive rate ...) for particular membership inference attacks against a target model are used to estimate privacy loss for that model (how much information a target model leaks about its training data). Since stonger attacks may be possible, these measures serve as lower bounds of the actual privacy loss. We have integrated the ML Privacy Meter library into |productName|, generating privacy loss reports for all party's local model updates as well as the global models throughout all rounds of the FL training. +Privacy meter, based on state-of-the-art membership inference attacks, provides a tool to quantitatively audit data privacy in statistical and machine learning algorithms. The objective of a membership inference attack is to determine whether a given data record was in the training dataset of the target model. Measures of success (accuracy, area under the ROC curve, true positive rate at a given false positive rate ...) for particular membership inference attacks against a target model are used to estimate privacy loss for that model (how much information a target model leaks about its training data). 
Since stonger attacks may be possible, these measures serve as lower bounds of the actual privacy loss. We have integrated the ML Privacy Meter library into OpenFL, generating privacy loss reports for all party's local model updates as well as the global models throughout all rounds of the FL training. Threat Model ----------------------------------------------- @@ -23,17 +21,17 @@ In this threat model, each party can audit the privacy loss of the local and glo Workflow ----------------------------------------------- We provide a demo code in `cifar10_PM.py `_. Here, we briefly describe its workflow. -In each round of FL, parties train, starting with the current global model as initialization, using their local dataset. Then, the current global model and updated local model will be passed to the privacy auditing module (See `audit` function in `cifar10_PM.py`) to produce a privacy loss report. The local model update will then be shared to the server and all such updates aggregated to form the next global model. Though this is a simulation so that no network sharing of models is involved, these reports could be used in a fully distributed setting to trigger actions when the loss is too high. These actions could include not sharing local updates to the aggregator, not +In each round of FL, parties train, starting with the current global model as initialization, using their local dataset. Then, the current global model and updated local model will be passed to the privacy auditing module (See :code:`audit` function in :code:`cifar10_PM.py`) to produce a privacy loss report. The local model update will then be shared to the server and all such updates aggregated to form the next global model. Though this is a simulation so that no network sharing of models is involved, these reports could be used in a fully distributed setting to trigger actions when the loss is too high. These actions could include not sharing local updates to the aggregator, not allowing the FL system to release the model to other outside entities, or potentially re-running local training in a differentially private mode and re-auditing in an attempt to reduce the leakage before sharing occurs. Methodology ----------------------------------------------- -We integrate the population attack from ML Privacy Meter into |productName|. In the population attack, the adversary first computes the signal (e.g., loss, logits) on all samples in a population dataset using the target model. The population dataset is sampled from the same distribution as the train and test datasets, but is non-overlapping with both. The population dataset signals are then used to determine (using the fact that all population data are known not to be target training samples) a signal threshold for which false positives (samples whose signal against the threshold would be erroneously identified as target training samples) would occur at a rate below a provided false positive rate tolerance. Known positives (target training samples) as well as known negatives (target test samples) are tested against the threshold to determine how well this threshold does at classifying training set memberhsip. +We integrate the population attack from ML Privacy Meter into OpenFL. In the population attack, the adversary first computes the signal (e.g., loss, logits) on all samples in a population dataset using the target model. The population dataset is sampled from the same distribution as the train and test datasets, but is non-overlapping with both. 
The population dataset signals are then used to determine (using the fact that all population data are known not to be target training samples) a signal threshold for which false positives (samples whose signal against the threshold would be erroneously identified as target training samples) would occur at a rate below a provided false positive rate tolerance. Known positives (target training samples) as well as known negatives (target test samples) are tested against the threshold to determine how well this threshold does at classifying training set memberhsip. -Therefore, to use this attack for auditing privacy, we assume there is a set of data points used for auditing which is not overlapped with the training dataset. The size of the auditing dataset is indicated by `audit_dataset_ratio` argument. In addition, we also need to define which signal will be used to distinguish members and non-members. Currently, we support loss, logits and gradient norm. When the gradient norm is used for inferring the membership information, we need to specify which layer of the model we would like to compute the gradient with respect to. For instance, if we want to measure the gradient norm with respect to the 10th layer of the representation (before the fully connected layers), we can pass the following argument `--is_feature True` and `--layer_number 10` to the `cifar10_PM.py`. +Therefore, to use this attack for auditing privacy, we assume there is a set of data points used for auditing which is not overlapped with the training dataset. The size of the auditing dataset is indicated by :code:`audit_dataset_ratio` argument. In addition, we also need to define which signal will be used to distinguish members and non-members. Currently, we support loss, logits and gradient norm. When the gradient norm is used for inferring the membership information, we need to specify which layer of the model we would like to compute the gradient with respect to. For instance, if we want to measure the gradient norm with respect to the 10th layer of the representation (before the fully connected layers), we can pass the following argument :code:`--is_feature True` and :code:`--layer_number 10` to the :code:`cifar10_PM.py`. -To measure the success of the attack (privacy loss), we generate the ROC of the attack and the dynamic of the AUC during the training. In addition, parties can also indicate the false positive rate tolerance, and the privacy loss report will show the maximal true positive rate (fraction of members which is correctly identified) during the training. This false positive rate tolerance is passed to `fpr_tolerance` argument. The privacy loss report will be saved in the folder indicated by `log_dir` argument. +To measure the success of the attack (privacy loss), we generate the ROC of the attack and the dynamic of the AUC during the training. In addition, parties can also indicate the false positive rate tolerance, and the privacy loss report will show the maximal true positive rate (fraction of members which is correctly identified) during the training. This false positive rate tolerance is passed to :code:`fpr_tolerance` argument. The privacy loss report will be saved in the folder indicated by :code:`log_dir` argument. Examples ----------------------------------------------- -`Here `_, we give a few commands and the results for each of them. \ No newline at end of file +`Here `_, we give a few commands and the results for each of them. 
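To make the thresholding logic described above concrete, here is a minimal NumPy sketch of a population-style audit. It is illustrative only and is not the ML Privacy Meter implementation; all names are hypothetical.

.. code-block:: python

    import numpy as np

    def population_audit(population_loss, member_loss, nonmember_loss, fpr_tolerance=0.1):
        """Loss-threshold membership test calibrated on a population (non-training) set.

        population_loss: losses of samples known not to be in the training set.
        member_loss / nonmember_loss: losses of known training / test samples.
        """
        population_loss = np.asarray(population_loss)
        member_loss = np.asarray(member_loss)
        nonmember_loss = np.asarray(nonmember_loss)
        # Lower loss suggests "training member", so place the threshold at the
        # fpr_tolerance quantile of the population losses: at most that fraction
        # of known non-members would be wrongly flagged as members.
        threshold = np.quantile(population_loss, fpr_tolerance)
        tpr = float(np.mean(member_loss <= threshold))     # members correctly flagged
        fpr = float(np.mean(nonmember_loss <= threshold))  # non-members wrongly flagged
        return threshold, tpr, fpr

A high true positive rate at a small false positive rate tolerance indicates that the model leaks substantial membership information about its training data.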
\ No newline at end of file diff --git a/docs/about/features_index/taskrunner.rst b/docs/about/features_index/taskrunner.rst index 75b408f0d2..37e050e5a3 100644 --- a/docs/about/features_index/taskrunner.rst +++ b/docs/about/features_index/taskrunner.rst @@ -4,7 +4,7 @@ .. _running_the_task_runner: ================ -Task Runner API +TaskRunner API ================ Let's take a deeper dive into the Task Runner API. If you haven't already, we suggest checking out the :ref:`quick_start` for a primer on doing a simple experiment on a single node. @@ -121,9 +121,9 @@ Bare Metal Approach .. note:: - Ensure you have installed the |productName| package on every node (aggregator and collaborators) in the federation. + Ensure you have installed the OpenFL package on every node (aggregator and collaborators) in the federation. - See :ref:`install_package` for details. + See :ref:`installation` for details. @@ -147,9 +147,9 @@ Bare Metal Approach STEP 1: Create a Workspace ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -1. Start a Python 3.9 (>=3.9, <3.12) virtual environment and confirm |productName| is available. +1. Start a Python 3.9 (>=3.9, <3.12) virtual environment and confirm OpenFL is available. - .. code-block:: console + .. code-block:: shell $ fx @@ -158,7 +158,7 @@ STEP 1: Create a Workspace Set the environment variables to use the :code:`keras_cnn_mnist` as the template and :code:`${HOME}/my_federation` as the path to the workspace directory. - .. code-block:: console + .. code-block:: shell $ export WORKSPACE_TEMPLATE=keras_cnn_mnist $ export WORKSPACE_PATH=${HOME}/my_federation @@ -173,14 +173,14 @@ STEP 1: Create a Workspace See the complete list of available templates. - .. code-block:: console + .. code-block:: shell $ fx workspace create --prefix ${WORKSPACE_PATH} 4. Create a workspace directory for the new federation project. - .. code-block:: console + .. code-block:: shell $ fx workspace create --prefix ${WORKSPACE_PATH} --template ${WORKSPACE_TEMPLATE} @@ -191,13 +191,13 @@ STEP 1: Create a Workspace 5. Change to the workspace directory. - .. code-block:: console + .. code-block:: shell $ cd ${WORKSPACE_PATH} 6. Install the workspace requirements: - .. code-block:: console + .. code-block:: shell $ pip install -r requirements.txt @@ -211,7 +211,7 @@ STEP 1: Create a Workspace The protobuf file with the initial weights is found in **${WORKSPACE_TEMPLATE}_init.pbuf**. - .. code-block:: console + .. code-block:: shell $ fx plan initialize @@ -222,19 +222,19 @@ STEP 1: Create a Workspace - OPTION 1: override the auto populated FQDN value with the :code:`-a` flag. - .. code-block:: console + .. code-block:: shell $ fx plan initialize -a aggregator-hostname.internal-domain.com - OPTION 2: override the apparent FQDN of the system by setting an FQDN environment variable. - .. code-block:: console + .. code-block:: shell $ export FQDN=x.x.x.x and initializing the FL plan - .. code-block:: console + .. code-block:: shell $ fx plan initialize @@ -275,7 +275,7 @@ Setting Up the Certificate Authority 1. Change to the path of your workspace: - .. code-block:: console + .. code-block:: shell $ cd WORKSPACE_PATH @@ -283,13 +283,13 @@ Setting Up the Certificate Authority All certificates will be signed by the aggregator node. Follow the instructions and enter the information as prompted. The command will create a simple database file to keep track of all issued certificates. - .. code-block:: console + .. code-block:: shell $ fx workspace certify 3. 
Run the aggregator certificate creation command, replacing :code:`AFQDN` with the actual `fully qualified domain name (FQDN) `_ for the aggregator node. - .. code-block:: console + .. code-block:: shell $ fx aggregator generate-cert-request --fqdn AFQDN @@ -297,7 +297,7 @@ Setting Up the Certificate Authority On Linux\*\, you can discover the FQDN with this command: - .. code-block:: console + .. code-block:: shell $ hostname --all-fqdns | awk '{print $1}' @@ -305,19 +305,19 @@ Setting Up the Certificate Authority You can override the apparent FQDN by setting it explicitly via the :code:`--fqdn` parameter. - .. code-block:: console + .. code-block:: shell $ fx aggregator generate-cert-request --fqdn AFQDN If you omit the :code:`--fdqn` parameter, then :code:`fx` will automatically use the FQDN of the current node assuming the node has been correctly set with a static address. - .. code-block:: console + .. code-block:: shell $ fx aggregator generate-cert-request 4. Run the aggregator certificate signing command, replacing :code:`AFQDN` with the actual `fully qualified domain name (FQDN) `_ for the aggregator node. - .. code-block:: console + .. code-block:: shell $ fx aggregator certify --fqdn AFQDN @@ -326,7 +326,7 @@ Setting Up the Certificate Authority You can override the apparent FQDN of the system by setting an FQDN environment variable (:code:`export FQDN=AFQDN`) before signing the certificate. - .. code-block:: console + .. code-block:: shell $ fx aggregator certify --fqdn AFQDN @@ -351,7 +351,7 @@ Exporting the Workspace 1. Export the workspace so that it can be imported to the collaborator nodes. - .. code-block:: console + .. code-block:: shell $ fx workspace export @@ -370,7 +370,7 @@ Importing the Workspace 2. Import the workspace archive. - .. code-block:: console + .. code-block:: shell $ fx workspace import --archive WORKSPACE.zip @@ -380,7 +380,7 @@ Importing the Workspace Replace :code:`COL_LABEL` with the label you assigned to the collaborator. This label does not have to be the FQDN; it can be any unique alphanumeric label. - .. code-block:: console + .. code-block:: shell $ fx collaborator create -n {COL_LABEL} -d {DATA_PATH:optional} $ fx collaborator generate-cert-request -n {COL_LABEL} @@ -403,7 +403,7 @@ Importing the Workspace 4. On the aggregator node (i.e., the certificate authority in this example), sign the Collaborator CSR Package from the collaborator nodes. - .. code-block:: console + .. code-block:: shell $ fx collaborator certify --request-pkg /PATH/TO/col_{COL_LABEL}_to_agg_cert_request.zip @@ -419,7 +419,7 @@ Importing the Workspace 5. On the collaborator node, import the signed certificate and certificate chain into your workspace. - .. code-block:: console + .. code-block:: shell $ fx collaborator certify --import /PATH/TO/agg_to_col_{COL_LABEL}_signed_cert.zip @@ -435,7 +435,7 @@ STEP 3: Start the Federation 1. Start the Aggregator. - .. code-block:: console + .. code-block:: shell $ fx aggregator start @@ -449,7 +449,7 @@ STEP 3: Start the Federation 2. Run the Collaborator. - .. code-block:: console + .. code-block:: shell $ fx collaborator start -n {COLLABORATOR_LABEL} @@ -481,7 +481,7 @@ Post Experiment Experiment owners may access the final model in its native format. Among other training artifacts, the aggregator creates the last and best aggregated (highest validation score) model snapshots. One may convert a snapshot to the native format and save the model to disk by calling the following command from the workspace: -.. 
code-block:: console +.. code-block:: shell $ fx model save -i model_protobuf_path.pth -o save_model_path @@ -503,7 +503,7 @@ In fact, the :code:`get_model()` method returns a **TaskRunner** object loaded w Running inside Docker --------------------- -There are two ways you can run |productName| with Docker\*\. +There are two ways you can run OpenFL with Docker\*\. - `Option 1: Deploy a Federation in a Docker Container`_ - `Option 2: Deploy Your Workspace in a Docker Container`_ @@ -515,17 +515,17 @@ Option 1: Deploy a Federation in a Docker Container ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. note:: - You have to built an |productName| image. See :ref:`install_docker` for details. + You have to built an OpenFL image. See :ref:`installation` for details. -1. Run the |productName| image. +1. Run the OpenFL image. - .. code-block:: console + .. code-block:: shell $ docker run -it --network host openfl -You can now experiment with |productName| in the container. For example, you can test the project pipeline with the `"Hello Federation" bash script `_. +You can now experiment with OpenFL in the container. For example, you can test the project pipeline with the `"Hello Federation" bash script `_. .. _running_the_federation_docker_workspace: @@ -539,7 +539,7 @@ Option 2: Deploy Your Workspace in a Docker Container 1. Build an image with the workspace you created. - .. code-block:: console + .. code-block:: shell $ fx workspace dockerize diff --git a/docs/about/features_index/workflowinterface.rst b/docs/about/features_index/workflowinterface.rst index b076738b0b..6dc9527ba1 100644 --- a/docs/about/features_index/workflowinterface.rst +++ b/docs/about/features_index/workflowinterface.rst @@ -1,36 +1,31 @@ -.. # Copyright (C) 2020-2024 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - .. _workflow_interface: -****************** -Workflow Interface -****************** - -**Important Note** +Workflow API +============ -The OpenFL workflow interface is experimental and subject to change. For an overview of options supported to setup Federation and run FL experiments, see `Features <../features.rst>`_ +.. note:: + This is an experimental functionality and subject to change. For an overview of options supported to setup FL experiments, refer to `features <../features.html>`_. What is it? =========== -A new OpenFL interface that gives significantly more flexility to researchers in the construction of federated learning experiments. It is heavily influenced by the interface and design of `Metaflow` , the popular framework for data scientists originally developed at Netflix. There are several reasons we converged on Metaflow as inspiration for our work: +A new OpenFL interface that gives significantly more flexility to researchers in the construction of federated learning experiments. It is heavily influenced by the interface and design of `Metaflow `_, a framework originally developed at Netflix. There are several reasons we converged on Metaflow as inspiration for our work: -1. Clean expression of task sequence. Flows start with a `start` task, and end with `end`. The next task in the sequence is called by `self.next`. -2. Easy selection of what should be sent between tasks using `include` or `exclude` +1. Clean expression of task sequence. Flows start with a :code:`start` task, and end with :code:`end`. The next task in the sequence is called by :code:`self.next`. +2. Easy selection of what should be sent between tasks using :code:`include` or :code:`exclude` 3. 
Excellent tooling ecosystem: the metaflow client gives easy access to prior runs, tasks, and data artifacts generated by an experiment. There are several modifications we make in our reimagined version of this interface that are necessary for federated learning: -1. *Placement*: Metaflow's `@step` decorator is replaced by placement decorators that specify where a task will run. In horizontal federated learning, there are server (or aggregator) and client (or collaborator) nodes. Tasks decorated by `@aggregator` will run on the aggregator node, and `@collaborator` will run on the collaborator node. These placement decorators are interpreted by *Runtime* implementations: these do the heavy lifting of figuring out how to get the state of the current task to another process or node. -2. *Runtime*: Each flow has a `.runtime` attribute. The runtime encapsulates the details of the infrastucture where the flow will run. We support the LocalRuntime for simulating experiments on local node and FederatedRuntime to launch experiments on distributed infrastructure. +1. *Placement*: Metaflow's :code:`@step` decorator is replaced by placement decorators that specify where a task will run. In horizontal federated learning, there are server (or aggregator) and client (or collaborator) nodes. Tasks decorated by :code:`@aggregator` will run on the aggregator node, and :code:`@collaborator` will run on the collaborator node. These placement decorators are interpreted by *Runtime* implementations: these do the heavy lifting of figuring out how to get the state of the current task to another process or node. +2. *Runtime*: Each flow has a :code:`.runtime` attribute. The runtime encapsulates the details of the infrastucture where the flow will run. We support the LocalRuntime for simulating experiments on local node and FederatedRuntime to launch experiments on distributed infrastructure. 3. *Conditional branches*: Perform different tasks if a criteria is met 4. *Loops*: Internal loops are within a flow; this is necessary to support rounds of training where the same sequence of tasks is performed repeatedly. How to use it? ============== -Let's start with the basics. A flow is intended to define the entirety of federated learning experiment. Every flow begins with the `start` task and concludes with the `end` task. At each step in the flow, attributes can be defined, modified, or deleted. Attributes get passed forward to the next step in the flow, which is defined by the name of the task passed to the `next` function. In the line before each task, there is a **placement decorator**. The placement decorator defines where that task will be run. The OpenFL Workflow Interface adopts the conventions set by Metaflow, that every workflow begins with start and concludes with the end task. In the following example, the aggregator begins with an optionally passed in model and optimizer. The aggregator begins the flow with the start task, where the list of collaborators is extracted from the runtime (:code:`self.collaborators = self.runtime.collaborators`) and is then used as the list of participants to run the task listed in self.next, aggregated_model_validation. The model, optimizer, and anything that is not explicitly excluded from the next function will be passed from the start function on the aggregator to the aggregated_model_validation task on the collaborator. Where the tasks run is determined by the placement decorator that precedes each task definition (:code:`@aggregator` or :code:`@collaborator`). 
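A condensed sketch of such a flow is shown here for orientation. The import paths follow the workflow interface tutorials and may differ between OpenFL releases, and the method bodies are stubs rather than a working training loop.

.. code-block:: python

    from openfl.experimental.interface import FLSpec
    from openfl.experimental.placement import aggregator, collaborator

    class SketchFlow(FLSpec):

        @aggregator
        def start(self):
            # Runs on the aggregator: collect participants, then fan out.
            self.collaborators = self.runtime.collaborators
            self.next(self.aggregated_model_validation, foreach='collaborators')

        @collaborator
        def aggregated_model_validation(self):
            # Validate the incoming global model on this collaborator's data.
            self.next(self.train)

        @collaborator
        def train(self):
            self.next(self.local_model_validation)

        @collaborator
        def local_model_validation(self):
            self.next(self.join)

        @aggregator
        def join(self, inputs):
            # Average the collaborators' weights here; a full flow would loop
            # back for further rounds before moving on.
            self.next(self.end)

        @aggregator
        def end(self):
            pass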
Once each of the collaborators (defined in the runtime) complete the aggregated_model_validation task, they pass their current state onto the train task, from train to local_model_validation, and then finally to join at the aggregator. It is in join that an average is taken of the model weights, and the next round can begin. +Let's start with the basics. A flow is intended to define the entirety of federated learning experiment. Every flow begins with the :code:`start` task and concludes with the :code:`end` task. At each step in the flow, attributes can be defined, modified, or deleted. Attributes get passed forward to the next step in the flow, which is defined by the name of the task passed to the :code:`next` function. In the line before each task, there is a **placement decorator**. The placement decorator defines where that task will be run. The OpenFL Workflow Interface adopts the conventions set by Metaflow, that every workflow begins with start and concludes with the end task. In the following example, the aggregator begins with an optionally passed in model and optimizer. The aggregator begins the flow with the start task, where the list of collaborators is extracted from the runtime (:code:`self.collaborators = self.runtime.collaborators`) and is then used as the list of participants to run the task listed in self.next, aggregated_model_validation. The model, optimizer, and anything that is not explicitly excluded from the next function will be passed from the start function on the aggregator to the aggregated_model_validation task on the collaborator. Where the tasks run is determined by the placement decorator that precedes each task definition (:code:`@aggregator` or :code:`@collaborator`). Once each of the collaborators (defined in the runtime) complete the aggregated_model_validation task, they pass their current state onto the train task, from train to local_model_validation, and then finally to join at the aggregator. It is in join that an average is taken of the model weights, and the next round can begin. .. code-block:: python @@ -122,7 +117,7 @@ Prior interfaces in OpenFL support the standard horizontal FL training workflow: 4. The collaborator performs validation with their local validation dataset on their locally trained model, and sends their validation metrics to the aggregator (locally_tuned_model_validation task) 5. The aggregator applies an aggregation function (weighted average, FedCurv, FedProx, etc.) to the model weights, and reports the aggregate metrics. -The Task Assigner determines the list of collaborator tasks to be performed, and both in the task runner API as well as the interactive API these tasks can be modified (to varying degrees). For example, to perform federated evaluation of a model, only the aggregated_model_validation task would be selected for the assigner's block of the federated plan. Equivalently for the interactive API, this can be done by only registering a single validation task. But there are many other types of workflows that can't be easily represented purely by training / validation tasks performed on a collaborator with a single model. An example is training a Federated Generative Adversarial Network (GAN); because this may be represented by separate generative and discriminator models, and could leak information about a collaborator dataset, the interface we provide should allow for better control over what gets sent over the network and how. 
Another common request we get is for validation with an aggregator's dataset after training. Prior to |productName| 1.5, there has not a great way to support this in OpenFL. +The Task Assigner determines the list of collaborator tasks to be performed, and both in the task runner API as well as the interactive API these tasks can be modified (to varying degrees). For example, to perform federated evaluation of a model, only the aggregated_model_validation task would be selected for the assigner's block of the federated plan. Equivalently for the interactive API, this can be done by only registering a single validation task. But there are many other types of workflows that can't be easily represented purely by training / validation tasks performed on a collaborator with a single model. An example is training a Federated Generative Adversarial Network (GAN); because this may be represented by separate generative and discriminator models, and could leak information about a collaborator dataset, the interface we provide should allow for better control over what gets sent over the network and how. Another common request we get is for validation with an aggregator's dataset after training. Prior to OpenFL 1.5, there has not been a great way to support this in OpenFL. Goals ===== @@ -137,7 +132,7 @@ Goals Workflow Interface API ====================== -The workflow interface formulates the experiment as a series of tasks, or a flow. Every flow begins with the `start` task and concludes with `end`. +The workflow interface formulates the experiment as a series of tasks, or a flow. Every flow begins with the :code:`start` task and concludes with :code:`end`. Runtimes ======== @@ -178,7 +173,7 @@ You can simulate a Federated Learning experiment locally using :code:`LocalRunti local_runtime = LocalRuntime(aggregator=aggregator, collaborators=collaborators, backend='single_process') -Let's break this down, starting with the :code:`Aggregator` and :code:`Collaborator` components. These components represent the *Participants* in a Federated Learning experiment. Each participant has its own set of *private attributes*. As the name suggests, these *private attributes* represent private information they do not want to share with others, and will be filtered out when there is a transition from the aggregator to the collaborator or vice versa. In the example above each collaborator has it's own `train_dataloader` and `test_dataloader` that are only available when that collaborator is performing it's tasks via `self.train_loader` and `self.test_loader`. Once those collaborators transition to a task at the aggregator, this private information is filtered out and the remaining collaborator state can safely be sent back to the aggregator. +Let's break this down, starting with the :code:`Aggregator` and :code:`Collaborator` components. These components represent the *Participants* in a Federated Learning experiment. Each participant has its own set of *private attributes*. As the name suggests, these *private attributes* represent private information they do not want to share with others, and will be filtered out when there is a transition from the aggregator to the collaborator or vice versa. In the example above each collaborator has its own :code:`train_dataloader` and :code:`test_dataloader` that are only available when that collaborator is performing its tasks via :code:`self.train_loader` and :code:`self.test_loader`.
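For instance, the participants just described might be wired up as follows. This is a sketch only: the import paths and the :code:`Collaborator` constructor are assumptions that can vary across OpenFL releases, while the :code:`LocalRuntime` call mirrors the snippet above.

.. code-block:: python

    from openfl.experimental.interface import Aggregator, Collaborator
    from openfl.experimental.runtime import LocalRuntime

    agg = Aggregator()

    collaborators = []
    for name in ['collaborator_one', 'collaborator_two']:
        col = Collaborator(name=name)
        # Private attributes stay on this participant and are filtered out
        # whenever execution transitions back to the aggregator.
        col.private_attributes = {
            'train_loader': [],  # stand-in for this collaborator's real DataLoader
            'test_loader': [],
        }
        collaborators.append(col)

    local_runtime = LocalRuntime(aggregator=agg, collaborators=collaborators,
                                 backend='single_process')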
Once those collaborators transition to a task at the aggregator, this private information is filtered out and the remaining collaborator state can safely be sent back to the aggregator. These *private attributes* need to be set in form of a dictionary(user defined), where the key is the name of the attribute and the value is the object. In this example :code:`collaborator.private_attributes` sets the collaborator *private attributes* :code:`train_loader` and :code:`test_loader` that are accessed by collaborator steps (:code:`aggregated_model_validation`, :code:`train` and :code:`local_model_validation`). @@ -230,7 +225,7 @@ Participant *private attributes* are returned by the callback function in form o Some important points to remember while creating callback function and private attributes are: - Callback Function needs to be defined by the user and should return the *private attributes* required by the participant in form of a key/value pair - - Callback function can be provided with any parameters required as arguments. In this example, parameters essential for the callback function are supplied with corresponding values bearing *same names* during the instantiation of the Collaborator + - Callback function can be provided with any parameters required as arguments. In this example, parameters essential for the callback function are supplied with corresponding values bearing same names during the instantiation of the Collaborator * :code:`index`: Index of the particular collaborator needed to shard the dataset * :code:`n_collaborators`: Total number of collaborators in which the dataset is sharded @@ -301,19 +296,19 @@ First step is to create the participants in the Federation: the Director and Env **Director: The central node in the Federation** -The `fx director start` command is used to start the Director. You can run it with or without TLS, depending on your setup. +The :code:`fx director start` command is used to start the Director. You can run it with or without TLS, depending on your setup. **With TLS:** Use the following command: -.. code-block:: console +.. code-block:: shell $ fx director start -c -rc -pk -oc **Without TLS:** Use the following command: -.. code-block:: console +.. code-block:: shell $ fx director start --disable-tls -c @@ -352,14 +347,14 @@ The `fx envoy start` command is used to start the Envoy. You can run it with or **With TLS:** Use the following command: -.. code-block:: console +.. code-block:: shell $ fx envoy start -n -ec -dh -dp -rc -pk -oc **Without TLS:** Use the following command: -.. code-block:: console +.. code-block:: shell $ fx envoy start -n --disable-tls -ec @@ -374,7 +369,7 @@ Use the following command: - `-oc `: Path to the API certificate file (used with TLS). - `--disable-tls`: Disables TLS encryption. -The Envoy configuration file includes details about the private attributes. An example configuration file `envoy_config.yaml` for `envoy_one` is shown below: +The Envoy configuration file includes details about the private attributes. An example configuration file :code:`envoy_config.yaml` for :code:`envoy_one` is shown below: .. code-block:: yaml @@ -393,9 +388,9 @@ Now we proceed to instantiate the :code:`FederatedRuntime` to facilitate the dep - Port number on which the Director is listening. - (Optional) Certificate information for TLS: - - `cert_chain`: Path to the certificate chain. - - `api_cert`: Path to the API certificate. - - `api_private_key`: Path to the API private key. + - :code:`cert_chain`: Path to the certificate chain. 
+ - :code:`api_cert`: Path to the API certificate. + - :code:`api_private_key`: Path to the API private key. 2. **collaborators** @@ -406,7 +401,7 @@ Now we proceed to instantiate the :code:`FederatedRuntime` to facilitate the dep File path to the Jupyter notebook defining the experiment logic. -Below is an example of how to set up and instantiate a `FederatedRuntime`: +Below is an example of how to set up and instantiate a :code:`FederatedRuntime`: .. code-block:: python diff --git a/docs/about/license.rst b/docs/about/license.rst index 7f03f95c1d..3f96d19e0b 100644 --- a/docs/about/license.rst +++ b/docs/about/license.rst @@ -1,6 +1,3 @@ -.. # Copyright (C) 2020-2023 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - ========== License ========== diff --git a/docs/about/notices_and_disclaimers.rst b/docs/about/notices_and_disclaimers.rst index 9014503013..d0b612de03 100644 --- a/docs/about/notices_and_disclaimers.rst +++ b/docs/about/notices_and_disclaimers.rst @@ -1,11 +1,8 @@ -.. # Copyright (C) 2020-2023 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - *********************** Notices and Disclaimers *********************** -© Intel Corporation. Intel, the Intel logo, and other Intel marks are trademarks of Intel Corporation or its subsidiaries. Other names and brands may be claimed as the property of others. +Copyright (C) 2020-2024 Intel Corporation. Intel, the Intel logo, and other Intel marks are trademarks of Intel Corporation or its subsidiaries. Other names and brands may be claimed as the property of others. ​​Intel technologies may require enabled hardware, software or service activation.​​​​ diff --git a/docs/about/overview.how_can_intel_protect_federated_learning.rst b/docs/about/overview.how_can_intel_protect_federated_learning.rst index a74ac539f6..5c97d325b6 100644 --- a/docs/about/overview.how_can_intel_protect_federated_learning.rst +++ b/docs/about/overview.how_can_intel_protect_federated_learning.rst @@ -56,7 +56,7 @@ This manifest file is used to automatically create the enclave on an SGX-compatible CPU. For example, once Graphene is installed and the manifest file is specified, the command -.. code-block:: console +.. code-block:: shell $ SGX=1 ./pal_loader httpd diff --git a/docs/about/overview.rst b/docs/about/overview.rst deleted file mode 100644 index e0a6eb5134..0000000000 --- a/docs/about/overview.rst +++ /dev/null @@ -1,57 +0,0 @@ -.. # Copyright (C) 2020-2023 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - -========== -Overview -========== - - -.. note:: - - This project is continually being developed and improved. Expect changes to this manual, the project code, and the project design. - -Open Federated Learning (OpenFL) is a Python\*\ 3 project developed by Intel Internet of Things Group (IOTG) and Intel Labs. - -.. figure:: ../images/ct_vs_fl.png - -.. centered:: Federated Learning - -.. _what_is_openfl: - -*************************** -What is Federated Learning? -*************************** - -`Federated learning `_ is a distributed machine learning approach that -enables collaboration on machine learning projects without sharing sensitive data, such as patient records, financial data, -or classified secrets (`McMahan, 2016 `_; -`Sheller, Reina, Edwards, Martin, & Bakas, 2019 `_; -`Yang, Liu, Chen, & Tong, 2019 `_; -`Sheller et al., 2020 `_). -In federated learning, the model moves to meet the data rather than the data moving to meet the model. 
The movement of data across the federation are the model parameters and their updates. - -.. figure:: ../images/diagram_fl_new.png - -.. centered:: Federated Learning - -.. _definitions_and_conventions: - -*************************** -Definitions and Conventions -*************************** - -Federated learning brings in a few more components to the traditional data science training pipeline: - -Collaborator - A collaborator is a client in the federation that has access to the local training, validation, and test datasets. By design, the collaborator is the only component of the federation with access to the local data. The local dataset should never leave the collaborator. - -Aggregator - A parameter server sends a global model to the collaborators. Parameter servers are often combined with aggregators on the same compute node. - An aggregator receives locally tuned models from collaborators and combines the locally tuned models into a new global model. Typically, `federated averaging `_, (a weighted average) is the algorithm used to combine the locally tuned models. - -Round - A federation round is defined as the interval (typically defined in terms of training steps) where an aggregation is performed. Collaborators may perform local training on the model for multiple epochs (or even partial epochs) within a single training round. - -.. toctree -.. overview.how_can_intel_protect_federated_learning -.. overview.what_is_intel_federated_learning diff --git a/docs/conf.py b/docs/conf.py index bc5857847e..d24a5cc3d7 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,8 +1,6 @@ -"""Docs configuration module.""" - -# Copyright (C) 2020-2023 Intel Corporation +# Copyright (C) 2020-2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 - +"""Docs configuration module.""" # Configuration file for the Sphinx documentation builder. # @@ -17,6 +15,8 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. # import os +import inspect +import operator import sys from datetime import datetime @@ -31,80 +31,38 @@ # import sphinxcontrib.napoleon # NOQA:E800 extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', 'sphinx.ext.napoleon', - 'sphinx_rtd_theme', - 'sphinx.ext.autosectionlabel', - 'sphinx-prompt', + 'sphinx.ext.linkcode', + 'sphinx.ext.mathjax', + 'sphinx_remove_toctrees', 'sphinx_copybutton', - 'sphinx_substitution_extensions', - 'sphinx.ext.ifconfig', + 'sphinx_design', + 'sphinxext.rediraffe', 'sphinxcontrib.mermaid', - 'sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - 'recommonmark' + 'sphinx-prompt', + 'recommonmark', + 'myst_nb', + 'sphinx.ext.ifconfig', + 'sphinx.ext.autosectionlabel', + 'sphinx_substitution_extensions', ] -autodoc_default_options = { - 'imported-members': True, -} + +pygments_style = None autosummary_generate = True # Turn on sphinx.ext.autosummary +napolean_use_rtype = False -source_suffix = ['.rst', '.md'] +source_suffix = ['.rst', '.md', '.ipynb'] # -- Project information ----------------------------------------------------- - # This will replace the |variables| within the rST documents automatically - -PRODUCT_VERSION = 'Intel' - project = 'OpenFL' -copyright = f'{datetime.now().year}, Intel' # NOQA -author = 'Intel Corporation' -version = f'{datetime.now().year}.{datetime.now().month}' -release = version -master_doc = 'index' - -# Global variables for rST -rst_prolog = ''' -.. |productName| replace:: OpenFL -.. |productZip| replace:: openfl.zip -.. |productDir| replace:: openfl -.. 
|productWheel| replace:: openfl - -''' - -napoleon_google_docstring = True - -# Config the returns section to behave like the Args section -napoleon_custom_sections = [('Returns', 'params_style')] - -# This code extends Sphinx's GoogleDocstring class to support 'Keys', -# 'Attributes', and 'Class Attributes' sections in docstrings. Allows for more -# detailed and structured documentation of Python classes and their attributes. -from sphinx.ext.napoleon.docstring import GoogleDocstring # NOQA - -# Define new sections and their corresponding parse methods -new_sections = { - 'keys': 'Keys', - 'attributes': 'Attributes', - 'class attributes': 'Class Attributes' -} - -# Add new sections to GoogleDocstring class -for section, title in new_sections.items(): - setattr(GoogleDocstring, f'_parse_{section}_section', - lambda self, section: self._format_fields(title, self._consume_fields())) - - -# Patch the parse method to include new sections -def patched_parse(self): - for section in new_sections: - self._sections[section] = getattr(self, f'_parse_{section}_section') - self._unpatched_parse() - - -# Apply the patch -GoogleDocstring._unpatched_parse = GoogleDocstring._parse -GoogleDocstring._parse = patched_parse +copyright = f'{datetime.now().year}, The OpenFL Team' +author = 'The OpenFL Team' +version = '' +release = '' +main_doc = 'index' # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -113,11 +71,10 @@ def patched_parse(self): # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ['_build', 'Thumbs.db', 'README.md', 'structurizer_dsl/README.md', - '.DS_Store', 'tutorials/*', 'graveyard/*', '_templates'] + '.DS_Store', 'graveyard/*', '_templates'] # add temporary unused files -exclude_patterns.extend(['modules.rst', - 'install.singularity.rst', +exclude_patterns.extend(['install.singularity.rst', 'overview.what_is_intel_federated_learning.rst', 'overview.how_can_intel_protect_federated_learning.rst', 'source/workflow/running_the_federation.singularity.rst']) @@ -126,16 +83,72 @@ def patched_parse(self): # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' +html_logo = '_static/openfl_logo.png' +html_favicon = '_static/favicon.png' +html_theme = 'sphinx_book_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + 'show_toc_level': 2, + 'repository_url': 'https://github.com/securefederatedai/openfl', + 'use_repository_button': True, # add a "link to repository" button + 'navigation_with_keys': False, +} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
html_static_path = ['_static'] -html_style = 'css/Intel_One_Mono_Font_Theme.css' -autosectionlabel_prefix_document = True +html_css_files = [ + 'style.css', +] +# -- Options for myst ---------------------------------------------- +myst_heading_anchors = 3 # auto-generate 3 levels of heading anchors +myst_enable_extensions = ['dollarmath'] +nb_execution_mode = "force" +nb_execution_allow_errors = False +nb_merge_streams = True +nb_execution_show_tb = True +nb_execution_timeout = 600 # secs +nb_execution_excludepatterns = [ + # TODO(MasterSkepticista) this requires fx experimental enabled, conflicts with taskrunner CLI + "tutorials/workflow.ipynb", +] -def setup(app): - app.add_css_file('css/custom.css') +# Tell sphinx autodoc how to render type aliases. +autodoc_typehints = "description" +autodoc_typehints_description_target = "all" + +# Remove auto-generated API docs from sidebars. They take too long to build. +remove_from_toctrees = ["_autosummary/*"] + +# Customize code links via sphinx.ext.linkcode + +def linkcode_resolve(domain, info): + import openfl + + if domain != 'py': + return None + if not info['module']: + return None + if not info['fullname']: + return None + if info['module'].split(".")[0] != 'openfl': + return None + try: + mod = sys.modules.get(info['module']) + obj = operator.attrgetter(info['fullname'])(mod) + if isinstance(obj, property): + obj = obj.fget + while hasattr(obj, '__wrapped__'): # decorated functions + obj = obj.__wrapped__ + filename = inspect.getsourcefile(obj) + source, linenum = inspect.getsourcelines(obj) + except: + return None + filename = os.path.relpath(filename, start=os.path.dirname(openfl.__file__)) + lines = f"#L{linenum}-L{linenum + len(source)}" if linenum else "" + return f"https://github.com/securefederatedai/openfl/blob/develop/openfl/{filename}{lines}" diff --git a/docs/contributing.md b/docs/contributing.md new file mode 100644 index 0000000000..69fb5f8645 --- /dev/null +++ b/docs/contributing.md @@ -0,0 +1,100 @@ +# Contributing + +We welcome contributions from the community. There are several ways to contribute: +* Improvements in [documentation](https://openfl.readthedocs.io/en/latest/). +* Contributing to OpenFL's code-base: via bug-fixes or feature additions. +* Answering questions on our [discussions page](https://github.com/securefederatedai/openfl/discussions). +* Participating in our [roadmap](https://github.com/securefederatedai/openfl/blob/develop/ROADMAP.md) discussions. + +We have a slack [channel](https://join.slack.com/t/openfl/shared_invite/zt-ovzbohvn-T5fApk05~YS_iZhjJ5yaTw) and we host regular [community meetings](https://github.com/securefederatedai/openfl#support). + + +## How to contribute code +### Step 1. Open an issue + +Before you start making any changes, it is always good to open an [issue](https://github.com/securefederatedai/openfl/issues/new/choose) first (assuming one does not already exist), outlining your proposed changes. We can give you feedback, and potentially validate the proposed changes. + +For minor changes (akin to a documentation or bug fix), proceed to opening a Pull Request (PR) directly. + +### Step 2. Make code changes + +To modify code, you need to fork the repository. Set up a development environment as covered in the section "Setup environment" below. + +### Step 3. Create a Pull Request (PR) + +Once the change is ready, open a PR from your branch in your fork, to the `develop` branch in [securefederatedai/openfl](https://github.com/securefederatedai/openfl). 
OpenFL follows standard recommendations of PR formatting. Find more details [here](https://github.blog/2015-01-21-how-to-write-the-perfect-pull-request/).
+
+### Step 4. Sign your work
+
+Signoff your patch commits using your real name. We discourage anonymous contributions.
+
+ Signed-off-by: Joe Smith
+
+If you set your `user.name` and `user.email` git configs, you can sign your commits using:
+```bash
+git commit --signoff -m "<commit message>"
+```
+
+Your signature [certifies](http://developercertificate.org/) that you wrote the patch, or, you otherwise have the right to pass it on as an open-source patch.
+
+OpenFL is licensed under the [Apache 2.0 license](https://github.com/securefederatedai/openfl/blob/develop/LICENSE). By contributing to the project, you agree to the license and copyright terms therein and release your contribution under these terms.
+
+### Step 5. Code review and merge
+
+Verify that your contribution passes all tests in our CI/CD pipeline. In case of any failures, look into the error messages and try to fix them.
+
+![CI/CD](images/CI_details.png)
+
+Meanwhile, a reviewer will review the pull request and provide comments. After a few iterations of
+review and changes (depending on the complexity of the changes), the PR will be approved for merge.
+
+## Setup environment
+
+We recommend setting up a local dev environment. Clone your forked repo to your local machine and install the dependencies.
+
+```shell
+git clone https://github.com/YOUR_GITHUB_USERNAME/openfl.git
+cd openfl
+pip install -U pip setuptools wheel
+pip install .
+pip install -r linters-requirements.txt
+```
+
+## Code style
+
+OpenFL uses [ruff](https://github.com/astral-sh/ruff) to lint/format code and [precommit](https://pre-commit.com/) checks.
+
+Run the following command at the **root** directory of the repo to format your code.
+
+```
+sh scripts/format.sh
+```
+You may need to resolve errors that could not be resolved by autoformatting. To only show lint errors, run `sh scripts/lint.sh` at the **root** directory of the repo.
+
+### Docstrings
+Since docstrings cannot be verified programmatically, if you do write/edit a docstring, make sure to check it manually. OpenFL docstrings should follow the conventions below:
+
+A **class** or a **function** docstring may contain:
+* A one-line description of the class/function.
+* Paragraph(s) of detailed information.
+* Usage examples wherever applicable.
+* Detailed description of function arguments, return types and possible exceptions raised.
+
+## Update documentation
+To rebuild documentation, install packages:
+
+```bash
+pip install -r docs/requirements.txt
+```
+
+Next, run:
+```bash
+sphinx-build -b html docs docs/_build/html -j auto
+```
+
+You may disable notebook execution if that takes too long:
+```bash
+sphinx-build -b html -D nb_execution_mode=off docs docs/_build/html -j auto
+```
+
+The `-j auto` option controls build parallelism. You may replace `auto` with a number to specify the number of jobs to run in parallel.
\ No newline at end of file
diff --git a/docs/contributing_guidelines/contributing.md b/docs/contributing_guidelines/contributing.md
deleted file mode 100644
index 2f6ec5e954..0000000000
--- a/docs/contributing_guidelines/contributing.md
+++ /dev/null
@@ -1,102 +0,0 @@
-Contributing to OpenFL
-=====================================================================
-
-We welcome contributions from the community. We believe that anyone can bring something valuable to OpenFL and help us to improve the project.
This document explains how to contribute to OpenFL. - -We accept various contributions from documentation improvement and bug fixing to major features proposals and [roadmap](https://github.com/intel/openfl/blob/develop/ROADMAP.md) suggestions. - -Documentation improvement: review our [documentation](https://openfl.readthedocs.io/en/latest) and let us know if something is not clear or not relevant. -Propose your own formulations or even write new section explaining something that you know how works, but do not see in the documentation. -Propose it through GitHub [issues](https://github.com/intel/openfl/issues/new/choose) or [Discussions](https://github.com/intel/openfl/discussions). - -To propose bugs, new features, or other code improvements: - -1. Check open and closed [issues](https://github.com/intel/openfl/issues) and make sure there is no similar proposal. -2. Open a [new issue](https://github.com/intel/openfl/issues/new/choose), select a relevant category (Bug report / Feature request / Report a security vulnerability) and describe your idea using the template. -3. If you want to fix a bug or create this feature by yourself, prepare a contribution. - - Format your code following the [flake8 style](https://flake8.pycqa.org/en/latest/). - - Make sure that your code is original and corresponds to [OpenFL license](#license). - - Sing your work - [see below](#sign-your-work). - - Create a [pull request](#formatting-of-pull-requests) and wait for feedback. - - Verify that all tests in our [CI/CD pipeline](#Continuous-Integration-and-Continuous-Development) passed. -4. Hurrah! You are a new contributor to OpenFL! You will see your name in released notes of the subsequent releases!😊 - -Join our [Slack](https://join.slack.com/t/openfl/shared_invite/zt-ovzbohvn-T5fApk05~YS_iZhjJ5yaTw) and [Community meetings](https://github.com/intel/openfl#support) and participate in the discussions. - -Are you an expert in Federated Learning and want to contribute to our roadmap? You can nominate yourself as a member of our Technical Steering Committee and be part of the OpenFL decision making group. Please reach us through our [Slack](https://join.slack.com/t/openfl/shared_invite/zt-ovzbohvn-T5fApk05~YS_iZhjJ5yaTw). - -### Code format and style - -We use [flake8](https://flake8.pycqa.org/en/latest/) for PEP8 style guide enforcement. This is run as a part of our CI/CD pipeline and it’s required prior a merge. - -### Formatting of Pull Requests - -OpenFL follows standard recommendations of PR formatting. Please find more details [here](https://github.blog/2015-01-21-how-to-write-the-perfect-pull-request/). - -### Continuous Integration and Continuous Development - -OpenFL uses GitHub actions to perform all functional and unit tests. Before your contribution can be merged make sure that all your tests are passing. -For more information of what fails you can click on the “details” link near the pipeline that failed. - -![CI/CD](../images/CI_details.png) - -### Writing the tests - -The OpenFL team recommend including tests for all new features contributions. Test can be found in the “Tests” directory. -The [Tests/OpenFL folder](https://github.com/intel/openfl/tree/develop/tests/openfl) contains unit tests and the [Tests/GitHub folder](https://github.com/intel/openfl/tree/develop/tests/github) contains end-to-end and functional tests. - -### License - -OpenFL is licensed under the terms in [Apache 2.0 license](https://github.com/intel/openfl/blob/develop/LICENSE). 
By contributing to the project, you agree to the license and copyright terms therein and release your contribution under these terms. - -### Sign your work - -Please use the sign-off line at the end of the patch. Your signature certifies that you wrote the patch or otherwise have the right to pass it on as an open-source patch. The rules are pretty simple: if you can certify -the below (from [developercertificate.org](http://developercertificate.org/)): - -``` -Developer Certificate of Origin -Version 1.1 - -Copyright (C) 2004, 2006 The Linux Foundation and its contributors. -660 York Street, Suite 102, -San Francisco, CA 94110 USA - -Everyone is permitted to copy and distribute verbatim copies of this -license document, but changing it is not allowed. - -Developer's Certificate of Origin 1.1 - -By making a contribution to this project, I certify that: - -(a) The contribution was created in whole or in part by me and I - have the right to submit it under the open source license - indicated in the file; or - -(b) The contribution is based upon previous work that, to the best - of my knowledge, is covered under an appropriate open source - license and I have the right under that license to submit that - work with modifications, whether created in whole or in part - by me, under the same open source license (unless I am - permitted to submit under a different license), as indicated - in the file; or - -(c) The contribution was provided directly to me by some other - person who certified (a), (b) or (c) and I have not modified - it. - -(d) I understand and agree that this project and the contribution - are public and that a record of the contribution (including all - personal information I submit with it, including my sign-off) is - maintained indefinitely and may be redistributed consistent with - this project or the open source license(s) involved. -``` - -Then you just add a line to every git commit message: - - Signed-off-by: Joe Smith - -Use your real name (sorry, no pseudonyms or anonymous contributions.) - -If you set your `user.name` and `user.email` git configs, you can sign your -commit automatically with `git commit -s`. diff --git a/docs/deprecation.md b/docs/deprecation.md new file mode 100644 index 0000000000..af8e5c4f76 --- /dev/null +++ b/docs/deprecation.md @@ -0,0 +1,8 @@ +# Python version support policy + +OpenFL extends the Python scientific community's [SPEC-0](https://scientific-python.org/specs/spec-0000/) recommendation of 36 months of support for Python versions, by 6 more months. This is in-line with [NumPy](https://numpy.org/neps/nep-0029-deprecation_policy.html), for which NEP-29 has been very successful. + +This translates to the following support policy: +* **Python3.10** was released in October 2021, and will be supported by OpenFL at least until April 2025. +* **Python3.11** was released in October 2022, and will be supported by OpenFL at least until April 2026. +* **Python3.12** was released in October 2023, and will be supported by OpenFL at least until April 2027. 
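+
+As a quick illustration of the arithmetic behind this policy, the sketch below adds the 42-month window (SPEC-0's 36 months plus OpenFL's 6-month extension) to each release month. This snippet is illustrative only and not part of the OpenFL package; the exact release days are approximate and the helper name is made up for this example.
+
+```python
+from datetime import date
+
+SUPPORT_MONTHS = 36 + 6  # SPEC-0 window plus OpenFL's 6-month extension
+
+# Approximate upstream release dates of each Python minor version (illustrative only).
+RELEASES = {"3.10": date(2021, 10, 4), "3.11": date(2022, 10, 24), "3.12": date(2023, 10, 2)}
+
+def end_of_support(released: date, months: int = SUPPORT_MONTHS) -> date:
+    """Return the first day of the month in which support may be dropped."""
+    total = released.year * 12 + (released.month - 1) + months
+    return date(total // 12, total % 12 + 1, 1)
+
+for version, released in RELEASES.items():
+    print(f"Python {version}: supported at least until {end_of_support(released):%B %Y}")
+# Python 3.10: April 2025, Python 3.11: April 2026, Python 3.12: April 2027
+```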
diff --git a/docs/developer_guide/advanced_topics.rst b/docs/developer_guide/advanced_topics.rst index c3c687d1b2..4b960e430a 100644 --- a/docs/developer_guide/advanced_topics.rst +++ b/docs/developer_guide/advanced_topics.rst @@ -8,7 +8,7 @@ Advanced Topics *************** **General** - Speed up activating Open Federated Learning (|productName|) commands: + Speed up activating Open Federated Learning (OpenFL) commands: - :doc:`advanced_topics/bash_autocomplete_activation` @@ -18,7 +18,7 @@ Advanced Topics - :doc:`advanced_topics/multiple_plans` - Reduce the amount of data transferred in a federation through compression pipelines available in |productName|: + Reduce the amount of data transferred in a federation through compression pipelines available in OpenFL: - :doc:`advanced_topics/compression_settings` diff --git a/docs/developer_guide/advanced_topics/bash_autocomplete_activation.rst b/docs/developer_guide/advanced_topics/bash_autocomplete_activation.rst index f804200638..cab627268b 100644 --- a/docs/developer_guide/advanced_topics/bash_autocomplete_activation.rst +++ b/docs/developer_guide/advanced_topics/bash_autocomplete_activation.rst @@ -10,7 +10,7 @@ Get faster access to available commands by activating bash completion in CLI mod STEP 1: Preparation =================== -Make sure you are inside a virtual environment with Open Federated Learning (|productName|) installed. See :ref:`install_package` for details. +Make sure you are inside a virtual environment with Open Federated Learning (OpenFL) installed. See :ref:`installation` for details. STEP 2: Create the fx-autocomplete.sh Script @@ -22,20 +22,20 @@ STEP 2: Create the fx-autocomplete.sh Script 1. Create the script. - .. code-block:: console + .. code-block:: shell $ _FX_COMPLETE=bash_source fx > ~/.fx-autocomplete.sh 2. Check that the script was created properly. - .. code-block:: console + .. code-block:: shell $ cat ~/.fx-autocomplete.sh The output should look like the example below (Click==8.0.1), but could be different depend on `Click `_ version: - .. code-block:: console + .. code-block:: shell _fx_completion() { local IFS=$'\n' @@ -72,7 +72,7 @@ STEP 3: Activate the Autocomplete Feature Perform this command every time you open a new terminal window. - .. code-block:: console + .. code-block:: shell $ source ~/.fx-autocomplete.sh @@ -81,7 +81,7 @@ To save time, add the script into **.bashrc** so the script is activated when yo 1. Edit the **.bashrc** file. The **nano** command line editor is used in this example. - .. code-block:: console + .. code-block:: shell $ nano ~/.bashrc @@ -94,4 +94,3 @@ To save time, add the script into **.bashrc** so the script is activated when yo 3. Save your changes. 4. Open a new terminal to use the updated bash shell. - diff --git a/docs/developer_guide/advanced_topics/compression_settings.rst b/docs/developer_guide/advanced_topics/compression_settings.rst index 2503af2b1b..f46a11e6e7 100644 --- a/docs/developer_guide/advanced_topics/compression_settings.rst +++ b/docs/developer_guide/advanced_topics/compression_settings.rst @@ -7,12 +7,12 @@ Apply Compression Settings ************************** -The Open Federated Learning (|productName|) framework supports lossless and lossy compression pipelines. Federated learning enables a large number of participants to work together on the same model. Without a compression pipeline, this scalability results in increased communication cost. Furthermore, large models exacerbate this problem. 
+The Open Federated Learning (OpenFL) framework supports lossless and lossy compression pipelines. Federated learning enables a large number of participants to work together on the same model. Without a compression pipeline, this scalability results in increased communication cost. Furthermore, large models exacerbate this problem. .. note:: In general, the weights of a model are typically not robust to information loss, so no compression is applied by default to the model weights sent bidirectionally; however, the deltas between the model weights for each round are inherently more sparse and better suited for lossy compression. -The following are the compression pipelines supported in |productName|: +The following are the compression pipelines supported in OpenFL: ``NoCompressionPipeline`` The default option applied to model weights @@ -55,4 +55,3 @@ The example template, **keras_cnn_with_compression**, uses the ``KCPipeline`` wi template : openfl.pipelines.KCPipeline settings : n_clusters : 6 - diff --git a/docs/developer_guide/advanced_topics/multiple_plans.rst b/docs/developer_guide/advanced_topics/multiple_plans.rst index 0e872e0e06..061a7a19f4 100644 --- a/docs/developer_guide/advanced_topics/multiple_plans.rst +++ b/docs/developer_guide/advanced_topics/multiple_plans.rst @@ -30,7 +30,7 @@ Save a New FL Plan When you are working on an FL plan, you can save it for future use. - .. code-block:: console + .. code-block:: shell $ fx plan save -n NEW.PLAN.NAME @@ -45,7 +45,7 @@ Switch FL Plans To switch to a different FL plan, run the following command from the workspace directory. - .. code-block:: console + .. code-block:: shell $ fx plan switch -n PLAN.NAME @@ -62,7 +62,7 @@ Remove FL Plans To remove an FL plan, run the following command from the workspace directory. - .. code-block:: console + .. code-block:: shell $ fx plan remove -n PLAN.NAME diff --git a/docs/developer_guide/advanced_topics/straggler_handling_algorithms.rst b/docs/developer_guide/advanced_topics/straggler_handling_algorithms.rst index a71b1385e5..cdfa2b6a2c 100644 --- a/docs/developer_guide/advanced_topics/straggler_handling_algorithms.rst +++ b/docs/developer_guide/advanced_topics/straggler_handling_algorithms.rst @@ -7,9 +7,9 @@ Straggler Handling Interface ***************************** -The Open Federated Learning (|productName|) framework supports straggler handling interface for identifying stragglers or slow collaborators for a round and ending the round early as a result of it. The updates from these stragglers are not aggregated in the aggregator model. +The Open Federated Learning (OpenFL) framework supports straggler handling interface for identifying stragglers or slow collaborators for a round and ending the round early as a result of it. The updates from these stragglers are not aggregated in the aggregator model. -The following are the straggler handling algorithms supported in |productName|: +The following are the straggler handling algorithms supported in OpenFL: ``CutoffTimeBasedStragglerHandling`` Identifies stragglers based on the cutoff time specified in the settings. 
Arguments to the function are: @@ -38,4 +38,3 @@ The example template, **torch_cnn_mnist_straggler_check**, uses the ``Percentage settings : straggler_cutoff_time : 20 minimum_reporting : 1 - diff --git a/docs/developer_guide/experimental_features.rst b/docs/developer_guide/experimental_features.rst index b95c882e8a..dc1c052e57 100644 --- a/docs/developer_guide/experimental_features.rst +++ b/docs/developer_guide/experimental_features.rst @@ -7,8 +7,8 @@ Experimental Features ********************* -This section includes a set of experimental features that our team wants feedback on before adding them into core |productName|. -Experimental features are *not* ready for production. These features are under active development and intended to make their way into core |productName|, but there are several key considerations to make when building on top of these: +This section includes a set of experimental features that our team wants feedback on before adding them into core OpenFL. +Experimental features are *not* ready for production. These features are under active development and intended to make their way into core OpenFL, but there are several key considerations to make when building on top of these: 1. *Backward compatibility is not guaranteed* - Our goal is to maintain backward compatibility whenever possible, but user feedback (and our own internal research) may result in necessary changes to the APIs. diff --git a/docs/developer_guide/openfl_structure.rst b/docs/developer_guide/openfl_structure.rst index 4b9d126a8a..426c9e8473 100644 --- a/docs/developer_guide/openfl_structure.rst +++ b/docs/developer_guide/openfl_structure.rst @@ -2,10 +2,10 @@ .. # SPDX-License-Identifier: Apache-2.0 ************************************************* -|productName| Structure +OpenFL Structure ************************************************* -Learn about the short-lived and long-lived components that compose Open Federated Learning (|productName|): +Learn about the short-lived and long-lived components that compose Open Federated Learning (OpenFL): - :doc:`structure/components` @@ -13,7 +13,7 @@ Understand the procedure calls to the Director service. - :doc:`structure/communication` -Learn about the plugin framework that makes |productName| flexible and extensible for your use: +Learn about the plugin framework that makes OpenFL flexible and extensible for your use: - :doc:`structure/plugins` diff --git a/docs/developer_guide/running_the_federation.notebook.rst b/docs/developer_guide/running_the_federation.notebook.rst index f5bd9c12ff..e15d3b91de 100644 --- a/docs/developer_guide/running_the_federation.notebook.rst +++ b/docs/developer_guide/running_the_federation.notebook.rst @@ -17,7 +17,7 @@ You will start a Jupyter\* \ lab server and receive a URL you can use to access Start the Tutorials =================== -1. Start a Python\* \ 3.9 (>=3.9, <3.12) virtual environment and confirm |productName| is available. +1. Start a Python\* \ 3.9 (>=3.9, <3.12) virtual environment and confirm OpenFL is available. .. code-block:: python @@ -44,7 +44,7 @@ Start the Tutorials Familiarize with the API Concepts in an Aggregator-Based Worklow ================================================================ -Step 1: Enable the |productName| Python API +Step 1: Enable the OpenFL Python API ------------------------------------------- Add the following lines to your Python script. @@ -54,7 +54,7 @@ Add the following lines to your Python script. 
import openfl.native as fx from openfl.federated import FederatedModel, FederatedDataSet -This loads the |productName| package and import wrappers that adapt your existing data and models to a (simulated) federated context. +This loads the OpenFL package and import wrappers that adapt your existing data and models to a (simulated) federated context. Step 2: Set Up the Experiment ----------------------------- @@ -207,7 +207,7 @@ This command creates a model for each collaborator with their data shard. .. note:: - In production deployments of |productName|, each collaborator will have the data on premise. Splitting data into shards is not necessary. + In production deployments of OpenFL, each collaborator will have the data on premise. Splitting data into shards is not necessary. Step 6: Run the Experiment -------------------------- diff --git a/docs/developer_guide/running_the_federation.tutorial.rst b/docs/developer_guide/running_the_federation.tutorial.rst index 0d6a507a90..6f82a7be1d 100644 --- a/docs/developer_guide/running_the_federation.tutorial.rst +++ b/docs/developer_guide/running_the_federation.tutorial.rst @@ -3,11 +3,10 @@ .. _running_tutorial: -************************************************* -Open Federated Learning (|productName|) Tutorials -************************************************* +OpenFL Tutorials +================ -These tutorials use the Jupyter\* \ Lab server to help you become familiar with the APIs used in Open Federated Learning (|productName|). +These tutorials use the Jupyter\* \ Lab server to help you become familiar with the APIs used in Open Federated Learning (OpenFL). :ref:`running_notebook` Use this tutorial to familiarize with the APIs of the short-lived components (*Aggregator* and *Collaborator*). diff --git a/docs/developer_guide/running_the_federation_with_gandlf.rst b/docs/developer_guide/running_the_federation_with_gandlf.rst index 331b17c804..e2d2cd37f9 100644 --- a/docs/developer_guide/running_the_federation_with_gandlf.rst +++ b/docs/developer_guide/running_the_federation_with_gandlf.rst @@ -3,9 +3,8 @@ .. _running_the_federation_with_gandlf: -**************************************************** -Run the Federation with a model defined using GaNDLF -**************************************************** +Federated Learning using GaNDLF +=============================== This guide will show you how to take an existing model using the `Generally Nuanced Deep Learning Framework (GaNDLF) `_ experiment to a federated environment. @@ -16,17 +15,17 @@ This guide will show you how to take an existing model using the `Generally Nuan .. _running_the_federation_aggregator_based_gandlf: -Aggregator-Based Workflow +TaskRunner API ========================= An overview of this workflow is shown below. .. figure:: ../images/openfl_flow.png -.. centered:: Overview of the Aggregator-Based Workflow +.. centered:: Overview of the TaskRunner API -This workflow uses short-lived components in a federation, which is terminated when the experiment is finished. The components are as follows: +This method uses short-lived components in a federation, which is terminated when the experiment is finished. The components are as follows: - The *Collaborator* uses a local dataset to train a global model and sends the model updates to the *Aggregator*, which aggregates them to create the new global model. - The *Aggregator* is framework-agnostic, while the *Collaborator* can use any deep learning frameworks, such as `TensorFlow `_\* \ or `PyTorch `_\*\. 
`GaNDLF `_ provides a straightforward way to define complete model training pipelines for healthcare data, and is directly compatible with OpenFL. @@ -81,9 +80,9 @@ Simulate a federation .. note:: - Ensure you have installed the |productName| package on every node (aggregator and collaborators) in the federation. + Ensure you have installed the OpenFL package on every node (aggregator and collaborators) in the federation. - See :ref:`install_package` for details. + See :ref:`installation` for details. You can use the `"Hello Federation" bash script `_ to quickly create a federation (an aggregator node and two collaborator nodes) to test the project pipeline. @@ -114,7 +113,7 @@ However, continue with the following procedure for details in creating a federat STEP 1: Install GaNDLF prerequisites and Create a Workspace ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -1. Start a Python 3.9 (>=3.9, <3.12) virtual environment and confirm |productName| is available. +1. Start a Python 3.9 (>=3.9, <3.12) virtual environment and confirm OpenFL is available. .. code-block:: python @@ -125,7 +124,7 @@ STEP 1: Install GaNDLF prerequisites and Create a Workspace 3. Create GaNDLF's Data CSVs. The example below is for 3D Segmentation using the unit test data: - .. code-block:: console + .. code-block:: shell $ python -c "from testing.test_full import test_generic_download_data, test_generic_constructTrainingCSV; test_generic_download_data(); test_generic_constructTrainingCSV()" # Creates training CSV @@ -143,27 +142,27 @@ STEP 1: Install GaNDLF prerequisites and Create a Workspace Set the environment variables to use the :code:`gandlf_seg_test` as the template and :code:`${HOME}/my_federation` as the path to the workspace directory. - .. code-block:: console + .. code-block:: shell $ export WORKSPACE_TEMPLATE=gandlf_seg_test $ export WORKSPACE_PATH=${HOME}/my_federation 4. Create a workspace directory for the new federation project. - .. code-block:: console + .. code-block:: shell $ fx workspace create --prefix ${WORKSPACE_PATH} --template ${WORKSPACE_TEMPLATE} 5. Change to the workspace directory. - .. code-block:: console + .. code-block:: shell $ cd ${WORKSPACE_PATH} 6. Copy the GaNDLF Data CSVs into the default path for model initialization - .. code-block:: console + .. code-block:: shell # 'one' is the default name of the first collaborator in `plan/data.yaml`. $ mkdir -p data/one @@ -175,13 +174,13 @@ STEP 1: Install GaNDLF prerequisites and Create a Workspace This step will initialize the federated learning plan and initialize the random model weights that will be used by all collaborators at the start of the expeirment. - .. code-block:: console + .. code-block:: shell $ fx plan initialize Alternatively, to use your own GaNDLF configuration file, you can import it into the plan with the following command: - .. code-block:: console + .. code-block:: shell $ fx plan initialize --gandlf_config ${PATH_TO_GANDLF_CONFIG}.yaml @@ -207,19 +206,19 @@ STEP 1: Install GaNDLF prerequisites and Create a Workspace - OPTION 1: override the auto populated FQDN value with the :code:`-a` flag. - .. code-block:: console + .. code-block:: shell $ fx plan initialize -a aggregator-hostname.internal-domain.com - OPTION 2: override the apparent FQDN of the system by setting an FQDN environment variable. - .. code-block:: console + .. code-block:: shell $ export FQDN=x.x.x.x and initializing the FL plan - .. code-block:: console + .. 
code-block:: shell $ fx plan initialize @@ -260,7 +259,7 @@ Setting Up the Certificate Authority 1. Change to the path of your workspace: - .. code-block:: console + .. code-block:: shell $ cd WORKSPACE_PATH @@ -268,13 +267,13 @@ Setting Up the Certificate Authority All certificates will be signed by the aggregator node. Follow the instructions and enter the information as prompted. The command will create a simple database file to keep track of all issued certificates. - .. code-block:: console + .. code-block:: shell $ fx workspace certify 3. Run the aggregator certificate creation command, replacing :code:`AFQDN` with the actual `fully qualified domain name (FQDN) `_ for the aggregator node. - .. code-block:: console + .. code-block:: shell $ fx aggregator generate-cert-request --fqdn AFQDN @@ -282,7 +281,7 @@ Setting Up the Certificate Authority On Linux\*\, you can discover the FQDN with this command: - .. code-block:: console + .. code-block:: shell $ hostname --all-fqdns | awk '{print $1}' @@ -290,20 +289,20 @@ Setting Up the Certificate Authority You can override the apparent FQDN of the system by setting an FQDN environment variable before creating the certificate. - .. code-block:: console + .. code-block:: shell $ export FQDN=x.x.x.x $ fx aggregator generate-cert-request If you omit the :code:`--fdqn` parameter, then :code:`fx` will automatically use the FQDN of the current node assuming the node has been correctly set with a static address. - .. code-block:: console + .. code-block:: shell $ fx aggregator generate-cert-request 4. Run the aggregator certificate signing command, replacing :code:`AFQDN` with the actual `fully qualified domain name (FQDN) `_ for the aggregator node. - .. code-block:: console + .. code-block:: shell $ fx aggregator certify @@ -329,7 +328,7 @@ Exporting the Workspace 1. Export the workspace so that it can be imported to the collaborator nodes. - .. code-block:: console + .. code-block:: shell $ fx workspace export @@ -350,7 +349,7 @@ Importing the Workspace 3. Import the workspace archive. - .. code-block:: console + .. code-block:: shell $ fx workspace import --archive WORKSPACE.zip @@ -360,7 +359,7 @@ Importing the Workspace Replace :code:`COL_LABEL` with the label you assigned to the collaborator. This label does not have to be the FQDN; it can be any unique alphanumeric label. - .. code-block:: console + .. code-block:: shell $ fx collaborator generate-cert-request -n {COL_LABEL} -d data/{COL_LABEL} @@ -381,14 +380,14 @@ The creation script will specify the path to the data. In this case, the GaNDLF 5. Copy/scp the WORKSPACE.PATH/col_{COL_LABEL}_to_agg_cert_request.zip file to the aggregator node (or local workspace if using the same system) - .. code-block:: console + .. code-block:: shell $ scp WORKSPACE.PATH/col_{COL_LABEL}_to_agg_cert_request.zip AGGREGATOR_NODE:WORKSPACE_PATH/ 6. On the aggregator node (i.e., the certificate authority in this example), sign the Collaborator CSR Package from the collaborator nodes. - .. code-block:: console + .. code-block:: shell $ fx collaborator certify --request-pkg /PATH/TO/col_{COL_LABEL}_to_agg_cert_request.zip @@ -404,14 +403,14 @@ The creation script will specify the path to the data. In this case, the GaNDLF 7. Copy/scp the WORKSPACE.PATH/agg_to_col_{COL_LABEL}_signed_cert.zip file to the collaborator node (or local workspace if using the same system) - .. code-block:: console + .. code-block:: shell $ scp WORKSPACE.PATH/agg_to_col_{COL_LABEL}_signed_cert.zip COLLABORATOR_NODE:WORKSPACE_PATH/ 8. 
On the collaborator node, import the signed certificate and certificate chain into your workspace. - .. code-block:: console + .. code-block:: shell $ fx collaborator certify --import /PATH/TO/agg_to_col_{COL_LABEL}_signed_cert.zip @@ -427,7 +426,7 @@ STEP 3: Start the Federation 1. Start the Aggregator. - .. code-block:: console + .. code-block:: shell $ fx aggregator start @@ -441,7 +440,7 @@ STEP 3: Start the Federation 2. Run the Collaborator. - .. code-block:: console + .. code-block:: shell $ fx collaborator start -n {COLLABORATOR_LABEL} @@ -473,7 +472,7 @@ Post Experiment Experiment owners may access the final model in its native format. Once the model has been converted to its native format, inference can be done using `GaNDLF's inference API `_. Among other training artifacts, the aggregator creates the last and best aggregated (highest validation score) model snapshots. One may convert a snapshot to the native format and save the model to disk by calling the following command from the workspace: -.. code-block:: console +.. code-block:: shell $ fx model save -i model_protobuf_path.pth -o save_model_path diff --git a/docs/developer_guide/structure/components.rst b/docs/developer_guide/structure/components.rst index ad2cc140dd..cc59e95949 100644 --- a/docs/developer_guide/structure/components.rst +++ b/docs/developer_guide/structure/components.rst @@ -7,7 +7,7 @@ Core Components ***************************** -Open Federated Learning (|productName|) has the following components: +Open Federated Learning (OpenFL) has the following components: - :ref:`openfl_short_lived_components` - :ref:`openfl_ll_components` @@ -45,7 +45,7 @@ Collaborator The Collaborator is a short-lived entity that manages training the model on local data, which includes - executing assigned tasks, - - converting deep learning framework-specific tensor objects to |productName| inner representation, and + - converting deep learning framework-specific tensor objects to OpenFL inner representation, and - exchanging model parameters with the Aggregator. The Collaborator is created by the :ref:`Envoy ` when a new experiment is submitted @@ -57,9 +57,9 @@ included in an FL experiment. At the end of the training task, weight tensors ar and aggregated. Converting tensor objects is handled by :ref:`framework adapter ` plugins. -Included in |productName| are framework adapters for PyTorch and TensorFlow 2.x. +Included in OpenFL are framework adapters for PyTorch and TensorFlow 2.x. The list of framework adapters is extensible. User can contribute new framework adapters for deep learning frameworks -they would like see supported in |productName|. +they would like see supported in OpenFL. .. _openfl_ll_components: @@ -110,4 +110,4 @@ regarding collaborator machine resource utilization. Refer to :ref:`device monit Static Diagram ============== -.. figure:: ../../source/openfl/director_workflow.svg +.. figure:: ../../images/director_workflow.svg diff --git a/docs/developer_guide/structure/plugins.rst b/docs/developer_guide/structure/plugins.rst index 4f914829c8..b2724f331c 100644 --- a/docs/developer_guide/structure/plugins.rst +++ b/docs/developer_guide/structure/plugins.rst @@ -5,20 +5,20 @@ Plugin Components ***************** -Open Federated Learning (|productName|) is designed to be a flexible and extensible framework. Plugins are interchangeable parts of |productName| components. Different plugins support varying usage scenarios. +Open Federated Learning (OpenFL) is designed to be a flexible and extensible framework. 
Plugins are interchangeable parts of OpenFL components. Different plugins support varying usage scenarios. A plugin may be **required** or **optional**. -You can provide your implementations of |productName| plugins to achieve a desired behavior. Technically, a plugin is just a class that implements some interface. You may enable a plugin by putting its -import path and initialization parameters to the config file of a corresponding |productName| component or to the frontend Python API. See `openfl-tutorials `_ for more details. +You can provide your implementations of OpenFL plugins to achieve a desired behavior. Technically, a plugin is just a class that implements some interface. You may enable a plugin by putting its +import path and initialization parameters to the config file of a corresponding OpenFL component or to the frontend Python API. See `openfl-tutorials `_ for more details. .. _framework_adapter: Framework Adapter ###################### -The Framework Adapter plugin enables |productName| support for Deep Learning frameworks usage in FL experiments. +The Framework Adapter plugin enables OpenFL support for Deep Learning frameworks usage in FL experiments. It is a **required** plugin for the frontend API component and Envoy. -All the framework-specific operations on model weights are isolated in this plugin so |productName| can be framework-agnostic. +All the framework-specific operations on model weights are isolated in this plugin so OpenFL can be framework-agnostic. The Framework adapter plugin interface has two required methods to load and extract tensors from a model and an optimizer: diff --git a/docs/developer_guide/utilities.rst b/docs/developer_guide/utilities.rst index 13d68acfdf..90a4c1f052 100644 --- a/docs/developer_guide/utilities.rst +++ b/docs/developer_guide/utilities.rst @@ -1,11 +1,7 @@ -.. # Copyright (C) 2020-2023 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 +OpenFL Utilities +================ -************************************************* -Open Federated Learning (|productName|) Utilities -************************************************* - -The following are utilities available in Open Federated Learning (|productName|). +The following are utilities available in Open Federated Learning (OpenFL). :doc:`utilities/pki` Use the Public Key Infrastructure (PKI) solution workflows to certify the nodes in your federation. diff --git a/docs/developer_guide/utilities/pki.rst b/docs/developer_guide/utilities/pki.rst index 9a78d0f13b..c5982755d5 100644 --- a/docs/developer_guide/utilities/pki.rst +++ b/docs/developer_guide/utilities/pki.rst @@ -1,9 +1,9 @@ .. # Copyright (C) 2020-2023 Intel Corporation .. # SPDX-License-Identifier: Apache-2.0 -******************************************************* -|productName| Public Key Infrastructure (PKI) Solutions -******************************************************* +******************************* +Public Key Infrastructure (PKI) +******************************* .. _pki_overview: @@ -15,14 +15,14 @@ Transport Layer Security (`TLS `_ as a server and `step `_ as a client utilities. They are downloaded during the workspace setup. + The OpenFL PKI solution is based on `step-ca `_ as a server and `step `_ as a client utilities. They are downloaded during the workspace setup. .. 
note:: @@ -43,11 +43,11 @@ Otherwise, you can certify nodes with your own PKI solution or use the PKI solut Semi-Automatic PKI Workflow =========================== -The |productName| PKI pipeline involves creating a local certificate authority (CA) on a \HTTPS \ server that listens for signing requests. Certificates from each client are signed by the CA via a token. The token must be copied to clients in a secure manner. +The OpenFL PKI pipeline involves creating a local certificate authority (CA) on a \HTTPS \ server that listens for signing requests. Certificates from each client are signed by the CA via a token. The token must be copied to clients in a secure manner. 1. Create the CA. - .. code-block:: console + .. code-block:: shell $ fx pki install -p --ca-url @@ -60,7 +60,7 @@ The |productName| PKI pipeline involves creating a local certificate authority ( 2. Run the CA server. - .. code-block:: console + .. code-block:: shell $ fx pki run -p @@ -69,7 +69,7 @@ The |productName| PKI pipeline involves creating a local certificate authority ( 3. Create a token for client. - .. code-block:: console + .. code-block:: shell $ fx pki get-token -n --ca-path --ca-url @@ -82,7 +82,7 @@ The |productName| PKI pipeline involves creating a local certificate authority ( 4. Copy the token to the clients (director or envoy) via a secure channel, and certify the token. - .. code-block:: console + .. code-block:: shell $ cd $ fx pki certify -n -t diff --git a/docs/developer_guide/utilities/splitters_data.rst b/docs/developer_guide/utilities/splitters_data.rst index 66064706d1..ef30f540f0 100644 --- a/docs/developer_guide/utilities/splitters_data.rst +++ b/docs/developer_guide/utilities/splitters_data.rst @@ -8,15 +8,15 @@ Dataset Splitters ***************** -|productName| allows you to specify custom data splits **for simulation runs on a single dataset**. +OpenFL allows you to specify custom data splits **for simulation runs on a single dataset**. -You may apply data splitters differently depending on the |productName| workflow that you follow. +You may apply data splitters differently depending on the OpenFL workflow that you follow. OPTION 1: Use **Native Python API** (Aggregator-Based Workflow) Functions to Split the Data (Deprecated) =========================================================================================== -Predefined |productName| data splitters functions are as follows: +Predefined OpenFL data splitters functions are as follows: - ``openfl.utilities.data_splitters.EqualNumPyDataSplitter`` (default) - ``openfl.utilities.data_splitters.RandomNumPyDataSplitter`` diff --git a/docs/developer_guide/utilities/timeouts.rst b/docs/developer_guide/utilities/timeouts.rst index de0ce2352f..b09be38142 100644 --- a/docs/developer_guide/utilities/timeouts.rst +++ b/docs/developer_guide/utilities/timeouts.rst @@ -2,7 +2,7 @@ .. # SPDX-License-Identifier: Apache-2.0 ******************************************************* -|productName| Component Timeouts +OpenFL Component Timeouts ******************************************************* .. _comp_timeout_overview: @@ -17,7 +17,7 @@ The decorated functions is then monitored and gets terminated right after the ex .. note:: - The `fedtiming` class, `SyncAsyncTaskDecoFactory` factory class, custom synchronous and asynchronous execution of decorated function is in-place. 
The end to end implementation of |productName| Component timeouts feature is still in beta mode and would undergo design and implementation changes before the complete feature is made available. Appreciate any feedbacks or issues. + The `fedtiming` class, `SyncAsyncTaskDecoFactory` factory class, custom synchronous and asynchronous execution of decorated function is in-place. The end to end implementation of OpenFL Component timeouts feature is still in beta mode and would undergo design and implementation changes before the complete feature is made available. Appreciate any feedbacks or issues. .. _comp_timeout_design: @@ -42,7 +42,7 @@ Flow of execution #. [Step A] Decorate any sync or async function :code:`@fedtiming(timeout=)` to monitor its execution time and terminate after `timeout=` value. - .. code-block:: console + .. code-block:: shell @fedtiming(timeout=5) def some_sync_function(): @@ -50,7 +50,7 @@ Flow of execution | This decorated function execution gets terminated after `5 seconds`. - .. code-block:: console + .. code-block:: shell @fedtiming(timeout=10) async def some_async_function(): @@ -64,7 +64,7 @@ Flow of execution **Synchronous Example:** - .. code-block:: console + .. code-block:: shell some_sync_function = fedtiming(timeout=5)(some_sync_function) @@ -76,7 +76,7 @@ Flow of execution **Aynchronous Example:** - .. code-block:: console + .. code-block:: shell some_async_function = fedtiming(timeout=5)(some_async_function) @@ -98,7 +98,7 @@ Flow of execution The prepared `some_sync_function` or `some_async_function` when called internally with its respective parameters. - .. code-block:: console + .. code-block:: shell some_sync_function(*args, **kwargs) -> sync_wrapper(*args, **kwargs) some_async_function(*args, **kwargs) -> async_wrapper(*args, **kwargs) diff --git a/docs/developer_ref/api_documentation.rst b/docs/developer_ref/api_documentation.rst deleted file mode 100644 index fa1126d4a9..0000000000 --- a/docs/developer_ref/api_documentation.rst +++ /dev/null @@ -1,33 +0,0 @@ -.. # Copyright (C) 2020-2024 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - -************************************************* -|productName| API -************************************************* - -Welcome to the |productName| API reference: - -.. autosummary:: - :toctree: _autosummary - :template: custom-module-template.rst - :recursive: - -- :doc:`../source/api/openfl_component` -- :doc:`../source/api/openfl_cryptography` -- :doc:`../source/api/openfl_databases` -- :doc:`../source/api/openfl_experimental` -- :doc:`../source/api/openfl_federated` -- :doc:`../source/api/openfl_interface` -- :doc:`../source/api/openfl_native` -- :doc:`../source/api/openfl_pipelines` -- :doc:`../source/api/openfl_plugins` -- :doc:`../source/api/openfl_protocols` -- :doc:`../source/api/openfl_transport` -- :doc:`../source/api/openfl_utilities` - - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` \ No newline at end of file diff --git a/docs/developer_ref/troubleshooting.rst b/docs/developer_ref/troubleshooting.rst deleted file mode 100644 index dae59bc763..0000000000 --- a/docs/developer_ref/troubleshooting.rst +++ /dev/null @@ -1,39 +0,0 @@ -.. # Copyright (C) 2020-2023 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - -.. 
_troubleshooting: - -******************************************************* -|productName| Troubleshooting -******************************************************* - -The following is a list of commonly reported issues in Open Federated Learning (|productName|). If you don't see your issue reported here, please submit a `Github issue -`_ or contact us directly on `Slack `_. - -1. I see the error :code:`Cannot import name KerasDataLoader from openfl.federated` - - |productName| currently uses conditional imports to attempt to be framework agnostic. If your task runner is derived from `KerasTaskRunner` or `TensorflowTaskRunner`, this error could come up if TensorFlow\*\ was not installed in your collaborator's virtual environment. If running on multi-node experiment, we recommend using the :code:`fx workspace export` and :code:`fx workspace import` commands, as this will ensure consistent modules between aggregator and collaborators. - -2. **None of the collaborators can connect to my aggregator node** - - There are a few reasons that this can happen, but the most common is the aggregator node's FQDN (Fully qualified domain name) was incorrectly specified in the plan. By default, :code:`fx plan initialize` will attempt to resolve the FQDN for you (this should look something like :code:`hostname.domain.com`), but this can sometimes parse an incorrect domain name. - - If you face this issue, look at :code:`agg_addr` in **plan/plan.yaml** and verify that you can access this address externally. If the address is externally accessible and you are running |productName| in an enterprise environment, verify that the aggregator's listening port is not blocked. In such cases, :code:`agg_port` should be manually specified in the FL plan and then redistributed to all participants. - -3. **After starting the collaborator, I see the error** :code:`Handshake failed with fatal error SSL_ERROR_SSL` - - This error likely results from a bad certificate presented by the collaborator. Steps for regenerating the collaborator certificate can be found :ref:`here `. - -4. **I am seeing some other error while running the experiment. Is there more verbose logging available so I can investigate this on my own?** - - Yes! You can turn on verbose logging with :code:`fx -l DEBUG collaborator start` or :code:`fx -l DEBUG aggregator start`. This will give verbose information related to gRPC, bidirectional tensor transfer, and compression related information. - -5. **Silent failures resulting from Out of Memory errors** - - Observations: - * :code:`fx envoy` command terminates abruptly during the execution of training or validation loop due to the SIGKILL command issued by the kernel. - * OOM error is captured in the kernel trace but not at the user program level. - * The failure is likely due to non-optimal memory resource utilization in the prior PyTorch version 1.3.1 & 1.4.0. - - Solution: - * Recent version of PyTorch better handles the memory utilization during runtime. 
Upgrade the PyTorch version to >= 1.11.0 \ No newline at end of file diff --git a/docs/get_started/examples.rst b/docs/get_started/examples.rst index ff1e3f0364..7c2bb92dd2 100644 --- a/docs/get_started/examples.rst +++ b/docs/get_started/examples.rst @@ -7,7 +7,7 @@ Examples for Running a Federation ================================= -|productName| currently offers four ways to set up and run experiments with a federation: +OpenFL currently offers four ways to set up and run experiments with a federation: the Task Runner API, Python Native API, the Interactive API (Deprecated), and the Workflow API. the Task Runner API is advised for production scenarios where the workload needs to be verified prior to execution, whereas the python native API provides a clean python interface on top of it intended for simulation purposes. The Interactive API (Deprecated) introduces a convenient way to set up a federation and brings “long-lived” components in a federation (“Director” and “Envoy”), diff --git a/docs/get_started/examples/interactive_tensorflow_mnist.rst b/docs/get_started/examples/interactive_tensorflow_mnist.rst index 0cecfc6b35..191b4c6aca 100644 --- a/docs/get_started/examples/interactive_tensorflow_mnist.rst +++ b/docs/get_started/examples/interactive_tensorflow_mnist.rst @@ -21,27 +21,27 @@ between 0 and 9. More info at `wiki `_. **What is it?** -The workflow interface is a new way of composing federated learning experiments with |productName|. +The workflow interface is a new way of composing federated learning experiments with OpenFL. It was borne through conversations with researchers and existing users who had novel use cases that didn't quite fit the standard horizontal federated learning paradigm. **Getting Started** First we start by installing the necessary dependencies for the workflow interface: -.. code-block:: console +.. code-block:: shell $ pip install git+https://github.com/intel/openfl.git $ pip install -r workflow_interface_requirements.txt @@ -144,7 +144,7 @@ Next we import the FLSpec, LocalRuntime, and placement decorators. return new_model Now we come to the flow definition. -The |productName| Workflow Interface adopts the conventions set by Metaflow, that every workflow begins with `start` +The OpenFL Workflow Interface adopts the conventions set by Metaflow, that every workflow begins with `start` and concludes with the `end` task. The aggregator begins with an optionally passed in model and optimizer. The aggregator begins the flow with the `start` task, where the list of collaborators is extracted from the runtime (`self.collaborators = self.runtime.collaborators`) diff --git a/docs/get_started/install.singularity.rst b/docs/get_started/install.singularity.rst index a90d56b6d0..8e90556c69 100644 --- a/docs/get_started/install.singularity.rst +++ b/docs/get_started/install.singularity.rst @@ -13,7 +13,7 @@ Singularity Installation .. note:: - Make sure you've run the :ref:`the initial steps ` section first. + Make sure you've run the :ref:`the initial steps ` section first. .. note:: You'll need Docker installed on the node where you'll @@ -21,7 +21,7 @@ Singularity Installation that Docker is installed and running properly, you can run the Docker *Hello World* command like this: - .. code-block:: console + .. code-block:: shell $ docker run hello-world Hello from Docker! @@ -34,7 +34,7 @@ Singularity Installation You'll need Singularity installed on all nodes. To check that Singularity is installed, run the following: - .. code-block:: console + .. 
code-block:: shell $ singularity help diff --git a/docs/get_started/installation.rst b/docs/get_started/installation.rst deleted file mode 100644 index 2e79c92250..0000000000 --- a/docs/get_started/installation.rst +++ /dev/null @@ -1,142 +0,0 @@ -.. # Copyright (C) 2020-2023 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - -.. _install_software_root: - -===================== -Installation -===================== - -Depending on how you want to set up |productName|, choose one of the following installation procedure. - - -.. _install_package: - -********************************* -Install the Package -********************************* - -Follow this procedure to prepare the environment and install the |productName| package. -Perform this procedure on every node in the federation. - -1. Install a Python 3.9 (>=3.9, <3.12) virtual environment using venv. - - See the `Venv installation guide `_ for details. - -2. Create a new Virtualenv environment for the project. - - .. code-block:: console - - $ python3 -m venv venv - -3. Activate the virtual environment. - - .. code-block:: console - - $ source venv/bin/activate - -4. Install the |productName| package. - - A. Installation from PyPI: - - .. code-block:: console - - $ python -m pip install openfl - - B. Installation from source: - - #. Clone the |productName| repository: - - .. code-block:: console - - $ git clone https://github.com/intel/openfl.git - - - #. Install build tools, before installing |productName|: - - .. code-block:: console - - $ python -m pip install -U pip setuptools wheel - $ cd openfl/ - $ python -m pip install . - - - -5. Run the :code:`fx` command in the virtual environment to confirm |productName| is installed. - - .. code-block:: console - - - OpenFL - Open Federated Learning - - BASH COMPLETE ACTIVATION - - Run in terminal: - _FX_COMPLETE=bash_source fx > ~/.fx-autocomplete.sh - source ~/.fx-autocomplete.sh - If ~/.fx-autocomplete.sh has already exist: - source ~/.fx-autocomplete.sh - - CORRECT USAGE - - fx [options] [command] [subcommand] [args] - - GLOBAL OPTIONS - - -l, --log-level TEXT Logging verbosity level. - --no-warnings Disable third-party warnings. - --help Show this message and exit. - - AVAILABLE COMMANDSAVAILABLE COMMANDS - - plan Manage Federated Learning Plans. - ──────────────────────────────────────────────────────────────────────────────── - * freeze Finalize the Data Science plan. - * initialize Initialize Data Science plan. - * print Print the current plan. - * remove Remove this plan. - * save Save the current plan to this plan and... - * switch Switch the current plan to this plan. -.. centered:: Output of the fx Command - - -.. _install_docker: - -**************************************** -|productName| with Docker\* \ -**************************************** - -Follow this procedure to download or build a Docker\*\ image of |productName|, which you can use to run your federation in an isolated environment. - -.. note:: - - The Docker\* \ version of |productName| is to provide an isolated environment complete with the prerequisites to run a federation. When the execution is over, the container can be destroyed and the results of the computation will be available on a directory on the local host. - -1. Install Docker on all nodes in the federation. - - See the `Docker installation guide `_ for details. - -2. Check that Docker is running properly with the *Hello World* command: - - .. code-block:: console - - $ docker run hello-world - Hello from Docker! 
- This message shows that your installation appears to be working correctly. - ... - ... - ... - -3. Build an image from the latest official |productName| release: - - .. code-block:: console - - $ docker pull intel/openfl - - If you prefer to build an image from a specific commit or branch, perform the following commands: - - .. code-block:: console - - $ git clone https://github.com/intel/openfl.git - $ cd openfl - $ docker build -f openfl-docker/Dockerfile.base . diff --git a/docs/get_started/quickstart.rst b/docs/get_started/quickstart.rst deleted file mode 100644 index 7e092b3771..0000000000 --- a/docs/get_started/quickstart.rst +++ /dev/null @@ -1,370 +0,0 @@ -.. # Copyright (C) 2020-2024 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - -.. _quick_start: - -===================== -Quick Start -===================== - -|productName| has a variety of APIs to choose from when setting up and running a federation. -In this quick start guide, we will demonstrate how to run a simple federated learning example using the Task Runner API. - - - -.. _creating_a_federation: - -******************************** -Creating a federation in 5 steps -******************************** - -To begin we recommend installing the latest OpenFL inside a python virtual environment. This can be done with the following: - -.. code-block:: console: - - pip install virtualenv - virtualenv ~/openfl-quickstart - source ~/openfl-quickstart/bin/activate - git clone https://github.com/securefederatedai/openfl.git - cd openfl - pip install . - - -Now you're ready to run your first federation! Copying these commands to your terminal will run a simple federation with an aggregator and two collaborators all on your local machine. These commands can be broken down into five steps, which you can read more about `here <../about/features_index/taskrunner.html#step-1-create-a-workspace>`_ - -1. Setup Federation Workspace & Certificate Authority (CA) for Secure Communication -2. Setup Aggregator & Initialize Federation Plan + Model -3. Setup Collaborator 1 -4. Setup Collaborator 2 -5. Run the Federation - -.. code-block:: console - - ############################################################################################ - # Step 1: Setup Federation Workspace & Certificate Authority (CA) for Secure Communication # - ############################################################################################ - - # Generate an OpenFL Workspace. 
This example will train a pytorch - # CNN model on the MNIST dataset - fx workspace create --template torch_cnn_mnist --prefix my_workspace - cd my_workspace - - # This will create a certificate authority (CA), so the participants communicate over a secure TLS Channel - fx workspace certify - - ################################################################# - # Step 2: Setup Aggregator & Initialize Federation Plan + Model # - ################################################################# - - # Generate a Certificate Signing Request (CSR) for the Aggregator - fx aggregator generate-cert-request --fqdn localhost - - # The CA signs the aggregator's request, which is now available in the workspace - fx aggregator --fqdn localhost certify --silent - - # Initialize FL Plan and Model Weights for the Federation - fx plan initialize --aggregator_address localhost - - ################################ - # Step 3: Setup Collaborator 1 # - ################################ - - # Create a collaborator named "collaborator1" that will use data path "1" - fx collaborator create -n collaborator1 -d 1 - - # Generate a CSR for collaborator1 - fx collaborator generate-cert-request -n collaborator1 - - # The CA signs collaborator1's certificate - fx collaborator certify -n collaborator1 --silent - - ################################ - # Step 4: Setup Collaborator 2 # - ################################ - - # Create a collaborator named "collaborator2" that will use data path "2" - fx collaborator create -n collaborator2 -d 2 - - # Generate a CSR for collaborator2 - fx collaborator generate-cert-request -n collaborator2 - - # The CA signs collaborator2's certificate - fx collaborator certify -n collaborator2 --silent - - ############################## - # Step 5. Run the Federation # - ############################## - - # Run the Aggregator - fx aggregator start & - - # Run Collaborator 1 - fx collaborator start -n collaborator1 & - - # Run Collaborator 2 - fx collaborator start -n collaborator2 - - echo "Congratulations! You've run your first federation with OpenFL" - - -You should see this output at the end of the experiment: - -.. code-block:: console - - INFO Starting round 9... aggregator.py:897 - [15:36:28] INFO Waiting for tasks... collaborator.py:178 - INFO Sending tasks to collaborator collaborator2 for round 9 aggregator.py:329 - INFO Received the following tasks: [name: "aggregated_model_validation" collaborator.py:143 - , name: "train" - , name: "locally_tuned_model_validation" - ] - [15:36:30] METRIC Round 9, collaborator collaborator2 is sending metric for task aggregated_model_validation: accuracy 0.983597 collaborator.py:415 - [15:36:31] INFO Collaborator collaborator2 is sending task results for aggregated_model_validation, round 9 aggregator.py:520 - METRIC Round 9, collaborator validate_agg aggregated_model_validation result accuracy: 0.983597 aggregator.py:559 - [15:36:31] INFO Run 0 epoch of 9 round runner_pt.py:148 - [15:36:31] INFO Waiting for tasks... 
collaborator.py:178 - INFO Sending tasks to collaborator collaborator1 for round 9 aggregator.py:329 - INFO Received the following tasks: [name: "aggregated_model_validation" collaborator.py:143 - , name: "train" - , name: "locally_tuned_model_validation" - ] - [15:36:33] METRIC Round 9, collaborator collaborator1 is sending metric for task aggregated_model_validation: accuracy 0.981000 collaborator.py:415 - [15:36:34] INFO Collaborator collaborator1 is sending task results for aggregated_model_validation, round 9 aggregator.py:520 - METRIC Round 9, collaborator validate_agg aggregated_model_validation result accuracy: 0.981000 aggregator.py:559 - [15:36:34] INFO Run 0 epoch of 9 round runner_pt.py:148 - [15:36:34] METRIC Round 9, collaborator collaborator2 is sending metric for task train: cross_entropy 0.059750 collaborator.py:415 - [15:36:35] INFO Collaborator collaborator2 is sending task results for train, round 9 aggregator.py:520 - METRIC Round 9, collaborator metric train result cross_entropy: 0.059750 aggregator.py:559 - [15:36:35] METRIC Round 9, collaborator collaborator2 is sending metric for task locally_tuned_model_validation: accuracy 0.979596 collaborator.py:415 - INFO Collaborator collaborator2 is sending task results for locally_tuned_model_validation, round 9 aggregator.py:520 - METRIC Round 9, collaborator validate_local locally_tuned_model_validation result accuracy: 0.979596 aggregator.py:559 - INFO Waiting for tasks... collaborator.py:178 - [15:36:37] METRIC Round 9, collaborator collaborator1 is sending metric for task train: cross_entropy 0.019203 collaborator.py:415 - [15:36:38] INFO Collaborator collaborator1 is sending task results for train, round 9 aggregator.py:520 - METRIC Round 9, collaborator metric train result cross_entropy: 0.019203 aggregator.py:559 - [15:36:38] METRIC Round 9, collaborator collaborator1 is sending metric for task locally_tuned_model_validation: accuracy 0.977600 collaborator.py:415 - INFO Collaborator collaborator1 is sending task results for locally_tuned_model_validation, round 9 aggregator.py:520 - METRIC Round 9, collaborator validate_local locally_tuned_model_validation result accuracy: 0.977600 aggregator.py:559 - METRIC Round 9, aggregator: train cross_entropy: 0.039476 - [15:36:39] METRIC Round 9, aggregator: aggregated_model_validation accuracy: 0.982298 - METRIC Round 9: saved the best model with score 0.982298 aggregator.py:854 - METRIC Round 9, aggregator: locally_tuned_model_validation aggregator.py:838 - accuracy: - 0.978598 - INFO Saving round 10 model... aggregator.py:890 - INFO Experiment Completed. Cleaning up... aggregator.py:895 - [15:36:39] INFO Waiting for tasks... collaborator.py:178 - INFO Sending signal to collaborator collaborator1 to shutdown... aggregator.py:283 - INFO End of Federation reached. Exiting... collaborator.py:150 - - ✔ OK - [15:36:46] INFO Waiting for tasks... collaborator.py:178 - [15:36:46] INFO Sending signal to collaborator collaborator2 to shutdown... aggregator.py:283 - INFO End of Federation reached. Exiting... collaborator.py:150 - - ✔ OK - - Congratulations! You've run your first federation with OpenFL - -*************************** -Working with your own model -*************************** - -Now that you've run your first federation, let's see how to replace the model used in the federation. After copying in the text above, you should be in the :code:`my_workspace` directory. 
Every workspace has a :code:`src` directory that contains the Task Runner, an OpenFL interface that defines the deep learning model, as well as the training and validation functions that will run on that model. In this case, the Task Runner is defined in :code:`src/taskrunner.py`. After opening it you'll see the following: - -.. code-block:: python - - class PyTorchCNN(PyTorchTaskRunner): - """ - Simple CNN for classification. - - PyTorchTaskRunner inherits from nn.module, so you can define your model - in the same way that you would for PyTorch - """ - - def __init__(self, device='cpu', **kwargs): - """Initialize. - - Args: - device: The hardware device to use for training (Default = "cpu") - **kwargs: Additional arguments to pass to the function - - """ - super().__init__(device=device, **kwargs) - - #################################### - # Your model goes here # - #################################### - self.conv1 = nn.Conv2d(1, 20, 2, 1) - self.conv2 = nn.Conv2d(20, 50, 5, 1) - self.fc1 = nn.Linear(800, 500) - self.fc2 = nn.Linear(500, 10) - self.to(device) - #################################### - - ###################################################################### - # Your optimizer goes here # - # # - # `self.optimizer` must be set for optimizer weights to be federated # - ###################################################################### - self.optimizer = optim.Adam(self.parameters(), lr=1e-4) - - # Set the loss function - self.loss_fn = F.cross_entropy - - - def forward(self, x): - """ - Forward pass of the model. - - Args: - x: Data input to the model for the forward pass - """ - x = F.relu(self.conv1(x)) - x = F.max_pool2d(x, 2, 2) - x = F.relu(self.conv2(x)) - x = F.max_pool2d(x, 2, 2) - x = x.view(-1, 800) - x = F.relu(self.fc1(x)) - x = self.fc2(x) - return x - -:code:`PyTorchTaskRunner` inherits from :code:`nn.module`, so changing your deep learning model is as easy as modifying the network layers (i.e. :code:`self.conv1`, etc.) into the :code:`__init__` function, and then defining your :code:`forward` function. You'll notice that unlike PyTorch, the optimizer is also defined in this :code:`__init__` function. This is so the model AND optimizer weights can be distributed as part of the federation. - -****************************************** -Defining your own train and validate tasks -****************************************** - -If you continue scrolling down in :code:`src/taskrunner.py`, you'll see two functions: :code:`train_` and :code:`validate_`. These are the primary tasks performed by the collaborators that have access to local data. - -.. code-block:: python - - def train_(self, train_dataloader: Iterator[Tuple[np.ndarray, np.ndarray]]) -> Metric: - """ - Train single epoch. - - Override this function in order to use custom training. - - Args: - train_dataloader: Train dataset batch generator. Yields (samples, targets) tuples of - size = `self.data_loader.batch_size`. - Returns: - Metric: An object containing name and np.ndarray value. 
- """ - losses = [] - for data, target in train_dataloader: - data, target = data.to(self.device), target.to(self.device) - self.optimizer.zero_grad() - output = self(data) - loss = self.loss_fn(output, target) - loss.backward() - self.optimizer.step() - losses.append(loss.detach().cpu().numpy()) - loss = np.mean(losses) - return Metric(name=self.loss_fn.__name__, value=np.array(loss)) - - - def validate_(self, validation_dataloader: Iterator[Tuple[np.ndarray, np.ndarray]]) -> Metric: - """ - Perform validation on PyTorch Model - - Override this function for your own custom validation function - - Args: - validation_dataloader: Validation dataset batch generator. Yields (samples, targets) tuples - Returns: - Metric: An object containing name and np.ndarray value - """ - - total_samples = 0 - val_score = 0 - with torch.no_grad(): - for data, target in validation_dataloader: - samples = target.shape[0] - total_samples += samples - data, target = data.to(self.device), target.to(self.device, dtype=torch.int64) - output = self(data) - # get the index of the max log-probability - pred = output.argmax(dim=1) - val_score += pred.eq(target).sum().cpu().numpy() - - accuracy = val_score / total_samples - return Metric(name='accuracy', value=np.array(accuracy)) - -Each function is passed a dataloader, and returns a :code:`Metric` associated with that task. In this example the :code:`train_` function returns the Cross Entropy Loss for an epoch, and the :code:`validate_` function returns the accuracy. You'll see these metrics reported when running the collaborator locally, and the aggregator will report the average metrics coming from all collaborators. - -***************************** -Defining your own data loader -***************************** - -Now let's look at the OpenFL :code:`PyTorchDataLoader` and see how by subclassing it we are able to split the MNIST dataset across collaborators for training. You'll find the following defined in :code:`src/dataloader.py`. - - -.. code-block:: python - - from openfl.federated import PyTorchDataLoader - - class PyTorchMNISTInMemory(PyTorchDataLoader): - """PyTorch data loader for MNIST dataset.""" - - def __init__(self, data_path, batch_size, **kwargs): - """Instantiate the data object. - - Args: - data_path: The file path to the data - batch_size: The batch size of the data loader - **kwargs: Additional arguments, passed to super - init and load_mnist_shard - """ - super().__init__(batch_size, **kwargs) - - num_classes, X_train, y_train, X_valid, y_valid = load_mnist_shard( - shard_num=int(data_path), **kwargs) - - self.X_train = X_train - self.y_train = y_train - self.train_loader = self.get_train_loader() - - self.X_valid = X_valid - self.y_valid = y_valid - self.val_loader = self.get_valid_loader() - - self.num_classes = num_classes - -This example uses the classic MNIST dataset for digit recognition. For in-memory datasets, the :code:`data_path` is passed a number to determine which slice of the dataset the collaborator should receive. By initializing the :code:`train_loader` (:code:`self.train_loader = self.get_train_loader()`) and the :code:`val_loader` (:code:`self.val_loader = self.get_valid_loader()`), these dataloader will then be able to be passed into the :code:`train_` and :code:`validate_` functions defined above. - -*************************************** -Changing the number of federated rounds -*************************************** - -Now that we've seen how to change the code, let's explore the Federated Learning Plan (FL Plan). 
The plan, which is defined in :code:`plan/plan.yaml`, is used to configure everything about the federation that can't purely be expressed in python. This includes information like network connectivity details, how different components are configured, and how many rounds the federation should train. Different experiments may take more rounds to train depending on how similar data is between collaborators, the model, and the number of collaborators that participate. To tweak this parameter for your experiment, open :code:`plan/plan.yaml` and modify the following section: - -.. code-block:: yaml - - aggregator: - settings: - best_state_path: save/torch_cnn_mnist_best.pbuf - db_store_rounds: 2 - init_state_path: save/torch_cnn_mnist_init.pbuf - last_state_path: save/torch_cnn_mnist_last.pbuf - log_metric_callback: - template: src.utils.write_metric - rounds_to_train: 10 # Change this value to train for a different number of rounds - write_logs: true - -***************************************************** -Starting a new federation after making custom changes -***************************************************** - -Now that you've changed a few things, you can rerun the federation. Copying the below text will reinitialize your plan with new model weights, and relaunch the aggregator and two collaborators: - -.. code-block:: console - - fx plan initialize - fx aggregator start & - fx collaborator start -n collaborator1 & - fx collaborator start -n collaborator2 - -Well done! Now that you know the basics of using the Task Runner API to run OpenFL on a single node, check out some of the other :ref:`openfl_examples` for research purposes and in production. diff --git a/docs/images/CI_details.png b/docs/images/CI_details.png index a57fa0f38d..1f4feff0b9 100644 Binary files a/docs/images/CI_details.png and b/docs/images/CI_details.png differ diff --git a/docs/source/openfl/director_workflow.svg b/docs/images/director_workflow.svg similarity index 100% rename from docs/source/openfl/director_workflow.svg rename to docs/images/director_workflow.svg diff --git a/docs/source/openfl/static_diagram.svg b/docs/images/static_diagram.svg similarity index 100% rename from docs/source/openfl/static_diagram.svg rename to docs/images/static_diagram.svg diff --git a/docs/index.rst b/docs/index.rst index 1d51291cb3..d6614221d2 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,67 +1,104 @@ -.. # Copyright (C) 2020-2024 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0. +Overview +================= +Welcome to OpenFL, a Python library for Federated Learning. OpenFL enables organizations to collaboratively train and/or evaluate machine learning models without sharing sensitive information. OpenFL is agnostic to underlying deep learning backends like TensorFlow and PyTorch. -.. Documentation master file, created by - sphinx-quickstart on Thu Oct 24 15:07:19 2019. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. +OpenFL is a community supported project originally developed by Intel Labs and the Intel Internet of Things Group. -===================================================================== -Welcome to the Open Federated Learning (|productName|) Documentation! -===================================================================== +.. note:: -Open Federated Learning (|productName|) is a Python\* \ 3 library for federated learning that enables organizations to collaboratively train a model without sharing sensitive information. 
+ This project is continually being developed and improved. Expect changes to this manual, the project code, and the project design. + We encourage community contributions! Refer to the `contributing `_ guidelines for more details. + +Training of statistical models may be done with any deep learning framework, such as `TensorFlow `_\* \ or `PyTorch `_\*\, via a plugin mechanism. -|productName| is Deep Learning framework-agnostic. -Training of statistical models may be done with any deep learning framework, such as -`TensorFlow `_\* \ or `PyTorch `_\*\, via a plugin mechanism. +.. figure:: images/ct_vs_fl.png -|productName| is a community supported project, originally developed by Intel Labs and the Intel Internet of Things Group. The team would like to encourage any contributions, notes, or requests to improve the documentation. +.. centered:: Federated Learning -Looking for the Open Flash Library project also referred to as OpenFL? Find it `here `_! +.. _what_is_openfl: + +*************************** +What is Federated Learning? +*************************** + +`Federated learning `_ is a distributed machine learning approach that +enables collaboration on machine learning projects without sharing sensitive data, such as patient records, financial data, +or classified secrets (`McMahan, 2016 `_; +`Sheller, Reina, Edwards, Martin, & Bakas, 2019 `_; +`Yang, Liu, Chen, & Tong, 2019 `_; +`Sheller et al., 2020 `_). +In federated learning, the model moves to meet the data rather than the data moving to meet the model. The movement of data across the federation are the model parameters and their updates. + +.. figure:: images/diagram_fl_new.png + +.. centered:: Federated Learning + +.. _definitions_and_conventions: + +*************************** +Definitions and Conventions +*************************** + +Federated learning brings in a few more components to the traditional data science training pipeline: + +Collaborator + A collaborator is a client in the federation that has access to the local training, validation, and test datasets. By design, the collaborator is the only component of the federation with access to the local data. The local dataset should never leave the collaborator. + +Aggregator + A parameter server sends a global model to the collaborators. Parameter servers are often combined with aggregators on the same compute node. + An aggregator receives locally tuned models from collaborators and combines the locally tuned models into a new global model. Typically, `federated averaging `_, (a weighted average) is the algorithm used to combine the locally tuned models. + +Round + A federation round is defined as the interval (typically defined in terms of training steps) where an aggregation is performed. Collaborators may perform local training on the model for multiple epochs (or even partial epochs) within a single training round. + +.. toctree +.. overview.how_can_intel_protect_federated_learning +.. overview.what_is_intel_federated_learning .. toctree:: :hidden: - :caption: ABOUT + :caption: Getting Started :maxdepth: 2 - about/overview - about/features - about/releases - about/blogs_publications - about/license - about/notices_and_disclaimers + installation + tutorials/taskrunner .. toctree:: :hidden: - :caption: GET STARTED + :caption: Tutorials :maxdepth: 2 - get_started/installation - get_started/quickstart - get_started/examples + tutorials/workflow .. 
toctree:: :hidden: - :caption: DEVELOPER GUIDE + :caption: Developer Guide :maxdepth: 2 + about/features developer_guide/manual developer_guide/openfl_structure .. toctree:: - :hidden: - :caption: DEVELOPER REFERENCE + :hidden: + :caption: Resources :maxdepth: 2 - developer_ref/api_documentation - developer_ref/troubleshooting + openfl + troubleshooting + .. toctree:: :hidden: - :caption: CONTRIBUTING GUIDELINES + :caption: Notes :maxdepth: 2 - contributing_guidelines/contributing + releases + contributing + roadmap + deprecation + about/blogs_publications + about/license + about/notices_and_disclaimers diff --git a/docs/installation.md b/docs/installation.md new file mode 100644 index 0000000000..037bf9fb44 --- /dev/null +++ b/docs/installation.md @@ -0,0 +1,74 @@ +# Installation + +This document provides instructions for installing OpenFL; either in a Python virtual environment or as a docker container. + +## Using `pip` + +We recommend using a Python virtual environment. Refer to the [venv installation guide](https://docs.python.org/3/library/venv.html) for details. + +* From PyPI (latest stable release): + + ```bash + pip install openfl + ``` + +* For development (editable build): + + ```bash + git clone https://github.com/securefederatedai/openfl.git && cd openfl + pip install -e . + ``` + +* Nightly (from the tip of `develop` branch): + + ```bash + pip install git+https://github.com/securefederatedai/openfl.git@develop + ``` + +Verify installation using the `fx --help` command. + +```bash +OpenFL - Open Federated Learning + +BASH COMPLETE ACTIVATION + +Run in terminal: +_FX_COMPLETE=bash_source fx > ~/.fx-autocomplete.sh +source ~/.fx-autocomplete.sh +If ~/.fx-autocomplete.sh already exists: +source ~/.fx-autocomplete.sh + +CORRECT USAGE + +fx [options] [command] [subcommand] [args] + +GLOBAL OPTIONS + +-l, --log-level TEXT Logging verbosity level. +--no-warnings Disable third-party warnings. +--help Show this message and exit. + +AVAILABLE COMMANDS +... +``` + +## Using `docker` + +This method can be used to run federated learning experiments in an isolated environment. Install and verify installation of Docker engine on all nodes in the federation. Refer to the Docker installation [guide](https://docs.docker.com/engine/install/) for details. + +* Pull the latest image: + + > **Note:** OpenFL image hosted on `docker.io` has not been updated since the 1.5 release due to a change in namespace. We are working on this issue. In the meantime, use the instructions below to build an image from source. + + ```bash + docker pull intel/openfl + ``` + +* Build from source: + ```bash + git clone https://github.com/securefederatedai/openfl.git && cd openfl + git checkout develop + ``` + ```bash + ./scripts/build_base_image.sh + ``` diff --git a/docs/make.bat b/docs/make.bat deleted file mode 100644 index 2119f51099..0000000000 --- a/docs/make.bat +++ /dev/null @@ -1,35 +0,0 @@ -@ECHO OFF - -pushd %~dp0 - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set SOURCEDIR=. -set BUILDDIR=_build - -if "%1" == "" goto help - -%SPHINXBUILD% >NUL 2>NUL -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. 
- echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% -goto end - -:help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% - -:end -popd diff --git a/docs/modules.rst b/docs/modules.rst deleted file mode 100644 index 8dbe3f2e2c..0000000000 --- a/docs/modules.rst +++ /dev/null @@ -1,7 +0,0 @@ -openfl -====== - -.. toctree:: - :maxdepth: 4 - - openfl diff --git a/docs/openfl.component.rst b/docs/openfl.component.rst new file mode 100644 index 0000000000..978197bd7b --- /dev/null +++ b/docs/openfl.component.rst @@ -0,0 +1,20 @@ +``openfl.component`` module +=========================== + +.. currentmodule:: openfl.component + +.. automodule:: openfl.component + +.. autosummary:: + :toctree: _autosummary + :template: custom-module-template.rst + :recursive: + + aggregator + assigner + collaborator + director + envoy + straggler_handling_functions + +.. TODO(MasterSkepticista) Shrink API namespace diff --git a/docs/openfl.cryptography.rst b/docs/openfl.cryptography.rst new file mode 100644 index 0000000000..32b7cfeceb --- /dev/null +++ b/docs/openfl.cryptography.rst @@ -0,0 +1,17 @@ +``openfl.cryptography`` module +============================== + +.. currentmodule:: openfl.cryptography + +.. automodule:: openfl.cryptography + +.. autosummary:: + :toctree: _autosummary + :template: custom-module-template.rst + :recursive: + + ca + io + participant + +.. TODO(MasterSkepticista) Shrink API namespace diff --git a/docs/openfl.databases.rst b/docs/openfl.databases.rst new file mode 100644 index 0000000000..7239032ee8 --- /dev/null +++ b/docs/openfl.databases.rst @@ -0,0 +1,16 @@ +``openfl.databases`` module +=========================== + +.. currentmodule:: openfl.databases + +.. automodule:: openfl.databases + +.. autosummary:: + :toctree: _autosummary + :template: custom-module-template.rst + :recursive: + + tensor_db + utilities + +.. TODO(MasterSkepticista) Shrink API namespace diff --git a/docs/openfl.experimental.rst b/docs/openfl.experimental.rst new file mode 100644 index 0000000000..b144159b8b --- /dev/null +++ b/docs/openfl.experimental.rst @@ -0,0 +1,18 @@ +``openfl.experimental`` module +============================== + +.. currentmodule:: openfl.experimental + +.. automodule:: openfl.experimental + +.. autosummary:: + :toctree: _autosummary + :template: custom-module-template.rst + :recursive: + + workflow.interface + workflow.placement + workflow.runtime + workflow.utilities + +.. TODO(MasterSkepticista) Shrink API namespace diff --git a/docs/openfl.federated.rst b/docs/openfl.federated.rst new file mode 100644 index 0000000000..2210a29eae --- /dev/null +++ b/docs/openfl.federated.rst @@ -0,0 +1,17 @@ +``openfl.federated`` module +=========================== + +.. currentmodule:: openfl.federated + +.. automodule:: openfl.federated + +.. autosummary:: + :toctree: _autosummary + :template: custom-module-template.rst + :recursive: + + plan + task + data + +.. TODO(MasterSkepticista) Shrink API namespace diff --git a/docs/openfl.interface.rst b/docs/openfl.interface.rst new file mode 100644 index 0000000000..1542f7a0df --- /dev/null +++ b/docs/openfl.interface.rst @@ -0,0 +1,27 @@ +``openfl.interface`` module +=========================== + +.. currentmodule:: openfl.interface + +.. automodule:: openfl.interface + +.. 
autosummary:: + :toctree: _autosummary + :template: custom-module-template.rst + :recursive: + + aggregation_functions + aggregator + cli_helper + cli + collaborator + director + envoy + experimental + model + pki + plan + tutorial + workspace + +.. TODO(MasterSkepticista) Shrink API namespace diff --git a/docs/openfl.native.rst b/docs/openfl.native.rst new file mode 100644 index 0000000000..33fdbb1914 --- /dev/null +++ b/docs/openfl.native.rst @@ -0,0 +1,16 @@ +``openfl.native`` module (Deprecated) +===================================== + +.. currentmodule:: openfl.native + +.. automodule:: openfl.native + +.. autosummary:: + :toctree: _autosummary + :template: custom-module-template.rst + :recursive: + + native + fastestimator + +.. TODO(MasterSkepticista) Shrink API namespace diff --git a/docs/openfl.pipelines.rst b/docs/openfl.pipelines.rst new file mode 100644 index 0000000000..30d169b64b --- /dev/null +++ b/docs/openfl.pipelines.rst @@ -0,0 +1,22 @@ +``openfl.pipelines`` module +=========================== + +.. currentmodule:: openfl.pipelines + +.. automodule:: openfl.pipelines + +.. autosummary:: + :toctree: _autosummary + :template: custom-module-template.rst + :recursive: + + eden_pipeline + kc_pipeline + no_compression_pipeline + pipeline + random_shift_pipeline + skc_pipeline + stc_pipeline + tensor_codec + +.. TODO(MasterSkepticista) Shrink API namespace diff --git a/docs/openfl.plugins.rst b/docs/openfl.plugins.rst new file mode 100644 index 0000000000..a459bfd316 --- /dev/null +++ b/docs/openfl.plugins.rst @@ -0,0 +1,17 @@ +``openfl.plugins`` module +========================= + +.. currentmodule:: openfl.plugins + +.. automodule:: openfl.plugins + +.. autosummary:: + :toctree: _autosummary + :template: custom-module-template.rst + :recursive: + + frameworks_adapters + interface_serializer + processing_units_monitor + +.. TODO(MasterSkepticista) Shrink API namespace diff --git a/docs/openfl.protocols.rst b/docs/openfl.protocols.rst new file mode 100644 index 0000000000..39cbada5b2 --- /dev/null +++ b/docs/openfl.protocols.rst @@ -0,0 +1,16 @@ +``openfl.protocols`` module +=========================== + +.. currentmodule:: openfl.protocols + +.. automodule:: openfl.protocols + +.. autosummary:: + :toctree: _autosummary + :template: custom-module-template.rst + :recursive: + + interceptors + utils + +.. TODO(MasterSkepticista) Shrink API namespace diff --git a/docs/openfl.rst b/docs/openfl.rst new file mode 100644 index 0000000000..9d053c2173 --- /dev/null +++ b/docs/openfl.rst @@ -0,0 +1,23 @@ +.. currentmodule:: openfl + +Public API: ``openfl`` package +=========================== + +Subpackages +----------- + +.. toctree:: + :maxdepth: 1 + + openfl.component + openfl.cryptography + openfl.experimental + openfl.databases + openfl.federated + openfl.interface + openfl.native + openfl.pipelines + openfl.plugins + openfl.protocols + openfl.transport + openfl.utilities \ No newline at end of file diff --git a/docs/openfl.transport.rst b/docs/openfl.transport.rst new file mode 100644 index 0000000000..5ec55236ed --- /dev/null +++ b/docs/openfl.transport.rst @@ -0,0 +1,13 @@ +``openfl.transport`` module +=========================== + +.. currentmodule:: openfl.transport + +.. automodule:: openfl.transport + +.. 
autosummary:: + :toctree: _autosummary + :template: custom-module-template.rst + :recursive: + + grpc diff --git a/docs/openfl.utilities.rst b/docs/openfl.utilities.rst new file mode 100644 index 0000000000..8e970bf3ab --- /dev/null +++ b/docs/openfl.utilities.rst @@ -0,0 +1,30 @@ +``openfl.utilities`` module +=========================== + + +.. currentmodule:: openfl.utilities + +.. automodule:: openfl.utilities + +.. autosummary:: + :toctree: _autosummary + :template: custom-module-template.rst + :recursive: + + ca + data_splitters + fedcurv + optimizers + ca + checks + click_types + dataloading + fed_timer + logs + mocks + path_check + split + types + utils + workspace + \ No newline at end of file diff --git a/docs/about/releases.md b/docs/releases.md similarity index 98% rename from docs/about/releases.md rename to docs/releases.md index 1efec6438c..7bc328b7b9 100644 --- a/docs/about/releases.md +++ b/docs/releases.md @@ -31,7 +31,6 @@ We are excited to announce the release of OpenFL 1.5.1 - our first since moving to LF AI & Data! This release brings the following changes. -### 1.5.1 Highlights - **Documentation accessibility improvements**: As part of our [Global Accessibility Awareness Day](https://www.intel.com/content/www/us/en/developer/articles/community/open-fl-project-improve-accessibility-for-devs.html) (GAAD) Pledge, the OpenFL project is making strides towards more accessible documentation. This release includes the integration of [Intel® One Mono](https://www.intel.com/content/www/us/en/company-overview/one-monospace-font.html) font, contrast color improvements, formatting improvements, and [new accessibility focused issues](https://github.com/securefederatedai/openfl/issues?q=is%3Aissue+is%3Aopen+accessibility) to take up in the future. - **[Documentation to federate a Generally Nuanced Deep Learning Framework (GaNDLF) model with OpenFL](https://openfl.readthedocs.io/en/latest/running_the_federation_with_gandlf.html)** - **New OpenFL Interactive API Tutorials**: @@ -46,7 +45,6 @@ We are excited to announce the release of OpenFL 1.5.1 - our first since moving ## 1.5 [Full Release Notes](https://github.com/securefederatedai/openfl/releases/tag/v1.5) -### 1.5 Highlights * **New Workflows Interface (Experimental)** - a new way of composing federated learning experiments inspired by [Metaflow](https://github.com/Netflix/metaflow). Enables the creation of custom aggregator and collaborators tasks. This initial release is intended for simulation on a single node (using the LocalRuntime); distributed execution (FederatedRuntime) to be enabled in a future release. 
* **New use cases enabled by the workflow interface**: * **[End-of-round validation with aggregator dataset](https://github.com/intel/openfl/blob/develop/openfl-tutorials/experimental/workflow/102_Aggregator_Validation.ipynb)** @@ -139,4 +137,4 @@ This release includes: - `fx` CLI for multi-node production deployments - Additional test coverage for OpenFL components -\* Singularity supported via DockerHub integration: `singularity shell docker://openfl:latest` + diff --git a/docs/docs-requirements.txt b/docs/requirements.txt similarity index 63% rename from docs/docs-requirements.txt rename to docs/requirements.txt index f5e5832d9d..c20a8ae398 100644 --- a/docs/docs-requirements.txt +++ b/docs/requirements.txt @@ -5,6 +5,14 @@ recommonmark sphinx>=3.0.4 # not directly required, pinned by Snyk to avoid a vulnerability sphinx-copybutton sphinx-prompt -sphinx-rtd-theme -sphinx_substitution_extensions +sphinx-book-theme +sphinx_substitution_extensions sphinxcontrib-mermaid +sphinx-remove-toctrees +sphinx-design +sphinxext-rediraffe +myst-nb>=1.0.0 + +# Packages used for notebook execution +rich[jupyter] +-e . \ No newline at end of file diff --git a/docs/roadmap.md b/docs/roadmap.md new file mode 100644 index 0000000000..375c4a8da6 --- /dev/null +++ b/docs/roadmap.md @@ -0,0 +1,40 @@ +# Roadmap + +This document is intended to give users and contributors an idea of the OpenFL team's current priorities, features we plan to incorporate over the short, medium, and long term, and call out opportunities for the community to get involved. + +### When will this document be updated? +We expect to update this document at least once every quarter. + +## Long-term directions + +### Decoupling the FL specification interface from the infrastructure +The task runner interface is coupled with the the single experiment aggregator / collaborator infrastructure, and the Interactive API is tied to the director / envoy infrastructure. +The Interactive API was originally designed to be a high-level API for OpenFL, but for the cases when more control is required by users, access to lower level interfaces is necessary. +In OpenFL 1.5, we introduced the Workflow API as an experimental feature, which can be used to specify the federated learning flow, independently of the underlying computing infrastructure. The Workflow API facilitates a seamless transition from local simulation to a federated setting. Additionally, this approach offers greater control over the sequence and content of the FL experiment steps, which enables more complex experiments beyond just horizontal FL. Workflow API also provides more granular privacy controls, allowing the model owner to explicitly permit or forbid the transfer of specific attributes over the network. + +### Consolidating interfaces +OpenFL has supported multiple ways of running FL experiments for a long time, many of which are not interoperable: TaskRunner API, Workflow API, Python Native API, and Interactive API. The strategic vision is to consolidate OpenFL around the Workflow API, as it focuses on meeting the needs of the data scientist, who is the main user of the framework. Over the upcoming 1.x releases, we plan to gradually deprecate and eliminate the legacy Python Native API and Interactive API. OpenFL 2.0 will be centered around the Workflow API, facilitating a seamless transition from local simulations to distributed FL experiments, and even enabling the setup of permanent federations, which is currently only possible through the Interactive API. 
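+To make this direction concrete, the sketch below shows roughly what a Workflow API flow looks like when simulated locally. It is illustrative only: the import paths follow the experimental `openfl.experimental.workflow` namespace, and the flow/participant names and the `Aggregator`/`Collaborator` constructor arguments are placeholders that may differ between releases.
+
+```python
+# Minimal Workflow API sketch (experimental API; paths and arguments may differ by release).
+from openfl.experimental.workflow.interface import FLSpec, Aggregator, Collaborator
+from openfl.experimental.workflow.placement import aggregator, collaborator
+from openfl.experimental.workflow.runtime import LocalRuntime
+
+
+class SimpleFlow(FLSpec):
+    @aggregator
+    def start(self):
+        # Every flow begins at `start`; the collaborator list comes from the runtime.
+        self.collaborators = self.runtime.collaborators
+        self.next(self.local_step, foreach="collaborators")
+
+    @collaborator
+    def local_step(self):
+        # Local work (e.g., training on private data) would happen here.
+        self.result = f"greetings from {self.input}"
+        self.next(self.join)
+
+    @aggregator
+    def join(self, inputs):
+        # Combine whatever the collaborators produced.
+        self.results = [step.result for step in inputs]
+        self.next(self.end)
+
+    @aggregator
+    def end(self):
+        print(self.results)
+
+
+flow = SimpleFlow()
+# Local simulation; the FederatedRuntime mentioned above is the distributed counterpart.
+flow.runtime = LocalRuntime(
+    aggregator=Aggregator(),
+    collaborators=[Collaborator(name="portland"), Collaborator(name="seattle")],
+)
+flow.run()
+```
+
+Once the FederatedRuntime lands, the intent is that the same flow definition can be pointed at a distributed runtime without rewriting the steps.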
+ +### Component standardization and framework interoperability + +Federated Learning is a [burgeoning space](https://github.com/weimingwill/awesome-federated-learning#frameworks). +Most core FL infrastructure (model weight extraction, network protocols, and serialization designs) must be reimplemented ad hoc by each framework. +This causes community fragmentation and distracts from some of the bigger problems to be solved in federated learning. In the short term, we want to collaborate on standards for FL, first at the communication and storage layer, and make these components modular across other frameworks. Our aim is also to provide a library for FL algorithms, compression methods, that can both be applied and interpreted easily. + +### Confidential computing support +Although OpenFL currently relies on Intel® SGX for trusted execution, the long term vision is towards broader confidential computing ecosystem support. This can be achieved by packaging OpenFL workspaces and workflows as Confidential Containers (CoCo), which supports a spectrum of TEE backends, including Intel® SGX and TDX, Arm TrustZone, and AMD SEV. + +## Upcoming OpenFL releases + +### 1.7 (Q1 2025) +This release is focused on enabling a great developer experience for OpenFL users: +1. Introducing the [FederatedRuntime](https://openfl.readthedocs.io/en/latest/about/features_index/workflowinterface.html#runtimes-future-plans) for Workflow API, which allows running FL workflows in a distributed setting (after local simulation with the LocalRuntime). +2. Adding support for federated XGBoost in OpenFL. See the example [XGBoost workspace](https://github.com/securefederatedai/openfl/tree/develop/openfl-workspace/xgb_higgs) based on Task Runner API. +3. Revised Task Runner API workspace dockerization process, with TEE-ready containers (using Gramine and Intel® Software Guard Extensions). The current release contains an initial set of changes that enable OpenFL compatibility with the broader confidential containers ecosystem. +4. Streamlining the Federated Evaluation experiments with TaskRunner API +5. Migrating a selection of key OpenFL tutorials from Python Native API to Workflow API. Check out the updated [Tutorials folder](https://github.com/securefederatedai/openfl/tree/develop/openfl-tutorials/experimental/workflow) +6. Deprecating the Python Native API +7. Deprecating the Interactive API + +### 1.8 (TBA) +Stay tuned for updates! \ No newline at end of file diff --git a/docs/source/api/openfl_component.rst b/docs/source/api/openfl_component.rst deleted file mode 100644 index 8deb6528ac..0000000000 --- a/docs/source/api/openfl_component.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. # Copyright (C) 2020-2024 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - -************************************************* -Component Module -************************************************* - -Component modules reference: - -.. autosummary:: - :toctree: _autosummary - :template: custom-module-template.rst - :recursive: - - openfl.component.aggregator - openfl.component.assigner - openfl.component.collaborator - openfl.component.director - openfl.component.envoy - openfl.component.straggler_handling_functions diff --git a/docs/source/api/openfl_cryptography.rst b/docs/source/api/openfl_cryptography.rst deleted file mode 100644 index 475ebd1e9b..0000000000 --- a/docs/source/api/openfl_cryptography.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. # Copyright (C) 2020-2024 Intel Corporation -.. 
# SPDX-License-Identifier: Apache-2.0 - -************************************************* -Cryptography Module -************************************************* - -Cryptography modules reference: - -.. autosummary:: - :toctree: _autosummary - :recursive: - - openfl.cryptography.ca - openfl.cryptography.io - openfl.cryptography.participant diff --git a/docs/source/api/openfl_databases.rst b/docs/source/api/openfl_databases.rst deleted file mode 100644 index 8014d42114..0000000000 --- a/docs/source/api/openfl_databases.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. # Copyright (C) 2020-2024 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - -************************************************* -Databases Module -************************************************* - -Databases modules reference: - -.. autosummary:: - :toctree: _autosummary - :template: custom-module-template.rst - :recursive: - - openfl.databases - \ No newline at end of file diff --git a/docs/source/api/openfl_experimental.rst b/docs/source/api/openfl_experimental.rst deleted file mode 100644 index 907645686d..0000000000 --- a/docs/source/api/openfl_experimental.rst +++ /dev/null @@ -1,18 +0,0 @@ -.. # Copyright (C) 2020-2024 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - -************************************************* -Experimental Module -************************************************* - -Experimental modules reference: - -.. autosummary:: - :toctree: _autosummary - :template: custom-module-template.rst - :recursive: - - openfl.experimental.workflow.interface - openfl.experimental.workflow.placement - openfl.experimental.workflow.runtime - openfl.experimental.workflow.utilities diff --git a/docs/source/api/openfl_federated.rst b/docs/source/api/openfl_federated.rst deleted file mode 100644 index 8c3d50b81e..0000000000 --- a/docs/source/api/openfl_federated.rst +++ /dev/null @@ -1,18 +0,0 @@ -.. # Copyright (C) 2020-2024 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - -************************************************* -Federated Module -************************************************* - -Federated modules reference: - -.. autosummary:: - :toctree: _autosummary - :template: custom-module-template.rst - :recursive: - - openfl.federated.plan - openfl.federated.task - openfl.federated.data - \ No newline at end of file diff --git a/docs/source/api/openfl_interface.rst b/docs/source/api/openfl_interface.rst deleted file mode 100644 index 8685cce5f0..0000000000 --- a/docs/source/api/openfl_interface.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. # Copyright (C) 2020-2024 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - -************************************************* -Interface Module -************************************************* - -Interface modules reference: - -.. autosummary:: - :toctree: _autosummary - :template: custom-module-template.rst - :recursive: - - openfl.interface - \ No newline at end of file diff --git a/docs/source/api/openfl_native.rst b/docs/source/api/openfl_native.rst deleted file mode 100644 index 5f3f513340..0000000000 --- a/docs/source/api/openfl_native.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. # Copyright (C) 2020-2024 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - -************************************************* -Native Module (Deprecated) -************************************************* - -Native modules reference: - -.. 
autosummary:: - :toctree: _autosummary - :template: custom-module-template.rst - :recursive: - - openfl.native - \ No newline at end of file diff --git a/docs/source/api/openfl_pipelines.rst b/docs/source/api/openfl_pipelines.rst deleted file mode 100644 index 42ec1b33ad..0000000000 --- a/docs/source/api/openfl_pipelines.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. # Copyright (C) 2020-2024 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - -************************************************* -Pipelines Module -************************************************* - -Pipelines modules reference: - -.. autosummary:: - :toctree: _autosummary - :template: custom-module-template.rst - :recursive: - - openfl.pipelines - \ No newline at end of file diff --git a/docs/source/api/openfl_plugins.rst b/docs/source/api/openfl_plugins.rst deleted file mode 100644 index de8df91f4f..0000000000 --- a/docs/source/api/openfl_plugins.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. # Copyright (C) 2020-2024 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - -************************************************* -Plugins Module -************************************************* - -Plugins modules reference: - -.. autosummary:: - :toctree: _autosummary - :template: custom-module-template.rst - :recursive: - - openfl.plugins - \ No newline at end of file diff --git a/docs/source/api/openfl_protocols.rst b/docs/source/api/openfl_protocols.rst deleted file mode 100644 index e6e571ccc3..0000000000 --- a/docs/source/api/openfl_protocols.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. # Copyright (C) 2020-2024 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - -************************************************* -Protocols Module -************************************************* - -Protocols modules reference: - -.. autosummary:: - :toctree: _autosummary - :template: custom-module-template.rst - :recursive: - - openfl.protocols - \ No newline at end of file diff --git a/docs/source/api/openfl_transport.rst b/docs/source/api/openfl_transport.rst deleted file mode 100644 index 19eb01d839..0000000000 --- a/docs/source/api/openfl_transport.rst +++ /dev/null @@ -1,15 +0,0 @@ -.. # Copyright (C) 2020-2024 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - -************************************************* -Transport Module -************************************************* - -Transport modules reference: - -.. autosummary:: - :toctree: _autosummary - :template: custom-module-template.rst - :recursive: - - openfl.transport diff --git a/docs/source/api/openfl_utilities.rst b/docs/source/api/openfl_utilities.rst deleted file mode 100644 index b44e1f74d7..0000000000 --- a/docs/source/api/openfl_utilities.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. # Copyright (C) 2020-2024 Intel Corporation -.. # SPDX-License-Identifier: Apache-2.0 - -************************************************* -Utilities Module -************************************************* - -Utilities modules reference: - -.. autosummary:: - :toctree: _autosummary - :template: custom-module-template.rst - :recursive: - - openfl.utilities - \ No newline at end of file diff --git a/docs/troubleshooting.rst b/docs/troubleshooting.rst new file mode 100644 index 0000000000..2479e6b099 --- /dev/null +++ b/docs/troubleshooting.rst @@ -0,0 +1,38 @@ +.. # Copyright (C) 2020-2023 Intel Corporation +.. # SPDX-License-Identifier: Apache-2.0 + +.. _troubleshooting: + +Troubleshooting +=============== + +The following is a list of commonly encountered issues. 
If you do not see your issue reported here, please submit a `Github issue +`_ or contact us directly on `Slack `_. + +1. :code:`Cannot import name KerasDataLoader from openfl.federated` + + OpenFL currently uses conditional imports to attempt to be framework agnostic. If your task runner is derived from `KerasTaskRunner` or `TensorflowTaskRunner`, this error could come up if TensorFlow\*\ was not installed in your collaborator's virtual environment. If running on multi-node experiment, we recommend using the :code:`fx workspace export` and :code:`fx workspace import` commands, as this will ensure consistent modules between aggregator and collaborators. + +2. None of the collaborators can connect to my aggregator node. + + There are a few reasons that this can happen, but the most common is the aggregator node's FQDN (Fully qualified domain name) was incorrectly specified in the plan. By default, :code:`fx plan initialize` will attempt to resolve the FQDN for you (this should look something like :code:`hostname.domain.com`), but this can sometimes parse an incorrect domain name. + + If you face this issue, look at :code:`agg_addr` in **plan/plan.yaml** and verify that you can access this address externally. If the address is externally accessible and you are running OpenFL in an enterprise environment, verify that the aggregator's listening port is not blocked. In such cases, :code:`agg_port` should be manually specified in the FL plan and then redistributed to all participants. + +3. After starting the collaborator, I see :code:`Handshake failed with fatal error SSL_ERROR_SSL` + + This error likely results from a bad certificate presented by the collaborator. Steps for regenerating the collaborator certificate can be found :ref:`here `. + +4. I am seeing some other error while running the experiment. Is there more verbose logging available so I can investigate this on my own? + + Yes! You can turn on verbose logging with :code:`fx -l DEBUG collaborator start` or :code:`fx -l DEBUG aggregator start`. This will give verbose information related to gRPC, bidirectional tensor transfer, and compression related information. + +5. Silent failures resulting from Out of Memory errors + + Observations: + * :code:`fx envoy` command terminates abruptly during the execution of training or validation loop due to the SIGKILL command issued by the kernel. + * OOM error is captured in the kernel trace but not at the user program level. + * The failure is likely due to non-optimal memory resource utilization in the prior PyTorch version 1.3.1 & 1.4.0. + + Solution: + * Recent version of PyTorch better handles the memory utilization during runtime. Upgrade the PyTorch version to >= 1.11.0 \ No newline at end of file diff --git a/docs/tutorials/taskrunner.ipynb b/docs/tutorials/taskrunner.ipynb new file mode 100644 index 0000000000..d19fcdc6d0 --- /dev/null +++ b/docs/tutorials/taskrunner.ipynb @@ -0,0 +1,266 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Quickstart\n", + "\n", + "In this guide, we will train a simple Convolutional Neural Network (CNN) on MNIST handwritten digits dataset. We will simulate a Federated Learning experiment between two collaborators, orchestrated by an aggregator, using the TaskRunner CLI interface.\n", + "\n", + "OpenFL must be installed for this tutorial. Refer to the [installation guide](https://openfl.readthedocs.io/en/latest/installation.html)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Suppress TensorFlow warnings\n", + "%env TF_CPP_MIN_LOG_LEVEL=3" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create a workspace\n", + "\n", + "For brevity, we will reuse an existing workspace." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!fx workspace create --prefix ./mnist_example --template keras_cnn_mnist\n", + "%cd ./mnist_example" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Initialize a Plan\n", + "\n", + "This step builds an entire FL experiment plan, along with the initial set of parameters that will be used in the experiment.\n", + "We supply `localhost` as the aggregator address, for simulation purposes." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "vscode": { + "languageId": "shellscript" + } + }, + "outputs": [], + "source": [ + "!fx plan initialize -a localhost" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create a certificate authority (CA)\n", + "\n", + "OpenFL supports mTLS, which ensures secure communication between the collaborators and the aggregator. This step generates a certificate authority (CA) that will be used to sign the certificates of the collaborators. The CA is generated only once and can be reused for multiple experiments." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "vscode": { + "languageId": "shellscript" + } + }, + "outputs": [], + "source": [ + "!fx workspace certify" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create an aggregator, and its key-pair" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "vscode": { + "languageId": "shellscript" + } + }, + "outputs": [], + "source": [ + "!fx aggregator generate-cert-request --fqdn localhost\n", + "!fx aggregator certify --fqdn localhost --silent" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create collaborators, and their key-pairs\n", + "\n", + "We will name our first collaborator `bob`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "vscode": { + "languageId": "shellscript" + } + }, + "outputs": [], + "source": [ + "!fx collaborator create -d 0 -n bob --silent\n", + "!fx collaborator generate-cert-request -n bob --silent" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Certify `bob` with the CA.\n", + "\n", + "> Note: This is a command that runs on the `aggregator` side. `bob`'s certificate signing request (CSR) is sent to the `aggregator`, which then signs the certificate with the CA. The signed certificate then has to be sent back to `bob`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "vscode": { + "languageId": "shellscript" + } + }, + "outputs": [], + "source": [ + "!fx collaborator certify --request-pkg col_bob_to_agg_cert_request.zip --silent" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "And the second collaborator `charlie`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "vscode": { + "languageId": "shellscript" + } + }, + "outputs": [], + "source": [ + "!fx collaborator create -d 1 -n charlie --silent\n", + "!fx collaborator generate-cert-request -n charlie --silent" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Certify `charlie` with the CA.\n", + "\n", + "> Note: This is a command that runs on the `aggregator` side. `charlie`'s certificate signing request (CSR) is sent to the `aggregator`, which then signs the certificate with the CA. The signed certificate then has to be sent back to `charlie`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "vscode": { + "languageId": "shellscript" + } + }, + "outputs": [], + "source": [ + "!fx collaborator certify --request-pkg col_charlie_to_agg_cert_request.zip --silent" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Import CA signed certificates\n", + "\n", + "This step imports the CA-signed certificates of the collaborator and the aggregator into the workspace.\n", + "\n", + "> Note: This command runs on respective collaborator sites." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "vscode": { + "languageId": "shellscript" + } + }, + "outputs": [], + "source": [ + "!fx collaborator certify --import agg_to_col_bob_signed_cert.zip\n", + "!fx collaborator certify --import agg_to_col_charlie_signed_cert.zip" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Start the simulation\n", + "\n", + "This step starts the simulation of the FL experiment. The aggregator will orchestrate the training process between the collaborators." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "vscode": { + "languageId": "shellscript" + } + }, + "outputs": [], + "source": [ + "!fx aggregator start & fx collaborator start -n bob & fx collaborator start -n charlie" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docs/tutorials/workflow.ipynb b/docs/tutorials/workflow.ipynb new file mode 100644 index 0000000000..0a1e12f4b8 --- /dev/null +++ b/docs/tutorials/workflow.ipynb @@ -0,0 +1,1798 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "id": "14821d97", + "metadata": {}, + "source": [ + "# MNIST Example: Workflow API" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "bd059520", + "metadata": {}, + "source": [ + "Welcome to the first OpenFL Experimental Workflow Interface tutorial! This notebook introduces the API to get up and running with your first horizontal federated learning workflow. This work has the following goals:\n", + "\n", + "- Simplify the federated workflow representation\n", + "- Help users better understand the steps in federated learning (weight extraction, compression, etc.)\n", + "- Designed to maintain data privacy\n", + "- Aims for syntatic consistency with the Netflix MetaFlow project. Infrastructure reuse where possible." 
+ ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "39c3d86a", + "metadata": {}, + "source": [ + "# What is it?" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "a7989e72", + "metadata": {}, + "source": [ + "The workflow interface is a new way of composing federated learning expermients with OpenFL. It was borne through conversations with researchers and existing users who had novel use cases that didn't quite fit the standard horizontal federated learning paradigm. " + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "fc8e35da", + "metadata": {}, + "source": [ + "# Getting Started" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "4dbb89b6", + "metadata": {}, + "source": [ + "First we start by installing the necessary dependencies for the workflow interface" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "f7f98600", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "2024-12-18 22:39:26.421661: I tensorflow/core/util/port.cc:153] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\n", + "2024-12-18 22:39:26.423459: I external/local_xla/xla/tsl/cuda/cudart_stub.cc:32] Could not find cuda drivers on your machine, GPU will not be used.\n", + "2024-12-18 22:39:26.429968: I external/local_xla/xla/tsl/cuda/cudart_stub.cc:32] Could not find cuda drivers on your machine, GPU will not be used.\n", + "2024-12-18 22:39:26.444879: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:477] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", + "WARNING: All log messages before absl::InitializeLog() is called are written to STDERR\n", + "E0000 00:00:1734541766.471717 258172 cuda_dnn.cc:8310] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", + "E0000 00:00:1734541766.479343 258172 cuda_blas.cc:1418] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", + "2024-12-18 22:39:26.506693: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n", + "To enable the following instructions: AVX2 AVX_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n", + "/home/karan/playground/openfl/venv/lib/python3.10/site-packages/_distutils_hack/__init__.py:30: UserWarning: Setuptools is replacing distutils. Support for replacing an already imported distutils is deprecated. In the future, this condition will fail. 
Register concerns at https://github.com/pypa/setuptools/issues/new?template=distutils-deprecation.yml\n", + " warnings.warn(\n", + "Requirement already satisfied: chardet in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from -r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 1)) (5.2.0)\n", + "Requirement already satisfied: charset-normalizer in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from -r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 2)) (3.4.0)\n", + "Requirement already satisfied: dill==0.3.6 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from -r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 3)) (0.3.6)\n", + "Requirement already satisfied: matplotlib>=2.0.0 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from -r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 4)) (3.10.0)\n", + "Requirement already satisfied: metaflow==2.7.15 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from -r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 5)) (2.7.15)\n", + "Requirement already satisfied: nbdev==2.3.12 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from -r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (2.3.12)\n", + "Requirement already satisfied: nbformat==5.10.4 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from -r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 7)) (5.10.4)\n", + "Requirement already satisfied: ray==2.9.2 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from -r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 8)) (2.9.2)\n", + "Requirement already satisfied: tabulate==0.9.0 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from -r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 9)) (0.9.0)\n", + "Requirement already satisfied: requests in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from metaflow==2.7.15->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 5)) (2.32.3)\n", + "Requirement already satisfied: boto3 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from metaflow==2.7.15->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 5)) (1.35.83)\n", + "Requirement already satisfied: pylint in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from metaflow==2.7.15->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 5)) (3.3.2)\n", + "Requirement already satisfied: fastcore>=1.5.27 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (1.7.27)\n", + "Requirement already satisfied: execnb>=0.1.4 in 
/home/karan/playground/openfl/venv/lib/python3.10/site-packages (from nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (0.1.11)\n", + "Requirement already satisfied: astunparse in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (1.6.3)\n", + "Requirement already satisfied: ghapi>=1.0.3 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (1.0.6)\n", + "Requirement already satisfied: watchdog in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (6.0.0)\n", + "Requirement already satisfied: asttokens in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (3.0.0)\n", + "Requirement already satisfied: PyYAML in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (6.0.2)\n", + "Requirement already satisfied: fastjsonschema>=2.15 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from nbformat==5.10.4->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 7)) (2.21.1)\n", + "Requirement already satisfied: jsonschema>=2.6 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from nbformat==5.10.4->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 7)) (4.23.0)\n", + "Requirement already satisfied: jupyter-core!=5.0.*,>=4.12 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from nbformat==5.10.4->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 7)) (5.7.2)\n", + "Requirement already satisfied: traitlets>=5.1 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from nbformat==5.10.4->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 7)) (5.14.3)\n", + "Requirement already satisfied: click>=7.0 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from ray==2.9.2->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 8)) (8.1.7)\n", + "Requirement already satisfied: filelock in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from ray==2.9.2->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 8)) (3.16.1)\n", + "Requirement already satisfied: msgpack<2.0.0,>=1.0.0 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from ray==2.9.2->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 8)) (1.1.0)\n", + "Requirement already satisfied: packaging in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from ray==2.9.2->-r 
/home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 8)) (24.2)\n", + "Requirement already satisfied: protobuf!=3.19.5,>=3.15.3 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from ray==2.9.2->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 8)) (5.29.1)\n", + "Requirement already satisfied: aiosignal in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from ray==2.9.2->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 8)) (1.3.2)\n", + "Requirement already satisfied: frozenlist in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from ray==2.9.2->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 8)) (1.5.0)\n", + "Requirement already satisfied: contourpy>=1.0.1 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from matplotlib>=2.0.0->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 4)) (1.3.1)\n", + "Requirement already satisfied: cycler>=0.10 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from matplotlib>=2.0.0->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 4)) (0.12.1)\n", + "Requirement already satisfied: fonttools>=4.22.0 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from matplotlib>=2.0.0->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 4)) (4.55.3)\n", + "Requirement already satisfied: kiwisolver>=1.3.1 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from matplotlib>=2.0.0->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 4)) (1.4.7)\n", + "Requirement already satisfied: numpy>=1.23 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from matplotlib>=2.0.0->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 4)) (2.0.2)\n", + "Requirement already satisfied: pillow>=8 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from matplotlib>=2.0.0->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 4)) (11.0.0)\n", + "Requirement already satisfied: pyparsing>=2.3.1 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from matplotlib>=2.0.0->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 4)) (3.2.0)\n", + "Requirement already satisfied: python-dateutil>=2.7 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from matplotlib>=2.0.0->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 4)) (2.9.0.post0)\n", + "Requirement already satisfied: ipython in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from execnb>=0.1.4->nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (8.30.0)\n", + "Requirement already satisfied: attrs>=22.2.0 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from jsonschema>=2.6->nbformat==5.10.4->-r 
/home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 7)) (24.3.0)\n", + "Requirement already satisfied: jsonschema-specifications>=2023.03.6 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from jsonschema>=2.6->nbformat==5.10.4->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 7)) (2024.10.1)\n", + "Requirement already satisfied: referencing>=0.28.4 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from jsonschema>=2.6->nbformat==5.10.4->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 7)) (0.35.1)\n", + "Requirement already satisfied: rpds-py>=0.7.1 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from jsonschema>=2.6->nbformat==5.10.4->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 7)) (0.22.3)\n", + "Requirement already satisfied: platformdirs>=2.5 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from jupyter-core!=5.0.*,>=4.12->nbformat==5.10.4->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 7)) (4.3.6)\n", + "Requirement already satisfied: six>=1.5 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from python-dateutil>=2.7->matplotlib>=2.0.0->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 4)) (1.17.0)\n", + "Requirement already satisfied: wheel<1.0,>=0.23.0 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from astunparse->nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (0.45.1)\n", + "Requirement already satisfied: botocore<1.36.0,>=1.35.83 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from boto3->metaflow==2.7.15->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 5)) (1.35.83)\n", + "Requirement already satisfied: jmespath<2.0.0,>=0.7.1 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from boto3->metaflow==2.7.15->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 5)) (1.0.1)\n", + "Requirement already satisfied: s3transfer<0.11.0,>=0.10.0 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from boto3->metaflow==2.7.15->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 5)) (0.10.4)\n", + "Requirement already satisfied: astroid<=3.4.0-dev0,>=3.3.5 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from pylint->metaflow==2.7.15->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 5)) (3.3.6)\n", + "Requirement already satisfied: isort!=5.13.0,<6,>=4.2.5 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from pylint->metaflow==2.7.15->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 5)) (5.13.2)\n", + "Requirement already satisfied: mccabe<0.8,>=0.6 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from pylint->metaflow==2.7.15->-r 
/home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 5)) (0.7.0)\n", + "Requirement already satisfied: tomli>=1.1.0 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from pylint->metaflow==2.7.15->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 5)) (2.2.1)\n", + "Requirement already satisfied: tomlkit>=0.10.1 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from pylint->metaflow==2.7.15->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 5)) (0.13.2)\n", + "Requirement already satisfied: idna<4,>=2.5 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from requests->metaflow==2.7.15->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 5)) (3.10)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from requests->metaflow==2.7.15->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 5)) (2.2.3)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from requests->metaflow==2.7.15->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 5)) (2024.12.14)\n", + "Requirement already satisfied: typing-extensions>=4.0.0 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from astroid<=3.4.0-dev0,>=3.3.5->pylint->metaflow==2.7.15->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 5)) (4.12.2)\n", + "Requirement already satisfied: decorator in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from ipython->execnb>=0.1.4->nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (5.1.1)\n", + "Requirement already satisfied: exceptiongroup in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from ipython->execnb>=0.1.4->nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (1.2.2)\n", + "Requirement already satisfied: jedi>=0.16 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from ipython->execnb>=0.1.4->nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (0.19.2)\n", + "Requirement already satisfied: matplotlib-inline in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from ipython->execnb>=0.1.4->nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (0.1.7)\n", + "Requirement already satisfied: pexpect>4.3 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from ipython->execnb>=0.1.4->nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (4.9.0)\n", + "Requirement already satisfied: prompt_toolkit<3.1.0,>=3.0.41 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from ipython->execnb>=0.1.4->nbdev==2.3.12->-r 
/home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (3.0.48)\n", + "Requirement already satisfied: pygments>=2.4.0 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from ipython->execnb>=0.1.4->nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (2.18.0)\n", + "Requirement already satisfied: stack_data in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from ipython->execnb>=0.1.4->nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (0.6.3)\n", + "Requirement already satisfied: parso<0.9.0,>=0.8.4 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from jedi>=0.16->ipython->execnb>=0.1.4->nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (0.8.4)\n", + "Requirement already satisfied: ptyprocess>=0.5 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from pexpect>4.3->ipython->execnb>=0.1.4->nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (0.7.0)\n", + "Requirement already satisfied: wcwidth in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from prompt_toolkit<3.1.0,>=3.0.41->ipython->execnb>=0.1.4->nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (0.2.13)\n", + "Requirement already satisfied: executing>=1.2.0 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from stack_data->ipython->execnb>=0.1.4->nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (2.1.0)\n", + "Requirement already satisfied: pure-eval in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from stack_data->ipython->execnb>=0.1.4->nbdev==2.3.12->-r /home/karan/playground/openfl/openfl-tutorials/experimental/workflow/workflow_interface_requirements.txt (line 6)) (0.2.3)\n", + "\n", + " ✔️ OK\n", + "Looking in indexes: https://download.pytorch.org/whl/cpu\n", + "Requirement already satisfied: torch in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (2.5.1+cpu)\n", + "Requirement already satisfied: torchvision in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (0.20.1+cpu)\n", + "Requirement already satisfied: filelock in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from torch) (3.16.1)\n", + "Requirement already satisfied: typing-extensions>=4.8.0 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from torch) (4.12.2)\n", + "Requirement already satisfied: networkx in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from torch) (3.2.1)\n", + "Requirement already satisfied: jinja2 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from torch) (3.1.4)\n", + "Requirement already satisfied: fsspec in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from torch) (2024.2.0)\n", + "Requirement already satisfied: sympy==1.13.1 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from torch) (1.13.1)\n", + "Requirement already satisfied: mpmath<1.4,>=1.1.0 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from 
sympy==1.13.1->torch) (1.3.0)\n", + "Requirement already satisfied: numpy in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from torchvision) (2.0.2)\n", + "Requirement already satisfied: pillow!=8.3.*,>=5.3.0 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from torchvision) (11.0.0)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /home/karan/playground/openfl/venv/lib/python3.10/site-packages (from jinja2->torch) (3.0.2)\n" + ] + } + ], + "source": [ + "!fx experimental activate\n", + "!pip install torch torchvision --index-url https://download.pytorch.org/whl/cpu" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "7237eac4", + "metadata": {}, + "source": [ + "We begin with the quintessential example of a small pytorch CNN model trained on the MNIST dataset. Let's start define our dataloaders, model, optimizer, and some helper functions like we would for any other deep learning experiment" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "7e85e030", + "metadata": {}, + "outputs": [], + "source": [ + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "import torch.optim as optim\n", + "import torch\n", + "import torchvision\n", + "import numpy as np\n", + "\n", + "n_epochs = 3\n", + "batch_size_train = 64\n", + "batch_size_test = 1000\n", + "learning_rate = 0.01\n", + "momentum = 0.5\n", + "log_interval = 10\n", + "\n", + "random_seed = 1\n", + "torch.backends.cudnn.enabled = False\n", + "torch.manual_seed(random_seed)\n", + "\n", + "mnist_train = torchvision.datasets.MNIST(\n", + " \"/tmp/files/\",\n", + " train=True,\n", + " download=True,\n", + " transform=torchvision.transforms.Compose(\n", + " [\n", + " torchvision.transforms.ToTensor(),\n", + " torchvision.transforms.Normalize((0.1307,), (0.3081,)),\n", + " ]\n", + " ),\n", + ")\n", + "\n", + "mnist_test = torchvision.datasets.MNIST(\n", + " \"/tmp/files/\",\n", + " train=False,\n", + " download=True,\n", + " transform=torchvision.transforms.Compose(\n", + " [\n", + " torchvision.transforms.ToTensor(),\n", + " torchvision.transforms.Normalize((0.1307,), (0.3081,)),\n", + " ]\n", + " ),\n", + ")\n", + "\n", + "class Net(nn.Module):\n", + " def __init__(self):\n", + " super(Net, self).__init__()\n", + " self.conv1 = nn.Conv2d(1, 10, kernel_size=5)\n", + " self.conv2 = nn.Conv2d(10, 20, kernel_size=5)\n", + " self.conv2_drop = nn.Dropout2d()\n", + " self.fc1 = nn.Linear(320, 50)\n", + " self.fc2 = nn.Linear(50, 10)\n", + "\n", + " def forward(self, x):\n", + " x = F.relu(F.max_pool2d(self.conv1(x), 2))\n", + " x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))\n", + " x = x.view(-1, 320)\n", + " x = F.relu(self.fc1(x))\n", + " x = F.dropout(x, training=self.training)\n", + " x = self.fc2(x)\n", + " return F.log_softmax(x)\n", + "\n", + "def inference(network,test_loader):\n", + " network.eval()\n", + " test_loss = 0\n", + " correct = 0\n", + " with torch.no_grad():\n", + " for data, target in test_loader:\n", + " output = network(data)\n", + " test_loss += F.nll_loss(output, target, size_average=False).item()\n", + " pred = output.data.max(1, keepdim=True)[1]\n", + " correct += pred.eq(target.data.view_as(pred)).sum()\n", + " test_loss /= len(test_loader.dataset)\n", + " print('\\nTest set: Avg. loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\\n'.format(\n", + " test_loss, correct, len(test_loader.dataset),\n", + " 100. 
* correct / len(test_loader.dataset)))\n", + " accuracy = float(correct / len(test_loader.dataset))\n", + " return accuracy" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "cd268911", + "metadata": {}, + "source": [ + "Next we import the `FLSpec`, `LocalRuntime`, and placement decorators.\n", + "\n", + "- `FLSpec` – Defines the flow specification. User defined flows are subclasses of this.\n", + "- `Runtime` – Defines where the flow runs, infrastructure for task transitions (how information gets sent). The `LocalRuntime` runs the flow on a single node.\n", + "- `aggregator/collaborator` - placement decorators that define where the task will be assigned" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "precise-studio", + "metadata": {}, + "outputs": [], + "source": [ + "from copy import deepcopy\n", + "\n", + "from openfl.experimental.workflow.interface import FLSpec, Aggregator, Collaborator\n", + "from openfl.experimental.workflow.runtime import LocalRuntime\n", + "from openfl.experimental.workflow.placement import aggregator, collaborator\n", + "\n", + "\n", + "def FedAvg(models, weights=None):\n", + " new_model = models[0]\n", + " state_dicts = [model.state_dict() for model in models]\n", + " state_dict = new_model.state_dict()\n", + " for key in models[1].state_dict():\n", + " state_dict[key] = torch.from_numpy(np.average([state[key].numpy() for state in state_dicts],\n", + " axis=0, \n", + " weights=weights))\n", + " new_model.load_state_dict(state_dict)\n", + " return new_model" + ] + }, + { + "attachments": { + "image.png": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAt0AAAI6CAYAAAD7dvTIAAAgAElEQVR4nOzde3RUVZ43/C8mIVW5VlJFQqBCCk3R3BKCIB0gQRAaW2Y6QDuCtHar8D79rofhomucfp9xtThqr561enyWArbvWj1L2m7bB9RRSGba+wUJQqRBYkBAKkoCBSFQRSrXSkjFPH8Ue3NO3VKV5FTl8v2s5ZJUqs4+lfrVPr/z2/vsM6a3t7cXRERERESkmVtivQNERERERCMdk24iIiIiIo0x6SYiIiIi0hiTbiIiIiIijTHpJiIiIiLSGJNuIiIiIiKNMekmIiIiItIYk24iIiIiIo0x6SYiIiIi0hiTbiIiIiIijTHpJiIiIiLSGJNuIiIiIiKNMekmIiIiItIYk24iIiIiIo0x6SYiIiIi0hiTbiIiIiIijTHpJiIiIiLSGJNuIiIiIiKNMekmIiIiItIYk24iIiIiIo0x6SYiIiIi0hiTbiIiIiIijTHpJiIiIiLSGJNuIiIiIiKNMekmIiIiItIYk24iIiIiIo0x6SYiIiIi0hiTbiIiIiIijTHpJiIiIiLSGJNuIiIiIiKNMekmIiIiItIYk24iIiIiIo0x6SYiIiIi0hiTbiIiIiIijTHpJiIiIiLSWHysd4CIhof9tS7sr3Wh/KQTddc6AQAutyfGe0UUHoM+HovzDbBk6rByphGL8w2x3iUiGmXG9Pb29sZ6J4ho6Npf68Jj+75F9cW2WO8K0aApmpiCrYsm4uF542O9K0Q0SjDpJqKAXG4PHtn9DfadcMR6V4g0UzQxBZ/+4ywY9Bz4JSJtMekmIj/VF9uwetfXchoJ0Uhm0Mfj03+chaKJKbHeFSIawZh0E5GKy+3B7OeOMeGmUcWgj8fxx+fAkqmL9a4Q0QjF1UuISOWR3d8w4aZRR0ynIiLSCpNuIpL2nXBwDjeNWvtrXXjlyOVY7wYRjVBMuolIevr9+ljvAlFMPbbv21jvAhGNUEy6iQiAt8rNZQFptHO5Pax2E5EmmHQTEQAw4Sa64bNvm2O9C0Q0AjHpJiIAwFeX2mO9C0RDwv5aV6x3gYhGICbdRASAlW4iweX2xHoXiGgEYtJNRACYaBAJ/C4QkRaYdBMRACYaREREWmLSTURERESksfhY7wAR0XC086f5WHSbAfkmPZLGeusXzvZuXGy+jgPfurD57dqgr33mHgsmZ+rw89fORGt3Y9ImERHdxKSbiCgCeRk6HNxSBLMh0e93xuQEGJMTUDghGasKTCjZUY36pk7Vcz76n4VYOiUDFSed0drlmLRJRERqTLqJiCLw4f8shNmQiI7r32PXFw14o/oqKr/zrutcems6Hl+Si2VTMmA2JOLgliLkPl2lev2kDF3U9zkWbRIRkRrndBMRhWnt7HGwjtMDANbvOYPNb9fKhBsAKr9rxsqXT+Jf/vodAMBsSMQz91hisq9ERDS0MOkmIgrTz27PBgDYXV14/fjVoM/bceAi7K4uAMDkTFaZiYgIGNPb29sb650gotgb89hnsd6FIW/nT/OxqXQiAMDyzBd+87VD2bJoIravzvd73HbVjSm/PSJ/Lr01Hb8ruxXm9ETVvHHbVTe+qG/xuxCyfMNMlM00ouKkE19dasPGhRNgTE6As70bNocbxXlpfbZJ/nqfvzPWu0BEIwwr3UREYXqj+mZ1++CWooimjjS2Xoftqhsd178H4F3pxHbVjfOKxH3nT/NxYHMRivPSkJmUANtVt3yNdZweD87Nxtkn5gXc/rTsJDy5PA+AN6nWJ8ShVvH6YG0SEVF0sNJNRABY6Q6Xstot2K66cbqxAx/bmrDjwMWQrz/7xDxYx+lRcdKJlS+flI/nZehw6n/dgaSxt/j9DrhZ0QaA+/98Sk5vUT5ec6kds/79qNyeqMQHa5OCY6WbiAYbK91ERBHY/HYttu
6tlXO2AcA6To+ymUZsX52P3ufvxFf/PBdbFk0MsRV/G4rH41pHN5zt3QET45Uvn5QV6xnjkwNu47cf1ct/RzL1hYiItMekm4goQjsOXETu01VYtLMafznaiJpL7TIhBoDCCcnYvjofhx+dHfY2t71bh9ynq2D69aGgz7nY7E30Z01ICfj7UBd3EhFRbHGdbiKifqr8rlm1ZODa2ePw99ONuGdaJozJCSjOS0P5hpn9mtKxdvY4ZKeOxR25qchJG4sfZCUFvCGPYLvq7td7ICKi6GDSTUQ0SF4/fhWvH7+KvAwdKv6fmSickIxlUzLCfr1YuSTQiiMd179Hx/Xv5S3niYhoeGHvTUQUJsdvFqD3+Tv7nK9d39SJl79oAICwk+S1s8fhvf+3UCbctqtuVJx04i9HG7F1by2S/79KOb2EiIiGH1a6iYjCpE+IAwAstWb0uUpJpJ5YloeksbfA7upCyY7qgBdCTkwPPr2EiIiGNla6iYjCVNPQBgBYNiUDa2ePC/ncDT/MARD+XGt9grc7/tLeFjDh3rJoIqeWEBENY+zBiYjCdP+fTst51Xt+MR2HH52tmmqSl6HDM/dYcPaJeSic4F3W78WD6oq4u9u7ykmyTwItHl84OQ15Gepbx+/8aT7+7e9u7fd+B2uTiIiih9NLiIjCVN/UifV7zuC5sttgNiSiOC8NxXlpAW/v3nH9e+z6osFvGkrNpTYUTkjG0ikZOPvEPJxv6sSy/78Gv/2oHrvunwpjcgLqtv1QVsgnpiciaewtcLZ342JzF6zj9BHvd7A2iYgoeph0ExFFQKxQsvOn+Vh0mwET08fCmJwgfy/uTrnl7dqA00R+/toZ5KSNxXxLOqzj9HKetlhj+4llecg36WVybbvqxvtnrmHz27XY+dN8WMdNxMLJ/qubhBKsTSIiih7eBp6IAPA28ERKvA08EQ02TvAjIiIiItIYk24iIiIiIo0x6SYiIiIi0hiTbiIiIiIijTHpJiIiIiLSGJNuIiIiIiKNMekmIiIiItIYk24iIiIiIo0x6SYiIiIi0hiTbiIiIiIijTHpJiIiIiLSGJNuIiIiIiKNMekmIiIiItIYk24iIiIiIo0x6SYiIiIi0hiTbiIiIiIijTHpJiIiIiLSGJNuIiIiIiKNMekmIiIiItIYk24iIh+lt6aj9/k7cfaJebHelX4T76H01vRY7woREYFJNxGRn8eX5MJ21Q3rOD22LJoY690hIqIRID7WO0BENNQsnJyG3V9eAZCJdbdnYceBi6rfn31iHqzj9ACAipNOlM00YtHOalR+14wtiyZi++p8AICzvRs2hxsAMP+F4yjfMBNZqQkozksDAGzdW4sdBy6qtuds74bp14dkW+UbZqJsphEAUFXfAqtJj91fXsHmt2sBAL3P36nat4qTTjz36QUc2FwEADiwuQgvVl7E5rdrVfsGQD4OAI7fLIDN4UZxXprfPhAR0cCx0k1EpLDzp96kdPPbtXj/zDWZIAuHH52NzKR4jHnsM4x57DMsnHzz96W3pmP76ny8WHkRYx77DLu/vOL3+uK8NGzdW4sxj30mE25nR7fc3ufnWuD4zQK5LyKhH/PYZwAAY3KC3JbjNwtQcdIpXytOAABg0c5q+X9lwi3a3rq3FptKJ6oq+VaTHmMe+4wJNxGRBph0ExEp3D01E5+fawEAWQUWiTjgTZqf+aBe/qz8t5iWIl63+e1a2K66Vdt3tnfLynnpremwjtNj/gvH5e9XvnwSxuQEbFk0EXdPzUTFSScqv2sGANXzAMD060NY+fJJ+fPHtqag72vd7Vmoqm+Rbe84cBFV9S3YVHIz6Rbvm4iIBh+nlxAR3SCS4A17vpGPVdW34O6pmfL3AHDc3iZ/r/x3VmoCnB3dqm36/nytwyP/vaZoHAD/KSKAt+qcmRSP802d6u21d/s9Vzk9JRhjUgJON3aoHjt6vhXrbs+SP/u2RUREg4dJNxHRDY8vyQUAOR9aacuiiaoEe7CEmj+tTIgDEcm27aobYx77zG/ONhERDR1MuomIblg4OU11caHg+M0C1QWVs80pcsrHbHOKfN6V1m5My05SvdaY5F/9FmwON4zJCSi9NV1uT+lahweTMnTq7d2Y0y2q8uICzr44O7qRlZqgemzupFRV5Z2IiLTDOd1ERPDO2zYmJ/gl3IB3rrO4ILKqvgXblufJ3yn//dynF2Adp5dzwHf+ND/ktI8dBy7CdtWNvetnyMe2LJqI3ufvxJZFE/H+mWsom2mU01oOPzrbbxvKpD9UlVtc1CkunNyyaCKK89Lw4sGLQV9DRESDh5VuIiJ4L6Csqg98IeFzn15A2UwjyjfMxPwXjuPsE/PkPGzliiGV3zVj695abF+dj02lE+Fs7w66TWHKb4+otgdAVb2elKGT012q6lvknO7K75rxYuVFbF+dL5PtRTurcWBzEdYUjZMXcSqXDASger5YspCIiLQ3pre3tzfWO0FEsZfxxOdwuTnVIFJiHrVY0s/X2Sfm4XRjh2qVkYHoff5OJstREOjiViKigeD0EiKiCPQ+fyfKN8yUP6+7PUsuC1i+YaYqWRPzrkMt5RfK2SfmqW5FL6atMOEmIhp+WOkmIgDA7OeOofri4K/OMdL4rhBiu+rGlN8ekT/7Lt830Kq0b8U1WEWdBk/RxBQcf3xOrHeDiEYYJt1EBAB4bN+3eOEze6x3gyjmHp43Hn9c94NY7wYRjTCcXkJEAICVNy4GJBrt+F0gIi0w6SYiAMDifAMsmbq+n0g0glkydVicb4j1bhDRCMSkm4ikp+7O6/tJRCPYQ3dkw6DnarpENPiYdBOR9PC88azy0ai1ON+AR+80x3o3iGiEYtJNRCp7189gpY9GHYM+Hs+vuo2xT0SaYdJNRCoGfTyOPz6HyQeNGgZ9PPaun4GiiSmx3hUiGsG4ZCARBeRye7Dk919x7W4a0RbnG/DHdT/gRcREpDkm3UQU0guf2bH9wEXUXeuM9a4QDRqDPh5P3Z2Hh+eN56gOEUUFk24iCsu+Ew589m0z9te6WP0OR1cb0HJZ/d/VWu/js/8BmP5j9XOP/ydgrwYazwAJeiBjEjDuNiA1G0gbD6Td+L+eF7r2h0EfD0umDpZMHR66IxuL8w1Mtokoqph0ExH1g8vlQl1dnfyvvr4e1dXV8nGXyxX0tU899RT+9V//Ffv370d5eTleeeWVkM9XSklJQV5enuo/i8Ui/52TkzNYb5GIiAYRk24ioiCCJdYiqQ6VKBsMBlgsFvlfXl4eioqKEBcXh7/+9a94//33UV1dLZ9bVFSErVu3or29HXv37sX777+PtrabIwppaWno6elBe3t7yH3W6XR+ibgyOTebuSQeEVEsMOkmolFrINVqg8GgSqxFUltUVASLxQKDQT0NJFBV22AwYOvWrVi1ahWKior82njrrbfwxhtv4I033pCPxcXFYfny5SgqKkJOTg7q6+tRX18v9//q1ash33NCQkLQKrn4b8yYMZH8GYmIKAxMuoloxBLV6FDV6lCCVavF476JdaD2X3jhBZSXlwesa
i9evLjPbQBAc3OzTL4/+ugj+bjJZMKaNWuwZs0a3HnnnQCAtrY2v0Rc+V9DQ0PItsaMGRMwGVf+HB/PudBERJFi0k1Ew9pAq9UDSaqD2b9/P/70pz9h3759sn2LxYKHHnooaFU7XOfOncMbb7yBN998E8eOHZOPT5kyRSbgBQUFQV/f2dnpl4wrf7bb7X3uQ25ubsgpLImJif1+f0REIxWTbiIa0gaSVAOBE2tRYRb/DdZ+BqtqP/XUUzKZH0xffvmlrICfO3dOPl5cXIz77rsPa9asiXgOd3d3d9BKufi5LxMmTAg5hSU5OTni90pENNwx6SaimNPigsWBVqvD3e/q6mps374d+/fv96tqP/zww7BYLJq1r/TRRx/JBLy5uVk+fs8998gKeFJS0oDb+f777/2mrPgm5x6PJ+Q2srKyQk5hSUtLG/B+EhENNUy6iUhz0bxgMRpCVbWff/75mO2X8Oabb+KNN97Af/7nf8rHEhISZPJdVlamafsXLlwIOJ9cPNbV1RXy9UajMWSlPDMzU9P9JyLSApNuIhqwWF+wGA1DqaodrqamJln9/uSTT+TjWVlZMgEvLS2N+n5dunQp5MWefS2LmJ6eHnQ+eV5eHsaNGxeld0JEFD4m3UQUlqF4wWI01NXV4ZVXXsGf/vQn1NXVARhaVe1wffvttzIBF9V5AJg6dapMwGfMmBHDPbzpypUrIS/2bGlpCfl65Q2EAiXnvIEQEcUCk24iAjB4FywWFRUhLy8PBoNBkwsWo0FUtR977DHVe1+8eDFWrlyJhx9+eFi9H19Hjx6VCbjywsgFCxbIBHwoJ6ZOpzPkxZ5NTU0hX6/T6UIuicgbCBGRFph0E40iw/WCxWjpq6o9kKX+hqoPPvhAJuCtra3y8b/7u7+TCbhOp4vhHkbO5XKFvNgzkhsIBUvOiYgixaSbaAQZaRcsRsNIr2qH6/vvv5cXYL799tvy8cTERJl8//3f/30M93DwiBsIBbvY8/LlyyFff8stt/R5V0/eQIiIfDHpJhpGRsMFi9FSXV2NP/3pT363ZV+8eLFcV3u0cjqdsvq9f/9++XhOTg7WrFmD++67DwsXLozhHmrL7XaHXBIx3BsIhbrYkzcQIhp9mHQTDTGj9YLFaFBWtZUXE462qnYkbDabTMBramrk49OnT5cV8GnTpsVwD6Ovu7s75JKIkd5AKFByzhsIEY08TLqJomwwLlgUUz6UU0BEYk3+9u/fj/Lycr+q9qpVq7B169ZRXdWOxJEjR2QCfuHCBfl4SUmJTMCzs7NjuIdDg/IGQsGS856enpDbyM7ODlol5w2EiIYnJt1EGgiUWIu1nXnBYnS4XC7s378fTz/9NKvaGnjvvfdkAq5cV/snP/mJTMDHjh0bwz0c2s6fPx9yCku4NxAKNoUlIyMjSu+EiMLFpJuoHwbjgkVRrU5PT5f/ZlI9cKxqR5fH45HJd3l5uXxcr9fL5HvFihUx3MPhSdxAKNh65R0dHSFfbzAYQl7syRsIEUUfk26iAHjB4vDicrmwb98+bN++nVXtGLp69apMwA8cOCAfnzhxokzAi4uLY7iHI0djY2PISnm4NxAKtiTi+PHjo/ROiEYPJt00avGCxeEvWFX74YcfxkMPPcSqdgx98803MgE/efKkfHzmzJkyAf/BD34Qwz0c2ZxOZ8iLPfu6gZBerw9ZKecNhIgix6SbRqyBXrDom1TzgsWhgVXt4aeqqkom4BcvXpSPL1q0SCbgnO4QXcobCAVKzsO5gVCoJRF5AyEif0y6aVhTJtbV1dVobm7mBYsjVLCq9tatW7Fq1SpWtYeJd955RybgbrdbPr5q1SqZgMfFxcVwDwnw3kAo2Hzy+vr6iG4gFCw55+dMow2TbhrSOAVkdHO5XPK27KxqjyzXr1+Xyfd//dd/yceTk5Nl8v3jH/84hntIoShvIBQoOVeOaAQzadKkkFNYeAMhGmmYdFNMaXXBokiqmZANT6xqjy6NjY0yAT948KB8PDc3Vybg8+bNi+EeUqSuX78e8uZB4d5AKNQUlqSkpCi8E6LBw6SbNMdqNYXD5XLhhRdeQHl5Oavao9jp06dlAn7q1Cn5+KxZs3DfffdhzZo1sFqtMdxDGgziBkKhLvaM5AZCgZJz3kCIhhom3TRgg7FmNS9YHL1Y1aZgPv/8c7zxxht488030dDQIB9fvHixrIAbjcYY7iFpSXkDoUDJeTg3EApVKecNhCjamHRTWEQCLaZ9KC9YrKurC/laVqvJV6Cqtrhh0EMPPYRVq1YxLkjlv//7v2UFXJls/fSnP5UJ+JgxY2K4hxRtFy9eDDmFJZIbCAVKzrmiDg02Jt0EgFNAKDpY1aaB6uzslMn3X//6V/l4amqqTL6XL18ewz2koULcQCjYFJbW1taQr09NTQ25JCJvIESRYtI9yuzfv58XLFJUhapqb926FYsXL2b8UL80NDTIBPzQoUPy8by8PKxZswa/+93vYrh3NNQ5HI6QF3qGewMh3yr5z372syi9AxpumHSPIi6XK+gcNlarSSvKuLNYLHL6CKvaNJi+/vprmYCfOXMGAPDmm2/iH/7hH2K8ZzRciRsIBVuv3OFwBHzdtm3b8PTTT0d5b2k4GLVJt6vLg1fONKC8zgFXlwd1rZ1wdXlivVvae/VG5SczGzBmA+Z8wDge0KfEdr8GmSVVhyJTKmYZk/HorFwYEuNjvUsARnHcffoWYL7NG28jLNaUGHdDRO0J4MxR4O8fifWeRAXjLkY6O4BrjTf/c172/n/FL4Cc0bEIwFCNvaFq1CXdri4Pnj5ah1fONIysLz+F9OisXDy/MD9m7TPuRifGHcUC445iJdaxN9SNqqS72tGG1e+eQF1rZ6x3hWLAkqrD3nsKUGSKbqWVcTe6Me4oFhh3FCuxir3hYNQk3dWONiwpP86z7lHOkBiPT1fOjlpnwLgjgHFHscG4o1iJduwNF7fEegeiwdXlwSOfnGZHQHB1eaJ2UGDckcC4o1hg3FGsRDP2hpNRkXS/8NUFVDvaYr0bNES4ujx47PNazdth3JES445igXFHsRKt2BtORnzS7eryYPsJu+btrM3PQu/GJejduASlOVxeDwDKVxSgd+MSbCk0R/W14XjlTIOmcw6jFXcA0P7LRejduAQ7S61RaW+o21JoRu/GJShfURDV14ZjJMXdV2vvQO/GJTh875yotDcc9G5cgrMPFEf9tX0ZSXH36rLp6N24BI71JVFpbzg4+0Axejcuifprw6F17A03Iz7pjtbV04/OyoWzsxsA8PjsXM3bo4Hb991VzbYdrbjbWWpFUnwcnJ3duHuSUfP2aOBGQtyV5hhQaEyBs7MbhcZk5KXqNG+TBmYkxB0A3DMpE87Obhh1CZoVZWhwaRl7w82IT7o/u9QclXYKjcmwNbthb+vCwvHpUWmTBkbL2IhW3C2aYICzsxvVjjZY0/UcZRkGRkLc/XLGBADAu+evISk+Do8XsdAw1I2EuFubnwWjLgHvnr8GAFhnzY5KuzQw0YqP4WDEJ93VjlbN2xDVxg8vXMP+Sy4Y
dQl4Zt5kzdulgdl/KfQtfgciGnEnqo3Vjjb8x6lLAICn7hgdN2QYzoZ73AHeaqO9rQs//+gUOjw9WDV5XFTapf4bCXH36Czvyd2vv/gONc42FGencZRlGNAy9oabEX/rINd17Ye8Fk0woMPTg21HziEvVYcHp2Rj5WQTth05F/D5ry6bjnsmZcKoS4Czsxvvnr+GtLFxKLOYMOalT+XzSnMMeHGRFYVG75I7Nc42bDpgw4HVs1FR58DKd04E3afyFQUos5hgefUwKlYUqLZR9s4JFGen4dkf3gpruh4dnh4cvtyCDZ+eQb1i7lVeqg57ls9AoTEZSfFx6PD0oMbZjvs/+Fr1PAB4Zt5kPDI1B+aURLm9YJTPBQB7Wxf+vfo8dtREZ06goOVwaDTiTlQb/+PUJbxeewXPLcjH/PFpQZ+/pdCMfy6apPqMDl1uxpNzLdh60Kb6+4cbo4Ha2F5ixdaDNpRZTJg/Pg1J8XGwt3Xh8UO1qGpswZ7lM1Cc7d1PEdOVDS7VdspXFGDh+HQYdQnyeb89Vo/Xa6+onrc2PwtPzMlTxfeBS+ptBXuueF8//+hU0PejheEed6La+JezjQCAw5dbsNScgbX5WX6fDxBZPxZJjPrq3bgEFXUOnG/txDprtozd3bZGbK60oXxFAZaZM2Q8BupzAvVNfzzT4NeX+/aNYnuBBOpHA/W3WhvucQd4R5RrnG2ob+1E+TkHCo0p+M0Pbw34Hc5L1eHlJVNVfdC/V5/HpgLvlJQpr1XJ5w7kWCvm4T/5xXd4bkG+KnaXVVTjmXmTsXHmRFVf6ru/gfqmzy83B2xX2TeK7QUTqB8N1N9qjSuY3DTyk26NP2xRbaxq9CaZ9a2dqHG2odCYgtIcg19wf1RWhKXmDDg7u1FR50CWfiwenJKNDk+P33bf+0khkuLjUNXYgivu61g4Ph3v/aQwov07uPp2uHu+R0WdA5ZUHQqNKfiwrAgTk8eixtmO003tuN2UiqXmDOwotcoveaD2s/RjUZydhlPr5mH67iPygPHMvMl4cq4FHZ4efGz3ntGKjs7XzlIrNhWY5XPbPT1YOD4d20usMOkSgp6oDDfR6GTE3EaR6Oy/5MKDU7Kxs9SKzZU21XODfUaBkvRwYzSUfy6aBH38LfjI3oTk+DgsNWfg94umwO353i8eX75rquoAeOEXC2BOSYSt2Y3PLzcjOT4O88enYc/yGchOGisTpdIcA3bdNdXvOyIOqkpr87P8njstIxkPTslGTtJYLKuoDvu9DWXRun4FAP7wtXd05T9OXcJScwYenZXrl3RH0o9FEqPB3G5KxTJzBg5fbkG7pwfLzBnYVGDGogkGTExOVMXjvxXfivJzDtmP+cY9ACwcn44n51owy5SiSoAOrr5dxujppnZMy0jG9hL/C5nzUnV+z83Sj8VScwYOrr4duX8+FPZ7G8qief2KOKneduQc/qkoF/dMygz4/GCfUYenBxfbr8vnDcaxNjMxHrvumooaZzu+dLTKY+pXa+9AfrpeFY8PTsnG3660yH7MN+7bPT2YlpGMMosJZx8oVvWNwfrmQA7fOwfF2WnyuaIffe8nhfjxf9VEPfEmrxGfdGtNVBtf+OqCfEycgf9yxgRVYJfmGLDUnAFbs1v1RRLVQaXfLbgNSfFxftWdsw8Uw5quD3v/rnV1Y9brf5M/X/jFAljT9aqz97xUHU6tm4dpGcl+7b94wq5K4ETSvGf5DMx/6xgAYOPMiejw9KgScbFNZeKdl6rD+mk5cHZ2Y86bR1XPPXbfXPxTUe6ISbq15lttBLxDrg9OycaiCf7zukN9RkqRxGgo+vhbVJ+xOFj4btc3nneWWmFOSURVY4uML/F+d901FdvmWuT3IZLvyLM/vBVJ8XG4/4OvVYnhV2vvCFmlJX+i2ij6NjHKUmhM9ntuJAK2C/kAACAASURBVJ9RuDEaijklUfUZi4QmP12v2q4YCVw52YQdNXYZ9/a2LpTs/VLV/sHVt6PMYpJFFBGjvhVQsU2lHTee+5ezjarq5qvLpuPBKdl4ddn0qI+0DFdiRFl5PAo2yhLJZzQYx1rRF4vPMi9Vh7qfz0ehMUW1XdGPLjVnyMdE3K//5IzqPYikWRRRIumbtxSaUZydhhpnm+r4vzY/C3uWz8CLi6yqxyl6Rvycbq35VhsB7xl4h6fH7wxcJOh7bI2qx3fU2FHjVK9vWmhMhr2ty2/488kvvoto/14+3aD62d3zPQBgi6Ljqm/tVJ35A0BxdhrsbV1+FdPNlTbY27rk9ACR/B2+3KIaKq1v7fSbYvJ4US6S4uPw7vlrfs/dbWtEUnwc58KHybfaCPiPsgihPqO3v3OothtJjIby+eVmVVvtN6rkvts93dSu+lmswHL/B1+rHn+99goOX25RrVgQ7Dvi20ZpjgHWdD1qnG1+ifWmA974/h/TJ4T93kYz32qjsP+SC0nxcX7LVobbj0USo6HYmt2qz9hxY0Up3+2KSrogVpz645kGv/b/eKZB9RxxUrvFp2/0/RnwVso7PD1+ifXPPzoFZ2c3Fgc4QSZ/YkS5xqnuL8S1LKI/FIJ9RivfOeE3YjdYx1rlZyxiyHe7vm1sKTTLuPftm0QfKPrEUH2zva1L9Zi4wPS3x+pVj79eewVVjS0oNKZwLnyMsNI9AOJAASDgOpdJ8XHYUmiWX7T8G2fNvokwANS1dsr5XKU5BiTFx+Gwy39e9Ou1V7Bn+YwB73s4cwm/cXUEfNze3iXnPGYnjQXgnzwBwKHLzVhqzpA/T7rxJc9P1/utg5yl925nFm8ZGxZRVTywenbA3z8+O1dWIktyvKvpHLrsfwX53660qIYnw43R/hJJUCj2tq6A8Xm6qV0VT0nxcahp94+7l0834Mm5Ny8onT3Ou8/6+Lig629P4gEoLCKZ2VRgDjiNx5sgeBOdSPqxSGK0P9rDnBoVKO4/vNCkiif9jSU6fWO0vrXTL/kR826DxZ3oRyk0kXAWZ6cFPNaKZSvFZzIxOTFoP+I7tUTLY60ocvUlUNzXt3bKZYgBIG2sd9T4wwv+FyV+4+pQxZLIS342JRs/8/nuiN+JUR6KLibdAyDOrsX8PyUxb3CdNXvUBnawBEtUyal/RLVRzD/0tcycMaqXrQx2QmlN10c0NYvURLVRzJH1dbspVS5bOVrniwZKsoy6BL8pDRQZMaL8eYDkVFwb8nhRrt/I7GgR7KSScTf0MOkeADEsFezKZsf6ErmkUX1rp0yQNkzL8Zu7bFFU2sQB6weGJL9trs3PGqzd71Og9gHAnHzzjLqxw/uelPPBBWVVEgBarns7hr5WIaDQRLUx0CoywM25gM/Mm4xtR87B1uwGACwIkIjfkaU+AQo3RrVkTklUVa0E3xjr8PSoYlHwvWGGiNG+ViGg0JTD24GuvRDXezx1hwXLKqoj6sciiVEtBYr7H+Wq+zG3pwfGdH3AGLWm6+V7ASAv2lPOwaXIiBHlj+1NAb+/pTkGHFg9G6smj5NJ97UuD6xBPqOJyWNltXuoHGsDxX1eqg5GXQKu3bhIVRw/f5Sb4Xd
S69s3um8k4aFWmqLY4JzufhLVxv1BlicDIJfy+c0PbwUAPHfce7Hl/T4L+q/Nz/Ibtq9qbIE5JdEvgXhiTt6A9z0con3fOZrKC90A7xCcs7Mb88enqTqpvFSdX7VVzD8ONCx9+N45mt72faS4ObexLWhFV1zUu3Kyt8qxo8YuPyPlPL68VJ3fdQeRxKgW3j/vBAC/Yd21+VmYP957Jb44YatxtgeM0Q3TclQ/ixhdZs7wm8e4s9Sq6W3fR5J7JmXKpVED2VxpQ4enR7XaSLj9WCQxqgUR949MzfFr/5GpOarniPnsvjH66rLpftutcbbDmq73e/9r87PQ/stFmt32fSQRI8pi/ravygYXapxtMKckymOQ6Ed2+PQNry6b7reqViyPtcq4903yRXyJ9yKOn74xujY/y28ET8Sob7+Wl6rDhV8sQPsvF3FOd4yw0t1PotqovJDN1x++voQHp2TLg0Zlgwsf25uw1JwBx/oSfH65GVn6sSg0JqPD06PqDH516Fu895NCbC+xYp01G1fc13G7KRWZuuh8ZKL9TQVmzM1KUy0Z2OHpwa8OfSuf+8zROmwvsWLXXVPlBWmBlvmqbHDhxRN2bCoww7G+BNWONrk8kjVdH/BiFlIT1cZga1ED3iTz94umqJatfOnkRTw514JT6+bJC1yLTCnQx6vPuyOJUS1srrRh1eRxKM5Ow9kHinG6qV0udZUUH4f/XX1zlaD7P/gap9bNU8XowvHpfu8JAP7xwFnsumsqTq2bhxpnu188i4SKAhPVRnGyHUyNs1214kIk/Vi4MaoFZdwfu2+unMYg1jj+2N4kq4ubK224e5JRFaOiD/O9SO/+D77GwdW3Y3uJFRum5aCutVMVz74XxZG/QmOy32IFvg5ccqHQmCKXrRT9iFh273RTOyypOnnNilKsj7Ui7sXx0/eYKKr3lQ0uVNQ5UGYxyRhV9mHKvnlzpQ1zs9JQZjHhwi8W4MsbNy9SxnM014inm1jp7gdRbbS3dYWcu1jZ4IK9rUu14sKyimo5B7zMYoI1XY9dpxv8Vg+pbHBh/Sdn5F23yiwmXOvqViUdWqpscGH67iM3rnT2rhlaaExGVWMLpu8+onrfO2rsuP+Dr1Hb7MZScwaWmjNQ42wPuK+bK2149mgdrnV5sNScgTKLCZmJ8aioc6Bk75dReW/DmTiBe66POKh2eFcaESsubDtyzvt37/T+3eePT0O1ow0f2f0vygk3RrWS++dDqKhzIDMxHmUWE5aaM1Db7Mb9H3ytqrLWt3b6xei1Lg/Wf3LGb5uv117B+k/OoLbZ7RfPXLO2b6La+OGF4DfiUP5erLgQST8WSYxqYVlFNZ49Wge353uUWUwos5jg9nyPZ4/W+a3jPuW1KlWMZibG49mjdX7fkfrWTpTs/RJVjS2YmJyoiuetB21cIrUPYkRZ9GfBiP5QXFAJQPF3H3vjM0oI2DfE+li77cg51fFTeUz0Xcd95TsnVDFaaExGRZ0j4Hdk/lvHVP24iOcXT9hHzH0JhqMxvb29vbHeCS0NlzlNYk3QvvZXrMnJ+amDI9CV8INhuMSdWLc2nHn24cYo9W20x10k/VgkMUqhjfa4A7x/A9+1rgPhsXZwaRV7ww0r3VG0pdCM9l8u8luLWszJUq6DfPaB4oDz/cR81f9zlsOSFL5A85bFvPsOT4/q5g3hxihRXyLpx8KNUaK+lK8ogGN9iep+BcDNefdfKKZJ8VhL0cQ53VFUfs6Bfyu+FU/OteBHuZm44r4u5/d1eHpUC9mfbmpHmcUk59UCkPO8qhr9F9InCsXW7FbNbwRuzu978cTNZCaSGCXqSyT9WLgxStSXj+1NKLOY8N5PCuUt2MX8Z3tbF36tuPENj7UUTZxeEmWlOQb8bsFtKDQmywsfapxt+O2xer8v985SK1ZNHicXvRfrlHKoa/CMluHWvFQddpRaZRIDeG9C88czDX7zSiOJUeqf0RJ3QPj9WCQxSv0zmuJuS6EZG6blyFWXOjw9qHG2B1xqlcda7XF6iReTbhrVRtNBiIYOxh3FAuOOYoVJtxfndBMRERERaYxJNxERERGRxph0ExERERFpjEk3EREREZHGmHQPcVsKzX7r1/ZuXBJwXVFf5SsK0Ltxibwb5mAIt20a/nw/67MPFId1MUygmB2ocNum4c/3s46kHxvs/kmLPpSGJt/POpJ+bLD7Jy36UBoamHRTQHmpOpSvKJA3EyCKlp2lVhy+d06sd4NGmS2FZhYUKOpKcww4fO8cntiNErw5zjAUjeWZVk42ocxiQkWdI+pt09DU122TB8umAjNsze6YtE1DT7TWSt5UYIY1XR+Ttmno2VFjj8pdUB+fnYvi7DTstt2882W02qboY6WbiIiIiEhjrHQPop2lVmwqMOMvZxvx849OBfzdiyfs2Fxpk3dfu92UqroLlq3ZHfCOWUq9G5fA1uxWVf+emTcZj0zNgTklER2eHhy+3BLwteG0W76iAGUWEwCgzGJC78Yl2HrQhh019oBtr83PwhNz8uSdvwLdzWtLoRnbS6zYetCGpeYMLDNnICk+Ds7Obrx7/prf34vCV5pjwIHVs1HjbMOs1/8W8HdVjS2Y/9YxAN5YvHuSUVb1Ojw9qG1293nHybMPFMOarleNdvh+9jXONhy45Ar4+r7aFTECANZ0PXo3LkFFnQMr3zkRsO28VB32LJ8h75wZ7I5zYsrAk198h2d/eKtsv8bZhk0HbKhsCLy/1DfH+hLo429B8h8OBPwdAJh2HQRw8w6B+el6eadTW7Mbe2yNIe84Kfoj0QcB/p+9va0L/159PuDrw2lXOR9X2ccFalvsk/LOmYHu2Cpea3n1MPYsn4Hi7DQAkPvKSmb/Hb53Doqz03D/B1/79Vnid4v2Hkdlg0veYdearlfd6fRLR2vIkQzRH4k+SFB+9uL4FUg47Yp+DQC2l1ixvcSKMS99GrRt5XFebM/3jq081g5trHQPos2VNnR4erB4gsHvd4smGNDh6cHmShsA4MOyIpRZTLjW1Y2KOoecxlGcnYY9y2dE1O4z8ybjybkWZOri8bG9CYcvt2D++DQsNWf4PTecdj+2N6Gq0Zu025rdqKhz4PjVtqBt71k+A/npenxsb0JFnQPXujwos5gCzo/856JJWDg+HYcvt+BjexP08bfgwSnZ2Flqjeg9002VDS7UONtQaExBXqpO9btfzpgAAHjhqwsAvAeMTQVm6ONukZ//xfbrKDSm4PeLpkTUbmmOAbvumopCYwqqGltQUefAxOREbCrwn5sYTrvHr7bJeHR2euPzY3tT0LZPrZuH4uw01DjbUVHnQI2zHcXZaTi1bp7f3yEzMR677poKt6cHFXUO2JrdKDSmYO89MyN6z6T27vlrSIqPwzPzJqseX5ufBaMuQSYkIhGYmJyIw5e9sVLV2AJruh5PzrWgNMe/zwzl4OrbUZydhovt11FR54C753t5wqYUbrsVdQ44O7vlv98/7wza9oVfLLjRh3pkjOan67Fn+YyA83IPrr4d5uRE2bY5JRHbS6wRv2e6SfRn/2
6vPzucDb809GiE97V2ZIff2tZmufW1+R9AHBhQjfAVbBr0/Ls2rT8ssddbJWTvuOjVYE7SXb3DmbLH92Uu26em2/eOr/Yvu3+1jTPrc/IJ59WBe7KOZV6AJg63EgJMAW88v7IBbe/dfJcoG6e83nnuvP2xiTJC2+dvuCs9+7ewQwNj5VQJQBflU43wFVwpet0//rdL/6NoA315/olw598etFjTo6cNWICMIXodANMASe+RGe6fXFDkuTnfR+WVQ4AV5nQDXCN6Ts+miT5k/Yba1wJAF+U0A1wjRkdOzdWsmT+xcdHmuaYHgSYSoRugGtM7+unkiR33TQvbTfOnrS/q7PFPDfAFCN0A1xjtu7tzzun/idzrr8uv+zqyNplC4t93125OD/+1rIaVgfAhfj/I8BV8EXX6U6ufKWTJNl24PU8+e32tDbekN6tHVXfYjnyyacZGh7T7QaYQnS6Aa5B/3L4eO7+x5dy4OhQhobH0r64Ia2NN+Tld4fz0M9ezcmRs7UuEYAJ6sbHx8drXQRQe3XdPbUuAaaM8R3ral0CMM3odAMAQMmEbgAAKJnQDQAAJRO6AQCgZEI3AACUTOgGAICSCd0AAFAyoRsAAEomdAMAQMmEbgAAKJnQDQAAJRO6AQCgZEI3AACUTOgGAICSCd0AAFAyoRsAAEomdAMAQMmEbgAAKJnQDQAAJRO6Ac6zdtnCjO9Yl2PbV9e6lK+s8h7WLltY61IAiNANMMm2+29J3/HRtC9uSFdnS63LAWAamFXrAgCmmvtuW5CnX/ogSVM2r1qS3b2DVfuPbV+d9sUNSZIDR4eyYUVzOvccycE3TqersyW7Ni1PkgwNj6XvxGiS5J6dh7N/y4osmV+fNW0LkiSP7OvP7t7BqusNDY9l0fd/VbzW/i0rsmFFc5Lk0MCZtC9qyNMvfZCte/uTJOM71lXVduDoUB5/7u30bu1IkvRu7cgTBwezdW9/VW1Jiu1JcuKxe9N3YjRr2hZMqgGAK6fTDTDBngfPhdKte/vz7Ksni4Bc8cKjK9M0Z1bquntS192T+277fP/aZQuza9PyPHFwMHXdPXn6pQ8mnb+mbUEe2defuu6eInAPjYwV13v+zTM58di9RS2VQF/X3ZMkaZ5bX1zrxGP35sDRoeLcyh8ASdK550jxc2Lgrrz2I/v68/DalqpOfvuihtR19wjcACUQugEmWH9HU55/80ySFF3gShBPzoXmv//3geL5xMeVsZTKeVv39qfv+GjV9YeGx4rO+dplC9O+uCH37Dxc7H/gJ0fTPLc+XZ0tWX9HUw4cHcrBN04nSdVxSbLo+7/KAz85Wjz/ed+HF31fm1ctyaGBM8Vr7+4dzKGBM3n4jz8P3ZX3DcDVZ7wE4DOVELzlZ68V2w4NnMn6O5qK/Uly+J2Piv0THy+ZX5+hkbGqa57//OTI2eLxdzoWJ5k8IpKc6zo3zZmV3374cfX1hscmHTtxPOVimufU55X3R6q2/cdvf5fNq5YUz89/LQCuHqEb4DPb7r8lSYp56Im6OluqAvbVcqn56YmB+EIqYbvv+GjqunsmzWwDMHUI3UCSpLFhVk6Nnr38gdPYfbctqLq5sOLEY/dW3VC5snVeMfKxsnVecdwHvxvLH/7enKpzm+dM7n5X9J0YTfPc+qxdtrC43kQnR87m1htnV1/vs5nuSle+cgPn5QyNjGXJ/Pqqbd+8dX5V5x2A8pjpBpKcC90z2Z4Hl6d5bv2kwJ2cm3Wu3BB5aOBM/vZP24p9Ex8//tzbaV/cUMyA73lw+SXHPnb3Dqbv+Gj2PfT1YltXZ0vGd6xLV2dLnn31ZDasaC7GWl54dOWka0wM/Zfqcldu6qzcONnV2ZI1bQvyxC8HL3rOTDXTPwtAOfxmAZIkS5tm562TM3emd/0dTTk0cOEbCR9/7u1sWNGc/VtW5J6dh3Ns++piDnviiiEH3zidR/b1Z9em5Xl4bUuGhscues2Kr/3oxarrJanqXt964+xi3OXQwJlipvvgG6fzxMHB7Nq0vAjbnXuOpHdrR77Tsbi4iXPikoFJqo6vLFlItaVNsy9/EMCXVDc+Pj5e6yKA2tvZ8066n3m91mVccypz1JUl/c53bPvqvPL+SNUqI1difMc6YblkP1jflr/7s6W1LgOYZoyXAEmSv1z9+7Uu4ZowvmNd9m9ZUTzfvGpJsSzg/i0rqjrWlbnrSy3ldynHtq+u+ir6ytiKwF0unwWgDDrdQOF7T7+Wp158r9ZlTGnnrxDSd3w0X/vRi8Xz85fvu9Ku9PnLCV6so87V8ei61uzYeHutywCmIaEbKJwaPZuVj//njJ7tZuZa2jQ7h7fd7UZKoBTGS4BCY8OsqpU0YKZobJiVHRtvF7iB0gjdQJWOlnk5vO1uKzgwYzQ2zMo/b/6DbLxzUa1LAaYx4yXARZnxZjprbJiVjXcuyg/Wt/kjEyid0A1c0lsnP84zvzmR/UeH8ov+U7UuB65IY8OsdLTMy7rbF2bjnYvS0TLv8icBXAVCNwAAlMxMNwAAlEzoBgCAkgndAABQMqEbAABKJnQDAEDJhG4AACiZ0A0AACUTugEAoGRCNwAAlEzoBgCAkgndAABQMqEbAABKJnQDAEDJhG4AACiZ0A0AACUTugEAoGRCNwAAlEzoBgCAkgndAABQMqEbAABKJnQDAEDJhG4AACiZ0A0AACUTugEAoGRCNwAAlEzoBgCAkgndAABQMqEbAABKJnQDAEDJhG4AACiZ0A0AACUTugEAoGRCNwAAlEzoBgCAkgndAABQMqEbAABKJnQDAEDJhG4AACjZ/wG/aKvqTLZkfAAAAABJRU5ErkJggg==" + } + }, + "cell_type": "markdown", + "id": "8e406db6", + "metadata": { + "scrolled": true + }, + "source": [ + "Now we come to the flow definition. The OpenFL Workflow Interface adopts the conventions set by Metaflow, that every workflow begins with `start` and concludes with the `end` task. The aggregator begins with an optionally passed in model and optimizer. The aggregator begins the flow with the `start` task, where the list of collaborators is extracted from the runtime (`self.collaborators = self.runtime.collaborators`) and is then used as the list of participants to run the task listed in `self.next`, `aggregated_model_validation`. 
The model, optimizer, and anything that is not explicitly excluded from the next function will be passed from the `start` function on the aggregator to the `aggregated_model_validation` task on the collaborator. Where the tasks run is determined by the placement decorator that precedes each task definition (`@aggregator` or `@collaborator`). Once each of the collaborators (defined in the runtime) complete the `aggregated_model_validation` task, they pass their current state onto the `train` task, from `train` to `local_model_validation`, and then finally to `join` at the aggregator. It is in `join` that an average is taken of the model weights, and the next round can begin.\n", + "\n", + "![image.png](attachment:image.png)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "difficult-madrid", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Aggregator step \"start\" registered\n", + "Collaborator step \"aggregated_model_validation\" registered\n", + "Collaborator step \"train\" registered\n", + "Collaborator step \"local_model_validation\" registered\n", + "Aggregator step \"join\" registered\n", + "Aggregator step \"end\" registered\n" + ] + } + ], + "source": [ + "class FederatedFlow(FLSpec):\n", + "\n", + " def __init__(self, model=None, optimizer=None, rounds=3, **kwargs):\n", + " super().__init__(**kwargs)\n", + " if model is not None:\n", + " self.model = model\n", + " self.optimizer = optimizer\n", + " else:\n", + " self.model = Net()\n", + " self.optimizer = optim.SGD(self.model.parameters(), lr=learning_rate,\n", + " momentum=momentum)\n", + " self.rounds = rounds\n", + "\n", + " @aggregator\n", + " def start(self):\n", + " print(f'Performing initialization for model')\n", + " self.collaborators = self.runtime.collaborators\n", + " self.private = 10\n", + " self.current_round = 0\n", + " self.next(self.aggregated_model_validation, foreach='collaborators', exclude=['private'])\n", + "\n", + " @collaborator\n", + " def aggregated_model_validation(self):\n", + " print(f'Performing aggregated model validation for collaborator {self.input}')\n", + " self.agg_validation_score = inference(self.model, self.test_loader)\n", + " print(f'{self.input} value of {self.agg_validation_score}')\n", + " self.next(self.train)\n", + "\n", + " @collaborator\n", + " def train(self):\n", + " self.model.train()\n", + " self.optimizer = optim.SGD(self.model.parameters(), lr=learning_rate,\n", + " momentum=momentum)\n", + " train_losses = []\n", + " for batch_idx, (data, target) in enumerate(self.train_loader):\n", + " self.optimizer.zero_grad()\n", + " output = self.model(data)\n", + " loss = F.nll_loss(output, target)\n", + " loss.backward()\n", + " self.optimizer.step()\n", + " if batch_idx % log_interval == 0:\n", + " print('Train Epoch: 1 [{}/{} ({:.0f}%)]\\tLoss: {:.6f}'.format(\n", + " batch_idx * len(data), len(self.train_loader.dataset),\n", + " 100. 
* batch_idx / len(self.train_loader), loss.item()))\n", + " self.loss = loss.item()\n", + " torch.save(self.model.state_dict(), 'model.pth')\n", + " torch.save(self.optimizer.state_dict(), 'optimizer.pth')\n", + " self.training_completed = True\n", + " self.next(self.local_model_validation)\n", + "\n", + " @collaborator\n", + " def local_model_validation(self):\n", + " self.local_validation_score = inference(self.model, self.test_loader)\n", + " print(\n", + " f'Doing local model validation for collaborator {self.input}: {self.local_validation_score}')\n", + " self.next(self.join, exclude=['training_completed'])\n", + "\n", + " @aggregator\n", + " def join(self, inputs):\n", + " self.average_loss = sum(input.loss for input in inputs) / len(inputs)\n", + " self.aggregated_model_accuracy = sum(\n", + " input.agg_validation_score for input in inputs) / len(inputs)\n", + " self.local_model_accuracy = sum(\n", + " input.local_validation_score for input in inputs) / len(inputs)\n", + " print(f'Average aggregated model validation values = {self.aggregated_model_accuracy}')\n", + " print(f'Average training loss = {self.average_loss}')\n", + " print(f'Average local model validation values = {self.local_model_accuracy}')\n", + " self.model = FedAvg([input.model for input in inputs])\n", + " self.optimizer = [input.optimizer for input in inputs][0]\n", + " self.current_round += 1\n", + " if self.current_round < self.rounds:\n", + " self.next(self.aggregated_model_validation,\n", + " foreach='collaborators', exclude=['private'])\n", + " else:\n", + " self.next(self.end)\n", + "\n", + " @aggregator\n", + " def end(self):\n", + " print(f'This is the end of the flow')" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "2aabf61e", + "metadata": {}, + "source": [ + "You'll notice in the `FederatedFlow` definition above that there were certain attributes that the flow was not initialized with, namely the `train_loader` and `test_loader` for each of the collaborators. These are **private_attributes** of the particular participant and (as the name suggests) are accessible ONLY to the particular participant's through its task. Additionally these private attributes are always filtered out of the current state when transferring from collaborator to aggregator, and vice versa.\n", + " \n", + "Users can directly specify a collaborator's private attributes via `collaborator.private_attributes` which is a dictionary where key is name of the attribute and value is the object that is made accessible to collaborator. In this example, we segment shards of the MNIST dataset for four collaborators: `Portland`, `Seattle`, `Chandler` and `Bangalore`. Each shard / slice of the dataset is assigned to collaborator's private_attribute.\n", + " \n", + "Note that the private attributes are flexible, and user can choose to pass in a completely different type of object to any of the collaborators or aggregator (with an arbitrary name).\n", + " \n", + "Subsequent tutorials shall show examples to assign private_attributes for aggregator and another methodology of specifying private attributes via a callable." 
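Before setting up the participants, one piece of the flow above is worth pausing on: the `join` step assigns `self.model = FedAvg([input.model for input in inputs])`, and that `FedAvg` helper is defined earlier in this notebook rather than repeated here. As a minimal sketch of what that style of aggregation does, the function below averages the parameters of a list of identically shaped PyTorch models, giving every collaborator equal weight. The name `fedavg_state_dicts` and the equal weighting are illustrative assumptions for this sketch, not the tutorial's actual implementation.

```python
from copy import deepcopy

import torch


def fedavg_state_dicts(models):
    """Element-wise average of the parameters of identically shaped models.

    Illustrative stand-in for the notebook's FedAvg helper: every
    collaborator contributes with equal weight.
    """
    averaged = deepcopy(models[0])
    state = averaged.state_dict()
    for key, value in state.items():
        # Stack the corresponding tensor from every collaborator and take the
        # mean, casting through float so integer buffers do not break mean().
        stacked = torch.stack([m.state_dict()[key].float() for m in models], dim=0)
        state[key] = stacked.mean(dim=0).to(value.dtype)
    averaged.load_state_dict(state)
    return averaged
```

Under those assumptions, calling such a helper on `[input.model for input in inputs]` inside `join` would produce one averaged model from the four collaborators' locally trained copies, which is what the flow carries into the next round.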
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "forward-world", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Local runtime collaborators = ['Portland', 'Seattle', 'Chandler', 'Bangalore']\n" + ] + } + ], + "source": [ + "# Setup participants\n", + "aggregator = Aggregator()\n", + "aggregator.private_attributes = {}\n", + "\n", + "# Setup collaborators with private attributes\n", + "collaborator_names = ['Portland', 'Seattle', 'Chandler','Bangalore']\n", + "collaborators = [Collaborator(name=name) for name in collaborator_names]\n", + "for idx, collaborator in enumerate(collaborators):\n", + " local_train = deepcopy(mnist_train)\n", + " local_test = deepcopy(mnist_test)\n", + " local_train.data = mnist_train.data[idx::len(collaborators)]\n", + " local_train.targets = mnist_train.targets[idx::len(collaborators)]\n", + " local_test.data = mnist_test.data[idx::len(collaborators)]\n", + " local_test.targets = mnist_test.targets[idx::len(collaborators)]\n", + " collaborator.private_attributes = {\n", + " 'train_loader': torch.utils.data.DataLoader(local_train,batch_size=batch_size_train, shuffle=True),\n", + " 'test_loader': torch.utils.data.DataLoader(local_test,batch_size=batch_size_train, shuffle=True)\n", + " }\n", + "\n", + "local_runtime = LocalRuntime(aggregator=aggregator, collaborators=collaborators, backend='single_process')\n", + "print(f'Local runtime collaborators = {local_runtime.collaborators}')" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "278ad46b", + "metadata": {}, + "source": [ + "Now that we have our flow and runtime defined, let's run the experiment! " + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "a175b4d6", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Created flow FederatedFlow\n", + "\n", + "Calling start\n", + "\u001b[94mPerforming initialization for model\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for start\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for start\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94mPerforming aggregated model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/tmp/ipykernel_252106/3655034279.py:59: UserWarning: Implicit dimension choice for log_softmax has been deprecated. Change the call to include dim=X as an argument.\n", + " return F.log_softmax(x)\n", + "\u001b[0m\u001b[94m/home/karan/playground/openfl/venv/lib/python3.10/site-packages/torch/nn/_reduction.py:51: UserWarning: size_average and reduce args will be deprecated, please use reduction='sum' instead.\n", + " warnings.warn(warning.format(ret))\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m\n", + "Test set: Avg. 
loss: 2.3264, Accuracy: 309/2500 (12%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPortland value of 0.12359999865293503\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94mTrain Epoch: 1 [0/15000 (0%)]\tLoss: 2.370591\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [640/15000 (4%)]\tLoss: 2.265008\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1280/15000 (9%)]\tLoss: 2.300407\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1920/15000 (13%)]\tLoss: 2.249448\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [2560/15000 (17%)]\tLoss: 2.251498\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3200/15000 (21%)]\tLoss: 2.267806\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3840/15000 (26%)]\tLoss: 2.201275\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [4480/15000 (30%)]\tLoss: 2.181914\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5120/15000 (34%)]\tLoss: 2.115410\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5760/15000 (38%)]\tLoss: 2.086649\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [6400/15000 (43%)]\tLoss: 1.970717\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7040/15000 (47%)]\tLoss: 1.829772\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7680/15000 (51%)]\tLoss: 1.933031\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8320/15000 (55%)]\tLoss: 1.816630\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8960/15000 (60%)]\tLoss: 1.799785\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [9600/15000 (64%)]\tLoss: 1.677242\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10240/15000 (68%)]\tLoss: 1.540448\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10880/15000 (72%)]\tLoss: 1.904726\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [11520/15000 (77%)]\tLoss: 1.326228\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12160/15000 (81%)]\tLoss: 1.266977\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12800/15000 (85%)]\tLoss: 1.248409\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [13440/15000 (89%)]\tLoss: 1.293248\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14080/15000 (94%)]\tLoss: 1.274603\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14720/15000 (98%)]\tLoss: 1.013117\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n", + "\u001b[94m\n", + "Test set: Avg. loss: 0.7313, Accuracy: 2043/2500 (82%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland: 0.8172000050544739\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", + "Calling aggregated_model_validation\n", + "\u001b[94mPerforming aggregated model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Test set: Avg. 
loss: 2.3319, Accuracy: 272/2500 (11%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSeattle value of 0.1088000014424324\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94mTrain Epoch: 1 [0/15000 (0%)]\tLoss: 2.352533\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [640/15000 (4%)]\tLoss: 2.339137\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1280/15000 (9%)]\tLoss: 2.271505\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1920/15000 (13%)]\tLoss: 2.289989\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [2560/15000 (17%)]\tLoss: 2.271177\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3200/15000 (21%)]\tLoss: 2.246639\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3840/15000 (26%)]\tLoss: 2.195283\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [4480/15000 (30%)]\tLoss: 2.062479\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5120/15000 (34%)]\tLoss: 2.093780\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5760/15000 (38%)]\tLoss: 2.041380\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [6400/15000 (43%)]\tLoss: 1.820046\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7040/15000 (47%)]\tLoss: 1.836269\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7680/15000 (51%)]\tLoss: 1.683574\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8320/15000 (55%)]\tLoss: 1.467967\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8960/15000 (60%)]\tLoss: 1.540522\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [9600/15000 (64%)]\tLoss: 1.263291\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10240/15000 (68%)]\tLoss: 1.366162\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10880/15000 (72%)]\tLoss: 1.164680\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [11520/15000 (77%)]\tLoss: 0.912429\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12160/15000 (81%)]\tLoss: 0.970741\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12800/15000 (85%)]\tLoss: 1.132654\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [13440/15000 (89%)]\tLoss: 0.851275\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14080/15000 (94%)]\tLoss: 1.117103\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14720/15000 (98%)]\tLoss: 0.931540\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n", + "\u001b[94m\n", + "Test set: Avg. loss: 0.6495, Accuracy: 2031/2500 (81%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle: 0.8123999834060669\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", + "Calling aggregated_model_validation\n", + "\u001b[94mPerforming aggregated model validation for collaborator Chandler\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Test set: Avg. 
loss: 2.3338, Accuracy: 284/2500 (11%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mChandler value of 0.1136000007390976\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94mTrain Epoch: 1 [0/15000 (0%)]\tLoss: 2.352149\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [640/15000 (4%)]\tLoss: 2.302715\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1280/15000 (9%)]\tLoss: 2.315893\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1920/15000 (13%)]\tLoss: 2.304854\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [2560/15000 (17%)]\tLoss: 2.304877\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3200/15000 (21%)]\tLoss: 2.232794\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3840/15000 (26%)]\tLoss: 2.221907\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [4480/15000 (30%)]\tLoss: 2.163441\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5120/15000 (34%)]\tLoss: 2.157472\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5760/15000 (38%)]\tLoss: 2.062167\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [6400/15000 (43%)]\tLoss: 2.074321\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7040/15000 (47%)]\tLoss: 2.086485\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7680/15000 (51%)]\tLoss: 1.760424\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8320/15000 (55%)]\tLoss: 1.859421\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8960/15000 (60%)]\tLoss: 1.761246\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [9600/15000 (64%)]\tLoss: 1.723659\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10240/15000 (68%)]\tLoss: 1.343333\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10880/15000 (72%)]\tLoss: 1.431239\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [11520/15000 (77%)]\tLoss: 1.217595\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12160/15000 (81%)]\tLoss: 1.334101\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12800/15000 (85%)]\tLoss: 1.210616\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [13440/15000 (89%)]\tLoss: 1.095489\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14080/15000 (94%)]\tLoss: 1.244167\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14720/15000 (98%)]\tLoss: 1.456573\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n", + "\u001b[94m\n", + "Test set: Avg. loss: 0.7379, Accuracy: 1977/2500 (79%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Chandler: 0.7907999753952026\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", + "Calling aggregated_model_validation\n", + "\u001b[94mPerforming aggregated model validation for collaborator Bangalore\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Test set: Avg. 
loss: 2.3345, Accuracy: 272/2500 (11%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mBangalore value of 0.1088000014424324\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94mTrain Epoch: 1 [0/15000 (0%)]\tLoss: 2.357561\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [640/15000 (4%)]\tLoss: 2.313371\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1280/15000 (9%)]\tLoss: 2.287950\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1920/15000 (13%)]\tLoss: 2.250711\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [2560/15000 (17%)]\tLoss: 2.268091\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3200/15000 (21%)]\tLoss: 2.169279\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3840/15000 (26%)]\tLoss: 2.222546\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [4480/15000 (30%)]\tLoss: 2.007314\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5120/15000 (34%)]\tLoss: 1.917653\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5760/15000 (38%)]\tLoss: 1.837887\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [6400/15000 (43%)]\tLoss: 1.878475\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7040/15000 (47%)]\tLoss: 1.594017\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7680/15000 (51%)]\tLoss: 1.511708\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8320/15000 (55%)]\tLoss: 1.271856\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8960/15000 (60%)]\tLoss: 1.558927\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [9600/15000 (64%)]\tLoss: 1.347723\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10240/15000 (68%)]\tLoss: 1.140704\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10880/15000 (72%)]\tLoss: 1.230179\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [11520/15000 (77%)]\tLoss: 1.153878\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12160/15000 (81%)]\tLoss: 1.055537\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12800/15000 (85%)]\tLoss: 1.085349\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [13440/15000 (89%)]\tLoss: 0.762103\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14080/15000 (94%)]\tLoss: 0.928343\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14720/15000 (98%)]\tLoss: 0.936020\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n", + "\u001b[94m\n", + "Test set: Avg. 
loss: 0.5911, Accuracy: 2113/2500 (85%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Bangalore: 0.8452000021934509\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", + "Calling join\n", + "\u001b[94mAverage aggregated model validation values = 0.11370000056922436\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mAverage training loss = 1.084312453866005\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mAverage local model validation values = 0.8163999915122986\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for join\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for join\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94mPerforming aggregated model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Test set: Avg. loss: 0.6913, Accuracy: 2113/2500 (85%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPortland value of 0.8452000021934509\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94mTrain Epoch: 1 [0/15000 (0%)]\tLoss: 1.038085\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [640/15000 (4%)]\tLoss: 0.984846\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1280/15000 (9%)]\tLoss: 0.956596\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1920/15000 (13%)]\tLoss: 1.218905\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [2560/15000 (17%)]\tLoss: 1.034170\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3200/15000 (21%)]\tLoss: 0.982977\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3840/15000 (26%)]\tLoss: 0.791037\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [4480/15000 (30%)]\tLoss: 0.817634\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5120/15000 (34%)]\tLoss: 1.143449\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5760/15000 (38%)]\tLoss: 0.992079\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [6400/15000 (43%)]\tLoss: 0.864237\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7040/15000 (47%)]\tLoss: 0.905026\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7680/15000 (51%)]\tLoss: 1.082687\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8320/15000 (55%)]\tLoss: 0.984108\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8960/15000 (60%)]\tLoss: 0.872094\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [9600/15000 (64%)]\tLoss: 0.677046\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10240/15000 (68%)]\tLoss: 1.044158\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10880/15000 (72%)]\tLoss: 0.805063\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [11520/15000 (77%)]\tLoss: 0.586559\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12160/15000 (81%)]\tLoss: 0.802089\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12800/15000 (85%)]\tLoss: 0.601361\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [13440/15000 
(89%)]\tLoss: 0.684089\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14080/15000 (94%)]\tLoss: 0.674800\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14720/15000 (98%)]\tLoss: 0.822161\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n", + "\u001b[94m\n", + "Test set: Avg. loss: 0.3770, Accuracy: 2212/2500 (88%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland: 0.8848000168800354\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", + "Calling aggregated_model_validation\n", + "\u001b[94mPerforming aggregated model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Test set: Avg. loss: 0.6930, Accuracy: 2134/2500 (85%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSeattle value of 0.853600025177002\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94mTrain Epoch: 1 [0/15000 (0%)]\tLoss: 1.074149\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [640/15000 (4%)]\tLoss: 0.788044\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1280/15000 (9%)]\tLoss: 0.824622\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1920/15000 (13%)]\tLoss: 0.708563\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [2560/15000 (17%)]\tLoss: 0.743329\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3200/15000 (21%)]\tLoss: 0.991388\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3840/15000 (26%)]\tLoss: 0.698764\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [4480/15000 (30%)]\tLoss: 0.800052\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5120/15000 (34%)]\tLoss: 0.866619\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5760/15000 (38%)]\tLoss: 0.776506\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [6400/15000 (43%)]\tLoss: 0.761863\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7040/15000 (47%)]\tLoss: 0.635450\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7680/15000 (51%)]\tLoss: 0.523824\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8320/15000 (55%)]\tLoss: 0.870733\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8960/15000 (60%)]\tLoss: 0.598420\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [9600/15000 (64%)]\tLoss: 0.530209\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10240/15000 (68%)]\tLoss: 0.842757\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10880/15000 (72%)]\tLoss: 0.635391\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [11520/15000 (77%)]\tLoss: 0.490621\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12160/15000 (81%)]\tLoss: 0.576472\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12800/15000 (85%)]\tLoss: 0.357680\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [13440/15000 
(89%)]\tLoss: 0.738054\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14080/15000 (94%)]\tLoss: 0.490220\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14720/15000 (98%)]\tLoss: 0.548587\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n", + "\u001b[94m\n", + "Test set: Avg. loss: 0.3586, Accuracy: 2230/2500 (89%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle: 0.8920000195503235\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", + "Calling aggregated_model_validation\n", + "\u001b[94mPerforming aggregated model validation for collaborator Chandler\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Test set: Avg. loss: 0.7033, Accuracy: 2094/2500 (84%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mChandler value of 0.8375999927520752\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94mTrain Epoch: 1 [0/15000 (0%)]\tLoss: 0.974749\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [640/15000 (4%)]\tLoss: 1.142256\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1280/15000 (9%)]\tLoss: 1.060130\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1920/15000 (13%)]\tLoss: 1.345984\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [2560/15000 (17%)]\tLoss: 0.989349\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3200/15000 (21%)]\tLoss: 0.891025\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3840/15000 (26%)]\tLoss: 1.026930\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [4480/15000 (30%)]\tLoss: 0.817803\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5120/15000 (34%)]\tLoss: 0.893464\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5760/15000 (38%)]\tLoss: 0.902959\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [6400/15000 (43%)]\tLoss: 0.776052\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7040/15000 (47%)]\tLoss: 0.798137\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7680/15000 (51%)]\tLoss: 0.700132\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8320/15000 (55%)]\tLoss: 0.609538\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8960/15000 (60%)]\tLoss: 0.676106\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [9600/15000 (64%)]\tLoss: 0.885856\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10240/15000 (68%)]\tLoss: 0.794635\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10880/15000 (72%)]\tLoss: 0.946624\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [11520/15000 (77%)]\tLoss: 0.588031\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12160/15000 (81%)]\tLoss: 0.673586\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12800/15000 (85%)]\tLoss: 0.605498\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [13440/15000 
(89%)]\tLoss: 0.692368\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14080/15000 (94%)]\tLoss: 0.727418\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14720/15000 (98%)]\tLoss: 0.541666\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n", + "\u001b[94m\n", + "Test set: Avg. loss: 0.3773, Accuracy: 2221/2500 (89%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Chandler: 0.8884000182151794\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", + "Calling aggregated_model_validation\n", + "\u001b[94mPerforming aggregated model validation for collaborator Bangalore\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Test set: Avg. loss: 0.6856, Accuracy: 2127/2500 (85%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mBangalore value of 0.8507999777793884\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94mTrain Epoch: 1 [0/15000 (0%)]\tLoss: 1.024403\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [640/15000 (4%)]\tLoss: 0.831721\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1280/15000 (9%)]\tLoss: 0.877109\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1920/15000 (13%)]\tLoss: 0.689435\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [2560/15000 (17%)]\tLoss: 0.774114\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3200/15000 (21%)]\tLoss: 0.671120\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3840/15000 (26%)]\tLoss: 0.744448\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [4480/15000 (30%)]\tLoss: 0.772162\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5120/15000 (34%)]\tLoss: 0.916608\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5760/15000 (38%)]\tLoss: 0.591479\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [6400/15000 (43%)]\tLoss: 0.623087\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7040/15000 (47%)]\tLoss: 0.545670\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7680/15000 (51%)]\tLoss: 0.513708\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8320/15000 (55%)]\tLoss: 0.736596\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8960/15000 (60%)]\tLoss: 0.504368\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [9600/15000 (64%)]\tLoss: 0.795776\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10240/15000 (68%)]\tLoss: 0.772787\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10880/15000 (72%)]\tLoss: 0.594993\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [11520/15000 (77%)]\tLoss: 0.508895\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12160/15000 (81%)]\tLoss: 0.499484\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12800/15000 (85%)]\tLoss: 0.520032\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [13440/15000 
(89%)]\tLoss: 0.492095\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14080/15000 (94%)]\tLoss: 0.467968\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14720/15000 (98%)]\tLoss: 0.747039\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n", + "\u001b[94m\n", + "Test set: Avg. loss: 0.3471, Accuracy: 2233/2500 (89%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Bangalore: 0.8931999802589417\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", + "Calling join\n", + "\u001b[94mAverage aggregated model validation values = 0.8467999994754791\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mAverage training loss = 0.6648633033037186\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mAverage local model validation values = 0.88960000872612\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for join\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for join\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling end\n", + "\u001b[94mThis is the end of the flow\u001b[0m\u001b[94m\n", + "\u001b[0mSaving data artifacts for end\n", + "Saved data artifacts for end\n" + ] + } + ], + "source": [ + "model = None\n", + "best_model = None\n", + "optimizer = None\n", + "flflow = FederatedFlow(model, optimizer, rounds=2, checkpoint=True)\n", + "flflow.runtime = local_runtime\n", + "flflow.run()" + ] + }, + { + "cell_type": "markdown", + "id": "9a7cc8f7", + "metadata": {}, + "source": [ + "Now that the flow has completed, let's get the final model and accuracy" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "863761fe", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Sample of the final model weights: tensor([[[ 0.1219, -0.0850, -0.0638, 0.0587, -0.2061],\n", + " [ 0.1559, -0.0204, 0.1003, 0.0273, -0.0150],\n", + " [ 0.1037, 0.0561, 0.1091, -0.0362, 0.0187],\n", + " [ 0.0092, 0.0607, 0.0319, 0.2063, 0.0913],\n", + " [-0.0773, -0.1235, -0.0412, -0.0902, -0.0545]]])\n", + "\n", + "Final aggregated model accuracy for 2 rounds of training: 0.8467999994754791\n" + ] + } + ], + "source": [ + "print(f'Sample of the final model weights: {flflow.model.state_dict()[\"conv1.weight\"][0]}')\n", + "\n", + "print(f'\\nFinal aggregated model accuracy for {flflow.rounds} rounds of training: {flflow.aggregated_model_accuracy}')" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "5dd1558c", + "metadata": {}, + "source": [ + "We can get the final model, and all other aggregator attributes after the flow completes. But what if there's an intermediate model task and its specific output that we want to look at in detail? 
This is where **checkpointing** and reuse of Metaflow tooling come in handy.\n", + "\n", + "Let's make a tweak to the flow object, and run the experiment one more time (we can even use our previous model / optimizer as a base for the experiment)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "443b06e2", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Created flow FederatedFlow\n", + "\n", + "Calling start\n", + "\u001b[94mPerforming initialization for model\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for start\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for start\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94mPerforming aggregated model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/tmp/ipykernel_252106/3655034279.py:59: UserWarning: Implicit dimension choice for log_softmax has been deprecated. Change the call to include dim=X as an argument.\n", + " return F.log_softmax(x)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m\n", + "Test set: Avg. loss: 0.3249, Accuracy: 2275/2500 (91%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPortland value of 0.9100000262260437\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94mTrain Epoch: 1 [0/15000 (0%)]\tLoss: 0.705955\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [640/15000 (4%)]\tLoss: 0.617308\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1280/15000 (9%)]\tLoss: 0.623395\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1920/15000 (13%)]\tLoss: 0.713938\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [2560/15000 (17%)]\tLoss: 0.714206\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3200/15000 (21%)]\tLoss: 0.563812\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3840/15000 (26%)]\tLoss: 0.717757\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [4480/15000 (30%)]\tLoss: 0.394908\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5120/15000 (34%)]\tLoss: 0.826978\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5760/15000 (38%)]\tLoss: 0.462670\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [6400/15000 (43%)]\tLoss: 0.698488\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7040/15000 (47%)]\tLoss: 0.846376\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7680/15000 (51%)]\tLoss: 0.619333\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8320/15000 (55%)]\tLoss: 0.575636\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8960/15000 (60%)]\tLoss: 0.622939\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [9600/15000 (64%)]\tLoss: 0.886747\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10240/15000 (68%)]\tLoss: 0.665729\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10880/15000 (72%)]\tLoss: 0.516920\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [11520/15000 (77%)]\tLoss: 0.859567\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12160/15000 (81%)]\tLoss: 
0.466999\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12800/15000 (85%)]\tLoss: 0.533711\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [13440/15000 (89%)]\tLoss: 0.521279\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14080/15000 (94%)]\tLoss: 0.658550\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14720/15000 (98%)]\tLoss: 0.926817\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n", + "\u001b[94m\n", + "Test set: Avg. loss: 0.2697, Accuracy: 2313/2500 (93%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland: 0.9251999855041504\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", + "Calling aggregated_model_validation\n", + "\u001b[94mPerforming aggregated model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Test set: Avg. loss: 0.3345, Accuracy: 2265/2500 (91%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSeattle value of 0.906000018119812\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94mTrain Epoch: 1 [0/15000 (0%)]\tLoss: 0.390258\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [640/15000 (4%)]\tLoss: 0.465562\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1280/15000 (9%)]\tLoss: 0.622512\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1920/15000 (13%)]\tLoss: 0.569061\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [2560/15000 (17%)]\tLoss: 0.534309\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3200/15000 (21%)]\tLoss: 0.609027\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3840/15000 (26%)]\tLoss: 0.656029\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [4480/15000 (30%)]\tLoss: 0.454295\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5120/15000 (34%)]\tLoss: 0.427925\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5760/15000 (38%)]\tLoss: 0.572590\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [6400/15000 (43%)]\tLoss: 0.434475\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7040/15000 (47%)]\tLoss: 0.433428\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7680/15000 (51%)]\tLoss: 0.583645\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8320/15000 (55%)]\tLoss: 0.375552\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8960/15000 (60%)]\tLoss: 0.558989\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [9600/15000 (64%)]\tLoss: 0.561380\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10240/15000 (68%)]\tLoss: 0.480449\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10880/15000 (72%)]\tLoss: 0.548253\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [11520/15000 (77%)]\tLoss: 0.320670\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12160/15000 (81%)]\tLoss: 
0.515821\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12800/15000 (85%)]\tLoss: 0.382779\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [13440/15000 (89%)]\tLoss: 0.295870\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14080/15000 (94%)]\tLoss: 0.286087\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14720/15000 (98%)]\tLoss: 0.469384\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n", + "\u001b[94m\n", + "Test set: Avg. loss: 0.2900, Accuracy: 2293/2500 (92%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle: 0.9172000288963318\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", + "Calling aggregated_model_validation\n", + "\u001b[94mPerforming aggregated model validation for collaborator Chandler\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Test set: Avg. loss: 0.3359, Accuracy: 2256/2500 (90%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mChandler value of 0.902400016784668\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94mTrain Epoch: 1 [0/15000 (0%)]\tLoss: 0.986641\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [640/15000 (4%)]\tLoss: 0.487543\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1280/15000 (9%)]\tLoss: 0.999929\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1920/15000 (13%)]\tLoss: 0.838406\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [2560/15000 (17%)]\tLoss: 1.006288\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3200/15000 (21%)]\tLoss: 0.875594\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3840/15000 (26%)]\tLoss: 0.684269\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [4480/15000 (30%)]\tLoss: 0.751433\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5120/15000 (34%)]\tLoss: 0.948535\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5760/15000 (38%)]\tLoss: 0.701165\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [6400/15000 (43%)]\tLoss: 0.605181\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7040/15000 (47%)]\tLoss: 0.572135\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7680/15000 (51%)]\tLoss: 0.584587\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8320/15000 (55%)]\tLoss: 0.677527\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8960/15000 (60%)]\tLoss: 0.775974\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [9600/15000 (64%)]\tLoss: 0.579040\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10240/15000 (68%)]\tLoss: 0.779329\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10880/15000 (72%)]\tLoss: 0.705849\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [11520/15000 (77%)]\tLoss: 0.367069\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12160/15000 (81%)]\tLoss: 
0.578917\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12800/15000 (85%)]\tLoss: 0.574488\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [13440/15000 (89%)]\tLoss: 0.290978\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14080/15000 (94%)]\tLoss: 0.499532\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14720/15000 (98%)]\tLoss: 0.531283\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n", + "\u001b[94m\n", + "Test set: Avg. loss: 0.2879, Accuracy: 2271/2500 (91%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Chandler: 0.9083999991416931\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", + "Calling aggregated_model_validation\n", + "\u001b[94mPerforming aggregated model validation for collaborator Bangalore\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Test set: Avg. loss: 0.3211, Accuracy: 2278/2500 (91%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mBangalore value of 0.9111999869346619\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94mTrain Epoch: 1 [0/15000 (0%)]\tLoss: 0.606566\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [640/15000 (4%)]\tLoss: 0.423930\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1280/15000 (9%)]\tLoss: 0.582356\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1920/15000 (13%)]\tLoss: 0.404679\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [2560/15000 (17%)]\tLoss: 0.733127\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3200/15000 (21%)]\tLoss: 0.458385\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3840/15000 (26%)]\tLoss: 0.461127\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [4480/15000 (30%)]\tLoss: 0.653782\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5120/15000 (34%)]\tLoss: 0.411580\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5760/15000 (38%)]\tLoss: 0.520569\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [6400/15000 (43%)]\tLoss: 0.535583\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7040/15000 (47%)]\tLoss: 0.577438\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7680/15000 (51%)]\tLoss: 0.449876\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8320/15000 (55%)]\tLoss: 0.511897\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8960/15000 (60%)]\tLoss: 0.581871\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [9600/15000 (64%)]\tLoss: 0.644637\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10240/15000 (68%)]\tLoss: 0.567783\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10880/15000 (72%)]\tLoss: 0.576815\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [11520/15000 (77%)]\tLoss: 0.605296\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12160/15000 (81%)]\tLoss: 
0.441371\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12800/15000 (85%)]\tLoss: 0.388708\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [13440/15000 (89%)]\tLoss: 0.354411\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14080/15000 (94%)]\tLoss: 0.531725\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14720/15000 (98%)]\tLoss: 0.479206\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n", + "\u001b[94m\n", + "Test set: Avg. loss: 0.2704, Accuracy: 2306/2500 (92%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Bangalore: 0.9223999977111816\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", + "Calling join\n", + "\u001b[94mAverage aggregated model validation values = 0.9074000120162964\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mAverage training loss = 0.6016728430986404\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mAverage local model validation values = 0.9183000028133392\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for join\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for join\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94mPerforming aggregated model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Test set: Avg. 
loss: 0.2481, Accuracy: 2323/2500 (93%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPortland value of 0.9291999936103821\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94mTrain Epoch: 1 [0/15000 (0%)]\tLoss: 0.630017\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [640/15000 (4%)]\tLoss: 0.503889\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1280/15000 (9%)]\tLoss: 0.640408\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1920/15000 (13%)]\tLoss: 0.458764\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [2560/15000 (17%)]\tLoss: 0.736370\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3200/15000 (21%)]\tLoss: 0.593804\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3840/15000 (26%)]\tLoss: 0.686028\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [4480/15000 (30%)]\tLoss: 0.798474\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5120/15000 (34%)]\tLoss: 0.703022\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5760/15000 (38%)]\tLoss: 0.405487\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [6400/15000 (43%)]\tLoss: 0.453337\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7040/15000 (47%)]\tLoss: 0.719088\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7680/15000 (51%)]\tLoss: 0.863970\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8320/15000 (55%)]\tLoss: 0.461701\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8960/15000 (60%)]\tLoss: 0.630442\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [9600/15000 (64%)]\tLoss: 0.621713\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10240/15000 (68%)]\tLoss: 0.644361\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10880/15000 (72%)]\tLoss: 0.644697\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [11520/15000 (77%)]\tLoss: 0.399276\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12160/15000 (81%)]\tLoss: 0.469072\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12800/15000 (85%)]\tLoss: 0.474146\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [13440/15000 (89%)]\tLoss: 0.398161\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14080/15000 (94%)]\tLoss: 0.599437\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14720/15000 (98%)]\tLoss: 0.439608\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n", + "\u001b[94m\n", + "Test set: Avg. loss: 0.2183, Accuracy: 2336/2500 (93%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland: 0.9344000220298767\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", + "Calling aggregated_model_validation\n", + "\u001b[94mPerforming aggregated model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Test set: Avg. 
loss: 0.2562, Accuracy: 2317/2500 (93%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSeattle value of 0.926800012588501\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94mTrain Epoch: 1 [0/15000 (0%)]\tLoss: 0.385337\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [640/15000 (4%)]\tLoss: 0.470521\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1280/15000 (9%)]\tLoss: 0.459677\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1920/15000 (13%)]\tLoss: 0.301743\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [2560/15000 (17%)]\tLoss: 0.486080\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3200/15000 (21%)]\tLoss: 0.476714\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3840/15000 (26%)]\tLoss: 0.440658\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [4480/15000 (30%)]\tLoss: 0.299032\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5120/15000 (34%)]\tLoss: 0.578410\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5760/15000 (38%)]\tLoss: 0.259214\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [6400/15000 (43%)]\tLoss: 0.277751\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7040/15000 (47%)]\tLoss: 0.336378\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7680/15000 (51%)]\tLoss: 0.357706\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8320/15000 (55%)]\tLoss: 0.323220\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8960/15000 (60%)]\tLoss: 0.347599\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [9600/15000 (64%)]\tLoss: 0.369618\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10240/15000 (68%)]\tLoss: 0.364295\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10880/15000 (72%)]\tLoss: 0.492413\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [11520/15000 (77%)]\tLoss: 0.271388\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12160/15000 (81%)]\tLoss: 0.399994\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12800/15000 (85%)]\tLoss: 0.334124\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [13440/15000 (89%)]\tLoss: 0.313602\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14080/15000 (94%)]\tLoss: 0.413516\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14720/15000 (98%)]\tLoss: 0.430733\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n", + "\u001b[94m\n", + "Test set: Avg. loss: 0.2245, Accuracy: 2336/2500 (93%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle: 0.9344000220298767\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", + "Calling aggregated_model_validation\n", + "\u001b[94mPerforming aggregated model validation for collaborator Chandler\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Test set: Avg. 
loss: 0.2511, Accuracy: 2312/2500 (92%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mChandler value of 0.9247999787330627\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94mTrain Epoch: 1 [0/15000 (0%)]\tLoss: 0.502659\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [640/15000 (4%)]\tLoss: 0.646937\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1280/15000 (9%)]\tLoss: 0.521962\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1920/15000 (13%)]\tLoss: 0.603257\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [2560/15000 (17%)]\tLoss: 0.650282\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3200/15000 (21%)]\tLoss: 0.574407\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3840/15000 (26%)]\tLoss: 0.635170\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [4480/15000 (30%)]\tLoss: 0.475845\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5120/15000 (34%)]\tLoss: 0.528372\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5760/15000 (38%)]\tLoss: 0.500761\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [6400/15000 (43%)]\tLoss: 0.505273\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7040/15000 (47%)]\tLoss: 0.738660\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7680/15000 (51%)]\tLoss: 0.355279\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8320/15000 (55%)]\tLoss: 0.360918\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8960/15000 (60%)]\tLoss: 0.712853\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [9600/15000 (64%)]\tLoss: 0.650161\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10240/15000 (68%)]\tLoss: 0.505021\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10880/15000 (72%)]\tLoss: 0.459242\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [11520/15000 (77%)]\tLoss: 0.595233\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12160/15000 (81%)]\tLoss: 0.449048\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12800/15000 (85%)]\tLoss: 0.530338\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [13440/15000 (89%)]\tLoss: 0.592250\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14080/15000 (94%)]\tLoss: 0.518594\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14720/15000 (98%)]\tLoss: 0.716185\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n", + "\u001b[94m\n", + "Test set: Avg. loss: 0.2376, Accuracy: 2323/2500 (93%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Chandler: 0.9291999936103821\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", + "Calling aggregated_model_validation\n", + "\u001b[94mPerforming aggregated model validation for collaborator Bangalore\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Test set: Avg. 
loss: 0.2437, Accuracy: 2327/2500 (93%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mBangalore value of 0.9308000206947327\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for aggregated_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94mTrain Epoch: 1 [0/15000 (0%)]\tLoss: 0.373329\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [640/15000 (4%)]\tLoss: 0.367368\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1280/15000 (9%)]\tLoss: 0.246474\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1920/15000 (13%)]\tLoss: 0.561947\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [2560/15000 (17%)]\tLoss: 0.213358\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3200/15000 (21%)]\tLoss: 0.347174\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3840/15000 (26%)]\tLoss: 0.427229\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [4480/15000 (30%)]\tLoss: 0.467920\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5120/15000 (34%)]\tLoss: 0.509551\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5760/15000 (38%)]\tLoss: 0.502692\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [6400/15000 (43%)]\tLoss: 0.362033\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7040/15000 (47%)]\tLoss: 0.366702\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7680/15000 (51%)]\tLoss: 0.621961\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8320/15000 (55%)]\tLoss: 0.473972\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8960/15000 (60%)]\tLoss: 0.648961\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [9600/15000 (64%)]\tLoss: 0.290578\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10240/15000 (68%)]\tLoss: 0.334747\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10880/15000 (72%)]\tLoss: 0.323814\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [11520/15000 (77%)]\tLoss: 0.343845\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12160/15000 (81%)]\tLoss: 0.341860\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12800/15000 (85%)]\tLoss: 0.212641\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [13440/15000 (89%)]\tLoss: 0.160580\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14080/15000 (94%)]\tLoss: 0.487189\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14720/15000 (98%)]\tLoss: 0.377483\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for train\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n", + "\u001b[94m\n", + "Test set: Avg. 
loss: 0.2261, Accuracy: 2335/2500 (93%)\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Bangalore: 0.9340000152587891\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for local_model_validation\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", + "Calling join\n", + "\u001b[94mAverage aggregated model validation values = 0.9279000014066696\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mAverage training loss = 0.491002157330513\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mAverage local model validation values = 0.9330000132322311\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaving data artifacts for join\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mSaved data artifacts for join\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling end\n", + "\u001b[94mThis is the end of the flow\u001b[0m\u001b[94m\n", + "\u001b[0mSaving data artifacts for end\n", + "Saved data artifacts for end\n" + ] + } + ], + "source": [ + "flflow2 = FederatedFlow(model=flflow.model, optimizer=flflow.optimizer, rounds=2, checkpoint=True)\n", + "flflow2.runtime = local_runtime\n", + "flflow2.run()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "a61a876d", + "metadata": {}, + "source": [ + "Now that the flow is complete, let's dig into some of the information captured along the way" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "verified-favor", + "metadata": {}, + "outputs": [], + "source": [ + "run_id = flflow2._run_id" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "statutory-prime", + "metadata": {}, + "outputs": [], + "source": [ + "from metaflow import Metaflow, Flow, Task, Step" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "fifty-tamil", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[Flow('FederatedFlow')]" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "m = Metaflow()\n", + "list(m)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "b55ccb19", + "metadata": {}, + "source": [ + "For existing users of Metaflow, you'll notice this is the same way you would examine a flow after completion. 
Let's look at the latest run that generated some results:" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "grand-defendant", + "metadata": {}, + "outputs": [], + "source": [ + "f = Flow('FederatedFlow').latest_run" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "incident-novelty", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Run('FederatedFlow/1734542077639071')" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "f" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "e5efa1ff", + "metadata": {}, + "source": [ + "And its list of steps" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "increasing-dressing", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[Step('FederatedFlow/1734542077639071/end'),\n", + " Step('FederatedFlow/1734542077639071/join'),\n", + " Step('FederatedFlow/1734542077639071/local_model_validation'),\n", + " Step('FederatedFlow/1734542077639071/train'),\n", + " Step('FederatedFlow/1734542077639071/aggregated_model_validation'),\n", + " Step('FederatedFlow/1734542077639071/start')]" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "list(f)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "3292b2e0", + "metadata": {}, + "source": [ + "This matches the list of steps executed in the flow, so far so good..." + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "olympic-latter", + "metadata": {}, + "outputs": [], + "source": [ + "s = Step(f'FederatedFlow/{run_id}/train')" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "awful-posting", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Step('FederatedFlow/1734542077639071/train')" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "s" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "median-double", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[Task('FederatedFlow/1734542077639071/train/25'),\n", + " Task('FederatedFlow/1734542077639071/train/22'),\n", + " Task('FederatedFlow/1734542077639071/train/19'),\n", + " Task('FederatedFlow/1734542077639071/train/16'),\n", + " Task('FederatedFlow/1734542077639071/train/12'),\n", + " Task('FederatedFlow/1734542077639071/train/9'),\n", + " Task('FederatedFlow/1734542077639071/train/6'),\n", + " Task('FederatedFlow/1734542077639071/train/3')]" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "list(s)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "eb1866b7", + "metadata": {}, + "source": [ + "Now we see **8** train tasks: **4** collaborators each performed **2** rounds of model training" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "adult-maldives", + "metadata": {}, + "outputs": [], + "source": [ + "t = Task(f'FederatedFlow/{run_id}/train/9')" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "changed-hungarian", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Task('FederatedFlow/1734542077639071/train/9')" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "t" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "ef877a50", + "metadata": {}, +
"source": [ + "Now let's look at the data artifacts this task generated" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "academic-hierarchy", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "t.data" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "thermal-torture", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Chandler'" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "t.data.input" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "9826c45f", + "metadata": {}, + "source": [ + "Now let's look at its log output (stdout)" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "auburn-working", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mTrain Epoch: 1 [0/15000 (0%)]\tLoss: 0.986641\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [640/15000 (4%)]\tLoss: 0.487543\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1280/15000 (9%)]\tLoss: 0.999929\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [1920/15000 (13%)]\tLoss: 0.838406\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [2560/15000 (17%)]\tLoss: 1.006288\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3200/15000 (21%)]\tLoss: 0.875594\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [3840/15000 (26%)]\tLoss: 0.684269\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [4480/15000 (30%)]\tLoss: 0.751433\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5120/15000 (34%)]\tLoss: 0.948535\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [5760/15000 (38%)]\tLoss: 0.701165\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [6400/15000 (43%)]\tLoss: 0.605181\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7040/15000 (47%)]\tLoss: 0.572135\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [7680/15000 (51%)]\tLoss: 0.584587\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8320/15000 (55%)]\tLoss: 0.677527\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [8960/15000 (60%)]\tLoss: 0.775974\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [9600/15000 (64%)]\tLoss: 0.579040\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10240/15000 (68%)]\tLoss: 0.779329\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [10880/15000 (72%)]\tLoss: 0.705849\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [11520/15000 (77%)]\tLoss: 0.367069\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12160/15000 (81%)]\tLoss: 0.578917\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [12800/15000 (85%)]\tLoss: 0.574488\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [13440/15000 (89%)]\tLoss: 0.290978\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14080/15000 (94%)]\tLoss: 0.499532\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTrain Epoch: 1 [14720/15000 (98%)]\tLoss: 0.531283\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "\n" + ] + } + ], + "source": [ + "print(t.stdout)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "dd962ddc", + "metadata": {}, + "source": [ + "And any error logs? 
(stderr)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "f439dff8", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "print(t.stderr)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "426f2395", + "metadata": {}, + "source": [ + "# Congratulations!\n", + "Now that you've completed your first workflow interface quickstart notebook, see some of the more advanced things you can do in our [other tutorials](https://github.com/securefederatedai/openfl/tree/develop/openfl-tutorials/experimental/workflow), including:\n", + "\n", + "- Using the LocalRuntime Ray Backend for dedicated GPU access\n", + "- Vertical Federated Learning\n", + "- Model Watermarking\n", + "- Differential Privacy\n", + "- And More!" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/openfl/component/envoy/__init__.py b/openfl/component/envoy/__init__.py index 3282962733..36f2c3ece4 100644 --- a/openfl/component/envoy/__init__.py +++ b/openfl/component/envoy/__init__.py @@ -2,8 +2,6 @@ # SPDX-License-Identifier: Apache-2.0 -"""Envoy package.""" - from .envoy import Envoy __all__ = [