+
+ Read the Docs
+ v: ${config.versions.current.slug}
+
+
+
+
+ ${renderLanguages(config)}
+ ${renderVersions(config)}
+ ${renderDownloads(config)}
+
+ On Read the Docs
+
+ Project Home
+
+
+ Builds
+
+
+ Downloads
+
+
+
+ Search
+
+
+
+
+
+
+ Hosted by Read the Docs
+
+
+
+ `;
+
+ // Inject the generated flyout into the body HTML element.
+ document.body.insertAdjacentHTML("beforeend", flyout);
+
+ // Trigger the Read the Docs Addons Search modal when clicking on the "Search docs" input from inside the flyout.
+ document
+ .querySelector("#flyout-search-form")
+ .addEventListener("focusin", () => {
+ const event = new CustomEvent("readthedocs-search-show");
+ document.dispatchEvent(event);
+ });
+ })
+}
+
+if (themeLanguageSelector || themeVersionSelector) {
+ function onSelectorSwitch(event) {
+ const option = event.target.selectedIndex;
+ const item = event.target.options[option];
+ window.location.href = item.dataset.url;
+ }
+
+ document.addEventListener("readthedocs-addons-data-ready", function (event) {
+ const config = event.detail.data();
+
+ const versionSwitch = document.querySelector(
+ "div.switch-menus > div.version-switch",
+ );
+ if (themeVersionSelector) {
+ let versions = config.versions.active;
+ if (config.versions.current.hidden || config.versions.current.type === "external") {
+ versions.unshift(config.versions.current);
+ }
+ const versionSelect = `
+
+ ${versions
+ .map(
+ (version) => `
+
+ ${version.slug}
+ `,
+ )
+ .join("\n")}
+
+ `;
+
+ versionSwitch.innerHTML = versionSelect;
+ versionSwitch.firstElementChild.addEventListener("change", onSelectorSwitch);
+ }
+
+ const languageSwitch = document.querySelector(
+ "div.switch-menus > div.language-switch",
+ );
+
+ if (themeLanguageSelector) {
+ if (config.projects.translations.length) {
+ // Add the current language to the options on the selector
+ let languages = config.projects.translations.concat(
+ config.projects.current,
+ );
+ languages = languages.sort((a, b) =>
+ a.language.name.localeCompare(b.language.name),
+ );
+
+ const languageSelect = `
+
+ ${languages
+ .map(
+ (language) => `
+
+ ${language.language.name}
+ `,
+ )
+ .join("\n")}
+
+ `;
+
+ languageSwitch.innerHTML = languageSelect;
+ languageSwitch.firstElementChild.addEventListener("change", onSelectorSwitch);
+ }
+ else {
+ languageSwitch.remove();
+ }
+ }
+ });
+}
+
+document.addEventListener("readthedocs-addons-data-ready", function (event) {
+ // Trigger the Read the Docs Addons Search modal when clicking on "Search docs" input from the topnav.
+ document
+ .querySelector("[role='search'] input")
+ .addEventListener("focusin", () => {
+ const event = new CustomEvent("readthedocs-search-show");
+ document.dispatchEvent(event);
+ });
+});
\ No newline at end of file
diff --git a/_static/language_data.js b/_static/language_data.js
new file mode 100644
index 00000000..250f5665
--- /dev/null
+++ b/_static/language_data.js
@@ -0,0 +1,199 @@
+/*
+ * language_data.js
+ * ~~~~~~~~~~~~~~~~
+ *
+ * This script contains the language-specific data used by searchtools.js,
+ * namely the list of stopwords, stemmer, scorer and splitter.
+ *
+ * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"];
+
+
+/* Non-minified version is copied as a separate JS file, is available */
+
+/**
+ * Porter Stemmer
+ */
+var Stemmer = function() {
+
+ var step2list = {
+ ational: 'ate',
+ tional: 'tion',
+ enci: 'ence',
+ anci: 'ance',
+ izer: 'ize',
+ bli: 'ble',
+ alli: 'al',
+ entli: 'ent',
+ eli: 'e',
+ ousli: 'ous',
+ ization: 'ize',
+ ation: 'ate',
+ ator: 'ate',
+ alism: 'al',
+ iveness: 'ive',
+ fulness: 'ful',
+ ousness: 'ous',
+ aliti: 'al',
+ iviti: 'ive',
+ biliti: 'ble',
+ logi: 'log'
+ };
+
+ var step3list = {
+ icate: 'ic',
+ ative: '',
+ alize: 'al',
+ iciti: 'ic',
+ ical: 'ic',
+ ful: '',
+ ness: ''
+ };
+
+ var c = "[^aeiou]"; // consonant
+ var v = "[aeiouy]"; // vowel
+ var C = c + "[^aeiouy]*"; // consonant sequence
+ var V = v + "[aeiou]*"; // vowel sequence
+
+ var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0
+ var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1
+ var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1
+ var s_v = "^(" + C + ")?" + v; // vowel in stem
+
+ this.stemWord = function (w) {
+ var stem;
+ var suffix;
+ var firstch;
+ var origword = w;
+
+ if (w.length < 3)
+ return w;
+
+ var re;
+ var re2;
+ var re3;
+ var re4;
+
+ firstch = w.substr(0,1);
+ if (firstch == "y")
+ w = firstch.toUpperCase() + w.substr(1);
+
+ // Step 1a
+ re = /^(.+?)(ss|i)es$/;
+ re2 = /^(.+?)([^s])s$/;
+
+ if (re.test(w))
+ w = w.replace(re,"$1$2");
+ else if (re2.test(w))
+ w = w.replace(re2,"$1$2");
+
+ // Step 1b
+ re = /^(.+?)eed$/;
+ re2 = /^(.+?)(ed|ing)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ re = new RegExp(mgr0);
+ if (re.test(fp[1])) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+ }
+ else if (re2.test(w)) {
+ var fp = re2.exec(w);
+ stem = fp[1];
+ re2 = new RegExp(s_v);
+ if (re2.test(stem)) {
+ w = stem;
+ re2 = /(at|bl|iz)$/;
+ re3 = new RegExp("([^aeiouylsz])\\1$");
+ re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
+ if (re2.test(w))
+ w = w + "e";
+ else if (re3.test(w)) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+ else if (re4.test(w))
+ w = w + "e";
+ }
+ }
+
+ // Step 1c
+ re = /^(.+?)y$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(s_v);
+ if (re.test(stem))
+ w = stem + "i";
+ }
+
+ // Step 2
+ re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ suffix = fp[2];
+ re = new RegExp(mgr0);
+ if (re.test(stem))
+ w = stem + step2list[suffix];
+ }
+
+ // Step 3
+ re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ suffix = fp[2];
+ re = new RegExp(mgr0);
+ if (re.test(stem))
+ w = stem + step3list[suffix];
+ }
+
+ // Step 4
+ re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
+ re2 = /^(.+?)(s|t)(ion)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(mgr1);
+ if (re.test(stem))
+ w = stem;
+ }
+ else if (re2.test(w)) {
+ var fp = re2.exec(w);
+ stem = fp[1] + fp[2];
+ re2 = new RegExp(mgr1);
+ if (re2.test(stem))
+ w = stem;
+ }
+
+ // Step 5
+ re = /^(.+?)e$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(mgr1);
+ re2 = new RegExp(meq1);
+ re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
+ if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
+ w = stem;
+ }
+ re = /ll$/;
+ re2 = new RegExp(mgr1);
+ if (re.test(w) && re2.test(w)) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+
+ // and turn initial Y back to y
+ if (firstch == "y")
+ w = firstch.toLowerCase() + w.substr(1);
+ return w;
+ }
+}
+
diff --git a/_static/minus.png b/_static/minus.png
new file mode 100644
index 00000000..d96755fd
Binary files /dev/null and b/_static/minus.png differ
diff --git a/_static/nbsphinx-broken-thumbnail.svg b/_static/nbsphinx-broken-thumbnail.svg
new file mode 100644
index 00000000..4919ca88
--- /dev/null
+++ b/_static/nbsphinx-broken-thumbnail.svg
@@ -0,0 +1,9 @@
+
+
+
+
diff --git a/_static/nbsphinx-code-cells.css b/_static/nbsphinx-code-cells.css
new file mode 100644
index 00000000..a3fb27c3
--- /dev/null
+++ b/_static/nbsphinx-code-cells.css
@@ -0,0 +1,259 @@
+/* remove conflicting styling from Sphinx themes */
+div.nbinput.container div.prompt *,
+div.nboutput.container div.prompt *,
+div.nbinput.container div.input_area pre,
+div.nboutput.container div.output_area pre,
+div.nbinput.container div.input_area .highlight,
+div.nboutput.container div.output_area .highlight {
+ border: none;
+ padding: 0;
+ margin: 0;
+ box-shadow: none;
+}
+
+div.nbinput.container > div[class*=highlight],
+div.nboutput.container > div[class*=highlight] {
+ margin: 0;
+}
+
+div.nbinput.container div.prompt *,
+div.nboutput.container div.prompt * {
+ background: none;
+}
+
+div.nboutput.container div.output_area .highlight,
+div.nboutput.container div.output_area pre {
+ background: unset;
+}
+
+div.nboutput.container div.output_area div.highlight {
+ color: unset; /* override Pygments text color */
+}
+
+/* avoid gaps between output lines */
+div.nboutput.container div[class*=highlight] pre {
+ line-height: normal;
+}
+
+/* input/output containers */
+div.nbinput.container,
+div.nboutput.container {
+ display: -webkit-flex;
+ display: flex;
+ align-items: flex-start;
+ margin: 0;
+ width: 100%;
+}
+@media (max-width: 540px) {
+ div.nbinput.container,
+ div.nboutput.container {
+ flex-direction: column;
+ }
+}
+
+/* input container */
+div.nbinput.container {
+ padding-top: 5px;
+}
+
+/* last container */
+div.nblast.container {
+ padding-bottom: 5px;
+}
+
+/* input prompt */
+div.nbinput.container div.prompt pre,
+/* for sphinx_immaterial theme: */
+div.nbinput.container div.prompt pre > code {
+ color: #307FC1;
+}
+
+/* output prompt */
+div.nboutput.container div.prompt pre,
+/* for sphinx_immaterial theme: */
+div.nboutput.container div.prompt pre > code {
+ color: #BF5B3D;
+}
+
+/* all prompts */
+div.nbinput.container div.prompt,
+div.nboutput.container div.prompt {
+ width: 4.5ex;
+ padding-top: 5px;
+ position: relative;
+ user-select: none;
+}
+
+div.nbinput.container div.prompt > div,
+div.nboutput.container div.prompt > div {
+ position: absolute;
+ right: 0;
+ margin-right: 0.3ex;
+}
+
+@media (max-width: 540px) {
+ div.nbinput.container div.prompt,
+ div.nboutput.container div.prompt {
+ width: unset;
+ text-align: left;
+ padding: 0.4em;
+ }
+ div.nboutput.container div.prompt.empty {
+ padding: 0;
+ }
+
+ div.nbinput.container div.prompt > div,
+ div.nboutput.container div.prompt > div {
+ position: unset;
+ }
+}
+
+/* disable scrollbars and line breaks on prompts */
+div.nbinput.container div.prompt pre,
+div.nboutput.container div.prompt pre {
+ overflow: hidden;
+ white-space: pre;
+}
+
+/* input/output area */
+div.nbinput.container div.input_area,
+div.nboutput.container div.output_area {
+ -webkit-flex: 1;
+ flex: 1;
+ overflow: auto;
+}
+@media (max-width: 540px) {
+ div.nbinput.container div.input_area,
+ div.nboutput.container div.output_area {
+ width: 100%;
+ }
+}
+
+/* input area */
+div.nbinput.container div.input_area {
+ border: 1px solid #e0e0e0;
+ border-radius: 2px;
+ /*background: #f5f5f5;*/
+}
+
+/* override MathJax center alignment in output cells */
+div.nboutput.container div[class*=MathJax] {
+ text-align: left !important;
+}
+
+/* override sphinx.ext.imgmath center alignment in output cells */
+div.nboutput.container div.math p {
+ text-align: left;
+}
+
+/* standard error */
+div.nboutput.container div.output_area.stderr {
+ background: #fdd;
+}
+
+/* ANSI colors */
+.ansi-black-fg { color: #3E424D; }
+.ansi-black-bg { background-color: #3E424D; }
+.ansi-black-intense-fg { color: #282C36; }
+.ansi-black-intense-bg { background-color: #282C36; }
+.ansi-red-fg { color: #E75C58; }
+.ansi-red-bg { background-color: #E75C58; }
+.ansi-red-intense-fg { color: #B22B31; }
+.ansi-red-intense-bg { background-color: #B22B31; }
+.ansi-green-fg { color: #00A250; }
+.ansi-green-bg { background-color: #00A250; }
+.ansi-green-intense-fg { color: #007427; }
+.ansi-green-intense-bg { background-color: #007427; }
+.ansi-yellow-fg { color: #DDB62B; }
+.ansi-yellow-bg { background-color: #DDB62B; }
+.ansi-yellow-intense-fg { color: #B27D12; }
+.ansi-yellow-intense-bg { background-color: #B27D12; }
+.ansi-blue-fg { color: #208FFB; }
+.ansi-blue-bg { background-color: #208FFB; }
+.ansi-blue-intense-fg { color: #0065CA; }
+.ansi-blue-intense-bg { background-color: #0065CA; }
+.ansi-magenta-fg { color: #D160C4; }
+.ansi-magenta-bg { background-color: #D160C4; }
+.ansi-magenta-intense-fg { color: #A03196; }
+.ansi-magenta-intense-bg { background-color: #A03196; }
+.ansi-cyan-fg { color: #60C6C8; }
+.ansi-cyan-bg { background-color: #60C6C8; }
+.ansi-cyan-intense-fg { color: #258F8F; }
+.ansi-cyan-intense-bg { background-color: #258F8F; }
+.ansi-white-fg { color: #C5C1B4; }
+.ansi-white-bg { background-color: #C5C1B4; }
+.ansi-white-intense-fg { color: #A1A6B2; }
+.ansi-white-intense-bg { background-color: #A1A6B2; }
+
+.ansi-default-inverse-fg { color: #FFFFFF; }
+.ansi-default-inverse-bg { background-color: #000000; }
+
+.ansi-bold { font-weight: bold; }
+.ansi-underline { text-decoration: underline; }
+
+
+div.nbinput.container div.input_area div[class*=highlight] > pre,
+div.nboutput.container div.output_area div[class*=highlight] > pre,
+div.nboutput.container div.output_area div[class*=highlight].math,
+div.nboutput.container div.output_area.rendered_html,
+div.nboutput.container div.output_area > div.output_javascript,
+div.nboutput.container div.output_area:not(.rendered_html) > img{
+ padding: 5px;
+ margin: 0;
+}
+
+/* fix copybtn overflow problem in chromium (needed for 'sphinx_copybutton') */
+div.nbinput.container div.input_area > div[class^='highlight'],
+div.nboutput.container div.output_area > div[class^='highlight']{
+ overflow-y: hidden;
+}
+
+/* hide copy button on prompts for 'sphinx_copybutton' extension ... */
+.prompt .copybtn,
+/* ... and 'sphinx_immaterial' theme */
+.prompt .md-clipboard.md-icon {
+ display: none;
+}
+
+/* Some additional styling taken form the Jupyter notebook CSS */
+.jp-RenderedHTMLCommon table,
+div.rendered_html table {
+ border: none;
+ border-collapse: collapse;
+ border-spacing: 0;
+ color: black;
+ font-size: 12px;
+ table-layout: fixed;
+}
+.jp-RenderedHTMLCommon thead,
+div.rendered_html thead {
+ border-bottom: 1px solid black;
+ vertical-align: bottom;
+}
+.jp-RenderedHTMLCommon tr,
+.jp-RenderedHTMLCommon th,
+.jp-RenderedHTMLCommon td,
+div.rendered_html tr,
+div.rendered_html th,
+div.rendered_html td {
+ text-align: right;
+ vertical-align: middle;
+ padding: 0.5em 0.5em;
+ line-height: normal;
+ white-space: normal;
+ max-width: none;
+ border: none;
+}
+.jp-RenderedHTMLCommon th,
+div.rendered_html th {
+ font-weight: bold;
+}
+.jp-RenderedHTMLCommon tbody tr:nth-child(odd),
+div.rendered_html tbody tr:nth-child(odd) {
+ background: #f5f5f5;
+}
+.jp-RenderedHTMLCommon tbody tr:hover,
+div.rendered_html tbody tr:hover {
+ background: rgba(66, 165, 245, 0.2);
+}
+
diff --git a/_static/nbsphinx-gallery.css b/_static/nbsphinx-gallery.css
new file mode 100644
index 00000000..365c27a9
--- /dev/null
+++ b/_static/nbsphinx-gallery.css
@@ -0,0 +1,31 @@
+.nbsphinx-gallery {
+ display: grid;
+ grid-template-columns: repeat(auto-fill, minmax(160px, 1fr));
+ gap: 5px;
+ margin-top: 1em;
+ margin-bottom: 1em;
+}
+
+.nbsphinx-gallery > a {
+ padding: 5px;
+ border: 1px dotted currentColor;
+ border-radius: 2px;
+ text-align: center;
+}
+
+.nbsphinx-gallery > a:hover {
+ border-style: solid;
+}
+
+.nbsphinx-gallery img {
+ max-width: 100%;
+ max-height: 100%;
+}
+
+.nbsphinx-gallery > a > div:first-child {
+ display: flex;
+ align-items: start;
+ justify-content: center;
+ height: 120px;
+ margin-bottom: 5px;
+}
diff --git a/_static/nbsphinx-no-thumbnail.svg b/_static/nbsphinx-no-thumbnail.svg
new file mode 100644
index 00000000..9dca7588
--- /dev/null
+++ b/_static/nbsphinx-no-thumbnail.svg
@@ -0,0 +1,9 @@
+
+
+
+
diff --git a/_static/plus.png b/_static/plus.png
new file mode 100644
index 00000000..7107cec9
Binary files /dev/null and b/_static/plus.png differ
diff --git a/_static/pygments.css b/_static/pygments.css
new file mode 100644
index 00000000..5f2b0a25
--- /dev/null
+++ b/_static/pygments.css
@@ -0,0 +1,75 @@
+pre { line-height: 125%; }
+td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; }
+span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; }
+td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; }
+span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; }
+.highlight .hll { background-color: #ffffcc }
+.highlight { background: #eeffcc; }
+.highlight .c { color: #408090; font-style: italic } /* Comment */
+.highlight .err { border: 1px solid #F00 } /* Error */
+.highlight .k { color: #007020; font-weight: bold } /* Keyword */
+.highlight .o { color: #666 } /* Operator */
+.highlight .ch { color: #408090; font-style: italic } /* Comment.Hashbang */
+.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */
+.highlight .cp { color: #007020 } /* Comment.Preproc */
+.highlight .cpf { color: #408090; font-style: italic } /* Comment.PreprocFile */
+.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */
+.highlight .cs { color: #408090; background-color: #FFF0F0 } /* Comment.Special */
+.highlight .gd { color: #A00000 } /* Generic.Deleted */
+.highlight .ge { font-style: italic } /* Generic.Emph */
+.highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */
+.highlight .gr { color: #F00 } /* Generic.Error */
+.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
+.highlight .gi { color: #00A000 } /* Generic.Inserted */
+.highlight .go { color: #333 } /* Generic.Output */
+.highlight .gp { color: #C65D09; font-weight: bold } /* Generic.Prompt */
+.highlight .gs { font-weight: bold } /* Generic.Strong */
+.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
+.highlight .gt { color: #04D } /* Generic.Traceback */
+.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */
+.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */
+.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */
+.highlight .kp { color: #007020 } /* Keyword.Pseudo */
+.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */
+.highlight .kt { color: #902000 } /* Keyword.Type */
+.highlight .m { color: #208050 } /* Literal.Number */
+.highlight .s { color: #4070A0 } /* Literal.String */
+.highlight .na { color: #4070A0 } /* Name.Attribute */
+.highlight .nb { color: #007020 } /* Name.Builtin */
+.highlight .nc { color: #0E84B5; font-weight: bold } /* Name.Class */
+.highlight .no { color: #60ADD5 } /* Name.Constant */
+.highlight .nd { color: #555; font-weight: bold } /* Name.Decorator */
+.highlight .ni { color: #D55537; font-weight: bold } /* Name.Entity */
+.highlight .ne { color: #007020 } /* Name.Exception */
+.highlight .nf { color: #06287E } /* Name.Function */
+.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */
+.highlight .nn { color: #0E84B5; font-weight: bold } /* Name.Namespace */
+.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */
+.highlight .nv { color: #BB60D5 } /* Name.Variable */
+.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */
+.highlight .w { color: #BBB } /* Text.Whitespace */
+.highlight .mb { color: #208050 } /* Literal.Number.Bin */
+.highlight .mf { color: #208050 } /* Literal.Number.Float */
+.highlight .mh { color: #208050 } /* Literal.Number.Hex */
+.highlight .mi { color: #208050 } /* Literal.Number.Integer */
+.highlight .mo { color: #208050 } /* Literal.Number.Oct */
+.highlight .sa { color: #4070A0 } /* Literal.String.Affix */
+.highlight .sb { color: #4070A0 } /* Literal.String.Backtick */
+.highlight .sc { color: #4070A0 } /* Literal.String.Char */
+.highlight .dl { color: #4070A0 } /* Literal.String.Delimiter */
+.highlight .sd { color: #4070A0; font-style: italic } /* Literal.String.Doc */
+.highlight .s2 { color: #4070A0 } /* Literal.String.Double */
+.highlight .se { color: #4070A0; font-weight: bold } /* Literal.String.Escape */
+.highlight .sh { color: #4070A0 } /* Literal.String.Heredoc */
+.highlight .si { color: #70A0D0; font-style: italic } /* Literal.String.Interpol */
+.highlight .sx { color: #C65D09 } /* Literal.String.Other */
+.highlight .sr { color: #235388 } /* Literal.String.Regex */
+.highlight .s1 { color: #4070A0 } /* Literal.String.Single */
+.highlight .ss { color: #517918 } /* Literal.String.Symbol */
+.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */
+.highlight .fm { color: #06287E } /* Name.Function.Magic */
+.highlight .vc { color: #BB60D5 } /* Name.Variable.Class */
+.highlight .vg { color: #BB60D5 } /* Name.Variable.Global */
+.highlight .vi { color: #BB60D5 } /* Name.Variable.Instance */
+.highlight .vm { color: #BB60D5 } /* Name.Variable.Magic */
+.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */
\ No newline at end of file
diff --git a/_static/searchtools.js b/_static/searchtools.js
new file mode 100644
index 00000000..97d56a74
--- /dev/null
+++ b/_static/searchtools.js
@@ -0,0 +1,566 @@
+/*
+ * searchtools.js
+ * ~~~~~~~~~~~~~~~~
+ *
+ * Sphinx JavaScript utilities for the full-text search.
+ *
+ * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+"use strict";
+
+/**
+ * Simple result scoring code.
+ */
+if (typeof Scorer === "undefined") {
+ var Scorer = {
+ // Implement the following function to further tweak the score for each result
+ // The function takes a result array [docname, title, anchor, descr, score, filename]
+ // and returns the new score.
+ /*
+ score: result => {
+ const [docname, title, anchor, descr, score, filename] = result
+ return score
+ },
+ */
+
+ // query matches the full name of an object
+ objNameMatch: 11,
+ // or matches in the last dotted part of the object name
+ objPartialMatch: 6,
+ // Additive scores depending on the priority of the object
+ objPrio: {
+ 0: 15, // used to be importantResults
+ 1: 5, // used to be objectResults
+ 2: -5, // used to be unimportantResults
+ },
+ // Used when the priority is not in the mapping.
+ objPrioDefault: 0,
+
+ // query found in title
+ title: 15,
+ partialTitle: 7,
+ // query found in terms
+ term: 5,
+ partialTerm: 2,
+ };
+}
+
+const _removeChildren = (element) => {
+ while (element && element.lastChild) element.removeChild(element.lastChild);
+};
+
+/**
+ * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping
+ */
+const _escapeRegExp = (string) =>
+ string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
+
+const _displayItem = (item, searchTerms) => {
+ const docBuilder = DOCUMENTATION_OPTIONS.BUILDER;
+ const docUrlRoot = DOCUMENTATION_OPTIONS.URL_ROOT;
+ const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX;
+ const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX;
+ const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY;
+
+ const [docName, title, anchor, descr, score, _filename] = item;
+
+ let listItem = document.createElement("li");
+ let requestUrl;
+ let linkUrl;
+ if (docBuilder === "dirhtml") {
+ // dirhtml builder
+ let dirname = docName + "/";
+ if (dirname.match(/\/index\/$/))
+ dirname = dirname.substring(0, dirname.length - 6);
+ else if (dirname === "index/") dirname = "";
+ requestUrl = docUrlRoot + dirname;
+ linkUrl = requestUrl;
+ } else {
+ // normal html builders
+ requestUrl = docUrlRoot + docName + docFileSuffix;
+ linkUrl = docName + docLinkSuffix;
+ }
+ let linkEl = listItem.appendChild(document.createElement("a"));
+ linkEl.href = linkUrl + anchor;
+ linkEl.dataset.score = score;
+ linkEl.innerHTML = title;
+ if (descr)
+ listItem.appendChild(document.createElement("span")).innerHTML =
+ " (" + descr + ")";
+ else if (showSearchSummary)
+ fetch(requestUrl)
+ .then((responseData) => responseData.text())
+ .then((data) => {
+ if (data)
+ listItem.appendChild(
+ Search.makeSearchSummary(data, searchTerms)
+ );
+ });
+ Search.output.appendChild(listItem);
+};
+const _finishSearch = (resultCount) => {
+ Search.stopPulse();
+ Search.title.innerText = _("Search Results");
+ if (!resultCount)
+ Search.status.innerText = Documentation.gettext(
+ "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories."
+ );
+ else
+ Search.status.innerText = _(
+ `Search finished, found ${resultCount} page(s) matching the search query.`
+ );
+};
+const _displayNextItem = (
+ results,
+ resultCount,
+ searchTerms
+) => {
+ // results left, load the summary and display it
+ // this is intended to be dynamic (don't sub resultsCount)
+ if (results.length) {
+ _displayItem(results.pop(), searchTerms);
+ setTimeout(
+ () => _displayNextItem(results, resultCount, searchTerms),
+ 5
+ );
+ }
+ // search finished, update title and status message
+ else _finishSearch(resultCount);
+};
+
+/**
+ * Default splitQuery function. Can be overridden in ``sphinx.search`` with a
+ * custom function per language.
+ *
+ * The regular expression works by splitting the string on consecutive characters
+ * that are not Unicode letters, numbers, underscores, or emoji characters.
+ * This is the same as ``\W+`` in Python, preserving the surrogate pair area.
+ */
+if (typeof splitQuery === "undefined") {
+ var splitQuery = (query) => query
+ .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu)
+ .filter(term => term) // remove remaining empty strings
+}
+
+/**
+ * Search Module
+ */
+const Search = {
+ _index: null,
+ _queued_query: null,
+ _pulse_status: -1,
+
+ htmlToText: (htmlString) => {
+ const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html');
+ htmlElement.querySelectorAll(".headerlink").forEach((el) => { el.remove() });
+ const docContent = htmlElement.querySelector('[role="main"]');
+ if (docContent !== undefined) return docContent.textContent;
+ console.warn(
+ "Content block not found. Sphinx search tries to obtain it via '[role=main]'. Could you check your theme or template."
+ );
+ return "";
+ },
+
+ init: () => {
+ const query = new URLSearchParams(window.location.search).get("q");
+ document
+ .querySelectorAll('input[name="q"]')
+ .forEach((el) => (el.value = query));
+ if (query) Search.performSearch(query);
+ },
+
+ loadIndex: (url) =>
+ (document.body.appendChild(document.createElement("script")).src = url),
+
+ setIndex: (index) => {
+ Search._index = index;
+ if (Search._queued_query !== null) {
+ const query = Search._queued_query;
+ Search._queued_query = null;
+ Search.query(query);
+ }
+ },
+
+ hasIndex: () => Search._index !== null,
+
+ deferQuery: (query) => (Search._queued_query = query),
+
+ stopPulse: () => (Search._pulse_status = -1),
+
+ startPulse: () => {
+ if (Search._pulse_status >= 0) return;
+
+ const pulse = () => {
+ Search._pulse_status = (Search._pulse_status + 1) % 4;
+ Search.dots.innerText = ".".repeat(Search._pulse_status);
+ if (Search._pulse_status >= 0) window.setTimeout(pulse, 500);
+ };
+ pulse();
+ },
+
+ /**
+ * perform a search for something (or wait until index is loaded)
+ */
+ performSearch: (query) => {
+ // create the required interface elements
+ const searchText = document.createElement("h2");
+ searchText.textContent = _("Searching");
+ const searchSummary = document.createElement("p");
+ searchSummary.classList.add("search-summary");
+ searchSummary.innerText = "";
+ const searchList = document.createElement("ul");
+ searchList.classList.add("search");
+
+ const out = document.getElementById("search-results");
+ Search.title = out.appendChild(searchText);
+ Search.dots = Search.title.appendChild(document.createElement("span"));
+ Search.status = out.appendChild(searchSummary);
+ Search.output = out.appendChild(searchList);
+
+ const searchProgress = document.getElementById("search-progress");
+ // Some themes don't use the search progress node
+ if (searchProgress) {
+ searchProgress.innerText = _("Preparing search...");
+ }
+ Search.startPulse();
+
+ // index already loaded, the browser was quick!
+ if (Search.hasIndex()) Search.query(query);
+ else Search.deferQuery(query);
+ },
+
+ /**
+ * execute search (requires search index to be loaded)
+ */
+ query: (query) => {
+ const filenames = Search._index.filenames;
+ const docNames = Search._index.docnames;
+ const titles = Search._index.titles;
+ const allTitles = Search._index.alltitles;
+ const indexEntries = Search._index.indexentries;
+
+ // stem the search terms and add them to the correct list
+ const stemmer = new Stemmer();
+ const searchTerms = new Set();
+ const excludedTerms = new Set();
+ const highlightTerms = new Set();
+ const objectTerms = new Set(splitQuery(query.toLowerCase().trim()));
+ splitQuery(query.trim()).forEach((queryTerm) => {
+ const queryTermLower = queryTerm.toLowerCase();
+
+ // maybe skip this "word"
+ // stopwords array is from language_data.js
+ if (
+ stopwords.indexOf(queryTermLower) !== -1 ||
+ queryTerm.match(/^\d+$/)
+ )
+ return;
+
+ // stem the word
+ let word = stemmer.stemWord(queryTermLower);
+ // select the correct list
+ if (word[0] === "-") excludedTerms.add(word.substr(1));
+ else {
+ searchTerms.add(word);
+ highlightTerms.add(queryTermLower);
+ }
+ });
+
+ if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js
+ localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" "))
+ }
+
+ // console.debug("SEARCH: searching for:");
+ // console.info("required: ", [...searchTerms]);
+ // console.info("excluded: ", [...excludedTerms]);
+
+ // array of [docname, title, anchor, descr, score, filename]
+ let results = [];
+ _removeChildren(document.getElementById("search-progress"));
+
+ const queryLower = query.toLowerCase();
+ for (const [title, foundTitles] of Object.entries(allTitles)) {
+ if (title.toLowerCase().includes(queryLower) && (queryLower.length >= title.length/2)) {
+ for (const [file, id] of foundTitles) {
+ let score = Math.round(100 * queryLower.length / title.length)
+ results.push([
+ docNames[file],
+ titles[file] !== title ? `${titles[file]} > ${title}` : title,
+ id !== null ? "#" + id : "",
+ null,
+ score,
+ filenames[file],
+ ]);
+ }
+ }
+ }
+
+ // search for explicit entries in index directives
+ for (const [entry, foundEntries] of Object.entries(indexEntries)) {
+ if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) {
+ for (const [file, id] of foundEntries) {
+ let score = Math.round(100 * queryLower.length / entry.length)
+ results.push([
+ docNames[file],
+ titles[file],
+ id ? "#" + id : "",
+ null,
+ score,
+ filenames[file],
+ ]);
+ }
+ }
+ }
+
+ // lookup as object
+ objectTerms.forEach((term) =>
+ results.push(...Search.performObjectSearch(term, objectTerms))
+ );
+
+ // lookup as search terms in fulltext
+ results.push(...Search.performTermsSearch(searchTerms, excludedTerms));
+
+ // let the scorer override scores with a custom scoring function
+ if (Scorer.score) results.forEach((item) => (item[4] = Scorer.score(item)));
+
+ // now sort the results by score (in opposite order of appearance, since the
+ // display function below uses pop() to retrieve items) and then
+ // alphabetically
+ results.sort((a, b) => {
+ const leftScore = a[4];
+ const rightScore = b[4];
+ if (leftScore === rightScore) {
+ // same score: sort alphabetically
+ const leftTitle = a[1].toLowerCase();
+ const rightTitle = b[1].toLowerCase();
+ if (leftTitle === rightTitle) return 0;
+ return leftTitle > rightTitle ? -1 : 1; // inverted is intentional
+ }
+ return leftScore > rightScore ? 1 : -1;
+ });
+
+ // remove duplicate search results
+ // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept
+ let seen = new Set();
+ results = results.reverse().reduce((acc, result) => {
+ let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(',');
+ if (!seen.has(resultStr)) {
+ acc.push(result);
+ seen.add(resultStr);
+ }
+ return acc;
+ }, []);
+
+ results = results.reverse();
+
+ // for debugging
+ //Search.lastresults = results.slice(); // a copy
+ // console.info("search results:", Search.lastresults);
+
+ // print the results
+ _displayNextItem(results, results.length, searchTerms);
+ },
+
+ /**
+ * search for object names
+ */
+ performObjectSearch: (object, objectTerms) => {
+ const filenames = Search._index.filenames;
+ const docNames = Search._index.docnames;
+ const objects = Search._index.objects;
+ const objNames = Search._index.objnames;
+ const titles = Search._index.titles;
+
+ const results = [];
+
+ const objectSearchCallback = (prefix, match) => {
+ const name = match[4]
+ const fullname = (prefix ? prefix + "." : "") + name;
+ const fullnameLower = fullname.toLowerCase();
+ if (fullnameLower.indexOf(object) < 0) return;
+
+ let score = 0;
+ const parts = fullnameLower.split(".");
+
+ // check for different match types: exact matches of full name or
+ // "last name" (i.e. last dotted part)
+ if (fullnameLower === object || parts.slice(-1)[0] === object)
+ score += Scorer.objNameMatch;
+ else if (parts.slice(-1)[0].indexOf(object) > -1)
+ score += Scorer.objPartialMatch; // matches in last name
+
+ const objName = objNames[match[1]][2];
+ const title = titles[match[0]];
+
+ // If more than one term searched for, we require other words to be
+ // found in the name/title/description
+ const otherTerms = new Set(objectTerms);
+ otherTerms.delete(object);
+ if (otherTerms.size > 0) {
+ const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase();
+ if (
+ [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0)
+ )
+ return;
+ }
+
+ let anchor = match[3];
+ if (anchor === "") anchor = fullname;
+ else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname;
+
+ const descr = objName + _(", in ") + title;
+
+ // add custom score for some objects according to scorer
+ if (Scorer.objPrio.hasOwnProperty(match[2]))
+ score += Scorer.objPrio[match[2]];
+ else score += Scorer.objPrioDefault;
+
+ results.push([
+ docNames[match[0]],
+ fullname,
+ "#" + anchor,
+ descr,
+ score,
+ filenames[match[0]],
+ ]);
+ };
+ Object.keys(objects).forEach((prefix) =>
+ objects[prefix].forEach((array) =>
+ objectSearchCallback(prefix, array)
+ )
+ );
+ return results;
+ },
+
+ /**
+ * search for full-text terms in the index
+ */
+ performTermsSearch: (searchTerms, excludedTerms) => {
+ // prepare search
+ const terms = Search._index.terms;
+ const titleTerms = Search._index.titleterms;
+ const filenames = Search._index.filenames;
+ const docNames = Search._index.docnames;
+ const titles = Search._index.titles;
+
+ const scoreMap = new Map();
+ const fileMap = new Map();
+
+ // perform the search on the required terms
+ searchTerms.forEach((word) => {
+ const files = [];
+ const arr = [
+ { files: terms[word], score: Scorer.term },
+ { files: titleTerms[word], score: Scorer.title },
+ ];
+ // add support for partial matches
+ if (word.length > 2) {
+ const escapedWord = _escapeRegExp(word);
+ Object.keys(terms).forEach((term) => {
+ if (term.match(escapedWord) && !terms[word])
+ arr.push({ files: terms[term], score: Scorer.partialTerm });
+ });
+ Object.keys(titleTerms).forEach((term) => {
+ if (term.match(escapedWord) && !titleTerms[word])
+ arr.push({ files: titleTerms[word], score: Scorer.partialTitle });
+ });
+ }
+
+ // no match but word was a required one
+ if (arr.every((record) => record.files === undefined)) return;
+
+ // found search word in contents
+ arr.forEach((record) => {
+ if (record.files === undefined) return;
+
+ let recordFiles = record.files;
+ if (recordFiles.length === undefined) recordFiles = [recordFiles];
+ files.push(...recordFiles);
+
+ // set score for the word in each file
+ recordFiles.forEach((file) => {
+ if (!scoreMap.has(file)) scoreMap.set(file, {});
+ scoreMap.get(file)[word] = record.score;
+ });
+ });
+
+ // create the mapping
+ files.forEach((file) => {
+ if (fileMap.has(file) && fileMap.get(file).indexOf(word) === -1)
+ fileMap.get(file).push(word);
+ else fileMap.set(file, [word]);
+ });
+ });
+
+ // now check if the files don't contain excluded terms
+ const results = [];
+ for (const [file, wordList] of fileMap) {
+ // check if all requirements are matched
+
+ // as search terms with length < 3 are discarded
+ const filteredTermCount = [...searchTerms].filter(
+ (term) => term.length > 2
+ ).length;
+ if (
+ wordList.length !== searchTerms.size &&
+ wordList.length !== filteredTermCount
+ )
+ continue;
+
+ // ensure that none of the excluded terms is in the search result
+ if (
+ [...excludedTerms].some(
+ (term) =>
+ terms[term] === file ||
+ titleTerms[term] === file ||
+ (terms[term] || []).includes(file) ||
+ (titleTerms[term] || []).includes(file)
+ )
+ )
+ break;
+
+ // select one (max) score for the file.
+ const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w]));
+ // add result to the result list
+ results.push([
+ docNames[file],
+ titles[file],
+ "",
+ null,
+ score,
+ filenames[file],
+ ]);
+ }
+ return results;
+ },
+
+ /**
+ * helper function to return a node containing the
+ * search summary for a given text. keywords is a list
+ * of stemmed words.
+ */
+ makeSearchSummary: (htmlText, keywords) => {
+ const text = Search.htmlToText(htmlText);
+ if (text === "") return null;
+
+ const textLower = text.toLowerCase();
+ const actualStartPosition = [...keywords]
+ .map((k) => textLower.indexOf(k.toLowerCase()))
+ .filter((i) => i > -1)
+ .slice(-1)[0];
+ const startWithContext = Math.max(actualStartPosition - 120, 0);
+
+ const top = startWithContext === 0 ? "" : "...";
+ const tail = startWithContext + 240 < text.length ? "..." : "";
+
+ let summary = document.createElement("p");
+ summary.classList.add("context");
+ summary.textContent = top + text.substr(startWithContext, 240).trim() + tail;
+
+ return summary;
+ },
+};
+
+_ready(Search.init);
diff --git a/_static/sphinx_highlight.js b/_static/sphinx_highlight.js
new file mode 100644
index 00000000..aae669d7
--- /dev/null
+++ b/_static/sphinx_highlight.js
@@ -0,0 +1,144 @@
+/* Highlighting utilities for Sphinx HTML documentation. */
+"use strict";
+
// Master switch for search-term highlighting (read by highlightSearchWords).
const SPHINX_HIGHLIGHT_ENABLED = true;
+
/**
 * highlight a given string on a node by wrapping it in
 * span elements with the given class name.
 *
 * Walks the DOM recursively. For a text node containing `text` (case-
 * insensitive, first occurrence only), splits it into three parts:
 * [before][match][after], wrapping the match in a <span> (or, inside SVG,
 * a <tspan> plus a background <rect> recorded in `addItems` for deferred
 * insertion, since the rect's geometry needs the parent's bounding box).
 */
const _highlight = (node, addItems, text, className) => {
  if (node.nodeType === Node.TEXT_NODE) {
    const val = node.nodeValue;
    const parent = node.parentNode;
    const pos = val.toLowerCase().indexOf(text);
    if (
      pos >= 0 &&
      // skip nodes already highlighted or explicitly opted out
      !parent.classList.contains(className) &&
      !parent.classList.contains("nohighlight")
    ) {
      let span;

      // SVG text needs a namespaced <tspan>; HTML gets a classed <span>
      const closestNode = parent.closest("body, svg, foreignObject");
      const isInSVG = closestNode && closestNode.matches("svg");
      if (isInSVG) {
        span = document.createElementNS("http://www.w3.org/2000/svg", "tspan");
      } else {
        span = document.createElement("span");
        span.classList.add(className);
      }

      // Insert the wrapped match and the trailing remainder after the
      // original node; the inner insertBefore returns the "after" text node
      // so the span lands between the original node and the remainder.
      span.appendChild(document.createTextNode(val.substr(pos, text.length)));
      parent.insertBefore(
        span,
        parent.insertBefore(
          document.createTextNode(val.substr(pos + text.length)),
          node.nextSibling
        )
      );
      // Truncate the original node to the text preceding the match.
      node.nodeValue = val.substr(0, pos);

      if (isInSVG) {
        // Build a background rect sized to the parent's bounding box;
        // actual insertion is deferred via addItems (see _highlightText),
        // because inserting mid-walk would disturb the traversal.
        const rect = document.createElementNS(
          "http://www.w3.org/2000/svg",
          "rect"
        );
        const bbox = parent.getBBox();
        rect.x.baseVal.value = bbox.x;
        rect.y.baseVal.value = bbox.y;
        rect.width.baseVal.value = bbox.width;
        rect.height.baseVal.value = bbox.height;
        rect.setAttribute("class", className);
        addItems.push({ parent: parent, target: rect });
      }
    }
  } else if (node.matches && !node.matches("button, select, textarea")) {
    // Recurse into element children, skipping form controls.
    node.childNodes.forEach((el) => _highlight(el, addItems, text, className));
  }
};
/**
 * Highlight every occurrence of `text` under `thisNode`, then insert any
 * deferred SVG background rects collected during the walk.
 */
const _highlightText = (thisNode, text, className) => {
  const pending = [];
  _highlight(thisNode, pending, text, className);
  for (const { parent, target } of pending) {
    parent.insertAdjacentElement("beforebegin", target);
  }
};
+
/**
 * Small JavaScript module for the documentation.
 *
 * Persists the user's search terms (via localStorage or the ?highlight=
 * URL parameter), highlights them in the page body, and offers a link /
 * Escape-key shortcut to clear the highlights again.
 */
const SphinxHighlight = {

  /**
   * highlight the search words provided in localstorage in the text
   */
  highlightSearchWords: () => {
    if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight

    // get and clear terms from localstorage; the URL parameter is a
    // fallback source, and is scrubbed from the address bar afterwards
    const url = new URL(window.location);
    const highlight =
      localStorage.getItem("sphinx_highlight_terms")
      || url.searchParams.get("highlight")
      || "";
    localStorage.removeItem("sphinx_highlight_terms")
    url.searchParams.delete("highlight");
    window.history.replaceState({}, "", url);

    // get individual terms from highlight string
    const terms = highlight.toLowerCase().split(/\s+/).filter(x => x);
    if (terms.length === 0) return; // nothing to do

    // There should never be more than one element matching "div.body"
    const divBody = document.querySelectorAll("div.body");
    const body = divBody.length ? divBody[0] : document.querySelector("body");
    // defer slightly so highlighting does not block initial rendering
    window.setTimeout(() => {
      terms.forEach((term) => _highlightText(body, term, "highlighted"));
    }, 10);

    // append a "Hide Search Matches" link inside the search box, if present
    const searchBox = document.getElementById("searchbox");
    if (searchBox === null) return;
    // NOTE(review): the markup string below appears mangled (HTML tags
    // stripped during extraction); restore from upstream sphinx_highlight.js
    // before shipping — TODO confirm against the Sphinx source.
    searchBox.appendChild(
      document
        .createRange()
        .createContextualFragment(
          '
' +
          '' +
          _("Hide Search Matches") +
          "
"
        )
    );
  },

  /**
   * helper function to hide the search marks again
   */
  hideSearchWords: () => {
    // remove the "Hide Search Matches" link(s)
    document
      .querySelectorAll("#searchbox .highlight-link")
      .forEach((el) => el.remove());
    // un-highlight by dropping the class; the wrapper spans are left in place
    document
      .querySelectorAll("span.highlighted")
      .forEach((el) => el.classList.remove("highlighted"));
    // forget the persisted terms so a reload does not re-highlight
    localStorage.removeItem("sphinx_highlight_terms")
  },

  initEscapeListener: () => {
    // only install a listener if it is really needed
    if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return;

    document.addEventListener("keydown", (event) => {
      // bail for input elements
      if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
      // bail with special keys
      if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return;
      if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) {
        SphinxHighlight.hideSearchWords();
        event.preventDefault();
      }
    });
  },
};
+
+_ready(SphinxHighlight.highlightSearchWords);
+_ready(SphinxHighlight.initEscapeListener);
diff --git a/api_reference/center_selector.html b/api_reference/center_selector.html
new file mode 100644
index 00000000..68e4ee1d
--- /dev/null
+++ b/api_reference/center_selector.html
@@ -0,0 +1,595 @@
+
+
+
+
+
+
+
+
+
falkon.center_selection — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+falkon.center_selection
+
+CenterSelector
+
+
+class falkon.center_selection. CenterSelector ( random_gen )
+Create the center selector with a random number generator
+
+Parameters:
+random_gen – A numpy random number generator object or a random seed.
+
+
+
+
+abstract select ( X , Y ) → Tensor | SparseTensor | Tuple [ Tensor | SparseTensor , Tensor ]
+Abstract method for selecting M centers from the data.
+
+Parameters:
+
+
+Returns:
+
+X_centers – If Y is None this is the only output: M centers selected from the data.
+Y_centers – If Y is not None, a set of label centers will be returned as well.
+
+
+
+
+
+
+
+
+abstract select_indices ( X , Y ) → Tuple [ Tensor | SparseTensor , Tensor ] | Tuple [ Tensor | SparseTensor , Tensor , Tensor ]
+Abstract method for selecting M centers from the data.
+
+Parameters:
+
+
+Returns:
+
+X_centers – M centers selected from the data.
+Y_centers – If Y is not None, a set of label centers will be returned as well.
+indices – The indices in X (and optionally Y) associated with the chosen centers.
+
+
+
+
+
+
+
+
+
+
+
+FixedSelector
+
+
+class falkon.center_selection. FixedSelector ( centers : Tensor | SparseTensor , y_centers : Tensor | None = None , idx_centers : Tensor | None = None )
+Bases: CenterSelector
+Center selector which always picks the same centers.
+The fixed centers are specified at class initialization time.
+
+Parameters:
+
+centers – Tensor of data-centers to be used.
+y_centers – Optional tensor of label-centers to be used. If this is None , calling select() with
+a non-empty Y argument will throw an exception
+idx_centers – Optional tensor containing the indices which correspond to the given centers. This tensor
+is used in the select_indices() method.
+
+
+
+
+
+select ( X : Tensor | SparseTensor , Y : Tensor | None ) → Tensor | SparseTensor | Tuple [ Tensor | SparseTensor , Tensor ]
+Returns the fixed centers with which this instance was created
+
+Parameters:
+
+X – This parameter is ignored. The centers returned are the ones passed in the class’s
+constructor.
+Y – Optional N x T tensor containing the input targets. The value of the parameter is
+ignored, but if it is not None , this method will return a tuple of X-centers
+and Y-centers.
+
+
+Returns:
+
+X_M – The fixed centers as given in the class constructor
+(X_M, Y_M) – The X-centers and Y-centers as given in the class constructor. This tuple is only
+returned if Y is not None.
+
+
+
+Raises:
+RuntimeError – If parameter Y is not None but the y_centers tensor passed to the class constructor
+ is None .
+
+
+
+
+
+
+select_indices ( X : Tensor | SparseTensor , Y : Tensor | None ) → Tuple [ Tensor | SparseTensor , Tensor ] | Tuple [ Tensor | SparseTensor , Tensor , Tensor ]
+Returns the fixed centers, and their indices with which this instance was created
+
+Parameters:
+
+X – This parameter is ignored. The centers returned are the ones passed in the class’s
+constructor.
+Y – Optional N x T tensor containing the input targets. The value of the parameter is
+ignored, but if it is not None , this method will return a tuple of X-centers
+Y-centers, and indices.
+
+
+Returns:
+
+(X_M, indices) – The fixed centers and the indices as given in the class constructor
+(X_M, Y_M, indices) – The X-centers, Y-centers and indices as given in the class constructor.
+This tuple is only returned if Y is not None.
+
+
+
+Raises:
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/api_reference/gsc_losses.html b/api_reference/gsc_losses.html
new file mode 100644
index 00000000..68425a3b
--- /dev/null
+++ b/api_reference/gsc_losses.html
@@ -0,0 +1,710 @@
+
+
+
+
+
+
+
+
+
falkon.gsc_losses — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+falkon.gsc_losses
+
+Loss
+
+
+class falkon.gsc_losses. Loss ( name : str , kernel : Kernel , opt : FalkonOptions | None = None )
+Abstract generalized self-concordant loss function class.
+Such loss functions must be three times differentiable; but for the logistic Falkon algorithm
+only the first two derivatives are used.
+Subclasses must implement the __call__() method which calculates the loss function
+given two input vectors (the inputs could also be matrices e.g. for the softmax loss),
+the df() method which calculates the first derivative of the function and ddf()
+which calculates the second derivative.
+Additionally, this class provides two methods (knmp_grad() and knmp_hess() ) which
+calculate kernel-vector products using the loss derivatives for vectors. These functions are
+specific to the logistic Falkon algorithm.
+
+Parameters:
+
+name – A descriptive name for the loss function (e.g. “logistic”, “softmax”)
+kernel – The kernel function used for training a LogFalkon model
+opt – Falkon options container. Will be passed to the kernel when computing kernel-vector
+products.
+
+
+
+
+
+
+abstract __call__ ( y1 : Tensor , y2 : Tensor ) → Tensor
+Abstract method. Should return the loss for predicting y2 with true labels y1 .
+
+Parameters:
+
+
+Returns:
+torch.Tensor – The loss calculated for the two inputs.
+
+
+
+
+
+
+abstract ddf ( y1 : Tensor , y2 : Tensor ) → Tensor
+Abstract method. Should return the second derivative of the loss wrt y2 .
+
+Parameters:
+
+y1 (torch.Tensor ) – One of the two inputs to the loss. This should be interpreted as the true labels.
+y2 (torch.Tensor ) – The other loss input. Should be interpreted as the predicted labels. The derivative
+should be computed with respect to this tensor.
+
+
+Returns:
+torch.Tensor – The second derivative of the loss with respect to y2 . It will be a tensor of the
+same shape as the two inputs.
+
+
+
+
+
+
+abstract df ( y1 : Tensor , y2 : Tensor ) → Tensor
+Abstract method. Should return the derivative of the loss wrt y2 .
+
+Parameters:
+
+y1 (torch.Tensor ) – One of the two inputs to the loss. This should be interpreted as the true labels.
+y2 (torch.Tensor ) – The other loss input. Should be interpreted as the predicted labels. The derivative
+should be computed with respect to this tensor.
+
+
+Returns:
+torch.Tensor – The derivative of the loss with respect to y2 . It will be a tensor of the same shape
+as the two inputs.
+
+
+
+
+
+
+knmp_grad ( X : Tensor , Xc : Tensor , Y : Tensor , u : Tensor , opt : FalkonOptions | None = None ) → Tuple [ Tensor , Tensor ]
+Computes a kernel vector product where the vector is the first derivative of this loss
+Given kernel function \(K\) , the loss represented by this class \(\mathcal{l}\) ,
+number of samples \(n\) , this function follows equation
+
+\[\dfrac{1}{n} K(X_c, X) @ (\mathcal{l}'(Y, K(X, X_c) @ u))\]
+
+Parameters:
+
+X (torch.Tensor ) – Data matrix of shape (n x d) with n samples in d dimensions.
+Xc (torch.Tensor ) – Center matrix of shape (m x d) with m centers in d dimensions.
+Y (torch.Tensor ) – Label matrix of shape (n x t) with n samples. Depending on the loss, the labels may or may not
+have more than one dimension.
+u (torch.Tensor ) – A vector (or matrix if the labels are multi-dimensional) of weights of shape (m x t).
+The product K(X, Xc) @ u , where K is the kernel associated to this loss, should
+produce label predictions.
+opt (FalkonOptions or None ) – Options to be passed to the mmv function for the kernel associated to this loss.
+Options passed as an argument take precedence over the options used to build this
+class instance.
+
+
+Returns:
+
+grad_mul (torch.Tensor ) – A tensor of shape (m x 1) coming from the multiplication of the kernel matrix
+K(Xc, X) and the loss calculated on predictions with weights u .
+The formula followed is: (1/n) * K(Xc, X) @ df(Y, K(X, Xc) @ u) .
+func_val (torch.Tensor ) – A tensor of shape (n x t) of predictions obtained with weights u .
+
+
+
+
+
+
+
+
+knmp_hess ( X : Tensor , Xc : Tensor , Y : Tensor , f : Tensor , u : Tensor , opt : FalkonOptions | None = None ) → Tensor
+Compute a kernel-vector product with a rescaling with the second derivative
+Given kernel function \(K\) , the loss represented by this class \(\mathcal{l}\) ,
+number of samples \(n\) , this function follows equation
+
+\[\dfrac{1}{n} K(X_c, X) @ (\mathcal{l}''(Y, f) * K(X, X_c) @ u)\]
+
+Parameters:
+
+X (torch.Tensor ) – Data matrix of shape (n x d) with n samples in d dimensions.
+Xc (torch.Tensor ) – Center matrix of shape (m x d) with m centers in d dimensions.
+Y (torch.Tensor ) – Label matrix of shape (n x t) with n samples. Depending on the loss, the labels may
+or may not have more than one dimension.
+f (torch.Tensor ) – Tensor of shape (n x t) of predictions. Typically this will be the second output of
+the knmp_grad() method.
+u (torch.Tensor ) – A vector (or matrix if the labels are multi-dimensional) of weights of shape (m x t).
+The product K(X, Xc) @ u , where K is the kernel associated to this loss, should
+produce label predictions.
+opt (FalkonOptions or None ) – Options to be passed to the mmv function for the kernel associated to this loss.
+Options passed as an argument take precedence over the options used to build this
+class instance.
+
+
+Returns:
+A tensor of shape (m x t), the output of the computation.
+
+
+
+
+
+
+
+
+Logistic loss
+
+
+class falkon.gsc_losses. LogisticLoss ( kernel : Kernel , opt : FalkonOptions | None = None )
+Wrapper for the logistic loss, to be used in conjunction with the
+LogisticFalkon estimator.
+Usage of this loss assumes a binary classification problem with labels -1 and +1. For different
+choices of labels, see WeightedCrossEntropyLoss .
+
+Parameters:
+
+
+
+Examples
+>>> k = falkon . kernels . GaussianKernel ( 3 )
+>>> log_loss = LogisticLoss ( k )
+>>> estimator = falkon . LogisticFalkon ( k , [ 1e-4 , 1e-4 , 1e-4 ], [ 3 , 3 , 3 ], loss = log_loss , M = 100 )
+
+
+
+
+__call__ ( y1 : Tensor , y2 : Tensor ) → Tensor
+Compute the logistic loss between two 1-dimensional tensors
+The formula used is \(\log(1 + \exp(-y_1 * y_2))\)
+
+Parameters:
+
+
+Returns:
+loss – The logistic loss between the two input vectors.
+
+
+
+
+
+
+ddf ( y1 : Tensor , y2 : Tensor ) → Tensor
+Compute the second derivative of the logistic loss with respect to y2
+The formula used is
+
+\[y_1^2 \dfrac{1}{1 + \exp(-y_1 * y_2)} \dfrac{1}{1 + \exp(y_1 * y_2)}\]
+
+Parameters:
+
+
+Returns:
+dd_loss – The second derivative of the logistic loss, calculated between the two input vectors.
+
+
+
+
+
+
+df ( y1 : Tensor , y2 : Tensor ) → Tensor
+Compute the derivative of the logistic loss with respect to y2
+The formula used is
+
+\[\dfrac{-y_1}{1 + \exp(y_1 * y_2)}\]
+
+Parameters:
+
+
+Returns:
+d_loss – The derivative of the logistic loss, calculated between the two input vectors.
+
+
+
+
+
+
+
+
+Weighted binary cross entropy loss
+
+
+class falkon.gsc_losses. WeightedCrossEntropyLoss ( kernel : Kernel , neg_weight : float , opt : FalkonOptions | None = None )
+Wrapper for the weighted binary cross-entropy loss, to be used with the
+LogisticFalkon estimator.
+Using this loss assumes a binary classification problem with labels 0 and +1. Additionally,
+this loss allows to place a different weight to samples belonging to one of the two classes
+(see the neg_weight parameter).
+
+Parameters:
+
+kernel (falkon.kernels.kernel.Kernel ) – The kernel function used for training a LogisticFalkon model
+neg_weight (float ) – The weight to be assigned to samples belonging to the negative (0-labeled) class.
+By setting neg_weight to 1, the classes are equally weighted and this loss is
+equivalent to the LogisticLoss loss, but with a different
+choice of labels.
+opt (FalkonOptions ) – Falkon options container. Will be passed to the kernel when computing kernel-vector
+products.
+
+
+
+Examples
+>>> k = falkon . kernels . GaussianKernel ( 3 )
+>>> wce_loss = WeightedCrossEntropyLoss ( k , neg_weight = 1.0 )
+>>> estimator = falkon . LogisticFalkon ( k , [ 1e-4 , 1e-4 , 1e-4 ], [ 3 , 3 , 3 ], loss = wce_loss , M = 100 )
+
+
+
+
+__call__ ( true : Tensor , pred : Tensor ) → Tensor
+Compute the weighted BCE loss between two 1-dimensional tensors
+The formula used is
+
+\[\mathrm{true} * \log(1 + e^{-\mathrm{pred}}) + w * (1 - \mathrm{true}) * \log(1 + e^{\mathrm{pred}})\]
+
+Parameters:
+
+true – The label tensor. Must be 1D, with values 0 or 1.
+pred – The prediction tensor. Must be 1D. These are “logits” so need not be scaled before
+hand.
+
+
+Returns:
+loss – The weighted BCE loss between the two input vectors.
+
+
+
+
+
+
+ddf ( true : Tensor , pred : Tensor ) → Tensor
+Compute the second derivative of the weighted BCE loss with respect to pred
+The formula used is
+
+\[\dfrac{-(\mathrm{true} * (w - 1) - w) * e^{\mathrm{pred}}}{(e^{\mathrm{pred}} + 1)^2}\]
+
+Parameters:
+
+
+Returns:
+dd_loss – The second derivative of the weighted BCE loss between the two input vectors.
+
+
+
+
+
+
+df ( true : Tensor , pred : Tensor ) → Tensor
+Compute the derivative of the weighted BCE loss with respect to pred
+The formula used is
+
+\[\dfrac{-(w * \mathrm{true} - w) * e^{\mathrm{pred}} - \mathrm{true}}{e^{\mathrm{pred}} + 1}\]
+
+Parameters:
+
+
+Returns:
+d_loss – The derivative of the weighted BCE loss between the two input vectors.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/api_reference/hopt.html b/api_reference/hopt.html
new file mode 100644
index 00000000..8d9c0563
--- /dev/null
+++ b/api_reference/hopt.html
@@ -0,0 +1,463 @@
+
+
+
+
+
+
+
+
+
falkon.hopt — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+falkon.hopt
+
+Objectives
+
+
+class falkon.hopt.objectives.objectives. HyperoptObjective ( kernel : DiffKernel , centers_init : Tensor , penalty_init : Tensor , opt_centers : bool , opt_penalty : bool , centers_transform : Transform | None , pen_transform : Transform | None )
+
+
+
+Nystrom Complexity Regularization
+
+
+class falkon.hopt.objectives. NystromCompReg ( kernel : DiffKernel , centers_init : Tensor , penalty_init : Tensor , opt_centers : bool , opt_penalty : bool , centers_transform : Transform | None = None , pen_transform : Transform | None = None )
+
+
+
+
+Stochastic Nystrom Computational Regularization
+
+
+class falkon.hopt.objectives. StochasticNystromCompReg ( kernel : DiffKernel , centers_init : Tensor , penalty_init : Tensor , opt_centers : bool , opt_penalty : bool , flk_opt : FalkonOptions , flk_maxiter : int = 10 , num_trace_est : int = 20 , centers_transform : Transform | None = None , pen_transform : Transform | None = None )
+
+
+
+
+Complexity Regularization
+
+
+class falkon.hopt.objectives. CompReg ( kernel : DiffKernel , centers_init : Tensor , penalty_init : Tensor , opt_centers : bool , opt_penalty : bool , centers_transform : Transform | None = None , pen_transform : Transform | None = None )
+
+
+
+
+Generalized Cross Validation
+
+
+class falkon.hopt.objectives. GCV ( kernel : DiffKernel , centers_init : Tensor , penalty_init : Tensor , opt_centers : bool , opt_penalty : bool , centers_transform : Transform | None = None , pen_transform : Transform | None = None )
+GCV objective is
+
+\[\dfrac{\dfrac{1}{n} \lVert (I - \widetilde{K}_\lambda \widetilde{K}) Y \rVert^2}
+ {\frac{1}{n} \mathrm{Tr}(I - \widetilde{K}_\lambda \widetilde{K}) }\]
+We must compute the two terms denoted as the numerator and the denominator.
+Using the usual names for matrix variable substitutions (taken from gpflow code), we have that
+the numerator can be computed as
+
+\[\dfrac{1}{n} \lVert (I - A^\top \mathrm{LB}^{-\top} \mathrm{LB}^{-1} A) Y \rVert^2\]
+We compute the terms inside the norm first, from right to left using matrix-vector multiplications
+and triangular solves. Finally we compute the norm.
+The denominator is far less efficient to compute, since it requires working with m*n matrices.
+It can be expressed in terms of the same matrices as above:
+
+\[\Big( \frac{1}{n} (\mathrm{Tr}(I) - \lVert \mathrm{LB}^{-1}A \rVert_F^2 ) \Big)^2\]
+
+
+
+
+Hold Out Cross Validation
+
+
+class falkon.hopt.objectives. HoldOut ( kernel : DiffKernel , centers_init : Tensor , penalty_init : Tensor , opt_centers : bool , opt_penalty : bool , val_pct : float , per_iter_split : bool , centers_transform : Transform | None = None , pen_transform : Transform | None = None )
+
+
+
+
+Leave One Out Cross Validation
+
+
+class falkon.hopt.objectives. LOOCV ( kernel : DiffKernel , centers_init : Tensor , penalty_init : Tensor , opt_centers : bool , opt_penalty : bool , centers_transform : Transform | None = None , pen_transform : Transform | None = None )
+LOOCV objective is to minimize the PRESS error:
+
+\[\min \sum_{i=1}^n \big(e_{(i)}\big)_i^2\]
+whose components are calculated as
+
+\[\big(e_{(i)}\big)_i = (y_i - f(\alpha_{(i)})_i) = \dfrac{y_i - f(\alpha)_i}{1 - S_{ii}}\]
+where
+
+\[S = K_{nm} (\lambda K_{mm} + K_{nm}^\top K_{nm})^{-1} K_{nm}^\top\]
+So we first need to compute the N-KRR solution \(\alpha\) and then the diagonal of matrix
+\(S\) . For simplicity we use a direct solver for the N-KRR problem, although Falkon could easily
+be substituted in.
+
+
+
+
+SGPR
+
+
+class falkon.hopt.objectives. SGPR ( kernel : DiffKernel , centers_init : Tensor , penalty_init : Tensor , opt_centers : bool , opt_penalty : bool , centers_transform : Transform | None = None , pen_transform : Transform | None = None )
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/api_reference/index.html b/api_reference/index.html
new file mode 100644
index 00000000..4ed41948
--- /dev/null
+++ b/api_reference/index.html
@@ -0,0 +1,549 @@
+
+
+
+
+
+
+
+
+
API Reference — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/api_reference/kernels.html b/api_reference/kernels.html
new file mode 100644
index 00000000..0ae635b1
--- /dev/null
+++ b/api_reference/kernels.html
@@ -0,0 +1,1757 @@
+
+
+
+
+
+
+
+
+
falkon.kernels — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+falkon.kernels
+
+Kernel
+
+
+class falkon.kernels.kernel. Kernel ( name : str , opt : FalkonOptions | None )
+Abstract kernel class. Kernels should inherit from this class, overriding appropriate methods.
+To extend Falkon with new kernels, you should read the documentation of this class
+carefully, and take a look at the existing implementation of GaussianKernel
+or LinearKernel . A walk-through for implementing a custom kernel
+is available as a notebook .
+There are several abstract methods which should be implemented, depending on the kind of operations
+which are supported by the implementing kernel.
+The compute() method should compute the kernel matrix, without concerns for differentiability,
+while the compute_sparse() method is used to compute the kernel for sparse input matrices.
+
+Parameters:
+
+
+
+
+
+__call__ ( X1 : Tensor , X2 : Tensor , diag : bool = False , out : Tensor | None = None , opt : FalkonOptions | None = None , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None ) → Tensor
+Compute the kernel matrix between X1 and X2
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+diag (bool ) – Whether to compute just the diagonal of the kernel matrix, or the whole matrix.
+out (torch.Tensor or None ) – Optional tensor of shape (N x M) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with m1 .
+For example this could be a set of indices corresponding to m1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with m2 .
+For example this could be a set of indices corresponding to m2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The kernel between X1 and X2 .
+
+
+
+
+
+
+_decide_dmmv_impl ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor | None , w : Tensor | None , opt : FalkonOptions )
+Choose which dmmv function to use for this data.
+Note that dmmv functions compute double kernel-vector products (see dmmv() for
+an explanation of what they are).
+
+Parameters:
+
+X1 (torch.Tensor ) – First data matrix, of shape (N x D)
+X2 (torch.Tensor ) – Second data matrix, of shape (M x D)
+v (torch.Tensor or None ) – Vector for the matrix-vector multiplication (M x T)
+w (torch.Tensor or None ) – Vector for the matrix-vector multiplication (N x T)
+opt (FalkonOptions ) – Falkon options. Options may be specified to force GPU or CPU usage.
+
+
+Returns:
+dmmv_fn – A function which allows to perform the mmv operation.
+
+
+Notes
+This function decides based on the inputs: if the inputs are sparse, it will choose
+the sparse implementations; if CUDA is detected, it will choose the CUDA implementation;
+otherwise it will simply choose the basic CPU implementation.
+
+
+
+
+_decide_mm_impl ( X1 : Tensor , X2 : Tensor , diag : bool , opt : FalkonOptions )
+Choose which mm function to use for this data.
+Note that mm functions compute the kernel itself so KeOps may not be used .
+
+Parameters:
+
+X1 (torch.Tensor ) – First data matrix, of shape (N x D)
+X2 (torch.Tensor ) – Second data matrix, of shape (M x D)
+diag (bool ) – Whether to compute just the diagonal of the kernel matrix, or the whole matrix.
+opt (FalkonOptions ) – Falkon options. Options may be specified to force GPU or CPU usage.
+
+
+Returns:
+mm_fn – A function which allows to perform the mm operation.
+
+
+Notes
+This function decides based on the inputs: if the inputs are sparse, it will choose
+the sparse implementations; if CUDA is detected, it will choose the CUDA implementation;
+otherwise it will simply choose the basic CPU implementation.
+
+
+
+
+_decide_mmv_impl ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor , opt : FalkonOptions )
+Choose which mmv function to use for this data.
+Note that mmv functions compute the kernel-vector product
+
+Parameters:
+
+X1 (torch.Tensor ) – First data matrix, of shape (N x D)
+X2 (torch.Tensor ) – Second data matrix, of shape (M x D)
+v (torch.Tensor ) – Vector for the matrix-vector multiplication (M x T)
+opt (FalkonOptions ) – Falkon options. Options may be specified to force GPU or CPU usage.
+
+
+Returns:
+mmv_fn – A function which allows to perform the mmv operation.
+
+
+Notes
+This function decides based on the inputs: if the inputs are sparse, it will choose
+the sparse implementations; if CUDA is detected, it will choose the CUDA implementation;
+otherwise it will simply choose the basic CPU implementation.
+
+
+
+
+abstract compute ( X1 : Tensor , X2 : Tensor , out : Tensor , diag : bool , ** kwargs ) → Tensor
+Compute the kernel matrix of X1 and X2 - without regards for differentiability.
+The kernel matrix should be stored in out to ensure the correctness of allocatable
+memory computations.
+
+Parameters:
+
+X1 (torch.Tensor ) – The left matrix for computing the kernel
+X2 (torch.Tensor ) – The right matrix for computing the kernel
+out (torch.Tensor ) – The output matrix into which implementing classes should store the kernel.
+diag (bool ) – If true, X1 and X2 have the same shape, and only the diagonal of k(X1, X2)
+is to be computed and stored in out . Otherwise compute the full kernel matrix.
+kwargs – Additional keyword arguments which may be used in computing the kernel values
+
+
+Returns:
+out (torch.Tensor ) – The kernel matrix. Should use the same underlying storage as the parameter out .
+
+
+Notes
+Supporting the diag argument is optional . It’s only used within the hyper-parameter
+optimization module, so if you’re not using that you don’t need to implement
+this function for diag=True (it will always be False).
+
+
+
+
+abstract compute_sparse ( X1 : SparseTensor , X2 : SparseTensor , out : Tensor , diag : bool , ** kwargs ) → Tensor
+Compute the kernel matrix of X1 and X2 which are two sparse matrices, storing the output
+in the dense matrix out .
+
+Parameters:
+
+X1 (SparseTensor ) – The left matrix for computing the kernel
+X2 (SparseTensor ) – The right matrix for computing the kernel
+out (torch.Tensor ) – The output matrix into which implementing classes should store the kernel.
+diag (bool ) – If true, X1 and X2 have the same shape, and only the diagonal of k(X1, X2)
+is to be computed and stored in out .
+kwargs –
Additional keyword arguments which some sparse implementations might require. Currently
+the keyword arguments passed by the falkon.mmv_ops.fmmv.sparse_mmv_run_thread()
+and falkon.mmv_ops.fmm.sparse_mm_run_thread() functions are:
+
+
+
+
+Returns:
+out (torch.Tensor ) – The kernel matrix. Should use the same underlying storage as the parameter out .
+
+
+
+
+
+
+dmmv ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor | None , w : Tensor | None , out : Tensor | None = None , opt : FalkonOptions | None = None , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None )
+Compute double matrix-vector multiplications where the matrix is the current kernel.
+The general form of dmmv operations is: Kernel(X2, X1) @ (Kernel(X1, X2) @ v + w)
+where if v is None, then we simply have Kernel(X2, X1) @ w and if w is None
+we remove the additive factor.
+At least one of `w` and `v` must be provided .
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+v (torch.Tensor or None ) – A vector to compute the matrix-vector product. This may also be a matrix of shape
+(M x T), but if T is very large the operations will be much slower.
+w (torch.Tensor or None ) – A vector to compute matrix-vector products. This may also be a matrix of shape
+(N x T) but if T is very large the operations will be much slower.
+out (torch.Tensor or None ) – Optional tensor of shape (M x T) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with X1 .
+For example this could be a set of indices corresponding to X1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with X2 .
+For example this could be a set of indices corresponding to X2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The (M x T) output.
+
+
+Examples
+>>> import falkon , torch
+>>> k = falkon . kernels . GaussianKernel ( sigma = 2 ) # You can substitute the Gaussian kernel by any other.
+>>> X1 = torch . randn ( 100 , 3 ) # N is 100, D is 3
+>>> X2 = torch . randn ( 150 , 3 ) # M is 150
+>>> v = torch . randn ( 150 , 1 )
+>>> w = torch . randn ( 100 , 1 )
+>>> out = k . dmmv ( X1 , X2 , v , w , out = None )
+>>> out . shape
+torch.Size([150, 1])
+
+
+
+
+
+
+Compute the amount of extra memory which will be needed when computing this kernel.
+Often kernel computation needs some extra memory allocations. To avoid using too large
+block-sizes which may lead to OOM errors, you should declare any such extra allocations
+for your kernel here.
+Indicate extra allocations as coefficients on the required dimensions. For example,
+if computing a kernel needs to re-allocate the data-matrix (which is of size n * d),
+the return dictionary will be: {‘nd’: 1} . Other possible coefficients are on d , n , m
+which are respectively the data-dimension, the number of data-points in the first data
+matrix and the number of data-points in the second matrix. Pairwise combinations of the
+three dimensions are possible (i.e. nd , nm , md ), and a special key ‘0’ can be
+used to specify a base memory needed independently of data dimensions.
+Make sure to specify the dictionary keys as is written here since they will not be
+recognized otherwise.
+
+Parameters:
+
+is_differentiable (bool ) –
+is_sparse (bool ) –
+dtype (torch.dtype or np.dtype ) –
+density1 (float or None ) –
+density2 (float or None ) –
+
+
+Returns:
+extra_allocs (dictionary ) – A dictionary from strings indicating on which dimensions the extra-allocation is
+needed (allowed strings: ‘n’, ‘m’, ‘d’, ‘nm’, ‘nd’, ‘md’, ‘0’ ) to floating-point
+numbers indicating how many extra-allocations are needed.
+
+
+
+
+
+
+mmv ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor , out : Tensor | None = None , opt : FalkonOptions | None = None , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None )
+Compute matrix-vector multiplications where the matrix is the current kernel.
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+v (torch.Tensor ) – A vector to compute the matrix-vector product. This may also be a matrix of shape
+(M x T), but if T is very large the operations will be much slower.
+out (torch.Tensor or None ) – Optional tensor of shape (N x T) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with m1 .
+For example this could be a set of indices corresponding to m1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with m2 .
+For example this could be a set of indices corresponding to m2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The (N x T) output.
+
+
+Examples
+>>> import falkon , torch
+>>> k = falkon . kernels . GaussianKernel ( sigma = 2 ) # You can substitute the Gaussian kernel by any other.
+>>> X1 = torch . randn ( 100 , 3 )
+>>> X2 = torch . randn ( 150 , 3 )
+>>> v = torch . randn ( 150 , 1 )
+>>> out = k . mmv ( X1 , X2 , v , out = None )
+>>> out . shape
+torch.Size([100, 1])
+
+
+
+
+
+
+
+
+DiffKernel
+
+
+class falkon.kernels.diff_kernel. DiffKernel ( name , options , core_fn , ** kernel_params )
+Abstract class for differentiable kernels.
+This class should be extended instead of Kernel whenever designing
+a custom kernel to be used with automatic hyperparameter optimization (see the hopt
+module).
+Subclasses should implement the detach() method to return a new instance of the kernel,
+with its parameters detached from the computation graph.
+The compute_diff() method should be overridden, unless the core_fn parameter is
+passed to the constructor.
+Hyperparameters to the concrete kernel instance (for example the length-scale of the Gaussian
+kernel) should be passed to the constructor of this class, in order to be registered
+as parameters of the computation graph. Even non-differentiable parameters should be provided
+as keywords (also non tensor arguments).
+
+Parameters:
+
+name – A short name for the kernel (e.g. “Gaussian”)
+options – Base set of options to be used for operations involving this kernel.
+core_fn – Optional function which can be used to compute a kernel matrix.
+The signature of the function should be:
+core_fn(X1, X2, out, diag, **kernel_parameters)
+where X1 and X2 are the input matrices, out is the output matrix (it will
+be None when called from compute_diff() ), diag is a flag indicating
+that only the diagonal of the kernel matrix is to be computed, and **kernel_parameters
+includes all additional parameters belonging to the kernel (which are passed to the
+constructor of DiffKernel ).
+kernel_params – All parameters (differentiable and non-differentiable) to be used for this kernel.
+The values given are used to initialize the actual parameters - which will be copied in
+the constructor.
+
+
+
+
+
+compute_diff ( X1 : Tensor , X2 : Tensor , diag : bool , ** kwargs )
+Compute the kernel matrix of X1 and X2 . The output should be differentiable with
+respect to X1 , X2 , and all kernel parameters returned by the diff_params() method.
+
+Parameters:
+
+X1 (torch.Tensor ) – The left matrix for computing the kernel
+X2 (torch.Tensor ) – The right matrix for computing the kernel
+diag (bool ) – If true, X1 and X2 have the same shape, and only the diagonal of k(X1, X2)
+is to be computed and stored in out . Otherwise compute the full kernel matrix.
+
+
+Returns:
+out (torch.Tensor ) – The constructed kernel matrix.
+
+
+
+
+
+
+abstract detach ( ) → Kernel
+Detaches all differentiable parameters of the kernel from the computation graph.
+
+Returns:
+k – A new instance of the kernel sharing the same parameters, but detached from the
+computation graph.
+
+
+
+
+
+
+property diff_params : Dict [ str , Tensor ]
+A dictionary mapping parameter names to their values for all differentiable parameters
+of the kernel.
+
+Returns:
+params – A dictionary mapping parameter names to their values
+
+
+
+
+
+
+
+
+KeopsKernelMixin
+
+
+class falkon.kernels.keops_helpers. KeopsKernelMixin ( name : str , opt : FalkonOptions | None )
+Abstract class for kernels which enables KeOps acceleration.
+This class should be extended when KeOps-accelerated kernel-vector products are required.
+Subclasses should implement the keops_mmv_impl() method by defining an appropriate
+KeOps formula, and calling into KeOps for kernel vector product calculation (the helper method
+keops_mmv() can be used to call into KeOps).
+For help in implementing a subclass, check the existing implementations
+(e.g. GaussianKernel ), and the
+Custom Kernels notebook.
+
+
+keops_mmv ( X1 : Tensor , X2 : Tensor , v : Tensor , out : Tensor | None , formula : str , aliases : List [ str ] , other_vars : List [ Tensor ] , opt : FalkonOptions )
+Helper method to call into KeOps for kernel-vector products
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+v (torch.Tensor ) – A vector to compute the matrix-vector product. This may also be a matrix of shape
+(M x T), but if T is very large the operations will be much slower.
+out (torch.Tensor or None ) – Optional tensor of shape (N x T) to hold the output. If not provided it will
+be created.
+formula (str ) – The KeOps formula
+aliases – Aliases referencing names in the formula with actual KeOps variables
+other_vars – Kernel parameters to be used in the formula, other than X1 , X2 and v .
+opt – Options to be used for computing the operation. Useful are the memory size options,
+CUDA options and KeOps options.
+
+
+Returns:
+out – The computed kernel matrix between X1 and X2 , multiplied by vector v .
+
+
+
+
+
+
+abstract keops_mmv_impl ( X1 , X2 , v , kernel , out , opt : FalkonOptions , kwargs_m1 : Dict [ str , Tensor ] | None , kwargs_m2 : Dict [ str , Tensor ] | None )
+Implementation of the KeOps formula to compute a kernel-vector product.
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+v (torch.Tensor ) – A vector to compute the matrix-vector product. This may also be a matrix of shape
+(M x T), but if T is very large the operations will be much slower.
+kernel (falkon.kernels.kernel.Kernel ) – Instance of this class. This is equal to self and can be ignored.
+out (torch.Tensor or None ) – Optional tensor of shape (N x T) to hold the output. If not provided it will
+be created.
+opt (FalkonOptions ) – Options to be used for computing the operation. Useful are the memory size options,
+CUDA options and KeOps options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with X1 .
+For example this could be a set of indices corresponding to X1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with X2 .
+For example this could be a set of indices corresponding to X2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out – The computed kernel matrix between X1 and X2 , multiplied by vector v .
+
+
+
+
+
+
+
+
+Radial kernels
+
+Gaussian kernel
+
+
+class falkon.kernels. GaussianKernel ( sigma : float | Tensor , opt : FalkonOptions | None = None )
+Class for computing the Gaussian kernel and related kernel-vector products
+The Gaussian kernel is one of the most common and effective kernel embeddings
+since it is infinite dimensional, and governed by a single parameter. The kernel length-scale
+determines the width of the Gaussian distribution which is placed on top of each point.
+A larger sigma corresponds to a wide Gaussian, so that the relative influence of far away
+points will be high for computing the kernel at a given datum.
+On the opposite side of the spectrum, a small sigma means that only nearby points will
+influence the kernel.
+
+Parameters:
+
+sigma – The length-scale of the kernel.
+This can be a scalar, and then it corresponds to the standard deviation
+of the Gaussian distribution from which the kernel is derived.
+If sigma is a vector of size d (where d is the dimensionality of the data), it is
+interpreted as the diagonal standard deviation of the Gaussian distribution.
+It can also be a matrix of size d*d , in which case sigma will be the precision
+matrix (inverse covariance).
+opt – Additional options to be forwarded to the matrix-vector multiplication
+routines.
+
+
+
+Examples
+Creating a Gaussian kernel with a single length-scale. Operations on this kernel will not
+use KeOps.
+>>> K = GaussianKernel ( sigma = 3.0 , opt = FalkonOptions ( keops_active = "no" ))
+
+
+Creating a Gaussian kernel with a different length-scale per dimension
+>>> K = GaussianKernel ( sigma = torch . tensor ([ 1.0 , 3.5 , 7.0 ]))
+
+
+Creating a Gaussian kernel object with full covariance matrix (randomly chosen)
+>>> mat = torch . randn ( 3 , 3 , dtype = torch . float64 )
+>>> sym_mat = mat @ mat . T
+>>> K = GaussianKernel ( sigma = sym_mat )
+>>> K
+GaussianKernel(sigma=tensor([[ 2.0909, 0.0253, -0.2490],
+ [ 0.0253, 0.3399, -0.5158],
+ [-0.2490, -0.5158, 4.4922]], dtype=torch.float64)) #random
+
+
+Notes
+The Gaussian kernel with a single length-scale follows
+
+\[k(x, x') = \exp\Big( -\dfrac{\lVert x - x' \rVert^2}{2\sigma^2} \Big)\]
+When the length-scales are specified as a matrix, the RBF kernel is determined by
+
+\[k(x, x') = \exp\Big( -\dfrac{1}{2} (x - x')^\top \Sigma (x - x') \Big)\]
+In both cases, the actual computation follows a different path, working on the expanded
+norm.
+
+
+__call__ ( X1 : Tensor , X2 : Tensor , diag : bool = False , out : Tensor | None = None , opt : FalkonOptions | None = None , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None ) → Tensor
+Compute the kernel matrix between X1 and X2
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+diag (bool ) – Whether to compute just the diagonal of the kernel matrix, or the whole matrix.
+out (torch.Tensor or None ) – Optional tensor of shape (N x M) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with m1 .
+For example this could be a set of indices corresponding to m1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with m2 .
+For example this could be a set of indices corresponding to m2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The kernel between X1 and X2 .
+
+
+
+
+
+
+dmmv ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor | None , w : Tensor | None , out : Tensor | None = None , opt : FalkonOptions | None = None )
+Compute double matrix-vector multiplications where the matrix is the current kernel.
+The general form of dmmv operations is: Kernel(X2, X1) @ (Kernel(X1, X2) @ v + w)
+where if v is None, then we simply have Kernel(X2, X1) @ w and if w is None
+we remove the additive factor.
+At least one of `w` and `v` must be provided .
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+v (torch.Tensor or None ) – A vector to compute the matrix-vector product. This may also be a matrix of shape
+(M x T), but if T is very large the operations will be much slower.
+w (torch.Tensor or None ) – A vector to compute matrix-vector products. This may also be a matrix of shape
+(N x T) but if T is very large the operations will be much slower.
+out (torch.Tensor or None ) – Optional tensor of shape (M x T) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with X1 .
+For example this could be a set of indices corresponding to X1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with X2 .
+For example this could be a set of indices corresponding to X2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The (M x T) output.
+
+
+Examples
+>>> import falkon , torch
+>>> k = falkon . kernels . GaussianKernel ( sigma = 2 ) # You can substitute the Gaussian kernel by any other.
+>>> X1 = torch . randn ( 100 , 3 ) # N is 100, D is 3
+>>> X2 = torch . randn ( 150 , 3 ) # M is 150
+>>> v = torch . randn ( 150 , 1 )
+>>> w = torch . randn ( 100 , 1 )
+>>> out = k . dmmv ( X1 , X2 , v , w , out = None )
+>>> out . shape
+torch.Size([150, 1])
+
+
+
+
+
+
+mmv ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor , out : Tensor | None = None , opt : FalkonOptions | None = None , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None )
+Compute matrix-vector multiplications where the matrix is the current kernel.
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+v (torch.Tensor ) – A vector to compute the matrix-vector product. This may also be a matrix of shape
+(M x T), but if T is very large the operations will be much slower.
+out (torch.Tensor or None ) – Optional tensor of shape (N x T) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with m1 .
+For example this could be a set of indices corresponding to m1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with m2 .
+For example this could be a set of indices corresponding to m2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The (N x T) output.
+
+
+Examples
+>>> import falkon , torch
+>>> k = falkon . kernels . GaussianKernel ( sigma = 2 ) # You can substitute the Gaussian kernel by any other.
+>>> X1 = torch . randn ( 100 , 3 )
+>>> X2 = torch . randn ( 150 , 3 )
+>>> v = torch . randn ( 150 , 1 )
+>>> out = k . mmv ( X1 , X2 , v , out = None )
+>>> out . shape
+torch.Size([100, 1])
+
+
+
+
+
+
+
+
+Laplacian kernel
+
+
+class falkon.kernels. LaplacianKernel ( sigma : float | Tensor , opt : FalkonOptions | None = None )
+Class for computing the Laplacian kernel, and related kernel-vector products.
+The Laplacian kernel is similar to the Gaussian kernel, but less sensitive to changes
+in the parameter sigma .
+
+Parameters:
+sigma – The length-scale of the Laplacian kernel
+
+
+Notes
+The Laplacian kernel is determined by the following formula
+
+\[k(x, x') = \exp\Big( -\frac{\lVert x - x' \rVert}{\sigma} \Big)\]
+
+
+__call__ ( X1 : Tensor , X2 : Tensor , diag : bool = False , out : Tensor | None = None , opt : FalkonOptions | None = None , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None ) → Tensor
+Compute the kernel matrix between X1 and X2
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+diag (bool ) – Whether to compute just the diagonal of the kernel matrix, or the whole matrix.
+out (torch.Tensor or None ) – Optional tensor of shape (N x M) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with m1 .
+For example this could be a set of indices corresponding to m1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with m2 .
+For example this could be a set of indices corresponding to m2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The kernel between X1 and X2 .
+
+
+
+
+
+
+dmmv ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor | None , w : Tensor | None , out : Tensor | None = None , opt : FalkonOptions | None = None )
+Compute double matrix-vector multiplications where the matrix is the current kernel.
+The general form of dmmv operations is: Kernel(X2, X1) @ (Kernel(X1, X2) @ v + w)
+where if v is None, then we simply have Kernel(X2, X1) @ w and if w is None
+we remove the additive factor.
+At least one of `w` and `v` must be provided .
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+v (torch.Tensor or None ) – A vector to compute the matrix-vector product. This may also be a matrix of shape
+(M x T), but if T is very large the operations will be much slower.
+w (torch.Tensor or None ) – A vector to compute matrix-vector products. This may also be a matrix of shape
+(N x T) but if T is very large the operations will be much slower.
+out (torch.Tensor or None ) – Optional tensor of shape (M x T) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with X1 .
+For example this could be a set of indices corresponding to X1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with X2 .
+For example this could be a set of indices corresponding to X2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The (M x T) output.
+
+
+Examples
+>>> import falkon , torch
+>>> k = falkon . kernels . GaussianKernel ( sigma = 2 ) # You can substitute the Gaussian kernel by any other.
+>>> X1 = torch . randn ( 100 , 3 ) # N is 100, D is 3
+>>> X2 = torch . randn ( 150 , 3 ) # M is 150
+>>> v = torch . randn ( 150 , 1 )
+>>> w = torch . randn ( 100 , 1 )
+>>> out = k . dmmv ( X1 , X2 , v , w , out = None )
+>>> out . shape
+torch.Size([150, 1])
+
+
+
+
+
+
+mmv ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor , out : Tensor | None = None , opt : FalkonOptions | None = None , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None )
+Compute matrix-vector multiplications where the matrix is the current kernel.
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+v (torch.Tensor ) – A vector to compute the matrix-vector product. This may also be a matrix of shape
+(M x T), but if T is very large the operations will be much slower.
+out (torch.Tensor or None ) – Optional tensor of shape (N x T) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with m1 .
+For example this could be a set of indices corresponding to m1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with m2 .
+For example this could be a set of indices corresponding to m2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The (N x T) output.
+
+
+Examples
+>>> import falkon , torch
+>>> k = falkon . kernels . GaussianKernel ( sigma = 2 ) # You can substitute the Gaussian kernel by any other.
+>>> X1 = torch . randn ( 100 , 3 )
+>>> X2 = torch . randn ( 150 , 3 )
+>>> v = torch . randn ( 150 , 1 )
+>>> out = k . mmv ( X1 , X2 , v , out = None )
+>>> out . shape
+torch.Size([100, 1])
+
+
+
+
+
+
+
+
+Matern kernel
+
+
+class falkon.kernels. MaternKernel ( sigma : float | Tensor , nu : float | Tensor , opt : FalkonOptions | None = None )
+Class for computing the Matern kernel, and related kernel-vector products.
+The Matern kernels define a generic class of kernel functions which includes the
+Laplacian and Gaussian kernels. The class is parametrized by ‘nu’. When nu = 0.5
+this kernel is equivalent to the Laplacian kernel, when nu = float('inf') , the
+Matern kernel is equivalent to the Gaussian kernel.
+This class implements the Matern kernel only for the values of nu which have a closed
+form solution, which are 0.5, 1.5, 2.5, and infinity.
+
+Parameters:
+
+sigma – The length-scale of the Matern kernel. The length-scale can be either a scalar
+or a vector. Matrix-valued length-scales are not allowed for the Matern kernel.
+nu – The parameter of the Matern kernel. It should be one of 0.5 , 1.5 , 2.5 or
+inf .
+
+
+
+Notes
+While for nu = float(‘inf’) this kernel is equivalent to the GaussianKernel ,
+this implementation is more general. Using the GaussianKernel directly
+may be computationally more efficient.
+
+
+__call__ ( X1 : Tensor , X2 : Tensor , diag : bool = False , out : Tensor | None = None , opt : FalkonOptions | None = None , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None ) → Tensor
+Compute the kernel matrix between X1 and X2
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+diag (bool ) – Whether to compute just the diagonal of the kernel matrix, or the whole matrix.
+out (torch.Tensor or None ) – Optional tensor of shape (N x M) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with m1 .
+For example this could be a set of indices corresponding to m1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with m2 .
+For example this could be a set of indices corresponding to m2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The kernel between X1 and X2 .
+
+
+
+
+
+
+dmmv ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor | None , w : Tensor | None , out : Tensor | None = None , opt : FalkonOptions | None = None )
+Compute double matrix-vector multiplications where the matrix is the current kernel.
+The general form of dmmv operations is: Kernel(X2, X1) @ (Kernel(X1, X2) @ v + w)
+where if v is None, then we simply have Kernel(X2, X1) @ w and if w is None
+we remove the additive factor.
+At least one of `w` and `v` must be provided .
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+v (torch.Tensor or None ) – A vector to compute the matrix-vector product. This may also be a matrix of shape
+(M x T), but if T is very large the operations will be much slower.
+w (torch.Tensor or None ) – A vector to compute matrix-vector products. This may also be a matrix of shape
+(N x T) but if T is very large the operations will be much slower.
+out (torch.Tensor or None ) – Optional tensor of shape (M x T) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with X1 .
+For example this could be a set of indices corresponding to X1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with X2 .
+For example this could be a set of indices corresponding to X2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The (M x T) output.
+
+
+Examples
+>>> import falkon , torch
+>>> k = falkon . kernels . GaussianKernel ( sigma = 2 ) # You can substitute the Gaussian kernel by any other.
+>>> X1 = torch . randn ( 100 , 3 ) # N is 100, D is 3
+>>> X2 = torch . randn ( 150 , 3 ) # M is 150
+>>> v = torch . randn ( 150 , 1 )
+>>> w = torch . randn ( 100 , 1 )
+>>> out = k . dmmv ( X1 , X2 , v , w , out = None )
+>>> out . shape
+torch.Size([150, 1])
+
+
+
+
+
+
+mmv ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor , out : Tensor | None = None , opt : FalkonOptions | None = None , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None )
+Compute matrix-vector multiplications where the matrix is the current kernel.
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+v (torch.Tensor ) – A vector to compute the matrix-vector product. This may also be a matrix of shape
+(M x T), but if T is very large the operations will be much slower.
+out (torch.Tensor or None ) – Optional tensor of shape (N x T) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with m1 .
+For example this could be a set of indices corresponding to m1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with m2 .
+For example this could be a set of indices corresponding to m2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The (N x T) output.
+
+
+Examples
+>>> import falkon , torch
+>>> k = falkon . kernels . GaussianKernel ( sigma = 2 ) # You can substitute the Gaussian kernel by any other.
+>>> X1 = torch . randn ( 100 , 3 )
+>>> X2 = torch . randn ( 150 , 3 )
+>>> v = torch . randn ( 150 , 1 )
+>>> out = k . mmv ( X1 , X2 , v , out = None )
+>>> out . shape
+torch.Size([100, 1])
+
+
+
+
+
+
+
+
+
+Dot-Product kernels
+
+Polynomial kernel
+
+
+class falkon.kernels. PolynomialKernel ( beta : float | Tensor , gamma : float | Tensor , degree : float | Tensor , opt : FalkonOptions | None = None )
+Polynomial kernel with multiplicative and additive constants.
+Follows the formula
+
+\[(\gamma * X_1^\top X_2 + \beta)^{\mathrm{degree}}\]
+Where all operations apart from the matrix multiplication are taken element-wise.
+
+Parameters:
+
+beta (float-like ) – Additive constant
+gamma (float-like ) – Multiplicative constant
+degree (float-like ) – Power of the polynomial kernel
+opt (Optional [ FalkonOptions ] ) – Options which will be used in downstream kernel operations.
+
+
+
+
+
+__call__ ( X1 : Tensor , X2 : Tensor , diag : bool = False , out : Tensor | None = None , opt : FalkonOptions | None = None , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None ) → Tensor
+Compute the kernel matrix between X1 and X2
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+diag (bool ) – Whether to compute just the diagonal of the kernel matrix, or the whole matrix.
+out (torch.Tensor or None ) – Optional tensor of shape (N x M) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with m1 .
+For example this could be a set of indices corresponding to m1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with m2 .
+For example this could be a set of indices corresponding to m2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The kernel between X1 and X2 .
+
+
+
+
+
+
+dmmv ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor | None , w : Tensor | None , out : Tensor | None = None , opt : FalkonOptions | None = None )
+Compute double matrix-vector multiplications where the matrix is the current kernel.
+The general form of dmmv operations is: Kernel(X2, X1) @ (Kernel(X1, X2) @ v + w)
+where if v is None, then we simply have Kernel(X2, X1) @ w and if w is None
+we remove the additive factor.
+At least one of `w` and `v` must be provided .
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+v (torch.Tensor or None ) – A vector to compute the matrix-vector product. This may also be a matrix of shape
+(M x T), but if T is very large the operations will be much slower.
+w (torch.Tensor or None ) – A vector to compute matrix-vector products. This may also be a matrix of shape
+(N x T) but if T is very large the operations will be much slower.
+out (torch.Tensor or None ) – Optional tensor of shape (M x T) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with X1 .
+For example this could be a set of indices corresponding to X1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with X2 .
+For example this could be a set of indices corresponding to X2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The (M x T) output.
+
+
+Examples
+>>> import falkon , torch
+>>> k = falkon . kernels . GaussianKernel ( sigma = 2 ) # You can substitute the Gaussian kernel by any other.
+>>> X1 = torch . randn ( 100 , 3 ) # N is 100, D is 3
+>>> X2 = torch . randn ( 150 , 3 ) # M is 150
+>>> v = torch . randn ( 150 , 1 )
+>>> w = torch . randn ( 100 , 1 )
+>>> out = k . dmmv ( X1 , X2 , v , w , out = None )
+>>> out . shape
+torch.Size([150, 1])
+
+
+
+
+
+
+mmv ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor , out : Tensor | None = None , opt : FalkonOptions | None = None , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None )
+Compute matrix-vector multiplications where the matrix is the current kernel.
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+v (torch.Tensor ) – A vector to compute the matrix-vector product. This may also be a matrix of shape
+(M x T), but if T is very large the operations will be much slower.
+out (torch.Tensor or None ) – Optional tensor of shape (N x T) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with m1 .
+For example this could be a set of indices corresponding to m1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with m2 .
+For example this could be a set of indices corresponding to m2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The (N x T) output.
+
+
+Examples
+>>> import falkon , torch
+>>> k = falkon . kernels . GaussianKernel ( sigma = 2 ) # You can substitute the Gaussian kernel by any other.
+>>> X1 = torch . randn ( 100 , 3 )
+>>> X2 = torch . randn ( 150 , 3 )
+>>> v = torch . randn ( 150 , 1 )
+>>> out = k . mmv ( X1 , X2 , v , out = None )
+>>> out . shape
+torch.Size([100, 1])
+
+
+
+
+
+
+
+
+Linear kernel
+
+
+class falkon.kernels. LinearKernel ( beta : float | Tensor = 0.0 , gamma : float | Tensor = 1.0 , opt : FalkonOptions | None = None )
+Linear Kernel with optional scaling and translation parameters.
+The kernel implemented here is the covariance function in the original
+input space (i.e. X @ X.T ) with optional parameters to translate
+and scale the kernel: beta + gamma * X @ X.T
+
+Parameters:
+
+beta (float-like ) – Additive constant for the kernel, default: 0.0
+gamma (float-like ) – Multiplicative constant for the kernel. The kernel will
+be multiplied by gamma. Default: 1.0
+opt (Optional [ FalkonOptions ] ) – Options which will be used in downstream kernel operations.
+
+
+
+Examples
+>>> k = LinearKernel ( beta = 0.0 , gamma = 2.0 )
+>>> X = torch . randn ( 100 , 3 ) # 100 samples in 3 dimensions
+>>> kernel_matrix = k ( X , X )
+>>> torch . testing . assert_close ( kernel_matrix , X @ X . T * 2 )
+
+
+
+
+__call__ ( X1 : Tensor , X2 : Tensor , diag : bool = False , out : Tensor | None = None , opt : FalkonOptions | None = None , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None ) → Tensor
+Compute the kernel matrix between X1 and X2
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+diag (bool ) – Whether to compute just the diagonal of the kernel matrix, or the whole matrix.
+out (torch.Tensor or None ) – Optional tensor of shape (N x M) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with m1 .
+For example this could be a set of indices corresponding to m1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with m2 .
+For example this could be a set of indices corresponding to m2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The kernel between X1 and X2 .
+
+
+
+
+
+
+dmmv ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor | None , w : Tensor | None , out : Tensor | None = None , opt : FalkonOptions | None = None )
+Compute double matrix-vector multiplications where the matrix is the current kernel.
+The general form of dmmv operations is: Kernel(X2, X1) @ (Kernel(X1, X2) @ v + w)
+where if v is None, then we simply have Kernel(X2, X1) @ w and if w is None
+we remove the additive factor.
+At least one of `w` and `v` must be provided .
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+v (torch.Tensor or None ) – A vector to compute the matrix-vector product. This may also be a matrix of shape
+(M x T), but if T is very large the operations will be much slower.
+w (torch.Tensor or None ) – A vector to compute matrix-vector products. This may also be a matrix of shape
+(N x T) but if T is very large the operations will be much slower.
+out (torch.Tensor or None ) – Optional tensor of shape (M x T) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with X1 .
+For example this could be a set of indices corresponding to X1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with X2 .
+For example this could be a set of indices corresponding to X2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The (M x T) output.
+
+
+Examples
+>>> import falkon , torch
+>>> k = falkon . kernels . GaussianKernel ( sigma = 2 ) # You can substitute the Gaussian kernel by any other.
+>>> X1 = torch . randn ( 100 , 3 ) # N is 100, D is 3
+>>> X2 = torch . randn ( 150 , 3 ) # M is 150
+>>> v = torch . randn ( 150 , 1 )
+>>> w = torch . randn ( 100 , 1 )
+>>> out = k . dmmv ( X1 , X2 , v , w , out = None )
+>>> out . shape
+torch.Size([150, 1])
+
+
+
+
+
+
+mmv ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor , out : Tensor | None = None , opt : FalkonOptions | None = None , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None )
+Compute matrix-vector multiplications where the matrix is the current kernel.
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+v (torch.Tensor ) – A vector to compute the matrix-vector product. This may also be a matrix of shape
+(M x T), but if T is very large the operations will be much slower.
+out (torch.Tensor or None ) – Optional tensor of shape (N x T) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with m1 .
+For example this could be a set of indices corresponding to m1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with m2 .
+For example this could be a set of indices corresponding to m2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The (N x T) output.
+
+
+Examples
+>>> import falkon , torch
+>>> k = falkon . kernels . GaussianKernel ( sigma = 2 ) # You can substitute the Gaussian kernel by any other.
+>>> X1 = torch . randn ( 100 , 3 )
+>>> X2 = torch . randn ( 150 , 3 )
+>>> v = torch . randn ( 150 , 1 )
+>>> out = k . mmv ( X1 , X2 , v , out = None )
+>>> out . shape
+torch.Size([100, 1])
+
+
+
+
+
+
+
+
+Sigmoid kernel
+
+
+class falkon.kernels. SigmoidKernel ( beta : float | Tensor , gamma : float | Tensor , opt : FalkonOptions | None = None )
+Sigmoid (or hyperbolic tangent) kernel function, with additive and multiplicative constants.
+Follows the formula
+
+\[k(x, y) = \tanh(\gamma x^\top y + \beta)\]
+
+Parameters:
+
+beta (float-like ) – Additive constant
+gamma (float-like ) – Multiplicative constant
+opt (Optional [ FalkonOptions ] ) – Options which will be used in downstream kernel operations.
+
+
+
+
+
+__call__ ( X1 : Tensor , X2 : Tensor , diag : bool = False , out : Tensor | None = None , opt : FalkonOptions | None = None , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None ) → Tensor
+Compute the kernel matrix between X1 and X2
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+diag (bool ) – Whether to compute just the diagonal of the kernel matrix, or the whole matrix.
+out (torch.Tensor or None ) – Optional tensor of shape (N x M) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with m1 .
+For example this could be a set of indices corresponding to m1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with m2 .
+For example this could be a set of indices corresponding to m2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The kernel between X1 and X2 .
+
+
+
+
+
+
+dmmv ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor | None , w : Tensor | None , out : Tensor | None = None , opt : FalkonOptions | None = None )
+Compute double matrix-vector multiplications where the matrix is the current kernel.
+The general form of dmmv operations is: Kernel(X2, X1) @ (Kernel(X1, X2) @ v + w)
+where if v is None, then we simply have Kernel(X2, X1) @ w and if w is None
+we remove the additive factor.
+At least one of `w` and `v` must be provided .
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+v (torch.Tensor or None ) – A vector to compute the matrix-vector product. This may also be a matrix of shape
+(M x T), but if T is very large the operations will be much slower.
+w (torch.Tensor or None ) – A vector to compute matrix-vector products. This may also be a matrix of shape
+(N x T) but if T is very large the operations will be much slower.
+out (torch.Tensor or None ) – Optional tensor of shape (M x T) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with X1 .
+For example this could be a set of indices corresponding to X1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with X2 .
+For example this could be a set of indices corresponding to X2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The (M x T) output.
+
+
+Examples
+>>> import falkon , torch
+>>> k = falkon . kernels . GaussianKernel ( sigma = 2 ) # You can substitute the Gaussian kernel by any other.
+>>> X1 = torch . randn ( 100 , 3 ) # N is 100, D is 3
+>>> X2 = torch . randn ( 150 , 3 ) # M is 150
+>>> v = torch . randn ( 150 , 1 )
+>>> w = torch . randn ( 100 , 1 )
+>>> out = k . dmmv ( X1 , X2 , v , w , out = None )
+>>> out . shape
+torch.Size([150, 1])
+
+
+
+
+
+
+mmv ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor , out : Tensor | None = None , opt : FalkonOptions | None = None , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None )
+Compute matrix-vector multiplications where the matrix is the current kernel.
+
+Parameters:
+
+X1 (torch.Tensor ) – The first data-matrix for computing the kernel. Of shape (N x D):
+N samples in D dimensions.
+X2 (torch.Tensor ) – The second data-matrix for computing the kernel. Of shape (M x D):
+M samples in D dimensions. Set X2 == X1 to compute a symmetric kernel.
+v (torch.Tensor ) – A vector to compute the matrix-vector product. This may also be a matrix of shape
+(M x T), but if T is very large the operations will be much slower.
+out (torch.Tensor or None ) – Optional tensor of shape (N x T) to hold the output. If not provided it will
+be created.
+opt (Optional [ FalkonOptions ] ) – Options to be used for computing the operation. Useful are the memory size options
+and CUDA options.
+kwargs_m1 – Keyword arguments containing tensors which should be split along with m1 .
+For example this could be a set of indices corresponding to m1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with m2 .
+For example this could be a set of indices corresponding to m2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The (N x T) output.
+
+
+Examples
+>>> import falkon , torch
+>>> k = falkon . kernels . GaussianKernel ( sigma = 2 ) # You can substitute the Gaussian kernel by any other.
+>>> X1 = torch . randn ( 100 , 3 )
+>>> X2 = torch . randn ( 150 , 3 )
+>>> v = torch . randn ( 150 , 1 )
+>>> out = k . mmv ( X1 , X2 , v , out = None )
+>>> out . shape
+torch.Size([100, 1])
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/api_reference/mmv_ops.html b/api_reference/mmv_ops.html
new file mode 100644
index 00000000..edc4236f
--- /dev/null
+++ b/api_reference/mmv_ops.html
@@ -0,0 +1,538 @@
+
+
+
+
+
+
+
+
+
falkon.mmv_ops — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+falkon.mmv_ops
+The algorithms to compute kernels and kernel-vector products blockwise on GPUs and CPU. The algorithms in this module
+are kernel agnostic. Refer to falkon.kernels for the actual kernel implementations.
+The KeOps wrapper only supports the mmv operation (kernel-vector products). The matrix-multiplication implementations
+instead support three different operations:
+
+
+mm which calculates the full kernel
+mmv which calculates kernel-vector products
+dmmv which calculates double kernel-vector products (which are operations like \(K^\top (K v)\) where
+\(K\) is a kernel matrix and \(v\) is a vector).
+
+
+
+run_keops_mmv
+A thin wrapper to KeOps is provided to allow for block-splitting and multiple GPU usage. This only supports
+kernel-vector products.
+
+
+falkon.mmv_ops.keops. run_keops_mmv ( X1 : Tensor , X2 : Tensor , v : Tensor , other_vars : List [ Tensor ] , out : Tensor | None , formula : str , aliases : List [ str ] , axis : int , reduction : str = 'Sum' , opt : FalkonOptions | None = None ) → Tensor
+
+
+
+
+fmm
+Block-wise kernel calculation. If the inputs require gradient, this function uses a differentiable implementation.
+
+
+falkon.mmv_ops.fmm. fmm ( kernel : Kernel , opt : BaseOptions | None , out : Tensor | None , diag : bool , X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None ) → Tensor
+
+
+
+
+fmmv
+Block-wise kernel-vector products.
+
+
+falkon.mmv_ops.fmmv. fmmv ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor , kernel : Kernel , out : Tensor | None = None , opt : BaseOptions | None = None , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None )
+
+
+
+
+fdmmv
+Block-wise double kernel-vector products.
+
+
+falkon.mmv_ops.fmmv. fdmmv ( X1 : Tensor | SparseTensor , X2 : Tensor | SparseTensor , v : Tensor , w : Tensor | None , kernel : Kernel , out : Tensor | None = None , differentiable : bool = False , opt : BaseOptions | None = None , kwargs_m1 : Dict [ str , Tensor ] | None = None , kwargs_m2 : Dict [ str , Tensor ] | None = None ) → Tensor
+Double kernel-vector product
+Computes kernel \(K = k(X_1, X_2)\) and then the double kernel-vector
+product \(K^{\top} (K v + w)\) .
+
+Parameters:
+
+X1 – \(n \times d\) input matrix.
+X2 – \(m \times d\) input matrix.
+v – \(m \times t\) vector (to be multiplied by the kernel)
+w – \(n \times t\) vector to be added to the first k-v product
+kernel – Kernel object responsible for computing kernel blocks
+out – Optional output matrix of size \(m \times t\) . If specified, the output will be
+stored in it, otherwise a new output matrix will be allocated
+differentiable – Whether the inputs are intended to be differentiated with. Currently setting this
+to True results in a NotImplementedError .
+opt – Options to be used for this operation
+kwargs_m1 – Keyword arguments containing tensors which should be split along with m1 .
+For example this could be a set of indices corresponding to m1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with m2 .
+For example this could be a set of indices corresponding to m2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out – Output of the double kernel-vector product. Will use the same storage as the out
+parameter if it was specified
+
+
+
+
+
+
+incore_fmmv
+
+
+falkon.mmv_ops.fmmv_incore. incore_fmmv ( mat : Tensor , vec : Tensor , out : Tensor | None = None , transpose : bool = False , opt : FalkonOptions | None = None ) → Tensor
+
+
+
+
+incore_fdmmv
+
+
+falkon.mmv_ops.fmmv_incore. incore_fdmmv ( mat : Tensor , vec : Tensor , w : Tensor | None , out : Tensor | None = None , opt : FalkonOptions | None = None ) → Tensor
+
+
+
+
+Low-level functions
+The following are some of the low-level functions which help compute kernels and kernel-vector products block-wise.
+They are specialized for different input types.
+
+
+falkon.mmv_ops.fmm. sparse_mm_run_thread ( m1 : SparseTensor , m2 : SparseTensor , out : Tensor , kernel : Kernel , n : int , m : int , comp_dt : dtype , dev : device , tid : int , kwargs_m1 : Dict [ str , Tensor ] , kwargs_m2 : Dict [ str , Tensor ] )
+Inner loop to compute (part of) a kernel matrix for two sparse input tensors
+
+Parameters:
+
+m1 – Left input tensor for computing the kernel
+m2 – Right input tensor for computing the kernel
+out – Output dense matrix in which to store the result
+kernel – Kernel object, used for computing the kernel. This must implement the
+falkon.kernels.kernel.Kernel.compute_sparse() method.
+n – Block size for the first axis of m1
+m – Block size for the first axis of m2
+comp_dt – Data-type in which to run the actual calculations (may be different from the data-type
+of m1 or m2 ).
+dev – Device on which to run the calculations
+tid – Thread ID. If on the main thread this will be -1
+kwargs_m1 – Keyword arguments containing tensors which should be split along with m1 .
+For example this could be a set of indices corresponding to m1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with m2 .
+For example this could be a set of indices corresponding to m2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The kernel matrix. Should use the same underlying storage as the parameter out .
+
+
+
+
+
+
+falkon.mmv_ops.fmmv. sparse_mmv_run_thread ( m1 : SparseTensor , m2 : SparseTensor , v : Tensor , out : Tensor , kernel : Kernel , blk_n : int , blk_m : int , mem_needed : int , dev : device , tid : int , kwargs_m1 : Dict [ str , Tensor ] , kwargs_m2 : Dict [ str , Tensor ] )
+Inner loop to compute (part of) a kernel-vector product for sparse input matrices.
+
+Parameters:
+
+m1 – Left input tensor for computing the kernel
+m2 – Right input tensor for computing the kernel
+v – Dense vector to be multiplied by the kernel matrix
+out – Dense output vector which should store the result of the kernel vector product on exit
+from this function.
+kernel – Kernel object, used for computing the kernel. This must implement the
+falkon.kernels.kernel.Kernel.compute_sparse() method.
+blk_n – Block size for the first axis of m1
+blk_m – Block size for the first axis of m2
+mem_needed – Memory needed for pre-allocations
+dev – Device on which to run the calculations
+tid – Thread ID or -1 if on main thread
+kwargs_m1 – Keyword arguments containing tensors which should be split along with m1 .
+For example this could be a set of indices corresponding to m1 , which are then
+correctly split and available in the kernel computation.
+kwargs_m2 – Keyword arguments containing tensors which should be split along with m2 .
+For example this could be a set of indices corresponding to m2 , which are then
+correctly split and available in the kernel computation.
+
+
+Returns:
+out (torch.Tensor ) – The kernel matrix. Should use the same underlying storage as the parameter out .
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/api_reference/models.html b/api_reference/models.html
new file mode 100644
index 00000000..cf22bfae
--- /dev/null
+++ b/api_reference/models.html
@@ -0,0 +1,1061 @@
+
+
+
+
+
+
+
+
+
falkon.models — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+falkon.models
+
+Falkon
+
+
+class falkon.models. Falkon ( kernel : Kernel , penalty : float , M : int , center_selection : str | CenterSelector = 'uniform' , maxiter : int = 20 , seed : int | None = None , error_fn : Callable [ [ Tensor , Tensor ] , Any | Tuple [ Any , str ] ] | None = None , error_every : int | None = 1 , weight_fn : Callable [ [ Tensor , Tensor , Tensor ] , Tensor ] | None = None , options : FalkonOptions | None = None )
+Falkon Kernel Ridge Regression solver.
+This estimator object solves approximate kernel ridge regression problems with Nystroem
+projections and a fast optimization algorithm as described in [1] , [2] .
+Multiclass and multiple regression problems can all be tackled
+with this same object, for example by encoding multiple classes
+in a one-hot target matrix.
+
+Parameters:
+
+kernel – Object representing the kernel function used for KRR.
+penalty (float ) – Amount of regularization to apply to the problem.
+This parameter must be greater than 0.
+M (int ) – The number of Nystrom centers to pick. M must be positive,
+and lower than the total number of training points. A larger
+M will typically lead to better accuracy but will use more
+computational resources. You can either specify the number of centers
+by setting this parameter, or by passing to this constructor a
+falkon.center_selection.CenterSelector class instance.
+center_selection (str or falkon.center_selection.CenterSelector ) – The center selection algorithm. Implemented is only ‘uniform’
+selection which can choose each training sample with the same
+probability.
+maxiter (int ) – The number of iterations to run the optimization for. Usually
+fewer than 20 iterations are necessary, however this is problem
+dependent.
+seed (int or None ) – Random seed. Can be used to make results stable across runs.
+Randomness is present in the center selection algorithm, and in
+certain optimizers.
+error_fn (Callable or None ) – A function with two arguments: targets and predictions, both torch.Tensor
+objects which returns the error incurred for predicting ‘predictions’ instead of
+‘targets’. This is used to display the evolution of the error during the iterations.
+error_every (int or None ) – Evaluate the error (on training or validation data) every
+error_every iterations. If set to 1 then the error will be
+calculated at each iteration. If set to None, it will never be
+calculated.
+weight_fn (Callable or None ) –
A function for giving different weights to different samples. This is used
+for weighted least-squares, it should accept three arguments: Y , X , indices which
+represent the samples for which weights need to be computed, and return a vector of
+weights corresponding to the input targets.
+As an example, in the setting of binary classification Y can be -1 or +1. To give more
+importance to errors on the negative class, pass a weight_fn which returns 2 whenever
+the target is -1.
+
+options (FalkonOptions ) – Additional options used by the components of the Falkon solver. Individual options
+are documented in falkon.options .
+
+
+
+Examples
+Running Falkon on a random dataset
+>>> X = torch . randn ( 1000 , 10 )
+>>> Y = torch . randn ( 1000 , 1 )
+>>> kernel = falkon . kernels . GaussianKernel ( 3.0 )
+>>> options = FalkonOptions ( use_cpu = True )
+>>> model = Falkon ( kernel = kernel , penalty = 1e-6 , M = 500 , options = options )
+>>> model . fit ( X , Y )
+>>> preds = model . predict ( X )
+
+
+Warm restarts: run for 5 iterations, then use warm_start to run for 5 more iterations.
+>>> model = Falkon ( kernel = kernel , penalty = 1e-6 , M = 500 , maxiter = 5 )
+>>> model . fit ( X , Y )
+>>> model . fit ( X , Y , warm_start = model . beta_ )
+
+
+References
+
+Alessandro Rudi, Luigi Carratino, Lorenzo Rosasco, “FALKON: An optimal large
+scale kernel method,” Advances in Neural Information Processing Systems 29, 2017.
+Giacomo Meanti, Luigi Carratino, Lorenzo Rosasco, Alessandro Rudi,
+“Kernel methods through the roof: handling billions of points efficiently,”
+Advances in Neural Information Processing Systems, 2020.
+
+
+
+fit ( X : Tensor , Y : Tensor , Xts : Tensor | None = None , Yts : Tensor | None = None , warm_start : Tensor | None = None )
+Fits the Falkon KRR model.
+
+Parameters:
+
+X (torch.Tensor ) – The tensor of training data, of shape [num_samples, num_dimensions].
+If X is in Fortran order (i.e. column-contiguous) then we can avoid
+an extra copy of the data.
+Y (torch.Tensor ) – The tensor of training targets, of shape [num_samples, num_outputs].
+If X and Y represent a classification problem, Y can be encoded as a one-hot
+vector.
+If Y is in Fortran order (i.e. column-contiguous) then we can avoid an
+extra copy of the data.
+Xts (torch.Tensor or None ) – Tensor of validation data, of shape [num_test_samples, num_dimensions].
+If validation data is provided and error_fn was specified when
+creating the model, they will be used to print the validation error
+during the optimization iterations.
+If Xts is in Fortran order (i.e. column-contiguous) then we can avoid an
+extra copy of the data.
+Yts (torch.Tensor or None ) – Tensor of validation targets, of shape [num_test_samples, num_outputs].
+If validation data is provided and error_fn was specified when
+creating the model, they will be used to print the validation error
+during the optimization iterations.
+If Yts is in Fortran order (i.e. column-contiguous) then we can avoid an
+extra copy of the data.
+warm_start (torch.Tensor or None ) – Specify a starting point for the conjugate gradient optimizer. If not specified, the
+initial point will be a tensor filled with zeros.
+Be aware that the starting point should not be in the parameter space, but in the
+preconditioner space (i.e. if initializing from a previous Falkon object, use the
+beta_ field, not alpha_ ).
+
+
+Returns:
+model (Falkon ) – The fitted model
+
+
+
+
+
+
+get_metadata_routing ( )
+Get metadata routing of this object.
+Please check User Guide on how the routing
+mechanism works.
+
+Returns:
+routing (MetadataRequest ) – A MetadataRequest encapsulating
+routing information.
+
+
+
+
+
+
+get_params ( deep = True )
+Get parameters for this estimator.
+
+Parameters:
+deep (bool , default=True ) – If True, will return the parameters for this estimator and
+contained subobjects that are estimators.
+
+Returns:
+params (dict ) – Parameter names mapped to their values.
+
+
+
+
+
+
+init_kernel_matrix ( X : Tensor , ny_pts : Tensor ) → Kernel
+Decide whether to store the full kernel. If dimensions are such that it is convenient
+to precompute it, it is saved in a PrecomputedKernel which is used for
+subsequent computations. Otherwise return the original kernel.
+
+
+
+
+predict ( X : Tensor ) → Tensor
+Makes predictions on data X using the learned model.
+
+Parameters:
+X (torch.Tensor ) – Tensor of test data points, of shape [num_samples, num_dimensions].
+
+Returns:
+predictions (torch.Tensor ) – Prediction tensor of shape [num_samples, num_outputs] for all
+data points.
+
+
+
+
+
+
+set_fit_request ( * , Xts : bool | None | str = '$UNCHANGED$' , Yts : bool | None | str = '$UNCHANGED$' , warm_start : bool | None | str = '$UNCHANGED$' ) → Falkon
+Request metadata passed to the fit method.
+Note that this method is only relevant if
+enable_metadata_routing=True (see sklearn.set_config() ).
+Please see User Guide on how the routing
+mechanism works.
+The options for each parameter are:
+
+True : metadata is requested, and passed to fit if provided. The request is ignored if metadata is not provided.
+False : metadata is not requested and the meta-estimator will not pass it to fit .
+None : metadata is not requested, and the meta-estimator will raise an error if the user provides it.
+str : metadata should be passed to the meta-estimator with this given alias instead of the original name.
+
+The default (sklearn.utils.metadata_routing.UNCHANGED ) retains the
+existing request. This allows you to change the request for some
+parameters and not others.
+
+
+
Note
+
This method is only relevant if this estimator is used as a
+sub-estimator of a meta-estimator, e.g. used inside a
+Pipeline . Otherwise it has no effect.
+
+
+Parameters:
+
+Xts (str , True , False , or None , default=sklearn.utils.metadata_routing.UNCHANGED ) – Metadata routing for Xts parameter in fit .
+Yts (str , True , False , or None , default=sklearn.utils.metadata_routing.UNCHANGED ) – Metadata routing for Yts parameter in fit .
+warm_start (str , True , False , or None , default=sklearn.utils.metadata_routing.UNCHANGED ) – Metadata routing for warm_start parameter in fit .
+
+
+Returns:
+self (object ) – The updated object.
+
+
+
+
+
+
+set_params ( ** params )
+Set the parameters of this estimator.
+The method works on simple estimators as well as on nested objects
+(such as Pipeline ). The latter have
+parameters of the form <component>__<parameter> so that it’s
+possible to update each component of a nested object.
+
+Parameters:
+**params (dict ) – Estimator parameters.
+
+Returns:
+self (estimator instance ) – Estimator instance.
+
+
+
+
+
+
+
+
+LogisticFalkon
+
+
+class falkon.models. LogisticFalkon ( kernel : Kernel , penalty_list : List [ float ] , iter_list : List [ int ] , loss : Loss , M : int , center_selection : str | CenterSelector = 'uniform' , seed : int | None = None , error_fn : Callable [ [ Tensor , Tensor ] , float | Tuple [ float , str ] ] | None = None , error_every : int | None = 1 , options : FalkonOptions | None = None )
+Falkon Logistic regression solver.
+This estimator object solves approximate logistic regression problems with Nystroem
+projections and a fast optimization algorithm as described in [1] , [3] .
+This model can handle logistic regression, so it may be used in place of
+falkon.models.Falkon (which uses the squared loss) when tackling binary
+classification problems.
+The algorithm works by repeated applications of the base falkon algorithm with decreasing
+amounts of regularization; therefore the class accepts slightly different parameters from
+falkon.models.Falkon : a penalty_list which should contain a list of decreasing
+regularization amounts, and an iter_list which should specify for each application
+of the base algorithm, how many CG iterations to use. For guidance on how to set these
+parameters, see below.
+
+Parameters:
+
+kernel – Object representing the kernel function used for KRR.
+penalty_list (List [ float ] ) – Amount of regularization to use for each iteration of the base algorithm. The length
+of this list determines the number of base algorithm iterations.
+iter_list (List [ int ] ) – Number of conjugate gradient iterations used in each iteration of the base algorithm.
+The length of this list must be identical to that of penalty_list .
+loss (Loss ) – This parameter must be set to an instance of falkon.gsc_losses.LogisticLoss ,
+initialized with the same kernel as this class.
+M (int ) – The number of Nystrom centers to pick. M must be positive,
+and lower than the total number of training points. A larger
+M will typically lead to better accuracy but will use more
+computational resources.
+center_selection (str or falkon.center_selection.CenterSelector ) – The center selection algorithm. Implemented is only ‘uniform’
+selection which can choose each training sample with the same
+probability.
+seed (int or None ) – Random seed. Can be used to make results stable across runs.
+Randomness is present in the center selection algorithm, and in
+certain optimizers.
+error_fn (Callable or None ) – A function with two arguments: targets and predictions, both torch.Tensor
+objects which returns the error incurred for predicting ‘predictions’ instead of
+‘targets’. This is used to display the evolution of the error during the iterations.
+error_every (int or None ) – Evaluate the error (on training or validation data) every
+error_every iterations. If set to 1 then the error will be
+calculated at each iteration. If set to None, it will never be
+calculated.
+options (FalkonOptions ) – Additional options used by the components of the Falkon solver. Individual options
+are documented in falkon.options .
+
+
+
+Examples
+Running Logistic Falkon on a random dataset
+>>> X = torch . randn ( 1000 , 10 )
+>>> Y = torch . randn ( 1000 , 1 )
+>>> Y [ Y > 0 ] = 1
+>>> Y [ Y <= 0 ] = - 1
+>>> kernel = falkon . kernels . GaussianKernel ( 3.0 )
+>>> options = FalkonOptions ()
+>>> model = LogisticFalkon ( kernel = kernel , penalty_list = [ 1e-2 , 1e-4 , 1e-6 , 1e-6 , 1e-6 ],
+>>> iter_list = [ 3 , 3 , 3 , 8 , 8 ], M = 500 , options = options )
+>>> model . fit ( X , Y )
+>>> preds = model . predict ( X )
+
+
+References
+
+Ulysse Marteau-Ferey, Francis Bach, Alessandro Rudi, “Globally Convergent Newton Methods
+for Ill-conditioned Generalized Self-concordant Losses,” NeurIPS 32, 2019.
+Giacomo Meanti, Luigi Carratino, Lorenzo Rosasco, Alessandro Rudi,
+“Kernel methods through the roof: handling billions of points efficiently,”
+Advances in Neural Information Processing Systems, 2020.
+
+Notes
+A rule of thumb for setting the penalty_list is to keep in mind the desired final
+regularization (1e-6 in the example above), and then create a short path of around three
+iterations where the regularization is decreased down to the desired value. The decrease can
+be of 10^2 or 10^3 at each step. Then a certain number of iterations at the desired
+regularization may be necessary to achieve good performance.
+The iter_list attribute follows a similar reasoning: use 3 inner-steps for the first three
+iterations where the regularization is decreased, and then switch to a higher number of
+inner-steps (e.g. 8) for the remaining iterations.
+
+
+fit ( X : Tensor , Y : Tensor , Xts : Tensor | None = None , Yts : Tensor | None = None )
+Fits the Falkon Kernel Logistic Regression model.
+
+Parameters:
+
+X (torch.Tensor ) – The tensor of training data, of shape [num_samples, num_dimensions].
+If X is in Fortran order (i.e. column-contiguous) then we can avoid
+an extra copy of the data.
+Y (torch.Tensor ) – The tensor of training targets, of shape [num_samples, num_outputs].
+If X and Y represent a classification problem, Y can be encoded as a one-hot
+vector.
+If Y is in Fortran order (i.e. column-contiguous) then we can avoid an
+extra copy of the data.
+Xts (torch.Tensor or None ) – Tensor of validation data, of shape [num_test_samples, num_dimensions].
+If validation data is provided and error_fn was specified when
+creating the model, they will be used to print the validation error
+during the optimization iterations.
+If Xts is in Fortran order (i.e. column-contiguous) then we can avoid an
+extra copy of the data.
+Yts (torch.Tensor or None ) – Tensor of validation targets, of shape [num_test_samples, num_outputs].
+If validation data is provided and error_fn was specified when
+creating the model, they will be used to print the validation error
+during the optimization iterations.
+If Yts is in Fortran order (i.e. column-contiguous) then we can avoid an
+extra copy of the data.
+
+
+Returns:
+model (LogisticFalkon ) – The fitted model
+
+
+
+
+
+
+get_metadata_routing ( )
+Get metadata routing of this object.
+Please check User Guide on how the routing
+mechanism works.
+
+Returns:
+routing (MetadataRequest ) – A MetadataRequest encapsulating
+routing information.
+
+
+
+
+
+
+get_params ( deep = True )
+Get parameters for this estimator.
+
+Parameters:
+deep (bool , default=True ) – If True, will return the parameters for this estimator and
+contained subobjects that are estimators.
+
+Returns:
+params (dict ) – Parameter names mapped to their values.
+
+
+
+
+
+
+predict ( X : Tensor ) → Tensor
+Makes predictions on data X using the learned model.
+
+Parameters:
+X (torch.Tensor ) – Tensor of test data points, of shape [num_samples, num_dimensions].
+
+Returns:
+predictions (torch.Tensor ) – Prediction tensor of shape [num_samples, num_outputs] for all
+data points.
+
+
+
+
+
+
+set_fit_request ( * , Xts : bool | None | str = '$UNCHANGED$' , Yts : bool | None | str = '$UNCHANGED$' ) → LogisticFalkon
+Request metadata passed to the fit method.
+Note that this method is only relevant if
+enable_metadata_routing=True (see sklearn.set_config() ).
+Please see User Guide on how the routing
+mechanism works.
+The options for each parameter are:
+
+True : metadata is requested, and passed to fit if provided. The request is ignored if metadata is not provided.
+False : metadata is not requested and the meta-estimator will not pass it to fit .
+None : metadata is not requested, and the meta-estimator will raise an error if the user provides it.
+str : metadata should be passed to the meta-estimator with this given alias instead of the original name.
+
+The default (sklearn.utils.metadata_routing.UNCHANGED ) retains the
+existing request. This allows you to change the request for some
+parameters and not others.
+
+
+
Note
+
This method is only relevant if this estimator is used as a
+sub-estimator of a meta-estimator, e.g. used inside a
+Pipeline . Otherwise it has no effect.
+
+
+Parameters:
+
+Xts (str , True , False , or None , default=sklearn.utils.metadata_routing.UNCHANGED ) – Metadata routing for Xts parameter in fit .
+Yts (str , True , False , or None , default=sklearn.utils.metadata_routing.UNCHANGED ) – Metadata routing for Yts parameter in fit .
+
+
+Returns:
+self (object ) – The updated object.
+
+
+
+
+
+
+set_params ( ** params )
+Set the parameters of this estimator.
+The method works on simple estimators as well as on nested objects
+(such as Pipeline ). The latter have
+parameters of the form <component>__<parameter> so that it’s
+possible to update each component of a nested object.
+
+Parameters:
+**params (dict ) – Estimator parameters.
+
+Returns:
+self (estimator instance ) – Estimator instance.
+
+
+
+
+
+
+
+
+InCoreFalkon
+
+
+class falkon.models. InCoreFalkon ( kernel : Kernel , penalty : float , M : int , center_selection : str | CenterSelector = 'uniform' , maxiter : int = 20 , seed : int | None = None , error_fn : Callable [ [ Tensor , Tensor ] , float | Tuple [ float , str ] ] | None = None , error_every : int | None = 1 , weight_fn : Callable [ [ Tensor ] , Tensor ] | None = None , options : FalkonOptions | None = None )
+In GPU core Falkon Kernel Ridge Regression solver.
+This estimator object solves approximate kernel ridge regression problems with Nystroem
+projections and a fast optimization algorithm as described in [1] , [2] .
+Multiclass and multiple regression problems can all be tackled
+with this same object, for example by encoding multiple classes
+in a one-hot target matrix.
+Compared to the base falkon.models.Falkon estimator, the InCoreFalkon estimator
+is designed to work fully within the GPU, performing no data-copies between CPU and GPU. As
+such, it is more constraining than the base estimator, but has better performance on smaller
+problems .
+In particular, the constraints are that:
+
+
+the input data must be on a single GPU, when calling InCoreFalkon.fit ;
+the data, preconditioner, kernels, etc. must all fit on the same GPU.
+
+
+Using multiple GPUs is not possible with this model.
+
+Parameters:
+
+kernel – Object representing the kernel function used for KRR.
+penalty (float ) – Amount of regularization to apply to the problem.
+This parameter must be greater than 0.
+M (int ) – The number of Nystrom centers to pick. M must be positive,
+and lower than the total number of training points. A larger
+M will typically lead to better accuracy but will use more
+computational resources. You can either specify the number of centers
+by setting this parameter, or by passing to this constructor a CenterSelector class
+instance.
+center_selection (str or falkon.center_selection.CenterSelector ) – The center selection algorithm. Implemented is only ‘uniform’
+selection which can choose each training sample with the same
+probability.
+maxiter (int ) – The number of iterations to run the optimization for. Usually
+fewer than 20 iterations are necessary, however this is problem
+dependent.
+seed (int or None ) – Random seed. Can be used to make results stable across runs.
+Randomness is present in the center selection algorithm, and in
+certain optimizers.
+error_fn (Callable or None ) – A function with two arguments: targets and predictions, both torch.Tensor
+objects which returns the error incurred for predicting ‘predictions’ instead of
+‘targets’. This is used to display the evolution of the error during the iterations.
+error_every (int or None ) – Evaluate the error (on training or validation data) every
+error_every iterations. If set to 1 then the error will be
+calculated at each iteration. If set to None, it will never be
+calculated.
+weight_fn (Callable or None ) –
A function for giving different weights to different samples. This is used
+for weighted least-squares, it should accept three arguments: Y , X , indices which
+represent the samples for which weights need to be computed, and return a vector of
+weights corresponding to the input targets.
+As an example, in the setting of binary classification Y can be -1 or +1. To give more
+importance to errors on the negative class, pass a weight_fn which returns 2 whenever
+the target is -1.
+
+options (FalkonOptions ) – Additional options used by the components of the Falkon solver. Individual options
+are documented in falkon.options .
+
+
+
+Examples
+Running InCoreFalkon on a randomly generated dataset
+>>> X = torch . randn ( 1000 , 10 ) . cuda ()
+>>> Y = torch . randn ( 1000 , 1 ) . cuda ()
+>>> kernel = falkon . kernels . GaussianKernel ( 3.0 )
+>>> options = FalkonOptions ( use_cpu = True )
+>>> model = InCoreFalkon ( kernel = kernel , penalty = 1e-6 , M = 500 , options = options )
+>>> model . fit ( X , Y )
+>>> preds = model . predict ( X )
+>>> assert preds . is_cuda
+
+
+References
+
+Alessandro Rudi, Luigi Carratino, Lorenzo Rosasco, “FALKON: An optimal large
+scale kernel method,” Advances in Neural Information Processing Systems 29, 2017.
+Giacomo Meanti, Luigi Carratino, Lorenzo Rosasco, Alessandro Rudi,
+“Kernel methods through the roof: handling billions of points efficiently,”
+Advances in Neural Information Processing Systems, 2020.
+
+
+
+fit ( X : Tensor , Y : Tensor , Xts : Tensor | None = None , Yts : Tensor | None = None , warm_start : Tensor | None = None )
+Fits the Falkon KRR model.
+
+Parameters:
+
+X (torch.Tensor ) – The tensor of training data, of shape [num_samples, num_dimensions].
+If X is in Fortran order (i.e. column-contiguous) then we can avoid
+an extra copy of the data. Must be a CUDA tensor.
+Y (torch.Tensor ) – The tensor of training targets, of shape [num_samples, num_outputs].
+If X and Y represent a classification problem, Y can be encoded as a one-hot
+vector.
+If Y is in Fortran order (i.e. column-contiguous) then we can avoid an
+extra copy of the data. Must be a CUDA tensor.
+Xts (torch.Tensor or None ) – Tensor of validation data, of shape [num_test_samples, num_dimensions].
+If validation data is provided and error_fn was specified when
+creating the model, they will be used to print the validation error
+during the optimization iterations.
+If Xts is in Fortran order (i.e. column-contiguous) then we can avoid an
+extra copy of the data. Must be a CUDA tensor.
+Yts (torch.Tensor or None ) – Tensor of validation targets, of shape [num_test_samples, num_outputs].
+If validation data is provided and error_fn was specified when
+creating the model, they will be used to print the validation error
+during the optimization iterations.
+If Yts is in Fortran order (i.e. column-contiguous) then we can avoid an
+extra copy of the data. Must be a CUDA tensor.
+warm_start (torch.Tensor or None ) – Specify a starting point for the conjugate gradient optimizer. If not specified, the
+initial point will be a tensor filled with zeros.
+Be aware that the starting point should not be in the parameter space, but in the
+preconditioner space (i.e. if initializing from a previous Falkon object, use the
+beta_ field, not alpha_ ).
+
+
+Returns:
+model (InCoreFalkon ) – The fitted model
+
+
+
+
+
+
+get_metadata_routing ( )
+Get metadata routing of this object.
+Please check User Guide on how the routing
+mechanism works.
+
+Returns:
+routing (MetadataRequest ) – A MetadataRequest encapsulating
+routing information.
+
+
+
+
+
+
+get_params ( deep = True )
+Get parameters for this estimator.
+
+Parameters:
+deep (bool , default=True ) – If True, will return the parameters for this estimator and
+contained subobjects that are estimators.
+
+Returns:
+params (dict ) – Parameter names mapped to their values.
+
+
+
+
+
+
+init_kernel_matrix ( X : Tensor , ny_pts : Tensor ) → Kernel
+Decide whether to store the full kernel. If dimensions are such that it is convenient
+to precompute it, it is saved in a PrecomputedKernel which is used for
+subsequent computations. Otherwise return the original kernel.
+
+
+
+
+predict ( X : Tensor ) → Tensor
+Makes predictions on data X using the learned model.
+
+Parameters:
+X (torch.Tensor ) – Tensor of test data points, of shape [num_samples, num_dimensions].
+
+Returns:
+predictions (torch.Tensor ) – Prediction tensor of shape [num_samples, num_outputs] for all
+data points.
+
+
+
+
+
+
+set_fit_request ( * , Xts : bool | None | str = '$UNCHANGED$' , Yts : bool | None | str = '$UNCHANGED$' , warm_start : bool | None | str = '$UNCHANGED$' ) → InCoreFalkon
+Request metadata passed to the fit method.
+Note that this method is only relevant if
+enable_metadata_routing=True (see sklearn.set_config() ).
+Please see User Guide on how the routing
+mechanism works.
+The options for each parameter are:
+
+True : metadata is requested, and passed to fit if provided. The request is ignored if metadata is not provided.
+False : metadata is not requested and the meta-estimator will not pass it to fit .
+None : metadata is not requested, and the meta-estimator will raise an error if the user provides it.
+str : metadata should be passed to the meta-estimator with this given alias instead of the original name.
+
+The default (sklearn.utils.metadata_routing.UNCHANGED ) retains the
+existing request. This allows you to change the request for some
+parameters and not others.
+
+
+
Note
+
This method is only relevant if this estimator is used as a
+sub-estimator of a meta-estimator, e.g. used inside a
+Pipeline . Otherwise it has no effect.
+
+
+Parameters:
+
+Xts (str , True , False , or None , default=sklearn.utils.metadata_routing.UNCHANGED ) – Metadata routing for Xts parameter in fit .
+Yts (str , True , False , or None , default=sklearn.utils.metadata_routing.UNCHANGED ) – Metadata routing for Yts parameter in fit .
+warm_start (str , True , False , or None , default=sklearn.utils.metadata_routing.UNCHANGED ) – Metadata routing for warm_start parameter in fit .
+
+
+Returns:
+self (object ) – The updated object.
+
+
+
+
+
+
+set_params ( ** params )
+Set the parameters of this estimator.
+The method works on simple estimators as well as on nested objects
+(such as Pipeline ). The latter have
+parameters of the form <component>__<parameter> so that it’s
+possible to update each component of a nested object.
+
+Parameters:
+**params (dict ) – Estimator parameters.
+
+Returns:
+self (estimator instance ) – Estimator instance.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/api_reference/optimization.html b/api_reference/optimization.html
new file mode 100644
index 00000000..7bde9786
--- /dev/null
+++ b/api_reference/optimization.html
@@ -0,0 +1,467 @@
+
+
+
+
+
+
+
+
+
falkon.optim — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+falkon.optim
+
+Optimizer
+
+
+class falkon.optim. Optimizer
+Base class for optimizers. This is an empty shell at the moment.
+
+
+
+
+Conjugate gradient methods
+
+ConjugateGradient
+
+
+class falkon.optim. ConjugateGradient ( opt : ConjugateGradientOptions | None = None )
+
+
+solve ( X0 : Tensor | None , B : Tensor , mmv : Callable [ [ Tensor ] , Tensor ] , max_iter : int , callback : Callable [ [ int , Tensor , float ] , None ] | None = None ) → Tensor
+Conjugate-gradient solver with optional support for preconditioning via generic MMV.
+This solver can be used for iterative solution of linear systems of the form $AX = B$ with
+respect to the X variable. Knowledge of A is only needed through matrix-vector
+multiplications with temporary solutions (must be provided through the mmv function).
+Preconditioning can be achieved by incorporating the preconditioner matrix in the mmv
+function.
+
+Parameters:
+
+X0 (Optional [ torch.Tensor ] ) – Initial solution for the solver. If not provided it will be a zero-tensor.
+B (torch.Tensor ) – Right-hand-side of the linear system to be solved.
+mmv – User-provided function to perform matrix-vector multiplications with the design matrix
+A . The function must accept a single argument (the vector to be multiplied), and
+return the result of the matrix-vector multiplication.
+max_iter (int ) – Maximum number of iterations the solver will perform. Early stopping is implemented
+via the options passed in the constructor of this class (in particular look at
+cg_tolerance options)
+callback – An optional, user-provided function which shall be called at the end of each iteration
+with the current solution. The arguments to the function are the iteration number,
+a tensor containing the current solution, and the total time elapsed from the beginning
+of training (note that this time explicitly excludes any time taken by the callback
+itself).
+
+
+Returns:
+The solution to the linear system X .
+
+
+
+
+
+
+
+
+FalkonConjugateGradient
+
+
+class falkon.optim. FalkonConjugateGradient ( kernel : Kernel , preconditioner : Preconditioner , opt : FalkonOptions , weight_fn = None )
+Preconditioned conjugate gradient solver, optimized for the Falkon algorithm.
+The linear system solved is
+
+\[\widetilde{B}^\top H \widetilde{B} \beta = \widetilde{B}^\top K_{nm}^\top Y\]
+where \(\widetilde{B}\) is the approximate preconditioner
+
+\[\widetilde{B} = 1/\sqrt{n}T^{-1}A^{-1}\]
+\(\beta\) is the preconditioned solution vector (from which we can get \(\alpha = \widetilde{B}\beta\) ),
+and \(H\) is the \(m\times m\) sketched matrix
+
+\[H = K_{nm}^\top K_{nm} + \lambda n K_{mm}\]
+
+Parameters:
+
+kernel – The kernel class used for the CG algorithm
+preconditioner – The approximate Falkon preconditioner. The class should allow triangular solves with
+both \(T\) and \(A\) and multiple right-hand sides.
+The preconditioner should already have been initialized with a set of Nystrom centers.
+If the Nystrom centers used for CG are different from the ones used for the preconditioner,
+the CG method could converge very slowly.
+opt – Options passed to the CG solver and to the kernel for computations.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/api_reference/options.html b/api_reference/options.html
new file mode 100644
index 00000000..5bd65d95
--- /dev/null
+++ b/api_reference/options.html
@@ -0,0 +1,490 @@
+
+
+
+
+
+
+
+
+
falkon.options — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+falkon.options
+
+FalkonOptions
+
+
+class falkon.options. FalkonOptions ( keops_acc_dtype : str = 'auto' , keops_sum_scheme : str = 'auto' , keops_active : str = 'auto' , keops_memory_slack : float = 0.7 , chol_force_in_core : bool = False , chol_force_ooc : bool = False , chol_par_blk_multiplier : int = 2 , pc_epsilon_32 : float = 1e-05 , pc_epsilon_64 : float = 1e-13 , cpu_preconditioner : bool = False , cg_epsilon_32 : float = 1e-07 , cg_epsilon_64 : float = 1e-15 , cg_tolerance : float = 1e-07 , cg_full_gradient_every : int = 10 , cg_differential_convergence : bool = False , debug : bool = False , use_cpu : bool = False , max_gpu_mem : float = inf , max_cpu_mem : float = inf , compute_arch_speed : bool = False , no_single_kernel : bool = True , min_cuda_pc_size_32 : int = 10000 , min_cuda_pc_size_64 : int = 30000 , min_cuda_iter_size_32 : int = 300000000 , min_cuda_iter_size_64 : int = 900000000 , never_store_kernel : bool = False , store_kernel_d_threshold : int = 1200 , num_fmm_streams : int = 2 , memory_slack : float = 0.9 )
+Global options for Falkon.
+
+Parameters:
+
+debug (bool ) – default False - When set to True , the estimators will print extensive debugging information.
+Set it if you want to dig deeper.
+use_cpu (bool ) – default False - When set to True forces Falkon not to use the GPU. If this option is not set,
+and no GPU is available, Falkon will issue a warning.
+max_gpu_mem (float ) – The maximum GPU memory (in bytes) that Falkon may use. If not set, Falkon will
+use all available memory.
+max_cpu_mem (float ) – The maximum CPU RAM (in bytes) that Falkon may use. If not set, Falkon will
+use all available memory. This option is not a strict bound (due to the nature
+of memory management in Python).
+compute_arch_speed (bool ) – default False - When running Falkon on a machine with multiple GPUs which have a range of different
+performance characteristics, setting this option to True may help subdivide the
+workload better: the performance of each accelerator will be evaluated on startup,
+then the faster devices will receive more work than the slower ones.
+If this is not the case, do not set this option since evaluating accelerator performance
+increases startup times.
+no_single_kernel (bool ) – default True - Whether the kernel should always be evaluated in double precision.
+If set to False , kernel evaluations will be faster but less precise (note that this
+refers only to calculations involving the full kernel matrix, not to kernel-vector
+products).
+min_cuda_pc_size_32 (int ) – default 10000 - If M (the number of Nystroem centers) is lower than min_cuda_pc_size_32 , falkon will
+run the preconditioner on the CPU. Otherwise, if CUDA is available, falkon will try
+to run the preconditioner on the GPU. This setting is valid for data in single
+(float32) precision.
+Along with the min_cuda_iter_size_32 setting, this determines a cutoff for running
+Falkon on the CPU or the GPU. Such cutoff is useful since for small-data problems
+running on the CPU may be faster than running on the GPU. If your data is close to the
+cutoff, it may be worth experimenting with running on the CPU and on the GPU to check
+which side is faster. This will depend on the exact hardware.
+min_cuda_pc_size_64 (int ) – default 30000 - If M (the number of Nystroem centers) is lower than min_cuda_pc_size_64 ,
+falkon will run the preconditioner on the CPU. Otherwise, if CUDA is available, falkon will try
+to run the preconditioner on the GPU. This setting is valid for data in double
+(float64) precision.
+Along with the min_cuda_iter_size_64 setting, this determines a cutoff for running
+Falkon on the CPU or the GPU. Such cutoff is useful since for small-data problems
+running on the CPU may be faster than running on the GPU. If your data is close to the
+cutoff, it may be worth experimenting with running on the CPU and on the GPU to check
+which side is faster. This will depend on the exact hardware.
+min_cuda_iter_size_32 (int ) – default 300_000_000 - If the data size (measured as the product of M, and the dimensions of X) is lower than
+min_cuda_iter_size_32 , falkon will run the conjugate gradient iterations on the CPU.
+For example, with the default setting, the CPU-GPU threshold is set at a dataset
+with 10k points, 10 dimensions, and 3k Nystroem centers. A larger dataset, or the use
+of more centers, will cause the conjugate gradient iterations to run on the GPU.
+This setting is valid for data in single (float32) precision.
+min_cuda_iter_size_64 (int ) – default 900_000_000 - If the data size (measured as the product of M, and the dimensions of X) is lower than
+min_cuda_iter_size_64 , falkon will run the conjugate gradient iterations on the CPU.
+For example, with the default setting, the CPU-GPU threshold is set at a dataset
+with 30k points, 10 dimensions, and 3k Nystroem centers. A larger dataset, or the use
+of more centers, will cause the conjugate gradient iterations to run on the GPU.
+This setting is valid for data in double (float64) precision.
+never_store_kernel (bool ) – default False - If set to True, the kernel between the data and the Nystroem centers will not
+be stored - even if there is sufficient RAM to do so. Setting this option to
+True may (in case there would be enough RAM to store the kernel) increase the
+training time for Falkon since the K_NM matrix must be recomputed at every
+conjugate gradient iteration.
+store_kernel_d_threshold (int ) – default 1200 - The minimum data-dimensionality (d ) for which to consider whether to store
+the full Knm kernel matrix (between the data-points and the Nystrom centers). The final decision
+on whether the matrix is stored or not is based on the amount of memory available.
+Storing the Knm matrix may greatly reduce training and inference times, especially if d is
+large, or for kernels which are costly to compute.
+num_fmm_streams (int ) – default 2 - The number of CUDA streams to use for evaluating kernels when CUDA is available.
+This number should be increased from its default value when the number of Nystroem centers is
+higher than around 5000.
+memory_slack (float ) – default 0.9 - Controls the amount of slack in GPU memory when calculating the size of matrix
+splits for kernel-vector multiplications. This can be reduced if out-of-memory errors occur
+on the GPU.
+keops_acc_dtype (str ) – default “auto” - A string describing the accumulator data-type for KeOps.
+For more information refer to the
+KeOps documentation
+keops_sum_scheme (str ) – default “auto” - Accumulation scheme for KeOps.
+For more information refer to the KeOps documentation
+keops_active (str ) – default “auto” - Whether to use or not to use KeOps. Three settings are allowed, specified by strings:
+‘auto’ (the default setting) means that KeOps will be used if it is installed correctly,
+‘no’ means keops will not be used, nor will it be imported, and ‘force’ means that if KeOps is
+not installed an error will be raised.
+keops_memory_slack (float ) – default 0.7 - Controls the amount of slack used when calculating the matrix splits for KeOps.
+Since memory usage estimation for KeOps is hard, you may need to reduce this value if running
+out-of-GPU-memory when using KeOps. Typically this only occurs for large datasets.
+cg_epsilon_32 (float ) – default 1e-7 - Small added epsilon to prevent divide-by-zero errors in the conjugate
+gradient algorithm. Used for single precision data-types
+cg_epsilon_64 (float ) – default 1e-15 - Small added epsilon to prevent divide-by-zero errors in the conjugate
+gradient algorithm. Used for double precision data-types
+cg_tolerance (float ) – default 1e-7 - Maximum change in model parameters between iterations. If less change than
+cg_tolerance is detected, then we regard the optimization as converged.
+cg_full_gradient_every (int ) – default 10 - How often to calculate the full gradient in the conjugate gradient algorithm.
+Full-gradient iterations take roughly twice the time as normal iterations, but they reset
+the error introduced by the other iterations.
+cg_differential_convergence (bool ) – default False - Differential convergence refers to a procedure relevant to the conjugate
+gradient optimizer, and only applies when multiple right-hand side vectors are used (e.g.
+in multi-class classification, or in hyperparameter optimization with the stochastic objective).
+If this flag is set, whenever the convergence criterion is met for single right-hand-sides,
+they are removed from the optimization procedure. If it is not set, all vectors must have
+converged for the optimization to stop. It is especially useful for hyperparameter optimization.
+pc_epsilon_32 (float ) – default 1e-5 - Epsilon used to increase the diagonal dominance of a matrix before its
+Cholesky decomposition (for single-precision data types).
+pc_epsilon_64 (float ) – default 1e-13 - Epsilon used to increase the diagonal dominance of a matrix before its
+Cholesky decomposition (for double-precision data types).
+cpu_preconditioner (bool ) – default False - Whether the preconditioner should be computed on the CPU. This setting
+overrides the FalkonOptions.use_cpu option.
+chol_force_in_core (bool ) – default False - Whether to force in-core execution of the Cholesky decomposition. This will
+not work with matrices bigger than GPU memory.
+chol_force_ooc (bool ) – default False - Whether to force out-of-core (parallel) execution for the POTRF algorithm,
+even on matrices which fit in-GPU-core.
+chol_par_blk_multiplier (int ) – default 2 - Minimum number of tiles per-GPU in the out-of-core, GPU-parallel POTRF algorithm.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/api_reference/outofcore.html b/api_reference/outofcore.html
new file mode 100644
index 00000000..37497271
--- /dev/null
+++ b/api_reference/outofcore.html
@@ -0,0 +1,440 @@
+
+
+
+
+
+
+
+
+
falkon.ooc_ops — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+falkon.ooc_ops
+The out-of-core algorithms for the Cholesky decomposition and the LAUUM operation are crucial for speeding up our
+library. To find out more about how they work, check the source code:
+
+
+The following functions provide a higher-level interface to the two operations.
+
+gpu_cholesky
+
+
+falkon.ooc_ops. gpu_cholesky ( A : Tensor , upper : bool , clean : bool , overwrite : bool , opt : FalkonOptions ) → Tensor
+
+Parameters:
+
+A (torch.Tensor ) – 2D positive-definite matrix of size (n x n) that will be factorized as
+A = U.T @ U (if upper is True) or A = L @ L.T if upper
+is False.
+upper (bool ) – Whether the triangle which should be factorized is the upper or lower of A .
+clean (bool ) – Whether the “other” triangle of the output matrix (the one that
+does not contain the factorization) will be filled with zeros or
+not.
+overwrite (bool ) – Whether to overwrite matrix A or to output the result in a new
+buffer.
+opt (FalkonOptions ) – Options forwarded for block calculation, and other knobs in the out-of-core
+parallel POTRF implementation. Useful options are the ones defined in
+CholeskyOptions .
+
+
+
+Notes
+The factorization will always be the ‘lower’ version of the factorization
+which could however end up on the upper-triangular part of the matrix
+in case A is not Fortran contiguous to begin with.
+
+
+
+
+gpu_lauum
+
+
+falkon.ooc_ops. gpu_lauum ( A : Tensor , upper : bool , overwrite : bool = True , write_opposite : bool = False , opt : FalkonOptions | None = None )
+
+Parameters:
+
+A (torch.Tensor ) – N-by-N triangular matrix.
+upper (bool ) – Whether the input matrix is upper or lower triangular.
+overwrite (bool ) – Whether to overwrite matrix A or to output the result in a new buffer.
+write_opposite (bool ) – Independently of the overwrite parameter, whether to write the result of the
+triangular multiplication on the ‘opposite’ side of A . For example, if upper == True
+and overwrite == False , then the result will be written on the lower triangular part
+of the input matrix A .
+While independent, this is mostly useful when overwrite == False , since it can
+effectively avoid allocating a new tensor, and at the same time preserve the original data.
+opt (FalkonOptions or None ) – Options for the LAUUM operation. The only relevant options are the one connected to
+GPU memory usage.
+
+
+Returns:
+out (torch.Tensor ) – A (N x N) tensor. This will share the same memory as the input tensor A if overwrite
+is set to True , otherwise it will be a newly allocated tensor.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/api_reference/preconditioner.html b/api_reference/preconditioner.html
new file mode 100644
index 00000000..1e2e7ba3
--- /dev/null
+++ b/api_reference/preconditioner.html
@@ -0,0 +1,821 @@
+
+
+
+
+
+
+
+
+
falkon.preconditioner — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+falkon.preconditioner
+
+Preconditioner
+
+
+class falkon.preconditioner.preconditioner. Preconditioner
+Generic preconditioner class, used to accelerate solutions to linear systems.
+Given a system of equations \(H\beta = Y\) , where \(H\) typically contains in some
+form our data matrix X and Y contains the targets. We can use matrix \(B\) to
+create an equivalent linear system which will have lower condition number:
+
+\[BB^\top H \beta = Y\]
+where \(BB^\top \approx H^{-1}\) in order to make the preconditioner effective, but not
+too expensive to compute. Then, in order to use the preconditioner in an algorithm based
+on matrix-vector products (such as conjugate gradient descent), we must be able to “apply” the
+matrix \(B\) and its transpose \(B^\top\) to any vector.
+For this reason, this class exposes abstract methods apply and apply_t which should
+be overridden in concrete preconditioner implementations.
+
+
+
+
+
+Cholesky preconditioners
+
+FalkonPreconditioner
+
+
+class falkon.preconditioner. FalkonPreconditioner ( penalty : float , kernel , opt : FalkonOptions )
+Approximated Cholesky Preconditioner for FALKON.
+The preconditioner is based on the \(K_{MM}\) kernel between the
+inducing points. A two step approximation of the inverse matrix
+via two Cholesky decompositions is performed.
+Starting with \(K_{MM}\) we obtain \(T = \mathrm{chol}(K_{MM})\) .
+Then we can obtain \(A = \mathrm{chol}(\frac{1}{M} T T^\top + \lambda)\) via another Cholesky
+decomposition. Both T and A are upper triangular: the first gets stored in the upper
+triangle of the \(K_{MM}\) matrix (called fC in the code), while the second is stored
+in the lower triangle.
+Whenever we want to use one of the two triangles we must reset the matrix diagonal, since
+it is shared between the two matrices.
+
+Parameters:
+
+penalty (float ) – The regularization parameter for KRR. Must be greater than 0.
+kernel (falkon.kernels.kernel.Kernel ) – The kernel object. This is used to compute the M*M kernel
+between inducing points. The kernel matrix is then overwritten by
+the preconditioner itself.
+opt (FalkonOptions ) –
Additional options to be used in computing the preconditioner.
+Relevant options are:
+
+
+pc_epsilon : the jitter to add to the kernel matrix to make it positive-definite and allow Cholesky decomposition.
+This can be either a float, or a dictionary mapping from
+torch datatypes (e.g. float32, float64) to an appropriate
+float. Typically float32 requires more jitter than float64.
+
+
+
+
+cpu_preconditioner : a boolean value which overrides CPU/GPU settings and forces the function to compute the whole
+preconditioner on the CPU. If set to False, we fall back to
+the usual CPU/GPU settings (i.e. ‘use_cpu’ option and the
+availability of a GPU).
+
+
+
+
+
+
+
+
+
+
+apply ( v : Tensor ) → Tensor
+Solve two systems of equations \(ATx = v\) for unknown vector \(x\) .
+Multiple right-hand sides are supported (by simply passing a 2D tensor for v )
+
+Parameters:
+v – The right-hand side of the triangular system of equations
+
+Returns:
+x – The solution, computed with the trsm function.
+
+
+
+
See also
+
+falkon.preconditioner.pc_utils.trsm() the function used to solve the system of equations
+
+
+
+
+
+
+
+apply_t ( v : Tensor ) → Tensor
+Solve two systems of equations \(A^\top T^\top x = v\) for unknown vector \(x\) .
+Multiple right-hand sides are supported (by simply passing a 2D tensor for v )
+
+Parameters:
+v – The right-hand side of the triangular system of equations
+
+Returns:
+x – The solution, computed with the trsm function.
+
+
+
+
See also
+
+falkon.preconditioner.pc_utils.trsm() the function used to solve the system of equations
+
+
+
+
+
+
+
+init ( X : Tensor | SparseTensor , weight_vec : Tensor | None = None )
+Initialize the preconditioner matrix.
+This method must be called before the preconditioner can be used.
+
+Parameters:
+
+X (torch.Tensor ) – The (M x D) matrix of Nystroem centers
+weight_vec – An optional vector of size (M x 1) which is used for reweighted least-squares.
+This vector should contain the weights corresponding to the Nystrom centers.
+
+
+
+
+
+
+
+invA ( v : Tensor ) → Tensor
+Solve the system of equations \(Ax = v\) for unknown vector \(x\) .
+Multiple right-hand sides are supported (by simply passing a 2D tensor for v )
+
+Parameters:
+v – The right-hand side of the triangular system of equations
+
+Returns:
+x – The solution, computed with the trsm function.
+
+
+
+
See also
+
+trsm() the function used to solve the system of equations
+
+
+
+
+
+
+
+invAt ( v : Tensor ) → Tensor
+Solve the system of equations \(A^\top x = v\) for unknown vector \(x\) .
+Multiple right-hand sides are supported (by simply passing a 2D tensor for v )
+
+Parameters:
+v – The right-hand side of the triangular system of equations
+
+Returns:
+x – The solution, computed with the trsm function.
+
+
+
+
See also
+
+falkon.preconditioner.pc_utils.trsm() the function used to solve the system of equations
+
+
+
+
+
+
+
+invT ( v : Tensor ) → Tensor
+Solve the system of equations \(Tx = v\) for unknown vector \(x\) .
+Multiple right-hand sides are supported (by simply passing a 2D tensor for v )
+
+Parameters:
+v – The right-hand side of the triangular system of equations
+
+Returns:
+x – The solution, computed with the trsm function.
+
+
+
+
See also
+
+falkon.preconditioner.pc_utils.trsm() the function used to solve the system of equations
+
+
+
+
+
+
+
+invTt ( v : Tensor ) → Tensor
+Solve the system of equations \(T^\top x = v\) for unknown vector \(x\) .
+Multiple right-hand sides are supported (by simply passing a 2D tensor for v )
+
+Parameters:
+v – The right-hand side of the triangular system of equations
+
+Returns:
+x – The solution, computed with the trsm function.
+
+
+
+
See also
+
+falkon.preconditioner.pc_utils.trsm() the function used to solve the system of equations
+
+
+
+
+
+
+
+
+
+LogisticPreconditioner
+
+
+class falkon.preconditioner. LogisticPreconditioner ( kernel , loss , opt : FalkonOptions )
+Approximate Cholesky Preconditioner for Logistic-FALKON.
+The preconditioner is based on the K_MM kernel between the
+inducing points. A two step approximation of the inverse matrix
+via two cholesky decompositions is performed.
+T = chol ( K_MM ) => T . T @ T = K_MM
+A = chol ( 1 / M * ( T @ ( T . T @ W )) + lambda )
+
+
+So T and A are both upper triangular.
+W is a diagonal matrix of weights derived from the 2nd derivative of the loss function.
+Here we store T in the upper triangular part of the fC matrix,
+and A in the lower triangular part of the same matrix.
+Whenever we need to use one or the other we need to reset the
+diagonal of fC since it is shared between the two matrices.
+W is of size M and is the only difference with respect to the normal FALKON preconditioner
+(falkon.preconditioner.FalkonPreconditioner ).
+
+Parameters:
+
+kernel (falkon.kernels.kernel.Kernel ) – The kernel object. This is used to compute the M*M kernel
+between inducing points. This kernel is then overwritten by
+the preconditioner itself.
+loss (falkon.gsc_losses.Loss ) – The loss-function used for defining kernel weights.
+opt (FalkonOptions ) –
Additional options to be used in computing the preconditioner.
+Relevant options are:
+
+
+pc_epsilon : the jitter to add to the kernel matrix to make it positive-definite and allow Cholesky decomposition.
+This can be either a float, or a dictionary mapping from
+torch datatypes (e.g. float32, float64) to an appropriate
+float. Typically float32 requires more jitter than float64.
+
+
+
+
+cpu_preconditioner : a boolean value which overrides CPU/GPU settings and forces the function to compute the whole
+preconditioner on the CPU. If set to False, we fall back to
+the usual CPU/GPU settings (i.e. ‘use_cpu’ option and the
+availability of a GPU).
+
+
+
+
+
+
+
+
+
+
+
+apply ( v )
+Solve two systems of equations \(ATx = v\) for unknown vector \(x\) .
+Multiple right-hand sides are supported (by simply passing a 2D tensor for v )
+
+Parameters:
+v – The right-hand side of the triangular system of equations
+
+Returns:
+x – The solution, computed with the trsm function.
+
+
+
+
See also
+
+falkon.preconditioner.pc_utils.trsm() the function used to solve the system of equations
+
+
+
+
+
+
+
+apply_t ( v )
+Solve two systems of equations \(A^\top T^\top x = v\) for unknown vector \(x\) .
+Multiple right-hand sides are supported (by simply passing a 2D tensor for v )
+
+Parameters:
+v – The right-hand side of the triangular system of equations
+
+Returns:
+x – The solution, computed with the trsm function.
+
+
+
+
See also
+
+falkon.preconditioner.pc_utils.trsm() the function used to solve the system of equations
+
+
+
+
+
+
+
+init ( X : Tensor | SparseTensor , Y : Tensor , alpha : Tensor , penalty : float , N : int ) → None
+Initialize the preconditioner matrix.
+This method must be called before the preconditioner becomes usable.
+
+Parameters:
+
+X (torch.Tensor ) – (M x D) matrix of Nystroem centers
+Y (torch.Tensor ) – (M x 1) vector of targets corresponding to the Nystroem centers X
+alpha (torch.Tensor ) – (M x 1) parameter vector (of the same dimension as Y ) which gives the current
+solution to the optimization problem.
+penalty (float ) – Regularization amount
+N (int ) – Number of points in the full data-set.
+
+
+
+Notes
+If debug=True is present in the options, this method will print a lot of extra
+information pertaining timings of the various preconditioner operations. This can be
+useful to help understand how the preconditioner works.
+
+
+
+
+invA ( v )
+Solve the system of equations \(Ax = v\) for unknown vector \(x\) .
+Multiple right-hand sides are supported (by simply passing a 2D tensor for v )
+
+Parameters:
+v – The right-hand side of the triangular system of equations
+
+Returns:
+x – The solution, computed with the trsm function.
+
+
+
+
See also
+
+falkon.preconditioner.pc_utils.trsm() the function used to solve the system of equations
+
+
+
+
+
+
+
+invAt ( v )
+Solve the system of equations \(A^\top x = v\) for unknown vector \(x\) .
+Multiple right-hand sides are supported (by simply passing a 2D tensor for v )
+
+Parameters:
+v – The right-hand side of the triangular system of equations
+
+Returns:
+x – The solution, computed with the trsm function.
+
+
+
+
See also
+
+falkon.preconditioner.pc_utils.trsm() the function used to solve the system of equations
+
+
+
+
+
+
+
+invT ( v )
+Solve the system of equations \(Tx = v\) for unknown vector \(x\) .
+Multiple right-hand sides are supported (by simply passing a 2D tensor for v )
+
+Parameters:
+v – The right-hand side of the triangular system of equations
+
+Returns:
+x – The solution, computed with the trsm function.
+
+
+
+
See also
+
+falkon.preconditioner.pc_utils.trsm() the function used to solve the system of equations
+
+
+
+
+
+
+
+invTt ( v )
+Solve the system of equations \(T^\top x = v\) for unknown vector \(x\) .
+Multiple right-hand sides are supported (by simply passing a 2D tensor for v )
+
+Parameters:
+v – The right-hand side of the triangular system of equations
+
+Returns:
+x – The solution, computed with the trsm function.
+
+
+
+
See also
+
+falkon.preconditioner.pc_utils.trsm() the function used to solve the system of equations
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/api_reference/sparse.html b/api_reference/sparse.html
new file mode 100644
index 00000000..d3d1a295
--- /dev/null
+++ b/api_reference/sparse.html
@@ -0,0 +1,493 @@
+
+
+
+
+
+
+
+
+
falkon.sparse — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+falkon.sparse
+
+SparseTensor
+
+
+class falkon.sparse.sparse_tensor. SparseTensor ( indexptr : Tensor , index : Tensor , data : Tensor , size : Tuple [ int , int ] , sparse_type : str | SparseType = SparseType.CSR )
+Wrapper class to represent sparse 2D matrices in CSR or CSC format.
+The wrapper holds three 1D torch tensors which give the sparse representation
+(an index pointer, an index and the non-zero values of the matrix).
+It supports some of the common torch tensor management functions (e.g. pin_memory , device ,
+size ) and conversion to and from the corresponding scipy sparse matrix representation.
+It does not define any mathematical function on sparse matrices, which are
+instead defined separately (see sparse_matmul() for example).
+
+Parameters:
+
+indexptr (torch.Tensor ) – Array of row (or column for CSC data) pointers into the
+index and data arrays. Should be either of type long or int.
+index (torch.Tensor ) – Array of column (or row for CSC data) indices for non-zero elements.
+Should be either of type long or int.
+data (torch.Tensor ) – Array of the non-zero elements for the sparse matrix.
+size (Tuple [ int , int ] ) – Shape of the 2D tensor (rows, columns).
+sparse_type (str or falkon.sparse.sparse_tensor.SparseType ) – Whether the matrix should be interpreted as CSR or CSC format.
+
+
+
+
+
+narrow_rows ( start : int | None , length : int | None ) → SparseTensor
+Select a subset of contiguous rows from the sparse matrix.
+If this is a CSC sparse matrix, instead of taking contiguous rows we take contiguous
+columns.
+
+Parameters:
+
+
+Returns:
+SparseTensor – A new SparseTensor object with length rows.
+
+
+Notes
+The output matrix will share storage with the original matrix whenever possible.
+
+
+
+
+
+
+class falkon.sparse.sparse_tensor. SparseType ( value )
+Whether a SparseTensor is in CSC or CSR format.
+
+
+
+
+Sparse operations
+
+
+falkon.sparse. sparse_matmul ( A : SparseTensor , B : SparseTensor , out : Tensor ) → Tensor
+Sparse*Sparse matrix multiplication. Output will be copied into dense out matrix.
+This function can be applied to CPU or CUDA tensors (but all tensors must
+be on the same device).
+
+Parameters:
+
+A (SparseTensor ) – N x D, sparse matrix.
+B (SparseTensor ) – D x M, sparse matrix
+out (torch.Tensor ) – Dense N x M tensor, it will hold the output of the multiplication.
+
+
+Returns:
+out (torch.Tensor ) – The same tensor as the input out parameter.
+
+
+
+
+
+
+falkon.sparse. sparse_square_norm ( A : SparseTensor , out : Tensor ) → Tensor
+Row-wise squared l2 norm of a sparse 2D matrix.
+The operation is equivalent to squaring all elements of the matrix, and summing up the rows.
+
+Parameters:
+
+A (SparseTensor ) – The 2D matrix. Since we compute row-wise norms, the matrix must be in CSR format (for
+efficiency).
+out (torch.Tensor ) – A dense tensor with the same number of rows as matrix A . Will contain the output
+of the squared-norm operation.
+
+
+Returns:
+out (torch.Tensor ) – The same tensor as the input out parameter.
+
+
+Notes
+This function is currently limited to CPU input tensors.
+
+
+
+
+falkon.sparse. sparse_norm ( A : SparseTensor , out : Tensor | None ) → Tensor
+Row-wise l2 norm of a sparse 2D matrix
+
+Parameters:
+
+A (SparseTensor ) – The 2D matrix. Since we compute row-wise norms, the matrix must be in CSR format (for
+efficiency).
+out (torch.Tensor ) – A dense tensor with the same number of rows as matrix A . Will contain the output
+of the norm operation.
+
+
+Returns:
+out (torch.Tensor ) – The same tensor as the input out parameter.
+
+
+Notes
+This function is currently limited to CPU input tensors.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/codecov.yml b/codecov.yml
deleted file mode 100644
index 5a87e7ae..00000000
--- a/codecov.yml
+++ /dev/null
@@ -1,16 +0,0 @@
-coverage:
- precision: 2
- round: down
- range: "70...100"
- status:
- project:
- default:
- target: auto
- threshold: 0.01
- patch: false
- changes: false
- ignore:
- - "*/tests/*"
-comment:
- layout: "header, diff, sunburst, uncovered"
-behavior: default
diff --git a/doc/.gitignore b/doc/.gitignore
deleted file mode 100644
index 97b84caa..00000000
--- a/doc/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-
-_build/
diff --git a/doc/Makefile b/doc/Makefile
deleted file mode 100644
index 2d870b9c..00000000
--- a/doc/Makefile
+++ /dev/null
@@ -1,29 +0,0 @@
-# Minimal makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line, and also
-# from the environment for the first two.
-SPHINXOPTS ?=
-SPHINXBUILD ?= sphinx-build
-SOURCEDIR = .
-BUILDDIR = _build
-
-GITHUB_PAGES_BRANCH = gh-pages
-OUTPUTDIR = _build/html
-
-# Put it first so that "make" without argument is like "make help".
-help:
- @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
-
-.PHONY: help Makefile
-
-# Catch-all target: route all unknown targets to Sphinx using the new
-# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
-%: Makefile
- @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
-
-install:
- touch $(OUTPUTDIR)/.nojekyll
- ghp-import -m "[skip ci] Update documentation" -b $(GITHUB_PAGES_BRANCH) $(OUTPUTDIR)
- git push --force origin $(GITHUB_PAGES_BRANCH)
-
diff --git a/doc/conf.py b/doc/conf.py
deleted file mode 100644
index 336e0b14..00000000
--- a/doc/conf.py
+++ /dev/null
@@ -1,157 +0,0 @@
-# Configuration file for the Sphinx documentation builder.
-#
-# This file only contains a selection of the most common options. For a full
-# list see the documentation:
-# https://www.sphinx-doc.org/en/master/usage/configuration.html
-
-# -- Path setup --------------------------------------------------------------
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-import os
-import sys
-
-sys.path.insert(0, os.path.abspath('..'))
-#sys.path.insert(0, os.path.abspath('../falkon'))
-
-# Need mocking to allow everything to be imported even on no-GPU machines
-autodoc_mock_imports = [
- # "torch",
- # "pykeops",
- # "numpy",
- "falkon.la_helpers.cuda_la_helpers",
- "falkon.ooc_ops.cuda",
- "falkon.cuda",
- "falkon.ooc_ops.multigpu_potrf"
-]
-
-# -- Project information -----------------------------------------------------
-
-project = 'falkon'
-copyright = '2020, Giacomo Meanti, Alessandro Rudi'
-author = 'Giacomo Meanti, Alessandro Rudi'
-
-
-# -- General configuration ---------------------------------------------------
-
-# Error on warnings/missing links, etc
-nitpicky = True
-nitpick_ignore = [
- ('py:class', 'torch.Tensor'),
- ('py:class', "'torch.Tensor'"),
- ('py:class', "'falkon.kernels.Kernel'"),
- ('py:class', 'falkon.options.BaseOptions'),
- ('py:class', 'falkon.options.CholeskyOptions'),
- ('py:class', 'falkon.kernels.distance_kernel.DistKerContainer'),
- ('py:func', 'falkon.preconditioner.pc_utils.trsm'),
- ('py:attr', 'falkon.options.FalkonOptions.no_single_kernel'),
- ('py:attr', 'falkon.options.FalkonOptions.use_cpu'),
- ('py:attr', 'FalkonOptions.use_cpu'),
- ('py:class', "'SparseTensor'"),
- ('py:class', 'NoneType'),
- ('py:class', 'default=True'),
- ('py:class', 'default "auto"'),
- ('py:class', 'float-like'),
- ('py:class', 'falkon.options.ConjugateGradientOptions'),
-]
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
- 'sphinx.ext.napoleon',
- 'sphinx.ext.autodoc',
- # 'sphinx_autodoc_typehints',
- 'sphinx.ext.doctest',
- 'sphinx_rtd_theme', # Read-the-docs theme
- 'sphinx.ext.mathjax', # For displaying math in html output
- 'nbsphinx', # For displaying jupyter notebooks
-]
-
-# Napoleon config
-napoleon_numpy_docstring = True
-napoleon_google_docstring = False
-napoleon_use_rtype = False
-napoleon_type_alias = {
- '_tensor_type': "Union[torch.Tensor, SparseTensor]",
-}
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-# This pattern also affects html_static_path and html_extra_path.
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-def get_version(root_dir):
- with open(os.path.join(root_dir, 'VERSION')) as version_file:
- version = version_file.read().strip()
- return version
-
-# The short X.Y version.
-version = get_version("../falkon")
-# The full version, including alpha/beta/rc tags.
-release = version
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
-
-
-# -- Options for HTML output -------------------------------------------------
-
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-#
-html_theme = "sphinx_rtd_theme"
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-html_theme_options = {
- 'collapse_navigation': False,
- 'display_version': True,
-}
-
-# Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'falkondoc'
-
-numpydoc_show_class_members = False
-
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
-
-
-# Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {
- 'python': ('https://docs.python.org/{.major}'.format(sys.version_info), None),
- 'numpy': ('https://docs.scipy.org/doc/numpy/', None),
- 'scipy': ('https://docs.scipy.org/doc/scipy/reference', None),
- 'matplotlib': ('https://matplotlib.org/', None),
- 'sklearn': ('http://scikit-learn.org/stable', None),
- 'torch': ('https://pytorch.org/docs/stable/', None),
-}
-
-# sphinx_gallery_conf = {
-# 'backreferences_dir': 'gen_modules/backreferences',
-# 'doc_module': ('celer', 'numpy'),
-# 'examples_dirs': '../examples',
-# 'gallery_dirs': 'auto_examples',
-# 'reference_url': {
-# 'celer': None,
-# }
-# }
-
-html_sidebars = {'**': ['globaltoc.html', 'localtoc.html', 'searchbox.html']}
diff --git a/doc/doc-requirements.txt b/doc/doc-requirements.txt
deleted file mode 100644
index cac24968..00000000
--- a/doc/doc-requirements.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-sphinx
-nbsphinx
-numpydoc
-sphinx-rtd-theme
-pandas
-matplotlib
-jupyter
diff --git a/examples/custom_kernels.html b/examples/custom_kernels.html
new file mode 100644
index 00000000..6820f95c
--- /dev/null
+++ b/examples/custom_kernels.html
@@ -0,0 +1,1109 @@
+
+
+
+
+
+
+
+
+
Implementing A Custom Kernel — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+Implementing A Custom Kernel
+In this notebook we will show how to implement a custom kernel in Falkon.
+There are several complementary parts to a kernel, which can be added to support different operations. We will go through them one-by-one in this notebook:
+
+Basic support: supports learning with Falkon!
+Autodiff support: supports automatic hyperparameter tuning (in the hopt module)
+KeOps support: faster kernel-vector products in low dimension
+Sparse support: support learning on sparse data.
+
+
+
+
+
+
+
+[pyKeOps]: Warning, no cuda detected. Switching to cpu only.
+
+
+
+Setup a simple problem for testing
+Load and preprocess the California housing dataset. The learn_with_kernel function sets up Falkon for learning on the California housing dataset with a given kernel.
+
+
+
+
+Basic Kernel Implementation
+We must inherit from the falkon.kernels.Kernel class, and implement:
+
+compute method: the core of the kernel implementation. Given two input matrices (of size \(n\times d\) and \(m\times d\) ), and an output matrix (of size \(n\times m\) ), compute the kernel function between the two inputs and store it in the output.
+The additional diag parameter is a boolean flag. It indicates that a) \(n\) is equal to \(m\) , b) only the diagonal of the kernel matrix should be computed.
+
+compute_sparse method: this should be used if you want your kernel to support sparse data. We will implement it in a later section.
+
+We will implement a linear kernel:
+
+\[k(x, x') = \sigma (x^\top x')\]
+the parameter \(\sigma\) is the variance of the kernel. It is the only hyperparameter.
+
+
+Test the basic kernel
+
+
+
+
+
+
+tensor([[-1.3538, 4.0383, -0.5058, -3.1306, -0.3159],
+ [-0.9498, -2.0581, 0.4684, 0.8994, 0.7577],
+ [ 0.3122, -0.1038, -0.5039, 2.5076, -0.4032],
+ [ 0.8383, 3.8545, -1.4094, 1.0497, -1.4979],
+ [ 0.8344, -4.5258, 2.9362, -7.7300, 2.0740]])
+
+
+
+
+
+
+
+tensor([[6.1084],
+ [3.6743],
+ [1.2653],
+ [1.2448]])
+
+
+
+
+
+
+
+tensor([[ -3.6467],
+ [ -9.8628],
+ [ 1.4857],
+ [-12.8557]])
+
+
+
+
+
+
+
+
+Iteration 1 - Elapsed 0.07s - training error: 2.36367178
+Iteration 2 - Elapsed 0.11s - training error: 2.19508219
+Iteration 3 - Elapsed 0.14s - training error: 2.19265079
+Iteration 4 - Elapsed 0.17s - training error: 2.19265032
+Iteration 5 - Elapsed 0.20s - training error: 2.19262338
+Iteration 6 - Elapsed 0.24s - training error: 2.19262123
+Iteration 7 - Elapsed 0.27s - training error: 2.19261861
+Iteration 8 - Elapsed 0.30s - training error: 2.19261885
+Iteration 9 - Elapsed 0.33s - training error: 2.19261789
+Iteration 10 - Elapsed 0.39s - training error: 2.19261765
+Iteration 11 - Elapsed 0.42s - training error: 2.19261956
+Iteration 12 - Elapsed 0.45s - training error: 2.19261932
+Iteration 13 - Elapsed 0.48s - training error: 2.19261909
+Iteration 14 - Elapsed 0.51s - training error: 2.19261813
+Iteration 15 - Elapsed 0.55s - training error: 2.19261885
+Iteration 16 - Elapsed 0.57s - training error: 2.19261742
+Iteration 17 - Elapsed 0.61s - training error: 2.19261813
+Iteration 18 - Elapsed 0.63s - training error: 2.19261980
+Iteration 19 - Elapsed 0.66s - training error: 2.19261956
+Iteration 20 - Elapsed 0.73s - training error: 2.19262052
+Test RMSE: 2.19
+
+
+
+
+
+Differentiable Kernel
+A differentiable kernel is needed for automatic hyperparameter optimization (see the notebook ).
+It requires inheriting from falkon.kernels.DiffKernel . In addition to the methods already discussed, we must implement:
+
+compute_diff , which works similarly to the compute method but it does not have an out parameter. The implementation should be fully differentiable with respect to its inputs, and to the kernel hyperparameters.
+detach , which essentially clones the kernel with the parameters detached from the computational graph.
+
+Another important difference from the basic kernel is the call to the constructor , which must include
+
+``core_fn`` parameter (optional)
+The constructor can also optionally contain a core_fn parameter which can simplify implementation by uniting the compute and compute_diff implementations. Have a look at the implementation of kernels in falkon.kernels.dot_prod_kernel.py and falkon.kernels.distance_kernel.py for how to use the core_fn parameter.
+
+
+Test the differentiable kernel
+
+
+
+
+
+
+tensor([[ 2.7480, 1.6149, -1.2979, -2.3070, -1.1852],
+ [ 4.2437, 2.8397, -2.6248, -3.1610, -1.1940],
+ [ 2.6474, 0.9644, -0.4447, -1.1742, -1.0197],
+ [-3.4735, 0.4214, -1.9773, 0.3380, 2.2361],
+ [-1.8094, -0.2183, -0.5620, 1.8260, 1.8644]],
+ grad_fn=<KernelMmFnFullBackward>)
+
+
+
+
+
+
+
+
+
+
+Kernel-vector product
+tensor([[ 0.0198],
+ [-1.6055],
+ [ 2.3654],
+ [-0.6039]], grad_fn=<KernelMmvFnFullBackward>)
+Gradients:
+(tensor([0.1758]), tensor([[ 0.6192, 1.2183, -0.2544],
+ [ 0.6192, 1.2183, -0.2544],
+ [ 0.6192, 1.2183, -0.2544],
+ [ 0.6192, 1.2183, -0.2544]]))
+
+
+
+
+
+
+
+
+Iteration 1 - Elapsed 0.06s - training error: 2.20815659
+Iteration 2 - Elapsed 0.10s - training error: 2.19324374
+Iteration 3 - Elapsed 0.12s - training error: 2.19264197
+Iteration 4 - Elapsed 0.15s - training error: 2.19263649
+Iteration 5 - Elapsed 0.18s - training error: 2.19262934
+Iteration 6 - Elapsed 0.21s - training error: 2.19261909
+Iteration 7 - Elapsed 0.24s - training error: 2.19261813
+Iteration 8 - Elapsed 0.26s - training error: 2.19262004
+Iteration 9 - Elapsed 0.29s - training error: 2.19261765
+Iteration 10 - Elapsed 0.34s - training error: 2.19261789
+Iteration 11 - Elapsed 0.38s - training error: 2.19261909
+Iteration 12 - Elapsed 0.40s - training error: 2.19261885
+Iteration 13 - Elapsed 0.43s - training error: 2.19261956
+Iteration 14 - Elapsed 0.46s - training error: 2.19261932
+Iteration 15 - Elapsed 0.49s - training error: 2.19261932
+Iteration 16 - Elapsed 0.52s - training error: 2.19262099
+Iteration 17 - Elapsed 0.54s - training error: 2.19262123
+Iteration 18 - Elapsed 0.57s - training error: 2.19262147
+Iteration 19 - Elapsed 0.60s - training error: 2.19262195
+Iteration 20 - Elapsed 0.65s - training error: 2.19262338
+Test RMSE: 2.19
+
+
+
+
+
+Adding KeOps Support
+We must inherit from falkon.kernels.KeopsKernelMixin and implement the method keops_mmv_impl .
+KeOps-enabled kernels will still use the implementation in the compute function for computing the kernel matrix itself, but will use KeOps to compute kernel-vector products (if the data dimension is small enough).
+This method is responsible for kernel-vector products, and it should contain:
+
+A formula definition (see https://www.kernel-operations.io/keops/api/math-operations.html for the appropriate syntax)
+A definition of all variables (again have a look at the KeOps documentation, or the implementation of other kernels within Falkon)
+A call to the keops_mmv method of the KeopsKernelMixin class, responsible for calling into the KeOps formula.
+
+For our kernel we will use the (X | Y) syntax for the dot-product between samples, and then multiplication with the vector v . The aliases list maps the symbols used in the formula with the KeOps variable types.
+For more examples check the KeOps documentation or the implementation of existing kernels.
+
+
+Test the KeOps kernel
+Note that KeOps will need to compile the kernels the first time they are run!
+
+
+
+
+
+
+
+Kernel-vector product
+tensor([[-1.2121],
+ [-0.1148],
+ [ 2.2435],
+ [ 0.9918]], grad_fn=<TilingGenredAutogradBackward>)
+Gradients:
+(tensor([1.9084]), tensor([[ 1.0124, -0.8363, 0.7706],
+ [ 1.0124, -0.8363, 0.7706],
+ [ 1.0124, -0.8363, 0.7706],
+ [ 1.0124, -0.8363, 0.7706]], requires_grad=True))
+
+
+
+
+
+
+
+
+Iteration 1 - Elapsed 0.17s - training error: 2.27769995
+Iteration 2 - Elapsed 0.34s - training error: 2.19313025
+Iteration 3 - Elapsed 0.51s - training error: 2.19323778
+Iteration 4 - Elapsed 0.66s - training error: 2.19308257
+Iteration 5 - Elapsed 0.82s - training error: 2.19269753
+Iteration 6 - Elapsed 0.98s - training error: 2.19266987
+Iteration 7 - Elapsed 1.13s - training error: 2.19262886
+Iteration 8 - Elapsed 1.29s - training error: 2.19262505
+Iteration 9 - Elapsed 1.45s - training error: 2.19262052
+Iteration 10 - Elapsed 1.76s - training error: 2.19260979
+Iteration 11 - Elapsed 1.92s - training error: 2.19261813
+Iteration 12 - Elapsed 2.08s - training error: 2.19261646
+Iteration 13 - Elapsed 2.25s - training error: 2.19263911
+Iteration 14 - Elapsed 2.42s - training error: 2.19263911
+Iteration 15 - Elapsed 2.58s - training error: 2.19264960
+Iteration 16 - Elapsed 2.74s - training error: 2.19265103
+Iteration 17 - Elapsed 2.91s - training error: 2.19268680
+Iteration 18 - Elapsed 3.07s - training error: 2.19269395
+Iteration 19 - Elapsed 3.23s - training error: 2.19270301
+Iteration 20 - Elapsed 3.55s - training error: 2.19275403
+Test RMSE: 2.19
+
+
+
+
+
+Supporting Sparse Data
+Sparse support can be necessary for kernel learning in extremely high dimensions, when the inputs are sparse.
+Sparse support requires using special functions for common operations such as matrix multiplication. Falkon implements sparse tensors in a CSR format (PyTorch is slowly picking this format up, in place of COO), through the falkon.sparse.SparseTensor class.
+We will implement the compute_sparse method below, supporting both diagonal and full kernels. However, only CPU support is added here (CUDA support is possible but requires a few more details), and differentiable sparse kernels are not supported.
+
+
+
+Testing sparse support
+We generate two sparse matrices, and check that the sparse kernel is equivalent to the dense version.
+
+
+
+
+
+tensor([[0., 5.],
+ [1., 8.],
+ [2., 0.]])
+
+
+
+
+
+
+
+tensor([[0., 2.],
+ [1., 0.],
+ [3., 4.]])
+
+
+
+
+
+
+
+
+tensor([[True, True, True],
+ [True, True, True],
+ [True, True, True]])
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/examples/custom_kernels.ipynb b/examples/custom_kernels.ipynb
new file mode 100644
index 00000000..27f46126
--- /dev/null
+++ b/examples/custom_kernels.ipynb
@@ -0,0 +1,944 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "be9efd3e",
+ "metadata": {},
+ "source": [
+ "# Implementing A Custom Kernel\n",
+ "\n",
+ "In this notebook we will show how to implement a custom kernel in Falkon.\n",
+ "\n",
+ "There are several complementary parts to a kernel, which can be added to support different operations.\n",
+ "We will go through them one-by-one in this notebook:\n",
+ "\n",
+ " - Basic support: supports learning with Falkon!\n",
+ " - Autodiff support: supports automatic hyperparameter tuning (in the `hopt` module)\n",
+ " - KeOps support: faster kernel-vector products in low dimension\n",
+ " - Sparse support: support learning on sparse data."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "id": "cc7a4428",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[pyKeOps]: Warning, no cuda detected. Switching to cpu only.\n"
+ ]
+ }
+ ],
+ "source": [
+ "import matplotlib.pyplot as plt\n",
+ "plt.style.use('ggplot')\n",
+ "\n",
+ "from sklearn import datasets\n",
+ "import torch\n",
+ "import numpy as np\n",
+ "\n",
+ "import falkon\n",
+ "from falkon import FalkonOptions\n",
+ "from falkon.kernels import Kernel, DiffKernel, KeopsKernelMixin"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "6b37d1d0",
+ "metadata": {
+ "heading_collapsed": true
+ },
+ "source": [
+ "## Setup a simple problem for testing\n",
+ "\n",
+ "Load and preprocess the *California housing* dataset. The `learn_with_kernel` function sets up Falkon for learning on the California housing datase with a given kernel."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "be73c315",
+ "metadata": {
+ "hidden": true
+ },
+ "outputs": [],
+ "source": [
+ "X, Y = datasets.fetch_california_housing(return_X_y=True)\n",
+ "num_train = int(X.shape[0] * 0.8)\n",
+ "num_test = X.shape[0] - num_train\n",
+ "shuffle_idx = np.arange(X.shape[0])\n",
+ "np.random.shuffle(shuffle_idx)\n",
+ "train_idx = shuffle_idx[:num_train]\n",
+ "test_idx = shuffle_idx[num_train:]\n",
+ "\n",
+ "Xtrain, Ytrain = X[train_idx], Y[train_idx]\n",
+ "Xtest, Ytest = X[test_idx], Y[test_idx]\n",
+ "# convert numpy -> pytorch\n",
+ "Xtrain = torch.from_numpy(Xtrain).to(dtype=torch.float32)\n",
+ "Xtest = torch.from_numpy(Xtest).to(dtype=torch.float32)\n",
+ "Ytrain = torch.from_numpy(Ytrain).to(dtype=torch.float32)\n",
+ "Ytest = torch.from_numpy(Ytest).to(dtype=torch.float32)\n",
+ "# z-score normalization\n",
+ "train_mean = Xtrain.mean(0, keepdim=True)\n",
+ "train_std = Xtrain.std(0, keepdim=True)\n",
+ "Xtrain -= train_mean\n",
+ "Xtrain /= train_std\n",
+ "Xtest -= train_mean\n",
+ "Xtest /= train_std"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "a58a7c95",
+ "metadata": {
+ "hidden": true
+ },
+ "outputs": [],
+ "source": [
+ "def rmse(true, pred):\n",
+ " return torch.sqrt(torch.mean((true.reshape(-1, 1) - pred.reshape(-1, 1))**2))\n",
+ "\n",
+ "def learn_with_kernel(kernel):\n",
+ " flk_opt = FalkonOptions(use_cpu=True)\n",
+ " model = falkon.Falkon(\n",
+ " kernel=kernel, penalty=1e-5, M=1000, options=flk_opt,\n",
+ " error_every=1, error_fn=rmse)\n",
+ " model.fit(Xtrain, Ytrain)\n",
+ " ts_err = rmse(Ytest, model.predict(Xtest))\n",
+ " print(\"Test RMSE: %.2f\" % (ts_err))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1d046959",
+ "metadata": {},
+ "source": [
+ "## Basic Kernel Implementation\n",
+ "\n",
+ "We must inherit from the `falkon.kernels.Kernel` class, and implement:\n",
+ " - `compute` method: the core of the kernel implementation. \n",
+ " Given two input matrices (of size $n\\times d$ and $m\\times d$), and an output matrix (of size $n\\times m$), compute the kernel function between the two inputs and store it in the output.\n",
+ " \n",
+ " The additional `diag` parameter is a boolean flag. It indicates that a) $n$ is equal to $m$, b) only the diagonal of the kernel matrix should be computed.\n",
+ " - `compute_sparse` method: this should be used if you want your kernel to support sparse data. \n",
+ " We will implement it in a later section.\n",
+ " \n",
+ "We will implement a **linear** kernel:\n",
+ "$$k(x, x') = \\sigma (x^\\top x')$$\n",
+ "the parameter $\\sigma$ is the *variance* of the kernel. It is the only hyperparameter."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "bfd90b30",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "class BasicLinearKernel(Kernel):\n",
+ " def __init__(self, lengthscale, options):\n",
+ " # The base class takes as inputs a name for the kernel, and\n",
+ " # an instance of `FalkonOptions`.\n",
+ " super().__init__(\"basic_linear\", options)\n",
+ " \n",
+ " self.lengthscale = lengthscale\n",
+ " \n",
+ " def compute(self, X1: torch.Tensor, X2: torch.Tensor, out: torch.Tensor, diag: bool) -> torch.Tensor:\n",
+ " # To support different devices/data types, you must make sure\n",
+ " # the lengthscale is compatible with the data.\n",
+ " lengthscale = self.lengthscale.to(device=X1.device, dtype=X1.dtype)\n",
+ "\n",
+ " scaled_X1 = X1 * lengthscale\n",
+ " \n",
+ " if diag:\n",
+ " out.copy_(torch.sum(scaled_X1 * X2, dim=-1))\n",
+ " else:\n",
+ " # The dot-product row-by-row on `X1` and `X2` can be computed\n",
+ " # on many rows at a time with matrix multiplication.\n",
+ " out = torch.matmul(scaled_X1, X2.T, out=out)\n",
+ "\n",
+ " return out\n",
+ " \n",
+ " def compute_sparse(self, X1, X2, out, diag, **kwargs) -> torch.Tensor:\n",
+ " raise NotImplementedError(\"Sparse not implemented\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "f19f3e95",
+ "metadata": {},
+ "source": [
+ "### Test the basic kernel"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "id": "7bfcd1be",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Initialize the kernel\n",
+ "lengthscale_init = torch.tensor([1.0])\n",
+ "k = BasicLinearKernel(lengthscale_init, options=falkon.FalkonOptions())"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "id": "32b86480",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "tensor([[-1.3538, 4.0383, -0.5058, -3.1306, -0.3159],\n",
+ " [-0.9498, -2.0581, 0.4684, 0.8994, 0.7577],\n",
+ " [ 0.3122, -0.1038, -0.5039, 2.5076, -0.4032],\n",
+ " [ 0.8383, 3.8545, -1.4094, 1.0497, -1.4979],\n",
+ " [ 0.8344, -4.5258, 2.9362, -7.7300, 2.0740]])"
+ ]
+ },
+ "execution_count": 6,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# The kernel matrix\n",
+ "k(torch.randn(5, 3), torch.randn(5, 3))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "id": "a3134217",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "tensor([[6.1084],\n",
+ " [3.6743],\n",
+ " [1.2653],\n",
+ " [1.2448]])"
+ ]
+ },
+ "execution_count": 7,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# Kernel-vector product\n",
+ "k.mmv(torch.randn(4, 3), torch.randn(4, 3), v=torch.randn(4, 1))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "id": "53521733",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "tensor([[ -3.6467],\n",
+ " [ -9.8628],\n",
+ " [ 1.4857],\n",
+ " [-12.8557]])"
+ ]
+ },
+ "execution_count": 8,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# Double kernel-vector product\n",
+ "k.dmmv(torch.randn(3, 3), torch.randn(4, 3), v=torch.randn(4, 1), w=torch.randn(3, 1))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "id": "31837bb2",
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Iteration 1 - Elapsed 0.07s - training error: 2.36367178\n",
+ "Iteration 2 - Elapsed 0.11s - training error: 2.19508219\n",
+ "Iteration 3 - Elapsed 0.14s - training error: 2.19265079\n",
+ "Iteration 4 - Elapsed 0.17s - training error: 2.19265032\n",
+ "Iteration 5 - Elapsed 0.20s - training error: 2.19262338\n",
+ "Iteration 6 - Elapsed 0.24s - training error: 2.19262123\n",
+ "Iteration 7 - Elapsed 0.27s - training error: 2.19261861\n",
+ "Iteration 8 - Elapsed 0.30s - training error: 2.19261885\n",
+ "Iteration 9 - Elapsed 0.33s - training error: 2.19261789\n",
+ "Iteration 10 - Elapsed 0.39s - training error: 2.19261765\n",
+ "Iteration 11 - Elapsed 0.42s - training error: 2.19261956\n",
+ "Iteration 12 - Elapsed 0.45s - training error: 2.19261932\n",
+ "Iteration 13 - Elapsed 0.48s - training error: 2.19261909\n",
+ "Iteration 14 - Elapsed 0.51s - training error: 2.19261813\n",
+ "Iteration 15 - Elapsed 0.55s - training error: 2.19261885\n",
+ "Iteration 16 - Elapsed 0.57s - training error: 2.19261742\n",
+ "Iteration 17 - Elapsed 0.61s - training error: 2.19261813\n",
+ "Iteration 18 - Elapsed 0.63s - training error: 2.19261980\n",
+ "Iteration 19 - Elapsed 0.66s - training error: 2.19261956\n",
+ "Iteration 20 - Elapsed 0.73s - training error: 2.19262052\n",
+ "Test RMSE: 2.19\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Learning on the california housing dataset\n",
+ "learn_with_kernel(k)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "d1a4b946",
+ "metadata": {
+ "heading_collapsed": true
+ },
+ "source": [
+ "## Differentiable Kernel\n",
+ "\n",
+ "A differentiable kernel is needed for automatic hyperparameter optimization (see the [notebook](hyperopt.ipynb)).\n",
+ "\n",
+ "It requires inheriting from `falkon.kernels.DiffKernel`. In addition to the methods already discussed, we must implement:\n",
+ " - `compute_diff`, which works similarly to the `compute` method but it does not have an `out` parameter. The implementation should be fully differentiable with respect to its inputs, and to the kernel hyperparameters.\n",
+ " - `detach`, which essentially clones the kernel with the parameters *detached* from the computational graph.\n",
+ " \n",
+ "Another important difference from the basic kernel is the call to the *constructor*, which must include\n",
+ " - All kernel hyperparameters as keyword arguments. These will be available as attributes on the class. Hyperparameters do not need to be tensors.\n",
+ "\n",
+ "**`core_fn` parameter (optional)**\n",
+ "\n",
+ "The constructor can also *optionally* contain a `core_fn` parameter which can simplify implementation by uniting the `compute` and `compute_diff` implementations. Have a look at the implementation of kernels in `falkon.kernels.dot_prod_kernel.py` and `falkon.kernels.distance_kernel.py` for how to use the `core_fn` parameter."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "id": "13c002e2",
+ "metadata": {
+ "hidden": true
+ },
+ "outputs": [],
+ "source": [
+ "class DiffLinearKernel(DiffKernel):\n",
+ " def __init__(self, lengthscale, options):\n",
+ " # Super-class constructor call. We do not specify core_fn\n",
+ " # but we must specify the hyperparameter of this kernel (lengthscale)\n",
+ " super().__init__(\"diff_linear\", \n",
+ " options, \n",
+ " core_fn=None, \n",
+ " lengthscale=lengthscale)\n",
+ " \n",
+ " def compute(self, X1: torch.Tensor, X2: torch.Tensor, out: torch.Tensor, diag: bool):\n",
+ " lengthscale = self.lengthscale.to(device=X1.device, dtype=X1.dtype)\n",
+ " scaled_X1 = X1 * lengthscale\n",
+ " if diag:\n",
+ " out.copy_(torch.sum(scaled_X1 * X2, dim=-1))\n",
+ " else:\n",
+ " out = torch.matmul(scaled_X1, X2.T, out=out)\n",
+ "\n",
+ " return out\n",
+ " \n",
+ " def compute_diff(self, X1: torch.Tensor, X2: torch.Tensor, diag: bool):\n",
+ " # The implementation here is similar to `compute` without in-place operations.\n",
+ " lengthscale = self.lengthscale.to(device=X1.device, dtype=X1.dtype)\n",
+ " scaled_X1 = X1 * lengthscale\n",
+ " \n",
+ " if diag:\n",
+ " return torch.sum(scaled_X1 * X2, dim=-1)\n",
+ " \n",
+ " return torch.matmul(scaled_X1, X2.T)\n",
+ "\n",
+ " def detach(self):\n",
+ " # Clones the class with detached hyperparameters\n",
+ " return DiffLinearKernel(\n",
+ " lengthscale=self.lengthscale.detach(), \n",
+ " options=self.params\n",
+ " )\n",
+ " \n",
+ " def compute_sparse(self, X1, X2, out, diag, **kwargs) -> torch.Tensor:\n",
+ " raise NotImplementedError(\"Sparse not implemented\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "a7dd73bf",
+ "metadata": {
+ "hidden": true
+ },
+ "source": [
+ "### Test the differentiable kernel"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "id": "0f223c67",
+ "metadata": {
+ "hidden": true
+ },
+ "outputs": [],
+ "source": [
+ "# Initialize the kernel, with a lengthscale which requires grad.\n",
+ "lengthscale_init = torch.tensor([1.0]).requires_grad_()\n",
+ "k = DiffLinearKernel(lengthscale_init, options=falkon.FalkonOptions())"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "id": "cc862c47",
+ "metadata": {
+ "hidden": true,
+ "scrolled": false
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "tensor([[ 2.7480, 1.6149, -1.2979, -2.3070, -1.1852],\n",
+ " [ 4.2437, 2.8397, -2.6248, -3.1610, -1.1940],\n",
+ " [ 2.6474, 0.9644, -0.4447, -1.1742, -1.0197],\n",
+ " [-3.4735, 0.4214, -1.9773, 0.3380, 2.2361],\n",
+ " [-1.8094, -0.2183, -0.5620, 1.8260, 1.8644]],\n",
+ " grad_fn=
)"
+ ]
+ },
+ "execution_count": 12,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# Kernel matrix. Notice how the outputs has a `grad_fn`\n",
+ "k_mat = k(torch.randn(5, 3), torch.randn(5, 3))\n",
+ "k_mat"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "id": "a22671f4",
+ "metadata": {
+ "hidden": true
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "(tensor([-0.7049]),)"
+ ]
+ },
+ "execution_count": 13,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# Gradient of the kernel with respect to the lengthscale.\n",
+ "torch.autograd.grad(k_mat.sum(), k.lengthscale)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "id": "d508d0d7",
+ "metadata": {
+ "hidden": true
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Kernel-vector product\n",
+ "tensor([[ 0.0198],\n",
+ " [-1.6055],\n",
+ " [ 2.3654],\n",
+ " [-0.6039]], grad_fn=)\n",
+ "Gradients:\n",
+ "(tensor([0.1758]), tensor([[ 0.6192, 1.2183, -0.2544],\n",
+ " [ 0.6192, 1.2183, -0.2544],\n",
+ " [ 0.6192, 1.2183, -0.2544],\n",
+ " [ 0.6192, 1.2183, -0.2544]]))\n"
+ ]
+ }
+ ],
+ "source": [
+ "# kernel-vector product + gradient\n",
+ "m1 = torch.randn(4, 3).requires_grad_()\n",
+ "m2 = torch.randn(2, 3)\n",
+ "v = torch.randn(2, 1)\n",
+ "k_mmv = k.mmv(m1, m2, v)\n",
+ "print(\"Kernel-vector product\")\n",
+ "print(k_mmv)\n",
+ "print(\"Gradients:\")\n",
+ "print(torch.autograd.grad(k_mmv.sum(), [k.lengthscale, m1]))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "id": "c56f9a2a",
+ "metadata": {
+ "hidden": true
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Iteration 1 - Elapsed 0.06s - training error: 2.20815659\n",
+ "Iteration 2 - Elapsed 0.10s - training error: 2.19324374\n",
+ "Iteration 3 - Elapsed 0.12s - training error: 2.19264197\n",
+ "Iteration 4 - Elapsed 0.15s - training error: 2.19263649\n",
+ "Iteration 5 - Elapsed 0.18s - training error: 2.19262934\n",
+ "Iteration 6 - Elapsed 0.21s - training error: 2.19261909\n",
+ "Iteration 7 - Elapsed 0.24s - training error: 2.19261813\n",
+ "Iteration 8 - Elapsed 0.26s - training error: 2.19262004\n",
+ "Iteration 9 - Elapsed 0.29s - training error: 2.19261765\n",
+ "Iteration 10 - Elapsed 0.34s - training error: 2.19261789\n",
+ "Iteration 11 - Elapsed 0.38s - training error: 2.19261909\n",
+ "Iteration 12 - Elapsed 0.40s - training error: 2.19261885\n",
+ "Iteration 13 - Elapsed 0.43s - training error: 2.19261956\n",
+ "Iteration 14 - Elapsed 0.46s - training error: 2.19261932\n",
+ "Iteration 15 - Elapsed 0.49s - training error: 2.19261932\n",
+ "Iteration 16 - Elapsed 0.52s - training error: 2.19262099\n",
+ "Iteration 17 - Elapsed 0.54s - training error: 2.19262123\n",
+ "Iteration 18 - Elapsed 0.57s - training error: 2.19262147\n",
+ "Iteration 19 - Elapsed 0.60s - training error: 2.19262195\n",
+ "Iteration 20 - Elapsed 0.65s - training error: 2.19262338\n",
+ "Test RMSE: 2.19\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Learning on the california housing dataset\n",
+ "learn_with_kernel(k)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "fd29c886",
+ "metadata": {
+ "heading_collapsed": true
+ },
+ "source": [
+ "## Adding KeOps Support\n",
+ "\n",
+ "We must inherit from `falkon.kernels.KeopsKernelMixin` and implement the method `keops_mmv_impl`.\n",
+ "\n",
+ "KeOps-enabled kernels will still use the implementation in the `compute` function for computing the kernel matrix itself, but will use KeOps to compute kernel-vector products (if the data dimension is small enough).\n",
+ "\n",
+ "This method is responsible for kernel-vector products, and it should contain:\n",
+ " 1. A formula definition (see https://www.kernel-operations.io/keops/api/math-operations.html for the appropriate syntax)\n",
+ " 2. A definition of all variables (again have a look at the KeOps documentation, or the implementation\n",
+ " of other kernels within Falkon)\n",
+ " 3. A call to the `keops_mmv` method of the `KeopsKernelMixin` class, responsible for calling into\n",
+ " the KeOps formula.\n",
+ " \n",
+ "For our kernel we will use the `(X | Y)` syntax for the dot-product between samples, and then multiplication with the vector `v`. The aliases list maps the symbols used in the formula with the KeOps variable types.\n",
+ "\n",
+    "For more examples check the [KeOps documentation](https://www.kernel-operations.io) or the implementation of existing kernels."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "id": "8164d28e",
+ "metadata": {
+ "hidden": true
+ },
+ "outputs": [],
+ "source": [
+ "class KeopsLinearKernel(DiffKernel, KeopsKernelMixin):\n",
+ " def __init__(self, lengthscale, options):\n",
+ " super().__init__(\"my-keops-linear\", \n",
+ " options, \n",
+ " core_fn=None, \n",
+ " lengthscale=lengthscale)\n",
+ " \n",
+ " def compute(self, X1: torch.Tensor, X2: torch.Tensor, out: torch.Tensor, diag: bool):\n",
+ " lengthscale = self.lengthscale.to(device=X1.device, dtype=X1.dtype)\n",
+ " scaled_X1 = X1 * lengthscale\n",
+ " \n",
+ " if diag:\n",
+ " out.copy_(torch.sum(scaled_X1 * X2, dim=-1))\n",
+ " else:\n",
+ " out = torch.matmul(scaled_X1, X2.T, out=out)\n",
+ "\n",
+ " return out\n",
+ " \n",
+ " def compute_diff(self, X1: torch.Tensor, X2: torch.Tensor, diag: bool):\n",
+ " scaled_X1 = X1 * self.lengthscale\n",
+ " \n",
+ " if diag:\n",
+ " return torch.sum(scaled_X1 * X2, dim=-1)\n",
+ " \n",
+ " return torch.matmul(scaled_X1, X2.T)\n",
+ "\n",
+ " def detach(self):\n",
+ " return KeopsLinearKernel(\n",
+ " lengthscale=self.lengthscale.detach(), \n",
+ " options=self.params\n",
+ " )\n",
+ " \n",
+ " def keops_mmv_impl(self, X1, X2, v, kernel, out, opt):\n",
+ " # Keops formula for kernel-vector.\n",
+ " formula = '(scale * (X | Y)) * v'\n",
+ " aliases = [\n",
+ " 'X = Vi(%d)' % (X1.shape[1]),\n",
+ " 'Y = Vj(%d)' % (X2.shape[1]),\n",
+ " 'v = Vj(%d)' % (v.shape[1]),\n",
+ " 'scale = Pm(%d)' % (self.lengthscale.shape[0]),\n",
+ " ]\n",
+ " other_vars = [\n",
+ " self.lengthscale.to(dtype=X1.dtype, device=X1.device),\n",
+ " ]\n",
+ " # Call to the executor of the formula.\n",
+ " return self.keops_mmv(X1, X2, v, out, formula, aliases, other_vars, opt)\n",
+ "\n",
+ " \n",
+ " def compute_sparse(self, X1, X2, out: torch.Tensor, diag: bool, **kwargs) -> torch.Tensor:\n",
+ " raise NotImplementedError(\"Sparse not implemented\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "11528fb9",
+ "metadata": {
+ "hidden": true
+ },
+ "source": [
+ "### Test the KeOps kernel\n",
+ "\n",
+ "Note that KeOps will need to compile the kernels the first time they are run!"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "id": "12107005",
+ "metadata": {
+ "hidden": true
+ },
+ "outputs": [],
+ "source": [
+ "lengthscale_init = torch.tensor([1.0]).requires_grad_()\n",
+ "k = KeopsLinearKernel(lengthscale_init, options=falkon.FalkonOptions(use_cpu=True))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "id": "c231eabc",
+ "metadata": {
+ "hidden": true
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Kernel-vector product\n",
+ "tensor([[-1.2121],\n",
+ " [-0.1148],\n",
+ " [ 2.2435],\n",
+ " [ 0.9918]], grad_fn=)\n",
+ "Gradients:\n",
+ "(tensor([1.9084]), tensor([[ 1.0124, -0.8363, 0.7706],\n",
+ " [ 1.0124, -0.8363, 0.7706],\n",
+ " [ 1.0124, -0.8363, 0.7706],\n",
+ " [ 1.0124, -0.8363, 0.7706]], requires_grad=True))\n"
+ ]
+ }
+ ],
+ "source": [
+ "# kernel-vector product + gradient\n",
+ "m1 = torch.randn(4, 3).requires_grad_()\n",
+ "m2 = torch.randn(2, 3)\n",
+ "v = torch.randn(2, 1)\n",
+ "k_mmv = k.mmv(m1, m2, v)\n",
+ "print(\"Kernel-vector product\")\n",
+ "print(k_mmv)\n",
+ "print(\"Gradients:\")\n",
+ "print(torch.autograd.grad(k_mmv.sum(), [k.lengthscale, m1]))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "id": "7d2b80e5",
+ "metadata": {
+ "hidden": true
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Iteration 1 - Elapsed 0.17s - training error: 2.27769995\n",
+ "Iteration 2 - Elapsed 0.34s - training error: 2.19313025\n",
+ "Iteration 3 - Elapsed 0.51s - training error: 2.19323778\n",
+ "Iteration 4 - Elapsed 0.66s - training error: 2.19308257\n",
+ "Iteration 5 - Elapsed 0.82s - training error: 2.19269753\n",
+ "Iteration 6 - Elapsed 0.98s - training error: 2.19266987\n",
+ "Iteration 7 - Elapsed 1.13s - training error: 2.19262886\n",
+ "Iteration 8 - Elapsed 1.29s - training error: 2.19262505\n",
+ "Iteration 9 - Elapsed 1.45s - training error: 2.19262052\n",
+ "Iteration 10 - Elapsed 1.76s - training error: 2.19260979\n",
+ "Iteration 11 - Elapsed 1.92s - training error: 2.19261813\n",
+ "Iteration 12 - Elapsed 2.08s - training error: 2.19261646\n",
+ "Iteration 13 - Elapsed 2.25s - training error: 2.19263911\n",
+ "Iteration 14 - Elapsed 2.42s - training error: 2.19263911\n",
+ "Iteration 15 - Elapsed 2.58s - training error: 2.19264960\n",
+ "Iteration 16 - Elapsed 2.74s - training error: 2.19265103\n",
+ "Iteration 17 - Elapsed 2.91s - training error: 2.19268680\n",
+ "Iteration 18 - Elapsed 3.07s - training error: 2.19269395\n",
+ "Iteration 19 - Elapsed 3.23s - training error: 2.19270301\n",
+ "Iteration 20 - Elapsed 3.55s - training error: 2.19275403\n",
+ "Test RMSE: 2.19\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Learning on the california housing dataset.\n",
+ "# Due to differences in floating point code, results may be slightly \n",
+ "# different from the other implementations.\n",
+ "learn_with_kernel(k)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "4fecea8f",
+ "metadata": {},
+ "source": [
+ "## Supporting Sparse Data\n",
+ "\n",
+ "Sparse support can be necessary for kernel learning in extremely high dimensions, when the inputs are sparse.\n",
+ "\n",
+ "Sparse support requires using special functions for common operations such as matrix multiplication. Falkon implements sparse tensors in a CSR format (PyTorch is slowly picking this format up, in place of COO), through the `falkon.sparse.SparseTensor` class.\n",
+ "\n",
+ "We will implement the `compute_sparse` method below, supporting both diagonal and full kernels.\n",
+ "However, only CPU support is added here (CUDA support is possible but requires a few more details), and differentiable sparse kernels are not supported."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 20,
+ "id": "98a92bf4",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from falkon.sparse import SparseTensor\n",
+ "from falkon.sparse import sparse_matmul, bdot"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 21,
+ "id": "d9304478",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "class SparseLinearKernel(Kernel):\n",
+ " def __init__(self, lengthscale, options):\n",
+ " # The base class takes as inputs a name for the kernel, and\n",
+ " # an instance of `FalkonOptions`.\n",
+ " super().__init__(\"sparse_linear\", options)\n",
+ " \n",
+ " self.lengthscale = lengthscale\n",
+ " \n",
+ " def compute(self, X1: torch.Tensor, X2: torch.Tensor, out: torch.Tensor, diag: bool) -> torch.Tensor:\n",
+ " lengthscale = self.lengthscale.to(device=X1.device, dtype=X1.dtype)\n",
+ "\n",
+ " scaled_X1 = X1 * lengthscale\n",
+ " \n",
+ " if diag:\n",
+ " out.copy_(torch.sum(scaled_X1 * X2, dim=-1))\n",
+ " else:\n",
+ " # The dot-product row-by-row on `X1` and `X2` can be computed\n",
+ " # on many rows at a time with matrix multiplication.\n",
+ " out = torch.matmul(scaled_X1, X2.T, out=out)\n",
+ "\n",
+ " return out\n",
+ " \n",
+ " def compute_sparse(self, \n",
+ " X1: SparseTensor, \n",
+ " X2: SparseTensor, \n",
+ " out: torch.Tensor, \n",
+ " diag: bool,\n",
+ " **kwargs) -> torch.Tensor:\n",
+ " # The inputs will be matrix X1(n*d) in CSR format, and X2(d*n) in CSC format.\n",
+ " \n",
+ " # To support different devices/data types, you must make sure\n",
+ " # the lengthscale is compatible with the data.\n",
+ " lengthscale = self.lengthscale.to(device=X1.device, dtype=X1.dtype)\n",
+ " \n",
+ " if diag:\n",
+ " # The diagonal is a dot-product between rows of X1 and X2.\n",
+ " # The batched-dot is only implemented on CPU.\n",
+ " out = bdot(X1, X2.transpose_csr(), out)\n",
+ " else:\n",
+ " # Otherwise we need to matrix-multiply. Note that X2 is already\n",
+ " # transposed correctly.\n",
+ " out = sparse_matmul(X1, X2, out)\n",
+ "\n",
+ " out.mul_(lengthscale)\n",
+ " return out"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "75a18424",
+ "metadata": {},
+ "source": [
+ "### Testing sparse support\n",
+ "\n",
+ "We generate two sparse matrices, and check that the sparse kernel is equivalent to the dense version."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 22,
+ "id": "e6e4c0bd",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "tensor([[0., 5.],\n",
+ " [1., 8.],\n",
+ " [2., 0.]])"
+ ]
+ },
+ "execution_count": 22,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "indexptr = torch.tensor([0, 1, 3, 4], dtype=torch.long)\n",
+ "index = torch.tensor([1, 0, 1, 0], dtype=torch.long)\n",
+ "value = torch.tensor([5, 1, 8, 2], dtype=torch.float32)\n",
+ "sp1 = SparseTensor(indexptr=indexptr, index=index, data=value, size=(3, 2), sparse_type=\"csr\")\n",
+ "# Converted to dense:\n",
+ "dense1 = torch.from_numpy(sp1.to_scipy().todense())\n",
+ "dense1"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 23,
+ "id": "7470a0b6",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "tensor([[0., 2.],\n",
+ " [1., 0.],\n",
+ " [3., 4.]])"
+ ]
+ },
+ "execution_count": 23,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "indexptr = torch.tensor([0, 1, 2, 4], dtype=torch.long)\n",
+ "index = torch.tensor([1, 0, 0, 1], dtype=torch.long)\n",
+ "value = torch.tensor([2, 1, 3, 4], dtype=torch.float32)\n",
+ "sp2 = SparseTensor(indexptr=indexptr, index=index, data=value, size=(3, 2), sparse_type=\"csr\")\n",
+ "dense2 = torch.from_numpy(sp2.to_scipy().todense())\n",
+ "dense2"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 24,
+ "id": "f217cde0",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Initialize the kernel\n",
+ "lengthscale_init = torch.tensor([1.0])\n",
+ "k = SparseLinearKernel(lengthscale_init, options=falkon.FalkonOptions())"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 25,
+ "id": "80f6a317",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "tensor([[True, True, True],\n",
+ " [True, True, True],\n",
+ " [True, True, True]])"
+ ]
+ },
+ "execution_count": 25,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "k(sp1, sp2) == k(dense1, dense2)\n"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.9"
+ },
+ "toc": {
+ "base_numbering": 1,
+ "nav_menu": {},
+ "number_sections": true,
+ "sideBar": true,
+ "skip_h1_title": false,
+ "title_cell": "Table of Contents",
+ "title_sidebar": "Contents",
+ "toc_cell": false,
+ "toc_position": {},
+ "toc_section_display": true,
+ "toc_window_display": false
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/examples/examples.html b/examples/examples.html
new file mode 100644
index 00000000..593128ad
--- /dev/null
+++ b/examples/examples.html
@@ -0,0 +1,385 @@
+
+
+
+
+
+
+
+
+ Examples — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+Examples
+
+
+Starting with simple kernel ridge regression, via classification, hyperparameter tuning, to large-scale GPU experiments,
+these notebooks cover all there is to know about Falkon.
+
+
+Kernel ridge regression goes through the basic notions of the library with a simple example;
+Logistic Falkon tutorial shows how to use the Logistic Falkon estimator, comparing the results with normal Falkon;
+Hyperparameter tuning is a fully worked out example of optimizing hyperparameters with cross-validation for a real-world multi-class problem;
+custom kernels will walk you through the implementation of a custom kernel.
+Gradient hyperopt : a tutorial on using the hopt module for gradient-based hyperparameter optimization in Falkon.
+MNIST example : A simple tutorial on using Falkon for MNIST digit classification.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/examples/falkon_cv.html b/examples/falkon_cv.html
new file mode 100644
index 00000000..c0e18e79
--- /dev/null
+++ b/examples/falkon_cv.html
@@ -0,0 +1,674 @@
+
+
+
+
+
+
+
+
+ Hyperparameter Tuning with Falkon — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+Hyperparameter Tuning with Falkon
+
+Introduction
+We use Falkon for a multi-class problem (on the digits dataset), showing how to integrate it into scikit-learn for hyperparameter optimization.
+Since both Falkon and LogisticFalkon are estimators, and follow scikit-learn’s API, integration is seamless.
+
+
+
+
+
+
+[pyKeOps]: Warning, no cuda detected. Switching to cpu only.
+
+
+
+
+Load the data
+We use the digits dataset, which is distributed alongside scikit-learn.
+
+
+
+
+
+
+
+
+
+
+
+
+Split into training and test sets
+We split the data into a training set with 80% of the samples and a test set with the remaining 20%.
+
+
+
+Data Preprocessing
+As always with Falkon we must:
+
+Convert from numpy arrays to torch tensors
+Convert data and labels to the same data-type (in this case float32)
+
+
+Normalizing the data is always a good idea. Here we use the global mean and standard deviation of the training set for z-score normalization.
+
+Since Falkon optimizes with respect to the square loss, using ordinal labels (e.g. 1, 4, 5) is not ideal since closeness in the natural numbers is meaningless for classification. We therefore convert the labels to a 1-hot representation.
+
+
+
+
+
+
+First label vector: tensor([0., 0., 1., 0., 0., 0., 0., 0., 0., 0.])
+
+
+
+
+Search for the optimal parameters
+Since Falkon (with the Gaussian kernel) has only 3 important hyperparameters, it is entirely feasible to run a grid search over them to find the best parameter settings.
+Scikit-learn has great support for this, with the GridSearchCV class. For each parameter setting it will run 5-fold cross-validation on the training set, to determine which has the best results.
+Given the dataset is quite small, and Falkon is fast, we can run 160 model evaluations in around 40 seconds.
+
+The parameter settings which will be explored by the grid-search are:
+
+four different kernel length-scales (varying around small positive numbers, which are usually good for normalized data)
+four different regularization values
+two different values for M: the number of inducing points. As we will see, a larger M is almost always better than a smaller one (but it leads to longer training times). Of course this is not the case if the dataset is easy to overfit, since reducing M may also provide additional regularization.
+
+When we create the estimator we pass it additional parameters via the FalkonOptions class. In our case we want to ensure that the model runs on the CPU by setting use_cpu=True .
+
+
+
+
+
+
+
+The best parameters are: {'M': 500, 'kernel': GaussianKernel(sigma=Parameter containing:
+tensor([10.], dtype=torch.float64)), 'penalty': 1e-07}
+CPU times: user 52.3 s, sys: 1.78 s, total: 54.1 s
+Wall time: 13.6 s
+
+
+
+Evaluating the model
+We evaluate the model on the held-out set and see that we obtain a respectable 1% error on 10 classes.
+
+
+
+
+
+
+CPU times: user 534 ms, sys: 23.8 ms, total: 558 ms
+Wall time: 139 ms
+
+
+
+
+
+
+
+
+Training error: 0.00%
+Test error: 1.11%
+
+
+
+
+
+Plot grid-search results
+Plotting results from a grid-search is always useful, since it shows the range of parameters which were successful. If the initial grid was too coarse, one could then run a second grid search to obtain even better accuracy.
+In the plot red indicates a high error, while darker blue indicates a low error.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/examples/falkon_cv.ipynb b/examples/falkon_cv.ipynb
new file mode 100644
index 00000000..b07609b2
--- /dev/null
+++ b/examples/falkon_cv.ipynb
@@ -0,0 +1,433 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "collapsed": true
+ },
+ "source": [
+ "# Hyperparameter Tuning with Falkon\n",
+ "\n",
+ "## Introduction\n",
+ "\n",
+ "We use Falkon for a multi-class problem (on the [digits](https://archive.ics.uci.edu/ml/datasets/Optical+Recognition+of+Handwritten+Digits) dataset), showing how to integrate it into scikit-learn\n",
+ "for hyperparameter optimization.\n",
+ "\n",
+    "Since both `Falkon` and `LogisticFalkon` are estimators, and follow scikit-learn's API, integration is seamless."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[pyKeOps]: Warning, no cuda detected. Switching to cpu only.\n"
+ ]
+ }
+ ],
+ "source": [
+ "%matplotlib inline\n",
+ "from sklearn import datasets, model_selection, metrics\n",
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "import torch\n",
+ "import matplotlib.pyplot as plt\n",
+ "\n",
+ "import falkon"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Load the data\n",
+ "\n",
+ "We use the **digits** dataset, which is distributed alongside scikit-learn."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "X, Y = datasets.load_digits(return_X_y=True)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Label: 1\n"
+ ]
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPUAAAD8CAYAAACvvuKtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAAALn0lEQVR4nO3d34tc9RnH8c+nmwR/xaxUK2LEVKgBEboJIVQCmh8qsUpy04sEFCot6UUrhhZEexP9B8ReFGGJGsEY0WhIkdYa0EWEVpvEtUY3Fg0Rk6hRzCZqoUF9ejEnZSsb98zs+Z6dPL5fMOTMzsx5nk3yme85Z86cryNCAPL43kw3AKBZhBpIhlADyRBqIBlCDSRDqIFk+iLUtlfbftv2O7bvLljnYdtHbe8rVWNCrctsv2h7zPabtu8sWOss26/afr2qdV+pWhNqDth+zfazLdQ6aPsN26O2dxeuNWh7u+391b/dNYXqLKx+n1O3E7Y3NrLyiJjRm6QBSe9KukLSHEmvS7qqUK1rJS2WtK+F3+sSSYur5bmS/lXw97Kk86rl2ZJekfSTwr/fbyU9LunZFv4uD0q6sHSdqtajkn5ZLc+RNNhCzQFJH0q6vIn19cNIvVTSOxFxICJOSnpC0toShSLiJUmfllj3JLU+iIi91fJnksYkXVqoVkTE59Xd2dWt2FlFtudLulnS5lI1ZoLt89V5439IkiLiZESMt1B6laR3I+K9JlbWD6G+VNL7E+4fUqH//DPF9gJJi9QZQUvVGLA9KumopF0RUayWpAck3SXp64I1JgpJz9veY3tDwTpXSPpY0iPVrsVm2+cWrHfKOknbmlpZP4Tak/wszbmrts+T9LSkjRFxolSdiPgqIoYkzZe01PbVJerYvkXS0YjYU2L9p7EsIhZLuknSr21fW6jOLHV2zx6MiEWSvpBU7BiPJNmeI2mNpKeaWmc/hPqQpMsm3J8v6cgM9dIo27PVCfTWiHimjZrV5uKIpNWFSiyTtMb2QXV2lVbafqxQLUlSRByp/jwqaYc6u2wlHJJ0aMJWznZ1Ql7STZL2RsRHTa2wH0L9D0k/sv3D6l1rnaQ/zXBP02bb6uybjUXE/YVrXWR7sFo+W9L1kvaXqBUR90TE/IhYoM6/1QsRcWuJWpJk+1zbc08tS7pRUpFPLyLiQ0nv215Y/WiVpLdK1JpgvRrc9JY6mxszKiK+tP0bSX9V5yjgwxHxZolatrdJWi7pQtuHJG2KiIdK1FJnRLtN0hvVvq4k/T4i/lyg1iWSHrU9oM4b9ZMRUfyjppZcLGlH5z1SsyQ9HhHPFax3h6St1QBzQNLtpQrZPkfSDZJ+1eh6q0PqAJLoh81vAA0i1EAyhBpIhlADyRBqIJm+CXXh0/9mrFbb9ahFrb4JtaQ2g9ZqqFuuR63veK1+CjWABhQ5+cQ2Z7Q04Morr+z6NcePH9e8efO6ft2sWd2fXHjs2DFdcMEFXb/u8OHDXb/m5MmTmjNnTtevO378eNevOVNExGRfhiLU/WxkZKS1WoODg63V2rRpU2u1du7c2Vqttp0u1Gx+A8kQaiAZQg0kQ6iBZAg1kAyhBpIh1EAyhBpIplao25oWB8D0TRnq6mJ2f1TnUqZXSVpv+6rSjQHoTZ2RurVpcQBMX51Q15oWx/YG27tLz0oI4NvV+WpOrWlxImJY0rDEFzqAmVRnpE47LQ6QUZ1Qp5wWB8hqys3vNqfFATB9tS53Uc3/VGIOKAAN44wyIBlCDSRDqIFkCDWQDKEGkiHUQDKEGkim+2kZ0Jrx8fHWal133XWt1VqxYkVrtTJfzP90GKmBZAg1kAyhBpIh1EAyhBpIhlADyRBqIBlCDSRDqIFkCDWQTJ0ZOh62fdT2vjYaAjA9dUbqLZJWF+4DQEOmDHVEvCTp0xZ6AdAA9qmBZBr76qXtDZI2NLU+AL1pLNTMpQX0Bza/gWTqfKS1TdLfJC20fcj2L8q3BaBXdebS
Wt9GIwCaweY3kAyhBpIh1EAyhBpIhlADyRBqIBlCDSTDtDtdGBoaarXe8uXLW63XltHR0ZluITVGaiAZQg0kQ6iBZAg1kAyhBpIh1EAyhBpIhlADyRBqIBlCDSRT5xpll9l+0faY7Tdt39lGYwB6U+fc7y8l/S4i9tqeK2mP7V0R8Vbh3gD0oM60Ox9ExN5q+TNJY5IuLd0YgN50tU9te4GkRZJeKdINgGmr/dVL2+dJelrSxog4McnjTLsD9IFaobY9W51Ab42IZyZ7DtPuAP2hztFvS3pI0lhE3F++JQDTUWefepmk2ySttD1a3X5auC8APaoz7c7LktxCLwAawBllQDKEGkiGUAPJEGogGUINJEOogWQINZAMoQaSOePn0tq4cWNrte69997WaknSvHnzWq3XlpGRkZluITVGaiAZQg0kQ6iBZAg1kAyhBpIh1EAyhBpIhlADyRBqIJk6Fx48y/artl+vpt25r43GAPSmzmmi/5G0MiI+ry4V/LLtv0TE3wv3BqAHdS48GJI+r+7Orm5c1xvoU7X2qW0P2B6VdFTSrohg2h2gT9UKdUR8FRFDkuZLWmr76m8+x/YG27tt7264RwBd6Orod0SMSxqRtHqSx4YjYklELGmmNQC9qHP0+yLbg9Xy2ZKul7S/cF8AelTn6Pclkh61PaDOm8CTEfFs2bYA9KrO0e9/qjMnNYAzAGeUAckQaiAZQg0kQ6iBZAg1kAyhBpIh1EAyhBpIxp1vVja8UjvlVzMHBwdbrXfs2LFW67Vl0aL2zmUaHR1trVbbIsKT/ZyRGkiGUAPJEGogGUINJEOogWQINZAMoQaSIdRAMoQaSIZQA8nUDnV1Qf/XbHPRQaCPdTNS3ylprFQjAJpRd9qd+ZJulrS5bDsApqvuSP2ApLskfX26JzDtDtAf6szQcYukoxGx59uex7Q7QH+oM1Ivk7TG9kFJT0haafuxol0B6NmUoY6IeyJifkQskLRO0gsRcWvxzgD0hM+pgWTqTJD3PxExos5UtgD6FCM1kAyhBpIh1EAyhBpIhlADyRBqIBlCDSTT1efUQBOGhoZaq5V52p3TYaQGkiHUQDKEGkiGUAPJEGogGUINJEOogWQINZAMoQaSIdRAMrVOE62uJPqZpK8kfcllgIH+1c253ysi4pNinQBoBJvfQDJ1Qx2Snre9x/aGyZ7AtDtAf6i7+b0sIo7Y/oGkXbb3R8RLE58QEcOShiXJdjTcJ4Caao3UEXGk+vOopB2SlpZsCkDv6kyQd67tuaeWJd0oaV/pxgD0ps7m98WSdtg+9fzHI+K5ol0B6NmUoY6IA5J+3EIvABrAR1pAMoQaSIZQA8kQaiAZQg0kQ6iBZAg1kAyhBpIh1EAyhBpIhlADyRBqIBlCDSRDqIFkCDWQDKEGkiHUQDKEGkimVqhtD9rebnu/7THb15RuDEBv6l73+w+SnouIn9meI+mcgj0BmIYpQ237fEnXSvq5JEXESUkny7YFoFd1Nr+vkPSxpEdsv2Z7c3X9bwB9qE6oZ0laLOnBiFgk6QtJd3/zScylBfSHOqE+JOlQRLxS3d+uTsj/T0QMR8QS5q4GZtaUoY6IDyW9b3th9aNVkt4q2hWAntU9+n2HpK3Vke8Dkm4v1xKA6agV6ogYlcRmNXAG4IwyIBlCDSRDqIFkCDWQDKEGkiHUQDKEGkiGUAPJ1D2jDJLGx8dbrbdz587Waq1du7a1WsuXL2+t1pYtW1qr1S8YqYFkCDWQDKEGkiHUQDKEGkiGUAPJEGogGUINJEOogWSmDLXthbZHJ9xO2N7YQm8AejDlaaIR8bakIUmyPSDpsKQdZdsC0KtuN79XSXo3It4r0QyA6es21OskbSvRCIBm1A51dc3vNZKeOs3jTLsD9IFuvnp5k6S9EfHRZA9GxLCkYUmyHQ30BqAH3Wx+rxeb3kDfqxVq2+dIukHSM2XbATBddafd+bek7xfuBUADOKMMSIZQA8kQaiAZQg0k
Q6iBZAg1kAyhBpIh1EAyjmj+NG3bH0vq9uuZF0r6pPFmZr5W2/Wo9d2odXlEXDTZA0VC3QvbuyNiSbZabdejFrXY/AaSIdRAMv0U6uGktdquR63veK2+2acG0Ix+GqkBNIBQA8kQaiAZQg0kQ6iBZP4Lhj5lm5RUNJYAAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {
+ "needs_background": "light"
+ },
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "fig, ax = plt.subplots()\n",
+ "_ = ax.matshow(X[1].reshape((8, 8)), cmap='gray')\n",
+ "print(\"Label: %d\" % Y[1])"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Split into training and test sets\n",
+ "\n",
+ "We split the data into a training set with 80% of the samples and a test set with the remaining 20%."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "X_train, X_test, Y_train, Y_test = model_selection.train_test_split(\n",
+ " X, Y, test_size=0.2, random_state=10, shuffle=True)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Data Preprocessing\n",
+ "\n",
+ "As always with Falkon we must:\n",
+ " 1. Convert from numpy arrays to torch tensors\n",
+ " 2. Convert data and labels to the same data-type (in this case float32)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "X_train = torch.from_numpy(X_train).to(dtype=torch.float32)\n",
+ "X_test = torch.from_numpy(X_test).to(dtype=torch.float32)\n",
+ "Y_train = torch.from_numpy(Y_train)\n",
+ "Y_test = torch.from_numpy(Y_test)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Normalizing the data is always a good idea. Here we use the global mean and standard deviation of the training set for z-score normalization."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# z-score normalization\n",
+ "train_mean = X_train.mean()\n",
+ "train_std = X_train.std()\n",
+ "X_train -= train_mean\n",
+ "X_train /= train_std\n",
+ "X_test -= train_mean\n",
+ "X_test /= train_std"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Since Falkon optimizes with respect to the square loss, using ordinal labels (e.g. 1, 4, 5) is not ideal since closeness in the natural numbers is meaningless for classification. We therefore convert the labels to a 1-hot representation."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "First label vector: tensor([0., 0., 1., 0., 0., 0., 0., 0., 0., 0.])\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Convert labels to 1-hot\n",
+ "eye = torch.eye(10, dtype=torch.float32)\n",
+ "Y_train = eye[Y_train]\n",
+ "Y_test = eye[Y_test]\n",
+ "print(\"First label vector: \", Y_train[0])"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Search for the optimal parameters\n",
+ "\n",
+ "Since Falkon (with the Gaussian kernel) has only 3 important hyperparameters, it is entirely feasible to run a grid search over them to find the best parameter settings.\n",
+ "\n",
+    "Scikit-learn has great support for this, with the `GridSearchCV` class. For each parameter setting it will run 5-fold cross-validation on the training set, to determine which has the best results.\n",
+ "\n",
+ "Given the dataset is quite small, and Falkon is fast, we can run 160 model evaluations in around 40 seconds."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def mclass_loss(true, pred):\n",
+ " true = torch.argmax(true, dim=1)\n",
+ " pred = torch.argmax(pred, dim=1)\n",
+ " return torch.mean((true != pred).to(torch.float32))\n",
+ "mclass_scorer = metrics.make_scorer(mclass_loss, greater_is_better=False)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The parameter settings which will be explored by the grid-search are:\n",
+ " - four different kernel length-scales (varying around small positive numbers, which are usually good for normalized data)\n",
+ " - four different regularization values\n",
+ " - two different values for M: the number of inducing points. As we will see, a larger `M` is almost always better than a smaller one (but it leads to longer training times). \n",
+ " Of course this is not the case if the dataset is easy to overfit, since reducing `M` may also provide additional regularization.\n",
+ "\n",
+ "When we create the estimator we pass it additional parameters via the `FalkonOptions` class.\n",
+ "In our case we want to ensure that the model runs on the CPU by setting `use_cpu=True`."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "parameter_grid = {\n",
+ " 'kernel': [falkon.kernels.GaussianKernel(sigma=1), \n",
+ " falkon.kernels.GaussianKernel(sigma=5), \n",
+ " falkon.kernels.GaussianKernel(sigma=10),\n",
+ " falkon.kernels.GaussianKernel(sigma=15),],\n",
+ " 'penalty': [1e-3, 1e-5, 1e-7, 1e-9],\n",
+ " 'M': [500, 1000],\n",
+ "}\n",
+ "estimator = falkon.Falkon(\n",
+ " kernel=falkon.kernels.GaussianKernel(1), penalty=1e-3, M=1000, # Mandatory parameters, will be overridden\n",
+ " maxiter=10, options=falkon.FalkonOptions(use_cpu=True))\n",
+ "\n",
+ "grid_search = model_selection.GridSearchCV(estimator, parameter_grid, scoring=mclass_scorer, cv=5)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "The best parameters are: {'M': 500, 'kernel': GaussianKernel(sigma=Parameter containing:\n",
+ "tensor([10.], dtype=torch.float64)), 'penalty': 1e-07}\n",
+ "CPU times: user 52.3 s, sys: 1.78 s, total: 54.1 s\n",
+ "Wall time: 13.6 s\n"
+ ]
+ }
+ ],
+ "source": [
+ "%%time\n",
+ "grid_search.fit(X_train, Y_train)\n",
+ "print(\"The best parameters are: \", grid_search.best_params_)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Evaluating the model\n",
+ "We evaluate the model on the held-out set and see that we obtain a respectable 1% error on 10 classes."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "CPU times: user 534 ms, sys: 23.8 ms, total: 558 ms\n",
+ "Wall time: 139 ms\n"
+ ]
+ }
+ ],
+ "source": [
+ "%%time\n",
+ "flk = grid_search.best_estimator_\n",
+ "flk.fit(X_train, Y_train)\n",
+ "test_pred = flk.predict(X_test)\n",
+ "train_pred = flk.predict(X_train)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Training error: 0.00%\n",
+ "Test error: 1.11%\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(\"Training error: %.2f%%\" % (mclass_loss(Y_train, train_pred) * 100))\n",
+ "print(\"Test error: %.2f%%\" % (mclass_loss(Y_test, test_pred) * 100))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Plot grid-search results\n",
+ "\n",
+ "Plotting results from a grid-search is always useful, since it shows the range of parameters which were successful.\n",
+ "If the initial grid was too coarse, one could then run a second grid search to obtain even better accuracy.\n",
+ "\n",
+ "In the plot red indicates a high error, while darker blue indicates a low error."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "res_df = pd.DataFrame.from_dict(grid_search.cv_results_)\n",
+ "res_df[\"M\"] = res_df.params.apply(lambda x: x.get(\"M\"))\n",
+ "res_df[\"penalty\"] = res_df.params.apply(lambda x: x.get(\"penalty\"))\n",
+ "res_df[\"sigma\"] = res_df.params.apply(lambda x: x.get(\"kernel\").sigma.item())\n",
+ "res_df = res_df[[\"mean_test_score\", \"M\", \"penalty\", \"sigma\"]]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def plot_heatmap(ax, df, xlabel, ylabel, value, scale):\n",
+ " piv = pd.pivot_table(df, index=ylabel, columns=xlabel, values=value)\n",
+ " cmap = plt.cm.get_cmap('coolwarm_r', 20)\n",
+ " ax.grid(False)\n",
+ " c = ax.pcolormesh(piv, cmap=cmap, vmin=scale[0], vmax=scale[1])\n",
+ " ax.set_yticks(np.arange(piv.shape[0]) + 0.5, minor=False)\n",
+ " ax.set_xticks(np.arange(piv.shape[1]) + 0.5, minor=False)\n",
+ " ax.set_xticklabels(piv.columns, minor=False)\n",
+ " ax.set_yticklabels(piv.index, minor=False)\n",
+ " ax.set_xlabel(xlabel)\n",
+ " ax.set_ylabel(ylabel)\n",
+ " return c"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAt0AAAFNCAYAAADcudMsAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAAAZaUlEQVR4nO3dfZCsZ1kn4N9tjghFkjUkbAqTiEBFsuEjEaJmtVaRddeQskwwspJCA1asCIruolQtlAq4H7qolFWAhI0xewjWRgkSCSUfKovEwogkkpAckXgA0UNSfEogREHCvX/Me6SdzJwzPWeemZ7T11XV1f1+dd/zVJ87v7z99NvV3QEAAMb5qp0uAAAAjnZCNwAADCZ0AwDAYEI3AAAMJnQDAMBgQjcAAAwmdAMAwGBCN0e9qvqbqvpiVZ20av0tVdVV9Q1b/Hp/XFX/WFX3TLcPrNr+76vqr6rq3qp6R1U9fGZbVdVLq+pT0+2Xq6q2sj6ARbIDPfq5VXVTVX2hqvausX3TPbqqvmE65t7pOb57K2tndxO6WRYfTnLxwYWqelySBw18ved297HT7dEzr3tSkjck+fkkD0lyU5LfmTnusiQXJjkryeOTfG+SHxtYJ8Ai2M4efWeS/5HkqtUbtqBHX5PkvUlOTPKzSV5fVQ/d8r+AXUnoZlm8NsklM8vPTHL1DtTx/Un2dfe13f2PSV6S5KyqOmOmrpd194Hu/miSlyV51g7UCbCdtq1Hd/cbuvv3knxqjc2b7tFV9Y1JnpDkxd39D939u0luS3LRiL+D3UfoZln8WZLjq+rfVNUxSX4wyW8d6oCqelVVfWad2/sO83q/VFWfrKp3VdWTZtY/JsmtBxe6+/NJPjitv9/26fFjAnB02+4evZ4j6dGPSfKh7v7cOttZcnt2ugDYRgfPpLwzyV8l+eihdu7uH0/y45t4nf+a5C+TfDHJ05O8qarO7u4PJjk2ySdW7X93kuOmx8dOy7Pbjq2q6u7eRC0Au8V29ehD2XSPXmPbwe2nbHGN7FJCN8vktUluSPKIDJxa0t3vnll8TVVdnOT8JK9Ick+S41cdcnySg2dGVm8/Psk9AjewBLalRx/Gpnt0VR3uWJac6SUsje7+SFa+rHN+Vr4oc0hV9eqZK5Csvu2b56WTHPx2+76sfAHn4Gs8OMmjpvX32z49nue1AHalHezRs46kR+9L8siqOm6d7Sw5oZtlc2mSJ0/z9A6pu589cwWS1bc15+hV1ddW1fdU1QOrak9VPSPJdyR527TLdUkeW1UXVdUDk7woyfu6+6+m7Vcn+emqOqWqvi7JzyTZe2R/MsCuMbRHJ8nUmx+Y5Jgkxxzs19PmTffo7r4jyS1JXjw951OzcoWT393EOHAUMr2EpTLNqx7pq7NyKaozktyXlXmJF3b3B6bX/0RVXZTklVn5ktC7szLv+6D/neSRWfnGe5JcOa0DOOptQ49Okp9L8uKZ5R9K8gtJXrIFPfrpWQnhf5/kb5P8QHevniPOkipTRQEAYCzTSwAAYDChGwAABhO6AQBgMKEbAAAGE7oBAGCwpbhk4IOOPbGPP+Hrd7qMXeOYPf5fbB4nP+ienS5hV7nn/R/Z6RJ2lf35wie7+6E7Xcd20rPno2fPR8+en749n/X69lKE7uNP+Pr84M+8c6fL2DUecuKDdrqEXeU/P+5dO13CrvKn3/xjO13CrvK9X7pj6f5rp2fPR8+ej549P317Puv1bf97DAAAgwndAAAwmNANAACDCd0AADCY0A0AAIMJ3QAAMJjQDQAAgwndAAAwmNANAACDCd0AADCY0A0AAIMJ3QAAMJjQDQAAgwndAAAwmNANAACDCd0AADCY0A0AAIMJ3QAAMJjQDQAAgwndAAAwmNANAACDCd0AADCY0A0AAIMJ3QAAMJjQDQAAgwndAAAwmNANAACDCd0AADDY0NBdVedV1Qeqan9VvWCN7VVVL5+2v6+qnnC4Y6vq
aVW1r6q+XFXnjKwfYJno2QDjDAvdVXVMkl9P8pQkZya5uKrOXLXbU5KcPt0uS3L5Bo69Pcn3J7lhVO0Ay0bPBhhr5Jnub0myv7s/1N1fTPLbSS5Ytc8FSa7uFX+W5Gur6mGHOra739/dHxhYN8Ay0rMBBhoZuk9J8nczywemdRvZZyPHArB19GyAgUaG7lpjXW9wn40ce+gXr7qsqm6qqpv+4fOfmudQgGWkZwMMNDJ0H0hy2szyqUnu3OA+Gzn2kLr7iu4+p7vPedCDT5znUIBlpGcDDDQydL8nyelV9YiqekCSpye5ftU+1ye5ZPpG/LlJ7u7uuzZ4LABbR88GGGjPqCfu7i9V1XOTvC3JMUmu6u59VfXsafurk7w5yflJ9ie5N8mPHOrYJKmqpyZ5RZKHJvn9qrqlu79n1N8BsAz0bICxhoXuJOnuN2elSc+ue/XM407yExs9dlp/XZLrtrZSAPRsgHH8IiUAAAwmdAMAwGBCNwAADCZ0AwDAYEI3AAAMJnQDAMBgQjcAAAwmdAMAwGBCNwAADCZ0AwDAYEI3AAAMJnQDAMBgQjcAAAwmdAMAwGBCNwAADCZ0AwDAYEI3AAAMJnQDAMBgQjcAAAwmdAMAwGBCNwAADCZ0AwDAYEI3AAAMJnQDAMBgQjcAAAwmdAMAwGBCNwAADCZ0AwDAYEI3AAAMtmenC2Dx/NHr/nSnS9hV3vPOh+50CbvKN191+06XsLtc8oCdroAFp2fPR8+en749p3X6tjPdAAAwmNANAACDCd0AADCY0A0AAIMJ3QAAMJjQDQAAgwndAAAwmNANAACDCd0AADCY0A0AAIMJ3QAAMJjQDQAAgwndAAAwmNANAACDCd0AADCY0A0AAIMJ3QAAMJjQDQAAgwndAAAwmNANAACDCd0AADCY0A0AAIMJ3QAAMJjQDQAAgwndAAAwmNANAACDCd0AADCY0A0AAIMJ3QAAMNiw0F1VV1XVx6vq9k0c+8Squq2q9lfVy6uqpvXPqqpPVNUt0+1Ht75ygOWkbwOMM/JM994k523y2MuTXJbk9Ok2+zy/091nT7crj6xEAGbsjb4NMMSw0N3dNyT59Oy6qnpUVb21qm6uqj+pqjNWH1dVD0tyfHff2N2d5OokF46qE4AV+jbAONs9p/uKJD/Z3U9M8vwkr1pjn1OSHJhZPjCtO+iiqnpfVb2+qk4bVyoA0bcBtsSe7Xqhqjo2ybcluXaa6pckX7PWrmus6+n+TUmu6e4vVNWzk7wmyZPXeb3LsvJRZ447QY8HmNd29m09GzjabVvozspZ9c9099mzK6vqmCQ3T4vXZ2Ve4Kkzu5ya5M4k6e5Pzaz/jSQvXe/FuvuKrJyhycmnfVOvtx8A69q2vq1nA0e7bZte0t2fTfLhqnpaktSKs7r7vpkv2Lyou+9K8rmqOnf69vslSd44HfOwmaf8viTv3676AZaNvg2wdUZeMvCaJDcmeXRVHaiqS5M8I8mlVXVrkn1JLljn8OckuTLJ/iQfTPKWaf1PVdW+6fifSvKsUfUDLBt9G2CcYdNLuvvidTYd9nJU3X1Tkseusf6FSV54hKUBsAZ9G2Acv0gJAACDCd0AADCY0A0AAIMJ3QAAMJjQDQAAgwndAAAwmNANAACDCd0AADCY0A0AAIMJ3QAAMJjQDQAAgwndAAAw2IZCd1U9ZHQhAGwNPRtg8Wz0TPe7q+raqjq/qmpoRQAcKT0bYMFsNHR/Y5Irkvxwkv1V9YtV9Y3jygLgCOjZAAtmQ6G7V/xhd1+c5EeTPDPJn1fVO6vq3w6tEIC56NkAi2fPRnaqqhOT/FBWzpp8LMlPJrk+ydlJrk3yiEH1ATAnPRtg8WwodCe5Mclrk1zY3Qdm1t9UVa/e+rIAOAJ6NsCC2eic7p/r7v8+27yr6mlJ0t0vHVIZAJulZwMsmI2G7hesse6FW1kIAFtGzwZYMIecXlJVT0lyfpJTqurlM5uOT/KlkYUBMB89G2BxHW5O
951Jbk7yfdP9QZ9L8rxRRQGwKXo2wII6ZOju7luT3FpVv9XdzpIALDA9G2BxHW56yW1Jenp8v+3d/fgxZQEwLz0bYHEdbnrJ925LFQBsBT0bYEEdbnrJR7arEACOjJ4NsLg2dMnAqjq3qt5TVfdU1Rer6r6q+uzo4gCYn54NsHg2ep3uVya5OMlfJ3lQkh9N8opRRQFwRPRsgAWz0Z+BT3fvr6pjuvu+JP+nqv50YF0AHAE9G2CxbDR031tVD0hyS1X9cpK7kjx4XFmwe9z9sU/udAm7yh+9znhtAz0b1qFnz0/f3hobnV7yw9O+z03y+SSnJbloVFEAHBE9G2DBbOhM98w34v8xyS+MKweAI6VnAyyeDYXuqvr2JC9J8vDZY7r7kWPKAmCz9GyAxbPROd2/meR5SW5Oct+4cgDYAno2wILZaOi+u7vfMrQSALaKng2wYDYaut9RVb+S5A1JvnBwZXf/xZCqADgSejbAgtlo6P7W6f6cmXWd5MlbWw4AW0DPBlgwG716yXeNLgSAraFnAyyeDV2nu6pOrqrfrKq3TMtnVtWlY0sDYDP0bIDFs9Efx9mb5G1Jvm5aviPJfxlQDwBHbm/0bICFstHQfVJ3vy7Jl5Oku78Ul6ECWFR6NsCC2Wjo/nxVnZiVL+Kkqs5NcvewqgA4Eno2wILZ6NVLfjrJ9UkeWVXvSvLQJD8wrCoAjoSeDbBgNhq6/zLJdUnuTfK5JL+XlTmCACwePRtgwWx0esnVSc5I8otJXpHk9CSvHVUUAEdEzwZYMBs90/3o7j5rZvkdVXXriIIAOGJ6NsCC2eiZ7vdOX8RJklTVtyZ515iSADhCejbAgpnnZ+Avqaq/nZa/Psn7q+q2JN3djx9SHQCboWcDLJiNhu7zhlYBwFbSswEWzIZCd3d/ZHQhAGwNPRtg8Wx0TjcAALBJQjcAAAwmdAMAwGBCNwAADCZ0AwDAYEI3AAAMJnQDAMBgQjcAAAwmdAMAwGDDQndVXVVVH6+q2zdx7BOr6raq2l9VL6+qmtb/WlXdMt3uqKrPbHnhAEtK3wYYZ+SZ7r1JztvksZcnuSzJ6dPtvCTp7ud199ndfXaSVyR5w5GXCcBkb/RtgCGGhe7uviHJp2fXVdWjquqtVXVzVf1JVZ2x+riqeliS47v7xu7uJFcnuXCNl7g4yTUDSgdYSvo2wDh7tvn1rkjy7O7+66r61iSvSvLkVfuckuTAzPKBad0/q6qHJ3lEkv83sFYA9G2ALbFtobuqjk3ybUmunab6JcnXrLXrGut61fLTk7y+u+87xOtdlpWPOnPcCafNXS/AstvOvq1nA0e77TzT/VVJPjPN6/tnVXVMkpunxeuzMi/w1JldTk1y56rnenqSnzjUi3X3FVk5Q5OTT/um1c0fgMPbtr6tZwNHu227ZGB3fzbJh6vqaUlSK87q7vsOfsmmu1/U3Xcl+VxVnTt9+/2SJG88+DxV9egkJyS5cbtqB1hG+jbA1hl5ycBrstJgH11VB6rq0iTPSHJpVd2aZF+SC9Y5/DlJrkyyP8kHk7xlZtvFSX57+rIOAFtE3wYYZ9j0ku6+eJ1Nh70cVXfflOSx62x7yRGUBcA69G2AcfwiJQAADCZ0AwDAYEI3AAAMJnQDAMBgQjcAAAwmdAMAwGBCNwAADCZ0AwDAYEI3AAAMJnQDAMBgQjcAAAwmdAMAwGBCNwAADCZ0AwDAYEI3AAAMJnQDAMBgQjcAAAwmdAMAwGBCNwAADCZ0AwDAYEI3AAAMJnQDAMBgQjcAAAwmdAMAwGBCNwAADCZ0AwDAYEI3AAAMJnQDAMBgQjcAAAy2Z6cLAJbLvzr5pJ0uAYA56Ntbw5luAAAYTOgGAIDBhG4AABhM6AYAgMGEbgAAGEzoBgCAwYRuAAAYTOgGAIDBhG4AABhM6AYAgMGEbgAAGEzoBgCAwYRuAAAYTOgGAIDBhG4AABhM6AYAgMGEbgAAGEzoBgCAwYRuAAAYTOgGAIDB
hG4AABhM6AYAgMGEbgAAGEzoBgCAwYRuAAAYTOgGAIDBhG4AABhM6AYAgMGEbgAAGGxY6K6qq6rq41V1+yaOfWJV3VZV+6vq5VVV0/qHV9Xbq+p9VfXHVXXq1lcOsJz0bYBxRp7p3pvkvE0ee3mSy5KcPt0OPs+vJrm6ux+f5L8l+aUjrBGAr9gbfRtgiGGhu7tvSPLp2XVV9aiqemtV3VxVf1JVZ6w+rqoeluT47r6xuzvJ1UkunDafmeTt0+N3JLlgVP0Ay0bfBhhnu+d0X5HkJ7v7iUmen+RVa+xzSpIDM8sHpnVJcmuSi6bHT01yXFWdOKhWAPRtgC2xZ7teqKqOTfJtSa6dpvolydestesa63q6f36SV1bVs5LckOSjSb60zutdlpWPOnPcCadtum6AZbWdfVvPBo522xa6s3JW/TPdffbsyqo6JsnN0+L1WZkXOPtFm1OT3Jkk3X1nku+fjjs2yUXdffdaL9bdV2TlDE1OPu2beq19ADikbevbejZwtNu26SXd/dkkH66qpyVJrTiru+/r7rOn24u6+64kn6uqc6dvv1+S5I3TMSdV1cGaX5jkqu2qH2DZ6NsAW2fkJQOvSXJjkkdX1YGqujTJM5JcWlW3JtmX9b9Q85wkVybZn+SDSd4yrX9Skg9U1R1JTk7yP0fVD7Bs9G2AcYZNL+nui9fZdNjLUXX3TUkeu8b61yd5/RGWBsAa9G2AcfwiJQAADCZ0AwDAYEI3AAAMJnQDAMBgQjcAAAwmdAMAwGBCNwAADCZ0AwDAYEI3AAAMJnQDAMBgQjcAAAwmdAMAwGBCNwAADCZ0AwDAYEI3AAAMJnQDAMBgQjcAAAwmdAMAwGBCNwAADCZ0AwDAYEI3AAAMJnQDAMBgQjcAAAwmdAMAwGBCNwAADCZ0AwDAYEI3AAAMJnQDAMBgQjcAAAxW3b3TNQxXVZ9I8pGdrmMNJyX55E4XsYsYr/kYr/ks6ng9vLsfutNFbCc9+6hhvOZnzOazqOO1Zt9eitC9qKrqpu4+Z6fr2C2M13yM13yMF4fjPTIf4zU/Yzaf3TZeppcAAMBgQjcAAAwmdO+sK3a6gF3GeM3HeM3HeHE43iPzMV7zM2bz2VXjZU43AAAM5kw3AAAMJnQPVlVXVdXHq+r2dbZXVb28qvZX1fuq6gnbXeOiqaq/qarbquqWqrppje1LPWZrvaeq6iFV9YdV9dfT/QnrHHteVX1gGrsXbF/VO2ed8XpJVX10eo/dUlXnr3Ps0o3XstOz56dnH5qePb+jtW8L3ePtTXLeIbY/Jcnp0+2yJJdvQ027wXd199nrXApo2cdsb+7/nnpBkrd39+lJ3j4t/wtVdUySX8/K+J2Z5OKqOnNsqQthb9b+N/hr03vs7O5+8+qNSzxey25v9OzN0LPXtzd69rz25ijs20L3YN19Q5JPH2KXC5Jc3Sv+LMnXVtXDtqe6XWupx2yd99QFSV4zPX5NkgvXOPRbkuzv7g919xeT/PZ03FFtA/8G17OU47Xs9OwhlnrM9Oz5Ha19W+jeeack+buZ5QPTumXWSf6gqm6uqsvW2G7M7u/k7r4rSab7f73GPsbtX3ru9FH3Vet8tGu8WIv3xf3p2fPTszdnV/dtoXvn1Rrrlv2SMt/e3U/IysdDP1FV37FquzHbHOP2FZcneVSSs5PcleRla+xjvFiL98X96dljGLd/adf3baF75x1IctrM8qlJ7tyhWhZCd9853X88yXVZ+bholjG7v48d/Lh2uv/4GvsYt0l3f6y77+vuLyf5jdz/PZYYL9bmfbGKnr0pevacjoa+LXTvvOuTXDJ9u/vcJHcf/MhpGVXVg6vquIOPk/zHJKuvImDM7u/6JM+cHj8zyRvX2Oc9SU6vqkdU1QOSPH06bumsmk/61Nz/PZYYL9am/8zQszdNz57T0dC39+x0AUe7qromyZOSnFRVB5K8OMlXJ0l3vzrJm5Ocn2R/knuT/MjOVLowTk5yXVUlK+/P
/9vdb62qZyfGLFn3PfW/kryuqi5N8rdJnjbt+3VJruzu87v7S1X13CRvS3JMkqu6e99O/A3baZ3xelJVnZ2Vjx3/JsmPTfsu/XgtOz17bnr2YejZ8zta+7ZfpAQAgMFMLwEAgMGEbgAAGEzoBgCAwYRuAAAYTOgGAIDBhG44hKq6sqrO3Ok6ADg8PZtF5pKBAAAwmDPdMJl+We33q+rWqrq9qn6wqv64qs6Ztl9aVXdM636jql45rd9bVZdX1Tuq6kNV9Z1VdVVVvb+q9s48/+VVdVNV7auqX9ihPxPgqKBns9sI3fAV5yW5s7vP6u7HJnnrwQ3TL179fJJzk/yHJGesOvaEJE9O8rwkb0rya0kek+Rx0y9oJcnPdvc5SR6f5Dur6vED/xaAo52eza4idMNX3Jbku6vqpVX177r77plt35Lknd396e7+pyTXrjr2Tb0yV+u2JB/r7tu6+8tJ9iX5hmmf/1RVf5HkvVlp7uYdAmyens2usmenC4BF0d13VNUTk5yf5Jeq6g9mNtdhDv/CdP/lmccHl/dU1SOSPD/JN3f3308fYT5wayoHWD56NruNM90wmT6OvLe7fyvJryZ5wszmP8/Kx4snVNWeJBfN+fTHJ/l8krur6uQkT9mKmgGWlZ7NbuNMN3zF45L8SlV9Ock/JXlOVhp5uvujVfWLSd6d5M4kf5nk7vWeaLXuvrWq3puVjy4/lORdW1w7wLLRs9lVXDIQNqiqju3ue6azJtcluaq7r9vpugC4Pz2bRWN6CWzcS6rqliS3J/lwkt/b0WoAOBQ9m4XiTDcAAAzmTDcAAAwmdAMAwGBCNwAADCZ0AwDAYEI3AAAMJnQDAMBg/x9D103bFBoy5gAAAABJRU5ErkJggg==\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {
+ "needs_background": "light"
+ },
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "fig, axs = plt.subplots(ncols=2, figsize=(12, 5))\n",
+ "scale = (res_df[\"mean_test_score\"].min(), res_df[\"mean_test_score\"].max())\n",
+ "\n",
+ "c = plot_heatmap(axs[0], res_df[res_df.M == 500], \"sigma\", \"penalty\", \"mean_test_score\", scale)\n",
+ "axs[0].set_title(\"M = 500\")\n",
+ "c = plot_heatmap(axs[1], res_df[res_df.M == 1000], \"sigma\", \"penalty\", \"mean_test_score\", scale)\n",
+ "_ = axs[1].set_title(\"M = 1000\")"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.9"
+ },
+ "pycharm": {
+ "stem_cell": {
+ "cell_type": "raw",
+ "metadata": {
+ "collapsed": false
+ },
+ "source": []
+ }
+ },
+ "toc": {
+ "base_numbering": 1,
+ "nav_menu": {},
+ "number_sections": true,
+ "sideBar": true,
+ "skip_h1_title": false,
+ "title_cell": "Table of Contents",
+ "title_sidebar": "Contents",
+ "toc_cell": false,
+ "toc_position": {},
+ "toc_section_display": true,
+ "toc_window_display": false
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 1
+}
diff --git a/examples/falkon_mnist.html b/examples/falkon_mnist.html
new file mode 100644
index 00000000..0ee823d0
--- /dev/null
+++ b/examples/falkon_mnist.html
@@ -0,0 +1,571 @@
+
+
+
+
+
+
+
+
+ MNIST Classification with Falkon — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+MNIST Classification with Falkon
+
+
+
+
+
+
+[pyKeOps]: Warning, no cuda detected. Switching to cpu only.
+
+
+
+Download the MNIST dataset & load it in memory
+
+
+
+
+
+
+
+
+<matplotlib.image.AxesImage at 0x7f4ee3780130>
+
+
+
+
+
+
+
+
+
+
+
+Data Preprocessing
+We convert the labels to their one-hot representation. This is the best way to run multi-class classification with Falkon which minimizes the squared error
+
+
+
+
+Run Falkon
+There are a few parameters which need to be provided to the algorithm
+
+ The FalkonOptions class is used to provide non-standard tuning knobs. It allows one, for example, to tune the amount of GPU memory the algorithm can use, adjust the convergence tolerance, and decide whether certain parts of the algorithm are computed on CPU or GPU.
+It can be used with default parameters for most purposes!
+
+The kernel is the most important choice which depends on the data at hand. We use the GaussianKernel which is the most common option and initialize it with a length-scale of 15.
+The penalty determines the amount of regularization. A higher value corresponds to more regularization.
+The number of centers M strongly influences the time needed for fitting. By default the centers are chosen uniformly at random.
+
+
+
+
+
+
+
+
+Iteration 1 - Elapsed 0.55s - training error: 0.11998333
+Iteration 2 - Elapsed 1.02s - training error: 0.07140000
+Iteration 3 - Elapsed 1.50s - training error: 0.05766667
+Iteration 4 - Elapsed 1.98s - training error: 0.05121667
+Iteration 5 - Elapsed 2.46s - training error: 0.04776667
+Iteration 6 - Elapsed 2.95s - training error: 0.04556667
+Iteration 7 - Elapsed 3.45s - training error: 0.04376667
+Iteration 8 - Elapsed 3.93s - training error: 0.04340000
+Iteration 9 - Elapsed 4.42s - training error: 0.04286667
+Iteration 10 - Elapsed 5.39s - training error: 0.04223333
+
+
+
+
+
+
+
+
+Training error: 4.22%
+Test error: 4.13%
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/examples/falkon_mnist.ipynb b/examples/falkon_mnist.ipynb
new file mode 100644
index 00000000..b0483211
--- /dev/null
+++ b/examples/falkon_mnist.ipynb
@@ -0,0 +1,284 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "collapsed": false,
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
+ "source": [
+ "# MNIST Classification with Falkon"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "collapsed": false,
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[pyKeOps]: Warning, no cuda detected. Switching to cpu only.\n"
+ ]
+ }
+ ],
+ "source": [
+ "%matplotlib inline\n",
+ "import matplotlib.pyplot as plt\n",
+ "plt.style.use('ggplot')\n",
+ "\n",
+ "import torch\n",
+ "import torchvision\n",
+ "\n",
+ "import falkon"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Download the MNIST dataset & load it in memory"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "mnist_train_dataset = torchvision.datasets.MNIST(\n",
+ " root=\".\", train=True, download=True,\n",
+ " transform=torchvision.transforms.ToTensor())\n",
+ "mnist_test_dataset = torchvision.datasets.MNIST(\n",
+ " root=\".\", train=False, download=True,\n",
+ " transform=torchvision.transforms.ToTensor())"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Load the whole dataset in memory\n",
+ "mnist_tr_img_list, mnist_tr_label_list = [], []\n",
+ "for i in range(len(mnist_train_dataset)):\n",
+ " data_point = mnist_train_dataset[i]\n",
+ " mnist_tr_img_list.append(data_point[0])\n",
+ " mnist_tr_label_list.append(data_point[1])\n",
+ "mnist_ts_img_list, mnist_ts_label_list = [], []\n",
+ "for i in range(len(mnist_test_dataset)):\n",
+ " data_point = mnist_test_dataset[i]\n",
+ " mnist_ts_img_list.append(data_point[0])\n",
+ " mnist_ts_label_list.append(data_point[1])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "mnist_tr_x = torch.vstack(mnist_tr_img_list)\n",
+ "mnist_tr_x = mnist_tr_x.reshape(mnist_tr_x.shape[0], -1)\n",
+ "mnist_ts_x = torch.vstack(mnist_ts_img_list)\n",
+ "mnist_ts_x = mnist_ts_x.reshape(mnist_ts_x.shape[0], -1)\n",
+ "mnist_tr_y = torch.tensor(mnist_tr_label_list)\n",
+ "mnist_ts_y = torch.tensor(mnist_ts_label_list)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 5,
+ "metadata": {},
+ "output_type": "execute_result"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAAAUXUlEQVR4nO3dfVRUZ34H8O8MoICEcXgJRpZJAuoqCa4loFnXBA2j61GPIa6auolWkx5LwCRKYpbYHuk22p1snEWTxXi6TTSxm1ZMV5Km2+Q42gxbjYKlxrO+g1pfFkFgQIyQwMztH/ZcHPU+g8O8XH2+n7/und+9d37O+OW+PDNzDYqiKCCiu54x3A0QUWgw7ESSYNiJJMGwE0mCYSeSBMNOJInIgax88OBBbN68GR6PB/n5+SgoKPC5zlTjPHW6osaG4vGlA2khaPTam177AtibvwLZ207Pds2a33t2j8eD9957D6tWrUJ5eTn27NmD8+fP+7s5Igoyv8NeX1+PYcOGISUlBZGRkZg4cSJqa2sD2RsRBZDB30/Q7du3DwcPHkRhYSEAoLq6GidPnsTzzz/vtZzD4YDD4QAA2Gw2nDjQoNYsY1Jx9ugFf3sPKr32pte+APbmr0D2NionQ7Pm9zn7rf5GGAyGmx6zWq2wWq3q/PXnJrKcRwWSXvsC2Ju/dH/OnpiYiNbWVnW+tbUVZrPZ380RUZD5HfaMjAw0NjaiubkZvb292Lt3L3JycgLZGxEFkN+H8REREXjuueewdu1aeDweTJkyBWlpaYHsjYgCaEDj7NnZ2cjOzg5UL0QURPwEHZEkGHYiSTDsRJJg2IkkwbATSYJhJ5IEw04kCYadSBIMO5EkGHYiSTDsRJJg2IkkwbATSYJhJ5IEw04kCYadSBIMO5EkGHYiSTDsRJJg2IkkwbATSYJhJ5IEw04kCYadSBIMO5EkGHYiSTDsRJJg2IkkwbATSWJAd3El/TNEit/iiOSkwD1ZVBQi7xvm9dDxVx/QXNwd6xFu7v6MZmE9tsggrF/81SB1umdENC59+n11vi5nm3DdFvc3wvqE7a8I6yNK9gnr4TCgsBcXFyM6OhpGoxERERGw2WyB6ouIAmzAe/aysjLEx8cHohciCiKesxNJwqAoiuLvysXFxYiLiwMATJ06FVar9aZlHA4HHA4HAMBms+HEgQa1ZhmTirNHL/j79EGl195uuy+D+LwWPs7pb4dlZArOnmzyeuzbYYM0lgYUH7uaQYN7hHXj/4r/bT1pffX0uGScunJJnc8c0iZct1cRX0846koR1qPPic/5rxfI/2ujcjI0awMKe1tbGxISEtDR0YE1a9ZgyZIlyMzMFK4z1ThPna6osaF4fKm/Tx9Ueu3tdvsK5QW6Db9fiZdnvOX1mF4u0P120gt45r/eVef1dIEukP/Xdnq2a9YGdBifkJAAADCZTMjNzUV9ff1ANkdEQeR32Lu7u9HV1aVOHzp0CBaLJWCNEVFg+X3C1tHRgXXr1gEA3G43Jk2ahHHjxgWqr7tKxJiRwroyOEpY/1PeUHW6JyUOF1+e6FXvelT7kDPBJD4c/cMPxIeztyMqcSmqDvx7wLb3H1fvEdbf/PV0YX1/1kfqdFTs09j/SN/86Z4u4bq2pqnC+vA/+H32GzZ+hz0lJQVvvfWW7wWJSBc49EYkCYadSBIMO5EkGHYiSTDsRJLgV1wDwD05W1j/1ZYKYX1UlPZHSm8UlWjFgdfe6ffyetajuIX11e8sFtYjvxEPf/1w+zJ1+p/+OhnPru2bv+dCr3DdwS3iobnYA/uFdT3inp1IEgw7kSQYdiJJMOxEkmDYiSTBsBNJgmEnkgTH2QNg8PE/Cev/3Z0mrI+KahLWw+mVxkeF9VNX+n7pZkOcCS+fnOVV35Lxsea6HR7xOHnK23v70WH/RP7Vk0h876t+L3/nfYHVN+7ZiSTBsBNJgmEnkgTDTiQJhp1IEgw7kSQYdiJJcJw9
AHobLwrr77w5T1hfO138c88Rh+LU6Y8L78XcTS961b8u8v/77Wtaxgrr9dZYYd3d3qhOKzU96Jnc6FX/6Q+LNNc985K4twfxtXgBui3csxNJgmEnkgTDTiQJhp1IEgw7kSQYdiJJMOxEkuA4ewgkbBZ/jzr53xKFdXdrmzo96MnZSPt77+099PBzmusefvx94bY//Yc8Yf3e9oF9p9zwlfZY+YP9/3o5BYDPsG/cuBF1dXUwmUyw2+0AgCtXrqC8vByXLl1CcnIyVqxYgbi4OB9bIqJw8nkYP3nyZKxatcrrsaqqKmRlZeHtt99GVlYWqqqqgtUfEQWIz7BnZmbetNeura1FXt61w7+8vDzU1tYGpzsiChi/ztk7OjpgNpsBAGazGZcvX9Zc1uFwwOFwAABsNhsqamxqzTIm1WteT0LaW6SPt6G3775kljGpqNj/C6/yt+kxmqtGxT0p3PRHK+8V1qP+Yra4t+vw/fRPqHoL+gU6q9UKq9WqzhePL1WnK2psXvN6EsreIpL6f4GuYv8vUDzhda/66Y+0v8zi6wLdT996UVi/t6L/F+j4fvonkL3t9GzXrPk19GYymeByuQAALpcL8fHx/nVGRCHjV9hzcnLgdDoBAE6nE7m5uQFtiogCz+dh/Pr163HkyBF0dnaisLAQ8+fPR0FBAcrLy7F7924kJSWhpKQkFL3etdwtrbe3guL9q+Y9l/t/f/cbPfTMEWH90rsR4g14xPdYJ/3wGfbly5ff8vHVq1cHuhciCiJ+XJZIEgw7kSQYdiJJMOxEkmDYiSTBr7jeBcb87IRmbUlWvnDdzffvEtbz5hUL6/ds2yesk35wz04kCYadSBIMO5EkGHYiSTDsRJJg2IkkwbATSYLj7HcBd3uHZq31hTHCdc9+2iWsl675UFh/ff5T6vR3GTE4/68PedWV/zFprpu21sdvSd/wVV4aGO7ZiSTBsBNJgmEnkgTDTiQJhp1IEgw7kSQYdiJJcJz9Luf5+qiw/uc/Xyms/7ZsnbB+8NG+cfioIT/xmgcAPKq97kNDlgm3PfI3jcJ676kzwjp5456dSBIMO5EkGHYiSTDsRJJg2IkkwbATSYJhJ5IEx9kll/C++Dvly46Lfzc+3nZenf7lPfF47dSPver/nP6F5rqHF/1auO3RaX8prH//5+J9lfvkKWFdNj7DvnHjRtTV1cFkMsFutwMAKisrsWvXLsTHxwMAFixYgOzs7OB2SkQD4jPskydPxvTp01FRUeH1+MyZMzF79uygNUZEgeXznD0zMxNxcXGh6IWIgsigKL5/6Ku5uRlvvvmm12G80+lETEwM0tPTsWjRIs0/CA6HAw6HAwBgs9lw4kCDWrOMScXZoxcC8e8IOL32Fuq+lLhYYT3ie9+p09+LGYbzXRe96umDLvv93H/sTBLWoxvd4g10f6tO6vX9BALb26icDM2aX2Fvb29Xz9e3bdsGl8uFoqKifjUz1ThPna6osaF4fGm/1gs1vfYW6r6UH40T1r0u0P2gFK99bfOqiy7Q+TL6P31doNP+oU3A+wKdXt9PILC97fRs16z5NfQ2dOhQGI1GGI1G5Ofno6GhwfdKRBRWfoXd5XKp0zU1NUhLSwtYQ0QUHD6vxq9fvx5HjhxBZ2cnCgsLMX/+fBw+fBhnzpyBwWBAcnIyli5dGopeKQwMew4K61fn3qtOez4Hrs713n/kPv2i5rr7f7ZBuO1jU/5RWH/mgWnCesckYVk6PsO+fPnymx574okngtELEQURPy5LJAmGnUgSDDuRJBh2Ikkw7ESS4FdcaUDcTc19Mz293vMAUt5uhpbu13qF2441DBLWf/PAZ8L6rKeWq9OeoUNw9akJfdvesV+47t2Ie3YiSTDsRJJg2IkkwbATSYJhJ5IEw04kCYadSBIcZychz6RxwnrDvGh1ujttCE5u8L5H88Pjzmiu62sc3Zd32v5MWI/95IA6bXz9Ka95GXHPTiQJhp1IEgw7kSQYdiJJ
MOxEkmDYiSTBsBNJguPsdzlDzsPC+omXfHxn/EcfCOuPR/fd/inKPA3H51YIlr493yo9wvq+tgfFG/A03jDv43ZRdznu2YkkwbATSYJhJ5IEw04kCYadSBIMO5EkGHYiSXCc/Q4Q+eD9fTODB3nPA2hYMlxz3b99+l+E2/5JXMuAehuIVU05wrrzhu/G38j8wVeBbOeu5zPsLS0tqKioQHt7OwwGA6xWK2bMmIErV66gvLwcly5dQnJyMlasWIG4uLhQ9ExEfvAZ9oiICCxcuBDp6eno6upCaWkpxo4diy+//BJZWVkoKChAVVUVqqqq8Oyzz4aiZyLyg89zdrPZjPT0dABATEwMUlNT0dbWhtraWuTl5QEA8vLyUFtbG9xOiWhADIqiKP1duLm5GWVlZbDb7SgqKsKWLVvU2pIlS7B58+ab1nE4HHA4HAAAm82GEwca1JplTCrOHr0wgPaDR1e9De77/LolIwVnG5q8yt8mRmmuOjzBJdy02Si+39rtMERmQOlt8L3g/7vQGyusdzYNEdYjW7/p93Pp6v28QSB7G5WToVnr9wW67u5u2O12LF68GLGx4jfpelarFVarVZ0vHl+qTlfU2Lzm9URPvV1/QW5D1ct4uWCDV10vF+iiEj9BT+uT/V5+fQgv0Onp/bxRIHvb6dmuWevX0Ftvby/sdjsee+wxTJhw7U6YJpMJLte1vYbL5UJ8fHwAWiWiYPG5Z1cUBZs2bUJqaipmzZqlPp6TkwOn04mCggI4nU7k5uYGtdE7WeQDFmG945H7hPWn/+5zddr0wFVM/+ygV71w6O/87m2gXmns2/u+Fj8Ev2z03ht/tVF7752wpUa4bbOHQ2uB5DPsx48fR3V1NSwWC1auXAkAWLBgAQoKClBeXo7du3cjKSkJJSUlQW+WiPznM+yjR49GZWXlLWurV68OeENEFBz8uCyRJBh2Ikkw7ESSYNiJJMGwE0mCX3Htp8j7hmnW2t4Xf6zzhQedwvqCe5qE9etFRXyHwqGn+r28L8suTBLW694dJ6wnffxHdbr7SyNOTon2qid0cqxcL7hnJ5IEw04kCYadSBIMO5EkGHYiSTDsRJJg2IkkIc04+3c/Fv8qyncr2rzm3SMH48rn6er8qhG/11x3Wkz/fx4pGJrcXZq1xz99Rbju6L85JqwntIvHyT3Xz7jd8HR2Cpen8OGenUgSDDuRJBh2Ikkw7ESSYNiJJMGwE0mCYSeShDTj7GcKxH/XTmR530kjKuZZfJmlfXeN21HRrn1LHgDY4JwmrBvcBnX603nJmP27Iq/66DWnNdcd2bRfuG23sEp3E+7ZiSTBsBNJgmEnkgTDTiQJhp1IEgw7kSQYdiJJ+Bxnb2lpQUVFBdrb22EwGGC1WjFjxgxUVlZi165diI+PB3DtNs7Z2dlBb9hfo14Q3wt81guPeM1X1MSiePwjGksH1iiIe7te9ISnMPJF77FzjpVTf/gMe0REBBYuXIj09HR0dXWhtLQUY8eOBQDMnDkTs2fPDnqTRDRwPsNuNpthNpsBADExMUhNTUVbW5uPtYhIbwyKoij9Xbi5uRllZWWw2+347LPP4HQ6ERMTg/T0dCxatAhxcXE3reNwOOBwOAAANpsNJw40qDXLmFScPXohAP+MwNNrb3rtC2Bv/gpkb6NytD+a3e+wd3d3o6ysDHPmzMGECRPQ3t6unq9v27YNLpcLRUVFPrYCTDXOU6cramwoHl/an6cPOb32pte+APbmr0D2ttOj/X2Ofl2N7+3thd1ux2OPPYYJEyYAAIYOHQqj0Qij0Yj8/Hw0NDT42AoRhZPPsCuKgk2bNiE1NRWzZs1SH3e5XOp0TU0N0tLSgtMhEQWEzwt0x48fR3V1NSwWC1auXAng2jDbnj17cObMGRgMBiQnJ2Pp0qVBb5aI/Ocz7KNHj0ZlZeVNj+t5TJ2IbsZP0BFJgmEnkgTDTiQJhp1IEgw7kSQYdiJJMOxEkmDYiSTBsBNJgmEn
kgTDTiQJhp1IEgw7kSQYdiJJ3NZv0BHRnSuse/bSUn3+Jhig39702hfA3vwVqt54GE8kCYadSBJhDbvVag3n0wvptTe99gWwN3+FqjdeoCOSBA/jiSTBsBNJwudPSQfDwYMHsXnzZng8HuTn56OgoCAcbdxScXExoqOjYTQaERERAZvNFrZeNm7ciLq6OphMJtjtdgDAlStXUF5ejkuXLiE5ORkrVqy45T32wtGbXm7jrXWb8XC/dmG//bkSYm63W1m2bJly8eJFpaenR3n11VeVc+fOhboNTUVFRUpHR0e421AURVEOHz6sNDQ0KCUlJepjW7duVXbs2KEoiqLs2LFD2bp1q25627Ztm/LJJ5+EpZ/rtbW1KQ0NDYqiKMrVq1eVl156STl37lzYXzutvkL1uoX8ML6+vh7Dhg1DSkoKIiMjMXHiRNTW1oa6jTtCZmbmTXue2tpa5OXlAQDy8vLC9trdqje9MJvNSE9PB+B9m/Fwv3ZafYVKyA/j29rakJiYqM4nJibi5MmToW5DaO3atQCAqVOn6m7IpqOjA2azGcC1/zyXL18Oc0fevvjiC1RXVwtv4x1Kzc3NOH36NEaMGKGr1+76vo4dOxaS1y3kYVduMdJnMBhC3YamN954AwkJCejo6MCaNWswfPhwZGZmhrutO8K0adMwd+5cANdu4/3hhx/26zbewdLd3Q273Y7FixcjNjY2bH3c6Ma+QvW6hfwwPjExEa2trep8a2ur+tdWDxISEgAAJpMJubm5qK+vD3NH3kwmk3oHXZfLpV7U0QM93cb7VrcZ18NrF87bn4c87BkZGWhsbERzczN6e3uxd+9e5OTkhLqNW+ru7kZXV5c6fejQIVgsljB35S0nJwdOpxMA4HQ6kZubG+aO+ujlNt6Kxm3Gw/3aafUVqtctLJ+gq6urwwcffACPx4MpU6Zgzpw5oW7hlpqamrBu3ToAgNvtxqRJk8La2/r163HkyBF0dnbCZDJh/vz5yM3NRXl5OVpaWpCUlISSkpKwnBffqrfDhw/fdBvvcBy1HTt2DKtXr4bFYlFPERcsWICRI0eG9bXT6utWtz8PxuvGj8sSSYKfoCOSBMNOJAmGnUgSDDuRJBh2Ikkw7ESSYNiJJPF/h2UBXzw4klYAAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "fig, ax = plt.subplots()\n",
+ "ax.imshow(mnist_tr_x[0].reshape(28,28))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Data Preprocessing\n",
+ "\n",
+ "We convert the labels to their one-hot representation. \n",
+ "This is the best way to run multi-class classification with Falkon which minimizes the squared error"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# one-hot labels\n",
+ "A = torch.eye(10, dtype=torch.float32)\n",
+ "mnist_tr_y = A[mnist_tr_y.to(torch.long), :]\n",
+ "mnist_ts_y = A[mnist_ts_y.to(torch.long), :]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def classif_error(y_true, y_pred):\n",
+ " y_true = torch.argmax(y_true, dim=1)\n",
+ " y_pred = torch.argmax(y_pred, dim=1)\n",
+ " err = y_true.flatten() != y_pred.flatten()\n",
+ " return torch.mean(err.to(torch.float32))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Run Falkon\n",
+ "\n",
+ "There are a few parameters which need to be provided to the algorithm\n",
+ "\n",
+    " - The `FalkonOptions` class is used to provide non-standard tuning knobs. It allows one, for example, to tune the amount of GPU memory the algorithm can use, adjust the convergence tolerance, and decide whether certain parts of the algorithm are computed on CPU or GPU. \n",
+ " \n",
+ " It can be used with default parameters for most purposes!\n",
+ " \n",
+ " - The **kernel** is the most important choice which depends on the data at hand. We use the `GaussianKernel` which is the most common option and initialize it with a length-scale of 15.\n",
+ " \n",
+ " - The **penalty** determines the amount of regularization. A higher value corresponds to more regularization.\n",
+ " \n",
+ " - The **number of centers** `M` strongly influences the time needed for fitting. By default the centers\n",
+ " are chosen uniformly at random."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "options = falkon.FalkonOptions(use_cpu=True)\n",
+ "kernel = falkon.kernels.GaussianKernel(sigma=15)\n",
+ "flk = falkon.Falkon(kernel=kernel, \n",
+ " penalty=1e-8,\n",
+ " M=1000, \n",
+ " maxiter=10,\n",
+ " options=options,\n",
+ " error_every=1,\n",
+ " error_fn=classif_error)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Iteration 1 - Elapsed 0.55s - training error: 0.11998333\n",
+ "Iteration 2 - Elapsed 1.02s - training error: 0.07140000\n",
+ "Iteration 3 - Elapsed 1.50s - training error: 0.05766667\n",
+ "Iteration 4 - Elapsed 1.98s - training error: 0.05121667\n",
+ "Iteration 5 - Elapsed 2.46s - training error: 0.04776667\n",
+ "Iteration 6 - Elapsed 2.95s - training error: 0.04556667\n",
+ "Iteration 7 - Elapsed 3.45s - training error: 0.04376667\n",
+ "Iteration 8 - Elapsed 3.93s - training error: 0.04340000\n",
+ "Iteration 9 - Elapsed 4.42s - training error: 0.04286667\n",
+ "Iteration 10 - Elapsed 5.39s - training error: 0.04223333\n"
+ ]
+ }
+ ],
+ "source": [
+ "_ = flk.fit(mnist_tr_x, mnist_tr_y)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Training error: 4.22%\n",
+ "Test error: 4.13%\n"
+ ]
+ }
+ ],
+ "source": [
+ "train_pred = flk.predict(mnist_tr_x)\n",
+ "test_pred = flk.predict(mnist_ts_x)\n",
+ "\n",
+ "print(\"Training error: %.2f%%\" % (classif_error(train_pred, mnist_tr_y) * 100))\n",
+ "print(\"Test error: %.2f%%\" % (classif_error(test_pred, mnist_ts_y) * 100))\n"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.9"
+ },
+ "toc": {
+ "base_numbering": 1,
+ "nav_menu": {},
+ "number_sections": true,
+ "sideBar": true,
+ "skip_h1_title": false,
+ "title_cell": "Table of Contents",
+ "title_sidebar": "Contents",
+ "toc_cell": false,
+ "toc_position": {},
+ "toc_section_display": true,
+ "toc_window_display": false
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/examples/falkon_regression_tutorial.html b/examples/falkon_regression_tutorial.html
new file mode 100644
index 00000000..5dfc0ead
--- /dev/null
+++ b/examples/falkon_regression_tutorial.html
@@ -0,0 +1,567 @@
+
+
+
+
+
+
+
+
+ Falkon Regression Tutorial — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+Falkon Regression Tutorial
+
+Introduction
+This notebook introduces the main interface of the Falkon library, using a toy regression problem.
+ We will be using the California housing dataset which is included in scikit-learn to train a Falkon model. Since the dataset is quite small, it is not necessary to use the Nystroem approximation here. It is however useful to demonstrate the simple API offered by Falkon.
+
+
+
+
+
+
+[pyKeOps]: Warning, no cuda detected. Switching to cpu only.
+
+
+
+
+Load the data
+ The California housing dataset poses a regression problem with 20,640 data points in 8 dimensions. The goal is to predict house prices given some attributes including median income, house age, average number of rooms, etc.
+After loading the data, we split it into two parts: a training set (containing 80% of the points) and a test set with the remaining 20%. Data splitting could alternatively be done using some scikit-learn utilities (found in the model_selection module )
+
+
+
+
+Pre-process the data
+We must convert the numpy arrays to PyTorch tensors before using them in Falkon. This is very easy and fast with the torch.from_numpy function.
+ Another preprocessing step which is often necessary with kernel methods is z-score normalization of the data: converting it to have zero mean and unit standard deviation. We use the statistics of the training data to avoid leakage between the two sets.
+
+
+
+
+Create the Falkon model
+The Falkon object is the main API of this library. It is similar in spirit to the fit-transform API of scikit-learn, while supporting some additional features such as monitoring of validation error.
+While Falkon models have many options, most are related to performance fine-tuning which becomes useful with much larger datasets. Here we only showcase some of the more basic options.
+Mandatory parameters are:
+
+ the kernel function (here we use a Gaussian kernel)
+the amount of regularization, which we set to some small positive value
+the number of inducing points M. We set M to 5000, which is a sizable portion of the dataset.
+
+
+
+
+
+
+
+/home/giacomo/Dropbox/unige/falkon/falkon/falkon/utils/switches.py:25: UserWarning: Failed to initialize CUDA library; falling back to CPU. Set 'use_cpu' to True to avoid this warning.
+ warnings.warn(get_error_str("CUDA", None))
+
+
+
+
+Training the model
+The Falkon model is trained using the preconditioned conjugate gradient algorithm (TODO: Add a reference). Thus there are two steps to the algorithm: first the preconditioner is computed, and then the conjugate gradient iterations are performed. To gain more insight in the various steps of the algorithm you can pass debug=True when creating the Falkon object.
+Model training will occur on the GPU, if it is available, and CUDA is properly installed, or on the CPU as a fallback.
+
+
+
+
+
+Falkon(M=5000, center_selection=<falkon.center_selection.UniformSelector object at 0x7f65871c45e0>, kernel=GaussianKernel(sigma=Parameter containing:
+tensor([1.], dtype=torch.float64)), options=FalkonOptions(keops_acc_dtype='auto', keops_sum_scheme='auto', keops_active='no', keops_memory_slack=0.7, chol_force_in_core=False, chol_force_ooc=False, chol_par_blk_multiplier=2, pc_epsilon_32=1e-05, pc_epsilon_64=1e-13, cpu_preconditioner=False, cg_epsilon_32=1e-07, cg_epsilon_64=1e-15, cg_tolerance=1e-07, cg_full_gradient_every=10, cg_differential_convergence=False, debug=False, use_cpu=False, max_gpu_mem=inf, max_cpu_mem=inf, compute_arch_speed=False, no_single_kernel=True, min_cuda_pc_size_32=10000, min_cuda_pc_size_64=30000, min_cuda_iter_size_32=300000000, min_cuda_iter_size_64=900000000, never_store_kernel=False, store_kernel_d_threshold=1200, num_fmm_streams=2), penalty=1e-05)
+
+
+Optimization converges very quickly to a minimum, where convergence is detected by checking the change model parameters between iterations.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/examples/falkon_regression_tutorial.ipynb b/examples/falkon_regression_tutorial.ipynb
new file mode 100644
index 00000000..9a519e23
--- /dev/null
+++ b/examples/falkon_regression_tutorial.ipynb
@@ -0,0 +1,378 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Falkon Regression Tutorial\n",
+ "\n",
+ "## Introduction\n",
+ "\n",
+ "This notebook introduces the main interface of the Falkon library, \n",
+ "using a toy regression problem.\n",
+ "\n",
+ "We will be using the California housing dataset, which is included in `scikit-learn`, to train a Falkon model.\n",
+ "Since the dataset is fairly small, it is not necessary to use the Nystroem approximation here. It is however useful to demonstrate the simple API offered by Falkon."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "pycharm": {
+ "is_executing": false,
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[pyKeOps]: Warning, no cuda detected. Switching to cpu only.\n"
+ ]
+ }
+ ],
+ "source": [
+ "%matplotlib inline\n",
+ "from sklearn import datasets\n",
+ "import numpy as np\n",
+ "import torch\n",
+ "import matplotlib.pyplot as plt\n",
+ "plt.style.use('ggplot')\n",
+ "\n",
+ "import falkon"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Load the data\n",
+ "\n",
+ "The California housing dataset poses a regression problem with 20640 data points in 8 dimensions.\n",
+ "The goal is to predict house prices given attributes such as the median income, house age, and average number of rooms in each district.\n",
+ "\n",
+ "After loading the data, we split it into two parts: a training set (containing 80% of the points) and a test \n",
+ "set with the remaining 20%. Data splitting could alternatively be done using some scikit-learn utilities (found in the [model_selection module](https://scikit-learn.org/stable/modules/cross_validation.html#cross-validation))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {
+ "pycharm": {
+ "is_executing": false,
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "X, Y = datasets.fetch_california_housing(return_X_y=True)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "pycharm": {
+ "is_executing": false,
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "num_train = int(X.shape[0] * 0.8)\n",
+ "num_test = X.shape[0] - num_train\n",
+ "shuffle_idx = np.arange(X.shape[0])\n",
+ "np.random.shuffle(shuffle_idx)\n",
+ "train_idx = shuffle_idx[:num_train]\n",
+ "test_idx = shuffle_idx[num_train:]\n",
+ "\n",
+ "Xtrain, Ytrain = X[train_idx], Y[train_idx]\n",
+ "Xtest, Ytest = X[test_idx], Y[test_idx]"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
+ "source": [
+ "## Pre-process the data\n",
+ "\n",
+ "We must convert the numpy arrays to PyTorch tensors before using them in Falkon.\n",
+ "This is very easy and fast with the `torch.from_numpy` function.\n",
+ "\n",
+ "Another preprocessing step which is often necessary with kernel methods is to normalize the z-score of the data:\n",
+ "convert it to have zero-mean and unit standard deviation.\n",
+ "We use the statistics of the training data to avoid leakage between the two sets."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {
+ "pycharm": {
+ "is_executing": false,
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# convert numpy -> pytorch\n",
+ "Xtrain = torch.from_numpy(Xtrain).to(dtype=torch.float32)\n",
+ "Xtest = torch.from_numpy(Xtest).to(dtype=torch.float32)\n",
+ "Ytrain = torch.from_numpy(Ytrain).to(dtype=torch.float32)\n",
+ "Ytest = torch.from_numpy(Ytest).to(dtype=torch.float32)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {
+ "pycharm": {
+ "is_executing": false,
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# z-score normalization\n",
+ "train_mean = Xtrain.mean(0, keepdim=True)\n",
+ "train_std = Xtrain.std(0, keepdim=True)\n",
+ "Xtrain -= train_mean\n",
+ "Xtrain /= train_std\n",
+ "Xtest -= train_mean\n",
+ "Xtest /= train_std"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
+ "source": [
+ "## Create the Falkon model\n",
+ "\n",
+ "The Falkon object is the main API of this library. \n",
+ "It is similar in spirit to the fit-transform API of scikit-learn, while supporting some\n",
+ "additional features such as monitoring of validation error.\n",
+ "\n",
+ "While Falkon models have many options, most are related to performance fine-tuning which becomes useful with much \n",
+ "larger datasets.\n",
+ "Here we only showcase some of the more basic options.\n",
+ "\n",
+ "Mandatory parameters are:\n",
+ " - the kernel function (here we use a Gaussian kernel)\n",
+ " - the amount of regularization, which we set to some small positive value\n",
+ " - the number of inducing points M. We set M to 5000, which is a sizable portion of the dataset."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {
+ "pycharm": {
+ "is_executing": false,
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "/home/giacomo/Dropbox/unige/falkon/falkon/falkon/utils/switches.py:25: UserWarning: Failed to initialize CUDA library; falling back to CPU. Set 'use_cpu' to True to avoid this warning.\n",
+ " warnings.warn(get_error_str(\"CUDA\", None))\n"
+ ]
+ }
+ ],
+ "source": [
+ "options = falkon.FalkonOptions(keops_active=\"no\")\n",
+ "\n",
+ "kernel = falkon.kernels.GaussianKernel(sigma=1, opt=options)\n",
+ "flk = falkon.Falkon(kernel=kernel, penalty=1e-5, M=5000, options=options)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
+ "source": [
+ "## Training the model\n",
+ "\n",
+ "The Falkon model is trained using the preconditioned conjugate gradient algorithm (see Rudi, Carratino and Rosasco, \"FALKON: An Optimal Large Scale Kernel Method\", NeurIPS 2017). Thus there are\n",
+ "two steps to the algorithm: first the preconditioner is computed, and then the conjugate gradient iterations are performed.\n",
+ "To gain more insight in the various steps of the algorithm you can pass `debug=True` when creating the Falkon object. \n",
+ "\n",
+ "Model training will occur on the GPU, if it is available, and CUDA is properly installed, \n",
+ "or on the CPU as a fallback. "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {
+ "pycharm": {
+ "is_executing": false,
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "Falkon(M=5000, center_selection=, kernel=GaussianKernel(sigma=Parameter containing:\n",
+ "tensor([1.], dtype=torch.float64)), options=FalkonOptions(keops_acc_dtype='auto', keops_sum_scheme='auto', keops_active='no', keops_memory_slack=0.7, chol_force_in_core=False, chol_force_ooc=False, chol_par_blk_multiplier=2, pc_epsilon_32=1e-05, pc_epsilon_64=1e-13, cpu_preconditioner=False, cg_epsilon_32=1e-07, cg_epsilon_64=1e-15, cg_tolerance=1e-07, cg_full_gradient_every=10, cg_differential_convergence=False, debug=False, use_cpu=False, max_gpu_mem=inf, max_cpu_mem=inf, compute_arch_speed=False, no_single_kernel=True, min_cuda_pc_size_32=10000, min_cuda_pc_size_64=30000, min_cuda_iter_size_32=300000000, min_cuda_iter_size_64=900000000, never_store_kernel=False, store_kernel_d_threshold=1200, num_fmm_streams=2), penalty=1e-05)"
+ ]
+ },
+ "execution_count": 7,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "flk.fit(Xtrain, Ytrain)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Optimization converges very quickly to a minimum, where convergence is detected by checking the change in the model parameters between iterations."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
+ "source": [
+ "## Evaluating model performance"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Since the problem is regression a natural error metric is the RMSE. Given a fitted model, we can run the `predict` method to obtain predictions on new data.\n",
+ "\n",
+ "Here we print the error on both train and test sets."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {
+ "pycharm": {
+ "is_executing": false,
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Training RMSE: 0.527\n",
+ "Test RMSE: 0.578\n"
+ ]
+ }
+ ],
+ "source": [
+ "train_pred = flk.predict(Xtrain).reshape(-1, )\n",
+ "test_pred = flk.predict(Xtest).reshape(-1, )\n",
+ "\n",
+ "def rmse(true, pred):\n",
+ " return torch.sqrt(torch.mean((true.reshape(-1, 1) - pred.reshape(-1, 1))**2))\n",
+ "\n",
+ "print(\"Training RMSE: %.3f\" % (rmse(train_pred, Ytrain)))\n",
+ "print(\"Test RMSE: %.3f\" % (rmse(test_pred, Ytest)))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Finally we plot the model predictions to check that the distribution of our predictions is close to that of the labels."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX0AAAD4CAYAAAAAczaOAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAAAY9ElEQVR4nO3df2xb1d3H8bdvHJOWLMGOG6KEVHtCUqGKQNjStQmwlNbVWNWJqNKYWhWNtJMGTA9qgxhhdC1SC4s0kkCmVJWmUoYmTZu2NZug+4PgkWp4kwylWvih0rBoUDU0P2xSWpI4if380QdrrKFJ7Bvb8fm8/qqv7Xu+p44/Offk3nMdsVgshoiIGMFKdwEiIpI6Cn0REYMo9EVEDKLQFxExiEJfRMQgCn0REYM4013AfJw7dy7dJczJ6/UyMjKS7jJskU19AfUnk2VTXyCz+lNaWjrrdo30RUQMotAXETGIQl9ExCBLYk5fRGShYrEYExMTRKNRHA5HSto8f/48k5OTKWkLLvfRsizy8vLm3UeFvohkpYmJCXJzc3E6UxdzTqeTnJyclLUHMD09zcTEBMuWLZvX6zW9IyJZKRqNpjTw08XpdBKNRuf9eoW+iGSlVE3pZIKF9FWhLyJikOw/9hERAWZ+ccDW/eX870+v+nwoFOJ73/seAMPDw+Tk5ODxeAB4+eWXcblcttYzXwp9sYXdX6iFmOvLJ5IOHo+HV155BYC2tjauvfZaHnjggfjz09PTafmbg0JfRCRFdu/ezXXXXcfbb79NdXU1+fn5X/hlsGHDBn71q19RXl7OH/7wB55//nkikQi33XYbP/vZz2w5M0hz+iIiKfSvf/2L3/72t+zfv/9LX3PmzBn+/Oc/093dzSuvvEJOTg5//OMfbWlfI30RkRTasmXLnCP2v/3tb/T19bF582bg8jUHXq/XlvYV+iIiKbR8+fL4v3Nycr5wjv3nV/PGYjG++93v8vjjj9vevqZ3RETSpLy8nL6+PgD6+vr48MMPAbjjjjt46aWX4ss0h8Nhzp49a0ubGumLiBEy8SyvzZs38/vf/55NmzZRU1NDRUUFAKtWreLHP/4x27ZtIxaL4XQ6eeqpp7jhhhuSblOhLyKyyB555JFZty9btozf/OY3sz53zz33cM8999hei6Z3REQMMudI/9ChQ5w8eZLCwkLa2toAuHjxIh0dHQwPD7NixQr27NlDfn4+AMeOHcPv92NZFk1NTdTU1ACXT1Pq6uqKn3Pa1NRk1NoY2e7p/NvT1nbmHbSLZK45R/rr16/nJz/5yRe2dXd3U11dTWdnJ9XV1XR3dwNw9uxZAoEA7e3tPPHEExw5ciT+l+lf/vKX/PCHP6Szs5OPP/6YU6dO2d4ZERG5ujlDf/Xq1fFR/OeCwSANDQ0ANDQ0EAwG49vr6+vJzc2luLiYkpIS+vv7CYfDjI+Ps2rVKhwOB9/85jfj7xERkdRJaE5/bGwMt9sNgNvt5sKFC8DlBYaKiorir/N4PIRCoSu2FxUVEQqFkqlbREQSYOvZO7FYbEHbv0xPTw89PT0AtLa22nYl2mJyOp1Los75SKQvDit95wTMVWs2fTaQXf1ZzL6cP38+PQuapaHNa665Zt7/jwlVV1hYSDgcxu12Ew6HKSgoAC6P4EdHR+OvC4VCeDyeK7aPjo7Glxidjc/nw+fzxR9/foFCJvN6vUuizvlIpC+xBdy5x25z1ZpNnw1kV38Wsy+Tk5NfWO7g4Gsf2br/vevLr9jmdDqZnp6OPy4vL+emm25iZmaGyspKnnvuuXnf1vC/7d69G5/Px5YtW654bnJy8or/x9LS0ln3k9DwrLa2lt7eXgB6e3tZs2ZNfHsgEGBqaoqhoSEGBweprKzE7XazbNky3n//fWKxGCdOnKC2tjaRpkVEloy8vDxeeeUV/H4/LpeLF1988QvPz8zMpLymOUf6zz77LO+++y6ffvopDzzwAPfeey+NjY10
dHTg9/vxer00NzcDl3+r1dXV0dzcjGVZ7Nq1C+v/D/t/8IMfcOjQISKRCDU1Ndx2222L2zMRkQzyjW98g/feey9+huP111/PO++8w6uvvsrTTz/N3//+dyKRCN///ve57777iMVi7N27l9dff53y8iuPKhI1Z+jv3r171u379u2bdfvWrVvZunXrFdtvvPHG+Hn+IiImmZ6e5q9//Svr168H4NSpU/j9flauXMmvf/1rvvKVr3D8+HEmJydpbGykoaGBt99+mw8++IBXX32V4eFh7rrrrviduJKhZRhERBbJxMQEmzZtAmDt2rVs27aNN954g5qaGlauXAlcniJ/7733ePnllwH49NNPGRgY4B//+AeNjY3k5ORQUlLC7bfbcwGkQl9EZJF8Pqf/3/5zeWWAgwcPxo8CPvfqq68uyqoFWntHRCSNGhoaePHFF5mamgLggw8+4LPPPmPdunX86U9/YmZmhvPnzxMIBGxpTyN9ETHCbKdYZoLt27fz0UcfcffddxOLxfB4PDz//PN8+9vf5vXXX2fjxo1UVFSwbt06W9pzxBZ65VQanDt3Lt0lzMn0c6cPHPUvUjVz+2nThqs+n02fDWRXfxazL5999tkV0yiL7b/P00+V2fpq63n6IiKyNCn0RUQMotAXkay0BGaubbOQvir0RSQrWZaVlvn1VJueno6vfDAfOntHRLJSXl4eExMTTE5Opuwufddccw2Tk5MpaQsuj/AtyyIvL2/e71Hoi0hWcjgcCa9omailcGaVpndERAyi0BcRMYhCX0TEIAp9ERGD6A+5WWbmFweS3kfY5WImElnYm/LtWfZVRBaXRvoiIgbRSF+WvLlueO1ynSey0COXecjUVRtFrkYjfRERgyj0RUQMotAXETGIQl9ExCAKfRERgyj0RUQMotAXETGIQl9ExCAKfRERgyj0RUQMotAXETGIQl9ExCBJLbj20ksv4ff7cTgclJeX89BDDxGJROjo6GB4eJgVK1awZ88e8vPzATh27Bh+vx/LsmhqaqKmpsaOPoiIyDwlPNIPhUL85S9/obW1lba2NqLRKIFAgO7ubqqrq+ns7KS6upru7m4Azp49SyAQoL29nSeeeIIjR44QjUbt6oeIiMxDUtM70WiUSCTCzMwMkUgEt9tNMBikoaEBgIaGBoLBIADBYJD6+npyc3MpLi6mpKSE/v7+5HsgIiLzlvD0jsfj4Tvf+Q4PPvggLpeLW2+9lVtvvZWxsTHcbjcAbrebCxcuAJePDKqqqr7w/lAoNOu+e3p66OnpAaC1tRWv15tomSnjdDozos6wy5X0PiyHA9cC9+Ow0vfnoblqdTisBfdnPtL1eWfKz5odsqkvsDT6k3DoX7x4kWAwSFdXF8uXL6e9vZ0TJ0586etjsdi89+3z+fD5fPHHIyMjiZaZMl6vNyPqXPBtDmfhcrkWfNORmCt9U3Vz1ZpIf+YjXZ93pvys2SGb+gKZ1Z/S0tJZtyc8POvr66O4uJiCggKcTidr167l/fffp7CwkHA4DEA4HKagoACAoqIiRkdH4+8PhUJ4PJ5EmxcRkQQkHPper5czZ84wOTlJLBajr6+PsrIyamtr6e3tBaC3t5c1a9YAUFtbSyAQYGpqiqGhIQYHB6msrLSnFyIiMi8JT+9UVVWxbt06HnvsMXJycvjqV7+Kz+djYmKCjo4O/H4/Xq+X5uZmAMrLy6mrq6O5uRnLsti1axdWGueBs9XT+bcnvQ+HZaV1ukZEFo8jtpDJ9jQ5d+5cukuYU6bM5R046k96Hw7LIraETqd1/E/VVZ9frDn9dN0YPVN+1uyQTX2BzOqP7XP6IiKy9Cj0RUQMotAXETGIQl9ExCAKfRERgyj0RUQMotAXETGIQl9ExCBJ3URFJBPEBs5c9fnIYl1slqaLs0SSoZG+iIhBFPoiIgZR6IuIGEShLyJiEIW+iIhBFPoiIgZR6IuIGEShLyJiEIW+iIhBFPoiIgZR6IuIGEShLyJi
EIW+iIhBFPoiIgZR6IuIGEShLyJiEIW+iIhBFPoiIgZR6IuIGEShLyJiEIW+iIhBnMm8+dKlSxw+fJiPPvoIh8PBgw8+SGlpKR0dHQwPD7NixQr27NlDfn4+AMeOHcPv92NZFk1NTdTU1NjRBxERmaekQv/o0aPU1NTwyCOPMD09zeTkJMeOHaO6uprGxka6u7vp7u5mx44dnD17lkAgQHt7O+FwmAMHDvDcc89hWTrYEBFJlYQT97PPPuO9995jw4YNADidTq699lqCwSANDQ0ANDQ0EAwGAQgGg9TX15Obm0txcTElJSX09/fb0AUREZmvhEf6Q0NDFBQUcOjQIf79739TUVHB/fffz9jYGG63GwC3282FCxcACIVCVFVVxd/v8XgIhUKz7runp4eenh4AWltb8Xq9iZaZMk6nMyPqdNhw5OQAyKIjsMXqT7o+70z5WbNDNvUFlkZ/Eg79mZkZBgYG2LlzJ1VVVRw9epTu7u4vfX0sFpv3vn0+Hz6fL/54ZGQk0TJTxuv1ZkSdsWg0+Z1Ylj37yRSL1J/dv3/L9n3Oh8vl4sf116elbbtlyvfGLpnUn9LS0lm3Jzz8KSoqoqioKD56X7duHQMDAxQWFhIOhwEIh8MUFBTEXz86Ohp/fygUwuPxJNq8iIgkIOHQv+666ygqKuLcuXMA9PX1ccMNN1BbW0tvby8Avb29rFmzBoDa2loCgQBTU1MMDQ0xODhIZWWlDV0QEZH5SursnZ07d9LZ2cn09DTFxcU89NBDxGIxOjo68Pv9eL1empubASgvL6euro7m5mYsy2LXrl06c0dEJMUcsYVMtqfJ50cTmSxT5vIOHPUnvQ9Hls3pL1Z/HP9TNfeLFoHm9DNXJvXH9jl9ERFZehT6IiIGUeiLiBhEoS8iYhCFvoiIQRT6IiIGUeiLiBhEoS8iYhCFvoiIQRT6IiIGUeiLiBhEoS8iYhCFvoiIQRT6IiIGUeiLiBhEoS8iYhCFvoiIQRT6IiIGUeiLiBhEoS8iYhBnugsQWapiA2fS0m7EsiBLbowuqaeRvoiIQRT6IiIGUeiLiBhEoS8iYhCFvoiIQRT6IiIGUeiLiBhEoS8iYhCFvoiIQZK+IjcajdLS0oLH46GlpYWLFy/S0dHB8PAwK1asYM+ePeTn5wNw7Ngx/H4/lmXR1NRETU1Nss2LiMgCJD3SP378OGVlZfHH3d3dVFdX09nZSXV1Nd3d3QCcPXuWQCBAe3s7TzzxBEeOHCEajSbbvIiILEBSoT86OsrJkyfZuHFjfFswGKShoQGAhoYGgsFgfHt9fT25ubkUFxdTUlJCf39/Ms2LiMgCJTW988ILL7Bjxw7Gx8fj28bGxnC73QC43W4uXLgAQCgUoqqqKv46j8dDKBSadb89PT309PQA0NraitfrTabMlHA6nRlRp8NK/s80DgAb9pMpsrE/mfCzZodM+d7YZSn0J+HQf/PNNyksLKSiooJ33nlnztfHYrF579vn8+Hz+eKPR0ZGEqoxlbxeb0bUGbNjysyy7NlPpsjC/mTCz5odMuV7Y5dM6k9paems2xMO/dOnT/PGG2/w1ltvEYlEGB8fp7Ozk8LCQsLhMG63m3A4TEFBAQBFRUWMjo7G3x8KhfB4PIk2LyIiCUj4mHf79u0cPnyYrq4udu/ezc0338zDDz9MbW0tvb29APT29rJmzRoAamtrCQQCTE1NMTQ0xODgIJWVlfb0QkRE5sX2m6g0NjbS0dGB3+/H6/XS3NwMQHl5OXV1dTQ3N2NZFrt27cLKonlWEZGlwBFbyGR7mpw7dy7dJcwpU+byDhz1J70PR5bNgWdjf/Z+f326y7BFpnxv7JJJ/bF9Tl9E0ufgax+lpd2968vT0q7YR/MrIiIGUeiLiBhEoS8iYhCFvoiIQRT6IiIGUeiLiBhEoS8iYhCFvoiIQXRxlsgSFBs4k56GdXHWkqeRvoiIQRT6IiIGUeiLiBhEoS8iYhCFvoiIQRT6IiIG
UeiLiBhEoS8iYhCFvoiIQRT6IiIGUeiLiBhEoS8iYhCFvoiIQRT6IiIGUeiLiBhEoS8iYhCFvoiIQRT6IiIGUeiLiBhEoS8iYpCEb4w+MjJCV1cXn3zyCQ6HA5/Px+bNm7l48SIdHR0MDw+zYsUK9uzZQ35+PgDHjh3D7/djWRZNTU3U1NTY1Q8RkUUx84sD835t2OViJhKxpd2c//2pLfv5bwmHfk5ODvfddx8VFRWMj4/T0tLCLbfcwmuvvUZ1dTWNjY10d3fT3d3Njh07OHv2LIFAgPb2dsLhMAcOHOC5557DsnSwISKSKgmHvtvtxu12A7Bs2TLKysoIhUIEg0GefPJJABoaGnjyySfZsWMHwWCQ+vp6cnNzKS4upqSkhP7+flatWmVLRzLJQkYGtsu/PX1ti0jGSzj0/9PQ0BADAwNUVlYyNjYW/2Xgdru5cOECAKFQiKqqqvh7PB4PoVBo1v319PTQ09MDQGtrK16v144yF5XT6YzXGXa50laHw4YjJwdAFh2BqT/2sfu7+J/fm0y1kO+z5XDgsun7716k/5ekQ39iYoK2tjbuv/9+li9f/qWvi8Vi896nz+fD5/PFH4+MjCRVYyp4vd54nXbN6SUi5oomvxPLIha1YT+ZQv2xjd3fxf/83mSqhXyfXS4XEZu+/8n+v5SWls66PanhwvT0NG1tbdx5552sXbsWgMLCQsLhMADhcJiCggIAioqKGB0djb83FArh8XiSaV5ERBYo4dCPxWIcPnyYsrIytmzZEt9eW1tLb28vAL29vaxZsya+PRAIMDU1xdDQEIODg1RWViZZvoiILETC0zunT5/mxIkTrFy5kkcffRSAbdu20djYSEdHB36/H6/XS3NzMwDl5eXU1dXR3NyMZVns2rVLZ+6IiKRYwqF/00038bvf/W7W5/bt2zfr9q1bt7J169ZEmxQRkSRpqC0iYhBbTtmUL3pa58qLSIbSSF9ExCAa6YvIvB187SNb9+dynZ/Xee1715fb2q7JFPoiMm+xgTO27i8y3wvNFPq20fSOiIhBFPoiIgZR6IuIGEShLyJiEIW+iIhBFPoiIgZR6IuIGEShLyJiEIW+iIhBFPoiIgbRMgwikvHsXvNnIR5PW8uLQyN9ERGDKPRFRAyi6R0RyXh2r+5pMo30RUQMotAXETGIpndERK5iIfe8dlgWMdc8bgozDz+1ZS9X0khfRMQgCn0REYMo9EVEDKLQFxExiEJfRMQgCn0REYMo9EVEDKLQFxExSMovzjp16hRHjx4lGo2yceNGGhsbU12CiIixUhr60WiUI0eOsHfvXoqKinj88cepra3lhhtuWJT2UrkGt8t1nkgkkrL2REQSkdLpnf7+fkpKSrj++utxOp3U19cTDAZTWYKIiNFSOtIPhUIUFRXFHxcVFXHmzJVLpvb09NDT0wNAa2srpaWlCbV3aHti70ve2jS1KyJydSkd6cdisSu2ORyOK7b5fD5aW1tpbW1NRVm2aGlpSXcJtsmmvoD6k8myqS+wNPqT0tAvKipidHQ0/nh0dBS3253KEkREjJbS0L/xxhsZHBxkaGiI6elpAoEAtbW1qSxBRMRoKZ3Tz8nJYefOnTz11FNEo1HuuusuysvLU1nCovH5fOkuwTbZ1BdQfzJZNvUFlkZ/HLHZJtpFRCQr6YpcERGDKPRFRAyie+QmKZuWlTh06BAnT56ksLCQtra2dJeTtJGREbq6uvjkk09wOBz4fD42b96c7rISEolE2L9/P9PT08zMzLBu3TruvffedJeVtGg0SktLCx6PZ0mc7ng1P/rRj8jLy8OyLHJycjL2lHOFfhJSvazEYlu/fj133303XV1d6S7FFjk5Odx3331UVFQwPj5OS0sLt9xyy5L8fHJzc9m/fz95eXlMT0+zb98+ampqWLVqVbpLS8rx48cpKytjfHw83aXYYv/+/RQUFKS7jKvS9E4Ssm1ZidWrV5Ofn5/uMmzjdrupqKgAYNmyZZSV
lREKhdJcVWIcDgd5eXkAzMzMMDMzM+uFjUvJ6OgoJ0+eZOPGjekuxSga6SdhvstKSPoNDQ0xMDBAZWVluktJWDQa5bHHHuPjjz/mW9/6FlVVVekuKSkvvPACO3bsyJpRPsBTTz0FwKZNmzL29E2FfhLmu6yEpNfExARtbW3cf//9LF++PN3lJMyyLH7+859z6dIlnnnmGT788ENWrlyZ7rIS8uabb1JYWEhFRQXvvPNOusuxxYEDB/B4PIyNjXHw4EFKS0tZvXp1usu6gkI/CVpWIvNNT0/T1tbGnXfeydq12bEQ3rXXXsvq1as5derUkg3906dP88Ybb/DWW28RiUQYHx+ns7OThx9+ON2lJczj8QBQWFjImjVr6O/vz8jQ15x+ErSsRGaLxWIcPnyYsrIytmzZku5yknLhwgUuXboEXD6Tp6+vj7KysjRXlbjt27dz+PBhurq62L17NzfffPOSDvyJiYn4NNXExAT//Oc/M/YXskb6Sci2ZSWeffZZ3n33XT799FMeeOAB7r33XjZs2JDushJ2+vRpTpw4wcqVK3n00UcB2LZtG1/72tfSXNnChcNhurq6iEajxGIx6urq+PrXv57usuT/jY2N8cwzzwCX/9B+xx13UFNTk96ivoSWYRARMYimd0REDKLQFxExiEJfRMQgCn0REYMo9EVEDKLQFxExiEJfRMQg/wcfh6NZb+HHugAAAABJRU5ErkJggg==\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "fig, ax = plt.subplots()\n",
+ "hist_range = (min(Ytest.min(), test_pred.min()).item(), max(Ytest.max(), test_pred.max()).item())\n",
+ "ax.hist(Ytest.numpy(), bins=10, range=hist_range, alpha=0.7, label=\"True\")\n",
+ "ax.hist(test_pred.numpy(), bins=10, range=hist_range, alpha=0.7, label=\"Pred\")\n",
+ "ax.legend(loc=\"best\");"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.9"
+ },
+ "pycharm": {
+ "stem_cell": {
+ "cell_type": "raw",
+ "metadata": {
+ "collapsed": false
+ },
+ "source": []
+ }
+ },
+ "toc": {
+ "base_numbering": 1,
+ "nav_menu": {},
+ "number_sections": true,
+ "sideBar": true,
+ "skip_h1_title": false,
+ "title_cell": "Table of Contents",
+ "title_sidebar": "Contents",
+ "toc_cell": false,
+ "toc_position": {},
+ "toc_section_display": true,
+ "toc_window_display": false
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 1
+}
diff --git a/examples/hyperopt.html b/examples/hyperopt.html
new file mode 100644
index 00000000..c2560648
--- /dev/null
+++ b/examples/hyperopt.html
@@ -0,0 +1,696 @@
+
+
+
+
+
+
+
+
+ Automatic Hyperparameter Optimization — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+Automatic Hyperparameter Optimization
+
+
+
+
+
+
+[pyKeOps]: Warning, no cuda detected. Switching to cpu only.
+
+
+
+Load the data
+We use the digits dataset, which is distributed alongside scikit-learn.
+
+
+
+
+
+
+
+
+
+
+
+Split into training and test sets
+We split the data into a training set with 80% of the samples and a test set with the remaining 20%.
+
+
+
+Data Preprocessing
+As always with Falkon we must:
+
+Convert from numpy arrays to torch tensors
+Convert data and labels to the same data-type (in this case float32)
+
+
+Normalizing the data is always a good idea, and it becomes even more important with automatic hyperparameter optimization.
+Here we use the global mean and standard deviation of the training set for z-score normalization.
+
+Since Falkon optimizes with respect to the square loss, using ordinal labels (e.g. 1, 4, 5) is not ideal since closeness in the natural numbers is meaningless for classification.
+We therefore convert the labels to a 1-hot representation.
+
+
+
+
+
+
+First label vector: tensor([0., 0., 1., 0., 0., 0., 0., 0., 0., 0.])
+
+
+
+
+Hyperparameter Optimization
+The Falkon algorithm has three main kinds of hyper-parameters:
+
+The kernel parameters. Most commonly when using the Gaussian kernel these are the length-scales for each dimension in the data
+The amount of regularization \(\lambda\) (the penalty term, which helps prevent overfitting).
+The Nystrom centers. These are sometimes not considered hyper parameters, and they are chosen uniformly at random from the training set. However, it is possible to find better centers!
+
+In the grid-search notebook all three types of hyperparameters are considered, but only with a small number of options in each category.
+With automatic hyperparameter optimization, which is based on a gradient descent-type procedure, we can instead define a much larger search space for the hyperparameters.
+In particular, we will optimize the kernel length-scale (one for each dimension in the data), the regularization and the Nystrom centers. Optimizing the Nystrom centers is especially useful since it allows to reduce their number, thus speeding up the whole training and inference process!
+
+Several functions and classes used for hyperparameter optimization reside in the falkon.hopt module.
+Here we import the NystromCompReg class which defines the optimization objective.
+
+
+We have to initialize the hyperparameters to some default values. In particular we choose some random initial points from the dataset as the initial Nystrom centers.
+
+Now we initialize the loss function (NystromCompReg ) and the optimization algorithm (Adam).
+
+And start training. Each iteration corresponds to a single gradient step over the whole dataset.
+
+
+
+
+
+
+Epoch 0 Loss 40310.387 Error 1.67%
+Epoch 1 Loss 35137.203 Error 1.67%
+Epoch 2 Loss 24207.420 Error 1.74%
+Epoch 3 Loss 17153.834 Error 2.02%
+Epoch 4 Loss 12490.971 Error 1.95%
+Epoch 5 Loss 9343.500 Error 1.67%
+Epoch 6 Loss 7062.557 Error 1.32%
+Epoch 7 Loss 5359.402 Error 1.18%
+Epoch 8 Loss 4108.987 Error 1.04%
+Epoch 9 Loss 3210.683 Error 0.90%
+Epoch 10 Loss 2569.709 Error 0.63%
+Epoch 11 Loss 2121.547 Error 0.49%
+Epoch 12 Loss 1814.989 Error 0.49%
+Epoch 13 Loss 1606.087 Error 0.42%
+Epoch 14 Loss 1460.298 Error 0.42%
+Epoch 15 Loss 1353.231 Error 0.21%
+Epoch 16 Loss 1269.496 Error 0.21%
+Epoch 17 Loss 1199.922 Error 0.14%
+Epoch 18 Loss 1139.733 Error 0.14%
+Epoch 19 Loss 1088.957 Error 0.07%
+Epoch 20 Loss 1043.391 Error 0.07%
+Epoch 21 Loss 1002.747 Error 0.07%
+Epoch 22 Loss 966.971 Error 0.07%
+Epoch 23 Loss 935.513 Error 0.07%
+Epoch 24 Loss 907.745 Error 0.07%
+Epoch 25 Loss 883.216 Error 0.07%
+Epoch 26 Loss 861.607 Error 0.07%
+Epoch 27 Loss 842.661 Error 0.07%
+Epoch 28 Loss 826.113 Error 0.07%
+Epoch 29 Loss 811.653 Error 0.07%
+Epoch 30 Loss 798.939 Error 0.07%
+Epoch 31 Loss 787.643 Error 0.07%
+Epoch 32 Loss 777.481 Error 0.07%
+Epoch 33 Loss 768.217 Error 0.07%
+Epoch 34 Loss 759.674 Error 0.07%
+Epoch 35 Loss 751.720 Error 0.07%
+Epoch 36 Loss 744.265 Error 0.07%
+Epoch 37 Loss 737.261 Error 0.07%
+Epoch 38 Loss 730.671 Error 0.07%
+Epoch 39 Loss 724.491 Error 0.07%
+Epoch 40 Loss 718.708 Error 0.07%
+Epoch 41 Loss 713.325 Error 0.07%
+Epoch 42 Loss 708.335 Error 0.07%
+Epoch 43 Loss 703.722 Error 0.07%
+Epoch 44 Loss 699.472 Error 0.07%
+Epoch 45 Loss 695.557 Error 0.07%
+Epoch 46 Loss 691.935 Error 0.00%
+Epoch 47 Loss 688.580 Error 0.00%
+Epoch 48 Loss 685.445 Error 0.00%
+Epoch 49 Loss 682.500 Error 0.00%
+
+
+
+
+The optimized parameters are available as attributes of the model object:
+
+
+
+
+
+
+Final value of lambda: 4.428e-05
+Final value of sigma: tensor([2.3884, 2.6252, 2.7859, 2.7036, 2.6799, 2.7698, 2.7344, 3.0071, 2.5923,
+ 2.7421, 2.7629, 2.7525, 2.6989, 2.7232, 2.8216, 2.8830, 2.6465, 2.7994,
+ 2.7406, 2.7082, 2.8053, 2.6552, 2.7757, 2.6979, 2.3884, 2.7573, 2.7242,
+ 2.6681, 2.7780, 2.7649, 2.7010, 2.7274, 2.3884, 2.6608, 2.7477, 2.7604,
+ 2.8015, 2.7581, 2.6359, 2.3884, 2.8348, 2.7656, 2.6553, 2.6672, 2.7252,
+ 2.7499, 2.6721, 2.6589, 2.9603, 2.7725, 2.7143, 2.7481, 2.7307, 2.7093,
+ 2.7251, 2.7213, 2.6326, 2.6092, 2.7600, 2.7744, 2.7885, 2.6976, 2.6838,
+ 2.7062])
+
+
+We can compare the obtained results with the grid-search notebook.
+A grid-search with 1000 centers and 32 grid-points resulted in choosing a model with sigma=5, and lambda=1e-7.
+The NystromCompReg objective with half the centers led to obtain a lower test error (0.83% vs. 1.11%) after 50 training epochs. However the obtained hyperparameters are quite different: lambda in particular is much higher at 1.1e-4.
+This objective in particular has quite a high bias and tends to choose simple models instead of more complex ones (remember that since lambda is a regularizer, it can be seen as one possible measure of model complexity). In practice this is often not a problem, as we observed in this case.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/examples/hyperopt.ipynb b/examples/hyperopt.ipynb
new file mode 100644
index 00000000..639eaae3
--- /dev/null
+++ b/examples/hyperopt.ipynb
@@ -0,0 +1,498 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "d237711a",
+ "metadata": {
+ "collapsed": false,
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
+ "source": [
+ "# Automatic Hyperparameter Optimization"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "id": "73b68133",
+ "metadata": {
+ "collapsed": false,
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[pyKeOps]: Warning, no cuda detected. Switching to cpu only.\n"
+ ]
+ }
+ ],
+ "source": [
+ "%matplotlib inline\n",
+ "from sklearn import datasets, model_selection\n",
+ "import numpy as np\n",
+ "np.random.seed(30)\n",
+ "import torch\n",
+ "import matplotlib.pyplot as plt\n",
+ "plt.style.use('ggplot')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "7ee63dc4",
+ "metadata": {},
+ "source": [
+ "## Load the data\n",
+ "\n",
+ "We use the **digits** dataset, which is distributed alongside scikit-learn."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "3da6ecb4",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "X, Y = datasets.load_digits(return_X_y=True)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "6b948acf",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAgQAAACzCAYAAAD2UgRyAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAAALMUlEQVR4nO3dXWjWZQPH8Z+mz9JenFFRK9oyyVYgo4yoFXOUFXmwdRBBslwRJEFUdKBRklKRHqRBnSyolr0deBBB9IKrzJdeTOw+CISIZdCBUrFBYljh/zl4oKc91pPpVffu7fOBDpq3v12uK/jy596cUlVVFQBgUpta7wMAAPUnCAAAQQAACAIAIIIAAIggAAAiCH7Xnj17MmXKlGzbtm1c7MDRcI9pdO7wP2tCBkF/f3+uueaaeh/jqL344ou55JJLMnv27MyYMSPt7e154okn4kdGTC6Nfo+T5M0330xHR0eamprS1taWdevW1ftI/IMa/Q5v2bIlPT09aW1tzZQpU/Loo4/W+0h/q2n1PgCHO/3007Ny5crMmzcvTU1N2bp1a+66665MmzYt99xzT72PB0dk586d6enpyf33359XX301n3zySZYtW5aZM2dm2bJl9T4e/Kn9+/fnwgsvzC233JJ777233sf5203IJwR/5pVXXslll12WWbNm5dRTT83ixYvzxRdfHPa6r776KldffXVmzJiRc889Ny+//PKYX9+3b1/6+/tz2mmn5aSTTkpnZ2e2bNlyzOe77rrr0tvbm/b29syZMydLly7Ntddem82bNx/zNhPHeL/H69aty6WXXpo1a9akvb09/f39ufvuu7N27dpj3mZiGO93+IYbbsjjjz+em2++OU1NTce8N95NyiA4ePBgVq5cmV27dmXTpk057rjjsnjx4vz0009jXrd8+fLcfvvtqdVqWbJkSfr6+rJz584kyY8//pju7u788MMPeeutt/LZZ5/lhhtuyKJFi7J79+4//NwLFy7MwoULj/isVVVlx44d2b59e7q7u4/qz8vENN7v8fbt23P99deP+dj111+fPXv25Jtvvjm6PzQTyni/w5NONQEtXbq0uvrqq4/49d9//32VpNq2bVtVVVX11VdfVUmqhx56aMzrLr/88mrJkiVVVVXV888/X5111lnVzz//POY13d3d1T333DNmZ+vWrb/+el9fX9XX1/enZxodHa1OOOGEavr06dXUqVOr1atXH/Gfh4mh0e/x9OnTq4GBgTEf+/zzz6sk1Y4dO474z0XjavQ7/Futra3VI488csSvb0ST8j0EtVotq1evTq1Wy3fffffrm/W+/vrrdHZ2/vq6yy+/fMzv6+zszLvvvpsk+fTTT7N37940NzePec3BgwczY8aMP/zcGzZsOKIznnTSSanVajlw4EA+/PDDPPDAA2lpackdd9xxRL+fia8R7vEfmTJlyjH9fiaGRr7DE9GkC4IDBw7k2muvzZVXXpnnnnsuZ5xxRpLkoosuOuwx1f+qfvMu/0OHDqW9vT2vvfbaYa+bOXPmMZ9z6tSpmTt3bpJk/vz5GRkZyUMPPSQISNIY9/jMM8/M3r17x3xs3759SfLreZm8GuEOTzaT7j0Eu3fvzrfffpvHHnss3d3daW9vz8jIyO9+S9/HH3885t8/+uijtLe3J0kWLFiQ4eHhnHzyyZk7d+6Yf1paWoqf+9ChQzl48GDxXRpTI9zjzs7OvPPOO2M+9vbbb6e1tTVnn332MW3T+BrhDk82E/YJwf79+1Or1cZ87Pjjj09ra2uampry1FNP5f7778+ePXuyYsWK332E+eyzz+aCCy7IggUL8tJLL+Wjjz7Kk08+mSRZsmRJ1q9fn8WLF+exxx7L+eefn3379uW9995Le3t7ent7f/dct956a5L//7jq4YcfzlVXXZU5c+bk559/zpYtW7J27drcdtttR/W1oHE18j2+7777csUVV+TBBx9MX19fduzYkaee
eirr168/qq8FjamR7/D+/fvz5ZdfJkl++umn7N27N7VaLSeeeOKvT3AnlLq9e+FvtHTp0irJYf/Mmzevqqqq2rhxYzV37tyqqamp6ujoqDZv3lwdd9xx1fPPP19V1X/fgLJhw4aqq6urampqqlpbW6sNGzaM+TzfffddtWzZsqqlpaWaPn161dLSUvX29la7du0as/PbN7J0dXVVXV1d//f89957b3XeeedVxx9/fNXc3FxdfPHF1dNPP1398ssv5b5IjHuNfo+rqqreeOONav78+dW//vWv6pxzzqmeeOKJMl8cGkKj3+H333//d89/JHe/EU2pKj/+DgAmu0n3HgIA4HCCAAAQBACAIAAAIggAgPzJzyEYjz9edPny5UV21qxZU2QnSYaHh4vsLFiwoMhOkoyMjBTbKqUe39AyHu/w//6I1aO1atWqIjvJf/7e+hJK/o2cf/T94/VUr2/KGo/3eDwqdf9K/qyM119/vdhWKX90jz0hAAAEAQAgCACACAIAIIIAAIggAAAiCACACAIAIIIAAIggAAAiCACACAIAIIIAAIggAAAiCACACAIAIIIAAEgy7Z/4JGvWrCm2ddNNNxXZufPOO4vsJMnAwECRnUsuuaTITpIMDQ0V26KswcHBIjs9PT1FdpJk9erVRXb6+/uL7JTcKvX15u/R1tZWbKurq6vYVimvv/56vY9wxDwhAAAEAQAgCACACAIAIIIAAIggAAAiCACACAIAIIIAAIggAAAiCACACAIAIIIAAIggAAAiCACACAIAIIIAAIggAACSTPsnPskzzzxTbGvt2rVFdnbu3FlkJ0mGh4eL7AwNDRXZoby2trZiWz09PUV2XnjhhSI7SbJq1aoiO83NzUV2kqSjo6PYFuWV+m89ODhYZKek0dHReh+hLjwhAAAEAQAgCACACAIAIIIAAIggAAAiCACACAIAIIIAAIggAAAiCACACAIAIIIAAIggAAAiCACACAIAIIIAAEgy7Z/4JMPDw8W25syZM652kmRoaKjIzuzZs4vsJMnIyEixLZLR0dF6H+Ewg4OD9T7CYcbj14n/amtrK7ZV6v51dXUV2Slpst5jTwgAAEEAAAgCACCCAACIIAAAIggAgAgCACCCAACIIAAAIggAgAgCACCCAACIIAAAIggAgAgCACCCAACIIAAAIggAgCTT6n2Av2p4eLjIzimnnFJkJ0k2bdo0rnaSZNGiRUV2RkZGiuw0uo6OjnofAY7ZwoULi22Njo4W2ent7S2ykyQvvPBCkZ3NmzcX2Wk0nhAAAIIAABAEAEAEAQAQQQAARBAAABEEAEAEAQAQQQAARBAAABEEAEAEAQAQQQAARBAAABEEAEAEAQAQQQAAJJlW7wPUy8jISLGtRYsWFdkZGBgospMky5cvL7KzYsWKIjuNrlar1fsIh5k1a1axrebm5iI7HR0dRXaSZNWqVcW2+I/BwcFxt1Xq7iXl/p8oeY8biScEAIAgAAAEAQAQQQAARBAAABEEAEAEAQAQQQAARBAAABEEAEAEAQAQQQAARBAAABEEAEAEAQAQQQAARBAAABEEAECSafU+wF+1Zs2aIjtDQ0NFdpJk9uzZRXauueaaIjtJsnHjxmJbJKOjo8W2PvjggyI79913X5GdJLnxxhuL7JT8OtVqtWJbjF/Nzc31PsJh2tra6n2EuvCEAAAQBACAIAAAIggAgAgCACCCAACIIAAAIggAgAgCACCCAACIIAAAIggAgAgCACCCAACIIAAAIggAgAgCACDJtHof4K8aGRkpsjMwMFBkp6SNGzcW27rzzjuLbVFWb29vkZ0nn3yyyE6SdHR0FNnp7+8vssPk0dzcXO8jHKZWq9X7CHXhCQEAIAgAAEEAAEQQAAARBABABAEAEEEAAEQQAAARBABABAEAEEEAAEQQAAARBABABAEAEEEAAEQQAAARBABABAEAkGRKVVVVvQ8BANSXJwQAgCAAAAQBABBBAABEEAAAEQQA
QJJ/A0gVBAzdQ+KoAAAAAElFTkSuQmCC\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "def plot_digit(ax, x, y):\n",
+ " ax.imshow(x.reshape((8, 8)), cmap='gray')\n",
+ " ax.set_title(\"Label: %d\" % y)\n",
+ " ax.set_axis_off()\n",
+ "\n",
+ "# Plot three sample images from the dataset\n",
+ "fig, ax = plt.subplots(ncols=3, figsize=(9, 6))\n",
+ "plot_digit(ax[0], X[3], Y[3])\n",
+ "plot_digit(ax[1], X[10], Y[10])\n",
+ "plot_digit(ax[2], X[42], Y[42])"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "0aa131a8",
+ "metadata": {},
+ "source": [
+ "## Split into training and test sets\n",
+ "\n",
+ "We split the data into a training set with 80% of the samples and a test set with the remaining 20%."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "6b5535cc",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "X_train, X_test, Y_train, Y_test = model_selection.train_test_split(\n",
+ " X, Y, test_size=0.2, random_state=10, shuffle=True)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "eef7ac94",
+ "metadata": {},
+ "source": [
+ "## Data Preprocessing\n",
+ "\n",
+ "As always with Falkon we must:\n",
+ " 1. Convert from numpy arrays to torch tensors\n",
+ " 2. Convert data and labels to the same data-type (in this case float32)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "id": "a3a1469f",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "X_train = torch.from_numpy(X_train).to(dtype=torch.float32)\n",
+ "X_test = torch.from_numpy(X_test).to(dtype=torch.float32)\n",
+ "Y_train = torch.from_numpy(Y_train)\n",
+ "Y_test = torch.from_numpy(Y_test)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "12fcc9d3",
+ "metadata": {},
+ "source": [
+ "Normalizing the data is always a good idea, and it becomes even more important with automatic hyperparameter optimization. \n",
+ "\n",
+ "Here we use the global mean and standard deviation of the training set for **z-score normalization**."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "id": "1450e0f7",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# z-score normalization\n",
+ "train_mean = X_train.mean()\n",
+ "train_std = X_train.std()\n",
+ "X_train -= train_mean\n",
+ "X_train /= train_std\n",
+ "X_test -= train_mean\n",
+ "X_test /= train_std"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "938d4389",
+ "metadata": {},
+ "source": [
+ "Since Falkon optimizes with respect to the square loss, using ordinal labels (e.g. 1, 4, 5) is not ideal since closeness in the natural numbers is meaningless for classification.\n",
+ "\n",
+ "We therefore convert the labels to a **1-hot representation**."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "id": "87729460",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "First label vector: tensor([0., 0., 1., 0., 0., 0., 0., 0., 0., 0.])\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Convert labels to 1-hot\n",
+ "eye = torch.eye(10, dtype=torch.float32)\n",
+ "Y_train = eye[Y_train]\n",
+ "Y_test = eye[Y_test]\n",
+ "print(\"First label vector: \", Y_train[0])"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "66bf3881",
+ "metadata": {},
+ "source": [
+ "## Hyperparameter Optimization\n",
+ "\n",
+ "The Falkon algorithm has three main kinds of hyper-parameters:\n",
+ " 1. The kernel parameters. Most commonly when using the Gaussian kernel these are the length-scales for each dimension in the data\n",
+ " 2. The amount of regularization $\\lambda$ (the penalty term, which helps prevent overfitting).\n",
+ " 3. The Nystrom centers. These are sometimes not considered hyper parameters, and they are chosen uniformly at random from the training set. However, it is possible to find better centers!\n",
+ " \n",
+ "In the [grid-search](falkon_cv.ipynb) notebook all three types of hyperparameters are considered, but only with a small number of options in each category.\n",
+ "\n",
+ "With automatic hyperparameter optimization, which is based on a gradient descent-type procedure, we can instead define a much larger search space for the hyperparameters.\n",
+ "\n",
+ "In particular, we will optimize the kernel length-scale (one for each dimension in the data), the regularization and the Nystrom centers. Optimizing the Nystrom centers is especially useful since it allows to reduce their number, thus speeding up the whole training and inference process!"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "id": "1ad2b54d",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def mclass_loss(true, pred):\n",
+ " true = torch.argmax(true, dim=1)\n",
+ " pred = torch.argmax(pred, dim=1)\n",
+ " return torch.mean((true != pred).to(torch.float32))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "475c9d14",
+ "metadata": {},
+ "source": [
+ "Several functions and classes used for hyperparameter optimization reside in the `falkon.hopt` module.\n",
+ "\n",
+ "Here we import the `NystromCompReg` class which defines the optimization objective."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "id": "86095c91",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import falkon.hopt\n",
+ "from falkon import FalkonOptions\n",
+ "from falkon.hopt.objectives import NystromCompReg"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "id": "4e2132fe",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "flk_opt = FalkonOptions(use_cpu=not torch.cuda.is_available())"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "6b412208",
+ "metadata": {},
+ "source": [
+ "We have to initialize the hyperparameters to some default values. In particular we choose some random initial points from the dataset as the initial Nystrom centers."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "id": "414616aa",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "sigma_init = torch.tensor([1.0] * X_train.shape[1], dtype=torch.float32).requires_grad_()\n",
+ "kernel = falkon.kernels.GaussianKernel(sigma=sigma_init, opt=flk_opt)\n",
+ "\n",
+ "penalty_init = torch.tensor(1e-5, dtype=torch.float32)\n",
+ "centers_init = X_train[np.random.choice(X_train.shape[0], size=(500, ), replace=False)].clone()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "a311c944",
+ "metadata": {},
+ "source": [
+ "Now we initialize the loss function (`NystromCompReg`) and the optimization algorithm (Adam)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "id": "38de0fde",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "model = NystromCompReg(\n",
+ " kernel=kernel, penalty_init=penalty_init, centers_init=centers_init, # The initial hp values\n",
+ " opt_penalty=True, opt_centers=True, # Whether the various hps are to be optimized\n",
+ " )\n",
+ "opt_hp = torch.optim.Adam(model.parameters(), lr=0.1)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "e08fae3d",
+ "metadata": {},
+ "source": [
+ "And start training. Each iteration corresponds to a single gradient step over the whole dataset."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "id": "79a9552a",
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Epoch 0 Loss 40310.387 Error 1.67%\n",
+ "Epoch 1 Loss 35137.203 Error 1.67%\n",
+ "Epoch 2 Loss 24207.420 Error 1.74%\n",
+ "Epoch 3 Loss 17153.834 Error 2.02%\n",
+ "Epoch 4 Loss 12490.971 Error 1.95%\n",
+ "Epoch 5 Loss 9343.500 Error 1.67%\n",
+ "Epoch 6 Loss 7062.557 Error 1.32%\n",
+ "Epoch 7 Loss 5359.402 Error 1.18%\n",
+ "Epoch 8 Loss 4108.987 Error 1.04%\n",
+ "Epoch 9 Loss 3210.683 Error 0.90%\n",
+ "Epoch 10 Loss 2569.709 Error 0.63%\n",
+ "Epoch 11 Loss 2121.547 Error 0.49%\n",
+ "Epoch 12 Loss 1814.989 Error 0.49%\n",
+ "Epoch 13 Loss 1606.087 Error 0.42%\n",
+ "Epoch 14 Loss 1460.298 Error 0.42%\n",
+ "Epoch 15 Loss 1353.231 Error 0.21%\n",
+ "Epoch 16 Loss 1269.496 Error 0.21%\n",
+ "Epoch 17 Loss 1199.922 Error 0.14%\n",
+ "Epoch 18 Loss 1139.733 Error 0.14%\n",
+ "Epoch 19 Loss 1088.957 Error 0.07%\n",
+ "Epoch 20 Loss 1043.391 Error 0.07%\n",
+ "Epoch 21 Loss 1002.747 Error 0.07%\n",
+ "Epoch 22 Loss 966.971 Error 0.07%\n",
+ "Epoch 23 Loss 935.513 Error 0.07%\n",
+ "Epoch 24 Loss 907.745 Error 0.07%\n",
+ "Epoch 25 Loss 883.216 Error 0.07%\n",
+ "Epoch 26 Loss 861.607 Error 0.07%\n",
+ "Epoch 27 Loss 842.661 Error 0.07%\n",
+ "Epoch 28 Loss 826.113 Error 0.07%\n",
+ "Epoch 29 Loss 811.653 Error 0.07%\n",
+ "Epoch 30 Loss 798.939 Error 0.07%\n",
+ "Epoch 31 Loss 787.643 Error 0.07%\n",
+ "Epoch 32 Loss 777.481 Error 0.07%\n",
+ "Epoch 33 Loss 768.217 Error 0.07%\n",
+ "Epoch 34 Loss 759.674 Error 0.07%\n",
+ "Epoch 35 Loss 751.720 Error 0.07%\n",
+ "Epoch 36 Loss 744.265 Error 0.07%\n",
+ "Epoch 37 Loss 737.261 Error 0.07%\n",
+ "Epoch 38 Loss 730.671 Error 0.07%\n",
+ "Epoch 39 Loss 724.491 Error 0.07%\n",
+ "Epoch 40 Loss 718.708 Error 0.07%\n",
+ "Epoch 41 Loss 713.325 Error 0.07%\n",
+ "Epoch 42 Loss 708.335 Error 0.07%\n",
+ "Epoch 43 Loss 703.722 Error 0.07%\n",
+ "Epoch 44 Loss 699.472 Error 0.07%\n",
+ "Epoch 45 Loss 695.557 Error 0.07%\n",
+ "Epoch 46 Loss 691.935 Error 0.00%\n",
+ "Epoch 47 Loss 688.580 Error 0.00%\n",
+ "Epoch 48 Loss 685.445 Error 0.00%\n",
+ "Epoch 49 Loss 682.500 Error 0.00%\n"
+ ]
+ }
+ ],
+ "source": [
+ "tr_loss, tr_err = [], []\n",
+ "\n",
+ "for epoch in range(50):\n",
+ " opt_hp.zero_grad()\n",
+ " loss = model(X_train, Y_train)\n",
+ " loss.backward()\n",
+ " opt_hp.step()\n",
+ "\n",
+ " tr_loss.append(loss.item())\n",
+ " tr_err.append(mclass_loss(Y_train, model.predict(X_train)))\n",
+ " print(f\"Epoch {epoch} Loss {tr_loss[-1]:.3f} Error {tr_err[-1] * 100:.2f}%\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "id": "86d2e2ea",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Test error: 0.83%\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Evaluate the test error:\n",
+ "ts_preds = model.predict(X_test)\n",
+ "print(f\"Test error: {mclass_loss(Y_test, ts_preds) * 100:.2f}%\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "89df66d9",
+ "metadata": {},
+ "source": [
+ "The optimized parameters are available as attributes of the `model` object:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "id": "3fe66880",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Final value of lambda: 4.428e-05\n",
+ "Final value of sigma: tensor([2.3884, 2.6252, 2.7859, 2.7036, 2.6799, 2.7698, 2.7344, 3.0071, 2.5923,\n",
+ " 2.7421, 2.7629, 2.7525, 2.6989, 2.7232, 2.8216, 2.8830, 2.6465, 2.7994,\n",
+ " 2.7406, 2.7082, 2.8053, 2.6552, 2.7757, 2.6979, 2.3884, 2.7573, 2.7242,\n",
+ " 2.6681, 2.7780, 2.7649, 2.7010, 2.7274, 2.3884, 2.6608, 2.7477, 2.7604,\n",
+ " 2.8015, 2.7581, 2.6359, 2.3884, 2.8348, 2.7656, 2.6553, 2.6672, 2.7252,\n",
+ " 2.7499, 2.6721, 2.6589, 2.9603, 2.7725, 2.7143, 2.7481, 2.7307, 2.7093,\n",
+ " 2.7251, 2.7213, 2.6326, 2.6092, 2.7600, 2.7744, 2.7885, 2.6976, 2.6838,\n",
+ " 2.7062])\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(\"Final value of lambda: %.3e\" % (model.penalty))\n",
+ "print(\"Final value of sigma: %s\" % (model.kernel.sigma.detach()))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1db747ea",
+ "metadata": {},
+ "source": [
+ "We can compare the obtained results with the grid-search notebook.\n",
+ "\n",
+ "A grid-search with 1000 centers and 32 grid-points resulted in choosing a model with sigma=5, and lambda=1e-7.\n",
+ "\n",
+ "The `NystromCompReg` objective with half the centers led to obtain a lower test error (0.83% vs. 1.11%) after 50 training epochs. However the obtained hyperparameters are quite different: lambda in particular is much higher at 1.1e-4.\n",
+ "\n",
+ "This objective in particular has quite a high bias and tends to choose simple models instead of more complex ones (remember that since lambda is a regularizer, it can be seen as one possible measure of model complexity). In practice this is often not a problem, as we observed in this case.\n"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.9"
+ },
+ "toc": {
+ "base_numbering": 1,
+ "nav_menu": {},
+ "number_sections": true,
+ "sideBar": true,
+ "skip_h1_title": false,
+ "title_cell": "Table of Contents",
+ "title_sidebar": "Contents",
+ "toc_cell": false,
+ "toc_position": {},
+ "toc_section_display": true,
+ "toc_window_display": false
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/examples/logistic_falkon.html b/examples/logistic_falkon.html
new file mode 100644
index 00000000..8a57191d
--- /dev/null
+++ b/examples/logistic_falkon.html
@@ -0,0 +1,681 @@
+
+
+
+
+
+
+
+
+ Introducing Logistic Falkon — falkon 0.9.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ falkon
+
+
+
+
+
+
+
+
+
+Introducing Logistic Falkon
+
+Introduction
+In this notebook we use a synthetic dataset to show how the LogisticFalkon estimator works.
+We compare Falkon – which is trained on the squared loss – to its logistic loss version, and verify that on a binary classification problem logistic falkon achieves better accuracy.
+
+
+
+
+
+
+[pyKeOps]: Warning, no cuda detected. Switching to cpu only.
+
+
+
+
+Load the data
+We use a synthetic dataset in two dimensions, which consists of two concentric circles labeled with different classes.
+We introduce quite a bit of noise so that the estimators will not trivially achieve zero error, and we draw 10000 samples to demonstrate the speed of the Falkon library.
+
+
+
+
+
+
+
+
+
+
+
+Split into training and test sets
+We split the data into a training set with 80% of the samples and a test set with the remaining 20%.
+
+
+
+Data Preprocessing
+The minimal preprocessing steps are:
+
+Convert from numpy arrays to torch tensors
+Convert data and labels to the same data-type (in this case float32)
+Reshape the labels so that they have two dimensions. This is not strictly necessary, but Falkon internally works with 2D tensors, so the output of the Falkon.predict method will always be 2D.
+Change the labels from 0, 1 to -1, 1. Note that Logistic Falkon uses the following formula for the logistic loss:
+
+\[\log(1 + e^{-y_1 y_2})\]
+where \(y_1\) and \(y_2\) are labels and predictions respectively, which only makes sense if the labels are -1, 1.
+
+
+
+
+
+
+Define the Falkon model
+We use the same base parameters for both models: a small amount of regularization (\(10^{-7}\) ) and a Gaussian kernel with \(\sigma = 5\) . The number of inducing points is set to 1000 which is adequate for the problem which is very easy.
+
+
+
+
+Define Logistic Falkon model
+The logistic Falkon estimator uses the same base parameters as Falkon.
+However, instead of specifying a single value for regularization, we need to specify a regularization path : a series of decreasing amounts of regularization. For each regularization value we also need to specify the number of iterations of conjugate gradient descent, which will be performed for that specific regularization value.
+We validated empirically on a wide number of binary classification problems that a good scheme to set the regularization path is to use three short (i.e. 4 iterations) runs with increasing regularization, and then a few longer (here we used 8 iterations) runs with the final regularization value (here 1e-7 , the same as for Falkon).
+The LogisticFalkon estimator also accepts a mandatory loss parameter, which should be set to an instance of the LogisticLoss class. While the LogisticLoss is the only implemented loss at the moment, the learning algorithm is defined for any generalized self-concordant loss, and we plan to extend the library to support more functions.
+An additional feature we show here is error monitoring : By passing an error function to the estimator (see parameter error_fn ), the estimator will print the training error at every iteration (how often such prints occur is governed by the error_every parameter). This can be very useful to determine if it is possible to stop training early, and in general to monitor the training process.
+
+
+
+Train both models
+Training Falkon for 20 iterations (default value) takes about half a second on a laptop.
+Clearly, since the logistic falkon runs about 28 iterations (the sum of values in iter_list ), it is necessarily going to be slower. Further, logistic falkon needs to recompute part of the preconditioner at every step of the Newton method leading to further slowdowns. On the same laptop, the logistic falkon algorithm takes around 1.5s.
+
+
+
+
+
+
+CPU times: user 2.35 s, sys: 59.3 ms, total: 2.41 s
+Wall time: 604 ms
+
+
+
+
+
+
+Falkon(M=1000, center_selection=<falkon.center_selection.UniformSelector object at 0x7f9f7d142b20>, kernel=GaussianKernel(sigma=Parameter containing:
+tensor([1.], dtype=torch.float64)), options=FalkonOptions(keops_acc_dtype='auto', keops_sum_scheme='auto', keops_active='no', keops_memory_slack=0.7, chol_force_in_core=False, chol_force_ooc=False, chol_par_blk_multiplier=2, pc_epsilon_32=1e-05, pc_epsilon_64=1e-13, cpu_preconditioner=False, cg_epsilon_32=1e-07, cg_epsilon_64=1e-15, cg_tolerance=1e-07, cg_full_gradient_every=10, cg_differential_convergence=False, debug=False, use_cpu=True, max_gpu_mem=inf, max_cpu_mem=inf, compute_arch_speed=False, no_single_kernel=True, min_cuda_pc_size_32=10000, min_cuda_pc_size_64=30000, min_cuda_iter_size_32=300000000, min_cuda_iter_size_64=900000000, never_store_kernel=False, store_kernel_d_threshold=1200, num_fmm_streams=2), penalty=1e-07)
+
+
+
+
+
+
+
+
+Iteration 0 - penalty 1.000000e-03 - sub-iterations 4
+Iteration 0 - Elapsed 0.27s - training loss 0.4697 - training error 0.1544
+Iteration 1 - penalty 1.000000e-05 - sub-iterations 4
+Iteration 1 - Elapsed 0.51s - training loss 0.3774 - training error 0.1551
+Iteration 2 - penalty 1.000000e-07 - sub-iterations 4
+Iteration 2 - Elapsed 0.74s - training loss 0.3575 - training error 0.1534
+Iteration 3 - penalty 1.000000e-07 - sub-iterations 8
+Iteration 3 - Elapsed 1.12s - training loss 0.3554 - training error 0.1530
+Iteration 4 - penalty 1.000000e-07 - sub-iterations 8
+Iteration 4 - Elapsed 1.50s - training loss 0.3554 - training error 0.1530
+CPU times: user 6.39 s, sys: 30.9 ms, total: 6.42 s
+Wall time: 1.6 s
+
+
+
+
+
+
+LogisticFalkon(M=1000, center_selection=<falkon.center_selection.UniformSelector object at 0x7f9f7d150250>, error_fn=<function binary_loss at 0x7f9f7d13eaf0>, iter_list=[4, 4, 4, 8, 8], kernel=GaussianKernel(sigma=Parameter containing:
+tensor([1.], dtype=torch.float64)), loss=LogisticLoss(kernel=GaussianKernel(sigma=Parameter containing:
+tensor([1.], dtype=torch.float64))), options=FalkonOptions(keops_acc_dtype='auto', keops_sum_scheme='auto', keops_active='no', keops_memory_slack=0.7, chol_force_in_core=False, chol_force_ooc=False, chol_par_blk_multiplier=2, pc_epsilon_32=1e-05, pc_epsilon_64=1e-13, cpu_preconditioner=False, cg_epsilon_32=1e-07, cg_epsilon_64=1e-15, cg_tolerance=1e-07, cg_full_gradient_every=10, cg_differential_convergence=False, debug=False, use_cpu=True, max_gpu_mem=inf, max_cpu_mem=inf, compute_arch_speed=False, no_single_kernel=True, min_cuda_pc_size_32=10000, min_cuda_pc_size_64=30000, min_cuda_iter_size_32=300000000, min_cuda_iter_size_64=900000000, never_store_kernel=False, store_kernel_d_threshold=1200, num_fmm_streams=2), penalty_list=[0.001, 1e-05, 1e-07, 1e-07, 1e-07])
+
+
+
+
+Testing
+However, the price paid in higher training time leads to a lower training error.
+We found that, on a variety of binary classification datasets, logistic falkon obtains a slightly lower error than the vanilla Falkon algorithm.
+
+
+
+
+
+
+Falkon model -- Error: 17.05%
+Logistic Falkon model -- Error: 16.90%
+
+
+
+
+
+Plot predictions
+In the plot we have the outer and inner circles which are correct predictions, and the circles in the middle which are the mispredicted points. Since we added lots of noise to the dataset, perfect predictions are not possible (there is no clear boundary between the two classes).
+Here the error difference between Falkon and Logistic Falkon is very hard to distinguish by eye. However, there may be other applications where the best possible classification error is desired.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/examples/logistic_falkon.ipynb b/examples/logistic_falkon.ipynb
new file mode 100644
index 00000000..6dc97caf
--- /dev/null
+++ b/examples/logistic_falkon.ipynb
@@ -0,0 +1,427 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Introducing Logistic Falkon\n",
+ "\n",
+ "## Introduction\n",
+ "\n",
+ "In this notebook we use a synthetic dataset to show how the `LogisticFalkon` estimator works.\n",
+ "\n",
+ "We compare Falkon -- which is trained on the squared loss -- to its logistic loss version, and verify that on a binary classification problem logistic falkon achieves better accuracy."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[pyKeOps]: Warning, no cuda detected. Switching to cpu only.\n"
+ ]
+ }
+ ],
+ "source": [
+ "%matplotlib inline\n",
+ "from sklearn import datasets, model_selection\n",
+ "import torch\n",
+ "import matplotlib.pyplot as plt\n",
+ "plt.style.use('ggplot')\n",
+ "\n",
+ "import falkon"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Load the data\n",
+ "\n",
+ "We use a synthetic dataset in two dimensions, which consists of two concentric circles labeled with different classes.\n",
+ "\n",
+ "We introduce quite a bit of noise so that the estimators will not trivially achieve zero error, and we draw 10000 samples to demonstrate the speed of the `Falkon` library."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "X, Y = datasets.make_circles(\n",
+ " n_samples=10_000, shuffle=False, noise=0.1, random_state=122)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAbQAAAGbCAYAAACszmWlAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9149lWZbeCf722UdebdrczFxrFeHKPCIyIjMrWckSKHZNNZtTTbIHGAzAB6IeCP4VBF8IEATIhwExHNHDbnLIbpLdZCdZxVKZId3DI8K1lqbFtSuPPmfPw75mbu7hHiIzUnjk+QBDhJtdce655+xvr7W+9S2hlFIUKFCgQIECrziMX/YBFChQoECBAt8ECkIrUKBAgQLfChSEVqBAgQIFvhUoCK1AgQIFCnwrUBBagQIFChT4VqAgtAIFChQo8K2A+cs+gC/DwsLCL/sQfm4YHR1lbW3tl30YvzIozsezKM7HsyjOx1P8Op+Lqampl/6tiNAKFChQoMC3AgWhFShQoECBbwUKQitQoECBAt8KFIRWoECBAgW+FSgIrUCBAgUKfCtQEFqBAgUKFPhWoCC0AgUKFCjwrUBBaAUKFChQ4FuBgtAKFChQoMC3AgWhFShQoECBbwUKQitQoECBAt8KFIRWoECBAgW+FSgIrUCBAgUKfCtQEFqBAgUKFPhWoCC0AgW+hVBZhur3UFn2yz6UAgV+YfiVn4dWoECBrweVZajPPoI4AtuB188jpPxlH1aBAj93FBFagQLfNoQBxBHCK2lSC4Nf9hEVKPALQUFoBQp82+B6YDuowNcRmuv9so+oQIFfCIqUY4ECv6JQWYrq98D1vlbKUEgJr5/XkdnXfG6BAq8yCkIrUOBXECrLiC68i1pb/anqYEJKKFd+jkdYoMCvHoqUY4ECv4oIA1QUFnWwAgW+BgpCK1DgVxGuh3Dcog5WoMDXQJFyLFDgVxBCSpzZtxFzc0UdrECBr4iC0AoU+BWFkCaiqIMVKPCVUaQcCxQoUKDAtwIFoRUo8FPgeWupwmrq5SjOTYFfFIqUY4FXDirLfmk9VluL8+2riDTRDcwnzsLVj19qNfVlx7v978Cz//9z/iwvOq5v8vwWNlwFfpEoCK3AK4Vf5gK5+d6q24bFJ3D4JEQhtJtbVlMq8DUZDGpfX3a82/+uDAlJDEIgXA/1m7/7Mx/vy4jpZcf1jZ/fbTZcz5+bAgW+aRQpxwKvFn6ZPoWb711rAKA6Lb3o14dfbjX1BcersgzVXEWFATgu3LoMt6/C0hwqDPTrbcPXSd1tke+Vi/q/zz/nZcf1TZ/fwoarwC8QRYRW4NXCN7BA/tQptcF7E4Vw4Bji0AlEuaIjm5dZTbkemeUQ9AI818EcHO8W4YQBLDxGjUyANKFUAb+vozSvBGH8zOO/KHJ6JnW5jZjyfg+aqzA89vQ5LzuP3zABFTZcBX6RKAitwCuFn3WB/FlSatvfWzz33i+zmsqFwcXRo0RBhOM5nBcGErYIxyhXyKd2wb4jUK3p4xIgzr6NkNtuzy9J3X3uc504C7ajyWzhMQo0OQ0+78vO48+DgAobrgK/KBSEVuCVw8+0QP6MNZ2v+95BkhPnglKlhB+l+O0ulVr5mUhIuB5ibALGJp4hku3mxF8aOT33uUQSw+vnUatLsGmhFYXPfN6XfZav+hl/meKcAgVehILQCvx64TliUJYNP4Wj/XZ80cLuWQaONPCjFHvuLs5Sl9wAcfYdxIsioW1iku3mxOL18y9+/OYxWDZK5ah+D7FJgABzD2F5HrU8r9OkXyGF+FWIqlAvFvhVREFoBV45/CyRwfaUmrJsuPox6mdYlL9sYZeGYHamgt/u4ix1kctz4PdRuYKT57ZqcJ/DNnPizRqYGB57oXOIyjLdNqAAIeDEWYSU5J029Ltw8Dj0OrrmN1Ayvuz85XGM+vhdUEorLU+c1dHe848t1IsFfgVREFqBVwpfJDf/Ion6
9r9tpdT6PdTPuih/hYVdGoJKrUxuoAUfrgeP7qHSBKr1F36GTXPivL2sa2B5jpIScfZtDNt+oQDEKFe20o1KStTtq7q9AHR0Vq58IQGrLEN9/BO4dwtKZdTYJLz3X1G2oyO77WRdqBcL/AqiILQCrxRUv4fqtrV0flATUq73xYv0tr9tRhzKsiHLUKYFzy3KXysCfMnC/iISFWffQSH07xfn9PPCAPGCzyBeP6/NiW/fROU5rCzoyA5FfvYdHVmGAaQZ7DkIhnz2c4SBbvw+fBLVaT2Nzvq9lxNwGOgor1TWadjebR3xlauoyRnE8/W3Qr1Y4FcMBaEV+JXDFzlYbEYdavHJ05rQF0VJg7/huKhWEy78GCUELDyGqV2aAI6d3iKBfJCGfJ4An3fxeF4VqPq9Z4/zBQRr2Dbq/Pd0KnDt38G9m+A45EdPIbLsc59BSBMxPIaSUkd2pbImnHZTk9n8I7j+CZSrsHMv6rf/Oka1rolrW4uBqNbB9fRmwLJfWkPE9XSacXJGH7shYW1J/z2OUJaN2PY9fZF45JsQjDz/Gj/tBO8Cvz4oCK3ArxS+MCXW7+ma0P6jsLEG+48+s3i/MP3lejoKu3VFu3BIC/YeHBCGhDTRr335AqrfBSEhS6DWgHYLLvwYpNSvAVt2V5+rt929rhd924EDx15KsJvElu/YpY/n8V349ENUuaIjxudEHWKQZlQoUOi/1Yd15LSxDnmum7L9PiKJn01dbo9Gt9cKX/L7TeGJCANyQ8Kn78PoJEQPwbRQly+Qb++9+6KNx88oGHlRZP2zTPAu8OuBgtAK/NKxuTCqocZLo62t6Gz+Eawt64X23g3U6TdfGCVtQkiJOHRCk1WpAjc+hdaGXiSzTEckeaYJCQHL81Abhu4GNIbBtODo69Bp6ddrDL80Ctw8ZpVnoNQz5JTHsbbIqg+DZcPiY3h0TxO0U4Y8gx07wXV1DxmQ9zqoLBtEdt/f+nxCStSpN+H+LTAM3TQ9vgPqwy8mkzB4plYokljX0zptVLul33OQ+hTlio5SP34Xslwf1859+vG3rujzWK1/sX/lNyEYef6cNtfIOk1N3s+1HxQosImC0Ar8UrF9AY4WH6F2HXxhSkxlGSJNUHsP6QVt70EdLW1f2LZHSdsWWFGuoMpVbSu1sQ4IOHQCceyUVg32e7rxOI111LRzDzzJdbPzg1s6VVmt6+N9SRS4eczKtODuDVAKBHDirP6M/+5/1Km7cgV+6w9gdAeEoY40uy3wPERjaGvxVnMPiDwXFel+suc/nzhwTJ+LPQeh14Fz7+ioK/x86vJFdb48jvV5v/yRjvJmdqPOvaOjxG3CEManQBra5gue1i6/wL/yGxGMPH9OH98jW3yECoKv3H5Q4NcPBaEV+OVi+048Crcagp+X1SvT0tFSHGlyUUrv1l3vGU9E4ZVQrQ3U6jLG2MRTQcahE6jmqq4LGQKi4BnFo9pzCK5d0u+xNK+jucf3dIrSdhCvzerHbnPC36znqCxDTe3SqTdDwvVPNIkOFnq1tqCjMdeF1romtmoNLEtHmjv3gOPpxy881oSxPI+Y/Q50u0/9FLdbWfk9fVxpghoZh3s3yYM+eGX9GbaRyfMCDkBL8+9c0xHYyLg+jjBARYGOzEplXbcbpDwJA70piMIv9a/8JgQjz7xGlsH1T7BPnCaYe7IlcClQ4HkUhFbgG8NPJQQY7MTzfg9qNZRlY7xAVk/gw7HTOt02iEa2FudNT8T5h6g4hvUVWHxMfugExiAlKcoV1PCYTs/lSiv3NqM/y4Ys1em76d0wOgGHTsLda1sRiUhihF15mv7c7pD/8M5AeOKgfvDfbBGKMi24eRmufwrrS5oka8NQrmCcfhN16ATAVm+Zaq7qVjKvhFqeJ2s3n1VOmhaquaaFGvBU0JJn8KN/q48/z+F3/juEZX9evLL5Pv2eJomgD60NMgTB/tfwbl1FJhEszcHkDEgDTr2pz3W5gjj95rMijS8g
rRcJRr5Oa8Uzr5FlKNtBhVrgUkzxLvAyFIRW4BvBTysEEFLqeszH76KU0hHZ5nOfS11tihEEgG3r9x0o8IxyhXx0UtekpNSLe7+7lQoTUiJem0XN7NGLZqnyNPpTuY746kODPjFXW1Etz+n3FgK2K/y2R5XLCzriagzBw7tw6V0YHtPkC6hP3teR2Y5dYFtw6CSiNDieWv3ZkzE8pj/zwPzYOfc2/Vb72cckMSTplpXVlmBm+zk15NOesxepAl1Pn6PaENnQGBdqB4lKu3DXFzk/WkFM7UIcPKajsOdFI9vI5OvYgH1p/9sXXDub0ZpT9uj1g2eeV7QNFNiOgtAKfDP4GYQAYjADTFaqsLz0DAl9aepqO+lV61rC3uvoSMUr60Vvc3TK1Y8Rm4vmdiViv6cX+PEprXo89ZZ22th7WMviB899IdE2RqDV1D9Swsj4ltoQ19NpOoCRCT2syfGefa3t52H757Vs0tvXYG11Szkp0gTGJknXVwlaHbxKGRnHula374iOuMrVL22g3q6cDFJBnFQpD9XxN9bw+30q3kBJOWgP2GzY3jpPXzCodEu48rwDype0VqiB6nSzL+/5a0dIiVGpIb7G9IECv34oCK3AN4OfRQiwmXb0+zpNNyChZ1w9XoLnHfAB1JHXNJHduwHXP9kihGcWTXjGHFgdPaVl6oYD1y6Rg47wFp8gNgd5bl9oDxwD0JHhyXM6Tbe8oMl5W+1qM7Wo/D7cv6kfv22cC/DCadVim/XV1lw02yELQy5Mvk40No2zPMfsj/4tckBo4vRbTyX1X9RADVvKybLv4zYzwlzg7j9AaVjq1OnlC9Btw9oS+fQe/ZluX9WE/5JBpfknHwzUoqAOHNtK96osI0/iF7YlgPahZOGxThu7nk4pb3vdF07wLqy3CrwABaEV+EbwswgBNp9ruza8/5dw/RNy00IcOqFf60Vegs8/f/tiVq5Ac1XX3zajizx7ZtHk3DtaHj+Q0htJjBIGolQibzX169YaqMUn2mljszl5c+Hud3UEeOg43L2hxRmmpdWT26KTzdSiKldQC480mc0/REXh0+it39V1N8fVhCHQUaLjorqrKNPS6cujpwg2Noi6FuU0pB+GBIZFReh62OYGQGWZ/tley9u2SdgOaQhmpyqEucCzDAyVk68uwe1rA/FMBLsPIqo1LXZ52Xy1MNCfAwFpqslw4ICSf/KBfr0khn2HUEdPbbmjCCm1OnNyRtcCDTnYFNjPnutyFfVXf+/pgf8crLeKFOarj4LQCnxj+Do1lRc9V0ip02qOC7euoLptWF9BTe3a8hIEvnDReX5wZj54rjCkfp3tDcEDGfxm4+7WAlmuQp5rqfq+I4gjr22RVN5pb0UhLC/A8pxWIh59TQtXom19c9vSbyrLUBMzenG+fxtWF2HuvlYUdts6Ahyb1O8d9CGKsH7vr8PiEty+irr6MSw8xp2YwVno06+P4nSbeFkMUttTbcnxP35XE5hSqD0HtZ/jc5sEwgB1+yoiTRCmRenQCRCefp/2hj4+IfTrPLwNs999dr5anmvHlWOnMWqa7PHK2qIryzQhWbZOH3Y7sL6sf38fMKR2PxlEesqydYS7GaEN6pWq39PneiB2yd/63rPX2jdovVWkML8dKAjt1wi/6jtQ4ZU0qQx6nrY8CaXUisLNBe4LFh1tK7WspegDcYPYTO25nk5Tbe7mn09ZDdKIwvV0yg30YrppG8XgHOZKE1drHTaa0FpHNadgY00/xnZ0De/+TQDdEvDglm4KFwIME3bMQM5W5ITt6sim09LEeP820ft/DnuOaJIfpEqlyphNFglrQ7jVOnL/eS0y2VQwfvwTuHtTv04cwt1rYLmofYc0MQ42CYyM67EyB4/DnWu6Ydp2tMt+YxjVGNHneWhUT8/ebKdormpvyaV5eHIf7t8kP3pKpxePvKZfx/X062wqUW1bfzbTBKEgjqExpL/nfk9vMrZtNjYjtC+9Xr7mBuoLr/+v
kML8Vb9/ChSE9muDn8cO9Ge9wT9v4GvqNGBzTe/202QrzfdSEtq26ORxDH/y72DukRYvvPkDPXJl89ie78Xa3rh78/JTUcWhEzpyaQyT93uoCz/WizfAnkOa5JIYhKF7yUbGYWoGHFfXyFpNHXEZhn7O6gI8vq8JJs9hYloTxdCI/vedayBN2HcYeh2yyxcJlIF19xZq10Fyy8H3QzzHQxoS6bqUSRGehxibfNYaTKHJoN/RJGm7OmoK+vpn94Fna1LrK5qgbUcTqzT0sR99HUJfP8e0tqItVanr76XT0gQlTU1y/Z4m1frQlnfm1vc6+12twowj/b0mMerGZ2AYOu362qz+fjc3G5u2X+UKat8RvXFojGBUqrDR+trX2ea19oXX/5ekMIsI7tVAQWi/LviGi+g/6w3+ouerLN1SIirTQpw4izr9FqLXhvqw3sF/Ud2k3dQ1nz0HtOpwes9WHeeZJuptXoeEgU75/cWPwJQ61bj/KGLzfQQQxYO0l9I9YGOTsGuvJo5SRUdUqysw/5B85z6d/vPK0FwlUxBURjUZbRKE7erG5YPH9WfYcwBcD6NaJ2m1uLAYEEmbWi45kQs+GT1K2A+wxw5zfrKEcfbtZ8/JIL2p8kyfl+k9OiICXZcan4Jd++HB7adkppRWRu7YCe/9iTZKdj34vf8ekWe6v+7f/3/1uWkOIs800QNDh0Z1C4I1oqNhhE5fnn7zmSGkwJYIRLz5G/oYr38KzRV9Hk6c22qt2HxebkhNkIPPhmFoctzcHHzB9fSFm6svuf6/NIVZiFBeCRSE9uuCb7qI/rPe4M+44G+glhZIs2hLJs5A2SdufPosaX7RolMf1p+r39OkMv+Q/NEdnTLbNkdsq8amlPZV7Hc1UY1PDdKJ4ZaBL5at04+LT/Rj0nH9+kmizYvzXL/G/EMdtaUJ4rXvIaQkPXSSC4s+UZTi7BfMJsvI7gYcO6UVmGmiI6RtNcLQKRGMTiHjkGhkjA0swn4Xb+UJQZzjt3MqUkGakFkOvcOncG9cQt67ro9951597Oe/p8k8y3QEeP+mFp64riatJEbdvKy9LdMU9h8BpTDyTEdaS/P6M1uW3igMjeroa/6RThkaJpz/Pjy5j2gMPVWBbhLZ5tDRbT1sKok1eXbbut+v1YQdO1HlKuL0m7qW9h/+5VO14w9/fytSVoH/VO35HL7S5uorXP9fmML8OYhQCnzz+EYI7Z/9s3/GpUuXqNfr/KN/9I8+93elFP/iX/wLPvnkExzH4Y/+6I/Yt2/fN/HWBb4ivuki+qaLvWo1dZru697gmy74Ny5rAcLlj/B37YE0I5/e89IUoyg/devIO7rpeFOwYdg2+e//bd0/ZdnaoWNpbmuOmDr//a2eJxaeaB9F24ETZ7T109AIdDb04r+gCcaQEnX6TfKZvXDjU724tjZ0Ss+UmhTnH0KmoKJdLQgDRK1O6JSIntykdOczfGUQVFIqBwaTAvIcXI8sDAkx8aIIo9PGunqJJ+sJQSaoeYrX73yGvZ7gr63hTO7A625AnpKNTnKhqYjurWKvpczmCrmyoL9fUyLOf3+rcTs7eFyn7YZGwe9pooatGp8m1d2IeuPpQr3ZfJ2mmrDLVbIgIDBdPGkihYKRMWit6VqYV9YuLTc+04pPlevpAF4J1Wnp2t1nH0FzXU8z2BSR7D301JPT7+lzV6mieoOm+O2N9V4JBn1oz+ArbK5+1uv/G79/Cvxc8I0Q2m/8xm/wO7/zO/zTf/pPX/j3Tz75hKWlJf7JP/kn3Llzh3/+z/85/+Af/INv4q0LfA38LCrEnwt27NRRQBLC2qpW/I3ueEbI8Uyda1uT9Mt6ngzbhrFJTXhCPDNHbMuNP8t0TUxKvYh2WnDouHbzuPmZfs2NddTqkq7BJTFieBChRKH2YfRK2uhYKR3ZKXT6bf4hyvXIznwHZ2MdZ/4efhDhJAFebwNEBlO7AUUWJ1wwxolWE5xyidlrnxDfucXONhiu
R7waEscw21sneHQTb9lEmjaMTuCvrhOOHqFULeNLh2BhgUpnTZNPlj8zpYB7N2BtRdcJRyd03Wr/UX0uDENbfR19HTHwvgQwqnXyN3+gydctkR15nQtrKVE+ipMEzI6amOWKro3lubb/6nf1tILDJ3WErRTq1hX9PqapI8TJ6cFEgyG9gQgDVLWuNzADuy7V08ISMTyGGJvcRiJ6udqazmDZT8fgvCR6+lwq8gXX/1etBf/K3T8FPodvhNCOHTvGysrKS/9+8eJFvve97yGE4NChQ/T7fTY2NhgaGvom3r7ALwNh8Ew66OukHLf6i25f04KF9gaonGThCczs06nDzQVmcyzM7avPNEnTbesUU5bq57+gJrJ9jhimhbpyEYTQda3GJJ7MkFkKO/dpD8crF3WarLWuBR8Lj1GOSzq9h8Cw8WZ2IStaUaguX4CZPdrseGmOrN8jEBaetJD3bsDje8jxKWbbdwmCHl5nHTk1o1OVD2+BaRGECREGpY0l/LZNMFPHcy2c9R435BRGaFCOu7zRfEClWtbTAAZiEy8NcbIE/8F9nDzBG6qDKXRKUYgtn8o8jjXR7Nyrj3HnIUpJgpQSdeDY0x6vkXH6KXhCIQ2hz9+pN7Qg5tFdgrk5otEjlA4eIYxTot1DWIMUK6WytsfaTDcO+vbU3sM6BVtr6GvEknDqDf29HT2lNySDeXTAMxE29WG9OYFnvtfn2zK2Wjq2z3gb1E2BL01FFmKPbxd+ITW0ZrPJ6Ojo1r9HRkZoNpsvJLQ/+ZM/4U/+5E8A+If/8B8+87xvG0zTfGU/nxpq6HEvUYgYHcOZmdnaQb/w8VmqXTkGaaFQKLJqhdzeTd4YAreEJEe6NsbDm5AkCMfFmX0bVfaInrgIt0He2sAsefjrS0Q3PoM8R0YRQ7//f0Z65c+9bz76+2RrK8TXPyN9chdVqvBhaQ9+aSfW+iLnWg/xrnyI5bdRQpHsmCFNIjBtTMskyxI+Uw16V6/hPljkrWpK6Qe/S6oyjJkZsuUlkskpPmKM3s2rOK2IN0wfy3GxRkeIx8YonziNWl/G2LWf4H//19p70nMZmpyhnGfEGJR6LarLLdJdBznJQ0K3zFDqk0yfIJuL8dKAfGMdw/UQAkTZ4zcPDNNdXqFUlohsB3Gvjem62EMNxMObqCgifXQXlcRESwtckhMkcy3Ke/bxg527MHfvRgU+mePx7oMmoR/illzeOTCOaQjyXofQdUg8D+afUMo9lOsyevIUO6fHkSojWnxE1u+RlitI10UcOYF1+CTmxCRCmkTri/oaqU5gn3lD199sm+i9PyNeeoKwHeTwCF7Zw6jU9Jc2NfXCa0gKGCIltkyE2yBefITdaIBSOLUKwivpIaBRiHBczEPHiUiRY2OoMMTZ/h6b10evQ+TYGEND5H7/hY/5VcSrvHb8PPELITSl1Od+J4R4wSPhhz/8IT/84Q+3/r22tvZzO65fNkZHR1/pz6f2HtmKpHpfIKf+3C74xFmUEtDvP5WMJzGlsQl6GxvQbmM0hlHdVcTcnG4YDkK4ckk//tZV6LR16tArkXXbrN27h5yceuH7qm4b5h+D7dBbb9FWPqVqCT9s0I+HEL0+4eKCVh8+eQS9Lplh0h6eJJMmrdU1SlGPXrVB8+6nxCv/QjcLD6K/nrRpNw5SKnn4qkJH5VSShOjP/8sgRWrA/+n/QvbwFkF5CM+NoL1B/ugup8OQIAPbdXg/3Ue0lmK1c9xolX4OZq+L3HeIeN8hnTLMMx3luSW4+AFGEhPajm4JSBIyyyNaXgFD6H66Vgtm9tDb6OI7DUpRQKvT4/HjeapxD+rD9LMeq59epqQSVjB5nB+gUivrSCfJ4P4dWJ7nTLVJIHxKr++n1dRRTL7roI7iSlX47IKW8V+/DLsPIGa/ixhcI8qyYW7+6XfTasPqslY7+n36R08j+sEzqcTtKUCVZVQf3KS9sqxH7EzOQKYIl5egWsfoB7DeRK2tajeT9rL2
Bd1stzhwDKMfIML4c3ZaKop1c7ztaPPjF9XpfsXwqq8dPwumXrLhgV8QoY2MjDxz8tfX14t04yuGLxzv8WV4rmgvkhi2Od/jePDpB8hKRUvkQTtSiMGczOYq7D2k02O2o1NV26MxKXV95mXvO7CwYmQcz3FxJo7jP3yAYzbxRKZtnAxDv1mjTjY5zYXyPiIFlmNjNZfxDQdnbQ4vT/R7WzZ0mlAbwrNcHK+O3+/iJBGeZcBbP4D/+h+0X2Pgk200uXD5EZEYwVEhs6UYefg1ZBxTyTN6hk20GlGyTfxccaqU4qkcubuOue+wdik5fprs6iUC6WJ3OsR+gOe5yCSE8Wmt7AwD3U7guE/TgKaF7dioPKfn1fBUjvcf/6VukzBNnN/8A5w8wZc21sJDUrVOurGEnJjSNcYDx0AYSKAS90DlT+tYcaxnva3Mw/qqFp6YJkQhyjC02tL1UNtqnuw7ohvMq3UtLNlsr7h7/fOpxM0U4MDb0ihXyKd26dcwLS3iyXP9uttHEcURmBbi8EmdAh3MUHt+cyW+TDlb4JXCL4TQzp07x49+9CPefvtt7ty5Q6lUKgjtFcLP3HNm2SiVb/UkbQ7uFNsWFc5/D7fs0e9rqbn6+F0dify//gmq1tC1od0Hdc3FG0xSFkJHJl4ZHt5GDQ0/a5g7eF8CX7vVHzqBVa5wPknx7Sm813Yj4zfg9hW9GN65Bo5HkOZEG+uUJPiByynVRh7dg7e+iBw6r5WTa0v6/deWkRPTzA4Lgl4T28oJlIH36AEy8HVD9cxuggyiXFHadwC/3SWQVSqffqDJ+Nx38dIUR0T4G00caVCJurhRSLK2jOp1UEDW63Jhvk8gLJ60I3b2wIsEszWJrNWh3iADguPnKakUaQg49w55FHFpxynUnVuINOWM0UIGfYhDsn6X4P0/5/TYBEHY5RrwaV7HCtocv3yJSthDWhaMjmk1pOPCnWvkea7JxPZ0bS/LtPdjvweeC9XdeoMQBvrL6Hef9vK11uHkeT0VwdFqSpVl+noQQke/QujrbVPcYtmgFPnmNWRacOeq3lg0V1FHXsOo1QejiH4yGNQ6p4lx4MOZd9qogZJyc4LAduVsgVcf3wih/eN//I+5fv063W6Xv/t3/y5/+Id/SDpo7Pyt3/otTp8+zaVLl/h7f+/vYds2f/RHf/RNvG2BnyOeScv8DD1nKsu0gCIMt9KNIom3BnduX1S2xoOEgRY2oLRbxdikbpjetR+jWtsa8Jm/dg4++2hrCOf28SaAFngo9OJ4/Awiz8jjGON//5+o9LpQqcLv/qF+fJrohbRSwVtbxWmU8RNwXEFFpMhP39Vy82iXVuiFPvh9YttjY2iGoZk9eA9vccHcQZQrnMBh9js/RDZXYPZ7eLaLI8CPEhzbxCs3QCptf9VaR5YrnD86jd/u4J34LrLfwl6eJ7U93RqRxASNCSLVR0ZdArOKaQiiOCEwHCpnvoNKUy6upkQf38TJE2aHwDx1nsB0iIIOFZHjK0Wwukbc6WJvrHHJmyZ63MLpCY6+eZYkX8NVCVep0xXj1CvjzLo+8tx39TSBxpAeMnr5Iy2cEQKOnNLRVxBA2YDxfVrNaDv6OnI9HT2mmd4IxDFc/79r1WinBaff0vZbSaKnhHdbcO8mamoXypAYg2soV1rlqnYd0C0Zays6GhwZ37rexMDBRVQr5MbTuW7q8gV9jLkC0ySf2fM51/8Crz6+EUL7+3//73/h34UQ/J2/83e+ibcq8AvAi2peP21T6fMGs+rYab0jHzjBb76eyjLyXmdrAVSmpWtsjquVfV4JMTyqZeLtpnaSsOxnnUMG402UacHM3q2deN7vwaX3yNOELIoIHj7Cswzkxhp89Oe6HpVlcPIc5BlyfZXZ7gMCDLz9p5EHzsG//n+AZcKtKzC9C6KQeH2V/3nHdwnmMrzlG/y1jQ2ikTFK4xP4wiQIIqqjE1CuIC9fYHbMInj8AG9jBXm/pQm0Wtf/PfUG
RuhTrVdRlSrcvkK2Mo8Kwq1z562v4kyeIEgyvOUuKQLPsvDQ/oeBUyGMVjGylNByCMIulX4P59ZVnJtz9Lp9VH2YK3mF7MgPUI/ukYURluMStDpw+yqWM8nK0G5UKaNx+QlhCoEfUNm1H/ye7jlTm8pRUxN7vwNuGUxbtzRMTOs05dwDPU/Odp4OV732CSw81Gle0BuJPIO7N6Ba1d/Dmbfhg/+qv/fmKvkPfg/uXifzPHh0X0d7zTWtMu22YXJGR+Ob08e3jQUSw2M6XbkZIRrA2ORWa0iRYvx2oXAKKfB5vKDmpU6c1Ttzx91qHv7aNYd8MJE6z3Sa8NjpLVPd/JMPCIycPDcQr83qx5s6Hceeg7oPDFD/4V/q93Yc2Llf95kJyA6cILxzA89zMW5f0Qvd+oqutygFD26TKbiwoYjiMk6UMps2kVmuSQ/04wIfpIkc30ElS+Hsdwaf14euls1npSqBdOm0QgJhUu2u03XKhH6II1bw0wzHsfAqw0/tte5eRyYJlYe3tJ2WIXSta3q3Tr2FAZgmavcBuPhjuHuTbHSc3lAdz7GQ9QZyfYXZ1/YTGhZv95rEf/EIzzZ0kzNgS8FjXxFEDm6U8p1JLXuXfpczRpufYNIJYtacOieby/QqNR4Ygsz28ITPmyOjiHaGbRpYMsfffQQvi/BqubbDynNNMpU6TO3ShGVK6LU1Ma8t6+9taBRR0l6c268hMTZJXirr8+y4OlVpOVqQsfQE1LTue7t2CXpdLTQJfJ3aBFSW6O+iUoPVJRif1IQoJfyv/2/yiWmoDyFem31GVKI2I8TNWlu1/kIyK8yHX30UhFbgGTw/Swvb0bvfbSkbZZp6t207utj+/HTibXjGYNbxtOrMNPXicuS1Z0ayZOUy9Puomb3P9LiJag1h22RLC5qoGsPa5f7aJfB7ZGnGhbhOZJVwOj6zCszGMMpxtXggjuDiTwgygyitUjIFvvDoeSNIu0TJ9zE2HfJtWyvoRieeClbaTdhzUKsIW2t8WDtANxN49g6c3KEblfHShNHOEqPNBTaGJxmqlpCjx5/WkEC/vkCn6fp97cLx+L4m58awduTvdchuXqFn2NyMfcLdMzjKYvbmp8gkRi7/G8qTM+B62IePDsbV6CnVcarY1fCQQ3vIophkzxC2BAxJsLrKA2MvSMmSM8yYaVOtVdibxNjDI6TNVTpBTCIrjFQ9bPoce/SIoaRPEJbwogh5/yYgdO/gd38bbo1qEci9G9rOSwCvv4HRGAYgtRyCXoDnOpibfpqbvYF7Dg+Ghbpw6zP9vZqW7kEcm9RkF/R18/XYFOzchzQlJLlOS276PE7vgYVHcPOKvrbGplD7j2IMDT97HR46oT06pXzh9Vr0o307UBBagS08c1MbErXnEGJ4VEdomymbNNGFfyHg7nX9+2r9pU2reWcw60ua2utwMKcL8Xmz2a1GDsf9nK2WyrJBfaWtPQDHd0AWQ5oSCJMoCCjtmMGXNsGwRTUa1OzmHujnKPBGR3DaAh8PyzC4Vp8hGTmKIwWzTg+58Ji8Pkw3EZBDJVfI//ivNMGZJsQRvdEZbvQURqVOLgW/Px6RWyFD/gayMcIFa5JImDhRwuzyIvL2VR1x7jui6z6mpUnTK+vUZTpwuG83Ic/JTJsLcpJOfYxFWePInn2EqSKY+5RKc0Vbdt27oetPZ96Gwye3og1PKEqWJMpySuUSnmnAlQv6+0wSqDoIY6AYFB3soIkxMUPS7eCpjCEjw57ZQ//eXTy/xVDa59LkSaIcnMWA2Uwhm0sDIr6nB3ymiW7Q3nNo0GRdQfV7pEnKhfJuYk/glj1mc4XRH1iVndU10C3xj+1AY0RHbWOTuiZWH9Kvd+67iNtXUEIgyxX4jd+Dzz7Qj4lC/dm6HV13a69rpeXlI6h3fvg5VaPYVDX+FObFBV4NFIT2LcbXTqFsMwzm1hW9Q64OlGObKRvD0Df6IPLYFGS8aJSL+vAvtIKw1dR1qkpV
R2rVuv7Z7hu4c692hhB6B/25zsUw0Avc2bd1iu7UW3DjM1j9MV53Aye9rmXz0zspvXUWsZmeuv4JNIZR4ztgZJyjexr6cwnBp3KMUsnF/+g9gmQRr7XCh7V93BDD5IZgf93knXYLe3hMi1riCNza0/NQKmMfPURjxwSq06b3o39P1IZSFuPXJugd2YUZxZSiCOPIa6jW+mBMSwp5okm95Giy23cE/ut/IHj0kMgqUx+us2KP0xEONSvR7QXtDb0p6HWh1tB2Uyj9Hb1+HiklszMVgiTX06eD/pZ0v0LC0Wydtcwjdzx2zEwRBgEn908g3/tTPJUgW4LZPQcJVq7ikRI0m0SNlHKlhO9VCHYewIsjgkoDzzAxN6Xwm878nQ0IfdTQGEGrTTR8hNL0NMH0fvqXPqLy4Jr+Lg8cQ5x+U38/a0ta3IEiC3yC9XW8fhf5V/9ADw4Ngy0BUR74+npqruqfA8e0oCQI4OFdHdV5ZZ0e3lRH9nuobvul1+kWCvPhbwUKQvuW4otSKC8lus2benPAZqWmF4MwwDj9Jvn+o3pRqA/pHfbtq/rfL/DPUxd+DB/+uS7gg97V14d0imrPQd0jlMR6cvHVj0EYGJ4H+0/oaCWOdHP1YLesLFs31G7Wzx7dAZVBYwQ5NMpsu00gyniijIzDp2pH29F1r/1HueDOEK2u4hgWr1t9VG2E3kYLLw3wyh7BSkY3BkoWq2FOtNDBaGZ8d/0nyKBPVh+FlUUOjtVpdVKGiaisZIiZKYTn4e3dhxO4+EGMZVtc7YAvKlTWUt6cqWqBQnMVMPU5OHwS8hwxPErm+/ipwjYUTqlEODrNqX172VMxcA1FsHgIb3keaQwMg0sVkObWQp35PqFdwrMMyiaoXoc8y8iEJHh4H9t2OV7OyHZOclPViJIMt+xRqZSQ63NbJC3JqYhU22sND+Ps3E1QruGaAnnkJD/u2ag8w4s93vDKmJapm9ZvXdaRd68LjoeXxTik+H6IG/Xx+i0djZqmbiqff0jmVQmGp/EmdsLKIhe6ktgbwo56nI8jbc/lepowr14isW29sTjymr5GDhxFeh7Zb/wurC/B4pz+vodGt4RG6vZVWHyi+xAPHBtMV+h97tovzIe/HSgI7VuEryK1/9JawYFjAweOsu7LgqdDGB/cerYh9fSbL14AwmBgceTqxly3pJVvu/fD8oIms03J9OA4jXIFpYBP30dlOTy5T75znxYXbI6EGRnXx9xp6Z6o+pCuceUZUgoqIte+ggO1I7ZDduwMQc8nvX2N6OF9Sr0NeuO7+PDmPMrdQJgmr4sWQTtBmhKrXCaMU0LTY9pMyOpDrJkONbXIhyPHyYAnMyeY6K4gqx4i7W2dA9PzOG9EBHWX+OBJ/n/XVkmEhbkSsG+kxMjJ84S7D+Pd+hQJ8Jc/Ituxi/7jEldL0yRdEydzONP8kHi8xMRSl86+Y3w81yPY8TrO6x5n8lXipQW8ySlkp4kKfHLH42IzI8572Ibi7Mo15J2rZFnGhdETBPX9PBk+xc68R2nmEOf3jOB3umSZopflVIbHdV+aV9KCi4HHoyxXOX9okqDnY93+jPdWEm71BeVyielMEKQ5VQudfgbtayklSAOJYjZ4QjDdwNs1hLy+pKcqCAGri2TzD7ngl4hKdZzOGke9lCgpUxIb+G6VoNxgq1U+iSFJUGEfOh09gmZiGu7eQJ1+E+l55H/4d1DNVbBshGVvXYciTWDQXM3+o1qUNLiG1eaIINcjFwZBCp5Xxhj0TG5t0gqSe2VQENq3BF9Zav8ViE7YDurgcQj6T1M1g6jpRaNc8jgmX13SN31pMEKlXNHCivow7DsE597ByLPP2Rqp7amekge93qBXKYReB3Xkdd17dPuqjtCWFwDdZBxUh/F2TCH/6n+nP1sSk1ku4c2rlMr6OC8+aRMmGeaDZSzAb7cR9joqU1QSn14/4/2dB4kndrG80mRK+cjUZcIwifohc2YDMsGDoXPEqoxs
jJAqSdmWhFHMmusxJEySTh/n8OuIXhvlVkn8iNsbGanKibKMybJkrROxu2Ig+2VOlhIIM670S0Q5zAcRJ6oVwlQRRw0qzQXSzjLBlcsEo0co2Sa9sRneexQgjDGcjsHsjl2YB48RVoaJlwIc06DV7tNrd6mvrxDkBpFaxpyYIgwVZq1EZFh0Wx0+uXyf+6FAAEd7Nm90HiGjio5sXn+DoOfjeg7y+iXKG+v0Pvgxqn6QsnLpV45g5Clu5KPyWPflHTimLcNqQwRv/w7en/975Mo8lc/e01L5qV16VtvKIizPEdhlok5KKQvx/QCSHo5t4VcncRpDlBxLX5PNVR35lcvkSy1ojJChCHYdppQkGIPr17Bt1Njk1jWsNi3WTEtvgLyy7lXc3srx8U9AGGSWw8XRo0Spwk4jznXuIZNoa16e2Byc+lMMsS3I8BeLgtC+LXiRvdSLUigvqxVse37e72kSK1efphTrw597nsoy8m4b/tO/0WQjTdTUTr1w2Q787t/4nKpMgBZFDLCZ6lH9HmruDly9qGtutq1rK6uLWiWplBZPRCGZZXFh5ChRpYFTGWM2ipELD8mThA+DEt2sRM1POV53iQyLkqnoY3Bq4zYyjbErI1yKKvjtDspyuZeXyWPJXGwxbkuENDk+06CjRrGzFGttjjivsRFmhJHA6S4RscpibZq8vof5v7jOTiPE2ViFxghJa4N+bZRq4kF1iKaf8/j+AquxYMmAKPP4L20HUxxFdhz2egplmLTGdlFPfTy5BkGAGB7GC3s4Bvjz84iHD1D9HmVT4AtBIKrUhsfwcoWMfK4sJGCalLMqb6QZnilwpEGwYzdeoMiGPJz5+3xyK+B6O6VTHmEy69FzawRDR6jEffJOi4+jClGmsNfanIsiDCnxkgDPUEwHGwi1wVuNEsbda+RxRDb/mGB6P3aecmn3LPGGj50PM+u2kGkCQQ9q9afOIUGA13uI487g5waOFFSykNngMcHEIUoy0PW/TSuspTmYnCFrrtE2Ha6FLknHwBE5sxjYsEV+m4S1tVkD3S7y4LauQS7Pk+/YpZ1nEIhSiaAXEPYDSqvz9Ps+wcZdKo6hxSaWCcfPfHH97QUoVJO/HBSE9m3BC4jqRV6LL60VbPfBW3isf2c7W71iQkrUtufBYCbZ4mMtu5YDKX65jNhzQPcg2fZWn9kXQUidqhJBqFNJSaLTTErp1920REpSqNQIwpQIqeX3vS7BjStUVp/Q3XuCG50cY2wH84bB0aNjOM2QfpBi1WoQVfCqZWS9xuzZEwRXPyEWJvf7FmtBTj8T3KdMefk+ai1mtOTCuXeIe2UcP2XIFrh5xHS8zg4RYxghdhIQxgqzbNBJFUkqGE9DEkMwRkSS9DAsGxXkWKZFN8pZM1wywHBKzFQk3bjLcX+RM3VF5eBJ5B0Bd6/pHb7jcJQu5Gt4luKSlPhOGWd0FOfMm/TiDOfqBQ7fvs0aYwyPjdBtjNM79TZ1UmYrNaKD43zPMom6PdIVn0ulCrX2Gs0oInZdKo4mLFwXX1j4UYKUAj9K8Q2bipkiRyeYdQOC4Sred15H5incv0mG4ELfJqrr7znzIyxTEtgeQaqoGIbuSzv1ho62ohD2HUZurDF7/BzBjct4dgupdoDjUjFyhPN0uOumd2M2vYsrYowVP+FJL+PE2mNCYRH8p3+L/Gt/A3Hj0y0fyHzTBxK0CtN19RDWhUc6WosiOH5a9/0FPp7r4Aw+r1Py8JZ7EOVaeBPHW+NwvpZQpFBN/lJQENoriufTGV+nqL1JdCrLtmoFW89vrqJga5e79dqD56lB3StPYt2XFse6UO+6oISuYWXZC22Fnh/MuD06VJYN5bJuzjWEJjTb0f8+cEy7eFg23L+Fl8Q4k/vxJ3fhrC9SGpKw/Fgr4Iy9+nGjkxihz9mla/Tu3uFKWuZDe4pKdYw3HYk1MoL1/R+S+T4HlmOi5YCdWcZ4uMH55Q+xh4bxOiGUZ/HfPMnJns/l
Vkb++AHzvoepcuZVmWklcSR0/YRlUSZNLZbECHaoOPjwMzLb5rxa5X/Z/VeQeUJomKQ5JEqLHVMFR+2Yt0YEabcLUUiuFMGRs9jEXCjvJ8wAGnxn5TKz1YzezBj567NcXIno9Xy81RREnSXlcPlxjwmrTdUOeOO3v4dZqWIlMZkywHXxXBvbDxkjYcoLODNmUX/tt5HdFjy+h3P3Ko9aNYIMXDJeG87J9h1BnnoTM+hRqdSfksf8Q4IoIwotSqvzdMZmePBgniwXeOUdvP37x8FxtwaHqrmHZEvzBCzj7TuAXHxEpeTqlPTx0+B4iF4b6tqTc9MFRrgeQWOS+GGb+vocT9IyrX5EfbSMF63B6sIz5Ld9QGxuWrreqtAiImGAY0MUII69hZAS0/U4m+asrz9g6OGnSMvWisn6sFbj7jkIw6NfL8IqVJO/FBSE9griZemMrzNR92WvoerDIMSWCezn1Iubz8myp6NfdsyAW9Gktu8o7DsMm84e24rrzwxmnJzRUde2GoV97k14Mqd7nJSCiRmY3o048hrCtsl37YebnyG9MrPrNwheO4QXRhghsHs/lSjimByiG0RUOg8pX+sgH92DTpdbzmEM1yP3dnDyyAyNweJkVqt818sxhESNl3D8IeqPK8g0BGmQuR4312OizMB1DCZeP4pYblO1DYwncxxu3sZpr9OsTJBWhzl5aIrVxMBqLtN4lODbJZJWxP7RCrIxBOsxG6sh3Tij5BqMlRxet3I+vb1IhIGVzZGLEn6iKFWGWbMlnUQRlGeIjs/wzg6Xz7oG/lLIxws9RkomeVpjr2pxKFknyCvssA1WM0l7o8vwrctEmeL9pAo79+OMHkFZHWQKZdugngdY5KhSGRX0ictD7FQ9DANuqyoXH8xRa6ecHXdIjp/FSyPEZuRRG8JT4MzM4Pd9ZBSyt72ODHoEboXg/WvYe/fA8hzqwDHyJOHi1GnCbg8nqzMbr2BWKluq2i2iFMDZd7Zc8HND4n3yPnbi0VaCo9MNjt/4jEq/i6xWdeP12sozdlebPWiAvkandg4c+JtgmOCVtWMNkOeKT5ZDgsYe7EbEbO8BcmVBT/6uVLQ114KH+hppw0I1+ctBQWivIr6JdEYYoMJA3/h+X0dmmyauYaAnPO8+oNWFmzWw7e/b78HufVpCPr1bLxZeCe7fgl4bVRvS40mEnsvFgWP6uVKi/C48vK2jO8tGHXkNVpdJNzZgdR42VnV6UemmY2VI1Ad/riPCuYdgSGSpTOUv/zfYtV8r606/hXnjU96IevhS4QmFWWqgBIM6oNILEwq1sUZujWkhQZZhRQHf3VUhzAUWVQL/r+C1VpCVKqFdIspiLAmfLfq06zZricVOM8dTMeWlR2StjAkhWa16tBOoeiZm3KeXCcTqErW0hdffoDs8huvknJoqc3mxz5grKTmS1vhegpWAUr3GcpAzXxnH8gR3OmD2AoI0Y7xsEmaS+34EQKYUy91ECxlKwxzaM0oaBXRvL/MnmYMQ0LyyzN/s3eZDa5pbbo3Sep9G1cOySox0V/GTjLblEx7Mce9dozL/hJRFnPFjdFMwlpeo91boCMl/SoeouqtUqmXOSgt56wrkCmmazFbrBHWBzDLe70juqirCKHMtz3jTkMhYH3NPurT6MbX5+7Sb6/S696g3aoPxP6ZWua4skPk+QWZQfuMdpOvBB3+OvH2N2dExNkSAZ5aRb7wNu/Yhhgff4wvIQ/V70O9qxxnH1Rst19PR2p1r8KN/iwL6e48Tjh6lVCnRNx2Cvk/FGjTA+zpLsd35/6viizaYhWDk54OC0F5FfAPpjM2+LhX0odPWjcxCwKO7uml4eQEe3EbZNurAMYzTb2oLrCxDNdehWtvyzMsNCf/xX8GjFsw/0jvsa5d0yqY2hBoZ12RiWrowv76qyaxchboNP/7P0G3TvfWZ/t2OnbqOJoSWgv9v/1I3z/YGZr5hqB0lsgxhWaAUIs/ITs7S2+jonfeT
25rIDp2gwmWOLj6ka7pU1+9RfiRRpTLZ7/33+jj7XUS5ivv6G1xcDAlGj2L3DWaNEO/Wp5gjR/h0MWCll1KyJNM1i2NjLsMbMSyFOJZLmOUcdhIS10FFsR4Xc+AU3vJ9Ptv1FmdLEeGoSc218OMEVpeYzBOWVy0edFIW+hlxv0VaG6GZChqWRUbC4RGXe82QbpQTZ5ClIUKaPGmHxDmYUnBk1ONs8JgkimkPuXym6timZKPf4rY1SpbEeGXFg25GIwmpS4XTGMeWgv+1L3l8cQnT95iun2Ov6uCMjDHbuk9VbdAxPd71PdJMUbu9zLlDEwS7D1Pxu3rOXOAj9x3BsxwuXL5HNFQiJeD1YZu4HeMnOdWyS+6VuTJ6iHvtFZqlA0yaGVUn4I2qiblzj76Wo4Cs3eJCaQ+Rb+PeX+NcNUU+vAPdFqqzQeXUm4jTb37Ovup58sjjGHXlIsw/Qs0/ht37n0ZuzVXtcIOANMXrNrG8Dr5p4+w/iFfy4fFd/XfHfWkK/adFIRj5+aEgtFcQ30g6Iwx0X5cQ8OQBwrJQvq8LO8rQNSxhaHLrd7UK8fZVTXhZCoe1D6OwKxj9njYBHp3QCsU40e/hDrwbN3ucklgTGQIsS/9+Zh9srOu6SbcNvk82toNgej+eoSDLCJIM2/GI2x28+jByzILJXdBaI00zQruEYzpceNzh+vV7qBz2lW2+e+YYtmkg203eWP2EAAuvuYCcfF1//qW5ZyYB+HuOEWUGJZHj5waRV6Ec+xyvGrQjl7Kd0o9SksxkuOxgnnsbJRSzaU4gJPlrp/hkOeRxN+ZO34Ncsas0STW2OGCYjNTKHPEUQSfjfKPDu3GNuJXwJFOMj4+gwoCxgzu41oGRkkWkTNI8ZU/DIc4ynOYq1/sGhhAIw2HMM3Ckwa6SoN4PEbUSE2GGJSzWoxyR2Xzg7MLyFPXpGZKViLIjSVTOkZokiyJ+lJboGwa5MilFEQeTNsmCwG495lzJ4P9QFbLMIJM23Y028YMeXiCfVcDOPSCMUqK8xMj+fSy2cnoeVPeAN+6AZdINYm4u9cjcErFhsX/jLokQBP2EyubGzJAEpkuUZpRMiT/3hIB1KgPTYyUUGMYzZPaiSCcLAvjL/wzLc7oHstvSGYnLF/TGLQx0M/jaEqgc2VxldnmewLDxDhzCPP9deO3c1jX8/PTsTfzUUVYhGPm5oSC0VxRfpV72shtuy0FheV4rE129C6Vag8OvaXcP0x7UyXK9eMHTIY3qKckhpRZ5uJ5eLHbugeFxLfAYGdfkd+iEHhtCrvuB8lwLP5TSvoTdDW0yXKmRletcUMNE812ssA9KEVklnkwcYuf4KF61ymz7LjKJyWb2c2HoIHEuUI/a+PfuItptlowq3dwhehLwV4+MYQ6NIi2TShLrXrds4LRRqmqStWzIc9zYxzbKBLmJ40jcqA+OQ6VWZrgf4JiC++s5lhRcWugzO1PBOP99lO9jmA6uIeDJdTpdgWNI8lKFD/0yXgb3fId3nnS5sxaS5Yr9YQUjiekbLisZpH7C/rJLJB1OTJgcnygxOT7Gw/llypbBe3dXWW/GHCo7iCzh48QmyWHEMfje/mGMGxa0m8w2qozMjHJxKSBMPYwsI1KSx72UfpKx3I0Zq1j4+45hRiGdTgs/zjGkgzfkEIcRUhrYpMQju6i4HtUudMmYyrp8v1FB+j68dh5h21v2YqVyCTdIiZTg2ITH0aUbVPw28vICanSCdHmNJesEmbTwK8P4pT2MDFWxzS69nYfwpEA+vIV39ATOYh+/MYbbXKY03NA+j3GEPb2TQAjypQWdpqwPPVN3E2ff0dfpv/v/6ExBGIC09ESAxggYBpkSBBtNvDhCVut6EvqD28gkouJK8Lt6MkCt/vRG2tZm8sw99BWjrC3xFdqsuxCM/PxQENq3FF94wz3voPDaeV2HsOynvTuutzXdl/qwviHDCJbmNRGZNurmZZ1KHDSxGkn8dPimIck2mgRu
De/2NWRrXRNHMrDAynPtGbm6CEdeh1YT07boNVtEq6vYKmWuNE5puEF1uEHgDWE6KdHqEsHdW1TWFgnGdhKNmZRNg3YKKlMEwmEtk8jIoLMcUar0+d7ptxAHjw9Gn9QQQY+0VKN35VMwPCphH2lK5P0bnPPK9I+cgcnXEHlM7nqEueDMVJmNIEUgqDiSIMm3PBMvbkCUBdhpxBtOj1zUuNdN6bgmbpyxo+bQiVIeb4Ss9BMyBXl5kp2egdvLqEqByjK6dRc3h7d2lPFs3bw95Jn8+OEG9zZC0tzCFQlDjuC1hsP+8QqeberNAZApuNizuP64y1InJBWS8aqFAdQdSeAIHClY6SX86F6b+xsxSkHJNtg/5PDfHmrw2Z/egTTikjnG6yfOYrYNTj+8TxaFfF+s4j1Z1MRQquKfmMWzDQzbwQh9ZoccOtN1/GYT78ZFPTH74R2Y3oucn2ds7xSJNBmf3sVpDEbNLpeSCtGtBZzYZ7bzAJmnzAqDIPLw6gIZJ6jDJ8HvIeoNuHEZLv5kIBragZraoyOxOEYhYO9hXdctV6C1ASUJXk1vmCamueB7RIGPU97LbLqIVEpnCuJY23Jt1tmev5cGhLQZsakse2mU9YxjD4P2lrvX9d8G6XtRCEZ+LigI7duK59Iaz0xz3uZxKKr1rd2o+uQDHYWVq7o+dv+W3v0+ua+JLI50hHb++3rRCPpPR7wksU4FAUpK8k8/4uIGhJnEGTrCrO0gb34KTgk8T49LMQx9LElMVmuQ7juGffkD5HLEnw6fIEVi5S6zpofnOqRLq3gLj/C666ASvMoQTpbQd2rM+ynTdsqemkmy0aUjhzD9DmFSJcwFlW3jRLJSmQt3V7i+apM3TrFv9Q7vxMvYKwuoiRluLLSJTUd3D2Q9hADPlJyZKlOyNJk50sCWgrV+gp9kVByJn1tktsv3jQ6nSi7N3aP8s4vL3F33MQxBK7KIkwxDGhiGwPIcWms94lzRj1JWI4UiwjUFv7G3QZor3n3U5vL1x7QSmLQhrI0igjUqnTW8bJ3S/gNaeZgmRLVhuisZqrtAlphULcGeqWnKjuTmrYeMZTmjKSyWR5CYhHHKRNUhzhX7hkt8ttTlgTtO1cjYYWVcaGbklonavY/vjUqy7CjZ9YuIWoOLTUX8aAO3VOLcyVmMoE8cZ/yba6sk925S7g7zNzcuIhUErRa238byu/ilBqXWOqOjOTEG0cgOStc+wsckSBIqExPI0Qm8dotg3wndN3jvOlmasXHjKma3i2xvaBVttwN3r+mabGUwHNRxn6a3h0Zgxy5tVjy9h/DU28SLPiUD/FwQ1Eep7Nypr2WldPp91/6t62STyNTNy3D/plb1WiZqeo++f54bUrv1nO0byYGNGMZgukS/+4zLToFvFgWhfVuxLa2hTOuZac7i0AnY5mMnpCRrNeHmZzrViNIzyTZViZ2WTtO5no5ygr5O4STR5+T9m44Nfs8nUmVKQuHbLoEcplKuaQukhUfa+cFyCH7wB9hpn0thFXXjM4Q5we7gEjUThqMNOu4u9jfv8/18nqA/D9bAnDjLkK7N7IhkPeqhyiXKew5jXLvKibzJ3VginQp1I8Oznh1VEyQ5XWWClKz4GaE1jiHLfNefI1CCyLBwTZ1WTHPFkGsyVbPYCFLOTJUJ0pwsU1yY7xFnOU/aMTvrNiXLxHntLP2ez81OTqcV40lFaAosaZBmipGyrdNjQtCPc8bKJovdhDjNWfMzxssGcaroRRlpNyIJQ/wU1nKTOEjZUU6ZlClDrsNRo8fYsCSzXVbxqAU+ZcNmIZVsKBM3T7CzmLybsMuIsF2bI0uX+Y/BEeYDRS4rZMpn99Q4rplzZyWg2/chjxm1YoQhWexE9PyYubbJ/mEXRw1ztNUiFEOUS1oME6Q57p3r/MWq4EnXopJZUB5iLRjm8cgeImmTVfaRCPBUQtbvEE7XqKQBTp7gp+DIDFsa9OwK9tXPuNQ3iRYjnJFRzqxf
51LbIAkTLHeI2fwJcm1ZC4QG9lREg+Z714O3flNnDp48gDvXdeRVqVFyLDzHJtx7DC9PKO2sw6cfwEd/oUVIjg1eCbXwSE+YuPoxqtvWbSTWYEpCvwuDeX0cO62jq81rv9/bitxw3MF9kz07XLRcLVKMP0cUhPYtxTPCkc0xKoOxMKrf1c4Hm71nWaZrXKvL+uYfmdCyfZVDOnBM8AcDKXfu0yNc7t0cGBA7qKOnEGFAbtl6EfD7eFcvYNf241sezuxbeEOHoNvUu+UoJFua44I5SZQ+BhS5WmN0ZJim71MeHaEuFb2hCcpll5nbt5ALGTfUMJEzjjM1wpkRSTyzD2/nXkZLFbymwm+38dornOncZja5CdPfobZ3RKe+gCzXRJEphWdL/PoYiYyouIJWZtJulGic+w7uSkw7TDENQcWSdKOUO2sJ7U7AcM3DNiXdOONJO+bERIldDZtjYyVqjuS9Jz36ScqTVsT+/gKiLzFzk8ByWeunTNVtjo+VubHW59JCj06UYQjBVN0hThXDnknFkVxd8ZEduN3O8ZWgSkYiDHLL4ZrvcDju4oxYBIbN//zZOomaoWTm/O65IVoXbjLn++SGRbK0QJqlNNrLXHMmuclOfGXRUC1+c0jQzgxemzK5fvMh5ZU18GP2luH7O8p8lCb0F5axVUaExBjeRZQLLf4zFEGS41omXhrhhzG2U6Xch54wGUpCXNsgmtlPSeSs92PyXo4zNETebpK2NvBrNc6E88RGCzuLubTjFEGQEsV1LBFSMxR+FLMRQyQkFQN6XpXg3A+oGDk01+DeNa2mHR6DA8e12nZzIsP4NGQJlMoQBRitNc6M1AjbXbzhEcwsIW83oVTSN02SAmpgt/VER1K1BkqamjBba/pxD26jDp3A2HTQ2RaVKdPSytxbV/RjB9kODp3Q9+UXDMMt8LOjILRXEF9bXfXcWJjPzYYKA727nd6jhyRO79Z9Ygqdpvy9v6nl9qBTPe0m3L2h0yhxCGlKng8Gd9oOwrKQtRpndtbZEA6NYYvAK5P/zt+kvLGAfHCXYGmRKMwprTyiP3UAkUQ6lacS6jtn+JsNyYYlGYqWsDvr9AyHqGpTmtxBz63ynmUjFptY0SOO1wSnDUXc7WGv3yQWJhUVIqVCAb0ow5aCi/M9rq/6W7aQ+0ZcTCF4sCARqkSrJ/jDDHbVbSzpULElQZLR7OV8em+JuRwwBK8fnGC47JDlOcu9mNGSpWtdj9pcXfLpRXpsSpLAvpKgHcaseyX2j3lkuaIdp6z0UkxpMOQJSpZgV8NhtGRzZqpCkiuur/i0woA4z2mZFYIkJcygFGbstXLCxOC9tsmDiwssRYKaayIcSZgpVsMMP4WSmeCkKbdVmdiZQVRrPIpLpEmGIWocyjJGSwaPuymPejlCmBzOl3kn7iGt47w26pDPaSXjk35K0u0is5TyWIPzoU84LHGqZYIkxXZsSkHE2ZpJ4sL3g8fY42M45PSUgW2bHJPrBBsdyirhZjZNVNqFWHjMd/bsIw5jgvoE87fu0KWKlQn2JxlepcLQ2BGc+8tEI+M4jSG8t0/D5Q+huaLbN7wyHDgCKtckVKmiNtah0tMbqOYqLC+g8hx59RLlWh28Mvlf/W91DbfTBpTeyCG09Vuew9KcNgDYfQB27od71/XmrtfRk9q31aQ30/sEPmrPIZ3FqNR0q0kY6NluBX7uKAjtFcNXVVe9yH1fhIEeBfP8DLPNmlqaaJmzYTy1Eur34MpHCKEbnHl4W9/Aq4ta1h8F2kFdSl1jmJxGTc6Q2R6XkgqBsHl8Y4NEtTAsm2PjI5yxnpClGZZj4ycpdtDl6KjL2FtvE/1lD5lUkNU6E8fPwPt/SlZtkPkxVhbjhxFieAI1PI5nwrW0TG+9S83IOTte5+OhQ0RxgtOwOJMpPn3YJDYd0jynE2lniFQpukHGnoZLFHXxU6g7NhthzD+/sIBjSkqexd84PsaHcz2etHwehxJXCvJYkTzp
YJgWpoCRkkQpk16ccb8Z0QxSnqx1mDRSSLr8tSEQwx4X6nVcS+KaBmNlk5VuzIqfstpLaYXQjXOmSyGOFOQIHjRDhG3rYzGhn+ivZaUbEdoZS6JCvtZk2ZI4lqRLg2HXRlz9mPtdk0RYWLlFgmQjg1hYjJgOqQixAc+1OXJsipGRGv/LjQ1sxyIODE5Vc+TuI1ycPEXcA9cxOW52eWvC5v1ShUBYXFhLODfkkNkOF+Z7RGmOGDrCub2CXpzRuPMJdhrC0jpnsoz3xl5HlCqYawu8uXITPI9Lps18dSfd9Qh/tc15s0s2fIQWJjU3Z9oyOfad84zvnkZlGUenVxmenCQ1wLRM8rNv637DMNTX75nv6GvRdlC9rq6pVaqDYbI13UoSBZrgxic14Xzwp5q4hsfALcORk3rETBLr63l8amsMESvzUK1rQchA9auy7GnKcZtqUQyP6sfcuQa5QpkW+ex39QDbAj9XFIT2quGr9rC8wH1f1Orkr83qCGvTL28z2tt/VNcHKjVt4GoY+vXFQF7vunD1kr7ZvZJ2T08zHbHduap9HE1TO4ukKcHMASJnArmySGf5Hg4gh4Zo13bx/thJ6NlYa4ucFE1uykmuDB9kbCXg0Obx5zm0N4hNh/dm3kT1OziG4tThGUpZwKWyR7vpkCcpTtkhUIJmr69FBg9u4IcGG9cXCXacpmQa3FoKSLKcdT8hzyFXis8WO6zH0FEmS77CxKa61GXGyVAWzE1XWA8SLNvCMQRGniOFwLItmn5GP8kQAtbrKaOeCShGHMXjDJRp0nLriL3j3FVlZKxQSjt83N+IkFKwf8jFNQTdOKXf6vGolRE3N3jt6G7GKxarkU6VlhyLUqqI0xxDCpQ0UGGESY5tmZx0QsSYzW/MePzZRw6PRQmhFGZqEHouiQGWabKvbuC0YjLTpKYSdg+5hJZJBsRDE1i1YcShUzTdEv5qiCUNoqm9yEpMXGnw4FabtLqDNEnplku077VZ6SaUZUo3Mbi1lHNwtIIbl5itNmB4jI3xPahEUIm6+DlI08RLQ0Se041S2k6dC6rBIzPD6ecYCOIMHBPckkOapFy6cIUgyigttDlz5hAmA19Rx9NpcMuGa5e0EfGu/TC5AyZ26scgBu41HVha1GbXQV/XxEplnYZPE2g0tFr0xqe62T7PYXwH7D7w1L3/2Gl9v1z/BHX1Y01kmxvK54y71cxeaLd0FHn7qraSfPM3inTjzxkFob1q+Io9LMqyUYNBhZuiDZVlejr0IGrLjp7SRfFBqhDHg+uf6pv+wFE48pp+7tWPdbrSMPRisLGmXfEPHNCvtbqsbaqaazpludHAW3yCo54Q9H1qzghJqUoexzh5ApZHac8+/NUlEumRNNco7Ynp3rhC8Og+Fc+GxSdknQ7vLcfc8KaxSuNM2SCSALvkcH7fKO2pBovXVrmFjp7ejG7gpAm+MHGGhxhKM9wkoBWYGAJOT1VY7EY8acV4luBBMyJDkTs2VpZTlmAmIR0laWQRP7m/waO+oh0mlCsOnhBIS5BmOWGmSBU0wwyIudcKMYQgUZKSBKlSGrbALzW48aiHEIIw0RGiY0rm2zGHRl0qrsGGnxGlOaOuZCNK+bP7GxjSZGa4wtHREgdHFK0w4e6qT4RgxZnEqeVM9U325j5tYVEt2XzSUUSGjS0McgHKtjBMicigbAnq1RK/PZ3QCXxqnkVsu0hg3U/pxRll2+Bq36Df8vl00WfIMfBay7xZ67AhPBb8CVpRTpwpbq5vIKVBrxfgiZyRrIeyXA6FK0Q7D9JzIm6ENiEOTxoTTC/dRYYBstukVx3m+O5xgpEad3td8lRhmJIgE5wYdgiaPcJ2lw/+9H2skQmUgCVRJmolBDcW+N7xaWQcItIE0Rgmb65DEmlHmjvXtPFwc13PBDQMLeYYndDEdeyUvt7f/CuIe9e1YjHPtRpy7qG+voXQj5+cASl1lmLgO8q1D/QomlIZNTmDGGwonzH8/uwj
TW6Lj2HxiSbNR3dRJ889299W4BtHQWivGL6KS8gWcWW5VjaefgtDSi3d3z7z7MM/g7lHeqc6PqXJKU10TSKO9MJQruj36/d0wft+rm/26T1w4ow+Dq+sd7ZpqlOWTx4gVcas0yJA8nb7MYG7EyZPUN4/yifLIf1QgTCoKU1yvh/QEOB5DnQ2tGvE+jpZbNMxFYtymHnTpjHm8Na+UUzLxMkFe8aqmIYgDUPyWDI77RI8XsEL+8hqjdkDE/hILi8qOp0+VVsrGOc6Cb04pWqbOFLoNFaas9PO2WtHvFaHPxaSkp3TjwXjFZshzyLOFEmqEEZCmOQ4UjBWtihbJmNl6EYZo6MV1noJ46Med9sJuQJTPP1+4iynGWRsBBlrfsrJyQpX+z2sNKSPxVTVoRlmLHYjkhgOjXqMlEwup4J2lOJKm0M7KuzZP8yf3V1n3ofyUsDpHSWajUlKUY8Ug9GyxJGSTpjRcE3emKlj7jxHst7l3Z7CXPCJU0XdMZiq2rTDhM+WfGzTIEwyphoSrxPzUVLDDxIyI6NsS5IwpR3n2DLHIccVirH+GqvlYXpLTUZndmKc+Q7JkxYV12b61g2ijSZIm/9p72/jOxUkM5zwLI4dmuHBcgccF9c0yEMDOwm5Z49gijJpYDDjKXpRQj3qkWdt+tE8pdfPEkgXt9fTRsJRAEvzZNIkkC5eEGC2m4MZbD6sLukUuTuQ9F//GHXqLT2Q9ezb+r+GodtT2i1A6X7J42fg0/fJckXw8Qd4KkOWylokJfj8hnJ7ZiRLn6obC/xCUBDaK4gvdQkZGA+zsqBvPOd91PnvPxvdCaEl+ps352atoD6k04ZP7usb3PV0/U1K1Otv6MXAtPTv797QaR6V6xEb1Tq01nVKstqAhScwMo7cvY/6a2cxanWElJyZkrz7KCUbn+aTpYQjRhcZLjA1VqMjZuiNzeClEfbH75KYO3GTVYaFZHpyEt9wdF8Z4FkGJUsSZTmOZ5NZDiIJqbzxDuzajxgexbJtykmKeHSPPMzAlGSVKZIsJ8kFipwRSzGSdDGEwjYFlZ0zPHBdVDNkrhNhG1pi75k5Dc+iUTfYlds40sA0BA9bITdW+xgIHFOAkCAypJAoBfuHXZJMUbZ1+8B6P2GtnyAMQa50o3Y3M8gwCDBIckWuBAfHKrQ6PXpRxkI3ZrJqISV4puBRK6YVZCyGUHEk/TjHT3IMQz8vUzn7hjw+XuiR5IpWlPHukw7znZj7A2/IUzvKTNd0nS5KczxLkilFluf0ooyH/ZxS6rA7jyg5JpMlhyDWzGyR0wpzDCGoEhMLk0lCPHLOjDlYno1bKtHv+8S5YM5qkEeSGwwDHkY7IxV9/pY1x6yMwHAonTxLlDSILvo8mM8BheHYzL5xCO/RKuadJcxejN1e5SNzmKiyF6ffYja5g7Qd4iDmPWMU9XAdzzKZPVVDSqnT6Emia8eP7+opDlEMn74PwkBs1phLZT2gdscMHDuthRxhQIrBx1mNIAxxDJidmEEaAnH27c9vKLcLsCxHN2r3OlpY4nrPjmuiMCn+plEQ2rcRrqcJy+9rwlI8bebcjO42JfaTM3qneeotXTsYGtU9ZrazbVT9uyiltPprckZLBPcfRdy6rHei7ZaO6OIIXI/M9uhN7uFa4wjJ9B68apnz9TpiIJ+PkhSRxJRmZrjay+nVLKr9mPq5E3zEGpEwyO/cxLansNKEUuzjBqskS4rS+P6tvjJpCGZnKvSijKsrPp+NHMbJE2b3DGNa5pZMP+31iaKMNVxa/QyshMmKjVARO9MWO/I+Ya9LdWKCh33Fk35OEkQkmaJimTimYKwsmShbLPViHrVyxism+4c9ULo25pgGuxo2carl7P1YstCNQQj+r6fGQAg8yyDLFWv9hLoraYU6wovCGIGi4trYacpv7XRZSi0sxyXwfcbKFpeX+gRJjikFO2sO872IZqhoBRkVSzBRsTg9Ueb/ubiKEAJpCE5OlHjUjpFGijQE
S52YlV6MYxp045yVXsyuhs3fOjnKSj+h4Uj+1bU1ulFO2TY4MVkiGZrm7koHbJeSZfHXjzW4ttKjd/8BdxPBvmrGPXuY3JFYeUSn1KBveYwY2l3l3UcJSkhWM5Nhz0PZNrgOYZqz2o+5IhTvjDpEnQ5G0KfWaJC+/X2O3VulFysqVY+hWokzM3Wch13MoEvw8D5RXKMUB/g79tLb2ICRCT4WQ9yxRimXPaatlChOKZ16U6sYVa7l9HdvQPZI+zieeRtVremZe/0exmYmYlPsMbiXAtMl7MaUHRt/ai/hkEHFNl5IQFsZlH5PC0M2h9OeelPfb9vEXEBhUvwNoyC0VwzP2+q8aHcnpEScfRuFAsUzjc+bQzpFGKA27aoGlldqMz1i2dsiObSbvZSkYUho2HjkeudrO+TdDly5qIms06L3xm9xrQf96jRPPIeT9TLRYJGvOFqE4l7/GHsD2soCy6aWR1yPXYL5gPvtBCeJaMcO5thRTq9cY1+2QWSVMOwUEac6/TOwnQII05wozSnZJq0A+hlUpOLDJ109d8xUKEvS72ZUbcHjFGoSPCnYq2LmjQrzQLahz08jhbKrydcxhW567kOSRSz3Usq2gWlI/Fj7OlYdk36cYQrJO/sqSGFgGn3Wg4wsz7gw3+N7e3Xt5MJ8j40g4Uk7Ymfd4eCIx+Ehm9bSMr04xXUFe8frHDYNLNNGpg6tMCXNFZYUZArmujHr/ZRekqNUznqQU3EU//l+izzPyQ2DUc/AlgandpS4uuyz5qc82AhphRn5wBTDNARxmvHxQo9enHGvGbLSj+lEOZlSXF7oYbXWULnCM7uM7JkhB94aMVmd81n1alzsG/QSxWi5wdV+QtWw6dxu8z+8bhNnCoFgxEgYVz5jpsByY/qNBusRHBx2yVsm796cQwhwuM0b75zBtEzeOjRJkOTYA9/MMBHUJ49xfPEzPM/BcR18v49lGlwbPkDXbfC47+OFK/TbNmLPBK7nIK5fQnklbYPleoAAeyAEeXhHC6TyHNpN8r/2t7QBwd3rWsQxsKkqnTqHc38NP1M40sB7dFPX7GwH9ZxBwdb9V6ujTr/59F4NA9TzYi4oTIq/YRSE9grhcw2csLUDFGfffkYWbNi2TjNuToMOAz1tmmd3hfnRU/Dxu4Mi9qCAHfiw+wDq4DGthrzxKWkQcMHaQdQzcSyTs2lOfOgU7vo88v5NMsvlQlimvRIxbzc4NtRArEe0w4yaLXCCLvlAtSeTiPOjJTqdLqWxPawqmzyB8bLF7SsLuocq6JCXK7THZrCTGE/FVDyJL03efdTBMASGgDjNUcDjdkSYaIIpLxnsqTtcWe6RIzCU4g+O7MXYSPANC78dc2DEoReY7G4L8n5EXi+T1IaJhCDKwcpy2kFGluU4lkGcKZphTpIrwlQRZznDJX37CGCqZvH27irSEJydLnOvGZBlKf1EkSlNwJlSXFvxWejGNPu6D61kSUquzd/+wQn+7M46yrL4eKHPifVbjHsWB/oJP/b2kuYKP1M0HEmW5wyXLMJuRMm1MaUgU4IoybEtST/OaPbh1nqAYxr85t4619d8mkHGlAJbKtb7GX6S8ZNHPQQKDEGzn2AYBkGSUbIky+2I8UQRSYuVIOPJkw7zGz6GKZlKSoRxyg5b0vQsjM4aTibZn/mEicNGkDLkmag4wleC427C8WQZb8zBf22ST1cCBAK8GfIooFIt0QtSlte7lKslKnlMuVSinyqiTG9eQmXQq4yCs8yhZBUpulBK+dTZTaNRZa73kPGohxts8J0kxQgOacJwHNTwKEzt1hMW1le1SbXrQeRpNWO/pydfv8CmSroes70HBGGMKzKMPAfL0nMEP/6Jrs29IMLaXhpQLxNzFSbF3ygKQnuVMCg4a1PfJS2TbzXB76NQqPPf/9wNpVwPPvvoaapjc9DmpjDk3T/Rqq1aQzt/9LpQrmm7nzzTjiInzhL1Q+LxhLJK6T5+zHsXb4GUOOMTzLbb
9JIN2uV91A4eZL5v0k0UR8c8jo86lK5fJPrJdRYpsWPfLjwpERvrVFYXMUIbKVzyygQrV69yqP2Am8YIcalKqVHj3JBPteLx8UpEML4TISQKhWMafDLfIYlTamWbXpwTxDmjFYsbqwFL3YjrywEVR2L2WoRBh3Mlix9XD2Cg+NGtFlUbWo2d7B81EL2cPFU8aUbUHEEvSjk749H0czaCmNV+jisFVVtybrrM27vrVGzJxfkehiHwTH3eL8z1aEcpphTsHnZJ0hwpDDzL0OnPPEflCmFAP0oR6FpgL1YsxgYiTvlkrkMrE3TsMs1On0W3T8mRGKlixDPpxjlVG8qWZLxiseanWFJgS4llCIY9SZzkqDDkclebI29GZuQ5w5aiK6GXCKQBa36CUII4UzRsgcoNGp4JKqcqBDJJsCxBJWmztCKIlYGYnsIqpWTCYGxtkdRvEosqD3yXPY0MA/hwrksgLGzT5vRUhUxUMc9+h4Ztc3qHJExyGnaJT9fm6AUpDzObuw8DjOYDDlsRJ+uC0utncaRBt9uHOOVTc5y7O4agNsTRPSOcH3PwWjlhnHFUXuN45y5e2CVeipCfXUBaphaGeGVNVPuOaOech7d1PS0IdCqyMQTDE/DkoRY3GcZTm6owwIgCKqZERSnMP9SZEilh5z5EtfJMhPWiuthLxVxfIPAq6mtfHwWhvUpwPd3cfO0SYOi+mSzTu8BBnUwNbsCtm+D5vjV4uitMB2rI9gbMP9TCjs6GJkuvBEde25rU6+Qpdr9DP1daCGI7lJcf4vc69IYmuDZ8gLmkhOjA0R0lTk6WqdgSOhsEi3P8Yw7TU5LKE5u/P9nBSyKCRBGaLkt9xXzSJ8kEZuRC3sMlZ+eRPTi9FnYScn7cIdw7gmOZXFro0/JjxNoKTp6xviFZdxu4luRBM2KibLKW5/SSjDTLMFP4LK2wuJyRxQFKSuyoz2QeE6502Lf/EIemTVp+wmp/XU/IAUwhGa8YPG5FWIYgzhWvT5b4wd4GFdekF2UkuWLIMwmSnI0gJcpy6q5EGoLpqo1rGry5s7KVPtvbcJhvx9RsgWdJZqcr9OKMTpCS5VoSnwoD0zRphTGrSuJnQKI4s6PMuekKd5shtjQI04y9Qy4Nx8S2DGxD8N7jDrfXfDZWm/zFUk5HmayPVhmv2Pz23hr3rtxAbWSUchsxNK6vozxlPVK4QtfnDo255Dms9FN2VrWgwhQZ1+8uooRNlZQ4Sdg9WuFEFUTU57afcCRdpOVM0xgtcXGxx8W5HjtqNnl1CsozmK6DtRyR5gHXlwNMCcfHy5x98xQb7T7RespiKyBPcz7LPBaCPlOVNc7sG+N/XOzg9y02Wn1mlI8Uim7bI6wJztVNQqeKM/Fb8Mc+F560ieIyzvXHzB7agfQ8+I3f02NkqnXdZrL0BCwXul0tdJreq71MNx1zZnbD1G6ElNrSbeGxFlpJqf9uWQOlo3gmwnqR8QHw9J58LqX4MoHX1zFQKEjvKQpCe9UQhTolWB2CkTGd75KmJinLhudvpuddDLYJQ1RrQ6dMhka0VNmrQvBAR2ppDM01VKkMVy8hL/wls60mQWMce2yCS5Xd+LmBU69BX5JgcLJu0G64nKhCzRKoNEH9l3/H/GKL5dIkjsxYFor52ODg2CT22ir32in3c4/EkTiGQd8pUzVyknIdIwzIDr9GLvTEYJGLLSFIa73NXJ7QziTrqULZGSCZqJjsaTjcb0bUHYkjFEYqMNOEFBvHsegHEWGqeCJsbJVyd7mD9EqArlPp+pzAlNAOtH3W/lGXS/N9Hm/E/NvrTf7Wa6N4loFlCDaClOpg1IsUsNxLODTi8vqOCp5pcGmhT5BmPNyI6MUpAFN1hx1Vi/fnujxqxWR5zlo/pmxLbCkIx3Zi+imqHTEsJWmucCzBvWaEYxqkuWK+k5DlUHNN3typ052npyrcXuoyIRMeK5tKlpGnObkS2FmCTDNKZZs9fszB3SVy1+M/3lbE
7RjPNNg77PDGTA17ME0gSHIQYGQZ46tPeNTxUY7HfVNPzV7qCQ6qMnPlEmFuIEd34GXgDfr1/DgnyxVRzaJimyx2Yx41Q9pxhhQwU09JMBgbrVP3W8w3cwIluN+NWANu31pmpFFmqZviVeq0OwlD/Q6VjYdU736IZ3UwRico7d4P0qT96CGdsEw9jgi9CoHlUnlyH9IEUa2jjrwOzT/Rkdnaslb61hp6CCjoOXmffqCbsas18t//29odZGqXznhEkb435h7ox+/chxooIsVzrTGbUy7U7atbUyyM029+NeL5CgYKxeTrz6MgtFcIeacN929At60Vh3sO6r6a8Sn9gIFcX0ip/7tN2bg1z4mnu0Jh2ajJabh5We9MH9/VZOa6+nWzVP/7wW1IYqQhqCQ+hF1mh3sEZgfP6sL+A1juDE1Zwll4hLvRQ7kOamo3WRxjjo4iQ4PYLSOlgTAC2v2IdPdRVDKMl1iEYQY7JmnkMTNmjGotky2lXGquYu/bR5yniFzhkTK7ZxjDsUm6PXKl6IoaQ6aucx0dK/H2rhrvWS3s5gpZnmE7BtnkNF5qMFN3uJPm7LJzOjlUTcGVjYzZsqAT5ZzeUcY0DO6s+9xdi3TK0tAzxAwhGKtaBHHGRpAyWrZQQJrnbASKIM64sx7SDlOWLIPZ6SpxpmtAhhD0ogxTClzLYK4VE0Q5tmngWjokrNgmh8Y8Hm6EPO6k1Coe00owVjZZ9RPW/IxuGHN03OXIaIlbqz53m9qn8sREibprIoXAtC2UJYnCHM8UKENyfNxjdKjMI0fiBzGOI3kcCXo93RA+U7NZ6iU8bMYMeT7f2VUD4OqKz0aQ8Olij0bPxs0UY0qRZopunDHkWTyqTzM8pHA8l4ftiMftGEMopBQsdGIcExZ7BnOdGD/JWQ8SHGmQpDl5rlsARK/H7Op1jqmUubJiKahiui5pmkMUIQ1BZkhGzIw/aH1KI2pTibpIt6JrX36XOMm4YE1wzx7GTAKO5l2yh/eITUjKQ7hBH3nxx3padWNYjzEyDJ3laK5qC7dwYI81s0dvHNtNGB7TA2w3W2GGRwfG3LYWlpgmnP+evrme20CqPHtmKro6dOKrNVd/FQOFYvL151AQ2iuCrWbph3f1aAwFjO2A9RVEqaQbqPPsaWrE0YIPw9XuFNy9rsUk23Zyhm2TvfNb0Fwja4wSxAne8BhSCE1qlRqiXNHPMcRg0rQE28Oc2EGlVoed+8huXyVvCh4mLhKFKyucj9dgxuSCOUEQJhzP1yhVTHqGw5+VDmBkKbv8Zei3GDdgZHiC/+HcXqxwGH91lYtXBHdVCS9I6DzpkArJcG+VnTKmu3Gfzo79rLgNEiXwE8ley8BxLc5MVfBsyVsjJjsrPaplhzvtBN80Odjw2D/kIgTcEeOsbwQEtkU3yPhssc+xcY80131trqlVgkGS88bOMjurDu/NdfHjDFMY1BxJL8rYCBKuLPn4Sc6d9RBD5ISplub/5zsb/PbBIRxpEKQZJdsgTHLqjtQtAa5kqZswjIkQup8MQCEo2wauKZlsOLy+o8yV5T4/ftjFMQUPN2L2NVxur0ekuRqoFbVCteJIjk+UWS7NsH+jz5HJKqkSnJqs4Dgm579zmqDnE5s2FxYDKrbBmp/hmtCNcnY2BLfWQo6PlwiTnGsrPmGSsdiOmTByfNMjSyNqZs6DVsrDjYiSVFiWZKauaIcZv7m/TtNP8eOcNM+QhsCRsOCnhIn21NxdNylXTBxD8fEHn3IuW0UuPMQ9fIq5DhhSEKQ5ez3F2HCF7ymLxHSxxi1m4hHkkg/dGFQP4oDMsPjLZIi/LM1gpxFDQhI6JS41dvOEEjtXIjzTYdbLkeWK3ryVK9oh5Lu/radHCDGI3BY1kdWGtizieP08amlBz2Bzy/oxWa5TmJtpv03HkO02WP2enuT+Rff1S5TKX2agUEy+/jwKQntVEAZaajw0qkdZ+F1trROGqCTZKl5v2vXw4A5c/ghV
rT8jBPlc8Xr+EVlzjQu+S1QbxfnuD5kt+1AbJbj2CZ7vI4+chL/y32hD4loNVpf162QZ/MX/QbCySq+yHzF+AEMYdObnCYwmOLeJhndQiXrsdSrsObKf275gqZ+hVE6UZIggJM1yPEBlBxBSYo6MYjgLeEHKemqxESpsI6HtC6aGba61IzbsiFRIRkVCVwgSISgLgWcaxGnOv77vE/Qr0E2Z8SxGqy5BqhWHT9oxcaaQlkXdM6m6JnXbIM4VUujm6KNjJd3gnINtClaDjIMjHp0oxUBwcUFHvPfXI+a7EcOeSZanZErQDrX4Y74b88GTHm/urLARpBiAn+gUnCVDMgVjZZOpqoVCMOyZHB0rYRqCe+sBYZJRKUlGS5YeSr3lNqJ0I/RgzppSkGRqq53h9I4y76c5y77NnWbMyYnSFlkKKcm9EjeW+sx1IuJMMV42ma7atMIuWa4QIueThQ5RELHYiQGIcsW9zGK/HTNaNrDrHpORohP1sLOcIDXIazY5iivLfQ5WDMqWQT/OWfUTumFCP1ZYpmC0ZDJdt6naEivP8MOEXnudbK3Fe/kSF7w9iGqNEUeya7rCjWaMJQ3+ypEJ4p6D2Pt/I51/QHD7Dp7nIO9co5cZ3CntpmfWEVlKVXXpJ00acUhgVzGFnnMX2ILK2CSEh7VDP2JQg8516q61BpM7tSLyje/rdOOm5+l7f6xFI93WU8FIqaI3k5a99fU8UxcrV1CbQz7L1WdqaCrL9DTrl6Qjv8xA4SuR3q8ZCkL7FcPL+sxwPb0bHJ3UNj/mtPZbzDJNWHMPdN5/aU6PwTDE0zExoCX6/Z5eFAc3n+r3tGhj33GipEKpVsO/d4ueE3CjeYWoPopDxuybx+GP/xeC+Xk8UuTZd/TYmH4PVhaw3QqLucNcbOLUaxw2fUo7JlCdFk7q4jtlvDRkpmKyrCQLvYDMkAgBe7INTNPm3XiIf3nhCa4l+MMTIzh79xMvdpG5gQxThm0BtuKw6HFXlljPJFm5it/f4ICT4HSbRNYYf/6gzYnxEmEK5R2T3F3pEXkOi8sB1iBSmmvHjJUtpqsG03WHxW7Ccj9lxU85PFpCKcW+YZeyJUHAx/M9TEMMZqAluKbBo3bM7obNwVGXy8tdHoUpjil5fUeJYfRjS7aBn6a897hDqtian9aLUqQwCDOFbQgWuwmWKVnpJyS5QqAHgO4Z8ojjkF6UIYXBwWGXVpAyWbFZ7CVIQ5Bk+liHPJMPnnTphCm5AtsEz5K0w4w4y7eazK+u+HSjjLlOxJExl9V+SklKHnUiHGmQK8XeYQcePWBIJTT6Ln5tmN11C9Nw2TVmcm7/KO8v9GlIwUqmMKQkTBXNfkzFtdjVWeRM0qeUlRgd3cXFxYxulBGkGUoISpZNw5U6BRllOKFFL67yeOwtFjOLtVjidgJUo0qqDBxTbxI20c0MrjBOYnWxooRjR8+RBQHmvR4jaY9QGTi2ZCVyWOkr3NZj0n278PIIb+cekPv0ZIhc6bThH/97TSqtdR2VuZ7Ognz0F+Sup9ONU7v1RnJ6FzyIdNpRDHwiDambprfVsDbvY2XZeqAun5+Fpvo9nTlBQBqT7z2EMbjXvyo5falr0K8ZCkL7BeGrqJHyOEZ9/BOdTrQdAD0Z13YQr5/HOP0m6tCJQV7+ht5RSgMcVxu1livkkzP6pitX9CBNyyG0PNxjZzA+eU9LmK9+TH7irC5Wry7hLc3j2NP4vQ5O0AUrILImKOUZfi7ovf+X3Og7RIzgJCGz7/2xTks6HvS7BPUJErfO6OQYGIIjpgNRSOBUOLN4i3h5Ec9QyFHJG6ff4siwzSePW+S1/cynkrqKyVKD4ZrHeqdPJ8o4Nl5hPcxpOII//+wxodKOGEPHjqPaggePugRxhkoNyiVJlGT0goQHGyGm1G7trYUlzERxxm2z1tiNYQhsaRClOUmmcC3BqYkytgzwLPiv9zr4cUaQKsI0p+FanNlR5kk7Jhg462d5Tq4M
DBRz7Zh+nOGYkmHPQClFkCjOTle4serjxzmP4xhpwIlJLTppBSmONNg37GJLg26c8qgVI9CzVMM0xzK06GOhG9PtBSAUnimZrtlbfXfvPe4yU7cJYsXvHxkmHsxQW+7FBEnOREXX9+quSZjm/ORRhyTX0emhYZdelPHpgo9rSSoNg511mxMTJTphxpmG4NajhMB0KImUQOWs+hljZckjX3AsykkyeG26xpS/Sj+KWLFsurZFTUKZlGrF40jPZ6hmcL+lySvLFUrpn7vrEet+wpAn+UzVsC0ToaBMH9syqMuUmZKBIw0uL/bJcsVHjzeI+j26ccajVsT+nfu4vtqnN1KinEYcnvsJYzLCVBne1E4q821armB24SZ2bOL1msjbXfjsQ6jXwTDhO78Jf/kjndXYaOrN4JMHZI/uak/IqRnk+ATs0tZV+D1NYqDVkuPjsLaEMk1y3ye0S7iGwrhyQaf+t9x1JLzIKivN/v/s/VmsZVd63wn+1l57PPM5d4w7xBzBmBgkgwySSTJTqVJalq0uy1PJcMGqQvmtX9xGAwZswK8GDPvNDwb6QbAbhe5y27C7q6vdRquktKRUMkkGxxgZ83Dn8Yx73nutflj3XkaQEZwzRWXGB5AR98Y++9xh7fWt7/v+g5nLlTn8f/8D6sgJI4TwFODxteJpQvsFxBehkXRZGjTUpXf3lLxpj4NlIVqdvTahqNb2Bsrq7Hn0ez81CermFXRZoIcD8wDlGVTrlCee590+ZKsxbpHyotLY9g5gpL9tvh7HReYZ5+PbxF5AMOhCs4knM6KsxKsEEASkMqWSx0TSJe4n1ERuvKgOHKZ87UesP/DZ6hcIofnw+BGqWpPlOV415vxYhnQciEOsOMT6+BrplsB3BbMeHBcxvazCMMmRQlB1LD5ai9gIC5a2UzoUWJ5NkZe8t1VwZrbFRysxaIVdCg7KlMKRLPgOgSPJS/jtOZ9sNOQuNZK0YKMfU0rJ8jBnkORo4OiYz3jVoT7I+Gg1BC3wpEU3znjQy3igU/Y3Xfa3XHMOWA8RQFKUHGgZSL5AcLebsDjIDEBEw5nJCgfbHmhoBw5XN2KGqeLURIXTU5U95GNaKmqOZL5pKua6J7mznRLnJVthgefnVF2JQHB6ysy0LGEI5aXSlAoc2yA/0ZApxWZUoLVGjDRjVYcHvZQwK3F2gCOlUvzRnT6FUkgBv3msTVZqbGmqvZZv02wFnG/D2mjEDauO7zqosDREciG4vhmzOEgBwcFDh6jmKUUicDPFoZbLK0PNhc2Sy2mDpXhEnJUIremQo0qN04uxJifJSs1yP6eXaAK3CkoxG8AsCXMVi/ZsnWcmqlzfiLEs+HCxx0zFJOm1Uc4wLYkyxWlREpaK1PbxENSFwpqYIFldoJkOaU5PIY8fN2CQB7dgc41yNGDkN+CP/5DaaIiMQ/Mw5hlls8MFb4Z0FOGtx5zP7yPrTfjtv2MAKHduwuId0+0YDEA6lMB72yVxMURkKd9LUlwpzbO7dA/y/LNcUT8AtHGpcGxz76Mn96gyTyuvrx5PE9ovIj4HjbSb7HS/Cw/uGAJoFBp7Fs9/4sBX5EYnED9AX79krlfKoK82VmBtifjwGTJlSL1hIYmXl6jFffO+p180D2Q4gqCK7G9TW103s4RGnfMnDxA7HoFtIfwA55WzbF+/Rn1jgWBtR2ncsuDQCax9BykfrKHR2EIQ5qBtSWvzHtFwRNzrEXTGiP0GXqG41Ne8n1YoRjn77YxfO9jkb4cD3qxIcqV5e2mIJcwGvDmS3N22WImh4thkls2l9QjPEehUMDE9Qbtlo12b1fWU+92YXlxg4fHDqkM7HfAHWZUUQTcq2QpzbClwpNE/LDWcnqrQTwoqjqSb5Agh2AxzSqW5vmnUNi6uhqyPCva3HKJMgxYsDwomqjaH2t4eOMOXZk7WDCSL/ZzAkZycCDgzaeZYckfP8vxcje0o5z9d2SRX4EnBK6fGUBosIUiLkqBaI48j
fNvambsJfNsiKRTzLaOH6EjBxxsxL+yrsr/hsdRLsYSFJYxK/52tlJMTFW5vR/T6EROBRZRZtCsu97ZTtuKCqarLuZkqWakJHAtpCfTzLxNsD9m41KdISiwEU1WHdsXGEoJTkxU2RjnP7atxe9th3i9BC16dr9GPn6d3v8vqem5QjlJQt2FOZ8Q4CKWQZcl8y6UXF2wmEt+CRtXnb52bY8ZVZK5PxXPYDnPWoxwBeLlg0jOvmarZHGm73Pr4Ab07KYuFS16fo65T/IrkXLSEnGvgphGx0yJYXkQu3YOyoHQ83pYzXKseBMvj5P4Or6x8iJydh337iYVDup5Q8Wyiyjjx8TnqRQp5hq63KcuSWFkE1TqyVof5wyRnv0e8lrI0yBglOWrk8X1viNTatP1rjT2u6O6zL/IMvW/e0Ae0+mSvaLYNBedTQsZP44vjaUL7RcTnoZF2lfG3N8x/HQFHTyDOf3+PGP2wdNUjp7tdVW8wlVyva3zJMLJYgW0qjigt8NKQYHwCLt8zM7j/9X+G3/k9M0e49bGBKtsSpA/9LrLMqbVaiONnUEEVaznCOngcdE45OkhcKoJmE3n2PLZts6/uoDUINHXXJlA5cabw5+Zxpz0uTJwirzYptzUD5TJpZajAYsq1icKI97Iq765FoEomKjZHxwKDKLQkR4/O4W8OySyPQguWugm+I5mqu/z2Mx2qruTC8ohjYx7XNiK2YsW9bkx8+BDSLlkSmn6qiLIS1zayRqXSO0hGQZxrGr6NZ1vMNR0KpbixkdLYIUgfbHl8tBriSri4GtP0JO3AYV/d4VDbQwjFvW5mPE6lxHOMvuP+luCZscDA8h8T7y6FLA5yqq5FsZOoAtugLBuew49OTbO0ss7FtZD/dHULEDwz7vPiTI3np6tcWB7R9CVxrnhrwSAgPWlR9yVxoXGloOHbqLLkQH8Ztguqlk3sTOBlFgfaPt8/0DBwf0vgPrQbCCmxazUmazGrw5xmIAkcyWvzDd5fCbm8FlEoxfVNm5fnDD1hV3dxlBXcC00lm5UKEExWXWaLEf08IxWS6VbA9w80+Q9XNtlX12gFv3Gkwf6xKgD5Dnjmw7WQojTKKlah9lRiTk1UKJKE017C/oak2MpZbU/TLWB2zKE+vAH1Ku8sbJEWMU7gc3ryILU8JJ46zPDmOhYaBl1GMifWUNMFfHyR4MxLeEITjc3iRT2CNERbAq5+SHn3BhdUi1RO4h0+zflGgf3S61SEhcgGjJKcam8DjSJefEBtag62VmBy5hFN1b1nuNk2tBtVwNHTiFPPm89/Ssj4aVL7cvE0of0C4nFoJF2WqNHAJCuB4bzMH4axScSzL+3pMn5Guuph5YEzL5qevu2YZFlvwDNnjUlhtY5dr/NSVRO+/w5B2EMu3DHJLMsN5+aDn8Irvw4Hj8MHP4O3/8S0K7Nsx3K+gZCSRAkKpWk3AoZehT8dO02Ua1qTEzwvfXwp8G3JRNWIt75xoIEjNMP+HchCQr/D0KvTcCSXV0OyxhTbYcZkzSMYD1BNi2yrwNsqCfOctNCcngi4tpFgWyULdxY5aGVIV3Jy6jT3ewkaQ12QAn62OGRhR8sRBAJNWCjefBDjSgOBb/mStm8jLSiU5mDL45XZ+l7rDzTzDZe5pmc2d2u4N7uqOhLfthirOqSF4kDLI8oL7ndNK6/i2JycDAgzoxLiSsvM1qTF7W7C9c0E0JwYD/YUVOJc4dpQcS2WBxntwOZeL+WlmRqDtKQd2Pi2qZaiXCGEQGvNdlyAgGZg0/Rs0kKBFjsbvWSy7nCkE2AJeHaqyhv7JW/d3iDJc1YKh9N+ysuTLgcmmoxVnb1ktgsaQUDNNYm85kmOdnyyQtPwJRXXotRwvOPz7tKIiiv4eDPm2akqNU+yGRqPuZVhDsBE1UUgcGyBRrDiT7LST1CWpLsUYtuSI2MBx8YD+knJc9OmcnlrYcgwLZEW3NqKsS3BZlgw3rDpJiV1z+bZ6Sq69Cn6
gsv9kg/TgK3tjFbg4FQ8SD2i/oBUOPiB4HLmMlyNaY6Nc27tInW/iso12qtgOymuMwXKgjxFrtzjvFLEjSZBs4oscyiAxevEdkBquQZANT5FcmKeupRYH73Dq1GM6ku0ZRNICFbugUpBOnD0FGJi6jMoxt25OHwCGtHh6LNCxk/bj18qnia0X1A8IlS602ZMPdf4Mj3/PUAYVftPy+N8ql25pzww6JrqqtYwrUcB4uz5T+SudhKnFfap3b1i2oOWBe0J4zhdqZqh+K5jdb0Jh47B+gplURBHOUFeIhwXP41wtCK8c4syy3lLj6OkYNDzGP7sCvWjR5hvujjSp1CGZ9TwJB+OPUMRxdwbZiRbMYVOCByLczMNtqKcvDSy7x+HmqouaPiSYWwU7i+tR+Slpm2V7LcyTncsWnnIW6sDVkY5vR1Awf/l3TVKpQlcyeGOSze2WQ8zlBZUXYEUFlVHcrjt8/JcHYCLqxGWBW8vjkw15Ev+650hTV/S9Gx+98wYz+0zM6vxqoO0BKcmKvTTgoptZl55CY40Ooxro5z9LY+qY3NjKyYvTev1xETAO4sjQ+HT8NFqyFZcMF5xOD9bo+rYnJ2uULUl52ZNy++thRGWBZ60mJw0LcC6KylLxdooR2vNlbWIV+brnJup0o0LGp7ko9XIcN0cG9sSVBzJeNUxSh+2Q6e/xnJRoR/m1A97rEcFyyNDcD43U+XdpRFXNyLUjn/b9w80cG2L7x9sYglrL7m7UvD2esRWVNBNNPtqLqXWXFgcERcld7dTcqVo+jZTNZu00ER5yWaYYeuS+wikFni2BVpjCYs73QQQXNuMOT7mc3FlhGMbzt56mKMAYWkOdarESQLa0DPeX4/ZbBzl43hEs2pRxJqpukNSWiSnXqSSRnjcoJuUZLGFGxSEwqLbHfJS/wYn+iEfjj+D9gLe9Jq81r+HWw2gKJH9LjWxQ5qeP2z4mCsLBEWMZ+VEwyGevY5/cQl1/Az0u7jbG3w/iogth6BIkeEANoV55jzfPPs7bcS9Z3v3ed9V34en/LJvEE8T2p9H7CQpq92G4RBLlUZp4HEoyMcpD1y/ZMAjSWLQWj/8bURpPMaEW3s0cUbhDiHaMvpzf/lvwsULphITO1qQu0agh56hnJjhwv0eaS5wFvucfvNPqTkWL2lNvHqfLoI30zks36dAYOcpZZqSSxeyhMXY5Mfb2wlJVuJ211mKNZYtKapNpusuo6xgdZiRlort2GK2t0RZFMxW6uRBg1MNi8ubIWEuqDrwrCcZK0JGts/VvkJrQAiqnk03LvAdi9Wh2Zyn6jbn56rc6aZIIXAswaG2h+9Irm7EKKWxLJOILq2adlZaapLcAD3CrODHd3qsjHJAcHIi4NX5Oq/M1/f0GONCUZaaKxsRl9dMErAtaPuA1nQqZiOXQlD3JGoAaanYCnM822IjLDgzVdnzcmv5EcUOXB+hCRxTwUWZISW/Ml83bc+1kHZg7j1KS65txMbcdCcpZaXm9f3ikVlY4Fj4lMSdSU4JwSkvwWpJPhqYa+JcsRnmbEQ5SmvWRjmjtAA0PzzUwrUtXj9Q31PPz0oNaALHKKuYhWa+v6orOTxmZnvsWNQ0PIkUEC6tM2tlFHhk7XEqrk0rcDg+5jPMSiYqDkmheGdxxNrIzDlbvs1EdacdasGBToU0gvOzJpEP0pyLazErgwJQjFUcHMsISFc8Bxm0eOHV5/mjGxtsrmf86aiPW+TgTlBtNznJHfTkDPetFukOqvj7tRQ5fwgOO0alP88NkjhNDGBj3wFOFhoe3KK2eQe5PoJ71yEvYHsDOX+YWnsM8hQG26YTooHrl1C2bQAfWhvqjCqf7JrxlF/2teJpQvsFxGcg+36Ath3K7U207Ziq7Al8kscqD6SJceBVudFgvHgBfeKs8TF7iP+iL14wwsNKw9g0NFtQb1JurhMPRwS1KnLugElmfgVWFohrbVIE/vgEl3slo9tbNOoB5zsWNZ3gWg77
1YCBAmmVWE6FhRDmBndJspwOAlU7zINeilQF+SChBLxcEztVSuUySEuanuRWt2C5n3B7aDET+LTjAp2sca+ruB77NFoNhqngb50/g22XCBzklW2kZYATroRcgc7MbOXcbMDNzZQo1zw/XeXMVBUpBKXWvLM4ohXYRJlpVa7tJKzTkxXeXRliCbizne6QnA0PTIBB0qW5sTPxA5QQXFs3iaRUmtm6S92TfLQyoulJHGkRZYqKI6l5klfn6wYOHxf8l5vdvdYh2hiUNgOjw7ibLN9dGu1pQ1ZcSYa5bqru0t6hD6DN97RrqxLnRtR4lzz98CxMWoLzBztEPYugSJCej2pU8cLYGIZagpvbMYv9lNVhyiDVUHW4vZ3ywr4CKQSX103C3U2cFhZNzxC+p2s2YV7iWDsamLaRCruyHrFrr3eyYTEvM061LLxuQrdqUa/7nNtX5YOVkI0wZyMsONR2cW2YbbpEmeLYuIcUxsH7zGQFz7NRGfzHq9vMNhxubaWEacGMCvF0wV/xCvadep564O61UX+2HHJzqBHSYmxynCJNGVXPkGwukTHgrj/BgqhQVQXl2DTxpEPt3MuIu9dNZ6RSNa39JKa8fpl3ry+RCBuv9Dkf7fA6i9xwQe9YMDZpnuPRwAh8tzoGsDXoGgfr7obhuyHQz71srlPafP7TrhkPHUz3OG073mtP4/HxNKF9w/giftkT1bcBLQxa7oviM8oDp16A7XUYaJiYMm2RJEZf+IlZ9Ko0X9e9mwZokmcwM484e55ibZUL6ympU8fbyjh/oomcmgNLkK2tMOjMIpeG9MIMLIdm1SPJcmKnSW3/Mdyy4O+4FVaSkkkVEVY9nEAT9FLudDMyVbAWLtK3q4z5FgLFBDldXGqOoOFLrqyGJAWEeUHTt8mRbCSKTSlRhUWITV5obF2QYZMqgajWqBQKV1p0Apt9dZffOtLi462I29spa8OMK6sxvbQkzErWw5xz+4wM1tsLQxYHKYuDjGfGTevHsYx1yqX1kEFccrDj0/Ft3jjQ4OZWwuooRyOoOeBffY8iS4ltH3Xqhb1EEmYlri24tGooBjXPZr7pcnqysteqBAMzr7mSM1NVhllJ3ZV7yQfYm1eVygghK6WJi5JC6UeuMQ7QAzSaaxvxXhLxpLXnhr1rfLq7oZuPJfVz5/fWqS0l5+eMA0BWKP7TtS082wBdOhUDaFFK8cFySKYU97ZTnp2u0EsK4kLx+oE6CsWtrYQPVyNWRgUnJwKen66CMDMw2xI8GKRshTnLQ4ejpcvZZMjNooFVWmz1Uk5NFCZZTVXoJyWnJyr8bx93yZTGtQVoy7QbteBEx+PBMMEC+knBfNM1rd80pQwLAtdiPu8SZCEWOdoPGOWa29vmgLMdm0PUUiK4V45htzs4z57noC2wQkG8ukLueXhNsBpNymfPMxqE4PvUbQdLZiRZQebUqRYp4fR+4s3r1IpoR7E/N/zP0y8Y6yXHMWRthEExzh0yv8jetkEvJjFcfh9OPmeezyg0CTBJ0OFoj56zh4JOYmNbMz4N9Sb6L/32l9+kfoXiaUL7BvGl1K4fB9nHEKbtqWnE2upXGvoKKRHnvmeUC25cMn2dtWXYWjf6c54Pz75kuC3h0Hg72Q5lmhL3BpQ3rxOHMbYaEheK+H//z9Q8i+zsq/y7xvPEGwpv5gy/PS+pb6YkgIcm8BWUOaUt+ah9jGzhARtegxfSTVwBa7kFWcozTspKWqGtNUkhsf0adZHg2S6HDjQptGChJzg24XJzQ9PwJOu1OqlW2J7HTLjJaqbRlmQl0kzV4F4/ZabpkZWag20PS/gorWlXHLyeRADzbZ9ACuKyZHWUE2WK/+sH6/zdsxOPbJpH2j43txPaviSNSuoVG1cac8xWy6FTcXil4nB6skKpNVYcUa6kfFA0SIYZzmIfx/f3qpHjYz5Zqam4plKSQjBeNW2kUVruJZfd1uHDCefTEefGeXszKgizkj++ucELY59cm5Xa
eK/tVGXP7zMV6K5z94WdmeBuJbULePGkxfm5mtEwhEcSX2n6t1gCqq5kpuEa+S8pKLVisW+kvRYGKfsaDjXX4nv7G7w0U2eUKSxLIIUgzNRei7PqWCz0M+52TXvNkYLxw3MUYw4sxAhh7fDoDNUhLTR1V3JlIyZXRrA5yUour4WkpSawIXtwiynf5sZAs+11+OO7OTMNhyBwCKWA/ibv9UOe3fjfCGbnSdwKwyPPAprpuks7kHxvvs6f3OkxiAu0ZRFLl1aZcLBV5Q4zuF7G+80mLxaKd+9vc7WnEFbCyYmAV2YqBL6L202JLAfPlQTT+yDwjYLIg1tGOWRt2YC7bAfqLdh/BJbumypuc80okGxvQHPMPLvHTyMqNaP3eP8WrC2hb1xG78pg7XogCgGL93fEFGzU937w1TarX5F4mtC+SXwZtevPcapVUfjEoe8TJbAALr+HyFJ0qwOHdhCKl941II/hAL25bjTpKnUYDiiFxYVtRfruNWSY86BzkKTUBMmI19VtUJruYESs2tRVynCzjxps8IqjiF/9TYJ6FXHjEpFfo4hiMi0IXItoaYGYHhRNHFViW5q4LLGlYrZiEeUlst1Guh2kkLw630Ragu2oIEwVh8d8JqvODlJQIhyH23mLkcpxA4kjBPsaHnn5yQa8C2sPbFPlnJ+tcXU9Is5K1naqs82wMHOsUhHmBm2YFoqmZzNedbizHXHx8l0jlItNe3Icz5a8Ol/bq2wArm3ERhE+qaHzjJpvE1o2J9oeVeeTKqvtO/i2BVrw+gEDPHk4uZyfq+0ltd3XfLqaAjPXE0CYlVRdCdokud3XBI5RztitynbRiAD9uKCfFjR9SVp84s22m/x275MVaqfKg4ojOTdT5eREwCApWBkZxRLPtnhltsb/7eIG97oJUV6SFBppwdWNaA/VOFFxWBtlJKWm6hoKxCgrOdD0eGbMpxvlxHlpEpe0mRhvciqVDJKC1VHOje0YtOCFmSrSEry/PKLu2WxFGethgbRgaZBRl4JubvP9SsCM6KN8wd2hUXspNfidDnYRc1m2GI4WWevWKRCQb+H4LtNVSdOCZ5o27w832IrMbHVs/S4vq3W6dhXdOY4jFNHWBlurBYOBhVAONMcY9kviSZ/a8y/zchQxKKC8eIHRyhI1nSOn58xz6fmG/hKFsHBnxxXjnklucmd+dviEUSRptsB2EJYRCVfPvmSQzJ4Pw/4nVdrO/kG/Z9RG5OO37Ke+aCaeJrRvEl8CjfR5TrVeNWAUxo91qt2t/HaHxrsSWLtCw3g+9Lo71i8GMcaurndZGBVx14egQuxWSYN5Kjc/Yqt0aOUFLatAxSOy0SauLmjXOwSFZujXCOI+7WqJXFmkduN9yvYE7yQVsqHCcSu4FZ94ch4vjqEzTX5jiVaZETsdVHWMSeUTFyXP1jTlVIPr2ylFWfLe3S1eP9xhf8tjlJU0XEmYl8ZZA/ClcWD2bIu8VASuacMJDDx/M8x5brpCqdlrsf3Z/aEx45QWVVdzbrbCn9weYlkwyhQfb0a8sb+59xppCU7WBB8VGtuxyRPNgYpF4biMspJMGRSh0W002ovR/GGsIiO0XRaGBULGBLbcS1S77bvd+4/S8rHJZDdKpZ+Y8F4/0MBAVg1yM3AebTt++r1273d5PWKhn7HQN/QAR4rPtCSzQvFHt3vc7SbUPdtIaZWaV+frrI0yFq6bRKJ0wfGOD2jT4tvRkhykageZar6Wl2ZrxEVJVmiU1vzJvT73uimgyUtFoTWOtJioOvzgYANpCc5MVgjzEncjYnlgYP55qfm1Qw0CW7Kv7tBPCmxhlrMnBVXfxtUWZAnCtVhNTCJbD3MmKsa1PUsSBBGWsBmUAt+RuJ7LdNXhhd4txnWM3lQckx5jrQB3sMWBdBPRrNLu91gapSR+HT+LeZUuNdlCbfZha4OqNcJLp+Hcq+AHXPnjt7m2mkE6yUl7xCuWjRz0jTGu6xqS9NryjvbqPlhZMO3/
cAj7D5vnOA6NdNzunuEHsLlqqjDLQtsO6pVf+wQgEo7Q1doeJceq1aHb+8x+8avOW3ua0L5BfFm168cBPoSUiKACW9uPEqbh0cqvt22u35XAApPkrn1kUI6ua/rwY1M7igR1c4orS3jrx1AWBLaLt89jpCxW2rPEgyHrtuS0s04wcQiUwnUs/ruFP2OlPc++/jKuv8+cCMcmSZKcbOowFdcmFjYvTNexdBU/dtHDPtKCH1sHTduwqDA/P8UoLbjtOUyXcGsrQSQh95Y0N+6tYI9PEbg2HyyNyBU0fMnMmKRW9bGKHFcWphsz7mMLyfnZKv/u0ib9pCBwLP6H5ycB+Mm9Adc3I0ZZSVKaxPfuYshs0zXOHr7FlbWYrIS/dKS1lwBkJcB1BBQF0rJ5ZyPDIuPyesjpiQpbcc7pqU+0F1u+w7lDLbpxgZAR1R0e2W6ierjygs9WUrstwd2Ic/XEhPcwqvDIfIfRzu9/Nz79Xrv3K3baqttRTloqLq1F2Jbg+enq3vU/vT/kbjehn5YorelUTMVm2oSSHQUtDEQRpCXZ13TpZyWdwEFpoz5Sc839slIjMG3Gy2shaaGICs1EIPdAIb5t8fJcnRJ4e2HIMCupOhYgGGUFvbjkehHhSMH3DzboxkYnseHZbMc5k3WHooSgOcHk/hb7qoKFa10sy3itHahYVIcpzxyu8f9Z0VxrHiLKFUGrikbQECXjOkYEFTb6ETkWkzrljxjnlmtTj2L+Gw9max6uiCkcSbm1wUvRHQ4OQvzxcZoUyFEFkpg4U4xKC8uSUKQMgyqxtqidfM7MzYZDuPq+SWobqxCPzIHT8+HMS8aUVxUQ55CvGBi/2zHPfKMFndC0JG9eQdtyDyAiGk30i689tNc8tHV/RV+0X+Zq7mlC+4bxddWudVmSXvgpenPjM6cq7bhGm3F709hUWNYjjtP64HEDvfcD89BsrMKx03B3R21/YxUOPWMeos4EcuEu59lmLd/kdr4PXwoyv8KJ/gqy0wApKYGPZp4j9io8OPYSrx2bQm6tkSQ5jmPjWNAtjQpFYFvEBYQnzlHNYw7Y16hsuMzbJbfcFluJplNxyUuN3hH0lUqDsEjSlI4u6caaXEPVtRilJSvDnJZOSUrFycmA1+Ybe1XV2ijjXjchzBVZofnx3T7fm6tzpxszzEriHb7YqcmASysRB1ou19ZjVtcTcgTbcYlvC354yCS1euBy8uQhRmHKtGVxp5tT8SwurcQsein9pGSuWT6ivZiVmnZgE9jyiYlqN55USe3G5yU80xI0pO7NcpsTDf3Yedun72dbwggf26ZtuZssdys/Q5rWBvCxMzdb7Gf8p+EWpyYqvDRb49REZQ+0Ml4xKihxbr7nqZqDZwtenPlkrbtSsNDP6CY521HBgZbLoJ+znZQIDa3AJkxL+mlBN8y5vBZi7TgE/I0THeKsZLE/pFCKO92Yl2ZrjFeNs/VcUzDbNDD8YbYjq3VillFvm7P7amzuyGFNtgOibYcPeiULOsDph0zLnL+iujSOP0tNZeiBw9ubJVuqzoPqFNOBphQlzflJbqwP8fY9R6rgYNWiYoH0R7wZBWh7maC3xflsBVwH7bgErmn1KjRYkjoZQa1joP1ZCp5n0Ma2Y/5rj5s5djg0jhhZDIMedCbM59PEVFg3LsPmuqnklDKIyCjaaz1+bhL6Cry1X/Zq7mlC+znElzoBJTE6TR7rUaYvXoD7d8xJ7vizhjCdZybRhSO49qGB44+GptUohDkR7pszEODtDfj4I9OeGPZA2sjl+1TTHKE0olLBCUdGMd9xYfYQ8eID4rkjLCmfUNnou108JyCfnMVZX0L37qEth/LwYd5eHO6pXzwzHqBmjhPGfbaV0RqsWiVKWQSOpOJIpusOUaHxkh4dYmZHgsbUfkZpYVQwEBzp+MxNtri7ssULO+hE2BHhLTUaTVZqHNugADdC42k2VXPJSsWxMZ/b2zEP+gnXNkJKDUprWr7EkTZZ
oR+pqL53oLUD4oB/d2mT7cio4geuUQV5bqq6R0y+sDR6BLb+MM/rSfG4Smr3+4lz9dj7lErz0/sDrm/GVF1JvVYySvUe2OLz3s9YzpgWnRDiM8lyd/4413QZrzgoFGujAg0Ms5Ks1I9w7d5fNkLMVUfy359t7bViL61Fe23SrNTMNR2ysmQbiAvN6wfqnBoP+LcfbhhRZNtiOy74zze7bIaFEVbWcGkjwpFG3BkhKDWEeWnmog8hMN9eGrIZFsS54ie3N3lhzOLV+TqjrOTKWkRalOjJGe5upkRFAXHEpGdTL4Y0rryNQrBcerzrTPFgpMm7GUp4zDYlN7sZ66mF38uZqDrMjzeYrdm8tVznRmJRac0z2xojbh+nZilEnmFXa7x67jin33kTJivURIF98qyh4rz3ZyaZ7JuD1rhpOS7eNVWaJWHusOGNtsYMTabRAttBbaztzNeeMfuAY5vktnwffaONOnvemPs+BikNX9EX7Zfc5fppQvuW40ufgPwA4fno4cajp6okNqc5WwIS4vAT7slH76CHfYOamjtoIMCuh5jcZxanF4DQhtTpejA2Dp0pk7Qu/ISa63Fy9IDRvkPUdE7NbZqvc8P4nIksI6xPUh11SaVLkee0haKbK3A9OiqlGxktPp2nFJbNZpjjOxZznYCVfkZtuM0JPyFNXV595TSe6/D9Aw3W16voax/SbDb492sQ943BpdGANLYmW3mfNMn5eCPmpVlTCV1ej0gLxXzToxMYU8uNKGdpmLIVFYxVbM5OVznW8bm4GhsPsFRhCY0EciVo+zYN336kEnoYKn90zGfDs/Adi6mqTdWRtAOb95dD+mmxN0tLi0f5Xl81njQ72404N9YwgWPRiwvyUnF5Pd5Lpp++/uHX5cpUkHGueH6q8pkk+HDV6ErBhaURK0NDoK67cu9aVwoW+ymjrKDhm/uVGqQQ5OoTMvZoh88HpuKab3rMNBxenqvT9G3+z6/NcHs75m43YSMqyZWi4lhYQjNeM2LOaDg2HtCNTbX18UbMvW7K6akKriX4YDXkzlbCRphzqOOjHwLJNH2bl2cqRB++S5ak3B416ARt0hyOWDEI0xJ9t2jybk9xk5xUW7R987v7tYNNPnRDrssYpWF1mHNrO2FxYFHMHsL2Y0ZaYCWrVOQI8dC8y/I8mjc/NAAQv4J+9YfGvUKDVW+g5g8jjp1CdCZMMvpP/xZ8D659YBLP9/4bmD1ogCN/+P/aqcgcM/dutk1VJyScet48+zvOGJ9GSj8cX7pT9EuuQvI0oX3b8SVOQLsVnHvuFcTK6qOnKj8wbcZdVuquM+7ufRst9MoCjE/BzH6TrNLECJ+eeRF9+gX44G34yf8PulsQxfA3/ke4eRkJvFKsELcOEly7jixyoyBi28jRkNc2fgL+frQl8RrGnj5yd9yO85TQsqlaCm/5AR8NHEph4c5NMNf0WehmCFVyN7bIrYAOhozseT6l0iyXDolsE28XFNh0aj6bmzF5Ae2KjW8LOhWHZlMQZabtlpQl97ZTnttX5Ugn4MR4AMJISC0Pc2YaFmOBzcGWt2NcWZCUikIZNZGqa3F8zONHR5oc7gRPTAaF0oS5Rmm4s51xesrnrYWh0Uz0JQv93Vma/cQ245eJz5udgUlkvm0QmbZlyN1ZaXhkvbhglJU0/c8+sp9BPz6k6v9wPFw1np+tGQUVadEMjBpHVij+l4ubRHlBLyk5P1uj5n7yPe++h7VjeqrRFDtUgkIpNsOSYEf8OXAlJyer9FPFyihifZQzuWM/M9dwCGyJxiAqp6oOtjS6lpdWQ9bCnPWR8U+ruJIJNBNVh8oOSGZXd1JHIdUioVKvcDJOGB9zENPTCEo+cF304l0+Hmp62jG8NmXoCHWpmK85bLV8unFOGOdUfId2xWGUFtzvF+RCYkvB6edPUKYD4nqbqrCwytLYvNQbMDkNcUT59p8QYxNsLMDUDNSbiM6EmX3FI7QlEbUW2pKw/xDW7EFI
YtSt+BNJuvFpxDNn9hSD9I3LJpm5HjQ731oS+mV3uX6a0L7teMIJ6GG2/277IFuZgEMnvpxg6S58N02M0OnxM3uajzocffLaVofy5HNmnlatGaFh24Lv/QZc+xDpeNRcC46foqw0iOOUQGXI8j5unvH9bIF4fIZg3EacO04a1PHmz6K3N4nv3yVYv8hofYVo8iyBSqAmOTZZ4c52QqlttnshbpmR2zZrhaQW5SS5Ii6gevQY5TDGCRVRrjjY8RgLbC6uRpRaUA8FL0w6SCFQKNZHOWujjI9WNGf31ZiqG8HmpmdsOpSGXlJyeS1icZBysOOxHVrM1T129/OKK432Y6I+U+GUSlNqjVIwygr6cUlSKh70LA60PCQWaaH3ZmkPw+S/TnwRWERaxvdskJa0dpKM0nB5LQI0V9Ykr8zXP/M1fNHM7tNRKv0ZjhpANy6IcyP+C3C47XOg7T9ieTNKS95dHnJjM6aXlFRsC9uCU1M1lDZ0gV1SubQE52drTFZtbmzGtAODYDw5Xtn7XT7c4uzGOavDnELByjBjvumSFooTk1VemqlxYHaMrc1N3l4YcnUjQmvFybTKSzoEp8pqolnbMg7tR8YkY/sOItwYJ4OGgnP7fF7o32VKxbjXFnnu+PNs31ylzHM2Y5fttofnSA61PWxLcGMj4t33r7OeKPYHy/gHD3J+7SKyKGA4AGGRWQ5vLoRoxyboa843YqTrmecd0LYLnoceDSGoIqbnP3meP31w9YI9AIg6e95UZs0OlusaVZFvKQn9MrtcP01o33I8SVl/D4avtRFlrdaMhNVjKrjdRf2ZOHoKAOshK3ddlsbGPUvRuy3OzjhFvUWc5gR+BbszAZUaOo2NmPH1S5RFwYWwS9zoINoTvHaoirt018zfLAmtMaQlqIgSrryP7nepLd6FE89REwWNfETo1Kj5LlM1l7P7amyMMrbG6tieYDXR/MdrPYZZyWTNxpUW+2oujVqF3ztmtPhudxO2wpys1Bxou+QYiPeRts9P7g8Js5JDHZ+JqsOZycrexvrKfJ3TO/DvjzdiKq5kYZDR8mymaw73uuke8OFwx/sMKhEebf+5tuBA06dUCcOsJC0UUhi04a4titEw/GbxZRJPzZW0fKOiP16XHJmuUii9wy/Tn6nqHr73l22FPqlSbAfGQmc7Kqh7cs954OH3kJZAIHClJMpyxiv2TiuwZCMsEER4tsWZyQqBY4xM46JkLcy510txLGGEi+vuoxXjXI3lQcqd7QTbEjjSYl/dJbAtXthnkJr2Dg1hmJUIIbCEZLTvML22JOqWyH6GJRQI6Ccls3WXo5M1RpkRoP7BhMQZGsRjEUW8fWudhRACL6DIcvIko+ZVyBXEhUKogkBlJFaAVUbEH19ls3uf8UAin32Jcv4Ab15d5/pSl2qaMVto4m6X2toiWil0UEEUOfrAMUOwbrbNLFzKRw6uuizh9jW4+gHa9dBnXjRz9HAI1fonJOtf0iT0bcbThPZziL0T2I6H2SNtyNDov+k4QoxP7FVwKsseOZE9HI/I3wjgxTc+OaU9psWpgyrvvvg7pMMhXr3OK7aDVZVGMqffBVUy2n+CrbtdVlSVfCuBiTqv/aW/wfsrEall460vcv5n/xUZVMESJtFtrhve2/FnoX4YXQjEDifp/GyNtWHGRpSzFeVsxzm5glFm2lCurWj7Nu4O8q6xs2nbUjBKCz5aLlCWhSoS3lqQHGh6OJZgtuFSc+3PSEU1A3uHiJwwSgscS+DagoVeRqnAdyRzDYeiFIRZSWDLRyqiT2/qr+6vEziSUhu1j9cP1HFtC2l9/tzrq8YXJZ6Hk97cPlOR7NrEfB6y8qvEkypFaQmOjflsxQVjwSe2Mg8n4MCxqDiSAy0XpRSH2j4VR3Kw7e0dLi6vGSqFbZk5VtOXRHlJmkOtKkmKz7ZbARb6GY40s7ofHKzz7GSVa5sxl9YiLAEvOTXQGltAWSosy6LuO3Q6NerxCGuQIy2Y
rDocbvs4UnC3m5IpRWN3BrbTPYltH12pUnW36aUFUkrGWwFXN2LGKy41T3Bssk4YunjKrOWl0gNngiCOOG/bhNUJYrYIqh7hsDDo0luXoD1mOiTzhxGdcWMNVanCtQ8/43EmGk3joFHknzhqbG+aQ6plgVLo42ceOeDqsqTsd1GD/l4H52mYeJrQvkR8Vd7GZ4AhZ17ce5B2Z10iz/Dm5hh1e6gsQ/+//+9776H+2n//iR9aWaK3N4xq/o4qvkagX/7BntDxp1uco6xkUECz1SSNE6I0px646DMvwoU/pRQ2lxa3uaA6bOc+UzoyChvXPibLPCo6J7pzi1G2ghQQnHwW+ZBf22j+GMOLd2lJRbrpMZx4lo+3DHx7ZZjT9CRaa5QySu5GJkpQdSUP+ilJoXhjf50LSyOGaYkCDrRdEm2zPMzQOse2LA51fE5NVKg+AS34/nK4M8eB/S0X27IodWpIxKVioZ9zoOWR5ppX5yqPJKJPb+pN3+YHhxqfqZ6+aO7184jdpGfvtu2+Qjvxy97/cffcBYDsq7sG+PEpRf9PE8l/cLC5h9QEeNDL6MU7QBNPcnE1NK4DlnEeaPhG/3KXVvBwjNKSYVZyZqrCMFU8P10DYcSha67Fj+/0+XA9YxAlnJsJGK84vLa/bqqlNOKl6YBDbQ9LgxKG/7Zr3dONCj4uja/aa6dfIotibN/HWo6YObKfmTzH9Rz6w5jlQUauQIZwuOMj5w9yjJxDYzXkxY+p+i2irM7owDNc6Zcs9xN0rjimerx2eh75wX2T0KSz91xq2zG+hVFouJ39rtFz3LGN0Y776DO8YzXzuNBlifrgLcKlu+g4Rh89hbVbwT2Npwnti+Jr8TY+VTWJPPvsINZ198iRenvTyOS0OjAaopfuo8YmDRT44gXTr195YBRAqnUD1d9pVT7c4tSOSxlFXN4qWOglLGyucNJN8UPQz79svg5pkzzzHMPFENuuYcU5awTc7sZUC1D1gNEgxMsTrnjT5Aq8sZM8F9QYlIKG73Dl0m0WN0cs2i7PtGHQD+km4FiQlQppSaRlYUuDmpuuOWxFBTc2DbJxKypJS+OZNll1uN9LcSzJ2ignzQp8R5IUBQKXu730sSi/3URT8yQjSgQCpTUVx2a2Yci4lmXsUMKsxFuw+MGhxmNRfw9v6p9OVl809/pFxFdpJ36Te376ey21fkRO63FE8l1l/1JpTk4ElFpTW7dYG2asjzL2NTzSXCEE5KXCkQZ2/+lZ5sW1kNvbCbYFZyarBI7Fu0sjFgcpo6RkK8yxHZeVfsKbhQHybIQZ/6O8T57EvBkGqIPHWA4V+1sunrSouhZZYYArDV+SK82P7w1xpGBp0GOu6SClzWsHGliX32VhkPOzfpN13UQhGA8spj0YaodK4BEcPEJ08ypef5Pyj28zqB/lcNMltSq8uL2MawEHjsCOgLA4uyMIfeMy3LoGH7wFaQS2i7akaUnuWsgcOWkqtmoNyhI9f9jsOfWm4Z7uHKpVnpluCcJUcOHwlw56/03iaUL7ovg6vI3HVE1P6oHrsjTq3MO+QSU6Nvz0j9C23IH23jUQ/iI3/JXW2Ges3PdanB+9QxRnJEmVI9MzRFspp4McnRSMBiFBrYLlegRpjFetosKUVpGybVeQacRPM5/n739Itd3khBhwyT9IxXcZVpv8L0WVIs+RheagXuKMm7KVRXTLaS7ejdhOSkDjWGbe1PJtmr5kZViggE7Vpu5YRAXYO7JMrmUqqdmGy68fbHB7JFjY6pOXmsPtgBdmany4EmJbgigvGWXlnhjvw5tvxZE8N12hGxecmazg2haBbfHT+4NPdBHFZ+dPXyZR/DwqpO9qPA7avyundWqi8sRk/vA80hKQFMrMihFYGNmy+ZZHzTWk7k+PI0dpyfXNGFea9XBiIiAr9Z6o9PIgZTvJ0WiwBFthRsW1WeolrLopt9cjruclTnaXfHwGSxhPthPjBhl7cTVEA7e3EuKixJEW
WmsK5eBKTRbF+FnKLRqUpUIUJZa0WLq3xJVcMOVrau4hzo9JkvshrujyTh/eywsKx+FgwyI4dgJx4sweIrlUmqSEQFiILDXPbxwZvmjcN56GXoA+egLu3DCJqd40XZTL7xlSoR+YpAifjBwW70GeUQx70OiYGZvjmorvlxC1+FXjaUL7ovgavA0hpRnsbm+C5/O5W2ASm1Pa6z8ymm/Sgf6WsaPYWjfWEkoaQ8+ZAxAO0X/997AeWrgPtyVd6fAgUsTbGUEvxNm6zQVnmmwyJxjGvPTseWQc8sZgCNk9tkm4nDtYWlNWq9S6MdBEdsbxTjxH3JigUBa5yqlXfbbCjEQ6ZPUJHmQ2S3Kc4XZK3ZO0fMm52RqboZGv8h2L+ZYRqp2qOWSl4tZWimNbtAKHl2frvLs8wpGCP1sYoqWL1nBiPOD7B83MYGHHA8yzjfhtqdmr1j69+V7biAGz+b4yX39UF/FTM7SvEj+PCum7Grvf6ygt9+S0enHB6anKE5P5brXs2QatWChN0zPqIp2KQ8U2jtdRVj7CB9ydzz2s+G9bgiRX1Fy5Jyo9XfP4jSMOueWTTjq8tRCitUmU5Dk6S6k6AUMl8LQiLUrudzM0mqpj8/2DTTbDnFvbMQ/6xljW0pphWjBRc3ltdpyR9Lk7VPi2QywsZj2LkzrjivA5ZicUaUZstxCVGnGhCOOSfXafXFaZ6kyQHRzHCarEhcK5cZX3e5BaDu6Bg5xfeoAc9kxCqzdM67FaM+r7Vz8wn3/mjKnIdjhnVrX2SXcnxxyqpTQzuCMncC1BPjUDU/Nw+b3PzOZ+VeNbSWgffvgh/+bf/BuUUvzGb/wGf/2v//VH/v3KlSv8i3/xL5icNBp8r7zyCn/7b//tb+Otf+7xdXgbe2oft64CoD6nz60dF62VgeNPzxk5nBuXjAlgo2O4LqMBlAqKDIoCMepD8AkdQH/0joEFf/Az0vYU8+4s9uFZiqjCsHKKNJZUhSIpFXGhqN66ipvEvBE+4M/ax5hJbYqoznzeR9k2gWNTsxQvHpqiJ32qRcrKUHB7K8ESmvXWLG2pSUaKUT9lKyoZpAVohyurIZ4jqbgG1Xis43O443O/ZywwTkxUODtd4cZWwgerESujnENtnwe9jIPjHpmCF2ZquLaRxNrfcrGEAXaEudojD+9WW7ub7zAtEWjy0rTJdv/9cXOxp/HFsVsBp4WitePn9kXX9pMC2xLUHAMCOTJmZkG3txPWRzljFZvnpquASWZvLQwZpiVV1+KZcZ9Raqx/rm1GWJsW52ere79zaQmCRpv+9harQyOlVRlu0K4JAgtmmz54mjMnx3hvzdjQLPVT5oKUNPfwLE2a5NQcgWNJVgcpSZmxOsxZOdyifvJ5rHKDfbYkU3Cs7cGCTZ0cbTvYnsuVjYisfgj7zATVy++jrSp2OKJx6yPc+wPe3vcc2dQ8ehtwPSppSLT4wNBgZg8w8mrQHKfWX0OOT5k95fAJWLxt5LCa7Uc4Z9p2jCbr7qE6Ck3lNhpSJpHZE1aW0Luo6V9C5Y+vGt84oSml+P3f/33+6T/9p4yNjfFP/sk/4aWXXmJubu6R606ePMk//sf/+Ju+3Z9LfGXI7K7ah7VTETyhz62yBP3eT83ClHKv567720at2xJG860zbiq2Qd8s+GbHJLJwZBZ5FJoZW54RWJqg3SbVEEhNS+T4nk8sbHxpERTpXgs1G5tGdmZ5vtOiF+W8VElwF+sEeQTVaXPKvHcFt0z5q27GW5OnaFRcrm/GbGmLtTBlIyxAG+forChZHRUcH3fY33I5OV7hXi/l8lrEvV7Kc/sqJLlmmBpofCuwWRxkDBJjFWJLC8c2/lrwiVxTWiqavo2Ax86yAsfMS5YHhoy7W83Br1Z19W3GV2m17l47Sktqrpm3CeC56SrvLI0AgQYsSxDuHEZKbUxKLWHU2v7mqQ55qbm8HrI8yBmmBbe2Io6MBXs0gDFXspor2oE0xp6F5n3R4dUD
FsnBg1zJK3y4YTiKvi3oLa0y4yXYw9t8OHJwBhb93MUKqgxz8FTJIIGf3h9wsBNwcl+DMFfUXclLszXS+TpvZAmZa8QBPnj7EhWdE2rJ8zMtnr15Gbob1FybeGKGdDCkMgMjy0b0t4lufYznWLjDu7x97q9xrXkO0R7nxDGfV979fyL72/Bnf2AsoHbAY0JK9NFT6DyDjz9CX34P4Qfok8/D+2+aH/jiXXSzbcAjgz647i+t8sdXjW+c0G7dusX09DRTU1MAvPbaa1y4cOEzCe1XKp6k9rETu8koufg23P7YQHonZ/Zg+7THTY+9LMF24eAxytYWcaVFcOgENqDee9NcY1k7NvAFVGpIVXLe7hEP7xJ4CbLIeen8q2S2R+BYWFqhbAeuXyLQ4Fkd0kaTdtWnOTNGND5JlCTg+2QLPQKVMtrukucbjCUu2dwhBJr7vQzLEjjSbGhxpolLqHiCflJg4VP1JKOs5G4vYWWYo3aUHzSaB72MiarNgZbD89M1JqoO2gnIE/0IjPzhTRV4rHVKnCtOT1QYpiUVx0JpAxd/GLDwuI35SZ9/Gia+7GFg9+dY8ySvPmRiClD3jIq/JQRSsCezNUrNzFVpjK+Z0sZTDlONezuKKZYQXNuIzTqKJO/d3eJuN2WYFpx1hZkZdyrYYxOUqzGtwOJBXxDHKVKVeK5LPNokTXwONWok/ZzEF9zvQ1EaIfy6JymU5tmpCnlpTGfjXIGwsOt1PEtQDId4KmdkubB6nyC6j3vnkhkJ5DnB2hLeIZsoqBJYmnPL75Jt3CNoNoj9CsNqG1kZQwiLMIyIFQQTM8TFMsHsQewdM09966qZld2/tfNMV9GdCYOMLHIjkSVtRJFRXv2Q2A4IDh/FPnn2KYSfbyGhbW9vMzY2tvfx2NgYN2/e/Mx1N27c4B/9o39Eu93m937v95ifn3/s/f7wD/+QP/zDPwTgn//zf874+Pg3/RL/XEL/pd/ec5W1avVPEI1lQXrhp6h+l2LhLkFnzChqby5jN5uI7jry2efJyhzhOmBJxP4j/CTyyCt1vDLjjTwhuXkJtb2B8Dyc585jSYkuFeXKA2StRnX5NiDQcYR7+xL+G78BWYYIKqjv/YBUaKxWm9+IE8qjY3jNFm/e3ebiUoIQcGrapT0+xmh9mRUV4VVncPpbvNQEK2+xIBtUpKBTBd+xKcqSUgssKWn7Dn/l7BxKSN5+e53NMCNwJIemW9Rdm07VZTnsshQZfb+VzOEvnZ3ip3e7lI7k5sji9cNj2F+QZAql+emdLdJCYEuHuYm2QUTaFnP7zOsfvsazxd59n/T571LYtv2dXv+F0gyTgouLPQr1+J/jb4+P84OkoFRGZLm+I93lJgXP7xdcWhngeII/uB+zvxXQrNd5oWaQvPd7McL1sGxFo15nqZ+QC4czcxWurAyJ63W2bcGD+Tav75tkPN0mLRTP7a+QZhl1b0SS5nhj0yysCNJUkziSRrPO+KiPwACUCulScywWIkGuLG4thqR5SanghfkWv35sDNlu8WtLd/mv94eUWHxYmeWocwdpF9TsBHtiku+PS9LKiKoANTlFtnAH4oh6q8O+o4fZWgvBtpkeq1N9p8e7qkPqz1GzxnljrII/1iFbcMH3yJZsRCWgWLyPVCVObw2mpih6mwDIg8e5tKXMnlBk/Nr4OO7jxBh+xeIbJzSDZno0hHh0Yzh06BD/+l//a3zf5/333+df/st/yb/6V//qsff70Y9+xI9+9KO9jzc3N7/pl/jnHnpz6xPH6SQ2ljGejy8EcVCDSg1sF1GU6Ctvw8IDQ2CuN2DQY7Tepd8tqYyXdIXL8jtvU7txFaIIqlXy488ZdORoAMORccu8dR36PUo/YKu0CAYjbFvutTa0FrC4YMwCi5zu2gbLGwOy1EhKPVjNeGPSIT15hDhJcMqUKI7YwubjbkwsPHoKjox7zNc9XNtisZ9yIl6BzYKV/3qHtxpHSdIUG41UGpVEWHnJwshm
FGU4tiDLCla2etxzctA2OovYDBWLK+UXVgejtGSzOyJwLIa54vl91R0UpKa3vfXYa3bv+6TPf5difHz8iev/51FdfpV77iIbe0nB4iA1/LFCf+7PsQS2RnrPF80SMO1rPFvwp3d7DEcRthT8zjMdFHC6FfDW4pCFzT4fLXaZbgSURcpKVxigVSk4MlVlezDi9oLiWM3eU3Z5f7mgPzaDnSf8rNBEtRJPwLm2T6Zg3RdsJiVJprlw6S5rTsbhikDvP8TSZkQvVdhCkKQJhyslVRtWtkJ6qaKF4iPV5P2Z7+OogpPpGq84A3SRY7kesVKw+MDQcDoT8Df/J87c/YiDcYa2HWr9iLDSJLI6VMbGGbSmWJ+bppZk6DTbEVCwoNaGxpD88AnSXh9OvQAzhwHwJqYY/Jc/oxIO6AmHpX5IPcu/lXXwXY+ZmZkn/ts3TmhjY2NsbW3tfby1tUW73X7kmkqlsvf3c+fO8fu///sMBgMajcY3ffvvfDyJZE2a4Jw8SzJ9wCS6y++hBz3zokoVGk2Y2AdrywTrC3jWOFFnH54tCVaWTKvR8yGoQjSE7XUD/U8SM38rckppc6H1DGnRxNtWvDxXRWYJOhpBmpAVim5mMVYoAsc2rZe+ZnWYUq5uc2054aU2VI6eIE4SPH8BP49RVp26b1OmRpDWkwXPjAc8U7fY7Cqmqi5kQ6wiJ3BsoGSm7vDro5u4RUrf8rHax7jbz1BaU3Us2oHNZmmSy5fle31GlPcxWotP4pF9U37Zn2e78otU+38R99xFNu7OQftJSdP7YvHmUVpydSMywstFyXTVJVdGPcaRRrvy4roxJ1XKtI4nay4FGe2aQ8exSHKYqDjc6SV044L1UUGSKxq+zflZY2tzbqZKlOb0P7rBpb4m0B7d2hh+XPDC8AGDHALtsSQDfKVILIebcQ7rIx70cjKFEcx2NWmW8+G9Pu9vuPRUlWbgYSuFF9QolGIwe474SJt6xUdUa6hBH27sOMlLG7IEmac0qz56fQWUJqgEeElJJF2CVoOK5yAsYVCKSYx+6Q3jrCHEntLIwy3Fiu/hzc4RlRq/GlDxnG/0+/9liW+c0I4cOcLKygrr6+t0Oh3efPNN/sE/+AePXNPr9Wg2mwghuHXrFkop6vX6N33rvxjxOSRrf26OcNdGfc9mvW6Snx8Ys85ohOyuc35MEI/uEgiNvPqh6a/LHaXupXvmffLMJMH1JfAC4hLSap2KZxMNR0TXF6kdOQpXPiC7e4t/Zx8lDhwqH67yd16Y4eREwJgvubFaUitSMscjToacalqImQn8I+PEo4gz3ZIP1xMKleE7Fq5tEeYFF5YS0tCjFpbsm/ZZiDUNz6LmWPzOAR/3foryK1zcKKGWATDXdEhLM2t8/fAYiyvll04SXwa48KRrvgm/7OeRUL5K/DzUS77qPR9GQZ6cCDgzWXmiyv8jIcz/ilLx8UZMnCuqjuSNA3WSQmNWgsazJdtRjiN3wCSZYmOYsVEa8eL3VkI6vmS27jLMCuKewiKhFxdUXPO1lXHM5XXJg9KFvGQ9iriyDn+cVajYxng2dTS5tnBUxpGKgMkqq8kIrTT9/pAiy3n3nXXeVGNsxi6Z1tiupG3BalxSRgl+dAtXNhHnXzPkaCmNKLHemaF7/o7L/EVYWwKtkBP7OH9ghvj4YQKnRIwG6N2EVa2hswz+6382ycy2H6Hq6LKkfO9NXtpcI9aCyuHX9n7uv8xu1F8mvnFCk1Ly9//+3+ef/bN/hlKKX//1X2d+fp4/+IM/AOA3f/M3eeutt/iDP/gDpJS4rss//If/8DNtyV/a+ByS9cM26nu6bi+8ahQBTj4PH75lACWjIXLfHDVbGA8q36esN4kLTdDvI20XdGkUCmYOUPa7xOOzuHmCd/IFot42vufiqoD+1EG4f4uB22SQaVpSMVJGWmhpmJEVil6S0049/LTArlVQPYUzihAYNKNlS37nmTbXt2Pu9zK01tzcTFgcZbgy
YDlT/Pvcp0hTpn3BUgT/8a7mubRKMSi5nvmIoZFZWh+V5GUOCH6z0f7KyeXLkqO/qaDvw/HnIYf1cPw81Eu+6j0fdyDYtXX5vN9hzZU8M+7z9oMRSmuiXONKzdFOwK3tlFIrHvQycpViCTg25vNrBxrsqzu0G03ev7eO3vE6G6va3Okm3O6mCEyC6sY5M82AyapNmAtuFYZknWobx5bkClIKbFXSdgSd8YBD7Q51q2Q5hWahcaVF21P0exrbsbkyzFksM0Iccg2BW6FRdjndu02nv47yfLIbEu/UWfMMO65R8wkjaLaM+sfxM+itddhcNb3XLEFqRe3H/ytsrlKMTREfPElw6Aiy3TEWNXGEqDfQo+EjVB2SGBVHyLVFalEIEvTLZl7/bblR/0VNjN8KD+3cuXOcO3fukc/95m/+5t7ff+u3fovf+q3f+jbe6jsdj1sET1LfJ4nR7dZnXr9nBVOtQbON9nxzqn3+e3DpXXjzjyiXH3ChcYTUr+HJcc4zQo5Nw//hdymFxdtFi36u8XyP7x0co7y7jVeOeDcb49qqRA2aCOnTCzy2/TbTliDOS9ZGOVmpKJXgxWPz6Dwncl1armRtmOPagpYnubzcJ0yrNHyHv326ziAu+I9XNymVYKiM2vlkzeHBwia9QUGkHLatJh8FMxwa0wx6inCQIy3BZE1Q82xubyf86e1NRBZ/KauWr9vy+zZahX/ecli/SH3HL3rN49wLPq9qlZbg7FSVXpKzGqasDjOUsvEdC8uCqmMzXWoyZTzQrq4bhOPasGC8BXVf8qCbMMwK1oYwXnGx0WxEBVkBcRYzSBXTh5t4jiRzAzynhB1n7GFaoC2b1LLxmwGBZ1P1bJqVgPurIQLBXNOlbgvWN/rYZc5WaaOUIXILyyTl0hvD6VVQiU+gc1xtMcoUQZJi/ezHphKzJFSrlFFE4lXwqo1PPAilY5ys05gyy7hQtEg/uol3c5Hz4X3k6Rdg0DMeqkHFoJ/Z2SPK0lR/UWjGE7tSePCtuFF/Lbm/70g8VQr5luLzFsFeG6EsUYO+Me8rctKV++gdP7Rd0dFdMjZHTyHOnsfacasWUlLuPwIfvUM8d4w0CahYgqg2xsgNkIdOUqk2GL37Nlf6mg1ZJS9sxAe3+EFHkxx5jtGWwhrmZBpiLF5ulHTHKgSexVI/ZzPK6QQSxwKERatRpdSay2sRSoNjafT9ZUg09QR60wcZxA4frY/YjApavsS1bKqeIM9K5uyM6YokHpTYKDzHwvY9Wn7CZFUy23DQWqDRLA8z6p7NW4shG2HORNV5rPcXPErKre9Axb/MBvzwputY4mt7nH0X5LB+UfqOXza+StVa8yQ1x+Fgy9jTHOn4uPKTQ0Jjh3M4SEq0VmyGBbky3LW/eqzNf7y6xUTVRhUl+1uSlZGNmxbkJaQKukkBSvDD+SrLt+8TZprAtnjjuQO89dEd+qlgC5uGX+PsVBVpGYdwS8C+hkvDlySFZm5unCTJ2dfdxsti1pSNrBgV/2EpWOocYp+UHNBd3pk5jeoq3A/f5fyDW8hwBEGVslC8sxoTSk1l+iwvTd0iSwsCx0GurcK9W8R5Ser2qFiayB0j3s6p7czT2H8IMT1vPNEe3mNaTcNfE+JRKbxvwwj068j98d2o6p4mtG8rvmAR7Cl6DPuwsoA+dhrV7xrIfqP5WDK2yLM9YVIdjoySQKtNMHqAVxsnqneQzTbvOT7W9HH82xsc/5P/Qtl8gdJpYLc7DKRHmA6pezZ1X1NuhWxmAkTAO4OSshwhpGS+5XKsLtnX9qn7DicnKoxXHLpJwSgtjZvvIGS/HVGtVbk8UqwuDPjZ0ohumBtVdSk42qnwe89N8LPFPioS1FXKGy2bYrZOM3A5t6/KWwsWGqg4knMzRi7oylpEP85ZGxVYwmIjyjk9WaEZfHaJjrLyE1LuAM5MVR7r5PzpeFii6fJaZIw0fftrzcCeErYfja9StUrL2PNg1BmpOEbx5WEp
s7hQlMpog97ciql7Nr4tUbrAFqC3NrHLgmetEvfgIf70rmJ5mGFLi6maS1QqyiTh77UG/HHSoEgy3ryzxd3Eol86TNg5R2qC5/bVqLmGL3llzXjOWVgEtua5mQab20MW+yUTVYfxPOU3n5/mWihYD3NWxBjXaPCOIwgsh9/QBakSxLU2tfIWlAUjN+Bqr0TqEXmp6LXPEliKYPUe59NlZKtN0J7Aa+wnimO8MiNwpRFSaNawZg+aw+7GqpnLJbFJFnmBePalPceNvQTybRiBfg25v+9KVfc0oX1b8UWLYDfhNVropQdw9QOKRtNUKC+8+kQy9iNeaFrBr/1VZJ5zXgtGVz7ivcziZuFTGeaMJyMoSs7k61zSsKVabAwzLlsB3/MDXp2XHGzYvB9uIIqUjzOfoOmzHZeMVtc4LFPczOZubYabWwmeLZiqOSz1c6NuHmvKMmCU5nRcm7LhsTTMUQIKwLWEEQDPFSujEtGZZSXL+a1nxmhWvb1q6AeHmo9UN65t8dJsjT4BV5a2dtQjBE8UwdTmfxqB0opwR6Hii5LSwxJNIGgF9iMq8k/j68dXrVpd2/rMOgDzO3q4dfnSTJVhWlBxJRVX0g7ApaSflYTC5fYooj4O/6fvzfLO8pD73ZRuUrIxyrhSsXjG9lkIFSPtcrcHZeEwVAAOK6kgsM1711zJ6ckKCLOO31oYEmUl1VrAkapAljGx5zDWrNCOB9yKC6JMYUmJ60myUrOZW4w7NkG9ZlT39x81OoxrKyhLsJzbxEg6ox6zYUKsNTWljRjCbI34xPcJwj7ywRSkpoWosgz+8//D7B87szld5BTtMXjmOWNHs+O7uAtI+abSV19H7u/rVnXfdjxNaN9SfOEieDjhTc0C4OyfJ15cRG2sYU1M7TnYAnsQXR2OTDJbfgALdynv3iQ+dJrg3jXk6iqidoBg+iR3t2LCaoXm1Ale7t/isJvxUXM/HVISXTFK9ZaioTJWGvvI8pwkB6dUNMhwVI5Vcbk10mzrhAyLblLSdCWjtDAqHK7gfWeSYZEjLYtpQApB25NIoRmrSEplLGRAoIClCC5uJIyHivNzZoHvGkU+rCjx/nKIckqKUjNbl3Qq7hP1A2ue5NREhX5asDrM+XjTiM5+UaX1sERTxQnpJ8WecsXT+ObxVavWx13/cOtykBT8+ytbe7Yzv/Nim42NTQ6MVclGmtvDHMe1KWyHmm/z28+MsTbM+HA1pFOxSQvN8PAZrq+tkGjYjEp82wEL2i2XiapLXBjKwMOtaA2UWpOWihf31bhpHebj1RAsycX3rnJ9CJa2cKsdkqKkG8Fk3eZ7czXqYU6cuQS9LnL7bQIhOdA5zVZnlum4S+A4hEhEZ4IgT0BXYGY/ssjMeo8NTF+02pAm6LUFYx/VbBm1kJn90GghbbM3cPf6L6Qq+sJ24teo6n4e8TShfYvxeaejXQV+3vsz8DxYeUDa24ClB7CyiDp+GuuFV7EazU9AI7u9cQEMB5S2wwVrinQ9wstqnMsigu4mE+4C0fF9vDDXIhr7b+mKHuOtBp1Ld8icOk4ac+nWCunGBndiSLSNnJikaSmKrQ10UeJEI2qeR0SFRAlsKYiyEqUUhQLPgTiHQaootKAoNaNU8+JMFd+xKErN24sjtuOCP7k/4Ejb49JqjEKzNDAQ/VFW0vTtzwAITk4EjLKCi8tdtkcZQgh+dKT9xOQkLcEr83U2w5wrMqK2I1X0ZSqt3U3UEgKtd4q9p/GdiYdbl0VpPNQavs0oLenFuXHMdh04cJjqVojqVKk4tmlT5orxqkPTt+nFZr4qLImyLCytjSOLLSi0QAiLrTjnylrE6ckKcVFiCUE/Ndqka2HOra2Eu9spB1seM+0KdZXxswXFRqrp6JA2miPzM0zWfTxbILOUN7sCvVkQiEnOFYu8W9vPvUFBFq/iFgljrQrBaIvXjk4g5QHTkelvG4rOneumMhsN0ZMzcPQkrK6YRLa1DrP7DTjk3g3K
IIAsRwvxrQsTf7p9uGdp8zmJ82tVdT+HeJrQvsX4olOMyDMQFqJeQ+XT2CjSvDAixOGQMoqIbQ//6ntYafwJuvHk85DlxPfuEWuw/SqxHZK5AeejZUZWhWa0SJxUWewniHYbZztjvw/Vso/sLfJhXiB7W6S1/fRKTdQ1LY2DSiEDh4g6P9YtVMXGEYJTLQuhbHJtkGESwYuzFS6vxtzYiohyzSiJaVdsjnZ8DrdcbndTWr5NLy6YabiEuWK4UnBlLWZ5mNHybb63v/EZAAHC6PmFaUHds7GEZpCWBK58IipRWoLxqkPFkV8ZbRjnxiCyU7G/dCJ8Gr+YeLh1KYVJLIYKIBmruvRTODkZgA5442CLrNRIYQSGNeDZxutMCFA7KkadwCbJFUlecKrtUEqbmYbHwU5AXmpja7QZkytN4EgmqzZXViOioqQXZWSl8dRLV1dZTCAtBIlTZaywcOKUfgZnpwM+6CluJA7VXDKrLbpxyUgkyOYYvueTxoIyHOGrEpmnUBuHO9dgY83wR29cBhQ4Hhw/A/uPGJDY+KRR4585AM88C2mMOzdPvLYGQnz7VdGn24c7ljZf1E78Ntqd3zSeJrRvIfaU73fQi08s/x8uy+tNrFoFVpahKCj9Ku9uFKTDLu5myfnRIjIawr1bMH8Y1pdwp/exsOEQRw5Baz+vd0BuLdOs1XjFj9hcvYxOHLxbq/zYnqPpuTTqbf67VognfeLhADffoOn40OiQKVhJbWaygkM10JWATuAwWFrmKCGDvELYmmR/y+dvnR6jU3E4M1nl376/zoN+QlJq7ncTjnQ85poedVdyZztBWrA2yvdU8JuBZF/d2VNaf5zCxw8PNdnMLUZhRMWxaQc2WaH4yb0+6Y5h6KdRj18XbfjnDbt/Gp8fD7ekf/fMGIO0pB3Y2DutwSgvEcDrBxoEjsWf3h1wfTOm6krGKzaWJWj6NpfXIkZZyXzTobG5TDfdZno5YbU5x4acpJuUnJmqcHE1IlMKS1hM1WxubMasj1KGuYHq2zLlh3MesZXTq3gU3U0CIThGwo1ijGGcoVYUJ8c8qrZFKByEbdOu+dQciQJy6WBXBFMqJLF84rU1alvrBqmotJG5sywQErLM/L3WMCLFt66CbcPiXTh7HlFvoqJo58D7qvFTdFyzB8E3Fyn+dPvwIUub77qi/9OE9g3j0+hFc4JKHnuK2S3LdThCAN6+aYZb5vQTF5rkxmUqZU603SXOh9QCh7IoiTOFOwrpNmeZHSzhEFEmFtlvvIF79ypojdSa8TwioM76+hZFvU57fZ1RY5zB8GPON+8T1z3Oz7n87z2b0tZ0GhXG52u80Ja0m1X+w7Ueoyihn2tWKhWUUjzXcdCuR+AY0EWn4vD8vir9tGBcCjqBzfFOgGtb/M6JDj95MGCiapPkmiMtn5tbMSIqWRnknJqo4ErBKC05Pm68svasYlzJ//GNw9xeWN3zv/rjO31+cm+ALQWTVeexqMevgzb8LsDun8aTo1Sf6D3WXbl3kImykigvWRpkhJnhlp2brVJqZaD0acFMwyGwJb24QGuFb1vsrwhOOV3acoE15XIrmWJ7lKIsyYGmh+cIBqkiTHPCVCGEphk4ZCrHdy1828LxfTpVi0GRkPsGzZuqClFaUmKxERa4quS4ncNsh7MbbyOzhJf8Hgdn9+M4BTd1jWR9iEdG4PtG0ac0Lhmcfckkr3D0icLIR2/D1AxsrJqPtTJ7yJkXETc+Ag3i2ofGTPjiBbhxBVSBPv4s1ouvfe2k9lju7Hegnfhl4mlC+6bxMHpxZcHoMVbrUJboLDNtxk8vgltXIUvJHlSNVlujRfDRO3gjj8gNDHQ3yCmVzQV7inhlyEIxy+zygCW7ybyGgIJAFYgXXzfzto8vIhfucD4K6Y9W6FY7jLRNoAvanRZybIwgqPH2Qo8NGvTKAdXApdnxqTWreJ7kd8+McWszZDlZp6FzVqRHatm0HAOc2G3/vXGgYSxjipL1
UcH1rZhb2ymvzteYqrpEecnCjjOwQDDbdBlmJXGueGdxyMeb8Q6vTXCw7RHYBrLt2xZTdRcwmn+ZUtg7LtWl0k9GPX6NeAq7/+7Gw3qPWuu9g0zFlQggzEwLEKEpS81CP2O4o1Dy2nwD1zZI1pVhxvWNGN+G1+sV5Lriuq5xWwUkkaLhCdPqzkoqjiArLUpVEuYlVccici2m6y4zdZcT41Xq+87wTBhy8eodrNLwKqcci7UUqp7k6LjPifWUewPFpeo8TrDjFahreJbD+Y4k3eoTJEOkcGBsCg4fh3s34eZVGJswe8f923DtI9OGPHDEWEsVOew/tidwrrIEEVTM4bm/beZs68s7P8FL6BNGteTrxqfbh9+FduKXiacJ7ZvGTnlOmsDRU2aQe+sa+vJ7sPwAPbPfEB93W5AP96c1huV/7UPk9jrn48R4npEhz/6QURgRj50kX1slbNRwdZf57han0nXGw03krSas1s37FjkcOIocDem4Ln83W6IrM9pOHdeyYGySeP8zjPpXsXWFqTJjjJSsCPhwNdxDd6WFYqk1S+nBsYrP8fEK41WHUml+en9IqRVSCL43X2eQllxaC1kaGnFaheK1+QYrwwyNprrTglwZ5viOxa2thOm6Q660ETfY8btKS7U319qVTgoci6ZnM1VzKEo4M135XNfkp/FLFDt6jwLQfELfsC3B6wcaGAStAi0otZmB2ZZAaU03KZiqubjS4kDL27MKKmbOkx8+RbIworFeQGo81y6uGUTkZlTukavRmnOzVZ6zA56fqvGTB0P+5w83kJbg1fk6zB/CCoeUq8v8TXmXd6hTmTxAff0eVaHIhUWlUWcbF6FK2kePEa+skPY2qW2twMS0cZ8/cMTor+48n6wvm4rNccyfqjBi5mdehKsfmHnZxQugFMXSXYNyPP4s1JoGVNLbMjD+9piZ538L8V0gS3+VeJrQvmE8XJ6LXXuYIgcp0bskyCz9pAX5UH/aGp+AuSPm34SFXFui1qybU9rCbVwsFopZQl2ln2uyuE9Vl4wP15FlBisP0POHoSwplx4QpzmB5yD/+v+AF4+YqjVNhYjpq1fygkrYZTmHrFAINM3RNv6xY/TSEq2hFdiUWpBJm7VehkLg91LiouT6Rkw/LWl6EjDkWEsI7m6n2FJwaysxbZG8YCFSHGgHe61F17ZQSrER7pKnYbbhorQmsCWuFPz0zhab3dGedNIr8/U9btDXUfR4Gn8xo+ZKTk4Ee0owDx9kXNvi9QP1HRCI5sOVkLWRsTzqJwVvPRgyVXc5P1uj4hi384ojqXgOmWxwuT9imJZo4Oykz3ZSUvNsukmBZ0HNl6wPM66tJ0gBq0OF1gpbmgPY2jBhtLyC6m7iR0OchkO942GlEeecELtSxUtzovEZ6mUBfkDiVvCPHCVwpmHtgUk64iGH6modkWfo488aMeJwZBKaF5h5WG5cKSgLiGOwBMLzodczB+ntTVPFKQVL980h+fY19AuvfqMk9F0hS3+VeJrQvmI8Ua9xpxzXuwlr5xpdlo9I0zycAL25OcTaOrq7aRbr2AT8ld81GnDv/RlZ0GQ+TrHnD5OnCad7PcbX7iB3FjtZCgKUsLjQOkaGxKXkFa2RE9PmCw7M11BGEXFa8Mx0g0vbHoQDpGUhyoJoFFGzLZR0dzYHY5IZ5xmWEGyEOZYwPndhWjJZdUBoslLz/L4qH2/GuNIiK0qiu3epkzNrOZw8eppO1aPqjBhmJc7OKfrEhEWcl7wyX8fdAWXEuSIt1J6b8WaYGwj2Y5RCnsYvd+xWQk+acWalxtoBjmyFOWMVh0JpunHJWpizvQP2+LTb+cowo+lZ6LpLmCkqniQsNO8vhWitkRZUbUGiIBoZU9qJeolEsBEWhLlClAX7sowTVkRIzHYqmLIgrdaIMx9GMUfrHvnR00w4Gu249EYJ7WYVx66jjp82ikDVOlajaRLaC6/uIZy9eIhMQvCrkEToaIT2AvjoHUgiYxn1
7Hl0HBqLqbvXTfUWhaBKkyx3HTu+KYz/O0KW/irxdLf4CrGnt7i7ID91AtpLdmdexMoz9EtvPDJDezgZ4gfoODJtg/a4aRnmOdy8bIRLr18iKBXBvufIDj9DreozoQIs56AZGs8eBMdBvPg6SQmJ9JBlTixcorSgXin33rP48B0udCERNrmuoSyQlg1Cc6amUL17NHXKB3kVOXmQrcgog/SSko83I0CzGRZUHIljCfa3XALbzNVcKXhmzCctNVWlubWheWAF2CrndZXh2gGv7GxOxngxJC0V7cDwhR5WiPBsQS818zchIOjKPTL2UwDHr1Z83ozzYZRqw7c5O11lM8zpJyWOtNAPqcfUPElWKH56f0CuFNtRSVYqGr6k6ticOFBhkJSMV10GScHZqQqOPWJhkJPkJY6Q/I1THf7wTp9BVuBagjx3iWPBhj9G1hhjszHHScfm8tgzXF0dsDYombo95MSER373BhQZgdS8/MZL2DtuGtpx97imSli824UkD3Ee3Ob85gby9p8aUeLeFrzxl6HRMq3KNIFjp3DHxkhWlg0/TGnTtpyaMYjEJIIsQTuuadt+3bbhd4Qs/VXiaUL7CqHDkQF0WBYohT5+Zm/w+qhElUafegHLDxCu2ZBVlqHf+6lpB7geAInnwseXTJthawP2HzYGnckWTEwjS8X5SY903KHSrCP2vQj9bfSLr2Opcm+BeoVioTlLFKUE22u8/tFbqGYDcfY89LeJooTMaRLkKTf9GdbzjMJpMd+weL/ikSwvUQsayCyjZilavs3RsYB8h1RdcQQLvQF1TzLdcDk9Ud0Db7y/HJrTsrA4M9kiW1rkXlyQYfP2tuIHTf3I5vQkdKG0BK8fHuO6NMms6hp+2SgrubYe/7l5jz2N7158GqUKBkgyXnUYZeUj6jHnZqr89P5wD9b/8nyNKCtxbYuKI9nf9BmruERZSd2zOTdT424vw7YshIC/d3aCKxsxcaHYGJXMNByePXmAQwt9rg8FFT9n0HA40PJ4d3FAstVFZZoi6/Ne1ET1BY0k4aDqEr37Mxrf+75JDB+9g95p5UUnXzS8zDIjjBPiiTlqH38E9RYsL5jKK6jseaPRHsc7cRLe/jMD7U9iSFNj9jt7wPgj2i5cfg+1Q4o2LtgC8eLrWK77pX7O3xWy9FeJpwnt24okNotmZQEe3IE711Enn8N64VUA9Ht/BrevG7uH9jhYFqIyaRbi4RPGsLPRMYoAeWb+HJ9GNprUGlXQiuLSe8RJhu/aiGfO7IH+4kIxGUj85buoxTtkegO33UKnCQhBsHIPp3aYnlMjFxZOHIHSLMaCa12FnVZoxwWvNCUpEs/WFErT8Izq+YerIYO0RFqC2YZLdcfIcZQaGLUlDOJRSIl18BDx8ohaxSUp9J46yG486eRdKk2UlTQ8CVoQZiWBLUHz5+o99jS+m/HpddQMbF7b33hEPSbMShb7KQpF1TUfzzQcJmqm5agxM7m/e3acblzQDmxc2+J3z4yxMszYV3cpd9afb1t0Asl8w+OVCRu1qvk4DtkaFLhKcMs+wcXlEduxAEtSSzP6VsGokNxXNZQUfB8FSWzWepzhVSqkUYyXJbiWJlx4gNfbIMiWd3QbjegAQRV+++/Az35spG0uvUu60IS1HTj/5D7TCvxrfw8Rj9C2/Yl6SH/b7EurixCFaDT65V/70snpLwq6cTeeJrSvEKJaQx89tddyFA//onclqgY9c4pyXHPdrk+RxiSzKDRajp5vWgGeBw9ug18xld/MfnMa21wzQ+OJaXQ4ohiNuLCtyOwa7r17nN/eQI5NoJ97hcurQxZv34fthJPJiECPIPEo05SkMYafF5yXPUZVG8vLWdcltm2TpspIW3k+I6nZaNdpaQOVTktFHckz4wHvrwwBWA8zpusOgW1Oxa4U3O+l3O+lZv7gWtiWRLoOi4OcQsOVteiJNjC7sSuFZfuKK4tbzDYcJIJzM1WkJZ6SoH+F4pv41T2s
HhNmJQ96GaXSLA1y5poOFh7P7atwaS3aU4kZ7RzUxqsOYIAll1eH5EnG2sjj+X11klwT5SWdioPvWIwsl/fDgI/zglUxxlRsY62OmGn5jGV9sjxnyoO+ZVFvNWDYZ9wPSFxwXJ8LyyOitMqDrmZ/UCXYKnnBDcnYIji+D9n3YGoOlu+bmdm9GwY9vb5s9oiV+xRHTsDV903Ss2z4a38PWauhg0/GGXukaCEM0MR1oVR/IWZhXzeeJrSvEEJKU3E9pgQXUiJefAOttOGRoPcU8wGEH6Cn5/bkrESe4Y11GM4dgYvvGB5bOIIsMRWeJWDpPnrpPtz5mLgQpKJNpVplNBiwKSTj2xskB0+RJxln3JSenXA6XET6kjKocUGPka6neGKMc51J5K3rvD4xxMpzRv4EH6iArVKTpEZ5IVoKmaxlFAoKDRthQccVrPUTRlmJEIK0MICNqbpLVmomqzajzECgu0mJJy1OT1b5aHXEfNPbg+Q/bAQ5SktKrZFC7OkwpqXC3TEadaWHxgz/a7b1lAT9KxJPMgndTXIt9VnlzcclwJOTAWFaUirjPr2vbrO/6TEWONRcuXdAcizB5fWIQmlsS+x4sOXcv/mAo07KunD4cbQfISzSQnOoZePbFu+vxXzkzbBgb6O0wEYwbgmKrMDqTFBVBQeydQajHgmSQbPNdkVyZazKmUFIWmjsg0dIVoZYEwHJzatkm9eobS5Bq20AIeNT4Llw8qxBQO4ejAEs2ygSxRFUG+C6iHgEtdoj4g26LBFJDM+eN1y3LIXVxb3Z2i9jPE1oXzGElAbJ+JBlw25Yrot+9YfoZ18y1z4sQfPcy4jdYfBOTztrNODIaUS9aU5Uq4ufAESOnjCnqjwFBMHWCp5OGUVNHlgNUu3Q0Dkv6RKpSrqbPdw8J6hV4MXXiAuIJ2expc1wKeDHqwmealFxJvje7IA/reyjHduUcUGUK6K0wLYE/aTEt838YHWQcmV7iUHPBmUhPI/tqOSjtZB2P+O56QrrYcFWVCAtwbExj6KEYVowyhR3txOqrs3r+83jsytldWs7YX2UM113OD1Z5aXZGp60UDtaeoXSVJxPVPA/rc7/NKn9csbjTEIftpO5H0tONPTe79+APYYgDPXj3Ex1D3RkCVjoZ8R5STcpudtNkZbg1ESFF/ZVGaQljhRcWovwbIu1UYZjCaoq534Cd9IKo0LTKLvkOHiW4cH9lWMt/vD2kP52j34pqUuFrtQ5PljkXCVBOAEfdw6QP1CcbdrM6SFXxqZpVRyS65dRlRxXNEhmjhD4DuXdmwQr9wkGq1CWsL0NHQWtk2Y/WF1AT8wgOhOmOzTsg+vh/fA3CRfumZm766FtF7Uj5KAdF/3xRbh+ES1tw3ebP4xwHJPk8sxUa7+E8TShfcV4GNzxCGF6J4SUjwJFwtEn1Vy1BuEItdPTzlZK6PfRp14w6CUwrcqL71BmKbFTITh8DJktIbfXOG9tsSz383HjNMuOz1JQ5cStG6jtnHt4yPoY2DZvLNzFtSQL6QRRe4ota5JOU1O3q8wlGYN6gOe7tLHoxgV5ZhyBi9Q49u5vuYxXbPIkxc9ymp5DoywogwpTNYemL+mnBd244GDb42DbI8qN9cvdXkqSaVq+5HDHx7aMWWOcK95dHnF1PWY7LtBoSgXDrCQrNefnagSNNmc7gqzUjySuJ53cn8YvVzxOY/PhJGe86zQ1T+4Q/T/RcJxrumyGOb2koBUYgezpuoMlXK6tRyhAl5pukvPmgwGFhqpjYQm4vBahtUIIuN9N6JY2qYJACPq5QOuSZsNlI8y5sDxidZBi65Kq7TBr5TwX3uH18DZlpUYwvY+XJn26PUFbDZCex2LFIXnrJ3jdNWpWl5eePU86bvPamCDtDQgqKXJ9BOurUKtTDvuMJo5At08tHCEHffTx03D6HHz4M9BQXPnAzN772wZQ9v6b6K119NxB04a8dR26GwbGPzVrxh27e9ZfALTi142nCe0r
hC5L9IU/heuXIaig5w+bkv4x/egnkhJ3Z23hCNGom1ZAkZv2pO3AtQ8ps5wLPUHqWXgPtjn/g7+MVCUM+ty0mmyIAFsETCpFkmaEXg032ma1cEjtJlajw7npgPnthMIr0EgCxyaUE2CHtMs1gpX7zGHTVpBKhRdIblYmOTfToFCKQVqwmcBaFGDpgtM1Tf1AAyktLq6EhnBtm/lWrjQV2zbAECGQtmZtkKGU8S5z5YhuUrIyzKj5ks0o3/OcquyogkhL0PBtMtvC/dSqfNzJ/Skw5JcvHqex+XCSG69ZBI5pO8a5AXXsgj20htvdhMVByuIg45lxH7mjQtPwJYv9DKU0uSopdtalQPCXj7SIckXTl6wMM/pRiTPe4F43wfIkTdcCyyKQgkjDXMNjI8xIIsEMOX4ywip7fJC45Pg4kY81gHT8GCLPeO3wGC/3NonUBoHsI6MQypxq1UdfvICzsWAAYe1xGPQpbZe3W89wTc2AnXJS9XnlxkVkZ8LwzkoFUlKsr0BeGjj/1Q/NqKK7ZVCOoyFgrqMw5G7x4uuPl+H7gniqFPJLHDocGfX75ftmoSiFfvkHj+9HP4GUKKSEZ1+Gm1cph0PD8j9x1lRoUzOgNbHtkyY5lZpFpASx9KmdeZG420eshBwkZJBrDldtxh1FPQrJKjVyt07DLdCuQscRXndAqgQVPOYOziFvfcBr8X3cYZcXvvcjNvs9bhUut+wmKi1oNyHOSxYHGXGusKQgbzTI84LFqkdlkDPTcFgZ5sw1PW5uJfzOiQ5q57T7s4UhK4OUrABHCk5NBSS54vJ6hCstNsKcF/ZVmWs00FpT6k/EiT8vnqrj/+rEp9GLDye5uX1j9La3ALMmKo5ktmHmQc/tq3JpLeLMVIV+UnJ2qro3n31uqspbi0NcCX9yd8B2VOC7FsfHAqqepOnZRFlJzZG0KjZeLlDC5+REQMtz8GzoJSVbUUFWas5O19iquSxvDqiPcvIwZiAk1bEOg9ljWJnhbQ5TRXJvyK/Pt6kFLlA3M7JXfkgeJ2xHJe1jz+JsrVJiEeeacjRi1BzHShPIc4ZZSiwcarWGSWYPbsPaMpnvQWcSxqdN4nI8U5ndvGraiX4F5g8ZtZHz3zdQ/a/YZnyqFPKrEGVphrZSwvTcI/3oTxOnd0mJ2nbM63b11S5dMFJX9TogDDJya90MeftdgqlpvNWQyKvgWYKg08Ga2Udlc53/P3t/FqTXlabnYs/aa8//lH/OA+YZIACSIMGxqlrVVW7phCypLUvto3PkIWQ5HO6LjjjhCEcoQrdS6M4RCtsXthR9Tlg+rRPHkmUNVg/Vc5FVJDiAIAASMxIJ5Jz5j3veay9frMwkAAIkq8mqJln5RrCKQP6JBPPfub71vd/7vW+gbrJ3cwOhIl5rjeGd/w4v9vvs/f3f5VrWR5YuwelXCcMc0jtIx+NIkXHS6SLLVWRrBNXd4L3liI43wnwlOeMmDBwX2Q7o5xWV1owENqtRSS9R2NLiXq8kcBRpqSkq849tCS6tRAgEC72cdmDR9CQn99S4200Z5hWhI7AtC0sIpusu52br1BzJxeXoC3dcu+74v9zYLnL2Z0QHAXjS0JItz6a+tVpS9ySlpWmJggeRcYjcM+LRzxRzTRcpBCfHff74To/QtTg+HrB/xONOJyUpNbYwfpKzTY+Wb3N8LOBON6MZOKzXQmZVF9dtcSt3ydoz+EnFnpbNICvppiXX1xNsC178td+gnvSgNcqgsvh3Nzpk/QZ+v+TvTkzwQeyQHRrDKXNCS1KtD8D3afghwdgBs7+6TR/qCrveoGyOwslnDbtz84rxgxSYxI8khn0HEdN7v/De2aew6xTyLYcfGBktW5v5tYYpYluF6uHbjHj2JcSW2ojrl+Hq+1S2A3sOGkcQ1wxxOXgMDp80C9tSwugkcnKK8wdbJElK4DnY1z6AZ19Ctsc41/89OqqibZW45/462na42KlIxg9RswSn3JTmpEdiNynt
RdpVxlA6XCpH0PYUIs55Zvog3T0nWFU2q92cK77g8ESNO72MSsNaVPDCrMfYvjp3N1PW45L1uCDO4eZ6jC1NQvXepouqoFQVq1FGXEq6ieLmRkzgSlxL4NuS4+M+3UQxGtpM1c0P18/ace264+/icTz+TDzp0qOVQl+6gO5AWLm4zjg1x6LhSgJH8ON7Pd69PyRXmponOT9Xx5UWGkE7kAwzBVrsPKsIY+Dd9G2OjIWcOv4MXpaQrybktktcag40XTpJzkIvo8wK3oxSCjVCK6gh0oy1uOBet+DAzDT9KGfxwCh5PyeM+8RxxnPJPc4ML0MSURcKee41I70/ehpufwxrS4YVqjWMBd7eQ5BEhn689qGJmxl0zRm1vkp1+oUvRDd+il7cdQr59kIrZQawc/uNo0ccGf/Fq++jXc843j92mxG1+haPXZhCeO1DE/Pw8SVoNLGaTThncov0g7umU6u34Pz3sKWkcfV9RBBSxjFpP8LRivfqB8hGPDxR8XJZklAQZyXSdshKhR0EyDAkFBbegcPEgwEiqFEKwd2DLzAcRlwLAnRmsTrMmW26zDVdjk2EvPUgosLcYs9O15iqu7zzYMhqlPHWgvmhF6LiQNulqjQIzVv3B6wNC4a5InAEz8/UGQsdfNtirOYw3DKC9WzTpcFux7WLXyDShCTNKZ0GE2XGS5MuMS6WpXljfkjNsViJCsZDm35ammLlSaqKrVgZo55MiopLK9GWGXLB/hGPwIYxnVHe/Yg7Sy7vJz6p7fAnd/q0XEG/G0E5RFKB6NDbexApLSZCByE011YiAt9hcVhi373O8MZHiEoTqEXckRY0Q1AFoE1WWp6ZOfvr/xOCPXtJLrwJv/dvjPWVs7X76jggLCjKT86id39sfu8zaMOn0ou7TiHfLHyRoecjtlbL983ycxiagL3thwaefJvZvuX0u+bXYc2k007twRoZ2doxUbB03zxM0RDYWuJ2PYrhkAuJR7aaoe/fQXc61K2KeGIvUZLh3r3EvWWbqNA4E1O8enrO7MspxfnOxyRpjsxd/lW1n8urKRpBPY35lSNtVocFca5YGhQ8Py0Y8SWZ0ni2oOZKXNvixbk6f3DTyPLLwuyQLXaNuCN0QkJpUXMsbAvi0hjEHh0PsC3TgQnM5XJ7kXWbXty+XW/vpe3OxXbxZfBUJawfEPgubidjaDko6VCzBYWCtWFG5Nr0kpKy0njShHm+szgkLkoQglf2NHBti6SsuLaeYgnIVcXehs3eex+i7/RYebDKneBZIgVJBUVVokugUmRYKCQf9ytGuhFHJuskgyGT0QZRrhgtoRwKnrl/kYtxSWr7vNM8yMtTNWRYgw/fgVsfQ2d9aySxAfM3KM6+YIwbLAssjKik0QS9B1Fvojtr6G7HUJC2g2jUP5s2/KyZ/9ecZnwYv9QF7QsPPbfebKtWp5rdhzh6ymzgX353p4CJWn3nNvOw8aiQEn36BSP+CGrma/W7sL5K5vnoQf8T+a1lGepgY80omk6/QPL2m0bYsXCDoQJm9tGJIsIyR11+j+TBPHNDh3nRIEsjfjJV41dmAyxdIYuMRj1kMEyYqcFVoRkMUmKtufrxfSYnxxGWJC0r/v21TW5uxPRzTcu1uLQ85PnZxg676jnm+1JWktmWxyCvSMuKkdCml5UMC0HoCITQFErz6t46udI7hsTbi6xKa1SldxZmHz6E/tr4+C/0/d/FtwfDXO1I9rPSeIBKIXClIDv1As+lCW+ul5Sl5r37Eb4NWQmhZ/bLaq406yipYnGQsBYXlFvP768eGjFnRZaiEKwkmssPeix1wHLHWaFgkFfk2OQaqsI0TQ0tCSmpENjSI9eCU2vXyfsD1P0FlqwmkeUgiMCW3AlGsVTFojfGMy+/ykgZocF0Z8sPYPGeYXqm9qKzrTOrqszI46NLRqw2GJh06UMnjIWe1rB8n6qaNWnWGkS/Bzy2J/sNpBefhF/qgvZFh57acdFam3mY6xnl0FPa
cf2Y8Wh18rmd3RG2qckshQfzVA/uwntvwv27hv9WynRoNy5Tba7CvsMEoiLwfeI4wStzqiInlS43c5vC8fG6txBRRNFs0ChT9M2Pidc1vmuTCJcwjgl8j7DmU7cTtFUx5gj22jmrhWI5q/CkwHcEoWODpak5gosLfdKiouE71ByLUV+S2xaOJWj4NjUPnp+uM37YoZeU/Ol8j0vLEYmCW5sJpyZCphruDr04zBSXV2MuLkU7N+jH5fhx/tWEEu7ilwuq0lxZiR+R7F9ZiclUxb1uzt6Wi0Zzu1tQaU1eak5NhPTSioYnWR/m5EqzmSgsNCvDnKVhgRDw8VrM+ZmQ8Oo7nLi7zBoei8Ee1hsTLGYeB1TO1MwUzbROklRYpabuCmqOZP9IQBm5rPUiHBTW8gOs0Yjx0KI27DAnBghp85q/l6R1GDZM9h8ze5FBgHBCtJSmkCEMnWhJ6G0iaiGcfgYOn4DhEH7yI2PQICwjBhkZh62RRVXNmvUgNFx+D90eN3mNR07tJIZ8E+nFJ+GXu6B9gVuJVspENGhtZLFVtTM3E8++9KifI+wUycoPiQYRwRt/hFy4aQrWzF5EWEO32nDvppH+t0ZNC3TmPMRDQy+sLcH1y6h+j6TT4bmpirz/AJUXXKxa1ManyYSPnQ4oGm2e613EGgi0rhFMNfFCnwvrBcnYDJbn8fqRCb4nJbrSfHStj4VixJOMTNfx+yVpYRJ/o1wRlxXWcICXFLT0GsncIbRlcbDt40hz400KTcOTO477gSv5lQMtVqMSx4LlQcHF5YjRXr5D/cit5ODHXSAeFoeEriR/7Pv/Zbz9dvHLge2L0eFRn7SsONz2ubGZGtPsQm092xVlVeFIC1saH9KDbZ/WlonxeM1GIHhuts7CIKeTGjs4gUDFCcNBxFExYExnrJBiVRX2xDROTTCUNr9aFVztVawOcnq5CRW1O5tsxgqnTFkqLL5f3aeedJFTs5x3hiRIAl1AeAJx9lWO3+sQpwX1lketSMAJ4egz8OMfgbS2FNY22C75R5ehHyHQaMsIOPRwYM6wRtv82nbQ3U0z6xeGfmXpvnHod2qfeM1unWHfNHrxSfilLmhf6FbyMN3Y3YSyRIyMPr2j8wOU43FhXZEqF299nfPDAbK7aWT+tTri+VeoDh7D/qP/QN7ZNEql139oaMk712F9BTXocyGrkTXaePVJzo9msHQPb9glKQqCsUPkwkJqqPsu351ySOb2EfglybAkWR/yIHOIcRB+wOv7W7ywt4lmH2WW49Z8lBYcbEssLF7ZazqmYW+Ad/M+N3SDNMsRZYF2XQJHkhSK56ZrVBraW8Gb25ShYwnOTIXGBUSbmVmcfxLU+aRdssfFIfZjBWvXIWQXXwSuFNzrGpurwJG0D9jmWSvVjpVa07c5O1UjKipOjgecma4RbM3GJuoOUVZSRzFTM/lqa1GJqjS+Lbg8UHwUjbOqAyZJkY5Lu+Yw5mqE7RLdvYOvC/63I7Dw7Gl+dGeAR0l3uWIksBnvD9hUkiNOitxzAI6dRtYa1LsbqLDBO7PnyFYS7Mvv8HKxSr2/jnj2PIUXkMQJgeMip2bNpdsNQBWU925BrwvPv4LIUvQPf90wP/duIa5dMqtCVWW+QWEDfP+T88pxzcce8pr9tuCXsqBVeW5mVq1Rs6PxWbeSh7u4WgPgMzs6ISXpqRfI5zuEVMQbqyTjc9SzAUztQSuF5bqIRgvLM7loFKYvEY0memIGiowkSshWlwnDgHj/EZLNderLC5xfXyWZO8wrdpe3rAm06/FesIfzM7M0Rhpw+gXCzgZqeIeN0qVFyWCQ8Ae3tPE7HpScmqhxdS1htuHgScGzsyGubRG4krbfolxyeSYZwIiHd6DN/3C1w3w3wxKapUHOwbZP6EhOTgSPUIbPTddAGIf9OK8+FdT5JGXjZ8nxdx1CdvFFkCvN3paLvcUCKA3nZmt0kpJX9jRQmh3R0ePPn2tbvDpXI774DkGZIiOP
Z4+fI8qrnUtcN9PoyWkqN6FyNaXtYt2/S25VCEvQtipSN0DlA6adCt+xEDjUbQtLFwwbk9SHayilSJYWUc++RiBtZBCSeDVyDWEeE5Ul0nUhjelliiubMYVw8ObOcT6+hzx0zGSdLS1gtUZQrmc6MN83F2XXRVfKjFC6m1BpRBgalumHv44oc3S9ZST8PDZD+5bgl66gVXmO/vf//U5XVv3N/+ozFw8f7uLEdgH7HJ459Bz8MCQtSryZOQLlwEcP4Cd/iL4YUv3N/8oU1CwzdMDKA3jrj+G1H8Kh4/Dh2wRhgOe6xK0JfJURzO1FjY2TvH8Btz1KR9sobbqa7sg0w6PP0pydIa0EVnOMBb3ERqqZx6W5mmFZBQdGfSpdsRYbKfBIILm6mpCWJnfphRmzg3N19ARlluMFHqewmK47pGVFoSoGWUlaVvTTksOj3iNd17Z68eW9Ddaj4pGgzu1i9LMUpF2HkF18EWy7hmTKUH0PJ6M/3tk/6fmTeUpdpebwT2LqVc6Ib8ROjhTUXYvlPljJgCopcaIOk9UmsVtDN9qsYmEVBXLcJWzWODleMRim1E/t59yoZG2Y8wfvK/6TnqCjBM/eWKPdtXhlYpQwiXFUzgYuDQFuFvFW7SDrixmr2uOMvUk6OkUy16Tx0uumOL31x3jtMYos3xJ+YMI8Tz5nwoWjoWF9bn9kXh/U4PlXEZWLcF1E8O3qyh7GL11Bo7dpilO9YTjn3qaJNv8MfIpb/hye+WEqzd93DrE0D4t3jUJpK3SP1ijCsQ2P7Xkm0yhNTIGTDtJ1OR/dIdnTIjwyBVce8HbskcydYyGYYCaQvBOZQlO5Er/w8RdjsrJikCrS9iQH2hU3OiWjoU2UlvTijKNNm3PTAR9tGkWXqjQfrMQs9XN+/0aXI6M+viM5M10j2yp0dU+yHpcUqmKYldzezBCWYDMt+XtnJnZuwA93XeM1h6Ajv1Qx2t1X28UXwePPyc/c2T82S1euT1QMubWRIgScngr5O4dCyqxHqi3uDjqk0sMrYnLR5h0xAcByFfDrGz3OLV8lTxKCeg3nwCt4Vk5mOViVYqV0uZ1IgtTmVH9Io+ajpYN17w6MTTG0a3zkHEHcu8WKrDGnU9qzc9RO7sFyzHGtv/tXCWoB0eraJ8KPaPiJ+ExXkCtjleV6MDkLF39iBCPfEAurvyh++Qpaa/TRAWpr9Gf+I77I7trDVJqamIVB32zw+yE6qGMVOf7f/l+R/D/+z2BLWF1EV8oMbxstmL+FdGzqtQacPUd0/DmK+33sgydIVyIYCwgWByz3Syxp8Ud3Il7ZK+gmFb20YD0qaTlQdy2C7jqBKmG1j24F/Nt7AYeP7MN1LGabDne7GVJAURlJfVlpuklJwzOJ0c9MhgwzYzx8eWXIRlziOZJ+puhnakcc8jhOTgag2enctvGzCD12HUJ28TQ8/hxtPyc/a2f/MAujXJ837g35YClicZDT8myurCacHm9zS4fEaYF2fM5M1SiwuJC5iGiIsOD2cMif3t9kZGWe10YU0tLo46dpBXV69VH6cUlmV7jdDaPrL3Lifc+Rz68xUiWkQUiGmXsJSzBFzLN0md43gu3Yj/x9rXoTESXo7UIsBGhtZv2b62Y9yA9g2DfGDra904F+Eyys/qL4pStolut+Qvltz9A+A48XL60U1fs/3Umt3pa9Pgk7szrbNT8wZQFJAj/+Papag3Rj2UhxHXcrB02Z2xUWRH2jRvrwbRidwJ2YQbePU1RgOw7i/jxepBGFh+3W0RqyQm+5GwhaUYdRK+esozjbyEixuNLZ5EJ2mOVc0rnf45UDbV7Z02SxXxiZc6nZSEp+MFvj7HSdj9cSLi5HOFtO+FlphtwL3YxuqrBwaD6h2DxJzPFZH9vtvHbxs+KznqOdVZFcoZRmmCvqrvzM56wSFkPLpx+VXN+IGWSKXlrScCW2BVkliGcOMb8Rk9T3cUdUHGxKNm4v0qskRa6wy5QuGSu6Dsri
u3oDGUcUlstLky6ZqHFjdUCrnzOTrED3Ae4Hb+KNHidOMryDRxgblZwMPQb3MxrFgKlQIcPwiX/nR0Rtjou++BbV0n3jul8pk3o/Mg5hHWz789Xc33DJPvwSFjQwRe3zaEZ48uK1jobGd9GyjNv+sdM7+WcPQyUJ/Lv/p5Hmux5M74GFO8aLTVUwMoqanjV7J8v3DRe+vmK+1v27xu8xGoIG5Xq81wUV5MxHsL+msbsl//UM/Pf3YxYI8KSkHUh8x+LmSp+7qcXQC0l1ykkvJV5cIE4FQycl8BqklUWpYDR0+LunxxkLuiwOTcSG0mbpeS0qmKg7FEpzZjKkqDTHx3zmuzllpdEakrIicB/9AfgsymdX6LGLrwJPCwPd/n9Vad65P+TmZkJVwenpkNf2NR/J2Hv4tX9+t8/1jYgoLhgqkwuoKs2hUZeD7YB2YHOnW3BvWOHagtFA4gQhMz606eEHglu9mAEebSdGByFJ2ad+8yr+0n281lFulQE9dxRRWsSFg4o8grzBuQfvkft1At1CMsHL61dIWCU4dgiJNiYNW16MgPl1ewQwRU0FNYaDiODGVWTUN2ONF143qsf9h6BSiGPP7fgzPl6wvomu+k/DL2VB+8J4wuK1VsoUH+fTnd32LaeyJPzZ78KDeXM7SmPjwRYNIAjNDWp9hTIZmEIHcPC4KZRYZtbm+WAJCOok0iWrHJzAoxwmVLZHJSz0tYscscaJpcvo5DS3NjMm6i5YklFboVRFIm3+b/EelNMiHXUIqgK35rNnxONXDpof8MA2fnarg4KaJymU4t9e3WBpUCAtwXf213l/eYhAmMwzoO5KCqVJi+pT34dtCf56VODZFq4Uj3xsV+ixiy+Lx58jV4pHVkiSUnF5JWa+l9HyJZeXNWena7R8+5HuzhLQT0s+Wo24vtjHExVCCE6OT7LuWnSSikpnnJ6scWDUpdQVWWkMi8uqomVrWo6icAKe85rkWYrCJpMeMitASmSWcLqm2OwXWDWLsjbLcElj6z5ZlpB3FfWmhXJchlFC2Bqhvrxg0uprDfT1y1AWRooPiLIgfXCbano/VVDjwuKQ5MolvI7mvFUilTJuRElsLtFBCC9+B/E0Nuob6Kr/NOwWtM/CY8Ni7bjw0QcgHXOb2X/U3G4ectvXabJlJJqaG1KWmQczHsDKoimGZQkzc8jpvVTbnVuamCJWFlCVMDkFo9Nw4BjBsVN4A0lcVnSSgirPCWPNs6VCBi6TsqQX5TjSYtLzWB2sY9sKEGTNCTY7OYHlkpea10c01UyT7x9uE7jGS/GnC0O01niORcO1yAqotObwmM9GXNCJFatRQc2VTNVt9jRdUqVpuJLxmvPEb53Wmvluhm0JfNvilb2NnQXrXaHHLr4sPksI0klKqkoTuiYVAm2eO+Mj9Ul359kW7z8YkCtNPykoKk3NtahTsDfQLEWS0dChm5b0s5LFfkmlNf1U89IeH1kWnPNiilZIFUXUjxwjV4p/9f4yCS4rHOB/3o+IREizzBkLAlZtG4mguWcPZVcRTEwTrIHaf5QLZYssL/E2FOcPn8I+ftr8hbeEH7q7aX7dHCG/chF99zZRa5K0cZDQtojdGslwlfrkNBw7A40mwvXMGTXooaI+TMwiH1c5fktsr2C3oH0+jpwCzM4GaWJuSsdPw+X34O51uHeL6vgZxImz5pYjJboszQJkEBr5bJYYCrHegLPn4d5t2H8YO40olu8bR/61RZg9gHowT3LyJYKbHyBtCasPkFXJi17AuqxRFTZuklIO15G1OkEeM2cXzBarOJmiewP2Cc3RSZsiSbnTtlmKK4b42LZmdWQUt4CP1xNcaeiWuCxxbIuaozk46vMr+1v8m6ubRJkisC0c27iBRLnCsVz+l89N0klLfPnkgpQUFVFR4doWAhhk6hFqcVfosYuvAk8TgjRciQYOSkFVaeZaHqOB88hrbUuwMsyxLMGIKxG4VIMhTVky4goy26afZdzpZowF
Njc2E+aaDpV2ECSErqQs4a28BlGB1+lzXl6jQ8BiGeCh2XAn+JeDHDecpDaA3/grpzljO2ilCMqMTD6HpwpS+RIqTsg+vEUoIVaQHDhOs1Y3xUhXZtRRa5gxx+oixeICBHWC1RXcFw6Q2D7e/oMEchTqdVh9ACuL6Nl9xi7rd/7v5hzyQ9Q/+G8eKWrfFtsr2C1oT8WTeOXtmwyDnlEVdTtm+HrtEvrwCWNNkyZmK781CsMunDxn4hvK0nRgQc1w1MdOE460SP74d82feetjlBBc0G0SMYYYfYaXpwPU5fcI5m8hGw3Gj52lVjYM3y4q6uNjnPcDkiPH8W5e5kLe5HZPsV7YvJ1oplwHOxMcHHFY6AumaxKNxYlxn4/WEtbjgo244H4vZz0uGPElDbdO4Er+3tlx1qOCG5sJNzZSKg1HxwNe329oynvd/KnCjmArb0prjUbQ8OQutbiLnyueFPiZFBXfO9AiV/pTbIAAHEvgSou5psuepsuZyRqdYYp0bOJS8Oq+BpeWE56fqW19jkBampprU1YaYVnovYcJ0iHdStB1JcNBhg5CBpWJj8l1SdMCjUtnbZPmxCjhtYvIIsNyPN4ZP0le5ci8xFE5MRbe+gOCKwOqujFyQGPOm2fOwZX3oFSoxXvguEhpc/6v/s/IDpwlKDMsbaz5xENG6jrqm2JWbxrV49oi7Dv8yPfv22B7BbsFDXiKwudJvLIfmI5NKRDSFCrbNv9+9SLM3zIF8MgpmL8BgwFc+FOYmIXDx0EpxPHTiNEJhJTI1ogxFE0TcBySYUJiuTwoPQbWGDd7DofdOYJQcK57gzzNeHasxaCIGDkyjX3iNPgBYRyzhkc/KbCliwraVLqCEZ/x0CWvNK5jIzCuCmtRyfIwJ68qFnvb7okCrQW9rNyxq2r6NiA4NRmyNix4fqaG3JqNJaX61NI0mGH7MFecmAg4MR4gpfhchdkudvFV4PHOf/vf3cdOuaSoKCrNeM3BlYJDbZ+GJ7nwYEivsri7nACC2abD6JbF23YuWq40r+8T5ErjUHHhbodLQ0le+Lwzr2k4DisV1GzMKgw2S6WgjFNuXLuLvjGPJzQvT4TGDSjJ8GsBnVTwYuc2btwnGGwiT/zAFB/AGhk1e2Zri+Z8qdURfoCumcR7qUoagQu45iLueub1AnOxrrfAD82f54cwMfut9Uj9pS9oT1X4PGl+tvU64Xro898xhqF5Bq5r5mWdNVPsrm3N2Q4cge4mTM1uPYgN84DtRMvYJtW634OFOwSr9xDefqLWON7oBFmaYWfrJN113rSm0B1YIGTv/ilC1+a868OH7/LWJnS1zXJrAiwHN1aACdRsh0ZuvxYXuJbFmamQAyMea3GBJcDainHJlaaXFry/GBHYkrprc262hm0Jrq4mgObqaowQgqw0tlb7RlwC+5Puy8zjBny0Zl5/aiLk5a3Z2S528XXBNuW4ERUsb7nq3+vmxIViqZ/j2RaTdYe2b/P9Qy0CR+4c/NvF0REK/cE7HB+kfNBtULQnWO5kNMZCnM2Ug6M+y/01RsqImtDMNUIyN2CkSogri34/AiGwbJvLKzEqzbDDfXxntkQu3DLFp9aAsqRavm/YoDw3puZ7DiEnZykd12SgtdqGknw4rurCn0OWoy9dwHr+Fap/8N+Ygjg6hS5LLqz2yEqNVxWcPzD6yJ7bNxnfjv+KL4PPCrZ7ON+st4lOE6yaCcqzKoU+/13z+/WWiYEpS9Ox+aGR9ccR1JuoeoskzQhUhbx0wUTLCFA//Ovm62UpFDl2e5TXBkvQb6H6HR4QUtYaiGSIBuxhjyTMkUv3yLCI1z4mV5pLeRunKtF1yV8/OkKtKlCuR6zg2kbM3U6O1nCg7fHyHlNgnpkMGWSKo2M+/azk5nqKa1torXGlRaYqcqU5PRkyzNVOVtS28fDelsvxsYDaQzfipKgYZAozfxcMcrUry9/F1xICyCtNUWksYZGVFSfHTWivFrA6LBDAjY10
51L2cFej45goSmF1ESuboFNIMlljZWgKYpkXHGTAPmuIqCqEHeKUGV3LJ9i/jyv3rlNoQXXvNlNT+1lf3eBGv8DqdvnukePYzzxnzp3/8DvmUtzvwOqSoQ6Bxv/h/0Tn7i0YnUJ8dBGdZ0YFefikOXfmb5qzaOU++thpZLOFnjtA9f5PiQYRiRohtCHJK+LubRrnzn+jZ2fb2C1on6HwEVKi/QD9/k/N3Gx9mWrugGn3HdfEymx3bOdeM58UR0bk8cJ3EHlCKR3eee86udPA2Rxw3hsgu+sQDYlW7lNN7jGSfs9DDwe40uI1a5MOPV5bu4waNnCHm7zHKImWBMMOKmwRhC65Sngv8VhONZZ0mBGCxq0PqauUn6Q13ndnWB4WOELTDj36WUlSVLQCm/NzdTpJSdMztKFvS9PJRSa9N3Q+6bxanunytoft5vUWd7rZTtLv+T11MzvzJFUfwKggd2dnu/i6YZtynKo7rEUFUW4S2Ss0z83W2dd0+XA1ZjS0yUqTqL5tul1U2qRCaEWW+IjMoxAuuYa9dcnJqQZN3yKNUtqLQ17oXuMtOY2amOFWY5aZkRqlKqGS1EOXYZwjkoh+XtGyBKrQrGcVk0ENq7uOzhIoc+hsoIRFMnuIAEHx0z+BJIWFu2ghEEFoFNgffWCirjrrhhl6CNs7tIGw8IoucXuKoNEgKPtfSKr/TVi+/qUvaJ+n8Hlkkbos4dAJxMSU6dwe6uysSlGde80sU+cp3L+D/tv/GzLpklkOYZkRuSGJrajHETgOVWwsa0Sl0D/8dVSS0Lt1i4vza4iViKAMOS96yJExzquCpN7k9WmLRPX5cFDwhtPgQWsK1y6IlFn0DnRK5IVcXhVs+DlrUcEw1Ug7586GwKbiB4fHeH85YiMq2EhK9o943O/lTDccjo75nJmuPTL3etKwXWnNxaXoU4utpydDTk4ESCE+ZXm1i118HeBKQVWZ5/jEuEmMONj2kEJwfs4InB4MCrLS7LRdXjXOIff7GaenQrqJQghoHjnBe4lDXkJR2WTaYj0uGav5tMfqROPT9PN1RDiJlBbDHCxpoXHAkXTinNCVyFYD+8E6qVLc90axEpc7Hz3g/HSAlBKSGDU5ywUxThaM4kmb79y7awRpZQn7j6A313fci7CkcR4anYDRiU9lNkoB50WXpDFO6PSR3udL9b8py9e/9AUNfgaFj2UhwtpO57bd2WnbMbOzQc/smT1YMMvU//a/xf+N/x3+4SMkSUYQeATjLhQJLC1QDbtw5zr62GmqoMbb9wd8sBGy6h/kwIjD3nSVJB1S9z3kvlPUhYDFewyList6P3ZLsBopRnyXGd9mf9Mh67hUvS5CjtMvjAjEllAvjAPCGx8tM9/NWY9L0hKivGSq7pCVxqlcaZBCPFKIHh+2u1KwHpVYgicutj6ufPy2DqB38ZeLv8hzpSrNe4sRGo3QcGoy5MOVeOdilitNYAlOTgSGlwQuLkWMBDb3ehkrw4LRwEYKQS9TONOzOKqkM1DsafkcHvWwsEgV+EdP0a5nOJHHlTzgblGykWyyr2lzZO9Byqyg8F2E32i4EgABAABJREFUkJw7e5iVj25g64L65hLDtSXWyw3GDx9GTs2RLK+Q0SZst4lnDpHc+FOc7byzPQfgx38A/R4sPzCJHSfOIk6cfSQiRtTq6COnIBogaw0aZ8+bi/wXwTdk+fpbXdC+ihb54YeAWmPntrPd2VX9nkmwvvyuoQ51ZYpZYPzX5KDDS3unzMGvS6z338AQ9Qr/+3+VYnkFDhwjfucnDPoObpxiK4f1UrCn0gTT07D/MOLsecgSsiznp8uKZRHiZprptmDfeGAG1zYElgar4pCn6LmSQWYRlxU9IEDQEBUrvYR7UUWFoFSaj1djxmouldYEtonfGGbqiQdFXlb8zqX1HZrmb58aYzR0nmpptevduIufB77Ic7WtuH3YIHv7OQ1di15qPvY0xxFPWpybrW19XBmZ/9bXODEe
oLTGsUAphdYxc02Hmmvo/FxpXClIJ1/l5OYm6x2JWFik3OhQ28gYDsaZOLifCGHmeUjGjx9FX7vEMCtY6GZQpgTxTc7/6ncI8gIv2EucF3hrDwgsTTnY2k37+EMzX9t/GDbW4dmXEcee+ZRPrZAS6/lXPlFsA/rmVcgz9Od1XV9y+foXRVd+awvaV9UiP/4QfOrPuPo+XL8MjRGY3Qv/xW/Af/5/G8ePIITWKNIShEKhf/wjI+dvtEDa6P7ADG4/ukgwf4u6M0vljVENCnxL4yZDKB3z9wdUo82beoz5qk9gmTTeo1N1vnewTa40fh5jLRVUI6N4KyVaVfi2YN9IgOil2JXiZuES5xV5Bb6taXiSmabD9w60aHo2gWPx3mJEXBiLq+29s+2bcCcpifKS3pbT/tv3h/y1Y+2nWlp9nufebnHbxV8En+cJuq24vbIaU1aaM1PGyzFwLEMjrsSAoO4mnJup0c8U7cAmV5qoKCkrKCvTsZ3fU2c9KtAaQldyecVQkGtRyVxDYj24w7E8JxpIkoOHgTquFLwx36W6exuvKhjVFauFZAMPC020tgF5Rqvmcf65oxRBSFBmVMsV62sRDNaoy4q4cEj+9A+o5wPOj6+R7D1CIDROGVJ2tlz1R8fNSOTOdXNOXfwpOo3RTzBOf5iN0tHwC3ddX2b5+hdJV35rC9pX2SI/jZKs+j24ddVQjd1NmNmDGBlFn3wWuhswMrbj0K8v/Dl88BbEQwhrRjTieTAcwtICMgh4OVnkQNPhg8GQ0WyTtN4iGW0R5DnphxcppYPWNo1GHbA5dHSO7x1s49oWrg3aDikdj5VeysdFg1rdgaEiLSv85gi2VSF7JQ0gzit8RzLTcIgLzYcrMQ1Pcrjt088KFnom0j4uFE3fptq6yT47HSItwVK/wLEF9/vZlgrSfqKl1Wd57u12bLv4i+LzPEGTwoTQrkYFqjLP97aX44mJgLW4YCJ0yFTFmwt9CgUNT/LsVMjb94fEueniXp5rkBQVTU+i0Sz1c6rKiKK6acGkU6KyEtHrsV5a9NJ5KiGQUnJ9eYgXa9q+x6tuj0MOXIoHtHTGFdGitGxYWkCLTXR9BH3meewgYHx2kmCtT+yEeINNgqgProscn6J+9nn46CI6icycLM/NmXLuNSMEGfbNJTkafP6Z96TVpIfk/4/jL7x8/QukK7+9Be0r8Cf7rDZZK2W6s411o2ocnYBTz5t480ohpmbRSWyKXm/dFDwpjaS/1YYjpxDdVVMAVx7AyBiyDeOuT7AeE2c5gQdub40LYpQsrOEMOziWw0SzyVyR8d05H9f+5Ae5EhbvjJ9kzY642k8QmxmV1jR9i/N7GlxdSajHmk6qGA0kPzgywpnJGn94p8/SIOfdBzmdmYKLixGpMrtphdL4tuTZ2Rq9rCRXmr91YpRBvkZjWziy5ZH3JEurLx2+uItdPAGf5wkaOBaubSh1KQW2hUmuqDQfryWsRQUrw4KZhsNi3/igVn2YrNm0PMl03WWQlfzoVpe6J1no5eSqMqpeW/DxWsTtzYz1oWBvYTNZ5twSU7iVxfWVIfvHG1i2zQepRz1XdMIWv/HaMUbaa/TTHHHrHpNrd+mnOf8530dtU+He6fLaMy/iZDHn6y2S1RWC20tI6Rqj8aBJrdZEPPcqjmOTpsaBXxwzBgv60gUjYKsqQ0U+5cx75Fx7eDXp8rtmpeir7qJ+gV6R39qC9mX9yT63TU4TY0ez9xAMunDkJNZ2jMy2WAQB/+lfG3572Dehfo4LQR1GxxFJ37iJHDoBRY5KEt67s4oe34NQpXEmWLpPtrmOu3CblcYEoUgQtofv25/KSUqKilTBUmaRV5q8VNQci16sGGbG2eNA22dGVZyZqvGDwyMkhXEcL5QJ93SlxWjoUGnN6rCg7kmqLUWjKy1CJ0IATU9SVnBizDc7dZV+aqf1ZcIXd7GLp+HzPEGfn65TaSgrTX0r4miYK4pKc2oy4IOlmKLUrEYF
0w0XAYx4NoEj6acl3UwhhDHYTouKQFT4nsOIb3Onm9HybQSaZHSKgVCUqcuELbAcm8VBTi9VlF7I4UmXTFv0r3zI+fnLDBXUxQjDyb38dK0i6UiU7bN/I6es1jg1O8L4udcIk4h4chY37vOeapJNHce/vc6L/VumKBQKcfb8zqxMP/8K+pgxNH5YDPIwHj/XxLMvGV1ANHxEtf1VdlG/SK/Ib21Bgy/pT/Z5bbIfmH202b0g9iFeeP2TN2o7N+2tP4N7d6EqzAPY7Ri6cc0sSHrnX4f5ebO7dvMqyeYmWS+jruaJpUc+f4ugu4aYHef3w6OUbgNPan44N0YRNkkrQR0j1OgkJQ0bVJLyoJ/hSkGSQ1xqKipubaQcbHsMMsVk3aVCsx4VjPkWJ5uCXmXj2RaWAEcKpusunrQ4NOojMO4g7dChm5QIAWena3QSs7N2cSn6wvThrtv+Ln7eeFgwUnMkJyaCR8JqbUswyBSOtJhuuGwkJZM1h4nQoRXYHB3zuddNKUpFL1MkmcKNe4SypJKSxtG9NFxJWmqSvGI9hU44zUqaUtouB7HYP+IyD9iDgttDzaF6RTvtIaVFHc0zxSbLGzG5fYhYSAbaYc/mOm+uwEe3V6nPTHFsPKScfAadZ1SWQ335DlEUk/RuMfrSS4goMozQVkETUiKarZ0OTD+Un7ZTSJ52rv2cu6hflFfkt7qgfRlox91xuRZ+8Kk3ePvWIZ5w6xDSLCCzeh+K1PDZ7XHzoDiuiZOJhubrfHzJ5KMt3yNYXcGpHWAzzmi4JcHCdZTtsdFLyesWDV2QVTarlcdUmeElAzJq/OsrHeK8xOss811/yGTSYG5ykjudnFJrBHC7k7EWlTR8m/U44eY6LHRSTqeLvOhG5I6PffocP3kQAbDYzzk1GfLcTH1HKJKVlVmc1ppuYhxB+lnFVE2SqS9OH+667e/i54nHae1CGUeQ7V8/N11DaY1tDUnLilOTIacnQ+qeZJgpumnJnW7GYi+n0pojIzYnypRTYxZJnFAPIVcus1td3Z1eysqgwLIlUlqkZWXSsivNszMhoS351QN13BsN1HLFhapNNjJGpgVRNkJUagoNca5Q0mXEyukmZkd0puESCYGVZURZjuOYPLdsbQ2dJmhL8vBP0sMd2MP5aY8brD+ybqTUt8Zxf7egPQFaKeMCsu1yffqFn52yrBRsrEFzxHRlf/Pvw+/9G1hbNjeqhTuUs3Pw0z82D1V3A/walKUR8voOSrq82T7BkqxRaotoOGSvV/KdK5do6QxLWqztO01s7admVdyKBW2nTkDJTCg4NjHC1dWE+90Mz7YIXAtXwiA1c4C1YU6vrCiaIfU8JkpTSg22tBASklKDMLts28asrhS882CI0hXv3h8aPzjb2vK826UPd/GXj+3F6ShXBLakHdifornfW4ywhAAtODdTQ23N1y6vxtzeSFkeFHg2LA8VNzoVQ+1zf7VgoWiy/PYG03WHvSMB//XZCZpLkp/mA3qpIlMV890caYFvmxDcoVbc7BaMPvsyyb7jJO9dQwLlZpd9fp116eDVXeZK6BQ566VFUOa07JCkqHAtzbGWgA9u8HEZctEapT7f47l8Ebn+P1D8T/8embBxpSDuDdFxTj30sdaXwXERo+M73Zio1T9hkK5fNitH28XuW4DdgvYE6GiIHvQQzRHI0kfa+p3XfM6MTVgSPTb5Sbp1a8TEPyzcNjEyWULV2zDFzLKg0iRejSJNaRMTS4tOewZdb9K0PPZbEXPFKr8muwRFCmGIEjZOMsCrlXQLCykF41WKGzg8u2eE6VbA+dkGv3+zw0pUYAENz8a3oZcawYfnSPxsCJ5HUA9p9OOd2Jeaa+3Y/WxTitu2QZYwy9VjoZE6H2h5j9CHu8vUu/jLwOOL0+dma7i29URhUm0r4++nCwMsy2SnIeC52RpLg5y00tRcyXTTxbUCOlXJIKlIYkU/N2nt/UwhLYtnJkMCR5KWirTUhK7NeGhj
IZhqOGSlJlJQCpt7pXEP8hoOz+8Z46PUZS0quCvaKAqKboc9g1uIlZTjP/g+ly/d4oMsRpQNdHucepGQ9fsktSZBMuCdmyskbsj8cpdCC0TH52R0n5fZQEqLCqDRMkwTDzFI0cBcuLP0E0ekr7kTyOdht6A9Bq2UiTxfWkAvLRg14pP45Ie46CoawuaasZnZfgj8wBSyfLugjRp149rWn1VrYM/thz37jey/qggCD6+sEbfH8BpN2s/9KkHkMnf/PntW7/Nafge3MQ5+C1VkXChHSHSbPe0ax8ZD7k4HfLwagyVprQ4ZDx0+XIkYkRV+y+HmRkFUZMTlNhVi8529s9hlBn6ABF4Ygf3NJoHnIIXg4vKn7a08aTHMS2xLoBF4tnjEpHh3mXoXf1nYLlb1LY/SXGlc++nCJLRAY+jIYWbMt29tZuxre2RlRTcpAcF43SEpLbJhgqg0qqpwbYHvWJSVZqLu4qLJhgMelIaKHAtsikoT5yYd+8pKTD9VFJbkuJ2gfYfnD09xbBjz9odLLGewmApkVSIcj2E65N0rd5kfQs2tMzO4jUwT4jInlBBszJPMHiSVHtZ7b9DvW3hFilOvMxCQHHuG+u0r5lL9EJ54xsE3wgnk87Bb0B5HmhjO+fgZdL+LOHZ6Z5fsEX7ZD9C2YzzU1pfNjccPdm42osjRew7sfK5VKfTZ8+g9B8APsBotZFBD/K2/j95cg9vXkTevcr6zTIIkmJrEHmnz0qRPMhHgqP3kyYvImo+8c41kEJEUDRZH9jLsZCgsXpxrkJbQWrpD2ivZ2NBEsY1VlVxLPW7qJoErCR2LE2Mh+9u+scYKapRFydp773F7WKGkg3/4CC/MNZ+oSDw5GaCUT92VdBPFaGjT8u2drkxV+nOl+bsd3C5+HviiKtqTkwFodujHpDDWb8fGfMpK0/JNQTzS9pGWoOlJ/vWHa7Q8G9uyODjicmoiJLAtqgp6UYr1/pv8lXSBPGxQ/dW/w7WeQmlTNE+OB1xaHtKyFNbYNHnNohHY+B++hU5yxtaHrLUP4QiNtkD2NrGGywwBz2kS+XWkUrzm9sjTPuPf/QHpckjw0vfxV1LiNKWJTVGVVBoaOiPoroIlEBPTJtFju0g96Yyr1dG/IGn9zxO7Be1xbKdSZymi0TJv9JPSq7dR5FCYNGo96EE0RDRbOypI8gzhB1TWViCoxvz+1p9huS660UJbAs68gOx2qB8+CQu3TJbR4j380UkurBdko1N4luZ8UFAbG4UVxfowZVhZVDrBtaEhStK8xPddaskqtwYjJHbARqaxfU1WagJb41iCny706aSKpmdxd21Ib8Wmg8P3w4jeICUp649QNarSvDHfR2MCD1+aa+w4LABcuD8kLhQa/ZmHym4Ht4ufFz5PRfukZ2/79a4UJFupEllpnuFmYFN3JetRQVnBsfGQm5sJJydrDHPFH93p4Vhwe6nHXLfP+1aNl7t3SAZdsjxAqhIlbYQqcO5cJVKCg47Dc8dPUbvyFu/c2WBo1wiBv+2tIcaa6JmDqP/8Pv/RGeNeVsPC4VV/yGv5PdxK4lJR3bgMRYG8c40Xn3mB5GrK65t3SOIEqhb1sTFkbdy4iGTpI0XKCN40JPHOGQeYYGKeLvn/JmC3oD2GJ6l9HraIqaIhem3lEzPisUlYXzGJ1Y6DrjXQz79i/rCtBwQ/gHffgFvXIKyhp/cg0gStSjOvc1yjOOpuGj/I+Rsmz0hKWLpPEqcksY89Ok1SapKsIGCI7TRYzzSDvDD0n4ajU036a4JJ3eetaozIkjhKMe4IkFAIE3/x0/t9fnJvQCtwUJWm5QpKbDZSzf+vDNhbK6kvR7y6ZRc0zBXvPhhyfT2h5kqm6jZ/cqeP7wgCW3JyMiAuFA/6OVGuODzm88xEyHjN+dShsrtcvYufJ56molWVfmraeuBYO4XOtgRnJkM+Wk92VlKenTYzsrgoqbuS+a4RjgDMNl3uJ5rcHsXTOc/YKwRScPfj
W/SVoOlYvLrc4dnlm/ypnMOpN7h2b51jpeCqNU5VlliNCc6ePczIaAsdDVkOauSly5FsnY4zziknwd17wOyzzh2AzVUTE/PxB8ijz9D49b+HXlvB7Xfh9scwMmqSPE6c3WGUhJRUeY5+98egKhNQfPoFgKdf2L9h2C1oT8CndiYephdXl4xn2vqK+djEtFmuLkvESPtTA1bleCT7T+BXGhnWzM6ZMLek7MIb6PU1SumQ5IrgwTxSKZR0SKRPEPeQAlzHZsEZIRkIgjzh9TlJogXJzAH2D0sWezmuNAGE/+lmn1TuxVIFZd3mfpFRFIojYsB5NrBtm5+oMd57ELGeGEFKVVVkhUVmhchQM6xgPVFcXYs5NRFyfSOlm5bc66UErsUgKxlkCiE0dddmT8s1fstosw/nS+52UlQFI1uWWA8Xtd3l6l18WWxT1q4UO+rbXOmnUtjbnVlcqCemrSdFRVwobMvsXBaVJlcVlhBERUk/U/zG6TH6mcIC3rw/QFqClUHBxjCjUwis0TnsMkfNjpHc+Ii8p3ClS6Z8VmzFDXuG+X5FTcXMrawQBT4rnkcFiFqbQQGNSiNrddrHjhFcGzLMHJpFxPidqzA2jjp6muTgSRr//r8zimmt4dIF9LPn4cFduHEFbl8zTiF7DsCL30FsL14rhX7oYs30HiN4K/hWzM9gt6D9bChy074LYR6ksUkYnzKy/jvXdlp7vdW9qVqTC6s5mRfjlw1enNqDtIRZwi5ydJZS+SEXlhKyvMSzpzgn1nmvC1ktxPPrnD/xLElWMFGMEExMoNcqyrpNmMU0ZYXA3EgDxyKvKtJS0fBtNmNNWWrjQSc1M2VBPXAZxgXdsgBhIS2wBChgumGzOoTRmmR5qNBAmmuiwkiRRwKb+32LyZoDoYu0YGVYEOUKtNgpUrY0+zSOtBgJTDDokwyJd5erd/EXxXZxSkrFvW7OXNPhQb/YKVJPorAfFovsbZn518PsgSvFjn9p4Ehemqtzt5PRS0uiokJrqLvmcgYwETqsDMyeWmLyPykbLUY9gTjWgGuXwLLQqmBNN3lXt1lzFXbYY9CaxNIZ9SPHmLIiykqwvrLGxYvL3AxsXn/tOfzzr/Ff7lmic/E92v113NYMqj3OOxPPkPU0tZFjPDcYIF0P7t4wSdYri+Y/ttY0BuhjkzsKba2UmdUrZYrZ1sV6Z1b2M3g6fp3xlRS0ixcv8tu//dtUVcUPfvADfv3Xf/2Rj2ut+e3f/m3ef/99PM/jN3/zNzl06NBX8aW/cjzRv3F7iDoxjV5fNU76lrXj4Wg1Wzs0pXZcuHQBHtwj2dggGz1BGDwgmTtMNu5Qb27lqUkJjkO0vEYm6oR1lzjq06lsslaLcHqaOCsZHj7F1Z7mQaegGOQctWysJCZxfV7cP8rBtOKD5Yh26DDMSmxh1FoNz+bAiMvl1Zi1SNMpHSaLDOE4jNg2gxL2NiXHxwPevtflRkfj6IrZlkvd1fRSxUzD4W43w9m6tZ6cCDg9Ge4M0m1LbDnyN8iVRmk4N2uSsF1pPuezDIl3acZd/EWwXZwsIUgKRaUdkkJhCfHUBf+HWYHQkYzXnB0KsulJ+plipmGjcbAt6OeKciudOi81tmWRKbMwLYXgxbk6B0Y83n4wYCUq+HjVuPXY2uLqAF4IfNyGZLOU0Bhl/FCDKze7NKVHiOLltsYba3E6tVi7eh2d9FjOHfKigbi9yfeOT+LPzDC1PAZDwwYlI5NklkOgczLHJ5EeQVUaNmd0Erm+asKFoz5QweaqOY+Uonr3TaPE7nVgbj9I+Yi7kT79AvQ20fUW4ufl6fgLwJcuaFVV8S//5b/kH//jf8zY2Bj/6B/9I1588UX27Nmz85r333+f5eVl/vk//+fcuHGDf/Ev/gX/9J/+0y/7pb9yPHW37CGhCMeegSMnjbhDykcHqFueaJQF+uBRgjTFG20TJxneYBN/775HHg5dKYLFO3jWJPHUXrxzL9Mec/Eu3SJe
WcSjgrs3yEaP4W6uspCAdmzue3s40GxTW0k5N1ujHZiAzrpr8/efaz4i1EhKTaXBH50ltjW263A0UXSSksNtn9m6zR/c7FFRAZqpus3xiZDrawnHxwMGmeLMVEjTsx9JoP50h1XtpAC3fWdnEXvXkHgXXzV2ilNpuilLgGdbRFupD0+isB9nBVSld3L9epni3GzIe4sxLU9Sc23OTgqEAM8xrEOylTrx8F7mudkakzWXxUFB4Fg0sj6zUcrgdoelZ59hTxAxYzvc6xd0k4qpus2xA/vQqqDY10ZXghfbkn4t5t90bO4VklBZKGmTFBU1G8Sx0+gjJxGWJHR9vLcuEUcJDa1wX/4uF7qCzHHxu4IXjz6DnJ6DS++Y6CrXhTShGvTgzT803ZmU8J1fw5qe/aSYbRtJ5BlojdYaq1b/RtKPX7qg3bx5k+npaaampgB47bXXuHDhwiMF7Z133uF73/seQgiOHTtGFEV0Oh3a7faX/fJfLZ7ic/awUER8ThuuHdPeEw2Q/Q7nrQ9IlpYJJqewbjWp/tbfN2aiaQJRhPRczusuiWwQTh/DdmzOP3+E5IMeYau5M5PrZBWutJGDdfplQVn0iPcd3Mlreri4BNtGrFshnaOBQ5QrvJZv5gZuxVzDRQMfbyRUAkJborTGlRLftmj6Njc2UlaHBQ/6GScnQs5M16hvOew/3GHlZcUb8wOUrpCInWVWaX0y59idme3iq8LDxen1fUaZ6NsRUVHxWeT1w8/selTs0ItLg4K0hBFfcng0wLYErmNxaiJkkCtOjgecma6B5pG9zFxpXt7b4MREwDu3Vrl9aQ0QrCQV9sqA9zoVLU/j24JXBre4EWuK+w7eocN8uJYSFxV1B056HodYQYqQzK4jpYVvafQHF8x5tGUibKcJ592IpBbQvt9ns/DJw1Fqhw6TpDnpnia1q++YSCulYHafcQHZWIXlBeNEJADBI8VMbxobLatWRw/6UOZmrvcEy7+vO750Qdvc3GRsbGzn12NjY9y4ceNTrxkfH3/kNZubm08saD/60Y/40Y9+BMA/+2f/7JHP+3lDt0fIlubRWYoYn8Dbswchv/i3SKuS9K0/I1t5gFq4TWlJSssjyIb4aR097FHPE5xmHV2boLjj4TsOwrJoz0zA6j10loGuGJmZQKiKqtFgtDFDbSUiTgocSzJ06tzNFWG/ZMyVhK6katdJlCDcKmZxrpgesZhMJI26Ifn/ytEJY8yalrx5Z5OPVvrYjs9kLcWVFu26y9947iCBKykrzR9eX6UxyKjQ3OhV4GraoeD1Q2PYW51aWWl+dG2V+aii7jkcaAfUR0YJXcmf3dqgn1Q0A4cfnt1PXlaErtz53Mdh2/Yv9P3+umP3+/Eonvb96MQFoqPZM2KYiqDZpuk//ee2rDTKzQnChKJUtOqCqVaTTKS0WyGhK9k/O8b+WfNz5NoWyZY3YzuR5GVFzYPpqQl822IK2N/wWL7+J8RK8BFNau0m01XJkckaRZLgbEq+Py1JC4XYO8rvfNxFCIvVVPPMyXO0Bxeo1UagKPjhHh8/9Mg9F6vdpoojvFqAbtbJHIumA86Z5wmPnWFhJSNNc8bH68x6CiU05f5DiLJETM8gpUDPzjG0LAQVwvEYmZnFGR/fOa+qXhe1uYKs1VC9TeSefVhS4r32fSzX//m9oT8HfOmCprX+1O8JIX7m12zjhz/8IT/84Q93fr2+vv4l/4Y/G/TBEzsztGGn+7N9bjSkWlqEOEZpwQV7imyzwpEzPPPgAXVXUv5/fwf2H4Hl+9SPniQdn4bDJ0mCGurqRZLlJYJkgDx4FHHmRSInYLCccPrELEsbQw70M9ayAQuly9rCff7jx2/wHbHOezPPUuw7im2BKHIK6eI7NudmaySFsfTpbW4gLcEwU0TDAaLMubmacnDEYbbpMh7a/OTGgx06JdA5cZKSK4VjWdjKZr0Tc39J7dx0h5mi3x8gVcF6N2XMViR9i7Vc8fatDSxh
UnP2ByUt3yb/jO/f+Pj4L/z9/jpj9/vxKJ70/dhOpv5oLQE0pyZCkr4gHz75fMnLij+/2+d2J6HSsKfl8V+eNBL3c5M1krICXbKxvo60hHn9fJ8bGwmV1pycCKi00YT9h3cHO4nu/WFCWZRmtysI6MY5qJIP5yOWeynXNvqc1hu87MVEh58hSVKoKha7CT9eXKe1vsoz3KDuwPDdLsOtlHoxGIDr0e/04D/+DvR7YNuM/e//j2z2hxy7/oZhZ1YX6M3sgaV7Zi8WoLJAaeiuw9gUemQM7fl0BkMsIan6PfR7bxk9QF7AIQ9ao1hY6GHEcGn5kx21rxFmZ2ef+rEvXdDGxsbY2NjY+fXGxsanOq+xsbFHHsQnvebrgs+LOdiJZnBcoyB6mIL0AyOXtSwS6ZLVR/D7m1yePM6gW6c1Ocb5bANZFoa+lBYs3IUkRrXGuBB5JImPcFq8pgW+lISegy0iLj3os5RoVmqzlJZieZgTFH1u0eCEGJLFCWHcp7NslEwjvqQ7c5Bh7vPRWsIgU9Rc4zkHRtVVdy2ageTkZEipNEmpCR1ISkVSVJyeCjk5YRwVrqzFdJOShicfoQwDxyJ0JHNNd0cgIi2zQpCrClduJSt++k6zi118aSSFCd08PRXSTUqemQqfqprdNga4uhrTy4yTvRQChKEiVaX5aDXZES+dm63xxnyfD5cj7vUyQsciLzVHx3w2E0WUKyrMz9LVBz1WnDNMhRUnvIwzbYvpRsDvvbOIyCrWcemN7SOpJzSE4sSYy/qN26ioYDR5QDqzH5kvIh3LiMbyDLFtiu4HZvd14S4kEeQ56Z/8Llpp5J3r1B0H8hzhOOi5A7D/qNlFK0twKpCOUWMjYMsV5BFU2uy1Ld2HzjoVe8yszXE/k8L9OuJLF7TDhw+ztLTE6uoqo6OjvPnmm/zWb/3WI6958cUX+d3f/V1ef/11bty4QRiGX9uCto0nqR23RSM6TWDxHnp2347rh5ASISXWVsheUJZ49/r0rlwCQkaKdVLhkAyG1Odvgh9QRRGsL5sCuNEhOfGrPEgsIm0jCviudKDfY+7+x7y17qCUw420jmdbDEsL37IBjV/leINNhvMCu7eJM7uHywPAi7Glxe1OhhTwoJ/zwVKEEOBISEvNQjenlyj2jrgIhDEsti1sS1BWGrTglb11pBBUWpMW6pEwzydJ8LdTgW0LclVxZircFYHs4ueCbYFIVlaM+PZOkOeTkBQVGmj6ko24IC0qHGkK0vbHHxYvdZISDQSuZcI6K3As83tRrghci35quiElJYWQ5FXFGh4f34lJ05J7qaDt2uS5QBUFhWvmUq+MJ/QX+lwRHmls4WURwWgb5m+gF++BbaOffxUr2JpheSZIlzwHS1ClqbkkhjUjRLNttFImo7FWg4U7Jqaq3wPfg3oLyhxm9po92VrduCAdOWXUj1VlXPk9/xMV9+V30Q+pHJ+oAP+a4UsXNCkl/+Af/AP+yT/5J1RVxfe//3327t3L7//+7wPwa7/2azz//PO89957/NZv/Rau6/Kbv/mbX/ov/vPE09SOKo6Jk5xASqytxOpH7K74pMNz0oSXTswySJeobxakY8/glQVBGBlZ7fRe7MPH4ebHYAmCokCs3CfCoyYrqtFpBu9d4KONhPV+zF3nEFWliaqCmZbPZMOlbo9xaGQSOXKSZ+ev83YZUvU2yKKEthvQnghJSnODrYBMVazFBZWGoqyYqDm0fIuxwGEsdPClRc2VDHNFPy1ZHBTEuSIuFY4FS4OCXCksYfG9g81HitrDBWvbkf/sdI1eqjg7VdvdNdvFzwU/y07jNpuwt+Ux23BACCwheG8x4tysyUhzLLEjXmoHtmEfGi4zDYeWZ1P3LF6crSMYstDLyEqNJTSrUUnPqTFAU9R8eoOSg22X5obgqJ3Ra4Yst8f4/wiXZxZjzk95tFbneTlJSCyH4NRzyFodhj1YeWD2xN74Eeq5V8xitB/Ac6/CO38GQlJt
rMLcwU8K3XOvYlXKdHP9HizfN8keWWrchw4eM/uzf/Sf0J6LPnIK6/lXzAU8GqI/voTud81rXe9TKsfPSxf5uuAr2UM7d+4c586de+T3fu3Xfm3n34UQ/MN/+A+/ii/1i8ET1I4qqHFhU5FmNVyVcd71kHeuA6Bth+r8d40v49Ybr7KMxPZpPPcCrxY5sbDxL76JXOtDvQkP7qLCLUf+iWmk6/KazhCeR7X4gGC+B/PvkoweZzmGKiyouxbCM2+ZAE5N1bCk4P1UQRKgdU44O8eHzhSLqUbcHbKnZfNMUxIJ080tDkocSzMoKzaTgrioGA+NM7i0jHN4w5NcWY24upLi2ODaAlWZRerAEcRlyTBTO47mjx8kD9+aW1ty/13s4ueFL7rT+HDxU9okrW+77L8xP8Cy2LG9klLsvH4zLri4HLGZlmSVxfWNBMsSaC04NRGwFhW0A4fAtrixnnByIuSNewP6WcWBfRN8Z9bnw65iuZ+CBYNcMexHWK1JgvQe9cXb8CcJeIEpaPfvgiVNQfrgAnpy2vzAtydhfBo8H1FVoErEmRc/5b0opDRdmiVNwZMOIKAxAp5n3BQGffTmGmJ0wnRq1tYYwXHNQvjjJsVPS7r+mmHXKeRJeEIceVJU5JUgPHLUeCuePkL9gzeMn+Od62jLQr/0PUgT8jTjJ0WTapATzHd4adbI3bXrgm0bHtx2sEZGjHXWyWcRoxN4l9/lO/11EqtLMFKHawNEnlEQMmsXSMdhsu1xdrZOmpdUWcqNqKLWXWNaaKSA/vRByl7BeF1TlZqpzhJnswjpedhnz/GTxZh+WrISFRwe9fl4LWGu6SEti/NzdbMgXWl6acnyoEAjyBUcart4tuB+L+deJ+eSFyGFeCQr7bNoyF3s4uuA7eKnqk8MtAWA0ASOyUf7YDnCsgS2ZVzyu2lJ27eZbUj6aUknVUzVHRZkxqWVGGfrta2WpOZZXFmLmaxLDoz4/OqhERyhuXf9fe4PBEpIvNZeLl++TbEU4S1HnE8i5OrSlvvQFCiF8mskeUmgFXLlPvS6MDJm3IrGJrH27jcdF5hds4dn+rU6+sxLsPQA4qGZ6z/3EgQ1uHsdygrWl9A3tgrWkVOIskCMjJoz7zH/R+CJZ+LXEbsF7Ql4kkFxIMwPQKoqvMAjuH8F1pZgfdV4OW7tc6hGmzezGtf6mtB1mVtYIF6PqVnKLGOffsF4QmpNfvkiZJkJ35uYhmdfwo6G1K9eNNY58ZDXVj6A5lGK0VnmkwoszZ2NlIPD++hSERYW/cJiounzittno2Hx8abi+nqKRFO3NOFEgJXGpGnKd/c3SYqKy6sxvbTEFjBVd7a86zR1z8iSA8dipuGitOb0ZIi0LNPBKc2zMzX6mUIIaAf2E5eld51AdvF1wpPiik5OBCAgsD+JkNnOR/Nsi0vLEReXhlRa000V4wJavs3Y1jM/VXfoJ8bEwLUtjo8H3FiPWeknhK7DsXGB0uAXKS+6EWmtThoNKQYDVFlRm50l7m2SpC71zgY4NkzPoNoTXBg5SpbleKrgfDyPtB1DRToeynGJxl+j0hXy+mXj6vHQTF88+xLWy9+jOnYaeptGudgeNezRgaNmR23+xie0IjxSrB7v+HZmZ6df+LQQ7muG3YL2FDysdtRKYaUJL84EpJXAz2OspQJ96nnjst8eg6V76CInDkfQc8eo1VKiXGHFi/ieA6tr4HjosoTVRRifQReFKYZ59kk8eq2OVgUqz0na0wTtJt+tEtatDk69TbinRW+QcCaNuSobTKmU1HKwipz/MW0ynE+4tGIkxnVH0nIES+tdbogWcqMkHJh5wcmJgHcfDEHAhfsDnpup7TgobMfTHxr1OTEWMNVwd+T+Ld+EFjY8iYCvfFl6NydtF181Ho+MOTdb473F6FMRMsNMobQRNHWTklwpOomi0jAaSH54aIT9bbOX9ad3e7xzf8DtTo4nBYfGfI6OOKi1VVRqsVJZXPEk39nXQrk+
m3iwtMC4VTHcMLRfnJU4I21UVkPVfOSgB2lGMr2f7NgrhLWAuKhI1q5Sf/sPod9BNdpcGD2O7c+hHJfzmx8jpUSniTmzts4S/MAIStpHALNSpK9fRpSFSfZ4rIA9foHfxjdldraN3YL2OXj4DbVcj9qzL4Edol0PkWfok8/C5Bz88X+ElQcEagH/zBx7RhsIAa+slcg3/gi6G9Bsm+RqVUJVoXobEEVQq1MdP2MUTFmCquBC8yhZsY5HnfMnZhg9ehqroxmmOSKNuRh7iHJI4frM7NmLU6VEA0muzN5fqTTDquT9XLIomqxbLod6OXtHBG/M98mUZr6bshkXDHPN1bWYM1M1pBRbSi/J5ZWYSsODQcH5PXXqnuT0ZAiCHTXZV1l8yt2ctF38HPAk9eLjVmyBY/HRWrITH/PSnjq2BT+eH2BLgWdLJrcudutRwSBVbCaKatsqSmviYYKjSxzbwy0Ulla8ea+P70jS5n4WarCv6eKVKUedjFQr5semuagFntCct64gj5wiWHqAt/aApF/DP3CYoG/D5BRkKUlZkfUjar11OpP7SWyfWhEbMYhSYDvobgcevI+olCleANEAlhbg+BlElsKp5z9NKz50gd8ubt+U2dk2dgva5+EJb+gjNxrHRV/4cyN9lRKpNefXr5I22oTPvYioHYZLPzX8eJGb7ixPIS9QY2OoUy+S3L9H8O/+X8hBF0ZGSbwGSfsYdmuUZHyG6NgM17qKssy49e4lcqXYKBz2tVxybVPdXcSmwhMWG24brUEWJXN2RqvsoZtt7ELTj3Pyuoe0zFJYlCuGuaYV2CwNCn50u8t03aGsIIoLKl1hW9BJC3pJyfWNlLhQW/tmza+82MS52vV83MVXjm2R0nYyRNOTn7Jie7zoSUtwbraO1uai1fRtXEvwZ3f6lFqx0M/MCouGYQH3+zl/siw5WYPxNOGBdBhxLbJCUVTQbtaYrTuM6oTNyuLf9xuUtoNVFJydniUtShI3oS4EUgpemvJIkoiglmJZAhqj0I4IVu7jSU3/6hXcsWnC517EylP0i99Bx0P43X8LF38Kwz76u3/VFDJANEfQSwsmoXor1PNJndanOrLTL3wjZmfb2C1oT8HDC9RPekN3KMloiBbCUIeddXA97HabepYi8pSq1TZzss6GUSrVmuCFUPZQpeLCnTWySOMUNs+srVEvFe6EzUIwRdLvEyxt8Gx8j2TmLE4WkVVgWzZ2VdChRlAWPOunDL0Gx6wh/1mOMRb4LK/EnGvAR+sOg6TCdiQnJmu8vLfO//XtJQZZhS80454mVSWdKOf2BlxeLHhp3wieYyjFP7/TR24NvQVGuh/lhobxHaOK3KZxPiuP6osgdD990OxiF18WpjjV+PP5HnlZ8d5StCOA2n7GVKWxBGzGJTXX4spKTF4q/Crj0GST0ZrHmwt9Lq/ExEWFa8Fk3eWIJ/h4zZj63lpPmNq3l+9O2ly6coe884B6LBH7jrAalbzHBE2p6VQwKzYhyyikQ29iL02pCQ6fR6iC8qNLDLt9COuG0bEdWJyH1UXkcMj5zjXy1ihurcR2bHDqCEBtJsYpJBpCvwsfXoDTL5qdsiw14o+txeqn0oaPXeBFke848dMa/VrTjbBb0J6IJ91SnjoM9QOzzDi7F2b2gLTNPketgfADrDSheu4l85BlmSl6Nz+CQZckbJGVAj8ecNmZZODtpeG0OGD5zLkljpOgeh1kbwk390j2HKVhaXJVME7OrFvSDGxyEdKsMpo1F9dysDTsdxWnnYjGZIg1M0OvkpycNGKOtKzwUKx2IiIqCiFYKTy6wwEaeEXeJZ7Zz2hgU1UgpaBUn3R1NdcskBaZZjS06aclf3irt5Ne/RelCu1ddeQufk5Iyoo7nRxLwEI/52DbY6pugi+3s9Vub6aMBTZCaJSqaC3d5eoQogWBvf8Atzo5nVSxMSw4Nu4z03BpejZLwxIrTugozfytLhPODJ4uKVyPNMtxspSP1xUr
wxy75VFpxb3cQpQO+8sNXrx6kZalkPkp1OkXeSvy+KjnQOzyzHLM+T2HkLW3YWoO1DwyjqiPjKKW5tGzc0amr5RJodbaOIQ0WjCzzyRW1+roaGh8iT+rmMGn1IzacT9x4ne9Rxatv47YLWhPwhNuKU/zNNtWRIqtbk5ffMvchoIaYNz3Wb4P9+cBDaOjcPAILNyhHgR4QtEL2+DUaA4rrtbm6Cd9Vld77B2uEpQpdVly3hmSTXl859e/w3Bjg/eHDqKqUK7L89MhLQrwA07cG7Bx6zYtN6NhVaST+3gQKdbjnI2sy76mgxx0yQtFXArDsVcmPEaiKQRc6gkafozruVvKYEHTtzk3U+OnC0PYUnxqoJ+WXF9PqbSm5Zv06i9DFe6qI3fxc4E2/6M0rAyNY07Lzzgw4pGUhnG408lIyorNuGTM0TAwhsgtEjppTllpxgPJZpyzNCzwpGDfiMuptuROXBBUFc14g96dglK69FNFH49y3SReFxoW+wXHRixEXGI7At1RSKmR0oJoQLK5QVRUyCCAomAwTEkPjFIbacOwD5Oz0BrBf+FVom5vx/VDf/C2Obdm90NrYArTxPQn59bNq5Bnxn3/M4rS4wpv0sTYcO3O0L7B+Bl3LrbpR93vGQ81BKwuUh17Bstx0ePTW1lDwiTL2g6MjGGPjnJ+Zozeyip2ktIdm4HRcdpRgh9oTtbqjF+6iKxK6HUIX/ouMvAp5TQyH/CgnxMNM6Ql+d7BJgCWynFUycdVjd9fDyDuI6TN/raPJSCJM172Ezqhi1yPGOgaMQ6uhEBAQyuktHA9Bw38F0fbNAN7Jzbmewebj2RK/eGtHhrNMK+wRAna26UKd/G1gqqMOfeJ8YCNpAQNrcA2no5pyfKwYJiXDNIS0FjA8b0NsnQNr0pILYdG6ONnGd2kpObYTNccNlLFnc2cmZrPMa9AZR0yy2GxsCmb46wlFftGfe52cjZTQ2XuH/H5a4ca/OjdLk5Vbok2BFQKag2C0THq3gOqQQGWpFH3qYU+4m/9fbPuY1nwx/+R/M0/htKIPvTpFxDbRWdqFo6cAscxS9NSmqL3MxSlRxTe35D9s23sFrQn4El7aE9Clec7Ka9WpdB5brqzzTXzgivvUZ173TwIWhgD0LFJkxg7uQervw4rD7g+eRqRZHj9TY7Hi6SWjV8VjFdD5NgYBHXIUsSwh3Zd/DymqoxaqxXYIDTDXDFMcrpJiZSCD3oWaWXj2jDma+KiYpAp0kBy2rP5VXvI6GRAMjbOvcRCoYnyitMtyUopEMKiUJqaJ2k9FMWx3UElueLWZoIQFU3PxhIlB0eCHXPiXezi64CHJfu2JfjuviYfrRuzbhC0Q6MCTEqLflYxzBSuIxCWxeixI5wbschdnwqLHLitNZ0059qGcfcfZpLbHY0Wo4xbOc9X6wRJTG32CJfWUtqBQ1xo5pqGuTgy6rDYiajaE2SV4uypQzTGX0BYAlGrI6Xkldef53Q/At+nERhlJa4L07Mm0WNyDmlZFIsL8PEl4/3t+3DtQ6gq4w87dwAW71GdfsHsyNoOPFSUvqgv4xc9C78u2C1oT8Hnue6rJIF/+99BGkOSUJ09b2Sx6ytmgDo5azjtiz8xG/3bM7bGCFx5F4ocrTVJc4IsTqh7NvH0Xs7cewc5PUuw+iFyz0EYDo2FTVBD11vwwdvoLMNNa0h/jqyssIC3F3rcunaPlUwTVR6ZdNFSUKqKmWbIXz/W5vdudvFsi+tyjrJm4bguuhIcq1e8uxjRSxQ3bYsz0z5XVzNsS3BlNea1fc2dKI1OUuII+L+8tUymKjKl+eHhJnuaHq/vb+Dau93ZLr4+SIqKuFDYliArK6QUnJ4MUVpTdxOysjIKxkIwGkgmazZTdYeT4+HO/qXHlmgEc8lzLIlvG1PjUlUIARO+JFABz42Ocy+z6MUpB0Y983ppUjVDx6Kav8uf92AqlKSjU5ycbuBszfJUpRkmpXGpan+iIn54N9Py
A2g00WsPzCqQUnDnY/jVv2Fci8oSVu6bzixN4MKfmd8Lakaqv915/Qy7ZZ93Fn6dsFvQ/gLQSsFP/ghuXDExC2Vp7KziIUzMgLSNQqlSoG2sWp0KzMD22kVT9LyAyhIEKsWp1dmsBI2lu9S7y8jBunHK9lw4+yLsP4yY3mv47EGPpNam6CuOTwn6WtJNC26uRkQZKGlTqYpWIHEdm9mGy//6uUmiwtxQEQKlBZl0aNiSSlV0k4oH3YxhqVlPCtaiErSm5tl8vBZzdqqGKwX/6uIaeVXxoJ/TSwrqvoMnBXuaHicnHzUg3l2Q3sXXAa4ULPRyorzEsqCsKhxpGfHSQ0pHtWXgfW09YXlYcqebMdVwd/4caQle398gLgxFCWBLwXTDoxMr8koz7Vr4oqKQPte6io24iyUlB0d9Wp6EJKF25yqFu5eFSGBZNT5YCvj+4VEA3loYcHUtBgQnJwJe2dsA+NRupvX8KwQnniFbuAuDrvnn+hX46KLppLIUfeCYOZvmb4MtoarQR58xs36lPpOC/Ca46j8NuwXtKfjMNzVNTHGynU+iFiwJ9YYJ15uchf2H4NzriI8uGmGJH6D3HTEKx62IBu+175MdO4voWYh+BEkPrD2weAeFxbAfo6ZtnLE9BFqQfnSFYHEBRy9wt/UC9xZztM4RwviNrimbRlUy6grqoz77RgJ+eHiESysm/0lagplQEqK53c+4V0T4vsNkzUELUNqIPTpJiWNb5FXJWGijtOZP7vS518vwHAtVKYRlMUxLJuvOjlpsG487M+wuSO/iLwu50uxpOdzeVNzpZNzvFxwd9dk34u1YvW3DRCSBv2Ws/bi4ybUtXt3X5E43Q2BmzH/n1Bgfrsb0U0U3cbngTfP2ck61vsmggLrUdO0WWeAi0pwV2cIC0lLRGvbQ94bEexoISzLIFUIItNasDzMGHY0MA9KixC9S4iQjGrVpBK45k0a2IrgcF+IBhA1z9qQxHDhsjBwWbpvXVBquvk8lhDm7HPcRCnIb3zRnkMexW9CegM99U/0AWm0TyZBncPws4vQ5w01HQzPvGh037vsP8c960ANpGUNiVeK9/F2ybkxRFrQ7i8TRkKSCIE54y9vL1XSSldUmE5fW8a2KfT2LYPZ5TqhNpkenKDKBykuWkorpEY+x0GZfYBGGHlJKXt/ybby6FiOEoUfOdm/h5THDBzHhWJskdhG1A5ybq3PxQbS1W1bRDiRZCUdGA6QQOFIQuhbDrCRXgiOjLho4Ox1ydS3Bk9lO4Xp8SXV3QXoXf1kIHAuBYJApPNvCAjaikj3NR8VLSWGow7ZvM8hKstJGCuhtUYB11xgaR5nCtwVxoam7FmlRkZUVNddiNQLp++gqpiorhsrBL/pYGxn7mxJv3z669z1m0g3WhEMWjiNVSlBmiFqdugNFmrEYVRTZJh/OD3hx1MIZ2sQ3r+GVOf7ViurgcZKVBRgMzDhjbNIwQ2lsVoPCOmJ6L0JKqiOnzHK17ZhCth1Ns+8I+tipHeHIDr5hziCPY7egPQmf86ZuB3lWh08+WryUgjvXTJFbnDfFbAtaKdOdYRl68uAJimsf4m10cPsWsXDw5/YS9HOSyX0MdBtVCYooIl9ZIhmbpK1t8n7MWqtJreZh3b8Hec4B12W2uZ+Gb/PMhIm+2FYlJmVFpaGqKnRRoMqMj0qfxSpH5C7HvQJLVLgjPqXSjIc2m0lJWlZoLQgcQeBY1F2bF2ZrdOKSpi+puTZRrigrQcN7tHBtOzPsLkjv4usARwo822KQV8ZlfycS6RNa3JUCtGC6YZMUilxp/tv3VygrjYXg1GTA7U5GN1EMC8Urexrc2Ij599c63O2mhI6FDUzVJVJKQlvQKwomSZFOm3tJSbWe4h05y2ExJNzsUpLzchtkGKLLgnML79LpeiS5hx9tkMmMfGOV8406STRPEPeRXQ1RHzE1B9N7jI/ssy9hTUyjz736iRmxa1gT6/lXdgwi9Ls/NsXMD+H+
HVAF2vepXvjOzuu/Ka76T8NuQXsMWinzz2OqoCdBPFa8dDQ0XVi9CYMeetBD3Dav0boydKQlYGMNKk22ch8pbc6nKYn0CPYfQE5ME9y5Ti2uWG2M0XcCyC2ibspabrORN3mmqmjYQ/5m7wNcKQjSgnTsAB/Fgg9X4x3njqSocC2BYwl6aUkn1bxVhmxmmtN2Qs8u2F+3CKYb9HKNENDwbEIn58ZGimcLrq0nPDtd33FaCF2Le72cw6MWTd9+okHxbnzMLn7ReNrMNinMhe6FuTrL/RwNzDQ9kkLxxnx/JyZGABpNWmhSpXh3cchCL8USgoZnE5WKzbjE3vpZ+nBlSC+tkAJqrhGLFBrevp/gSEF9chxrPaKrLEShOBcoWhMB1cI8J5yId6wQd2aWD2oh5wcDrA8vkLz7Frh7GRE+CTaCAUFDIwcd6v0V6HZhdBKiiCozKkvGJhGjE8aSzw9MascTIKSEF76DRkCaGgHbxqopdgj0S99DSPmNUzU+jt2C9hAeoRptZ0cV9MQ39bEubtvNmgf3jGx2fMoYhQqBCELYXIf7d1HLiySlIhj0KTtrIEDuP0Z9dAIOnwDfR976iDN1xSBZxw8CVv0RlgOJ3c3oyRBPKNKspBKSlmUG1IUQ5KUi0CX9HP7woyHSdeinCr/KGQjYTBRXgnGk1HSP7GN1kLGAZO29dSZrNq60ONAWuLZFP1PozLh19bMSpSW3NzNWooKirNjT8vjegU8oxscPkt0F6V38ovBZM9uHw2bHag55WRHnCoGJiQkci824RGxFw3y0GrPUK+jnFSC22A0QWrMWFWgNla4YD0Nagc2djZR+klMJC0eaS54E0qKkXfOYrtUQq0vURIW6cwOt4QOrwb1EU4sr5lZvEc9vEsxf50PVYF75WEXOUafHa+k95NGXzaV6fBo+eMssTtsO4d/4X5AX1U7npTUIP3hkPKKVonr3TaOGHBnDeuE1xEvfM2fVhxfgzg0Ia4aKfIiF+iapGh/HbkF7GA8VKZL4kxvLk/B4aw4mmuHgUbM8ffCYeVB0hf7oknHRt10u7HmebG0DL494teFCHBt1ZFGA78PxZ0FK6p5FOy7IhGbOqUijLv14iAtknkOjFuLsP0TeWyWvNXE9F/f+VQZxzu3NmDQY4Z7y6RjBIoEFbs2nrCxagcORiTqW7TDfy6i0QiOYrjucmghxpODOZorSmrVhwYcrMZ4tyJWhbBzbQmAG7nXb2i1cu/hLxWfNbLfZgmGuuLISg4Cs1Hx3f53LqwlJUe1EIW3GBRoIHehnFXVH0PIdphsue1oOG3HJIFcMMsG19RxPwljaZbxSCCnZcEewgKl0kxmR4boSFcwQ+IqXx23eXitJsXgwqAhci6ioqLIM5Wl6SK7VZnDR5MriuWzZrMA8cw7e+AMzpqg3zQ5rq409NYO+dw/e+BHcu22k/ONTEA0RzRYAVWfTfC4CpKQ6+gyyPYpotqjOfw9tSdDaFMJvGLX4NOwWtIfxM/DHn7KIAWMrkybGR23rQdH7j8Cd6+B6JKtrZO0xwladOLbJp0Oc4SaMz5jB7e1rZqi77wjy4w84n2+SqBqBLHnBhz+VbY4XKWr/HhoNjw9uuSxsBOztDAiS9zgne3Q6m1TdnBsIBsJBCEHdsXCrEseGfqZw0az0M5qeRAqwhMAS0PRtxmtm0fT0VI21uEBaFqOhwzAr2dcyhduRFi3f/kKzsV35/i5+3vi8ma20BFKYWKTFvnEFqdC8NFvHdawdwcefz/cQQKktzs7UmGm4PD9dx7M07y9sIoVgM1IIKRhmJaN1m9Nexo/jkHqZM+OW/K3DLa5eybirfXRSwlBxyLa5sFFQ2S7h/gOItZjJ0RqOVrhXrnOxVCjLR0/NYhU5TncTGU6jLE0yHBJM7UHa0lx+n3meamSM9Z++iXr3z5G3rxmVdVXBwaPoWoPqxFlzJn3wNnQ7Zik7qBnThy1Yrot+6XvfWGrxadgtaA/hZ+WPP9Wa
b3s6vvidHTNjoiHasmFjhaBI8MqEeP8JvNX71MfrZKNjZucsHsBwAA/mzS1s/xFkc4N6OgBnCvXgAUE6IAxcuq5LEud4RUpiedgiI1tdIS+XGe+uUasd4HDVYzUYpaNsRFVxMNRMzDRYf7CMk5XEdzd59dUznJmqobQxHt4uONISvLy3wTBTXFqJ6MQFy8OCuaaJrDkzVWO26X1ugdqV7+/iF4EvMrMNHAu0YJiXdBLF8mDIfDfluek6L+9tkCuNbVk8P1vng6Uhe1s+o4HDZCh5440PuD0QjGNxzwqQFgwKheO5XIs8ekWFKHPq3S692xsMkIiypNxKeHf2H6TMCu5EFdVmged5vLKvidhc5f2wQehK+lnJrJ0jHEFLaYKxaS6sZyQ3VhH9mNfUEm6lUPUm7/QsWE7RaozzqkJ6PggLDp2E2x+jk8iMTCplFNXRACZnwPPRSu2ca4+HGH8bittuQXsMX4Y/FlKi/QDx0IOh/QCm5yCJkMLifP82yXJGcOgottRkN69DmZusNNeDoIZyPBJtEYxKpOeijp1B3VvAcSRxCUGZkhclheUSkFJWikAKghPPIa9d4vzMJIkf8trZEyzGCq/MmRpr8M58hzVVUtk2DZ0QlBmFXyOQFhceDBlkioYneWVvY2cGJoUgV5pcKR70NfOdjPu9nLMz9Z3XPQ278v1d/KLwWTPbbZbglb11srIiL2MqjNHAWlwwzBT1rYy0TFWcnalzejKk7kni3gBdltT8gDgucSWY6Rrsa/mo+iwbiz10XLGkbH5vs6AfthCeYCrrUR+soebXkQcOc8ipcCqFkjaO0Hihj+dI+kXFHdliPxq3XufFVk4eRSRK8KCbELljkCZ81+uT3L5JfPogNdsmXl8jyUvqIoHjZ0wHVhbgBXDtkjF8GBmDk2eNfd6lt6lqDcTZ84+kh3zTd88exm5B+wrx+INRnjpH/MG7BGWFBGjUkbUR6u0xyGLK5QUj8y9LUAWq0WYYtLiSNSgQeIXkXN3mvdWcrGriWIIzbPLxpZ8itIVlO/zd77+EsgTBzctQ5GweeY5s3zFaIw3eWU62ZgIuc46N5bkcCAVOmXC4LvjxWokQA5TW3FhP8GwLjeD0VEjdlawMcrppyUTdYbGf08kUthQ4tkUvLVmPCtqB/dQctC8q398+cEYq/XN/j3bxy4XHWYJfOdjEsQU3NxJWhgUguLwa88reBuf31ImzgqDMkK5AWIKgHhJ4krksYbRuI8M6q1GJb1vY0mI9LtC2S6ZtZKXoWTbScTg1YnN+8IAg9MnjhHZL88GV22SZwpMCb2VoctoOjPMjMU202Odeb50Dgw753haB7yHiTSIhqfkl2rFJLAdXFSwMFWVPY4dTvD7TBd+D139gpPjCgjd/ZGywWsaBhMMn4c0/MrSkrtC9DjoId0Qk3/Tds4exW9C+QmzL9kVzBJWmvH19mezmMt7/n70/C7LrytIzwW+ffcZ77uTzADgc8wwQJOgkI8iIVEqhLEldpgqVqVQqSd0P3S/dpifpLR8kpVlKZmkmtalf9dxVVlbVXV2pKqlTUucUGckIBsEJJCZiBnwe73TmaffDdnc4QIBDkBFKRvgyo9Hgfu851+/dd/97rfWv/x90mIs6yJFxna1JqUkglqVPVUlMKSQ/mzzMhneUVXOYl4oN0lqDTtwn7TyiZkBUQT4ySb4WUPcdgjSnnxSMdeZRQvDjvs0fMYbo9xFGwLBnMlqzOdiy6cQFJQbDJ47x8UKfTwroP+xzqC7oFgZbUYlnGUzUNRPsR4sBdzYj1oKCibrF2QmPrKh40EmpFKwGOZ+shsz3MiYbFi3H5PVnMrYvUwrau+E8iiWnm2q/LLkfnxtfpS/7bJUgKxWvHqhzcsTlk9WIpiPZiHJ6ScGQY+DffH/XZqW8MEdSCS6/cYksirFrHo2lkA8XQ4QBBoJDTROKnK7VpB8XYJkUFTR9l0c9j09XBJXR4NhKwnfMkNL3cG99gMxz
aLWJRw6wkJUsRAW5MURVwfeOn0b+yf/Gd3uPoXECNXwUz/DwPI/YbzPjSfwwI5Qp2XoPu1nTxBAp4eipXZdq+l0ocy2Ltb6sS5DdTQ1YI2OoyYO71aRv8+zZ3tgHtG8oVFlq2v7yPGp5nujIOVLDpFblREoQ2zXqBw8jTp3XIsP//n/ScjTtYSgKglJw0xpDyAZrhU1H2bTzlCGvwjEgnj6CEw8YcmKcsEcQBsy3D1D1S8xNwZmmzx9tFSyLmAIDIU16jmTLjRnx2ru281txzlKsUCgGnQG3OwWmYVAbblG3JUeGXD5eDflkJaIXF0w2bSYaNpem6tRtSZCWhHnJrfUYFNzbiomKEkcanBvXVOa98UX0/b0bjpYbUvtlyf14YTyrnr9TGnwW2HZAz5Zit0pgGTobKyqFaQh82+BP7vcoK8VWVPD3jntY25lKEUW893CLzHS25zrrxEWFQOPGcj/DEgpjc52yrBizDWKrTd02qbuSlw80eJ+jiK2ItViR9hRG5vNausmSqIFrMtHdooxzUqtGPQ4Qnse0kZBlObZS2Kbge/F94n6Jd+gw0vTwzl/Ce/caZbeD51p4Bw/AxTndshBoSnN7RMvrqQoOHdcvuD2sNWQVmlXtelplZKdn9i2ePdsb+4D2TUUSI4ocTl1A9bvUTp3F2SyI2uM41SqeOazlslrD2l5maBR7YoJ08qAe4K5MSMYQk9NMGZKXjp9hXGQYqmLu7k2SLMC1SuTjx8y1YXVlhcg5xPzCBkHlsJmECDSbK64UbpHSyPtUoUElenxUs7h8oMlGmDPfS1nrJ3ho3ceGyMktwfHRGq9M1/lgKSDKKzbjggo4O17bVR5peSZ1R/Kgk/LRcsAgrXCigom6TakUQVp+JUbj3rLkaN3As/bLjvvx4tg5ADmmwbXViCAraTnmU4SjZ8uMr0z7ZKWiVIr3F4Nd5f2Zpk3bNWl5kiir6GAxvp2pxKZLaljULIMw00PYcVFxez1iI0wJMlgbZFwkI/cs3Y+uSY6M1wizChQ0XIsUSVbmNFxJMXqE/+f6GLesachjzhmbnCgD3GgdSouJwQpDh8bxXFvvEYZEZin10VGMZpMqDJCby8zZAdaFWfLFx0hn7AmYXfqOtrF69S0twXfjQw1atqP/KysNZI0mzBxGXH7zuQSRb3PsA9rnxFdi/myn7aQJotHCME3mfvY/ExcVnimQf+2/RjRaqI+v6BLAxgpZdwaSFA4cpr70mDPDw4SDFerHjjJRtzE+uQpZijQt6hcu6dezcB+KgvvK514seJAWjI6NYLotzLxHOxlQFA6TeRchDA5UfUbSGkkYk5UNRn2L48MuB+omptpkMVEkmFimxZujglJta9p5kjG/xnTT5uKEVtLfe+o90nboxDk1SzJIS44MOdxcjykq9ZUYjXvLkgenRuhubX5Dn95+/CrGzgFoK8rJypKGo4kcewlHzysz1h1JVlTM9zLivMSzJK8dqNN0TaJM/7vl2fRPvQJJglev4a4mu0ojSV6yFmSsr23QywWWYVDYPlczh7GyREqHQ8MOP308oFKCTlzy35wbIcpLAIpKkStBhInh1VBVyWZ9irHOfc5Hi4x7w7wyuM+0MJA3Qy0enKX6/5urVH4dVhagqpDLj6nZJkGzqefUPnpHjwnd/EjLYEkJ9jDqjb/0REe2LFGm1KAmJeLym0/krn6FYh/QXhAvYv68COT2pu3KsuEnf4Rcfkzdq+n0v9/RizMcaDuHsUl9isoLWLiPrEreaJfEZYDbrBDLj1BRiNFoPhny9utUR08TLK/waWxqkMFgyJEIx+HSEZPHD1ewCsHBNGC67GLFMZE7irc6j3VslA+WEgxD4Ds2F984j73Qw7BM8kcPuPJ+gTBN5OwRZtsujinwTAkC4qzkJ/N9krxiLSyYadlsRiUzLRspBC9N+XyyGv1cjMadsqS53zvbj8+JYvtA9dJkjZ/M97EMgxtrMWfHak8RjjzLQApYHmSM
eE/mJbNScahtYwhBpRQIwX93cZROXNB0JB8uh9xYj1Cq4kwzZe7wEGEJ7y8GPOzqqsZRqyByTKZEhDE+zFI8StOEe/0CP1JkheJEWxLlJWthjiXglVGTXiW5fKDBapDyuNKgMiJS2tPT3LAakJc8qo3RrI/g/cm/JcsK7DQku/QW3swM8tARMAxN3MjzbVa0p9mKQiBq/mcIHZ/Jus5d1j/fVj/6VaHq7419QHtRPIf5o1xvF+SUaSFOnn9KGmt3AYWB1oJ0PVhZ0gr7P/lj1JFTeiHmOahKm+2tLEFnA0wLIxzg+w3443+r9da6W1RnL0FrSDOSQA9R+g1UzaRrt5DKJpcmF8dr5EWJaVk0UFR+kzV7iCPhEmJyhstORDoISKIM13PoZxXCkNT8GkkUIcscbAe3SPl4JaDVqNGoDKK85GfzBQ86KUv9HGEoVCU4POQy07I5PebhSr1hmNsyWOZ2NldW+wSP/fhmoqwUb9/fZKMTUFUKQwguTPp044JzE7XPePHd2UwYpCWbUc7Z8dquEIAjDQZZScOWu6XxUd9iI8zpJgUChVhfpbeZsr4luDl6nJvrCYahaNcsxoVgSMScawqc02P8v2526SYFpjSYbVs8fLRKLy5o2IJxs8nygxvElaDpWLRnL/L3eMRfaecoaTF59jSJUyN53KeRBdyYnyBYWWet+TIHarAYwYwzhpf6zOUZ0pCofhcMAzkxhVhfB0CZFqq7BX7jyT6xJ549nKvzl3el+kSRf+up+ntjH9BeFM9j/myDHI62O1fhQKuCPMdeRtR81KkLuobdHNJA9LM/gdaIntx/67/A6q6RRjEMjW3PkBS6YZsk2lttfUVL3rS075EKAwgHNIbbHC0qEnuImVaNCd/ipabAqzeRxmHKJCUspsgeP6SWmWTrS6T1MbwHNzC7Btcyl2psHNMQfHemQVE6WAG810lZLC1u9QrqccRyP2OkZuLbWo1coFBKa+BtRTlNV3J3M+b2ZgIITo26XJiocWs95qOVcH+Yej++sYjzirTQzg9BWiKU7oO1XZO6/fRGvBHmDLKSpmtwdzPhRw96TNT14SsvK5Kioradte302+KiZDXIKdKcPC9ZtV0+6ia8P+jRyRS9rORA3eL87GHmJi1krUZSCf7O+RE2opxb6xFlmvP9WsSBpmSqCPA+/DGvrDym47YYmhzH6G5ihD1mmm3tkVimmHaDVs1hTQmyIQtHhMSWSzVYJ27NII+cIH3vbeKPH+q/8+/8X8BvUEU6k8TaLhtWFaTb5p3wdOa153BehQG8/7bex5bnn8yvfYup+ntjH9BeEM9j/qgdkOt39WOa7ecuBiEl6vxluPJjTQLZXNMABdBqUz5+QHzrOrXZGTh8QktjdTd1s3ZsUjOVOptgmjA1g8izJ+LHy/MYy/O8dfQs5sgQSoEzf5fayhay3uB7L71OP6/z/7m6zHzmctM/wW9YXbzZw5iP73J+uEZ/PWVNVdzZiDGA7x9poV6ag/tbFHFFtRChlDb8dE1YDXMaloljCkZrEscUrAQZy4HuRTumgSEgzCqtOl6p3dJjkJZIQ+xLX+3H1wrPMnBMwSCvqFlyl+ixd12VlSYl3dmK6cYFdzZyBLASZKyFGatBxrXViKmmzXpYcHHS35XE8m3JobZNkkuyQLCSFuCZpBVkRYlQIAxBgSC0PD56rEHQlgaeaWAYglgY+JbJ40HJY+Xxultx1Z4iTXKcyGbu0T3k8jxq8RFIE5WmVLU6ReM4i72KrSjnTuHhzh7GCD28A2fJywxZZNh+DYIuJKEejL59VTMWP/jJdjVnA6IQlSSoRgtRlbuZ11OH820mpGi2UcvzqH4X0Wh9q6n6e2Mf0D4ndkFsu9y4C3JhgPIb2urctKAsYY+kjCpL7UskBOLsJVS3A8fPwE//mLLf44p7kNSepr2pOH/wEDJLtep1HGnpq6kZEILSb5IkOZ7rIGGXRVn0uuRHz/Jmu0W8tYW38B7S0hqM
xsnz5MIjFybHarCVFky1XG0xsTKPn0bYtk+/ELQ9iQKCrCTJK3LL5pAnuL2V4kqD2DNpuibdWHF42KGsFD842uJ+J2U5yKkURHlBPy0wlODYsMOQZz6XJr2fre3H1wlpCN48OsLC8hMWrb1n99rJtHppwXwv47uHGry/OCDIKh53U2xpcGzY06XwUiGENusc9a3d9Wpg4FmC9sljfHJ3i4EtsTa7NMsKBxOhLEype2p//qhPkJW4EiZqgmbNZZAqhDdNQ1ZEpSAOVhANnyFLEY0dZGPtFqMHjyIHXT0XtrpAvL5BeBhcWWdqZJzRSvHWxjVsA6S1zM/qx1GWwwdZnTk7RfpNVBRQZclupWgX0BxXtzQsC85f3j1sC7/+5HBu2XDtff2742c/0zb5tsc+oH1OvIgYIpotqotzqK11PdB440MtTLxt6KmuvotKYlh6jJo+hGi1YWIa9Vs/JO4GpHdXqFUp0XpAXHaob6zosmS9BSvap6hUiisHLpMLieM5zHk+hu1QRBFXiiZZp8IN+1yev4rcWNXZ3Mg4AEOeSc026bfG6Ic590ebzC9GXDrxMvUqw94sMNcS4jjBall8shLSTQoWexlHRxy+e6jJqVGPIddkeZDxxw96mNLAQJ+O646kGuRUqsIzDR51U4pScWfT4DszzV3WYqkUHy2H+9JX+/GNhLlNHtrJxPZmZzvMxpYrme8p0qIiKRR5qahZBqO+VrSZHXIZ801Wg5yrqyEtx+TVA3WyUmFLwQdLIZtRRiEknlB4Rs5k3cDKQ8zxES5N1nlvMdhW7a/wk4AyLuh1Ja3pScIceqWk5ZrMi0myrMSUIO8vU60s4Rcxc60KaUjKOKUsSmomlFEIRc7Y7EFaziZmq00QpUgD3B/8DaL1deJshfpHP4Plx+TbjiAYJly4DJ+8Bwhd5YlDVLej953tzGunvy+A6vxl6G0h9hiB/qrEPqB9XrxAEkaVpT7lDHpP1aFVGOg6dhQiLAs1eRCOntKsxyt/BsKg5ni4R08TDwa03A08N4GVReh1IezrPprjEQ9PkSYZ/sgQSQlJJaidv0z87tukhaL26FOiyVliZVA/eFiXKMenKW2XrFT88PQQ/+FOl0rABysRSVHx6UbCqVEXoeDlZIluUnJILPMf5QxrUUFaVmRVxalRj7ubKW/MWIz4FkeHXNKyYjXI+XhN98b+q9ND5KXinfkB19b0kPX9rZSNMGe6pe3tg6zE2iaK7DtX78c3ES8SvN7re3Z2rMZMy2ahl7Ic5hSV4NiIx4VxnyHPpBMX/LvbWywNchb7GafHPD0knaVcHHP4D/dSlIKtHIrKZDpPWFQOJ1ybO5vanVoAqqooC8WFMQOyBFxB2XTIigoEPO5qB/j5R6uMDjosFxYHVKr9EC24IoZJXR/r/k3+VrKKTA5QP/lfIoMGKo5whcCyJElWYDUalB+8Tbn6ENnrwMuvwdQhcF1EWegWR5rA/H0tTGzbcP7yZzKv3b1rhyDyK0IG2Yl9QPu8eJEkzA7Q7alD4zfg9jX9u6vvopptXQIwLbh7HdZXYeYIxvQhXhsziacmGV8YEDx4rEuWngfzD6DVgizGEyXOxiJRZwPn8DHcLAJVYaNQvQ5BnuJVFXbT1KfVcACrS1z56VXSmePc20rJqopeUhFlFa4U2FKQ5BVGnlKlGQ0pUGlKZhaUCkxhUFaK+V5GVlZ8uhGRV3pIdLJhkRQFHyzn2yUb+N5sC9+WhKl+flVpG5pn1RwuTfqfq+aw31/bj2fj8xyonyd4vet7lpYgoCwVUhocbDqkZYUBfLoZU1VwbNgBtEt1Xil++riLu/gIR+UkhsmSO4VpQJRV4DX5xGiQVAK7n3PMNDkxrGWzirKkKAyKJOKt4Yp3VMXDTowhDCYbFmO+pEoy7TgdbhFmOUKleMUG8bnXyIxR3KBLb34RaVu07n4Eo21NElMKqSrmPvh3BBOzXA8lH3UKnKLNXL6mD8+tETj9Elg2hl+n
6vc0iWxkHJFnmtL/bAb2JXQbv810/n1A+5x4oSTMniHqnTo0oCfzLUuD2ewxDVT9Dpi2LgkO+tprr1ajISW1194kGBrXQLg0r+vfEwchz5CTB3h9xCIcDPBWPsRYMyikxQeRg8o7CMvhJS/jg9pR0kaGI4Y4Q4c0SjCKnLxSuKbJsAdDrolpaKWttbDgYN1GdTYoi5S70sU7ZjJm6T5FpRSDtMS3JQ86mvqcFBULfT33ZksDUwqyQpGVilemfH78sMNWXCKE4JPViNcs+dSmIw3xGcDKioq3Hw1AKDxTMnfw28+w2o9vJr6MA/VO1m9LsVt+BLi5Hu8epE6PeoR5hSm0YMDdzZiFXsb1NYnAYKJtsRJUPFiPcCPFpOegkpyByokLQZBXNByDx0FFw5Us9DKODrn4jkQpRT9TCKfO3VqL0/kywfw8hrIxxieRwuDkSI1oK8Etuhw0E8Rgk4ujDtQP4PkuVqK4JobBS6inBa+zjHR9WF8C10fUG8g0RhqCfH2NmsqJTJt4dIqaaeo9Y20JdfQ06sQ5uHNdK4xsraOOn32Kwr8DUsqyP1e38duuvL8PaF8Qz5OE2Qt0YhvoqizTNNqiAq+mMzN3u86dpTA8BodPIC6/9cSyIY4Qto166TX9uNUlmJjWWmyGgZFE1E1AgfBqJEFMOnmUepkTYLFimER2HavmEy88hHgRxxgm5jQ1UXKw6SClwxszDeK8Yj3KuLuVUDdKuu0xCmEwJHIO2zknZkYRhuTuVszNjYS0qNgeL6NSepRuxJNIw6BmS5rbcz2lUkw1XQyjwJaColIgwJHb9GrAlp/NzN5+1OfTjRjflhxs2cR59Uv5PPfjL358GQfqHcWaD5bCXQA71LLpxDlDNYu0qLg0pVmMUsB//9E697YSoryklxR4tqRUcHLU4epKzkpuU5JzpGbQqtk0EHy6UZAVABXHhmtIBCdHPWxDaNJWqXBMA9MU3A4EazgsxxWjSYY/4nFhwkeM2bh3u8SDgOvDQ3wyfRynXuO1V1/ifJgwWMtoNRTJoE48ZFH/8B1QJVgO6uIcpeNRJimWFERjh3DSAd7EAczhYdJH9/SX89bHsLYE/R6cOq9FiQ8e3n0/PwNS5y8/ZR/zVHzLlff3Ae3njM+Y4117X9NopYT/w3+rqfY3PoJHt6FUcOQkYu572im2LKk+fIfYqFBZpSVt0hRcF+a+r1lJSazv43pw7X2KKKKQNtbGMoFhM59blMOHeX8lpN43cGSbN+QCc8UKweo1LhkS2XVovPwqlRB8uBxydytiLSwZr0nOOCYiz4i2OtgIHnfXGEwcZqEbc2GiwXpUMN0wubeZ8LiXEqRagPg7h3xePdCg5eqMrW5LLk5qlZCdpj0KXpqs8c58AELxwVL41Ck7zisU6HJlVoIS+/21/diNL+NAXXe0UHZaajbth8sBHywpuknJZN1+Sn80SEtmh2yWg5ReUpCUJUM1k4qKJK9o2Cbt6VEO1w0uzw5hrSV044IPl0McWWIYBpO+TZhXfLoRc28zIS9LxhsWLUdyoO2QRyZnRExmu4zWHT5eDuj1A4brNV47ehLZ6ZAb4/iHjxBLm0QJ6rZByzFJpw7hHhR4tRPwh/8WWm0IB5SzR3nv4BxpkmAa81wyQ+pmA3lxDuP+DShK7X+2LczAxoo+EHc24O5Nqge3tV5jnj01hyZ6WzA89vzM61uuvL8PaN9EbJ9qDL+uF0zQQ5n2dglgU4Ncke/WtFUYwO3rFK4FQQQHZhH1hpa0SRPUvZu7E/zipdcoL8zx3sMtkrzE7N/ntFeBUph5RFMqhKjITZufOTN8N+5wcyDI/Dp2EHB5MOCnHcGNtYheWjBZtxj3bS6ev0itt0F8c0DVNPmwk9NauMPjxObD9Q3W7SbC1GytmYZDzTOxpCAulD717tF2fP1gg4vbzte31mPeXwrICoVhKFxTywDtPWV7lkHNkhxo2gjgzdnPNwrdj1+v+DK2Q6DXkWkIri6H
LA9yHFMwUbcYr1ucH6/trlGtWGMwWrMY8y2ur0WsDjK2Ism5MQ9zu7e8OCh4FXhjpsEf3O7gmWCZkmHL4NSox1ZSIIDFfopCCw0cG3bwVcEtfxJVVsimwc2NhIfrAXeNgtNWwqmhDGtsEnOzpJNVNOrg3ngfkca8uviYeHKWWs3FOHRJsxSjEFyP2GuTDcBv1omcE1jDEula8NE7VK6jraiOntb+Z6aE0Uk4fBwQmmgWh6g0Rr3+mwjb0UPVS49RoIHqOeXEb7vy/j6gfU586ebo9qnmqQWjlFYHkVIzF/ecdlRZwsYKpZQ6M5s9jkpTeHgb4phyY4XkxEW8NMZMYhLTIzMdalZFuLVFLV/GiyxiwCo8ctOm4ZiopGTDbdHrBTS76/SUyfr1G6jJMzQcwcYgJ08qRkZd6rZELD2kvj5PuTaPM3KWRAmO1ATXVzpUliIQNlWrTl5tW86bBsa2IeLcgfpuuccyhJYfEoKkqFjsZ/TTgq24YMQz8W2Tt2a/mk/afvx6x4tsh54li5wfr9FPC1zb4FEnpaxgrGbt0vuvLAREeUlVVZwY9ShKTeVfCTIMBNfXYkxDYXU2mE8V7wSruEeOYBiKMd8izhUzTZuTox4fLIesDlIGWUXTNVGq4tDafd7pSUwpCVujTHsWHy9H+jtTKaIs4KO4jwhNHosGk6aNSnPKNCXGxE0i6iqDVGml/L/591BbG/DwDt6N97DKIcKpQ7i+h1evwXt/BndukMUheL4mnh0/q8kgng+b67D4QFd9bE+XLx0PLr+J6G2hAMOvf2458dusvL8PaC+Ir9Ic3T3VbK3vLpgqDLQKyPQs2DZi7ntPaT6q4XE9fA1atubxPVhfphwMuNI6TrqW4ng15pIURxZYShH1BzhDQ9Rtk7kHt4nrOd8ROe80T6AeDnBKn/t2nXlrhNVBxrhV4D9eQQ7P0l5f5tzmKm+yzph7EmPkvC5XnLoAvS4nT54gffQAK9zkPgLHtuglJd2oYLxh03BNZlsO0y2HIC24vRER5gW+Lfl4OWQ9yhn2TBSKMCtxTIOWY3CgaWNKTd23zSeloy/ySduP/Xg2iueQReqOZMi1cE2DQy2Hlyd9Wp65W2qM8pLFfkaYlZwY8Xh9po4h4N5WghACEReMyhyjLKmkjVNF3FkNWI51p2CiYfLfHtW0fgEYhqDtmYx6ko1ewp1EsVrZkJekMuNAyyKtFBWQKZg1Eu2hVhVEQveiY2XwdmAjyhIn8ph7eBdpWVQvfwdp25SOC3euIaXF3Pq7xMkFvFYD0XpFq+V3tyg6G7ovrxTi9MUnfflr7+vefb8P9MEytehDnunHu95zy4nPO7x/G9mO+4D2oviKzVEh5VMLRrjeC5uvwq+jjpzAmL8LpqNnWMoC/AZxnJG2x6kdPUG0ME/8H/4t9TLmVdslnjqM13uEzDMIetQffwonz/P9GZ+oW1Gmio+SnBP1jCRRnBQBuTLI45T3+4KSNn1s/n4w0B+87VAmCT+rhrn1IAZzhlMHZjgjFjiUR+Q1yeO6h2NJ8lLRcA2CtOBnCwG+JQhzxYUJj5VBRqkUq0HO3zozDEoQFQUfLWe8vxRiSYO6Lbk0WX/K4mM/O9uPrxJRpgHKNMRTZey92X5ZKTbCnKFtlX0BhJlmQWZVhRSCuYMNbm9T+O3OKkdlQh51sP0xUsOklCbTDYOFforaWOf97gNeHjHJxs4w2bBZCzJanWXMrGI8XOGRfZA1ZVG3TRb7BRO+hazblN0uL7dsVrZSIr9Nrwd3VgbQ2+K4KmjLkmjmGHH/HvU8pnz/JwQnX8K7+SFyfRWqEjnoUb95RQ9Q71R92kOYjk3u1bUVzLbSRxXHmkk96MHYhD4oe3WNop9j5Pm8wztA9eE72h3Eb2C8/Ma3AtT2Ae1F8XM0R5+7YLbnQPaedgA49zJ2o0EaBLqhu20v47VMnDPnSPISJ4vwtpYgjZFlSX3msFYDiUPKE+eJw4ja8XOYjQYN36WY
msKJXeKDJ2jen6dQktz2CKVHP9/EyAT3K4vHhsMxz4cLc2xs9Oh9ch8jXKcyJJv+AS5fOkeTAq9e493liI9XQgBsw2CibvFnhQKlCHPFlG/xkdDjAAItJ2SbgiATDG0TR0xDl3aSQtGwtdzWl5XD2p9V24+dsE3jKT+znTL2TrafFRX/w9V1BmlJw5H8/ZfGeHO2SV5V3FiLSYqKphPxypSPb0u6gwRblXxvwiSv13COHyRpjlBfi/njez26UYYbhOR06Hd7FPVZYuFzugGHels8bAxxiwPYjRZOYTHZdKgqxUzbocgK/PYY05MtZm59wkaxTlkfYV5C3O/wiAqTAK/dxitSSs/nyqMO6eZVnKDLnOUg+x2IQkoliC0XL46Rc28BCts0yYty16SzyjL49/+TFjcPejAyqcFv9tgusxpeUE58nrNIWcLdG/oaVYU6eR7RbP2SP/GvHvuA9oL4JpujOycgFYX6FFSra5uKdguOntI6i5deh94WZmuYN0yLKM0x1wviQumFXBWaZOL5lEXBlY4gnTqMvVlxvgzwz76CVea8brskleC7sy3eub+BUIpHnRRlOlSmTaRq3GmNsbEUIYAkKlhLITNM1hLF0lLAelxxbrzGG0Mm58ZrXFuJsE3BrY2YuqUlsNYjzVRcCnIm6jaVgvUw58PVaNuyw2MtzHTPogLTgJYr6SUlSsFwzfxCOazPm0f6MrEPhr9akRXVU35mWame0nPciHIedVNsKdiKCzainNGaxYNOqgW2bUmcl3TiQh+oXIt4YBKFfYZ9FzE+gSsllya1ae3j9Yp+UHKLOg+pIXoFs2aBWFjg7lpAScBoa4LRcY9bgcG4bzFsGxxb+5RBkXJg4S72T+eht8Voo411/m+QV9CiZIqYs2LA6NybyHuKXpjRW01pN5sk/S49f4i8NUHTfsRVhkiFxFlNmCsKpOUgTEBaT0qNK/PaN7HZRgkBb/4VxMj4l9NpfN7hPQx+oZ/lLyr2Ae1z4qs2R6ssQ73/53pubC+LKNFyWFx9F3odcF3UW/8F+afXobmMcl1tn17TJQTDENQci58duEzSs3A2FpmrpcjWEFgm8WaXNKxwk4BrNx4wsApansFr330Z0zKpA5uDnMHiMqMi46gyGW0q8kLRFxYjQw06SUFeKcZrDrO+YJoBH8ka/baLaQgGqS7poAABhVJ8upFomr0QHGho4WLDgKPDrlZVqCrKJCFKFd245Ny4z+kxnZHeWo9J92RoX0YO6/Pmkb4ovi4Y7sdfvKjZEs/cHto35WfWjisNpKHVP6Sh/92JC8pK0XQk/bQkLRWuZQCCTlKRe0N8NDzF946PkBdgKz0P2XAka6lg3RyGsmLUFYwbJjdXQ2QoaQzNkmysUW0FbHSucWJyiGPHz3N/bcC/3zABk9Oxw+tZhhQG5DkXo3mqyRnExBBe7jA6OooxNEz/3Bt8cm+VhXiLhaDixPA4/3txkEQZSHmII1mHpm0QYRC//w717ipy7ruI9XXNmL57QzMj+11NSPN8xIHZL63T+NzDu19HHT+7W3IU3xKSyD6gfUOhyhL1/ttw71Oo+ajJg4idvpvr6cxsdUmzHrMUVhdQVaUn+6MAHt6lmj2+C4RxAbkw8M9fJLxtEzvbQ9ZVhZf0sVKP1eV1lIxozY4TJyXrGz3G6jal7fK/3Vjn4cDAMHwOmimHDh9EGAYznk1UVMz3UpSqWOmUHJmeZaUs2OqXbAY5kw2DugVGFPDBRoEQ0Iu0sviIb2EaumG+2M9Z6ud8/0iD08M2f3p9kSu5QBpwYtTm1QPtXSLI6zPmbrYEX66H9kXzSJ8XXwcM9+MvZpgvYMfuZOJ1R/KbR1psxgUtxyApK5q2xLMkq0GOLQUNW/dzjw27JHlFq+mAY/GThQiFln071LYJ0hLbNBgZbtJLcjYqQdrLsCTMWoJ+kmMIgzPBIhGSYiXkxuIM9yOwDImoCgZui8CuQ7fHdWOCfHmAW1/g3OVz1OMe
OC7vzg/o3rvLQlBytqEY5Ippp+JmaNGcGKObjpP3HhOpEmewiTc6Ct1Vqm5HZ1OgR4YaTaqXXoNDRxCTMzpzC4MXVpeeJXw8e3gXUmK8/MY+KeTXNpJYN21rvj4tCZ5Wuj5/GX78H3WjVkq4OIfZWYUPr+iFOehBWaIGPQgDvHoTRxokJbjHT+OtfARVqTOm0Sm4vYIV9zGtJkG8wuLsS6jr96lRcMg3SDjAEU+xHCW0PINmyycqFEeGXT5eiSjLCq+zRpiX9AeCm9Yokw2HYU/w/RmfmflrvHMH7mQuzuQkni0YZIJb6xFtRzLu23TiEgX82YMBxxoGaQEtW5AWJb1B/pmS0E58WYbjs/R+4DMq6y+KrwOG+/GfP/aWi0FbHMkoB3hq7Tybib92sEGQlvzbW1vcWk/wLMlfP9HmZ4sBY75JXkJWKr4328RgtwABQmEKQZDmJGHBo6BkNcgIcoVvCo6NODiWYKlXELbHmfUU64/hdpwgheKIUdFyDMwUspFx7LLAPzbF9fgMwf27PI7hmIzphSlc+TPkxgI9ZdKrH6MZrANtBsGA5sgQM+0aXlQyyCr8YJPfaKSUeYY3OopZZKjjZ3He+D5hkgGgtsuFouYjDhzWP/schvaXZXB/G+n7+4D2DYQqS33isR2YPKh90LabtTshXA917CzkufYrag4h4r5uut67qcFwdRGOnkZ5PuL0RS6PuyRxiisExorS8jZhQFwo8koyXETguByywBhr0Ag6xKaDWwywzZIHiYFRGXQLQasXsJgZhFnJo26KS8lWWuFIaKuUoihJi4qVQc7t1T4LHUEibf24MMO2TQ61HD5cDlnuZ9xajzEEHGi5oBTjbRdLCnqZwjAMumVFnJeU22afOzNrLyr/lZWinxTbQ7BPz6ztnSn6siXE/Vm3b2/s/awtQ1Bul7s9L+RIXfD6zJNB/CAt6aUFLVeSFhXZtsHsDnEkzAoqBRO+vbt2dtbDm7NNOnFB05FcXYkIkozu6jo3l0s2kJyaGiVXBgcaFo5p8P5igGsabMYlM60a1eRBTrsFWV6Suy6xtDk3bnJ62Ma4+RFspXyYeDQaPisZPKSO1avwt7q8MVjnujnBfNyD0uBUvc8Fo0u9XkcOtvi7oz6dcYOhhS3sjSV9SD5yEi68iuHXka0hRL6h37BnjYjD4MUuITuEj2+xvNXnxT6gfc146rRjWojzl5/biBV+HXX6gs7EbAcEiLKC2eOUq0vEk7N4K4+QUzNw/xYqHGBsrlGbPqQfr5Re1LaNpxLk6DAfmEMo28avgzMyQhQGOHlKa8jmvzpY58edZcasimRtldmVDpVo8zCfYrETYxgGI1EfQ2UURsaBo4d40M/oxjkfbMBIaiNKxUZl0hS6N9HLSgwBhdIkD2nAiWGbQhk4lsXfuDTFZj9huOHwqFfw/762gWUaHGk7GIbAt+Vzy387G5hdUyRhwLkJLVsET0qTP08JcX/W7dsZez/rTlyQFBXb8okMsid0/bJSXFuLmO9lzPcUZ8dqGqwE9NKS5UFOzTYY8kwmGvZTh5uyUk8dsl6Z9tnaKim9hMeVSydUrPRTJpoeY77FbNvhcTcBIVjuZ/iWYKUsCQ+eYDNMOThUQwiDuQN1rDRCVQllzcV6NM9GY5yWZ1PmBqKMuVG2OJ5BnoWcZ4UuFhdabVrjJ/QbEAbIx3fwoxj54Ib+sjXa+qC8va9UQR+1bSr8mUzqOSSPvfuUMi3NrP6Wylt9XvzaAto3NjS4h/JKHD1ZYM+EkBJxcU732ZSCuzeh2aCUFldaJ0mp47Qc5iqFBL3Iklh/i8OBLllqbxZYekySmSy3xnAch0+nRvlvJhrYBy7iFSnKsjG2OrQ7C6znBsNRh6mZJn+0VXJ9Yx1DCDxRUfN9ctEkkinfHyl40FOUCh52M1J/iNdnTJZXCzxHYgjBsbbJZlRoQkkBvitpeha/dWyIrFTb0lc2g6ykqgos00AIQVoqPPHi8t/OBtYw
DT5Yj+hv06537D12Npz9EuKvR+wtFzdsiSsFS70Uy1Y0fPlUH7aoFOcnanTjgtNj3q6x7GsH6xTb7NpSffZw87zMbnSojmVLin7JEU8QeAaTvuR+JyWvFJ4tCVPt8ffT6wsMqZS4bzB99AjNmgbMrFRYO4DS7wIKy7ZwigolTCzLxBQS98RpnI2IJCtolTl1z4aT5xC3PqZwXK6ENunmIk5hM5evIo+c1lUey4ar75JYJlUQIC6/+Rnyx7MkDwC1tY5KYgy/roHs7Mv6cd+i/tiXiV9LQPtGLRK+wryayDOUUroskKXY575PMrxB2jhJrUyIai2SMRv/4Q392iwb7tzQpynPhzOXEIYkPnmJ4t2buEWTIhyQbKyjbqxRf3WO0vR4988/IFyc592kie8abEnF2bu3mcrqzNcOI6RJZTuEyiCpBIQp8vYyMvGwzDqOLVAC7vQhLhWG0CfamiP5zkydYcdgLSpoOCarg5w4r7i9meyqns9N16lZBp9uJCilaLsmL0/59NOSpiM/Uwbc2cC6cQ4Imo5kPcqxDMGob+1uFPslxF+PeFZR/73FgMPDLiOtOpdGnvRSbSl2TT0bjuTWup41U0phbjs/PMuGzIqKjTDnzmb8mcwO4KVXL1A93CJdXqLsbTJUbLJWm8aWBlN1m6ymON2I+ZPVkE1gI1EcbIYckxIptJ2NkIYmdm12yJfeZmTlPpblcnD4AAaCptGg1fSY614j7m3gHTyE9DyEIVGmRfzpp6RhQS3sE7VHievj1Cem4MgpRJ5RJTHl2hZsrKNQqNd+4/majNtlRnX1XVQSw9Jjqu2Kj4BfOTCDX1NA+yYtEr7KvJqybK31uHNysm3ce9dwbi0RCYlzYIba8csYL7+BCoPt4cablG6N+M5tvCjEbLbwTr1E0zIYiwcsKYUtDW5uhLwxGJAogzROqIRJLCxG8pjYaxCO19lcKRBSUKmK3/AT8tnjPFofUA8KPM9kIs9JpCIqdTmx6Upqtj4pVwrubMS8cqCOMeHjbKZ4tkFRKoIkI4kivJpLUuoe2MUJn3PjWt9xp4cW5eUui2yvB1qcV7wy7eM1h6jSiBvrMUpVmIaBa2oh4x0Qe7ZUuQ9w3974vM9vr6J+XimGPIusVARZye2N5KlSYVZqAeL3lwIWehlbSY63LYWl9lxzZ/B6M8zppwU/ODZEVFScHvMIspJrKwPyJMO1BJe8kBt2gyDJcGolRaVouiYC6HaT3Y3TRjFZM0gLhSFK3p3v8caoiazV8EyBk8dEUYxfDfjewQb5uVdxRYW8dgXOvkT91lUYm9A2VH4dTp7HCwY44y2ij9/DiUO89fvweAY16MPFORCCatDXvfiy+vz9a69w+vQhOHoKFh7CjQ9R30K/sy+KX09A+4YtEr4MG0iVpdZunDyIsCxtIdPZRN78kLmHd4ili1etIDZn9AKXEiUlZbPNlYFJKm1cMcqrcR/50U95feMehwuDq5HN8FZIYrlEwya1l+dwbIust0niDDFvNfFrNbzNjziYGBSWTeS3MGam+ctH2/zMNakedlB5iiFdbEsSFCWWEJQVzLZdHnQSelHBRlSgELw0WePUmMedzRgB3PvwOmYeM7BqiMPH+Hg1pFLskjd2SoqmIYjzEkMI4qJkqZdyZytBCPBMyV8bH+PihE+UV7RcncmdHasx6lvPJZHsz5l9+2IHxPZ6mX3e57ejqP/JSohpFyQRGEKD3U7mvtNPUyjudxKUgK2q4NRojWIPaD7qJNzvxIRZRS8p+WA54PKBOrfWY/pJzvzdx5x3EmJhYvgWRpIiTIsTEz4Xp+r4EkhiBkNDeOuL3OgrpGXh+i53txIcQ1CuLTPrbjJad8nSlFfWr5MlCV6ziaxmcGwDdfsGLM/rP/DEeTh0FOX5muVclmBanMk7MGJRHxtD3lqBtUXYXIWT5+HSG1p8OOjDygLK0q4Vz409e51wPajVtfPHryAhBH5NAe2XbZHwVNq//Bg1OgmN
FlgObK0ho4i6iGDThZsfUS204NgZsB3i8RnSmkXNUERpRiwF9TxFWpIJlTGUZiSGxKHEi/sYccic2GLDicFeh+EJjIkhrN5B5NQQRa9kyARjeYEiWeB7l14lnr5I2h9w69aAcFBSIsgqmG07vHawwYOthM2owBCQlxVpqcirivUwZ9pT3F7t8QNWuE+ddGyaO6nB4bZLWmjqsyoVm1FOzRI4pkGQFawMcq6vRqwEGcdGXA61HFYHKZ617RFV6OHZ54EZ7M+Z/eeKnycrfh6IVRUo1C4wvejzk4bgzKjH1ZUAzxTc78QcG3Y/00vdqQpcX4lxLFgLS+K8pOmaZEXF1ZWQTpzT3x41aTgGE3WTwy2H+92UllEyX5X0pEOjiGH6EHlgMlyvERWKOM2p3f0Imae0bIe33nqFl8OE0nb4YDVmLYgwi5SqF/JhL2FzudD0+4OXmVt4D9lug9/YVQsSpy6guluQZfCnfwB5hrJsSgVX1BApBo45wtzmqna7l0+2aqMqsY6fJo0iTQzZtqV6Xuzd65Rl6///ihJC4NcU0OCXPGOxt8SZF5Bn26/BgImDIAxIMxgeh2ZbS1yFA/B8vJkZnI8f6pKkY+Cdexk+eBvyHFmWzKkN4kpixyWxPIx3/UPk4gNGVx7RaAhSVeEcP0mDBm+mIeQ2ioqa6+IVfWSWUHc93IXbnEwEK1mN4VaTlmvz2sHGLtuwRGEbBkJAkpdY23bWZQUbuHzIMKu4nJFweyPhYTclLxW3NyI+Xo2xDYORmsn3DzcIcr3BbcUFYVZxdyMhLxTNxR5lEu2Wkb7IC2ufJPLLjZ8nK977nL0gFmYlQumDifmUb5n4DGhKKbC3e1QgODbkYkvjqcNOWSnubCa4ln7+9w83uDDhc30t4seP+qyGOecnapwerxFmJWFWMsgU9zcj8jSjMC3ONBRnjR71jUeUj1NU3mBgHWVxUKLSFHsLzg/XcJOIPE6ptRp04gKhKo5kW2xEOVbUwagpeobLrGWSDo8TT/9l6mcvwP1bcPMjyrUV4ukjeJaNHPS0MWeeQ5ETHzhKKgxqeUo0fYRYNqgfOaFnUD1/d0RIOA6Eoc6+ylIr6n+OG0hl2bsqRtiOJoV8GVmsb1n82gLaLzX2sp4MgRibhDRBSFPbt0zP6qaVMLR9OkC9CTc/Rv7pHzC3vkLcHME7dAj5B9e0Mr+U8Ff/FvLP/gCv1+WKc4C0dRJn9TFzhkRaJnPlKjEG3sZ15KtvIpOY7+UF8d1PsfMuUWlQkxZmEiPzlN8Yr1GthNw0W7gm3NqIyUuFEILxmkVSVhhCcHU1om4ZWFIw0XIRTZ9+abJWecRbFb4lmG45POokbMYlRQl1S5uDbsYlh9ouC9uAN+yZNBzJVFNvTkFR7paRPi++6pzZfr/t68fPkxXvfc5eEPNMySvTPnFRcX014qOVcLcntpPFmdt+Z7YUzLZtUkOy3qn4gzsdLGlwdqzG6zMNADbCnKSouDjl00tKLk3WQcCnGwmgnSBmmgUvTfrMtGw+WY1o2QY3bt5nkhzpGMzNnaPsd+kp+LRsofKMPMk40HTxLMG1zKW3nrKmmsxs5Cw+2GCqYfJwPeSQiJmqCx6lDj+TQwSGi9ca5tywjX90GBGHFPduEQiL62WL3J/F6awzt34b2dmA9ghIiVdmOKIgsms4RYo31EC8NKczq1sfw3/8XwBQl15Fnb6o2dJf0A97VsWIyYMvZGN/22Mf0H4JsZv2hwHKb0CagO1g1BtPCCC3r2lmoyG0C23Q1/8WFbLIqffW4VEOUzOIsQmKQZ94bQ0vjonjhLSIqK08JOp2iStB3TRheBT8hq7Nr6/CvVvIPMOTJleSGqkhcX56lbmLx5CmhZFEmI6PNE08SzfkK7T2XcOR+EpyZszjzx/2Uaak7UnOTdZpOLPc2wg54lqM+BZL/Zyi0gOuKi0oyoqsNBiqabX+tKg4P1HjULtACM1qvLuVsBR1oMh442CDslIEaQkC
6rZ8/sn9S86Z7ffbvpn4ebLivc/ZAbG92ffOIPQOSG5EOd2koG4bfLwc0Ilz1sOCpCi5tdUnSzNs0+DsRI1BVhKkJTfXY8K84O5mwsGmTduzdgkloA9kE3WT8+M1mq62lVns53T7IWVRsmkYBIFg/pMNsCRFv42pSs43TVLLRqFLmNXoOEZNEgf60NUJIrLCIVcGRbfD+XKNTIxRTI8iDclk0+HCoSFMyyQPFT+rhtkQLmvYXDAN0lIRn36ZuiphaBRGJ5FlwVyaEq+v47k2Umwr3KexnmEtSjBNVBToUuN2P6wKAy2jNzz2GT8zVZYvVDH6VYt9QPslxW6J8+R5/W+/jpCm/rmUemHuOMmevgiAcjz4yYY26RMSvBplFBD061yPXPKrt3HWM16JN3DGJ4mSDKfZpDbcojx8VM+yCAtncY25e7+PfHgbpEnstUjHz1M7NEu0tEBMl3rDJz7xEkZXMTQoGKQFmSdZ6mekRUUnLhivm1xbjeinBQhBkJXcayc0XZNTk81dcscPjg6x0E/5w3tdACYbJd8/3OJAw0Fu290/7KX4tjYEPdR0uLEWsZXlpGnGnz/u45kGtzZiKgXHhl2+O9Pg6kr0c4HSfr/tm4mfR33lec/ZK4e2F/BMQ3BnM+beVsxqkKOACq2qrxCgFElZEWQldzcSTo24ICAuShb6OYv9nKRQvDwl6SUFAKdGPaK8wrcMHvczim6KIw1emqyxWROIj1e4U7lgSPrtMXxTYE6ME8c5H1gmRi/FNgQTdYuNuCKvBN0oR60usVVItqTB4YkhFt0R8EZYTwVGdxNDCEYKhX9yCIDAsLnpzyCKnFXD56BZo20ZeMGmNuT0fE0a8xuYjQb1lXnwPBj0UD/5E+1IfWenOmNiXLoMrWGwHQ1mS481o3NbCxZ4epD6c1SMfpViH9B+QfHs4PaLTPSAz7Audw37XvkOanoWPr5CubpI0Bzluj9D4DaY7yScT1dIGsNk9JjzU+IJF88UyDIn8JpkUyfws4iwLIjzdepKQZ7h+QqnzIh6PRwqnGaTMElxLEnNFkw3BXc3S+a7GathjlBQVhWduKJmCRquxUjNRCCo2SZ5qTg96uE7Es80yErFiGdhSUOX7JW29nh/KWQ1yGm5BkoJXp72ub2Z0d9WdbAdG1MabEY5zvZz18KcJK/IigrHNL6QRPC82O+3fXPx86ivfNG4xdzBOkFa0k8L/uBOFyEgLxWeJVju63nEEV/SiXLSTIEBRVXwyWrEq9MNUIJeXGCbgpot+GQ14tpajC0NTo26vDLls9ZPuLbUp153SKXknfkBIk8xh4apUpe1XBD0M9q5wjAETcdkoZ8z2ZAshxUHmjZDruTYsEPafUQZrHPBhE/MKdqdRTaikHbUh6Epznsx7WYNP4uQWYIyPNSNjyAMEWXJRNPk4ojDRJgjO6k2+P30E903tx3U+cuaBHLnGhQVLDzQ2VU4gDd/C6SBfeFVjLxEvfQabK2jAGPnQJzE+o3OUnBc3ca4+JoewP4VnD3bG/uA9guI54LX82bftuN5rEtVlnDtfUSWUoxNcsWcpJ9VzHcKzo5EkKZ0laTVbOKd+x7ywmWaOwZ8SUzNdvGWYyLAsUw8oWBolLIoiaXNK9aArD2OPTHOBx1FavhYGwVnmgZJwyErFIuDDFMIkrLENiVRXjHZcGg4MOFbNJ2SsqpY7OcIwS4IFZXCNAQnR1w+WY3IC8XtzQRTCIqq4t5mRloq5vspR4cdJuou002LStqs93J6SYFpGGR5RVEqWq6JJQUCPVArAFt+Njt4UZ/seVlCtp11DnnmriPA511jP76ZeF75F+DmesxmnLHYS5lq2loZ35E0myaH2jbjnolSgoVeyqNuRj/V5KMfPezzl4+2qKi4v5VSlAqFXosCxVqY8fHygJuPO6wXgpqhDTjfOjpMw3P5oPRIK4WSklNjNYY9CykE1zdi1sOczTinZkrubaXUbYmRpjhZCqIgS3Iu1gNO
+xU3LJ8bYR2aU7SLdQ5kIdJxdhV/6kmfM0T04hCn6jN6fQvpOoixSdT8fVhZBNfVmVh3C4ocikI7VZeFFlcACHswOonwPMgDvXcMj2l5q2eYi8q0NFCCbj18S1ynv058LUALgoB//a//Nevr64yNjfGP/tE/ol7/LHPwH/7Df4jruhiGgZSS3/u93/s6t/2LH88Dry+Yfds72V/1e6heB9ZXYHSCOIhJc5NWlTBfKvpmizOjgnOnT9Fo1jEbjacXql/HBC6P20TvvY/nFcgjxyl/629xZb5H+vAeztAQl62YrUOnCFcjZK3G9ZuPCKyUuiOpzxxHqYpJFyppUwJrQUGSV5wa9Xhlqr5bPpRGjG9LOnHxlHnn0SGXa2sxvmOwPMgYciV5qehnJXkFUV5iGXBkyGG64XDp8AQfP15lpKbVQY4PudzajMhLqFmSl6d8fjo/ICkr3lsMnhKp/aI+2d4sISsq/sePN3YFbP+7i6PYprHfa/slxPPKv6DXwmaoKfVpofhLR5soJRACXAnHl65zPaiRZRXrtk/NMrBNg0op+mnJ92ZbvDxVEqYl97sJtzdilgc5g0Sy3E9QqkJhUlQ5QScjedSnmj2CMTpBFmT0o5KFQcHJMY9eooW6G65JkpecHPM42nZ4uVHx3kcPWFjZQuQpx0TMq0cr7NVHXEgrIjlJa2SW1DxKMmpRb/qAPuDKms+r6iE/KS2UVeeDrM6cm2OmCRw/p30Sqwo2ejqj6m3B1EHNiB7a7oVXJWyuQ69L+u4w6sTFJ9/742cBnmIuipPnUeEA0Wzrvv2v2MzZ8+JrAdrv//7vc+HCBX74wx/y+7//+/z+7/8+/+Af/IPnPvaf/bN/RrPZ/Dq3+/bEc8DrRbNve0uTANWH7+hT1b1boCpoDeONTuIUbZIk5kyRcU48on7yLGa78dQC3nstVZYY7/wR9Yf3KE2LYOYYZSWIP72BubVGuLbC28dOU15/xJWBpCY2GFQG55s2WZJxwa84sjaPVSaEuHzYPIwjBY+6GXlZcXczYbJpMepZT+nu7TXv9G2JLQ0MAZMNm79+vE2cl/w/frpMmhQUQFaU/PjBACkNrm0WTHhKK4OYkomGzeN+xqAqd697v5MghGCxn3FuvEbL00s4zivi4snQ9ueVJDtxQZyXu8SBTlzsitfu99p+sbFT/t2baUtDZ99RXnJsxGXct3hlqrFbXnazCLWU8rdnh1heXuOlsQmud8ExYT3KeX8poOFIDCryJMOwLb53qMndTkLbk/zh7YwBBiWQItnExMgD5oYltulzQ0FplPi24OZawuEhfaDyJBhKcrztUF+5z7U7EfeX+/QNn0mVUjRbZFGIPTpF3XFpPtgieXAX1/dwZy7ukr1EkYNpkf3m30T86U/xq0yTt77zmzRNwLJRXg1WF3Sp0TZ1X310UquB/Jd/D678Gczfg6VHUG+RXfsQNX1UH4Jf0MoQfh3lNzS72m/owepf8fhagHblyhV+53d+B4Df+I3f4Hd+53deCGi/TvEi8HpKXy0MqJr1JwPXAjhzSdfJ80yfxtoj4DjI9jBzDz4l3uzgDbWRk5cgS1Af/hTla6Yk7GkCGxIGfXh8l/LODa40T5AOFpDmJI/6Bak7hSxSjtTb2GGXIdfisEh5lJqs9VKGXYubqwPiXsm9qkWWFWwGAYUyCNOMrb6im8NwTfui/d9en0IaOlsb8kyyUoEAzzQ4M+Yx2BYbnm45BFnJ2XGX62sxcVbiWBZlpTARKKUljnbUQaI0J49jWp5LLy0J8xLQm59CsFcewZaCx91sN+t689CLM6sh7wmL07MkQ9uguN9r+8WHNASvTPu8/WgAQivevzLt89KUr73JtolFdUfuZtUpLm8nPs5qghAe3zs+xlwldntuK0HOo07CocESwyLn48Rl8/BBuqk+nLx1pE37VJv/8aNV1gcVJvCwcnir5vGdlskgKalURC8q2CoLpAGHh2wmfRtTCqbsikkj4hPb
pq5S1qRPapjUKfCGxkEayHDAnLFFMn4QNx2gPvgJgzjFW36APPMSIs/wRIXTGiYypD7wGQrhNxBAdel11Pph6Pch6OqxnR0yx6cf6czKb+gyJDwZtP4GZfx+FeJrAVqv12NoSLN4hoaG6Pf7L3zsv/gX/wKAv/pX/yo/+MEPXvi4P/zDP+QP//APAfi93/s9RkdHv85L/AsXqixIr7yNShPyR7fxpUHV36QKAyzfR0xMkG2tktkOAoU5dRD74EGysI9r2dBsYQlFsbqA6Tio7ga1196kSBN6wsQf9snffxtKUNGAwK1TDI/jlym9lWWm0y18YZEbAn98hGqtpB0UNGp1GibUVQX9LZRpstAvuI7ElBbjnovb26Lc6tAXHoXlklUmSppYfov/9OkaUVbgWpLT4w0qpXBMgx9cmKUb54z4Nq5p0CoqRpoRJ4XDVpjyxuE2P7q3RT+vSKuMQ+PjHJsZJU0zvLs/o7GZcS2yMEbGWW/XefXIOP04x7ElBybHcLf7X/2k4OxBhTQMyqqi3h6m6b54ef/D0VE2w2z3de3EXxsdJcpKarbE/M9cbjRN81du/YP+rFpdqNmSQVLwcUehlIFfb/D67BDtmrX73heV4g8/XePR0FEapsFYy6U2NMqILUlWe9iyj23bmEZGI5VsiTorQY6XgbRsTMenZktKBScPlOSrARYV7mST9sgY11f6jA83WFtYQRYlERLDrBPlBZ92C5b7CdcbFoeTNsfdgrQ1wbTrca7R4Ld+8zK1kWEA0s4WvQ/fY0wKhNPk7S1FtBVhBRbfufUJ3suXobvKd+ffISoq6jOztFpvIVtt/fwrb1N2Nom669DdQhU5MhpQ+2s/RCUpQkDVeInctpFjk9hT0zRnZ/Vzlx+h0gQxOoZz8KCeb0VbzKSeizEyQhWFOL6HUf/VrpJ9IaD97u/+Lt1u9zM//7t/9+9+6Zv87u/+LsPDw/R6Pf75P//nTE9Pc/bs2ec+9gc/+MFTgLexsfGl7/MXKV5kT6PCALWxjvBq+IYg2FyH5WVoNEmCEHH+NGr0IPhtKAvKkXHS4xdhqwOtEbh1lTwIYfkR2fGzmj7/J/8/roQWabeHU8TMrS/p3k9jCK+WYFYlYZjgBVsgK3Jp4roOL23cIjtxhpeEwXJUkty6R9NUDOKINMnpSQdLWlSmRRSnvFyscThe5I9qJ8jLgiwrGHUVg26HrV5A3ZGsd2I8cqYaNr2s5P+72cUwxG5PqpcUbPYDOmFOLy64sdjhcEsy2/apeT7jMuM/XX1IFsfYC31ONg2W1x7jZh2Wtzzm5s7RSVPCFP7T1ZDTY56mghuCYBCA0Arrcd8gC54A0vPIHg76MBw85/PLfqGr48vF6OjoU+v/l0FY+UXeY68EVhaFDAYVRVWR5BUbkZaqioKA7x9p7s4dboQ53V6IUZV82slYC2NEkXJ3MyZcWmE9U5yvK8YPz7C1XnA3CNksLYxuxLBnEToKD4temJKGAb5RYUmDE0MmW1ubzK8FtETOYTGgqNnINCBTNYYbHo/7JaaqyDpbrJNTKwuyA7O0bUE14rOeK7zNDoM449qVj8myAseAM5fO0fv4j6ltrRAV0LUKipUV/SbkBW6WUNy7xeYf/K8wMo44eV4r5yulM7QwBLdGkST0H9xHjE5o+6gkRqUZxCGmgo2NTU0gO3J6d68JOt3d91uVpX78QLMngzBGJH8RVvbXi+np6Rf+7gsB7Z/8k3/ywt+1Wi06nQ5DQ0N0Op0X9siGh4d3Hz83N8fdu3dfCGi/CvG59jR7xUKHh6HW0HNmWQamFi3m04+1O3XN19e4+o5mQQV9zXjKYl16CHtw6hLxwjypMUKtSIj8NvFiQT3qQVkiJ2eY660Qby3gGRuAIjj8MoxNIu9fw0tC3hMjxFNHWChtZlSCIw0ubF7HVC2clkU5Ms6pEZfvbdkEC8ssZS2UISnqE/xtujTccaQh6CcFdUfSdiQbYU5eVaS5om5LcrMi
SEs+XApYDjK6SYmJwpIGVQVZqegHGUlcsDjIODPq0sem7G+yisfjrIVMC6z5LlbNo2YZfLwS8tFKiGUITEMw07KR6JLW3s34q5I9vslN/Zu61i+DsPLz3uPL/I3PXntHJeSTlZDbGwmrQc6RYReEvpYU8KMHfQwDlgc5ozVJJS1ODRmshwX9KCMuoKMsHkQZ1x70CMohKlkx0rLxbcnJMQ/fkvTChE9vPqJQcNiSHJid4exYjQ+XAh53tcXRMWnyPbNLtr7IJ0KxmHv0zSZxXqBygeXY1GXBsGMQKYkSxq5G5GAQMj8QnG+7JHEG4QCn1SYqK5zeGp7vge1qYsaOyk+vA+vL0NlAHTuDsB0IttsNWaofd/wMzH0f/Lq2jalKiENN8shz7Uy942m2p5Wxc4j+ZWvW/kWIr1VyfPXVV/nRj37ED3/4Q370ox8xNzf3mcckiV4wnueRJAkff/wxf/tv/+2vc9u/+JHEqCTWp6ckRuypa+8sMhUGmK4FG+8gzr2ihUrzDD5+V9s7mCaEgabvKgWmhEZTs59WlzRQ2h4cPo73+D6OKImkjSMlniVhfFoLlsYhcnSM+uoiNFuUns9Ne5x0M8dRw5z1h0g7JXVDMXviMKfqgvsbM1z/8F1qBvzXnSvkp3+L8dkJ7GPfRZ44y0t31xmsrtJsClpVwocLXY4MORQlvDXb4KOVkHudmDvrMZ24wJSCo0MOF5ugqgrXFOR5SVxpZQjHFAx7JrlR0fJN5vsZV1djrOY0Zm2SsXKJLM0xpUFh2lhK0EtKigpsqdmJg1TrS1ZK0YmLp3T+vgrZ45sEjr3X2pFx2ukNfdX4Mn/D1wXPn4cU8zygelaHcyfTiosS39Y6jp24wDUN8kpxYtQhLSpGPU0GkgL++6vrPO6m+LZWFzkz5rGcmgyCAb5lYNsmq8rQGZSQPA5KpDSIc8XMsOSlqTrfm22iypL/9EfXWQ1KImFTdw2OlDkfr0Ra+9E0CPOS7MBhrmYhZ9KcXDic2HjMSGOUc55DrV5RphnSdDnQ8lBFjm1UXFkKmO9lnBn1wJD0opSGbVBfecDc4C6xIfCGDeTBw4iaj7r8pj7Q9rvw6TUwbUD7I4qXXkMtPoTDx/V3u9+D3/jrGM0W6uq7VEkMj+9DmqAsC155TasLFfmT2bVr7+8OUouT558Qxn6NempfC9B++MMf8q//9b/mj//4jxkdHeUf/+N/DMDW1hb/5t/8G377t3+bXq/Hv/pX/wqAsix56623uHTp0td+4X+R41nfM/XqW5+1d7h7g9wy9eOmD+kZFIVuBm+u6QawZcPL39UDlquL+vcX52D+vs7sAB7fR0qDuXKF+PhFvHMvIeMHOoOzbEoEcVbijU0iD8wStyfIRk/jq4Jw3oM0wnFqxMKkZpl4TZdwLaZpKKJByPUgRbz9Mx4+XuP8axfwBbyezhMP5vHCkvjoOVLDom5LeklJPy0J8woqQV5BtU32CDa3iK4/pCrrrMdDSGlgSsHpUY/bmzGdtCQoEkZtiwMNm4qKum1SKYV/7BjmegimpOGanBurgWDbRDTGENBwJEVVsdDLtcitELw528Q2ja9E9ti7qQdpyUaYv1Dx/4ti51qOaXBtNSLISlqO+XOB5Bf9Dd8EEP88pJhn36+3Hw0wDLAMwbmJGp5p8N5iQDcpWA1yDrXt3c9IAY+6CQu9HEOAbWpA7MQFeVnh21r/MSsrbm+EuJbF/c2EI8MuZ8ZrDDkTvLPQZ6EURHmJbyhMQ3GwafO97c9+MAj0jKRhIKuUISk43pTcCHKkAd2kwJaCIVEQr61Sri4xrypi4eMKhyMi5U7igLI4pSoudD5FFDkfrfo0jxxFpQmDfsmZ07Ocb0tqZYr86Cdw/Az1KHhqoNmQEvXGX9Jth5FxPXPmP2Eqi8kZVK2uM6rhUcTYxC7pAyG05czYhK68zB5HXH3/iezVyjwkMcKrwaefoMKBdvT4FfM7+6L4WoDWaDT4p//0n37m
58PDw/z2b/82ABMTE/zLf/kvv85tvnUh8gw1fWh3QPoz9g7bi1QODcH0IcSJs1rG5tr7qEFPq+4LAWkE195DzH0Pdfikzt5MC1pDMDqlrykEnLqA3FyjfuIUhhRUP/w/IoIeZWOI95Yikl4PZ3OZObOHZwpcW5JUJt7xEzSGJXOWTRKnSNfh7YUBdwcVlnuAo/EDMEy8qMe1pQ79H3+I21nhu+vXqB+YgbED1E6dxe7pDRsENQNqVYolFXo/VMg8xy0D/I1Fjo0coFJNbCnZikse91KCTHHQEEw2HBD6ee8vxbRdSc0y+TvnR3j1QJNSKW6tx3yyFuFIg9cPNrg44e8yKvUcXMjyICfMNCNypx/zIsmmZwes99LK53uZZt115NcCiG5cAIqWK0mLn28c4Itkp76JkYNnnaK/TLbnWQaWIXQmLrS+9g6A91M9Z3i/k2AYBnlRMuppVuvyIKeTFBSl2madGlRKl553WKhZqd8zE8XtW/P4piTLS+TILPe2MmqWwLUsToxZvL8QkpUK35RsxoX+G0wDr16j7ZmM5VBWFmf9ikc37vBB6FLUmky0PM5GS4SbMaK7CcdOc2A1pQw6yF6f4yIgHjrBUN0lDgJknOAPtbHXU+LbNzm9scI50ad+4iRy+hX4d/8LLDzSpcAdJ+lnmM6i2UJd/u5nSoGGbVP9zb+nqzCt4e0KT6kHpAc9fc3trM7w60/JXlFV2httZFzf5wWzZy/q7f+qxL5SyC8iXE/PfGSp/v+z8x/bfbQqCrVT7bagqNoRMDYkXPmxBqv7tzSYPfxUL1zX04B35iJieAz18RVtN1Mp+JP/nerAYV2yOHmesIRBCc16jWRdkDaH8bOIuWFJaHmUpSKgwr/+AW6W8qPY58flKGLQY8SQXLAG3LMlXbOGAlbjioEYpvIO850woD/hM+L5nHMFvaTAlYLs0T1esUPO2w7JWxd592GHfHGesaBLKx2AUTLmGCRCglKcm6jxuJsyXrOYGm0ShQMMIWg5kmPDnrYWUdDyzF33YkvCapjRix08+4mb9aivVR7CTJe2dvoxL9rYXzRgPXewzkao1U98+/Oltr7IdXnuYJ0gK7m+KkkL9bXGAXZo7DvCzXvv+U2NHEhDO4z/bH7AICtp2HJ3gP1Ff6tCV8Vty0AKXQ4GQdsz2YpyihIsoViPSh52Ux50EgyhqDsmDzopVaUFsM+OebtAenTYoZuYeKYgDhNkVRIJD6UGDIKYpBAIBEmpmO9muKaBC5hSUCk90lFWiqQSzH3nEueDCFWVqJtX+dlSxFQ/o4h9ZlSd07WUD9wmJpvc2Mp4bNQIanWapuK7zR6tHKI4A8fF9iUqjjjjAmlC3eggywKCHqwvQZrqsmFnQ7cMXqCE/6JSoGHbMDb5dB/etODCnP5/keusTppw/jKit7Ure1UZ247UjdauAPrevedze/u/IrEPaN9QfObk8znN2J3fO76nmUfPnN7KkxfgZ38CwqB8eI/4o/fwuutI29F19EEPGi1tQ7OtBoDtbA9jK7jxAdHaGj82D7LYmkYiOC0qzCgicGrY0uLa401udCsoc86EcH6oRtypCPKMIjfAdpGzx3klT9hQNkI1+UngYBBwqzHNPb9FaU1hXt3gbxxv87ibUOYFXqR4a8jDSiPadsXfODNKFN7BbrjE+Sx12+Q3s3U2lMXmxBRrge6lNByTv3RilD+5Fm33WkxMQ+gh6+3N2bMMpIA/utejKBXXViNeP1inbj8p47052wTELtvRs16sAPKiAesdcPQ68nMB4suU+aQhaLkmr880ngsGX7Xv9aJ7flnh4C9zvyAtubYWUiqQAs5t9/6ed984r0iLipplkJeKiwd8UFC3I9Kiou2a+OMGq2FOXlRcX4sIsxJ9a4EBTLdsmrZkyre4shgQZDo7Pjvu8clKyFI/o8ot7DLnfE0wH2mtx/Ug58KEx1ZYYAihTWiVdmLKiop3ViOKSm2/3iaGqshNRT0ZUAmP
Skj8MuOTxOVRCrXhWYanxomCCtnZIlUFuVfn8twlfvqohzIt3jcFIs/IhIn9+C5z61c0gcO2NYHD9TSQeT44zpeaD3tu1rR3viwMdNtBSjD0Osw+eld7KJ6/jNiWvRKup/eEscnn7z2/BjNr+4D2DcQLTz6fs1iElBj15mdotKos4e51MExKpbgydIrUmsARFXN1vcHgenD3JtWhozq7a7R0v8604MaHZPMP+R+SGeb9jJpIuECX03bKB4lDOnYM3rlGnJYYyobRCQbKoupt4dpt6paNUcVMmgXC8/nw0CtkWYYQkurRgNKt86iqmGw55JliaTPk/lYMSjDbsjhg5SRBiuW7KMtGXnsfTxX8bGHAoDFKY3WD104fIEtC8iM+73V3SnHa4HFnU37zkPgMuUAagqPDLh8shfiOwcOtlEpBWj7JoGzT4PtHmk9t2kFaEhc6a9iKc93Lcs0XDljv3OuLAOKrlPmeJ+r78/S9Pu+eO9nVi17zl71fqRSrQUGlFIYQlEo91Q/sxsXue2hLwXzvyUD7W7NakuqNbQC3peC9Ra3ksRbkRJkesrelYLxu4TuSj5cjSlXx8WrEobbNS5N1srJibZBRVYqDbZfQlXSSisee5P5myrERB6UUvmVQeCZNT1JUMNO2QSl+cneDlRROjfkEWbHbCxWvvMnp7G2ixR4FIYU3CTNH8XsJQSnwpU0njai8IahK8lNTWNLCcF1cCZ1uiAKG2y7R1CFi5qjXHFAKQ4DaLhkqrw6fXKEKg90qzfOA6yk3e6Xg3MsYjZZ+vGlpsti2jqPh16m2dR6NsTEYDHQ743mH5+ftPV8gv/erEPuA9k3EVzj5PCt19dxrCQGHjhH3+qTmMLUsJhqaID51gHp3Uzcq1pfg5lXdTzt/GZHEqDyHR3fpVBZ5nlOvcoK0wCDGbNfIOgq/SAiKAte1qYIS8oK6rKgbFW/Vc4zhFklap1VFGAdHyFZDvJpLEBUIBCtxSVEpkiLG92yEEKxHWsMx7Acca4An0a8pz6iSmKBQ3KwaGMKnIuNcr0+73aBq12nFMXFeopQ+VWdFRT8tcKXxxNNqu0eWlYoh18S3DbaiAscUGNuiyHszqGfBw5aCh52UR90UaUDdNnhpqk7d1mXG54kUP+86z8bXLfN91b7XjquzuZ0ZPXvPLwKs591vLwDuPAZgom5RKW3PtwOUpiG2e6WK66u6FJmVikNtG0MIKqWtXmzzyXu3UyYeqVmYhhZ3EQI8S3Kw6bAe5rimoJMooixnkBaUFbiW/vzPejXubKVYpsR1LUyjBFVq0QEpcKTElookL7ApydICJ9xk2E75MPDoxEMUaqcsKxFAb+IC86LH+VGXGAPDlEwP+zzYSrDLTM+DCYESBp8sdnnr2Ci2oQjv3MFfeQxAODGDNX2A0m9SqQzpuk+IH8NjcPXd7eugZ8jg+YfebUY0S49h/gE8+JTq9EuIi3sY45YNhqH3Fl+TwaoofFpW70tkWr8ONP59QPsm4kuefJ7N5NRf+evPvZZwPdTkAbw0wSkroscPcZoNvHWla+SbK1COI9rD2vk6z0BKVFWCV2MoSfGFAs9mSA34jf5N7J7CHjnDoFAIafCG3eeVCReO1KnfjTBrw5hxxPdHDeI7t3GTAD75j1jDZwiFgTMxQzvqsJiZmCqnVsAZo8M1Y5Q4q3BN8Ck5M2JiqkwTY7bZnsQ5pBOgKlRrhOj0IZpjbUzL5JVpnx8/6nFvK2Xx3Uc83OyT5ArbgANth6oChEAKmG7a+JaBQiEFTDYsXpmu03LNz81sslIxWbfISoUQiutrMVGuaLv6/r7981Hpfx5/sL3xVQBxL1hZhuDSlL9rfLoTXwSQz97PlmK3V+ZbBkKIXaeEM2MeYVbRcOTufc6P17YzM51R7/zdnin1fU35mb9hL2mkYZv8ny9P0E9Lmo4kqxRZUbHQT1kPc0oFqlKkueKVAzXCrOKVCZ9L03WyvOLuQLF27RpGVTHS62G0
JhmtWSR5yfryBkYBGwOFb5d0HIduJnCjnKjaNhItCwSK9uoj5geK3vIazZE2l4cMNg5fIHtwnxYZo6lL2hxhJNyC+ZQ0fMSrJ04TGR08swuVoreyyUdpwgeOR23mKHOzQ5hJjHI9zWIc9HaJGSLPIOf5h17X0wg/6OtRHdOGcIDa2oC9wsKntRDxjh7js+2KLxu/6jT+fUD7BuJLn3yeyeRUHL34WlvryDxjbmmeON/AEzky0swmag0IwycAWpb6i+E3oDWE7fr8XW+LTqPJ0OY89skzcOsjXln9hJ9srqOGx/lo+iSvH9cD0cp5AsbSEPh5BIsPYP4+c+srxCXIR3UGtWPcEEdQlSIxSuylB4zUQ1aMCQrDIhESM4kp6w6m6+mscfoQDUNypivpj42zqhzuJCYLyzFzB+vbGw2YhiAqKpb6uVZRrxS2VdB0JEleshnn5KXOACqlGK/b+vRfqueXzfb0ijzLoOmaiH62WwZzTIMwL3j7Uf8pJRPgKxtY/rwCxl8FEJ8FKyn0Y/eSQ74IIPd6jyH0c2+sRwghSIuSI22XEV8L816a8pFCPPW66o6k5ZikxZPrf5m/YYc0otDU/FHTeCqT/D+9NE5RrrEaZDRcSd0VXF0OMRC4UnBhwuf2VoIhBCtpxZG65GFYQVnyv97awCpz1iPtxWerghEz44CKaZh1LMdko59zYzVixLc4XldEWcFRB17qrdCo14kTuLewzkKYM2+7HJcJRqNExik1z8Yr+vpzbviwWlGWBR+LSe7SxE9yDhQV0fs/o95Z1oQt04TledTyPBw/uwtCuyXEPULBQkrE5Tf1YfTRff1ueT48vvfUNYw9IuTAbrviV521+FVjH9C+ofhSJ59njTy9GjxHimbH40g1WkixQF2U+hRnOyAExvAolePC0dPa/O/Gh5raW1W6xCEFNhUTdQs2lWZcIchMFxH08fOESEriw6M0PBt1/jJqa/0JO1MpWJqHJEIOeniWzc/sCapywGQ7ZYQBW7lkxWoTGx51SobqHoZ0uNIaYXyoxmvCwNi+nsxS3hiTrB0d5+P1BM8ydunrthRYUgOQYQocCZVSVKrCForbG5Huryk4PAQWUCpBP86pGRVt+2mCRZCVlKXi5kZMWlQI4M3ZJnMH6iRFSZRVXF0Nuboc4FkGx4ZdPFPSjQt6ScHtjeQXbh/zLDHjywCiLQVVpegnBXI7Y31eefHLAOTN9Zi01LJTlQJTgGnoucCtqHgqK3v29T7v+i/6G8pKsTrI6Ke6pJsW+vNJ8opom4wT5xVSGvxfX5vkTx/0sU29/G6tR1QC/vRBj5Ugo5dWvHJohEqYUGQow2aqYXNjqY9HToaBUyoCQ7LSmmIsWmDWKugFHQ42hnj5QB1DwNkxl6tLEpXl3DJHYWASCIf5bofT4SKfhC3KsTb1Zouz6TL1ZA3D2p4Ve/kN1MnzxGmO+ug+tSgmMBwI+nh/+L/o2U9Twnd/gDh1AdXv6gHnHX/DF4Rh26g3fhN1YU+Z8caHqBPn9Ezq8TMAT6mAqLLQNlM7av6fw1r8dQK9fUD7JcZnMzn99j9vwQkpMba/QKosd+dZuPb+k8wMpTM+v65Pfmmi7dzNUltRFDnUG3D5Tbh3C+/TT7AKi05rnDoVbhpR5THq1sdw/5Z+LcfPUp64QHD/HozNUF+4QyBMblpjCLeG02oyNTGN93ABbxCAiunaDSrDIMormo62Z1kdZEw0bIztv1fYLg+XIhb7OYv9jLNjtV3CQFYqDg85/OWzh/jRTXh/MaSblFQICqX7Od2kxDcNgrziSMukWnjEb3gB1vUFigtzhAV8tBJwfS1GKYVA6eeXikrBxckaQVbhSYM4q2jY2+897PaF7OUnmUiYfb2h6r2xFxDg+UD07OOeZUJ+sBRSKsWDjjZFfWc+QKG+soP304PQimPDLnmp8G392qK8ekoEYKfUGRclKMGbs40v7PPtJYNcW9Uu5ZMNi9NjNa6vRqRlxXwv
Y6ZlU7MkrqEgiXlt2iOtdF/0p/MDsrxiOcgZqZl0kpIwq/CmJjE9cEOQcYRdFWCbeAUcHXPpKptzIw7Ekh9OKuL+BvemJkiKkjol0vAwDh3HVwVb6TFIE1qexcLdJTbHZrEGCUMz02ToQwOgySKVwrRMRLOFnxd4JhyQGaK/xncXbyHXlsFxNIEjz3SpcYfcEQb6O1zkiPbwZ/rsO9//nQFrVZZUpqXHcQBuX6cyjF3gUucva4HzxXk9bH3qwgv9zr4pqv63BRT3Ae0XHM8uhGczuc9bcDs0/qeuty2bxe1rcPcmzN+nmjmqy42eDxtrkMbaS+noKX3vPEdceh0On4J3r6OEBQrUzauoQUerDNgeGIIyGPDuVsmNoVdhc40zfsnpzl1IU4QhmCgGXGrUWU8WyckZFRmHT5xlMSlZXNzij671sUzBQm+c85N1PcPk10lSTSY5P1GjE+UcHnKeKnkppUuHCr2Z26b+GUCFwbAncCyDflay0ksYLwpyt4adRrz7YJO1QvLhUoghBEIoVgcpUa4zG1PC7c2IlUFOJy708HYkmGranB7xqBS0XA0MKAizksfdDKW0segO6Pw80lLPEjXOjHvP7XO9yMkZnoDQjsyXaWiJL6F4qrz4ZViMz/qRfWemQak0s/Gj5ZAhz3zqde34zC30su1hdcX3j7SeO3pgS8EHSyFpWVFViqTU2ddU02bMtzg25HJnK8G3JYfaNmfHarRtweDD9/ikp7iZO5TDkxwfcRnxJH3DoJZqSavJuuS1w8N8Z8oizQve+PgKnXt3qfcLes0JNlvTTB0cZj0qEbaFbRk0712lCdxXTeLKwBUFRVdgjp8irkwa63chL0gMk9NexenVG9yiSbLk4B6cwSkz3jfGSXoZ7sMtXjs2pkcksoTX3IjYMXDXHyGl1JT6SoHjwoXXwLVR9Rbi2vuoHUunba3FvaxHFQb6QLmtGmJsu0rvNedU/a7eD3bAsLel1fWbbdTyvM4Et8HzM/ENUPW/TfNr+4D2C4wvtRC+4oIT218elaVawLgooMgQF7+vwTPoQVnpUsWDO7C1BqtLqKOniLOSXFoMb60Q1ZrEn76HNzJKvNXHaxRIzyf2mgTCxhgagc1VAmkjq4ozVYe+Nc5qd52lTzZwupucH/e5mioediPuDCrqqiIXkkkjQ1Ylg6zcBYAddl6UlawEOayEGALdjzENQBDnej6p5Uqur8U4JrjS4KUJj5ZnIoAwU9zfyghzh+ZWwomGy9WtEiVKeklB2zVJS0XdMrFNqCpFsO2LdajtkBQVtoS0VKAUo75Fa5CTFprUsCO9pBRPZT+eZXyhZuHz4tneF4rn9rmeR+gotgeobal7fNE2Nb5Save17n0NQVp+IWtSGoKXJmv86YMelhRcXYl2wfN5r8uzDFBPhtV3jFaf9PLgJ/N9skJhmwJDCHxbbgOmohPnNCyD0ZrFqG/xuJft3sMQ8Of3Nkg2BQ8rl81UUfYT4ryilxbkpWYyHmzYDNcsplouyyt9VjYGLGwUlKKNaiq+264wX53FarV22bDO6EmMq1v0a0N8uqhQVcmHwmdQxAy1El6qK3xrgNGuE4UR1uwxYjqctl0kAY0Jl7jvEg4STCGIKkmQlUghcKSFQYVbFsReA2/5IdJxtYdha5jyz/8T8fgMnmMi0YxJPr0GBw9DrfYU61H1OvDJe1p7FYU6eR7RbCH8+pMB6W1m425lpjWM6G1qFf3jZ5/WbXw2vgmq/rdofm0f0H6RsaPD5rj6lBUGn8m4fp4FpyxbXzcMwKvpbCsM4NZVWHykab5+E5II2trpgJsf4SFw7ANE3R5OnmF31rjSPkbqWDgjB5gbldReukxjJaIqcjAM6smAetjh9eQDNsrD3KgdoDZ1mq3VFa6ENvP2EKbrUPRjMrQCeWpJbEPiWwZZWfHxSqgVPgzBiRGXe1sxi/2MpX66awh6YcJjrOFgS4NBogWMJ+sWLdfkpUmfkZrFJ6shlgFt
z+TiscMkccKfl5KV1QRLwrBncmHCx7ckj/sJCz3dn5w74PO4lxPnelMe8rSk1mzbplR8pi805JkI9Ca+w9zbCzhhVn6GTPIiUHseUePMuKeHj/cIFT+Pgfj2/U02OgGWIbRNjhC8NWs8BWK2+fn3ejbKSvHOfMCjbeHfA80nElfPvq6dzGvugE9SlFhSD7rbUvDO/IB+UvCol+wSecZqJkeHXbaiAsuAdx4PiAtFaEv+/kRtV6txdZBxYyPif762wVqQc1jZlFlJXJnEaUWSxQjTYHaoRl6WnBuvcbDlsBlk/N9/skgvKoiDIc6ojAqD+2qIE31BLdVEo7ppoMwGqtGCKKGUDRYrSS9TLJsm3sP7GF6CXJ+H6UN4rsc7scPNNRuqjDNmzBuvedjnLjH/xx8Tlwbu/UfY9mHyAqrH97lgCm4nHvnx7+G4B5hrtJEbK5T1Flc2MtIqwFEVc9miLkcmkXbP8GafYj3ieprUtaPCvx17RczF9j4h8my30uPMvYlYWNDD1J+TLX0jVP1v0fzaPqD9ImN7OJJPPwHQdujbJYWd+KoLTpWlVtU2La0akCZauPjGh/qL0e1uW7lLPa8moGwMEY9O4ZkGc/Fj4jzAsyeI22OkTg1XGvQaI4QENOOQuaWPOKMUGAvU2xVy9DJkCaMHj+JsFFxb6ZM2D6CcOv7UBN1M28Rg18jLioOHmwzXXBTw7kLAQj/l/ESNtNBUt6JUKAFFJTg25CEEXJz0cUTF6VrFSsskLB09/C0ES0HG437K24/6hFlFkmtihG0aFKVW63dMwYWJGm/MNKnbclfh3bUMWq5JVlQsDzJGPJMPlvVw7E7/xohDfNdDbG/iHyyFKHRJb8eKZi9YoAQK9cJ5rmfLk3v1ET9YConycpesspdYsRdYdxQ4HFPs6iK2XXN3w35efNlh8FJVWFIwSAsENlLAnz3o76qrzB2sP9U7e9zNONDUsmI7ti8312PyquLRVopt6gy8qBRFVdGLSxb7GWthQc3W+pXLg4xxBR+vhnyyGrHUz7bLwQaD1hinWgLVKVlc6eCLks3S4rHQ7NQRz+TtR306Zci9zRjLMOiWNpk3w4GapGUq8qIkEOJJ33P7e1ULQoxPA4qtFLvISdMtRHcZrxbB5EE4eppAumwuRIh6A4RgUxaEYYI0BIesClk3CIOQXjdkI8x40JFcc4bw+utcKBdIopg4CKl3NoiTnLRxhBoVUa9HHGxSj0MwTK2yPzaJsmz9Pbcd/b0/OKu1WRtNnZntjbs3XtCOMD/72BfE16Xqf5vm1/YB7RcYz9bCX9S4/TILbrfeHgWQxBiNJtX0IcgzxNikPj1lCYQDXeJQQN0nG5nmJ+3TqKiPl6XMOSX1M5cgjbExKF2fq4FD1S+pOYLvvP3HyK0VWvUmzMxoYklZwq2ryOV5ztl1uvUJFmSD+X7M1Oo6M8kWHXsCkQu6dp2m51ABYVbRdCR5oS1dWo7Jw16KKQVJXmJT8KATU3csXKGI3n2bmw/7bGUu7ZFRWo7NbMvhzmbCVMNiNSgQVAR5RSfOGKlpUsFSP8M2LFzTwDOfAMt0ywG2ZZDmA0pV8agreGO7b+QaCuOTK7p8u71hBLmim+hxgX5aEhda5PZZ4d4PlsKnsqlny5E7vaSdf4MGkygvme+l22obBf9/9v7rObIsz+8EP+eeq/26O9yhRWiJ0AqRqgTZ1WT39pDsXpLLITlrs7P/A19oxme+78MqmnG5trazwyFpy+GwuU3RzSZZWZmVGSkiM0MLhIAGHHB99b1nHw6AEJVZVc2qJiur42eWhkTA4e4A7rk/9RXvHarv8+heRgt6loFjCrZe0kX8JmHjPwlq0paClZ4esVrSYGE24MdLfe63Iiq2ZK5u75Ork6LEEHoUbEsHheb0oUCpktZAd72WAWOBw9GGy43lAVmpaA9TshK2w4y6I1nuJSx2Eh7vxFiGwJFaIX+25jA/4XFu3OdBex3PKIkNk0NGwtGJ
Jl7g8sFSnyftmE6qSHJFTIEtYdSzCbaf01WCBxsr9KaPav1Na1cKTUoyt8Lx0QIDQby2zdFwjXf7j5BUdbH37DG3tktW8yrLmYWpFKUrud0vuTYd4NoGyeoSNVUS99p0vWnMJMEzTHIMuk6V2gh4wQloNvQURDUJvSpOluNlNiQmeB5IU+/RvryBuPw24uJ1TW259p1Xuq/9+AVHfb9MIMe3hb/2JqH9Kccrs/BvaNd/1oWnioLy8x/rai3XHKLy0HH9vPDiufNMd2aWA2VOIS0+cOa4L+pUKJg1IRIOwfxFyjThs7RGtNNjrQiYbW9xP4k5v/khdQkcPAKTM+DWoL2tk5pl4YmcFXzuDA0kJkpKvlMsM0igq2xsQ2GJJr6tpbtubYaUKEwDTo97fLURcmbc5fm9RcaznBoSs3mYdneIMYzJbYdzImbTldieS8O3WB8kxGGEpGQ7LklzRSssGHFLWr2MblLgWgZ3tyLSQlGU2k7m7QN69/CjZz3uboV0k4KqbRDnJT84NoJMItRLN4wiDLm9o1jqxqz3MyYDi5rzQpz3ZXHg+XEPBAS74sUv767aUU6YFZiGYJDm+5YqpiEoypLFnZheWrDYjnneSbk0E+y/15cT01uHmzREgr9Lc/hFbGP2kl5R7il7OJRKMczK/W61HefM1uz913CkQZQXmrOXFtRcc/9rRxsecaaYq2vAx5Vp3dV9tDzAkQJhGBwfMXEtk8MNm6SAEU9ioBimikIpDRQZdbgwUeGz9T7rYYkhDCZIcWyDtURgZhmH6ia2KsgKONG0GWbgmII5mTBX7GBJE4qMR+U0khpJUdKPUmSaYPseFcvkWFWgthPeNbexgwAOHIXTF4n+7e8TlQ06SpHVZ0AKTh106XRDWnWHK+eOkibreEGNYvE+GBUWKwFipMm5KOO8ahE0XGTehEFXFz5HZomOzOM9uYt83IdmU3eDm6uwtQ6rzymPzSMbTW3eCa+6cezFLzDq+zYBOX6Z8Sah/SnHz2rXf9qFV6apRjTtqgcA0N7Ss/jhAC69jSh3+S1lAZ9/BIeOQacNh44TXXgbtgsq/YzhYh9hSLwyhyQmChokwiDIQ0TmUg56yCKDLNMHZ3IOzl5B3PsSVatrcpAwSA2b2YOTrG3lEPaQKsHd3uBvFU9p+RNYc+dwm8eoNqps92M+f9bB9SyedTKuzmifrK+We6z2FNulTzDMqaguhhhhJHHJ4x6ZaeFXtDVOmGTMD1c4LUMMx+dfFyOEQrtfr69tU6fAVha5V6MfF9zfCnEtSZxpw8jxwEahLU2GvZSsKMm2QkoFv3mkivXSDSMyHbIy4viovlmfHPPIylcV+78ugby+u6o5cl/fUArBkaZDxdLowdPjPnc2I0oF/bTAMKCfFLuK/OG+wv212YCPnu7QauuO5sKU/xPKIPCi6zMN3fGESUZQvlrtv2406khtrOmZWr/SMQ3i3a9Z8tUR6GBXSLgTFZS7qFNpCL57uKa1C9HJcLJq7yfLTpRzYcrjcMPbfz0FhGmJY0rGKlpT07MMPnw+YDvMeNSKNUdNuAQ1n0MNl5GaR1qU5MuLzKYFyqhy4PAEljQ4t3kfq7eD9/wDPqsdZVgYWLUu6WIfb3aGrz66T56XOI7k0vWLdAPByFcPsbMYEHDtu4iywC4zBv2QgVml4fYIS4PPvnjOAJvlJzZn/IK3tlaQWwq59Ijvb65y1a1B8zrBzn1kkYLjw+whOHQUTpxDlgVBtYKYv4A6efYFD+0P/hlsrkEWa/rNu7/xM/df6tzVV+xkfu74FgE5fpnxJqH9F4if2q6/rh6ya6teGhL+1f8MUajlww+f0vyWLIM41nuyf/n/QR06rvdpYQjPHmp049gkNJpUxkapZBEHnAw1rPGuSJE7Jizew7UcnMk5op0dDsYdJosu9ahLQAGT03DtPYQfvJjzzx6C8Sm8oE6jUWWuSEirNufajwiaI9CD58EUw8SjaGV8x0358pPbbLcl
pjSYnB1HCsGpps3TO1sUWUKEIDctVA4Ig9veNIFdZzMBo50BGTOO4rQIka7L9bjNJ26DXm4iypLRvKDmSYZhQZEXWBVTIyhVxlo/ZbUbMzdic7ommfRNBqnJ5iBjOypox3psdeXEZWSaENQq+FLiyIR+qq1j+mlO07Nf6Yq6Uc7GMGW8YpIU5T7y7WXE4SApmAhMXNMmL0vyQqty+JZkMrC5Mhvw5doQKwJpGFQdSVGoVygMh0ccktx6RRnk6zqvl8WBXRPebd9F5QmF5RCfuYrvWK8kvSQvuTobvKIC8nWSVi8DVu63YkqlWOknTAUWh0bcXRHo+k/s606MeuyEOU3f5K05rfVoS43A3I4yFFrwWBCx0stYG+giox2lJIWWqLq3EdPfGXDA38JqjjKdFSyWLmf9jDLPeaeSYz+9RaFKIr/OxQNNPurAETdHbm5xMl3jsw6MzM0QJjkfPd1BCIFVP85CeQ+ZJfDVxxSX3+Oz0Xn89n0cs6S6s0zTtBhLO7QNH5FY9FdbRDWLoLdJMTZDFIwQxANkbwMsEyh1sVnq0TB/9L9q5HGvg7p4HeFX9GgRKA8cgbVneoKytIgaXn8FJLY3qVGW/UI67tanWutRCMqr72l7mZ8nvkVAjl9mvElo/7XjpQtPmZbml+WZTiLhQHdi4QCmDsDv/A344mONZLRtSFO9L7tzE9aXNZIqL2B8Qu/ZOi2uTY8Rlz7u4csYnRbqoVYokXdusrD+r4m2t3kvS0irDbzZOeTsb2r37C9voCpVOKErTHbn/Ibr8Y4wOD9doIZD/HslMqkxMGyGjWk+dWcJnw9Z3BpyPMs5Elj04pxjgdC7psUOrRQ6Vg2ykty0GWaKL1a6pIaNbwo2BxlTVdgYFDwtS24M68wub3GaPn9ufIeb/gyupah0+xwQMVN1STIR0EkLHoY5SVGiUERZwZOlbXIr40hFcHDqAOv9DKUU/STny/WQ+9sxtjQ41lR891CNcxMe/8/P+6wPUraGGb9x7MURSfOSf3Fvh2edBEMo3jpQ5at1bbPyMnfs1qYmkJdKJ6LxikmRCd45UMU2Dd49WOPCVIWiUEgpCGwNCQeBABQC1zJwlEH/Z+g8viwOXMQR6U6KWfG50SpIn7VxfZ+LU/7XKuLvxeuSVq6h9lUpikKxMUjJCsX6ICXJSpq+ve8d9zIYZrDbadZcSV6q/QT/0VKfO1shalfseKpqMV2zGCQlw1QSZiUKDaRxdgWMG7bAzyNqDqyUNmsRVB2TmbVl0vVtZGuDG6NniDyHpGdhJQNqm0sMpMNNa5LHZQ/VyjlVNxGWjW9LhgiiKCGoBaAgjhKSuaOMbK3yjuhxZOcp082Az1tDdpwplC+olAlFa5s0j/gMnyRTOGaNhcYEctADJfQZffYYVLl7pn3NK4uGKMNA7JKmOXcVnj7QKOTX4hXV/T0Xe4Gm4GyuQqiBSmrPNPRnxLcJyPHLjDcJ7b9yvHLhFQXc+Vx3a3muHxDuQvNtB8P1UN/5C6hPf6Qfu74Mg/7uBT94odS/sgLtNsowMPwKlYvXEdJENce1ntzKMwgHyDQiKBOoVLDHmvD9/w2sPYdP3t8lipaws4Uam0RdfIvY9PCEse/xpawA5XmoqTm8siQLjjLczKm6JsqAwpIcKCJU1eT87AjvP+9xt5MTWJITKiF2THquw9G8Q3WQcq8MaPt6zHVvK2eQaKj4pFchIaVXa+IUGcdrBn7gcmF+lCyKWYwUt7YS7m1FSCnwDIMkK2iFBXUUm8rGi1IGOyEKbTnSjXLW+qk2AkUR7pKG46xktZ+SFlpNpBMV+x1LO8pJ8pIjTYeHrZh2WLCUpbsIznIfTLFHIF/pxjzraJX/olTY0uA3jo1o1KRp0M7y/U4osCXz4x69OMc29efvzY2yvFZ8I2IR2BcHjvICZVmkpkMxiEiMCr7vEhclvaT4WkX8vXgZ8PIyUKawHKKjF5nwTTpJgSkEFUfSjXNaYcZkYPPjpT7d
OMeSAnNXjistSg43HM6N+/SSnJ0ooygUwoA4V+QlzI/7xJnCMGB7mHNu0udZJyHNC6I8pTPoUyFmuLpN1hij4hRkNQ+j/QRndITW6EGG/ghr3hT9UmKN2hyjjzIki1s9dgyXRNmcmJ3DMQXhQLuce6agiGMi6eL6Hl61JJw+QmXQ4ZDykZPTvJU956wdURR97ll1bjKCsmxUkRM0G4SmR3zqDMH5Kxqk9eA2oDSxeu25LiyTWKt4+IEGfaD36eroKV2IVuuvohT3JjVSouLoxZgyzyAc6jWD4k80Ovy2ADl+mfEmof0KxP6FVxSo3W5N+BXUX/0f4JMfgmkh/MoLe4rr34M40mPJD/+9Rk+ZFtgKqlWtEiJNhGXp3dzuISjTVANLhkNYX9EjzDjRFeXoJIyO67GlEBox2e/B1hpFe5tPqsdILBdUyTsNDZkXlQCx67ItH9zizw0fsZLVyasTVByL7168QK8fshgpPt2IuLGsOTXbdoP3DjpcOjDCVysdWErAtplLClJPUHMd1nsJpmHSjTI6mUQpB3OYIaSDmQvWlgY83NaCtXuAk7Veirmr7G4YBsIoMEqDIi/oG5JhAb04pygVWQlWIVjq5niWxJYG7TAHlCYG9wtKS9H0XwAhao5ECuiEOY4UzNVt7rVi2lGOIzXnztsVVh4kOZvDjMWdmE6ieXX3WxHX56p4lrHvlO2YBr8736TumizMBvzoWR+Epg789sT4z5SzkoYed/7waZfHOymLxkFOj0lsV3e+Yvd9v66I/zoyUhq6g27t9BmJE0zf56NWQed5h42B9pBLipLPVwZMVm0Wd2Jc0+D2ZsjmMCNKtYK+2r2vP2xF/I/JJqZh6IS227zMBBYTZkGUC0zD4NSYx7KZcqThcrzpshWmxCOSzlrMuYZLGA7Ig0mcqSq1oMIZnvDJjqIzNs9SrHU9g7jDqO1watTGPXqMBx8uUioHVyiKNONMfxE16MHmCumpS3zUKqBymMpGzJWpCvFqirf6CKkKLdB99iL1PGNQCrLRM7jLj9lJwem1CG0fx9JO2mJXLV+tL+surRLAX/7bsLMBTx8jHEdLXmUpSkpNtxGG7pguLLzaNe1NanY7KlUU2nXj8jvgfAiKfYWRN/HN8Sah/QrF62MCQ0rUd//i1+o8UgkQvS5KCDh4VCefmYOwvaFJmv0eKst0YrRsbQz44z/Wdi6up2HE17+rR5SnLiAOHgFAbW/q0YlCk7Itm0iZRHnJyjBmsLJKvrPIVbFNcOoM1rV39fvJM/zA578XXTrHjlAfqfLFekgnhuVeypGGQ15oGL8j4OqhBqNVl+/7FuHwGVbSoW412Gp6GKZBLy4wS42OnPRNHMsjzTJMx0YovQsqSoVtGeSlIslLDo44VC3B/W3Ic82BilLJ2IjEtGyiQYY0JFBimgLXllimwYG6TSss+HIjxDRgvGJxYarO23M16q65D9X/fG3ITN2mLHWSKBScGnOJ84JHOzEPt7VJ45GmS1EKZms2q/2MME9xTa0rWSi175Tt2waL2zE/fNZjsmIzP+FhGNq3K8rK3a7xJ+Nlqal09zlzBVIaWk3FsLk+EfDFWghC8fnacJ+YvTcavLX5spuzRij+T1+2CNMct1vjd/I+d9MqWaQ0n8wS5EW5PxTtxDnDtNjdEZZEuVY2GWZafDgrNA9wquaglCa3SxTu2nO63RzTNDEOHWW8oqkXp0Y9bm+G3NnQRYrCprv6nBoZp5sm1vGrHD4wzdPgCrdurpPmChm2mPNhuaeIQ0XNMzhje5xJNrhT+JhCUrMmcfshn5UBkarzeMUgNT28CA7bKcl2nyDsQHcXyWs8gr/0tzBsG19a2E93uOXPIHyDU6dOcb4mqDy7i7x3UxefR07pnZllQ54jygIxexjV2tQd114S2u3AjEqgC9YsfQXZuHf2X4fx6wL2+3/mRof/ufEmof2Kxctjgm9yuN1bHKsHtzSpulRw6iycvQK3PnuxoD5+BuW4
cPMjePCVRlglMVCC6WjvpdEGxpHjesQxHKDmDsOBI3rMYTtQFnh+FcN16e/0sNpbPO4XZGlJnRWun+pjVqv7e0DbdZgcrTLMNY9pxDNZ7qWEWcEwy0l7XSqiQN1e4/Gx88zUPYKLV8g/+RFnnZzN9iL2/HkWd2JubUSosuRhUjIdWCAM3pn2KJWiFaZshwWWLPjOoSqlggN1i3/zsEta6BvrdGASBDYLh+q0hhmZUkhZ0A4VdUdiCDBNCQrqjibxGkIwXrH43qE6gSP30YF5UbLYjjEMA6UU7x2s7ROKP1zqYwhY6WXEeYkpJYdGbGwpma1ZGiEKeLbBva2Iy9MVPEvSjXKkIRj3LcKsoBflOinsKpT4tuR1L4Y9xKLms6UcHLFxpIFv6felEFQd/TOlZUnVMbizGe57kClgkOrvfXlUOkwLoqyg6pr0G1N0Zw/BWopQ2kOuFxekhUaLtoYFj3diao7kWNMhzRVKCQ6O2GwMErbDAqUUO2FOXuhi43knpSpLjhYZse1gpgl5kjIQBr4lqTi6gxRCUKJojtY5Evu07BFu9XPc1S6HD8wQF3C3raXJjNxmev0x+aDEswd8WdQZLLao1Uf463mL1KsxNuGQ3l4hSXykaRI1xmnnJkU7pdzc4L16W5+hLNUjVsPFyzQS2Hj6kDOLi/TVKPWZKWKOk2AQ5BnC30UPJrFeDJoW5DkqHL6YXLx0dtXXgDT2uKXAC+mqr4Hx7/37/uPfdGnfGG8S2q9ofB2cH1643iqldEV56LhW6j6/8GL8EUda+dv19HJ6aVGPJS0b6k2YPQi1JmTJqy+6Zx+TJtAY3XedFpbNW8OIB+sQIumZNc6FKyRFnTDNqb/WWQKIfpdBJyS1BKdHfSYDhydbQ5w8IxIm/6fnJqqzge9a/B9PeDxoGyw6Pku9hNFiixhJzTWxDEFrmJIXu/6HEs5OBDimgWkYRFnBqTGfh9sxvVzrOdY8iwrwl0+OUgh4sJ2QlwVhVlIUioZncmW2gr2LMPx0dYgjDTb6WhV+xJUM4pR8kBJnCt8x2UxyhmmJbWo+mZQvup2KbZDlJaVSVB2DtNDIx/cO1bg2W6WX5NzaDKm7kn5SkBaKv3VhjNYw43E7Js5LnnUSHu9EgOBo0+HKTGWXt/bqaHCP86Zdq7WSSlYqLk1VuDBZ2Xf3vrEyYKmbkuQFKN1R9lPtDD7iSZa6inaY4ZqaGC53d3m9OKdimxyYHOFsOaQdZ6z3EvopZEox4kksQ3J5xudBK2amanNmwiPKStYHGU3fIisU/m6yH/V1h3tm0mMQ5wz7Fs0i4VbuMitNXODkmIttCLaG+a51kMK0bR6nDVb7ipOBSSRM+nHOp6tDbXljCLyKT2nUGUk7PDRGUKVNNS6w7t/lVpYgLIvnU6e4MjmNE9oMS0k3hazMcaXgkJmQuRXsyTmK8RluZFUS08X5o/dZGDxF9nYImpPUhEPYD1neHoKq4KYVFhgiHQfRHEMdPwPdjkYeP7qDWn2mkY0v7a9en74AL7ilaJcL4zUVob37wF4By61Pf7o58Jt4k9B+ZeMlOH85HMDOFjjeC4h/v6cT1Z65567Wm5o5qCvCMIRoqBW5DVOPIUcnNU/t7GXEvS8RI03K4UB7oTXH9esePwOwfxhVXKDu3iBPco5FPsaYy4NnHXrCph4NsRfv0q++hWcaSPQBTG7e4J88HDAMUyzb4r87WcG49BaDAtYzkyKOgJJGPGAlq/Dvvtii38kwZYvSHgHTpDPI9hXhG57FqG8y5ttIYVCxJIGtYfMNz2LMt3i8E/OoNSTMFdkwo2IZHB/18GxJnGsdyWHSp2abSEM3TbZpcGszohsXHB5xONJ0mB/z+MPFDl/eeoKtck4FMDh4jI1BRjfOyUo42rCwoiEfbyjSUmAZgr9+bowv1/WIryzhYN1mtZdQc00mA5ulbsqdzRAQBHbIwmxAzdXQ9naUE+UFS50US0JWQJSXtMOMD573CLOSqq0J
3nuct9eFil+G2Q92nQ3OjHt8vqqVY+5sRZwYdchLRZiWnBh16SUFYVbw0XIfKQQHajbdJOe3jzfwdl9vY5DyvJPQbcV4UjEV2FyeqRBlsKdgcmczZNTXRPoLUz4CgW8ZBI7JoYbLUifm4XaMaRhYzVmUVZKGUPMs7u2S4R2pO7wDdYtPVwYMspKNkWk2iIkcm8pOyvdzXVCMV0yiXDFXc6j2qzhxn25ZYNcdBoOYzLIxKhUqyYBwOCTdWGMhGdIyA8qGxVImSDKJaSncrz4G2yY6eo60fgw/jwkf3SEqU4KNFeTaMguGpDV3Co7PE3gW4dxx4qakUnH1mTt7Re+y4wjWV1AzBxA/QxFIDQca7m/sok2H/Z8AfLxc1CqlxbT3RpZfZw78Jt4ktF/d2B1RlMOBhvGC7tRM6wU37cBRhGXtL55f6bCqNf08SQwnz8LxeYQhXySqvedeWkT1e6h6Q7dAaQIC1KV3EHdvovpdWFvCO3keb5CQtFY5G29yNn6ON3OMz9ol6ZNtnK1lFuwhBor2ICMqBfWoS18F9B4sU5k7wrWZCg8NRbKZ8FxpmL3hFExYKcORUdJCIdwAhAZcfOeww92tmCMjNpuDgpmahW9pUMPJUZdhVuCaxi7qLmVjUFAxwbUkp8ZcvF2jysCW3N4M6aclnVjbwmyHBZenfeZqFhVbe6R5psG9VsyznYgkNnAMh9kk4nIgiHOLMkvJkYy3V1m+GdHKfYKjR0mUwLMkf+5onW6U809vb/HP77RIcsXBEZffOFbn7IRPN85392MF7z/rkZWKqi25PF1hc5Cz3k+RhuB40+X2RkixXfIfnvSYrtqA4uyET90z9xGJ3zkkXhEqftng1DQEvaTAsSRnJjw6UUFegGEIlII4L/l0ZYgpBU1XMld3uLMZMUwLeknJ/+HyBLapi4dSaUUU6UhOjXlcmalqEWM7pJ/o8fZkxWY7LNgOC6RhcKTh8sdPemQqxpaCcV8y6mlent6xJXy+MsC1JCOeqfeFSgNnVnspGAadOGeqauNZJlle8tHTFk+3+ngG1CoW8xM+auwYYmoGv5cyaLXx4iHf6d7nVu0woV3Bsk2KiRmEYzI26FONuhytNCGJeHf1M+TGElRH8NIQO08I0wKHAi8JtaVSxUemGWPhDt6zuwyPn8elwPV8zRFLE11Uri7ppNRtw/Tczx4L7jnMl7tomUr1J7/nZY7qcKDP5c8wB/6zHm8S2q9o7I8odrZQsF+Zceay/tpLI4i9xfPLY409i/e9//+JUca5q7rD+/g/wdqyBoKcuwb9jt6fJQnKdiCoQZpibKywAESqhecOkJ2IQZKR2D4V02CYZESWQaVMabgSr4jpY+GpgkZnDfnwS2QxipBNmp7JETFkzhfcdEZ5tNmnUygWRi2oO1yatHmUSAaZVqGouTaBU3JuokLDM/lkZcDtzSGrvZS8VKiypJ0UJLsjuMA2mB9/oayxRxw+1nS4uTrUVitpyZNOgm9JjjQcpDA43bR4f7FNmJd0cknDyLFMiePa2Ct3KYdgCMV6qVgyqzwcCqqP2hwZq/D2XJUoK+klOV9uhHQS7QkWZQWtoQZQrA1SBmmJbWhxYwOBEHB4xOHQiMORhkOYlZwY9fZ9w/ZEf01DaO3B10aQe/B7raLf5/ZmSF6WnJvwuTyl1ep3whzPNDAMqNhyV5qrxBC6W6s7BnlR7lrEGLtE55zJqiaVu6ZuaQ2hR31RWlAquDKthYpNY0Ccl8yPe8yPedxtRfSTgsnA5FjD4cF6ny87JUpETFVNtoc5oGXA3juk93i+JTk55tKNMx7vGOSFAmVwqGqz2o0JXIvB40c830xRCHBdtn3BrG+wU0JSCjxRIhyH4p0fsDBSYzCIud3a5OZOiNNosNDZZCHPiQbLeGNjyPa6hsVvriKf3GNh+SkREm9EIY9chO0tvV+LE2S1xoIbEy19gudYGC3d5RuVALXT0udnfAqSBA6f/LnO956BL/D19i8v
7d2E6+2vAF42B34Tr8ab38qvcAgp9SjQ9XQ3JYROTratsWZfQ5z8Ce7J1wBMAJ0Mt9a1rE5zQkP4i4xiOCSyXDwhkUppH6edLWitI8cmCXpt7el05ATeX/g93IHJUBk47S3cjVUwBPb3/xv+5tPfp520aPT72HOHoFrj3Q//HWXtJIvWKFuHj1EdCzi8/JTcSnmcG9QMiO7cwL3bYeH4Sfqnr7LaS7m/FeKYBpendWfSTwuyQrG9ywsTSmEmsYZpK4OZqo+/mwz2bv51xyTKC5q7AIy8VASOQcOVHG+4+Cb8i/fv8jg0yEuDA6MB456BOV3l3sYQkeccDmxGiwHLqcVakVOzLMaqDhMVk/d3R4Oq1KMhlNqHqrfjjJtrA57uxEzWbNKiZHOQYRgC09AOzQKd5JreC98wFBwacZgKLK2jaBrfqNsYZSXdWFMF8kLxaT7gq3XtFl0owW8creJISTfOGCYlFUvTFNJS4VsGf/NsjbWBJp1XbJOGZ6KKgrA7YDowOThS434r5n4r5N886lKxBXM1h9PjHoYQoAQLswG2afC2ZzJIC6rr0H+0SDEAU5nkQZ1BUhLl+ndfKMGhus103cEzDQZpgSUNAstAOgZTgcnbg0VuD6EclJTShBScsMuWWeX52iI7nskhhli1Q0jToCwyaG0gRY5cXScbn8dvNgibE0R+RjXwCHodOHwCfvzvdXclgNFJZDQgABifg/mLuyP+FG5rdwu58pQgz7WRbmMMXFcXmdW6do0e9qG1Ds8eojaWv1Y/8fVzuOdE/zro6/UCdf/zn1cp5M9ovElov+Kxr+f26fv6RnnrUy2p85L79cvop28SN96fxZuWNhqMIy2RVWvqUePENMVbP+DG/++PSNISZ7nPwm+8C4M+UQFeaxVZ5OBX9U3AsZA/+rdcmz1MpAy8KQ+5KjXZ+1/9Y+zONpNZqsejrTXYnsLOU646Q7LcpFE3iMoclaVY/R0qkSCNY5yywDMLZNjHzlMOjTgYAu63In74rEfTM6nsQvUHaU6aaxPPBoqjvsIuE65M2ER5yR897uCYGkV3ccqnlxS8PVdlkBZ8uNTj89UBz1XK3a2Yk1XBraGkaQsGseLkiMXMaMDl6YAv1wa4tskftQU1q0rfb9KwS1Si+XimYfDhsx5KCMpSK9T7toFjCP53Z0fZjnOedVI6cYkhMxqOROyO/pQq+WojRBpi365mzzdMeTXOjkChtMTVnujxy8aje2LJaV5SKP1RQ+c1vD4tFVkBn69F/N6pEf4f97Y1XN8UHKg71D2TKCv4dC3k1JhHXsD3j9SwhCK/+TG32rCSOqSNKY2+Q3dPvUTxoKX3j9dmA/1aLxG2pRBca0hCd4A0qrzfLrEoiXOBKRRhnODZFsv9lJ24oFCK+62IolTM1Gzm6g4jpDTWO7w7OsJOe0jtxh+y7J5lWziMNhyuDZ/RtyY4ZYW4sk84d4KqyAh6fXBdvK8+xclcQreKc+w0vtjWCci09JucPQyUOqHZLvTautPyKrD8FJFnesz/nb+gNVUNQ3dszxc12OrUeTh9AWOvaNzZQt2/9YIc/dN2YrvvYf81XgN9vaLr+meMHP2LxJuE9i0IkaUgjBdQ4TjSMOA9+P6XN3R1+JKF+yvxktEo97+CfldrPk7N6Xl/YxQaY8SFIhmdwm9vEOYlgwd3uWtOk6gMJ+uzsLGOlAJmDumOLtU7nyAcaMRkFIIw9fO3t/T4ZXQCxsbh6Gl4eJdgdZF6cJTI8vBci0vVmKi1xkXXwd7ZxAs7SNuGY6fwAh+/H7ETZWwOMqSA9e0Bf+GQx3bNYrVvM0gLRhyDC2mH65WYZVEhwuR+K0IpRd21mK5a/Hipi8gyLNcGISmUICsFhxo2z9sp0nZBaBShEgbCNPWYT4FlSloThyiyPs0xHzMpmKnaHDENXEtybMTh/ec9hIIo1Q7TddehF2c87SbshAVJUTBds/BMg8MNF0OEyFJRSi0TpRNLqe1ZKPnR
sz7VqiKPQy5O+fzoWZ9cFTzdSTnSdAhsc58gvSctle+iCquFpOFInu4M6WWKrFS0Bgn/1xurrPULbEvgS0HDs+gnBdIQZGXJiG9rft+uIkUUp6RmlWNlQuiDU/F4vBMjhKIf55iGQScO6cY5x0c9vnOo9ooQsm0ornk23zV6iJGAzkTAxiDjXLjGelrgS5OqPUInKYjTHJFlmJbFTNXh8pRH/f49Bqtr3FZDskoNv36cv5Yvs7a6xVpxhFRI1pRPohwatsc7x0YJbInx1Sqq30U2x1mYbRAZNpUJB+PAVdRH/wEe34OPfwhFBihNXzFX9OezRzSAqiy0ssceZ6w5Dn4F1RzXqjxnLmkE8F5hCah6E9aXNTnacSjnL2G4xYvz+PJOrLOjz/ZI84V4MPyZFBT+ZcabhPZtiNc4LMqytaZjmugRxbPHGs9elvsW7i9zXPa/v9fRS2jX08ns4BEwjP39nGcKHAPCJMPxPBCCZHwGf+UJoV0hGmwQGKaGGlfreo/36I62l2lOgGVBnuoqNkmgzLWQslOBiWm4/DayLFgQkmTCwa9XKSff427kkiQ5VTnCxekKZAni9AUMQ3CtARtVl+VuTLm5xkZnyM0n22yNHODE5DSP2ilj8Q4Nu2RMFlROHOLjbqn1GtMSQ+SMeQZi5SkBGTvKQs0doe5o5+UwKfEsgSENmo0q3ShjEJfcbcU8bqeEWUlgSy5OBWzHBWGmcExNKai5JsNUdxfDJN+F1Bc4piTOc6TQJOnDTUlegG0KBAKDArvdosgLzlUVpn2cKNNQ/DQv+Wh5wMNWzFguqRkZ//Zhh6VeQj8tqFiCXiw5twsyibOSbpIjhMAQirxQWEJQbK3xG2bClmNzkyb9TEtgFWj5Kd80ODHm8Hg7ZXuYshNqY8wLUxWtjGJ62I7N052CPi61XPA3D9a4PlelNUj5Z7d3aIcpg6xgtGIyVbV0Mi4UUa53mXEB8ZmrBGXKd02H95cH7PRjHoRwumoh8oThMMa0LGprT1gflOTSJmgcoVamfNiRRBMXWOlnnDs2Q7zd4uMcsokGlWMnONhb4n2rybKUGGWT8zstjMlxxMXrqF5X78ZWnhK4HsL5rj4PD+/C9jq0NmFkVE89xmd0gZckujvzDoNhvHLeRBzpHVYcoap1jW40rV23aZ209lDGCAFPHsKXH2vX7L1O6+VzXNF2QT8hHvxnUFD4lxlvEtq3IH6CwxK/5OO109LdUmnuQ4Bf8U8DOH4GcWFBy/NsruoK1fXg6nuI1ua+MLKhShaqOZHs4uUdMCo4ZU4oTBzXxtsJYeYYqHE4dloLp0YDrR5uCKhPQVCn6LaJcoXn+cjpOfjeb6EqNYaWjxf1kIGnIc/RkNCwyY7MUylS0q1Ntos2YzUf6XqUn/+YctDHc2ucrh9me32ICneYyLbYwKdXbTLlgRWVDAyH//fSkNnufVZFQDA9RakUo57J9YbB589yti0HP09Y7ISkwmSubvMbh+s86yZ0koK6Z7MZ5kQ5POkkND3Ng7vfihkmOccqiplmlemay61dRODTdsJGP6PiGBxsuDzcVczPiwJpGdzbCml4Fn/z/Ng+4fveSocLTkzXd5iXfUKrJLIstoYJHz8Z8jxUeJZJL8rYiGIQik5cEqU5WWGwNRxyc02DW0Z9C9cS5KVipatNNY9VDWZkxpUxSTIcsFSMIoRBnBlklEig7griTGGbAoQmZce5Is01CbwUBq3D50gGG9iOR1ZqIeTAkRS+RcM3WO4pohz6cU7VfiGp9byTEmY5pjB4e66K8AKypMA0DM7P1mkPtzgiBoxUJR+XBSrKMDaWOIhkgGQ4HOf93Odh4iBVTmY6dKwKzd/6K9y7u4QZDyiLnHHDpCxK7DQm7XWJNp6iPAl/5W9j2Dbl7OEXmohxpF3d21uwua6Tlx9DbUQXYWUJQVVPMqSBuPrevuK9+vIG6uUJyOW3dXJ8cAvufE5pWoiT5/ZRxqrb0V1eUHtFeu6b
QFuvrAr+DAoK/zLjTUL7lsQrHJbXKz3X1fYxu9pyxNFPcFxEliJeO+RGWcDF6y8O55cfI9eWCE6fgXtfQJawkCwTWQM8s42sN+DgSdha0XuIwNP0gME9rVbSGKVwPW6MnSFxhzhScmUkIA5Guf28R9pROIXLglciv7wBeaZtbMbmCYXD+sgc3XKMetXl6mBI+OABt0WdTEVYlw7xTjXjq+UdQmmBoRGCjmOzXFisdnO61PXIM01oGDnPI4UQOf+3uxFB7JCFBacrJhN1j4pjAQrPliAEk4HN4+2IrFDYEkwDqpZBPynI8pzi+RJ3Qxj6guWjR3j7QJ12lPNwOyZPC4apomprlZC6IyiUhrtbu95jUV7yoBUT5QXPw5IDhoWbJfyzfo37n7cpShgrhvxmoP3pxo8dpFoLCIcly92MugtV26FQBVtDzc/rJAW5gunA4ruHavyTrtaHvLVTMOmYWHGfL4sKw0LQT0uanoGZxIybBd2OyXNf6z2WSq+Rmr4kLUt2hhm3NocMFh+z1TeYdnowMkae5/x4M6QVZkQpHKpb2JQcaFY4P1XZJ37P1iyedUqSvOTDpT5XZwNsQ+8Xh1HCxsgM9z2l9UQ7zwmMgm1lsKgqdJRDvJIwUYOBV2N9WDDiWZwyFJfGfT5drmLUauRpxkg95PBKh55hU8t2GAuEvva7O9Ac36ew7CcPpfR1a0h9bsYn4OhZTWt5dFd3Vq4H85d0x2UHutPbKwyThHL6IMbUzL7c294YXw37iGodNX8JPvtAi3s/vI06fkYnuZfBHl8D2vq6c/4m/uTxJqF9C+N1hX6VZ/tJag/W+00cl1cO+V4VuHc4ayOotSVot8CyESNNZBQSTE/D4y6MT8LmspbGklIbgGYpas9ixvWIbvyYxHTwq4pBfZIPLn+XbC3i+YNFznWWiaVNBHiuS1xr4vbbXDsGm8pmqTtgrYDlKCYalGRqlCfK5zBDskJxZ/YCInFIs5Ijrstm1aMdFVCtYZQFRmfAsLvDSfoc68GKfwDPliz3ElZEBcNQPIkNTrUTfLtgftyj4eldWTvM8CyBKTTqsOqY/ObxEc5OVPiHH/d53jGJkFhGTLo+AAzOTfpkhQaBnBh1+fOH66z0E75ajyhUySAt8Syt5h9nJVGutRnHA5sTh+f5aqXL0+WY9jDDBDYLaBkO83bMuXGT6SMT/F/+eIdBqvl2vzvf4P/88Tq9ONPoTdvAMjQ1Ye99jPomG4OUZHKWjy2IMJkaZIz4JQ1Z8DDsoYRJWRScGzEpbJcz4x6PdmIW2zHL3ZRnnRZhlOBHMGoJKp016uWAtc/XuOXMoYTgq40BVpZgU3Iu28I/XiPvR+SGRuBmhaLiSB7tRCR5yfogYWx7BVEUzFoGFXeCQZEhHZcwjrApEeGQwjJwoj5l1sMpFKOYzFQrZM+e8snaEuZAseI0manZLM5c4m9YX9DLChp3HmOnVT0y3DPDfE2ZQwmhUYtJBFlO8Tghiku8OzeReapBUpWq9iO0HcTJczoRlSW0NvS5+MN/QXn2sp547I3xQQsYRCFsrWo08vxFVK+jOze+AezxWvws5/o38bPjTUL7lsa+PtxwoA9Jnr2SpL6R4/L6IR8O9E7OdjQJ+/gZOD6vK9Yk1lVrnoFj6zHKsK+rXKXg5o/14bWdfe06Lxng1GuEqoEYGYU0oe5BWSg2IhjrPMFe2eHGzGWStRhHlCx4X+EpiepWoCgpmpOkpkMwNcX9LYun5hiVzYJ3DhrUT59BdHoIP2BOSmZrCqVc/tOtJQKR01MW1uQsrSTDdfY0CksGsQZ7mIYeqY16kvkxT6ML0UTj1W6KKQ18BDM1m2NNjxJoVGwmUsWTYU67tGg4Nlmp+GJ9yHaYkRZwuGEz6ltshBmWKZDKYNYzmalajLgaBv+0ra1k9pTtTc/BNmPCTCEFSGFwlB6nGibOSECal2Sl3tkpVfLHT7sUwERgMUwLRn1rXy1kM8wwDeglBQaCXgau
baJK2OillAiqVYvv1wu+GiiwTJ6GcLYqGXEMLtcFaW5TAj962iXMSsrYYUSlJNh8FDeoxDmhG+LZmujsliWeLRgmOfmnH/B55JIYFvahYxwbdRkkBVu5FkB+uh0Rx4BhM7OzRBJGuGGHhdmAIhDY0sHciPlCGDhlyGkjJ6tWedLPEGmCU+YYtscJsUNkNjhZNcgRqKvvMV2mlG+/p0nNjvvC8V0YRC9ZHnHqgr5m3YBCDbhRO0aS+DhpyELvGXLQhYPH9DX/4LZW0BkZhdnDFN0O0cgUnu0i+12Nepy/pF/Tq+hktvpcJ7/1Za3Ys2cR83WqP83xV5LWT3OufxM/f7xJaN/SeOUAmJYmXL+UuISU+264PwHr34P6v3yA9nQb98RUL7+td3WGhN//n2BjVXuv1Ub0wvvQUcDQav67iKzSkMi15yxsrREZDrY5yWdbFkNhYxtaGR9D32SSLMNvjjEMGrR62zSKkHPNE2y1+1RGJNKz2Zg8jBp08So2SaGIsxxr6Tl2nnDaTTEvvUUt8Ohsd+nZCXbV5OlGipMpMtfnN080iUpQlDxQ0E9yaq5Ja6gRk5+vD7k8XSErFSOeRa6U1losFQ+3Y/7l/R0uTlXwbJO4OcnhkYxSmiRZwbOtPtMjHjM1h6xUJJnig+U+K70Ux9TCx1OBxdWZgLGKRZhkTFolSWAiDYN+XLAV5tRdk4qVMltzGCSSxdGjDBo+80mJbWpemFKw1s8YpjlpmtMtFYYwGKvYeiRaKDaG+mczpSZGP26FPGoJpl1B0zc5NV4hLxTHjsxz/94206ZJWiiiJOfTj76iSDOWC4dFY4S1QUrNkdRHAqpSUQwERawwbWMfuGJJwbAwGCfDESXtMCcxbfwyZRBFGIbAkRZSlOx0+7qDNgzWw4IEn528zrTr4vojvHV6Du58jrm5znF6OPUp3q6DWfRYqLhwegr33hq3nj0mjBLqxoAiDPBsA3f2GqISYBQFavWZBkrZDvmZK3yy1CUxLBxTcK0OxqM7kGaQRkSGRYLEDzuESmpu3DDUmoyDXT6ZIaC1STFziBv+YRJCnDJgYXMdef+WpqjMHNTn5+hpQBOtS+Mg4sQZxG7S2lsRvKL643qol4jSrzvXv0E4/ufFm4T2bY2XDgBR+Ap8+OX4xsrvJSi/6nW0q+5LCXDfwiaOKGcPayTY4n04fV6DUM5eQSze3weUqDiCLz6CPEGOjRM0J8Gv8FbDYqsXweRRAjMkDLuQaTHggeWy1EtRYYinCt4dfkpvfA7/8AiGbbPatfliPaKdFlCCXeSck33ubfX5Kopw4s+59v3r3B0otoVFGZU49QaD6Qk2UkGy2sO0TBylODvhstTNuDjtsTUo8G3Jw1YEqH2DTMeUODIHAyqmIM11x/O7p5tkpdaDvLnWx1haJEtyVGiSBjMotBJZ1TZAlfQTbe1SdVzGKhYiz3A+f5/qwEHkHmvuKElhYhkGF6cD+kmJb0s8SzLeDLizGXJzPcJx+mwMEiwpEaqk0e8gspIYg4nZSYaZTiwVS7I9zDANPXJ8shPTiQusNKY9yGlY8EgYWKaBIWA7BZUVWsnEj+lE0FEeaZoTmRrGP8gVxyoWjmmwVjaI85QVZTBpSoQQHGo6rPdtLo2bVHZWaWw8xabHcGwWlp9oEWSpWIpdjP46s9g0mg2yyVGMnoGRlhAO6W8VxOUOzF8hr3QYNw0iaZPPVHCyiDogKg7liXmuP/n/0mmt8d7OOunELF6jgfospXt2AW9nBdne1mLBUUj4yQfEoYdnG4SFIDTaBMsPNdq2LPAGfRzLIPRGcZIhXm5rpY9z1+DIKV3A7SIYo0KRHjyOH/cJR6eIkjWqlrFvxEmeaQNO19tX9RCvdWAcP6Ph/uwmveFAc0uFsV9QvkE4/uLxJqF9W+Plqk8IvfP6usd9U+Xnehp2fP8rAJRXoTh5VvugPb67q+ko4NLbeqkNsJvwRLWOqNYpzi8w3N7G++pj5K1/DDub
WjbL9cAwYXQC0d1h1K/iVyqEc8dx2lsEyZAFq0fLn4RoQKAywslDxO07BEUCtz5BnDzHdFDhO4drfLEeMuqZ2K5FVpZkrQ18QxBubbKz0yMvDc4dm6EdF5iuyyArWXqyQmhkqOGAkw0HZVmcOTDL2bk6NzdjHrYiKrZEIDg76RNn2l/tScfmyXbEVljwtBMRJQm/cahKs2JTlApP5cRZju/ZFGnC4QqYrstyL+XeVsR2VFB1DPICSqV0cfDpj5CL93nLq3CgeZBP6xPU6x6PtmMetmJGXMlczWaiajNMd73EwpwiDGkNMk6MWqSpYlQkxJaLzHKiOOXUdA3XNLjfijENwXaYU3clhhBMuoKdqGRoSM6JCMOByVGPnUFKzVTYtomUBoZj0S8lHjmJYZArLQat9Rctap6kLOHqXJWHrZjzUx4/fj4gShVHGy7fmfOoxRGcPs/8TgfUDt7WMz6LXDp2FUUFV2bMGRlny5QnE7PcdScxtvsYZkbdM/Hynlb4cByGaYprgpuEqEe3EXmmJdiOnEICQX8L0gh7c5lifJwbPZPk9/8NTmebhZ17yCMnYOYgnuXgujbhYIAjFP5kDdZMff0bBvL4aRaWnxEZMZ6dICdOaLTu9jrURzQFJRyA62sD2CQmcgLcsUn8dhuVvGrEKSrB1yIUyzTVDvNK6US1l7SEAKX2uaUiS98gHH8J8SahfUvjpymIvBJf48O09/3i5DkNRw5qWuLqwVe7AnVSy/7EIaDg6ncwsvQV48FSGHy81CH+4H2cjWUWhk+RQVVz06Zm4cDRXYJqiQwHXBt+TBQleLSR8xcgHDK2dAfPmCTsD3GqXSrSILNtvb8Y9jGqdb579hpSCBTgmdAgw8kzQtvDoWTEVNjLj4iTAkcaMDlHTUE7FbSwkarCSVHw9tYtPHObbDDCO+euABqi7pjGvoBxzbE4OWow7pk87URUuy2Sfsb2Zxukl64gDMmVgw2SDhRJn5uywmi9QicpmQ5spCFIN4e0Yy3HdXMt5FwA1VIReTXsaMCzzOJ2p6BsDxivmKz1EwZpybNOyg+OjfDuAS38e39rR49nsxJpKKbqDicTwdpOTg+LJIbTwJlxn3aYszHUBGopBDM1iyg12O6GyLLgdu4wkZasLg8wdzZw8pJYGswfm+PtMclye4x+GOF6HmekJMm1lmOrH5KVLrYpsA2D8YpNviusUXUlJQLpexSWwwctRUkdTxUsWBYLm4vsTBzks7jKcl7FyxO+wzKTvTrnlKTwI+TKM4LmQbArfNJKKR4/ROQ5l1u3MGZmNRDj1HlIYoSUmCdOkTx9pDmQeUE0Ok2SK/wsJjQdomqDIAxBKeTGMpemDNo1h1oZEw5LvBPnkEdPwr0voSiQ21sEcQSWBLcC+W4RN+zrTs714MlDZBKxUAyJrv4GwUgV45BOPC+fh/1z97oyyKfvw+P7uoObmkOcu/qKFuvL5/INwvEXjzcJ7VscX6cg8rUw4G+o/EQlgGpdK+qXuVZMQOkuK030viyONW+mVtcjmEx/b5SVxJ02/tpTwhyiTpdgclbrPE4fZN+jZWsDFu8iw5BgdEI//8xBcD2kIVmQbaKqwLt4HPOrFtmD21og+dQ5SBPsPOF7R+oMwxh3cwm5vMiC2CYKS7yz72I6Fgv2kKjiYT34gs+SkB1sEBWUAikgz3JQis+KGkm7xHq+Qykd0Hq/AEhDsDAXMEgK0qKk81VI2GkjKPnkacS6WsNyXc6M+1y/dA3iCHenYJgpTAGmZRBmJY5pkA0ykgJKlfNpu8DPqsSBS+wL4skjTKdazqrmGCx3E0oFUVZwe7WLUCWnx3yqrqSfgYEiThUXDwZMjF2k+GKDNNJAi6RQfLkesjbIWOslpIVic5hhS8GxUYeduMqsL1kJC4pSEIcRVl7g2xZLYYn54BnqaUbSTmg7I4heyNvHxhiWJmq7xbCvMLtwev4g1+ZqeKZBa5ix0k2R0qAsS3pxzifuIR76GZ5r
M95ZZTB5iHoSEucCR2WMO4qhadKeOclMd5u6NDW0voyhyBgcP0/y4y8IVp8Sttuk3cfY2+tQq6PaLQ0u2nOSGJuEzjZMz+H5Hs7YScL2Fs6ghZcn4I8hmmPkrs9n1WNEm5ssRRYHnBx/dp63GiMYb/85LVOVxLDyVBdwWQrDIXR2NLDjxFktb5WncOsTZJ4TtDcRf+1/QNj21xpx/kTEkZ54+BV9poTY33N/kxbrm/jF4k1C+zbHN3Rfr8c3VX77Xd5OS49bFu/qmf6RU/qAb6xANERVqpSX3nrFYNA9v4BjCELDxrFyvKoP3/8t6HX0jcDaRU0+f6QPcxxCFsPEDMxf1LsOx0UO+wSVKqLRxDx0HBoTGmHZ60J9RCMwu20qf/i/QL8PK0+RR04RUMKFq4hKgHQcKv02oFgYPmUjUTz3z7LpjKAMH3fGgbuLJOtruKbk8+AgBSUNz8Q1tUknAtKs5Iv1PkaecahisEXEVzS4wwjWTsqJCe0XFpeCoFrlqqclqgpK7rVipqsWpiEYr9i0wozDIzaDtKTbOEh7GPN8oBCbEUZZMl5zmKxUWLQEnbAgjFI2soj19oCHE3XNpfN8hLIwpCZO+46F5zmUYax3elJgGHBx2ictSkZKhWdJirJkKnD4dCXkWT8nKUqS9TUasmA4DOmoKp5hYAu4mzisKUgLg2bSI3w2YEO5hIVBJkwumCHvjJqUQqd+1zQ40nRIcsXmIOGrz++yHGrLnidRg6HdpBaGXB6d5Q+3fZ5XRglLyawVsri6ySTbSFFqUn4lANvBG3ZxWmuEYYQz7GJLwWBzAy8MkY6LeucHiDiieHhPX+dKwdFTSKVYWPucaLqKna8RHbuO12shhwMiTBLDxFQFseFiFglJHO/rX6p6U8Pw29v6mh+b1GNGx4XZw4jTF1BpCp+8DytL4HkwMaUT8fjUz30+heuhpuZ08XT1vVcLyjcd2S893iS0b3H8tO7r5wlVFDpJxZGuVCfnNHJr9ZledG9vaYmsxXuog8cQL+3iZBpz/fQM4b0qXm8bOXUWxqcQne19fTp1+IQeYw76ulItSjh0AtEcw7DtfSTl/hjUcbSljWWBbaPmLyFufapHT8+f6BuKX4GRBsXkHLHp4Ychxp4kkWnCk0dU7ArnoucMwhVsAd8JasjZWdykQicHi5LAtRkkOWVc8OnzjC/WBmxHOUY05IiV0SslfW+GfiKwpKSbKp51UlxTkBYlRWloqSeheNZOWe6m5GVJUcL5KY/7LUGYlTzrxGyHGWWhcCyD42WftCiZEcD4Ea4fqNEfCfn48RBDSsJMYSw/YkyVqL5P3pxhxDUpSsVaP2Wupnd5YVpiILClQVYors4EJEWJUrDcjYnDiLot8G1Jf72FyDqkhuRc0yKbHONxYjLc3KJbKCpCERUlDWKqVsB2UjJhKuJScWUEvugphlsdHm0NwDQxDMlszeaAA1465Kmo0Yz7TJQJF+2QdOU5a1WTOLUZ9wxWCovx6RmyoSKenKMSdnXRU6kiXA/peVwRbdojJrVWi88mz5Nst3AaTRa2lpHtFsqQqLLUozrb0dfN5ioyz/Esm4+McQbGJEG9yVtJB08YOBvLRBi4nU1y1ccrc+xDdZTp6MQ0c1BzKuMIzlyBu5/pKcVeYZjEu7sva7fbEtrxHX4uztje+RRvurD/YvEmoX3L4+uqvJ+boLkHGJESFceAgmxX/duQUBZasLVAUwN2u8E9DTvT9Qh+97+Fj/6D/vrjey8oArYDa0u6U/MqMHsApKXHObv7vlIYDAwXMkVgS+yzl2FtVe/0Bj3N8UkTfRPZWt917XYpLr3HJ2mF5OPbWEXG2bqgevka6up3uRH5JEmG1d7guugjpUBmDtJzWTATBtLl9nhVSzwtPedU1Of/vmqzjU1SGkwbETsyoJOVyEqFUEHDl1RzwYG6xXI35YNHW1QqHguzVeIsJ44TPBPWBxkGBos7CSdHXXpxxmY3hzjCKBVOVhJa
OTXHYtqIiLIMy3Zp1HyO+4owy3iiBDulzUEz4zfLp/yBM0kvzvi4E1EWiscd7V7c9C1MKZgf88hKRcPTR3lnmPBO+x4bOxkf9Wt03Qqd0uCqSDGLmIu2j3dinPJplzVzgs52yIyjGE0S/vLmbSZLF9ec5tbBK7hCcXvMJxqkPF9cYTkWOKZgbm4CoUpYX+FOO8UcPqWWtZlQOWljDCdPmCbB8ieJ/SqBYZJLGzwPN+nDxrLW/hQCzl0lV4IPJi+i4hjxzjwqqBJ8/j6hgkhYePduEcUJphAwOQtHT2m+mG3D6nO6ScGXsok1VCAtzm5vUrcUC8UK0Tu/xXtskj5cxNu4iezcpzxxVhdse7D7ShUe3dbJTCmIQtStT/WI3bTA88HM4dBxVFFQrCzB4j2QUo9Bfwpn7E0X9l823iS0X7P4ExE090aW4VCPClWhkV2OAxj65lHmYLnw9AFcWNDJ6s7n+sC7HuL4GY1CE0JLBE3O6v3ZyfOIR7dRJ8/Dk/uwleubyMlzqDiiDEM+3lHc2QoBwfy4x+9cOKBvLg9vQ15AlqGCmq6mxyZAmlCkRA9ukUxdxC0zbuUeg3ZM/ekOZ+aapIdO4UZ9dgyXmztryFLhiDGuX76IWWSMuB7vCIOw28dtDdjITMgLHFkQGZJGEXOsSFhLQNoFo7bJW2cO8/FqhCEURbfD6jAjE5IHW5McHqxiR4qzjmC1NsP5qQqDtOTihMvHNx5Tdg3sPMf3XcZFzmE7Z7uEISa+obg049HPYWH6LGs7A+62Uyq3b/C4cPnSmWQtFowmLaIE7oVdjNExDMOg6UssQ3B7K2SYllRsAykEcRhS7ghyK0ApRS8tCZXJR+YUc/k2C2lC+vv/jI+d8xRC0pMeF2XORL1kpgfywCHOGy5R3aRW8/lqPeT59pDOEBxDYCYpaRhTq0lOuyH5aJV6sk7s1jjfe4xMEryGDdMH+V23wscjTVZ6KeQF9sGjWj/x2UNdoGQp+elLfNCRPKjMUamWzDQ8RFEQ1sdw4iF2tcGNe8skQuKbgiuXTiBdXxc9G6ukac5Hcpp1Zwy7LBgXpbaASVIkJUHVA8vAjrq6sBr0dEKdOQSjk3DohL62F+9rG6W9rqze0OPxs1f010dGtTrOP/9/aQL1sA9nLqNmD/2ETcyb+K8XbxLar1v8DILm1xoI7rpiC8tCJQnMHYCVZZ0UV5/DqXMagBJHWuPxyQNtpTE1t2s//1xbxvQ6eiSUxGA5KMfVXVW1rhPSk4dw+3PwfIanr9GJc4pCIYWi3w8J40R7tbW3tbbkxjJMH4Af/K4eO64tgVPBi4fYRUY7F6gso14zSQwLVRZYK4vc7gtSLMxDFznftIiTnCgvqQX69yAB37UgiRhFctgUdFXG8bzLXzNXqHkNPo4K2t4IIyomDBMEUKQ5UzIlkxZWmTMYDimyjEOW4IBM8AJBnGsO2r21Hk5ZcCAwmepsYJgNHNelduAwlazgTH+J5vY9PluvEM8e41k3Y6JispMZGBfeJtnssB6M0F7tQAprhcl2qKgYEQcaPklW0lY5q70USxokWcls3WKjrxgkDoQlMw7sFAaVwGGYmyznEf9ss2RuKIjHBb4sCWzBYRlzcPOhHg2vPic4cY561aOXlJRlyYGih5mH1HsbnFR93ioK6pf/BmJgU4sLYq+OU2YEjQby8DGKS+/w6eqAxLCoqJKDvTUaxZA48YnOHyFAwNYa5DnR7S9Q05cIHO1cYCh4Z/UT0nILLzCJ/EmS9ha+pYgLi+g//FsCpSH+xexRPkxqPB2AVxb4jskxOSQIC60TKQx4eAeufQ+Wnmpn9niox52PH2gko707Neh3od8Dy9QI3XCoC7Pv/AUIAj0yTzVACsfThV9nG+YOveGM/QrFm4T2LY+fSFA/BSjyjd3bntfT7l6CIteAkBNn9Y1n0NNJCXT1uo/a2gWW7FlmPLitH1tvaETXyXOoLNWUgPUV/f3REPod7D/4x6wd/x1W
Owky7HPCH1I+/U8aWfn8sVbxr9R0V5cM4a/8d9ph+NZnyHtfsLB4j8HZ69yWPvHsaTzLJChTzpohg0qVaplwWwh2bn6BU8YkD0Ky3/0bWK5Dmabwr/5niEJsKflb3/sdWg8e4MYx9U6uNSt31lBxROw5POuVuJZBKkx+b6Lgdq/gQeqwFAp2etpU9ImRobwMI4S5us1qr+ScbXKchPkj4zROnOCzW4tEz5/hGQVjXkLoBrR7JYQxT9s5aaFQqiQsJGupRbjr1O1mBROqxHcl6zls9FPCrMA1BZ1ByoGqhWGaPGsnrPZThlSputBwDeoJGKqkU4AVx5hFxDPlI/OUUDg44ZAflQ6flAc4c/Y8b8k25qlzLHgVBr0hXmZwd6NgXEWc6C/zvfQ5duTAh38E3/st3sq1G4OXa0kvXI9kGJMYFr5j0u/2sbeW6QpJoFp4l4/CoWNaT7E2gifBI2e2ZiNQvL1+E3vlCfagC0ENL9nASSLCWOBVA7zWyq5NUUE0cQiloOrZkMNRK+Y71QRpTWtNRQU8vKWh/wePQXdbS7g1xyCKYObArmPEUCvtN0b152NT4LqIq+/pXe/17+8r4WiX97Yeg5++iLj6nW92m34T/8XjTUL7Fsc+abModi0vvoNh298MFPmG7u0nOjXPR22s6OR0/Azi5LkXCuH7qC2xj9pSpqW7qoPH9FjQlPtkUxFHlJff1onp8V3tQ1UfIS1KDrUWOQxEYZvzdR+1vKSrXsOAUr8W3Y72sKqvw5nLsPQE/Apy6Qn1POSaJ+nWJc1mBSkUgWdTixMSw+KUlxOmIfftMRajCmfur/Lu+cMa1dnv6uT57DF89Mc8SKqoqQN4Iudkd8D9+hFkc4xISIQCUwgsU+LNX+ZaHDNcT7F6MWFmEoYJZaFQrQ2ixhS2dMlLxfbEISqiYPToKHaesGAPiB0Tt0hBSL7cKfgk8ohWYuKiZKZustLLeLwTE+cFEoPpvMeJZIUtZdM3Jsl9k+maw0o3pdHfQfU6jGwPGZmcJJ45zIPtmH5aYMQJY1HCTDRA+VVmKDGcklxVcBjwA9nimT1KKzdpmz7CyBgPM7pjNewwxvvyE6qGYMF0SGxBonq4piDqZciyRC4/gU/ex7h4napvI6S3XzC5SYKdVhjMHtMC9xRk7EqyGRKx8F3Urnmq6XosHG4S5SVubxtDFfteYYxPIVeXWRg8JRIWlbE5xGpbcy0sG+/cJfy+5IAwEQLeHTOxPBd144fa/cEwQEidsFae6n/r7GiX9jTVhVO/B+eu6MnCgaP6zOxe7y/LyFEJkED5u/97bdlk7ar2xxFqL6H9KWkxvkmUP3+8SWjf0tgnbT66q2/O1REUAnX9e9+8iP4p3dt+p+Z6+yLFrx9s4CdQW2Wa6hHk0qK+KSx8Ty/tm2MvdOxcD4Y5zF+Gp/ehKPCkgWcapLZHo71O8B//iCzq65vO7GGYmIDGuF7Kb2/oTvHwSd0phgNIYoq1ZT5z5kgnC7x4yMJcgHnpOgv9PmGWk967wz9JHbYSrXc5vbbF8NgUleePKXpdos0Wdjzgg7jC/cik0s+YnThIMn8EsaI5fnaacnjURwlJ1Za6G/BsxgZ91vspnSihjHMSISmHJYUV88EzxURgstLPOTbq8sl6xJmGibvyHJVm4FhEP/ir9J/sIKTALMDfFUkuy5IkL+j0E+pGjpNs8wNzA6kULZnzZHaazHFRWcZKRyFw8Ch5q+zwv7YiyrLENgR2USKkwWXVou/7XHNDZFNS9Hvc6W9zPztMkvXpmSMYZs62XWepWuFfbnc4+PCHOJ1NztYlHDqKPHCGplTcosLQ9KknXRZ6PeSNH6KyRO+X9oqoNEG6LpfjbT5M5khNh+X6Ec7JPrHlEeWKqpSI69/fN6A1VEnl7qd6QrC+rHdXzXE4fha+/BQZhwQCRMdHCaWnAdU60nW4PtHQzgOWFiBWRYE6dUFfk0mk9RgxdNE3NgW1Biy8Czvbetrw7LGG
1h86/or+4utnbV8KzrZR45P7foMKtEXMyXN/KlqMb0SL/2TxJqF9W2OPtGk7EIUwOqEP6E85SD8L5v+6AeHXHZxXxI2HA12t7myB7eou7Pan+kZSb2ji6G4UCiLp4P3uf48Z9pBBnet3bhLFA9wRA1nmiJFR1PqKJlY7NswG+v/3Xtu2Uf/Nf6u5bQePE9s+SU9SSUOiXVWNigly8S5Bv0t3ZQ1j4iz9bkbm+KwkOXa/TZFl3Jg8T9IboLKEQkgqvs2wNoYxGTA+Wmc+GTB4vEhAyrUeZGev4juWHqsBbx+octgp+HxbUqPL0taA9aKJUw5Yaxzk4KzHUjfHNAzubkV0eyUb7jwHghTPgIu3bqDaJnlSwWqO0s5LWoOcQVpQkZCriPmiz4nhEkW0jjdoMTOyzuT4CP0DC7TDhOdrkhoF67h8UDhIWzJZtYnTjKm05Lwdk0YuNSOn7ttw7DRRb8iZ7h8yJCTobfBFc56mbbJTazIvBzyIY4xhj7upT391h8rgKcV7p2iPH4RwhZGJCvHDu0Rpn6C3AyvPULa7X+QoQ8Ltz0iFC+UojeMnWZmcoutMUdtexnv4BaVlwbF5LbvW02r1SKmTQWNMm2+qAv71P9V/+EoFbBdzYorMtCDPtVq9oTUuK3kEpocqdrukfhcQ4AeQJfDv/1c9Rs8yvTPb3t5FWk6C6+7LV1Fv6o7rpWv/9YSizl3VIKV+9xW/QeBPR4vxjWjxnyjeJLRva+yRNmcP66X27OF9+5ifFj8XzP9nHJi9Q67CIXz+oearRUMKr0KEhbe9hXR2b3JAmWV8akwQd1Pc1QHXj00iDYFx6TrVOKLs9eDGf9QVuu3C3FG9nPd9XaX32roLcD3Krz4lDGO8rXXcqTmc7pChIXAdC2f2AmqnjYojikoNimccZUicx/jGgCNlSuLPgzJI8hK/XqWX+2SVOtN+BVns8M7mY8xhhevThywQBZEAADl5SURBVIhEC79Rx4hDrG5LOwu81LFWqj413+JeNE3qbTKojBIMN3GsNuVSF7c5S5QVgMJ1LaJBiCx3iITgo80Ce7vFjNmgmm1wf+IMUzWL9UFKIAqcqMMpta65U4ePMggn8Qygs80Xz9tsDBV5UKNTVHBMweypJv2thMvFBpKM741n2IeOMvAu6G724Zd8duMuCQJrcp5Kf8C9cBw7jxnr9Jhb+oqsBE/5RGUJhknNhDuyycyThziuy6mKSawkVrNBEcUUcR+5ugTS1JMB0IT8LMcLDJwyJYlizkwEnA0UlWGIdF3U3S9g5Rl8/mMNibdduPIO6vlXGnix3dJ/80Ff71GnpuHoGdy5ObKbn+jkdOqCvh4+/UATpL0Azl/VXMnaCIoS4lSb364va0Rja0V3/ZtrkKSaknLoBJw8i1LAzY9Qu/w44/Lb+yLeeyLEKhzuSs2hd2l5oTvASvUbtRx/4fg5xRPehI43Ce1bGi+TNtX17/2kptzPGf9ZI409pf6y0B9PnaNob3Nj/DxJAU4cslAqRFFo00/TJe6n+I5JZGgrlUoW6UQKGtYf1PSyvVrVyezoKbj4llbwl5aGeHc63NhMSd0a9sgJFubGWMgeE9ckbtLH+PR9nczWlrjBGIkxgSvh3FQVwzJxRUHFUORX3kaFn9ErYaV0mTswiVGWvPvl7yMNyaC1hV1xoBuimg1NY3h4B2VZ5CfOEZ65xu1WQlqUDCYO06xGTKSb3E17TKiI02OCC/YQ/3iF2PG5vREShxFetUJue6RJgrW1TC0POQ4cT/rE3mkGccloPuC66KKSZS4GOZ5b8r53mDTeptZrcZYdSrlMpXEAz5LUKjYjniRNFadFl9PlBrLRwF68B8mQu6pJkmaozTZFFGF7Hsn4NMfPHqd94zZjokecxhwdblCpBbzXf0p06Cy3N/r0GQHDYCTu0xv2OD9TJTt0isXZI9x8eAsnslnI16FUxL0hvmMihNDivmHIvJtiHBih6tlaZJhSd/StDe3jt7Wm
uyjDgPrv6OvJcWH13+huSpq66zdsGGnivv19BnPH9fVfCSj7Xfjw3+vrRmm4vqoEiCSGk7uuEHGkk48wtGC2X9Fdv2khxqd0orj7hX7cV5/ocScKdfIcolbXSjWrz3WxJaUuHB1HA6GOzyP84NWx/C+5e/pFxRP+rMWbhPYtjr1u6us05f7E5OqXRhpqz5/pa75XFYX+b2/04/oafj06Q9KYwCcjLBpEoiD4/EOoVPHOX8Nd6hIZFrYA59Mfop49hO1NqNf1CAiF4diUB4/A5AycuUzx5cdEi4/w+jvIoEr09CkJI/jlOuHkIaL1LYLNFSqbK3rv9uwR7GwRRQlJs4p77BCdOGfB1HJKXq1KYbt8uDxEHT5FkaQccGwqjknnzm0G220eWGNEScBz1WQy26a2o3grf4KUBoXtcmMko/9kh+eRwDIET9sxADNHr3PaTTkfrlAVQ6TjIIIKjpS8daBKmLi8NVjkozZYvs/KxEmMJMYrCyYrkr90apztToe1/pDY8LHjIX5/hQ9qJ/jhxAzW6CTj3jbzcwK/lzNhKaKqzcVpnyjOmP/8D2isPOSTcoTB2AECq8m5SoNkM8IvUnqG5ImqUBh13MREDhSbtWnW8wwz0ILEQRZy3q0TfPfP8xbQ3WpjPFrn9iAHabK+ljMxeMLqIOdc1SCWFoPmAe6KEdLNGLficc3xUJNz3Ihc0qlTeNsp16alVnxR6P3X6IQeK5qWhsBnie7YoqFOKL6v96SWBY0mXHoLUeaQphi7jg/7126WapCIEhANEbudlXjZwLZa3zXCtbXEle3oxJnEu9+rdOdTFLr7e1meKktfoHiHfVhaRO0+Xlz/ngZh/SnHG3L2zx9vEtqvYfy0ruv1RKcsG6XKfXUPlaZw9wtEnn3t975sKiouLKCuvgfdHTzbxe0YxGmKowq8z35fV82GgTx5juvHxgkT7QsmH97araoVpIXeATqutqgpgEqVolTcGDokxgROXrBAgRf3cdw6YQFOFuGVCnHqPKrX0QaLi/eh3cILQyxGuWWPgSmp5j3eYgsOneD9B5s8GAoCx2S66oJS3Hq+jUpNzMkLiGEXYxjx1GqQFArpeZzdeEZdpEQyJTFM6lWPfBgyiAssSsZrNlNVl0uHJ6mYB+i3eyC0yr4ptfBx1bPpzV9CPN6iXq9iZorTVw7RUCmfRA73bjwizwryQYQycmxrFGEYxFGK1duhbIxRWA4y7bHQsBkeazLSSsgKhR0NcLvbDKKUu8EURulQViY5PezjdNqECCSCI4HCGrEYdtaJ13POVCyWqrO0MsHq+BRrm1369oDGF4+50rrNA5pkmORGwMnA4GHfwDcUFBlds0qtEsCIQ+LOUgl8oqwkOqSNLrMuVBxT/9sgpJIm2gcM4PAJfQ32u7obT1Pt0JDEmte1t5vaWtMjxzTWSELbRg2jF9dvcxx14IhGvlq23qu9BmIStTrq6rs6sd37UidNz0dcWNBTjT3V+zjSrz02DdXaK6hebEeLBqSpvqaPn9Eo3yz96eLEb+K/eLxJaL+O8Q2L5K9dcO9VzkrpXdyXH2sC865txytL6K8zFbVt1OpzzDThmrSIj5zBVRlye10/n2GgSm0aGZSpvqHVRjTMvyy1bqQqIY2RJ8+QK+0TFT26TyIsfHJCaRPtrBOUCQvdHaKpw3hBhjQsvR/yq0SGjTd1ALm+hATO2kMGrqIebhJGCa3uBu7WNmqyjz92lEFjClmzOL/9gHSoGIk2CEfGUUFA2/aRiY2ZKK0faEqYO4ldFqjmFFFacm7UIXr0gKeJiZGYNMcPUSjFB8963Lu7CArO1ODt9y5jWiaqKPDu38RpQ7ht4R07zuT0OFFWMny4BUXJRukQKh+vVMwJSJG4RsG4kZFYgnNHD1Idt5G+Tx243kzpK5Nbjze4mVUoihplWWDkOTTGkPkOC8UGkeVjj43z2dQFksWH1IuYsr/NnWyKxEjYyU0m4jZZWmIM1omKnPbGJklg
0ayPsOIG9MfHcAdPKdst5oddzhqKwCygfgxn+gjhIMTeWMJz9a7HGZsnTHKcMsP16lqLcWMVRkYRl95CZCnluz/QAIvni7oTsh2oNsBe1ejd1jo0xjTB+eBxkk8+oOz1XxRb567Cb/1VrZLvVzBq9W8GMkkt5banMyqyVCc/tOq9ppe8gxh0od581fn95DlUv6ffz/JTLXs1f+nNPutXMN4ktF/H+KZF8uuJrrsDe5XzTkuPVEYnALSLdbW+b2JIHOl9wuvP+5Lztbz/FZWwr0dJoxM6URoGwpD770u4HmpyVt+oZufgR3+sgQFLi9gHj5E/ug9S4pcpztg0g0EfURvD3org2EnkZz8iiHuaOPvbf5281+XGneckDz/AKnPOTh8h2FrBUwV2HjEwXVZMF+QYjufhGILZZAfhNHmn4SE3Yxp+jciYwJ2epbB93E8+ZDbZZirapm75BBWPosz5LAlQD7+EiVmuOAPSrUWuSBOmD3I/neXjpR6P7y9i99oI06Rve0SDkKBWQe1sYSQR18cCwuEQt1ogtjdwqg2CikMmJEWWUk+GxK5P6lRoVgrO2UOoGmQjJlIYiEqAyjPUp+8jFBilICsd/OOnGOQFx2ommS2ojgiCnokcdAmiNSgTFlBE3RW8QYcBDYa5ouZl3IptxoyCbdNlsS2opBlvp32cIiUsFKbjYLsuxwKT85UqQWkhVbm/g1rY+JIoyfHWnyFPn0dFIVetIdHqE7xkgLHtabmrMNQ8sEtvaZcEAG8WNT6l92G3P4O7N3VxNTquARdFAUmiVWz6Pej39Ei8vQ03frjPgxQnz/3nnQnYp5eIl4u9lyYTohKg3N0R5cGjuhM8ee7NPutXMN4ktF/D2LeF6e68Um3+xKGuN1+4XrfWIcs1BP/oacTpC/tjl1fGl+euvgJAUXvP2evo1/AD/Rxzh/Xnewiwl9/Xp+/vkqZ7GgwSh2B75HuQ/ScPMY7PcyV8zgdrz1FFzmfYLMQRcnIOjpzQN7qvPiaKc5K1HrZh8IUYpVcdpVY/ppVPckVSGoylCb6dEkqbU9tPqGxnBMPHyOwMWBbX6DFwXcK5ce6tdhgtIpyyx5loiTHHR1ZHGIzNkAxtAlEy6LX5SNqI0MJJI+bVM7K5S9QpkEAqbaw8IRAOjso0Eq+vNQTFwWNUbAf+8F+g0gTD9XjrL/0t5sfm+ez+Cur2IiLd5lK4yPTlK2wzhcwEtdZzwm2L4fhZKl/9eN800hufwVKKnRQqY+OcmfCR1RrVUzMYnz/RvL3GuP4dP3kAeQlBlWBigroYJbVczmZ9DvuK2iDFyrcpDEXhBSycmmX11AK9jmBk6ynp6jNkvIycntIWQ0kMQiCVIqgFsJyhWpvQWkeGA4I7N7W8VKRNN0VzDDXof70Fy90vtDSaX9ndowUajWgYEFR1UVWvwaP7WmoNBRMzqJPn4O4XOtnVR74R1PQzwRU/BR4vpERcfQ+FAsULh+o38SsXbxLar2Hs28K8Vm1+3aFWryuE9DqI0xf2l+9qOHjloO+NavZi/zmHA5TjadRZUWgV87/4v9WPfYnbI7IUpXaTW57BD/6yRpfNHMTotOD69zVke+4w6f27iKBGZe0JoR0Qra0RnD6j0XB5Blh41QBLtfmiqLMhbSq2B1aBSBJGKg6P3GmKzXVWR45hlTlUanhxjwWxo1VJFr5HaZjcbBUU2wlL/ZwD0sazTMbMAqlK8H28/g6OGiUchogAVH+dShYS2j5MzeAkQ2KnwrkanKorJCaBWyK/+FiPcUEngJFROHcNHtzSNIfeAG9ni+bkNN/PlomWPsGL+kjbxp6axFvbwMkiIr+KkyV4w84LEENnR4/oZAUlSh7XD5HPTOMHPtdtG3H1OygEhEOKW59xw5omKRRO4zgL332bhTufEz36Eo8cxk7zvDFH0mvhCfCKlOLSO/zx84In20PomPz5io9dGWUwNod/8jymZYJlo768QfHgNhEm3vNFpGnsQuNj
DdqwHBAK1d7WElOvWbCoonhVUm1v/D06oVVD5i9CWWLPTMPauv7bC0M/961P9e/B9fTkvN9FGBJVFpqn9jWKH18bPwMe/7IE1hu04a9uvElov47xM6pNXk9ILymEiGr91erz5+DB7JOtJ2d2NfHGNKosHKLufYFSat9m4xUYtOvpXcQu6kz12rs2HhY8X8QzFE57QztBWymeYWj/queP9U7k+WNkqTgrLXojc/iNOSIhsYdruEZJNzeQZcl5P6dlOJhpQWD6hFGfaLtFoBTFg9v8KDjG/T4EjsmBZoUz+QRjA+DIn2eQKbyoi0wiFkZNoqkJbN/js8c5oVHFKXOCjadcc01i28dduIAZ9lCmjbj/pR5VpbGW8MpSuPOZBr1IixvrMYn0ce6ucN0PkP02QcWHiv5blFtb4FWYFxmILtWGi2yM6l1TEkN3h0g6ZGZGZfogcTfDSFPi1CHs9glqFbj6HmrlGdHqCsnQwi8iQmkRRwmV4/MEYZ+yOkIUJVw9MUKSTOFFPWRwgR27Qpy38WxJN8vo9WM+ERaZauLuKK6PK6SUFMfPcmMzI2m3cQZrLJRbSJXr7ipNtEKMYegCwqughgNKgv2iS5mW/ps3xmF8Wiezzz7Y3XuVelxpWiRnLujx9F4yOXpSAzWCmt5thQNYfqLBRa111MQM6sTZ/WnDT0tCPw88/g3a8Fc/3iS0X8f4E5Ixf9ph/nkO+j7YJBxq0Verp1/z0R2NPKzWUDMHEXGkl/AzB3WHVminaFafa3US34fpgxo0cucmcqTJQpATqQwv6yGtg/qGsqfwH4UgBMG5c4xs57h1C8MweEcNMcd8+oMht5rTJENoZiGYgvDgSZzJObyWB0GV6MEdypEST9bYqY4xbUrGrBJsRyecvMDpZyxMVJC1GoGUUBQsmF2imSN4KkdaoxhBgBwO4MuPNMTb2P35ykL7bJWl/llNB8qS6N2/SHLzIX6tShjFhGlOMDKq90btbajWyR2PGz2bpJA4huL69SsYtk158hxqexNKhSclTp4SLT/FSwyK4QqeKHEONrQiRzgA08IzwHFMwuokzvYa7qf6NYqs0MnIsHE7cP3KdWQaoyybRhRjChgmBXXPxmiepJ9nNA8cIHr6mHBtSODZRCcvkUgLv71JWCqivCCoBrobjXbdym1H72jvfgHPH2vz1zyjrDcJh5H+PWaJtnApMv0xTTS8v9GE7U2ye1/AgRPw239N72VdTyMUDUNfe4ahEY/xnuQVkOeaLF2t/0yO5ZuE9e2PNwnt1zD+c8iYP+0wv/y1r+W37XaERrVGefE6HDwCfg3+7T+HQVdX5tMHXjhTu57uIF1P3/in5qAskNWaBg7cubnrQTWGRBGcvawr+Lf+nE6ES4s6obk+1OrI+1+yYFpETQvv3CVkR0E4oOY6vDV4SuTEeMuPYGqWqPNQP6Zvw/2beK0WjjlJ7LiY0SpWJ4L+Q6IDJ0jKCH96hlAYDOoeEhPv7FXk7RvIg0cJSgWH52H1GWVnR79H0D/n/a+0N5wArryjeVYrz6E5RhHUKGpNLNclXF3GEQrv2X3Epbf07+Lf/S/gV+g/fUziH6FSqxJGKXGU7CMm6exQtDaIpM2VE5OkZcl7nS3S1hZePESKQ9ouJc+gWkPOX2Jh6SlR3MZbWURmh2DpCdHkQRLVxD96nOjpY4ajpwmiLjxfxCoL/rZ0+I8HjmGt7eApBZ5LVIBdJHgSVBzhqxzn4BHCzTUcSrz1FU2KXlvSu1SldJfa6+hEZZqweJ/CMLixFpE0J3G2t1kwu8jWhlaKaYzDxqruyFpbIARGrak/WvaL/e7euNu04JMfak5bPNQ7uDwHA0Rt5CcRu2/i1zLeJLRf0/jTqDa1uv/7+4vx/Yr3pY5Q+BXE7GHN+zGEHiHlKZy9/CIBvpRsVVFoaaI4ohwZgYvv6BtfZ1t/39gEnLuGMT6pk9lwgDpwVFfjSawljYRATkwTJDF8+eNdAi9wbB7z/pcE
tqFtcVAEYgXySI+qSpDTc5wdPmcQpdRlSexUiArwBm0c0yAswBod53aSk5UWzsdfsuCB9Hz40R/qEWk40KNTr6JpCrsAmT2IOEJo7tWh4xTzl/ikY5B8fg8zz7hUtgjmz2HmqQbbjDRRFa2eUZEGjmUSRimOI3E9Z1+rsMhzbsz/gGRtGScPWAifIfMMu4jBdTTHS+V6HNfvQa+LPHKcIMth+aG289nZwnN9HIEe62Z9vN//H3VCaLdQ3/1tvCLhLx7wiI5cwMsTcD3CJMO9/RBjI6JwPOIL73Dt6DhxZwrv3mdI09QdkWnrDm0rh4PHdaLaWNGJxrKIjl8gaWX407OEO1tESAJyOHYajp/WtkO1ut6RCYE5PoHIcj22Hg72iypRq1Ocv6pHk5Oz+no6dUF3+nsE6j9F2ag3avi/OvELJbQPP/yQf/pP/ykrKyv8/b//9zl27NjXPu7mzZv8o3/0jyjLkh/84Af83u/93i/ysm/iv0Jodf8f7aPr1NTcvlPv13aElQB1/IweM1WqGHt+aryWbONoX4lBrS/pMWW3rVX3lQK3gthV7gf08/sVrWH55IGu4nttPaY7dAwQGJVAJxIp9Y2srZVIMAzdsaQpYqSBWrMhHBJsrVLzDGK3igN4skQ6LguHfKI0oni6yM2hiT82SlgbI1IGwfamHn1WAq0lKE19876gk7zyKjqxVapw9so+vylVknSzha8yQr+K7FnI4a7fnOvpkezu782bnGRh/CCxMvDrVWQakycJUaVBzgrJ+ip+2GMwrNKaPMzYkYPI//AHIJSWCwvqsPpUj/y625BGGmgxMqoFf7dWkf0OC+km0aajx37b63rM19mB//gHqO/9NoYqCWyJ8LStSzWNKWcPUhqSG32TbLmH6/tcnT+HEfdgu6avhUNHtH1Lra5dyPcQqqcuwNMH+FmCW6kQ+TWcyRm8DP0+d3dm2NosVjRGUfOXsG0DlZaIW5+idndve44QRrVOefqivt6On3kFpfunmWzeqOH/asUvlNAOHDjA3/k7f4d/8A/+wTc+pixL/uE//If8vb/39xgdHeXv/t2/y7Vr15ibm/tFXvpN/JeOOPoJc8+fsJ95DWxiXH77a28mL1e0+9y0fhdhGLqrAb03qTU03HtXkaFMU+juoOYvaej343v6oxD6Bn3wGGys7O8ORSXQNIEbP4TxGZ3MTpzTI68khrkj8OALZDhkoX2TaGIG7+BFZPUYRrUGnR2CJKawDZxIEYYxThP8a+/oLqazo50FbOfFTdg0UVfe0z9oEuvE8qN/hxIG1Bu4F9/C8RxCYeEUKd7xk3B8nkIYxGmB71gYl9/WnejqIvLWDSpeBXH6Arnt8nHoEfUHqNpx7Dxh4Hgs5RISB+/BBgvSRNqW3kWmCfR2dlGGBjQn9M9fKIokJupHeEWMLAuCUoMriic9omGE51hIpaDXQX3+IapSRV18i7gUuLaL4XpEUUoqHXzfJS5KYsenUm9op3Ih4NLbiLs3Uf2uviZGGjpBeR5cfhsjjrhuu0R5idPJkW2hDTYddzcBzmo1/PlLiLs3yRwbel1UUepi5cFt1LAP1Tri4vVvvN7+VMeMb9Twf6XiF0poP09SevToEVNTU0xOTgLw7rvvcuPGjTcJ7dsWe4nnNXPPnxbfpOz/ckUrLl5H7O5BrPVnxDs7u+7YdS1oa9mooqCMon2XaaSE935TJybD0B3S41wrRhyfhzOXX1TnuwlPnL2kKQlnLr2gEsQRfPKfKJKIyLDwanXkwncRzx/pm1OlCo6H3FhloSqJ5kbxzp/WFjKGRP3gL0Mcw4Nb8PA2Rb9HVAq8OEFurOik/+G///+3dyaxcZxnn/9VVe8Lm6u2aKc2U4ttSZT8yYodI0KQmQEymSAIPN8hAXJ0ggAxEiSCYeTg2DGywEEOBnywjcCHOeTgGWAGCAzlA+wvlhPRWixrsSTKkURJlLg32d3VW9U7h6d3dpPUwkXN93eReqnut6uL
79PP9n9E2d3vk/6+jdvYH8iT2reDsKkwPB6c05/Ql/STtfwEurdwYF0MA8hfOi9FNkODqGSCVCCKfWeEWzkPybxiazDHDqagcwuRjZtJXfsS2xsgkk1JMUogKCHZC6fF8Lou3L2N4+TpM7vIbHsW//Atev1JrNQUTipJ39avkbn5L/yWRa99A+v6VYi24ChFX3gTOcPCH/TTu7uXUNrGP5zHTqXxB/2E/F75PgtN+EYui9q1T26Ho1Whv+K14QEi2QTKyUHXKtTYsGh8IiFbMmnJw2YzmG1tMD4us/eK0lnbZQ5Z6QdSzbX2IN7ZnI7XavhLinnPoY2NjdHR0VG63dHRwZUrVxo+/9ixYxw7dgyA119/nc7Ozvle4qLh8Xgeqc+nvv5fJE8WDGFY93fpuIlJMn4fZlsbbiqJPxzEjLQAK7G6u/FPTYq0FuAmpsiePw0DYmCybh6VTZO/fQOP62DFWjG+soH89SsY0RiW14tlGQRWdGEEQ2T6PsZJJnCGbmFt2oq5eg2+jnbMSBTD8uDEx5lY380/6SStTCIt6ziycSPerVtxE1MoxyF34TTOYzsxPD5afT7cK5+Tv3kNANPjwdrQTW58mGw2Q186iNPWRSgb5oBlYuRzZLNZme1lp7CGB/F8+P8wUIQDYcLf+wHZf37EyM0bKGsFbV0ryOUdAuEoEb+HlGEQ8HrJmwaBlgj+VBIvLulQC9H4CMFVa1kZyTGx7nEypocWe5I2U+GJRgl987+TO/spuXyWzM1WTK8XNToIlgdn6+O4Ux66tm8j7vXgjF0i2BIl4Vqo1q8QSSfIBCOkx9K0JONYKk+ycw3G3du04ZLyBQnu+DdaVnTy9Tsfk8ykCVsBQh0bMSwPysmT6fsYlUnj+gLk9xwktG49Vsaue+2otlYyg9dRmTTsPYh3xx5yX5yFXA4jGsXXvYVsfBQjY9PS2ooT3oXh5Mld/xKv4WB1duFbvYrsqX+iMmkMfwDf3oNVt/29T9/TNVv5GWY7/mH8Xdwrj9resVDMevZfeeUVJiYmpt3//PPP09vbO+sbKKWm3WcYRsPnHzlyhCNHjpRuj4yMzPoejyqdnZ2P5udLZ+/7UOU4qEwWpqbA5yeRtDEKr9fZ2clYOlt6fRkgOoYRDOGmM5DOwMgQGCZZ04JQTDwRywc3r8tEYkeRGBqW44fuSoFAckoU/SM5jP/8W8kzBJhcsZ50YpiQzyIR6+LWwKBIVX12QkJlgwMigDw6AteuSH5vfATaOuT9rv8LpuIkHMhk8oRuXWUyPsF4KEdk+KZ4GJk0tHXiRFtx4hOimDE+in3xHEzEsUYGMRybsaG7+O00qYnLZHbvI7pjN5MDN+D2DRKfnYRQhAMm5DMuStkY+SyuZbEu5oV0mtAKP3lPNznHIT00jJqIw9XLEB/DNUyItooazN1bmMG1DH95Df/gANbdK2T9fqxwDMO9S8IfwZ/NEMDBWbMBJzmFGY2hbl1j1PTiV3lSg+vIej2o0WHwB0jcHiZ5/TpmS0y+t5Fh3ECIE4OTZHJXCHgteje242lw7ahNO8rekAuqe2f59mQCtWkHHeEg4+Nx8dTTEurNbN4p+dnBO7hDdyV/GY9jXO2HkWEJBU4NY9y8eU/qHsXPcE/HP8Dfxb3yyO4dD4E1a9Y0fGxWg/byyy8/0Jt3dHQwOjpauj06OkpbW9sDvaZmaXA/IZ17aimoqZ5U3/kBnPhQSvZNU4Y3FsOJE2OwbZeUil84LZOT06lSUQqGKUUbLa1SrJFMYLbECB14Gn/2U1KuDAkNBP2osWEZ5ujzg6vkte/cFEmvVAJMSwpN4uPSRN6xgmAwiN8KySSAfJpgMi4zvaItUjwSCEkl5OB1kX8KBCWvd+s6VjRGr2FguxlCMQcrl8fIZQkcfIbJlvPyeZ085HP4vvldnnHypAJRAricmnDJDkkerMc1iWRzWB5Ryigqy9PSXhaDXrcJ66vfoDcYxv7sJEHPBJadgIlRLLuf
XuMUti9EcOcTWMGVIlG1biPWtt30fvC/sZUl+oxODhUKobI5uHweCpqKbu9XpQLW5yeVtEkbIUJDA9hZl9TEl0T39s6pablRlW5xnEuxj9EwLamQNa2qhn315L9h1IQC7+l61aHER5J594+7u7sZHBxkaGiI9vZ2jh8/zk9+8pP5flvNPPMg1V0lDcji7DWQ/7e1TntepfEzLQv17DdFjgtkkzrbJwYqGhMx5YEvpajg0ufSMuDxijZgIAiui7r0uaw/GMbdsQdPOMLBZw9iJ1IEgn6sC6fEmH12olz2vX23jBmxvHDxFARCOBjYq9YTDPixVqzGUorez09iD90hmM9irf4K2ImCEnwIOjpg0zaMA18taWyaPh/uvsOovIOVmCQyOVHOfQWCEr7yB8UjLEwuwLLwdnYRAxIZh6ybwGfCuYvXmMRHbHyU3vUxrP6LcuxIYepBWzsc/BqYJmZrO4bXR8RnlQtXIn6wk1g+PxGVheQE7O7F2L2/7Jls2kbkk7/J/4/9H9SGrXDtkpTi+wLg5FGmgXHgWYzHDxBKpQjcTmJfv4bf7yGQHEElExiVM83mcp0lE6jL58gEAyi7kIcrKO6ry+fk/0rBqrUiYuw4mK5Tde0A93S96sGajyYPZNBOnDjBO++8w+TkJK+//jobN27kpZdeYmxsjLfeeoujR49iWRY//OEPefXVV3Fdl+eee45169Y9rPVrFosHqO6qNIaq0Ixs5HNkBq/jrt9aEj8uvU/FhlLsOyq9TuUL1wglG+2dqGAIY2sPxNplWnJySgzVlfMlBQnr8QNE21pk48xmpOgkGJahoQYQDIuHuHI12NtxDJO+hJeM7cHvjdLrD2I9tgfLcYi0XYfrV6VYpKVVdAsnJ6Qy9NY1jFVrRKW+uFGbljRdT8XFk+x5Qgwzkm/EQEKbygXDLCuspG0CvgB+y2RiMgmOS1vIwFaQtnxE8jlR47ALnub1q+DkMUIyhVmd7YPEFAxclTCkZUGsTYyfxytyVXsPYbhO+fyuWg2xDmhtE+905I4YejsFE+NS2ei4ovoSjuCJRjmwOUhyrJ/g1dNYpoWKRFGFIZyzXiMFQ0ZySkK/vYcwpqak6Keg2MKF03INJhMyZqYos1ZReALTNUnncr1q5ZBHjwcyaAcOHODAgQPT7m9vb+fo0aOl23v37mXv3r0P8laaJUBtuf19h2QqjeHEGCAVba6dksZtw6wydA1/UadtjHyuasZVSSi5UFVnBIJiVM6dFKNy56b0uFFHQSIQFANz8TPpbfu8T7y86EXUzr0Scrx9A/vuEJnIJgKrVhM3fMQdB1//FYKtHVhDtyRXtWqtGKH2LgmD7t6P4eRLXql7+h+yUedyMHhDDMNUXBqaQ6JzmPH7wE7D5u2yntYOMUYnPgLHwbAs9j15iESrRXgwi51T+L0eAioPPhmASaxNfjhs2ykVoKY0p9N/Qd5vMi76mHYKvvU/JYzrC8pk5/OncJNTpYZx0jakpgAlxrdzlYQyw1HpbWttE8NYcS1YpkEkb4thbe0oVyTOYCiKP3iKOUy27gTAiY+V2jGKBk0VQ9KB4LRJEFXoEOKyQCuFaOZEvXJ76o2ooc5U7NrcReXmEpZmXTeZQPk84LgY0Ui1oUsmxJi0d1VvVHU2qaIHpyp6kkjbuGlb1ELyefF6Nu+oKiMvegTYKfmMXavk8Q3dorRx6ngptBVcsQJvGs7lQyjTw+1Jgw3pHKGNW9n/X3uwvjgj3orPD92PwdWL0oLg8comPBkXg2Ka8n6uKx5dMgFXv0Bdv4pjWEx5OnFsG8tEQp8g67h6UTykWBum69LiD3AwlMHOuwR79mBFYxiFYZeqGDYrTGYmmxHPJu9AIiG5uZvXxJuMT4Dtk7V+eUnWGArLc76yUX44tHaK4VqxRqS6tu+WtgClwDKntXOoZKKsr3jzmkxhqGNMqn4sFX/wtLSiBgcgMQlbeggcfIZUOlutnl8bFpxpgvSWHjluFqFi
zaOLNmiauVETYiz9yq8ZUdNwKnZt7qKwERlFY3Ly79IAfO1LXHN9laHj9g0JLVbKbTHD3Ddq9CcDQTEwqWTJEzN27JHjvT7cybhsymlb1goivbVqbSlUh2GKyoVSWEO32Wl4mQq24EdxSYXxeG0y5naykVbCT32t2qAXGqW5fA517qT0luUd+etTiPFMJsS7GR/FsVP0JXy47Tamk6d3fQxva7voRebz8nmH78rBaRvyOaxIC5GPj8GX5yEaQ33r38sjg8IR8VqL318yAWvWie5h59MwOYGzuxc7kyOYzYim5vG/wdioyGht3FYO5Xos0eVMTELaxmprR9V83mkUJdBy2XK4sIJp+dhd++TfTFpUPwpqIFasDSNXXdk3l7BgvXyvpjnRBk0zN2q9Iaifk2gwFbv2ebXyVxgmnpYYrFmPsbVHwmUgFYdQlrOqCFc1mvtW9Wu/+PpPPAU1AxqV46D6/hP6v4CxIVi5pmy8DEOUKrxe0UP85D9EYDefh3CUSPsKYpNx7NgKgsFW8t4wYRyCXlNCjRUYliVVgNmMhDwTkwUvySwVerCnF/ovQmISO9hCJhKhY/VqRu0ctkpjTYxJ6C+XlRCqMiScGAiK9zY6VFhbRMJ7NUM0ldeHUq4UZRSqAAkGwXFxBm/SlwmRMX34TUXv3QGsXE7Ohy3Gz+j9qoRLg2FR8QBRDynmwxoJW9dIoBnhiBjUOuLWlTP3Kn/wPLA3pdU8lg3aoGnmRG14ByjlL6pyEg2mYs9YPl04xk0lZQOrDC0WZrXVzX3U8RoVoC6fK4f4kDyc4fPDvsNVBSfq5N+l5Lw4bdtOQkcXOEr0B0NhMTaphGggAozdhOQk1p0Beg0LO9HF011dZLfsIryxHVO59avpil5ickoMmj8omos7n5QQYj4ngyxzWYKuwn87Scrw4PdD0O9COifGb++hsial48DeQ1ItOT4miiUD/4JACBWpKJwpFlcoZA279mFVDKxMPmGRuzlJOBQgaWexPz1LJJ2StocN3XDga5g+n8iP7diDspNzVrAvSqCpZELWcravVKFYT9y6VknkoaDzZ8sGbdA0c2baJlOnrLnhVOzZyqcfP4A/HGRqMlE94bqeIS38wi8ZwmRCcjhfnBWjdOsGatNWKXoo6EO6yQRGfKyUh1PJhGzwkSjc6IdQVDyg//Y8XDhZniiwa5+sx+MR4+fzQ9sGSCax2tqJeP3Q3oX/sZ2YXk/DajrDsjD2PY3K2DBwTd4rmxFjWRFWA0TjsNdLKBgkNT6G9cWZcuGL66D2PV0qnuHMJ6h9hzGDQdwnnpKyfwyIj+OkbTGWxSrB7bulUKagjVn8PsOuwh/MiIRVNk1Q5aXKsf8CdK3BuPx5eep5OCLSZPeqYF8sQhkckDaICmM43yXyugR/+aANmua+adj8OkOT7IwbfjAExz+U0FyFsSup99eU+xvbdolQ8ZlPRCF+4EvY/JiUk2fS8p4bt9bNw5W0KTtXSm5si1TSmR4Lte9wVa8YPh9u7zMoR8Hp4xCPS2m7Wfjzae8q92pVGlkD8Poo6uKYPh/uoSMytcBxxFvqeRKzWMBR4bl6LYtYRzuZTBqlVClUSCAo+bZ0Wgpl7BQKA/Y9La0FaRtuXZMCDKdQjr99t5z7yQlpCagxQqZy2T9yETudJeAxsbxeMfI5Mbgqbc88WWEWVDIhFYuRlobrmO8SeV2CvzzQBk2zsMwQ/lF2SpqaXQfy+dImWtzoleOIV1NonFZJkc9CFWaP3b0FEyNSTl4o4DB27IGMPS0PZ4Qj5RL/aKzUHqAqqwEr8nKmzyczt+yEbMxKwZbHMEKRUmVe0atUu/bByY/lOedOll4DCkat6GE5Llw4jVsUU64pnlFOXu5TMkpHbdqOymalneDGl2LQ1m2Wx9O2VPGlkmLch29LaDKXKVUJFr3A2opUNTaMmbGJRuT8qMPfkPeNj8sPg1kmK8yEchwJdw4OyB2bd5RGu2hPSfOw0QZNs6DM
9AtfWZYodKRTEAjh7nsas7YJ2+OVJt5cTgxLccMtVMSxvls2Y8eRYaPhSLnHrMaINirxV7XVnIVqSK5eFNWO8RHY0oNZKLpQn52o9ipzWZHkCoXrFyGk7YIu5R3JeV06h9raA5ZVZXSVnRKPyzBkVM7YkJTWK1ckrdq7JLdXUMww8jk5P+EI3C0Upmzf09CAlPq9Cl6d27kaoi0YXSuhayUq2lKarABML+ZoQG0JvpHPwfbd4pnt2IN5D0ohGs29oA2aZsFp9AtfTU5IP1ShB8xIxCEUKYUosVOoHXvg/GkJpV05L57Hnl7R+DMt+L//S/rGAgHU//gBZnHzffxAWTKrznpKUlxeX8mDVB6vFFrkcyjliidY3Ji37Srn4mpDqDVeaOWEZeU4qM8/FQ9r6LYYb8OAf30B3T1Vx7jZrIQPJ8YlB9WxQnKEW3aKJ7dlB8bugkB4QTEDOwU9T8KOPfLZZvKEKotqcjnx5ij/6DDuQzZqxhL8QiuGKjSF1ztW57k0D4I2aJolg9nWIZ5H2pZ/Y+2lqdOSkzLEqFgWPPZ4ybAU81zcuS2q+z6fhMvi41KaXqTQN6dqNuVpvXOPPSHG1OPDuHS2oLg+KYYNMKKxaTmzaRV6BS+0KDOlioobuaxM2g6GoL0DxpTMdfP4pEfL5ysdk8naMkutKCmVnJL/t3dJc/MTT0EuK4bA4xVjVqmkUUFdY1EpFWaaGMVm8trZYoXZcYZlVeXT6tKgBL/Yh8eF06XzX3r+fWgtajT10AZNs2QwfQGMb/17dUEGFHJSf5dc0dWLUhCSSVcbFpDcmmVJONKyyuoaMHMvUsVjbjIBZ/4hhsPjRXm8shnfuSmN1oWy90ZVnVARmgtHqlVBMmlYs7E89XubFGuQz0E0Vi4OKRzjBIOSCwvHwOcVzcXOLti1tyRNpfovyGts3lEabDpr43JlsU2NVFjJo6wx8FVK9vsPlwpdptHIwFsW5HMNG/PZ0lP9HdRThtFoZkEbNM2iUyr6aGvF9PlQ7V1yu7DpSk7KxAgVjFFRbaImNGW2xHCfek5UPlo7qnM1M/UiVT6mXLDTqJCU1Ru79k0rKimWvdf9HNMKOxwxsF4fWB7RSgyuBQOMfYflwDpKJ/KBTFHY6FgJfi9giA5l/0W4WZCTMk15rp0sGymm57EaGfO55BGNRLx6ZEuDz198vbo50tka86HsiTdQhtFoZkMbNM2iUuk9FNX2a41C7WbYKC9kWBbmvkN18zC1YcB6vW4qmYALZ+D8cSmrX7sB9h8WL7BBc3dVK0GhGtEMR0Sbcvgu3Lha8BozsHUXbN9d7ueiOsxW2eultvRgmS6s3gA794qGY/9FqeIcjcGK1eKx5vNi1MLRsodYJ4SK48i0gWhMyv/rnLsqqbDKMG+kcEw2U24dmIGq16oMdc7QmF+qOp1BGUajmQ1t0DSLS6X3kEnXlcoqbXZzKBiYUYapWPxRW5VYNGqWJY3PKwoSWJ2rMXJZDN8M71+5/kLvmZtMSB/YVFwkqbbvFjWSXBbO/ENySL1fFU+nQU+e+eRTBMNBUknJXbm798uaI1Fpxr51TUr2N2yp6mWrXZObTIgxLGpSFnrSZqLceiBhXuPimZmV7BtQT9C6KkRc75zOpAyj0cyCNmiaxaXC+zI6u+pKZcFDbIydKZcWCIqn47oSeix4ZjO+f+X6C8oiamwYrvXLaJWRuyLm6/PJfWPDkM+jUND7TN3PWvRqjM4OjHS2/D6BgOhL5rPg8WFGW+R9i6ofddYkLQ1ZERX2WGAnS60IMxmn2jCvkctWG6MHPdcNzulCq3roysrmQhs0zaJSuYH5167FHJ+oksp66JvMDLk0w7Iw9vSKl5ZOQy7TsMS83vqL63VDEZRpiMp85yp47AkxRv+6LKFMjwey2SoR3spRO5UhWLVph7zROZHjwrJg33MYF8809GKq1lQc5nn3pjwYDJenPM9UTfgw9A/v8zUWStXjQaaua5Ym2qBpFp3i
BmZYnqrb8/ZeMxhMI5dFGaaEPgeTKNNCHXhmmrpG5fHTpL5qFObNrpUAuNt2A5+LZFZB+mnaZ60NwaZtuT+bKReluE5dQ1hvTQbI+Jptu8rvUZzyPEOOasbqzTlu+o3O9ZLxirQKf9OhDZpm2dGwsdtxZLNVSsrqQ+GyrFRRS7KgXD/TJO1iDqx20zb3HZLGcGZoeK4NwRa9mhnU6GfzNIqVjKXnztFrqqehea+eTO25XlJekVbhbzq0QdNoqNlovT7YtE1knwpVfSWZqAaK8bU0yg9VGZZGHk9hsrJ/wwYS4xNy30xh2HvwNO4rR/UwPZkl5BUtdL5OM/9og6bRQNVGi52C3furet1KElctrajBgYbK9XOhkZcy7f4NG0rHzBiGvUdP455Dug/Tk1liXpFW4W8utEHTaGD2XrfC40UR5HrK9XOmzmBSinmwyvuLDcezMN+exsN8/bm81pLJsWkeObRB02iYQ7FIxePGg260FcazSgC5OE2gaFSDISiW7c9l/Y/IPLGZXmtJ5dg0jxzaoGk0BWbbtB/Wpl5lPB1nmlJ+OdS5DP88l1COTfPosQz/YjSaxadkHGuqDpf94MsHyLHpUKVGGzSNZhGZKdSpnPw993496txvvk6HKjWgDZpGs+jUC2UqxyHT9zFqZHjZbdD3FdrVoUoNYC72AjQaTR3SNiqTltxaNlNWDNHUZ4m1A2gWB+2haTRLkUAQwx9ATQ3P+wbdDLkn3SStAW3QNJoliWFZ+Hufxrh5c1436GbKPekmaY0OOWo0SxTD8sx/1WOlQooObWoecbRB02iWMzr3pGkidMhRo1nG6NyTppnQBk2jWebo3JOmWdAhR41Go9E0BdqgaTQajaYp0AZNo9FoNE2BNmgajUajaQq0QdNoNBpNU6ANmkaj0WiaAm3QNBqNRtMUaIOm0Wg0mqZAGzSNRqPRNAXaoGk0Go2mKdAGTaPRaDRNgTZoGo1Go2kKtEHTaDQaTVNgKKXUYi9Co9FoNJoHRXtoi8gvf/nLxV7CkkKfj2r0+ahGn48y+lzURxs0jUaj0TQF2qBpNBqNpinQBm0ROXLkyGIvYUmhz0c1+nxUo89HGX0u6qOLQjQajUbTFGgPTaPRaDRNgTZoGo1Go2kKPIu9gOXEJ598wl/+8hdu3brFa6+9Rnd3d93nnTlzhnfffRfXdfn617/Ot7/97YVd6AKRSCR44403GB4epquri5/+9KdEIpFpz/vRj35EIBDANE0sy+L1119fhNXOD7N910op3n33XU6fPo3f7+eFF15g8+bNi7PYBWC283H+/Hl++9vfsmLFCgAOHjzId7/73UVY6fzz5ptvcurUKWKxGH/4wx+mPb7cro05oTQLxsDAgLp165b61a9+pfr7++s+x3Ec9eMf/1jduXNH5XI59bOf/UwNDAws8EoXhvfee0+9//77Siml3n//ffXee+/Vfd4LL7yg4vH4Aq5sYZjLd33y5En16quvKtd11aVLl9TRo0cXabXzz1zOx7lz59RvfvObRVrhwnL+/Hl19epV9eKLL9Z9fDldG3NFhxwXkLVr17JmzZoZn9Pf38+qVatYuXIlHo+HQ4cO0dfXt0ArXFj6+vp49tlnAXj22Web9nM2Yi7f9aeffsozzzyDYRhs27aNZDLJ+Pj4Iq14fllO1/5c6OnpqRuxKLKcro25og3aEmNsbIyOjo7S7Y6ODsbGxhZxRfNHPB6nra0NgLa2NiYnJxs+99VXX+UXv/gFx44dW6jlzTtz+a7Hxsbo7Oyc8TnNwlyv/cuXL/Pzn/+c1157jYGBgYVc4pJiOV0bc0Xn0B4yr7zyChMTE9Puf/755+nt7Z31eFWni8IwjIextEVhpvNxL6/R3t5OPB7n17/+NWvWrKGnp+chrnJxmMt33WzXw0zM5bNu2rSJN998k0AgwKlTp/jd737Hn/70p4Va4pJiOV0bc0UbtIfMyy+//EDHd3R0MDo6Wro9Ojpa8mIeRWY6
H7FYjPHxcdra2hgfH6elpaXu89rb20vP7+3tpb+/vykM2ly+646ODkZGRmZ8TrMwl/MRCoVK/9+7dy9vv/02k5OTDa+dZmY5XRtzRYcclxjd3d0MDg4yNDREPp/n+PHj7N+/f7GXNS/s37+fDz/8EIAPP/ywrgebTqexbbv0/7Nnz7J+/foFXed8MZfvev/+/Xz00Ucopbh8+TKhUKhpN625nI+JiYmSZ9Lf34/rukSj0cVY7qKznK6NuaKVQhaQEydO8M477zA5OUk4HGbjxo289NJLjI2N8dZbb3H06FEATp06xZ///Gdc1+W5557jO9/5ziKvfH6YmprijTfeYGRkhM7OTl588UUikUjV+bh79y6///3vAXAch8OHDzfV+aj3XX/wwQcAfOMb30Apxdtvv81nn32Gz+fjhRdeaNju0QzMdj7++te/8sEHH2BZFj6fj+9///ts3759kVc9P/zxj3/kwoULTE1NEYvF+N73vkc+nweW57UxF7RB02g0Gk1ToEOOGo1Go2kKtEHTaDQaTVOgDZpGo9FomgJt0DQajUbTFGiDptFoNJqmQBs0jUaj0TQF2qBpNBqNpin4/32Wm7yOWVFoAAAAAElFTkSuQmCC\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "fig, ax = plt.subplots(figsize=(7, 7))\n",
+ "ax.scatter(X[Y == 0,0], X[Y == 0,1], alpha=0.3, marker='.')\n",
+ "_ = ax.scatter(X[Y == 1,0], X[Y == 1,1], alpha=0.3, marker='.')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Split into training and test sets\n",
+ "\n",
+ "We split the data into a training set with 80% of the samples and a test set with the remaining 20%."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "X_train, X_test, Y_train, Y_test = model_selection.train_test_split(\n",
+ " X, Y, test_size=0.2, random_state=10, shuffle=True)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Data Preprocessing\n",
+ "\n",
+ "The minimal preprocessing steps are:\n",
+ " 1. Convert from numpy arrays to torch tensors\n",
+ " 2. Convert data and labels to the same data-type (in this case float32)\n",
+ " 3. Reshape the labels so that they have two dimensions. This is not strictly necessary, but Falkon internally works with 2D tensors, so the output of the `Falkon.predict` method will always be 2D.\n",
+ " 4. Change the labels from 0, 1 to -1, 1. Note that Logistic Falkon uses the following formula for the logistic loss:\n",
+ " $$\\log(1 + e^{-y_1 y_2})$$\n",
+ " \n",
+ " where $y_1$ and $y_2$ are labels and predictions respectively, which only makes sense if the labels are -1, 1."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "X_train = torch.from_numpy(X_train).to(dtype=torch.float32)\n",
+ "X_test = torch.from_numpy(X_test).to(dtype=torch.float32)\n",
+ "Y_train = torch.from_numpy(Y_train).to(dtype=torch.float32).reshape(-1, 1)\n",
+ "Y_test = torch.from_numpy(Y_test).to(dtype=torch.float32).reshape(-1, 1)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "Y_train[Y_train == 0] = -1\n",
+ "Y_test[Y_test == 0] = -1"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Define the Falkon model\n",
+ "\n",
+        "We use the same base parameters for both models: a small amount of regularization ($10^{-7}$) and a Gaussian kernel with $\\sigma = 1$.\n",
+        "The number of inducing points is set to 1000, which is adequate for this problem since it is very easy."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def binary_loss(true, pred):\n",
+ " return torch.mean((true != torch.sign(pred)).to(torch.float32))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# We define some basic options to run on the CPU, and to disable keops\n",
+ "flk_opt = falkon.FalkonOptions(use_cpu=True, keops_active=\"no\")\n",
+ "\n",
+ "flk_kernel = falkon.kernels.GaussianKernel(1, opt=flk_opt)\n",
+ "\n",
+ "flk = falkon.Falkon(kernel=flk_kernel, penalty=1e-7, M=1000, options=flk_opt)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Define Logistic Falkon model\n",
+ "\n",
+ "The logistic Falkon estimator uses the same base parameters as Falkon.\n",
+ "\n",
+ "However, instead of specifying a single value for regularization, we need to specify a *regularization path*: a series of decreasing amounts of regularization. \n",
+ "For each regularization value we also need to specify the *number of iterations* of conjugate gradient descent, which will be performed for that specific regularization value.\n",
+ "\n",
+        "We validated empirically on a wide number of binary classification problems that a good scheme to set the regularization path is to use three short (i.e. 4 iterations) runs with decreasing regularization, and then a few longer (here we used 8 iterations) runs with the final regularization value (here `1e-7`, the same as for Falkon).\n",
+ "\n",
+ "The `LogisticFalkon` estimator also accepts a mandatory `loss` parameter, which should be set to an instance of the `LogisticLoss` class. While the `LogisticLoss` is the only implemented loss at the moment, the learning algorithm is defined for any *generalized self-concordant* loss, and we plan to extend the library to support more functions.\n",
+ "\n",
+ "An additional feature we show here is **error monitoring**: By passing an error function to the estimator (see parameter `error_fn`), the estimator will print the training error at every iteration (how often such prints occur is governed by the `error_every` parameter).\n",
+ "This can be very useful to determine if it is possible to stop training early, and in general to monitor the training process."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "logflk_opt = falkon.FalkonOptions(use_cpu=True, keops_active=\"no\")\n",
+ "\n",
+ "logflk_kernel = falkon.kernels.GaussianKernel(1, opt=logflk_opt)\n",
+ "logloss = falkon.gsc_losses.LogisticLoss(logflk_kernel)\n",
+ "\n",
+ "penalty_list = [1e-3, 1e-5, 1e-7, 1e-7, 1e-7]\n",
+ "iter_list = [4, 4, 4, 8, 8]\n",
+ "\n",
+ "logflk = falkon.LogisticFalkon(\n",
+ " kernel=logflk_kernel, penalty_list=penalty_list, iter_list=iter_list, M=1000, loss=logloss,\n",
+ " error_fn=binary_loss, error_every=1, options=logflk_opt)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Train both models\n",
+ "\n",
+        "Training Falkon for 20 iterations (default value) takes about half a second on a laptop.\n",
+ "\n",
+ "Clearly, since the logistic falkon runs about 28 iterations (the sum of values in `iter_list`), it is necessarily going to be slower. Further, logistic falkon needs to recompute part of the preconditioner at every step of the Newton method leading to further slowdowns.\n",
+ "On the same laptop, the logistic falkon algorithm takes around 1.5s."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "CPU times: user 2.35 s, sys: 59.3 ms, total: 2.41 s\n",
+ "Wall time: 604 ms\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "Falkon(M=1000, center_selection=, kernel=GaussianKernel(sigma=Parameter containing:\n",
+ "tensor([1.], dtype=torch.float64)), options=FalkonOptions(keops_acc_dtype='auto', keops_sum_scheme='auto', keops_active='no', keops_memory_slack=0.7, chol_force_in_core=False, chol_force_ooc=False, chol_par_blk_multiplier=2, pc_epsilon_32=1e-05, pc_epsilon_64=1e-13, cpu_preconditioner=False, cg_epsilon_32=1e-07, cg_epsilon_64=1e-15, cg_tolerance=1e-07, cg_full_gradient_every=10, cg_differential_convergence=False, debug=False, use_cpu=True, max_gpu_mem=inf, max_cpu_mem=inf, compute_arch_speed=False, no_single_kernel=True, min_cuda_pc_size_32=10000, min_cuda_pc_size_64=30000, min_cuda_iter_size_32=300000000, min_cuda_iter_size_64=900000000, never_store_kernel=False, store_kernel_d_threshold=1200, num_fmm_streams=2), penalty=1e-07)"
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "%%time\n",
+ "flk.fit(X_train, Y_train);"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Iteration 0 - penalty 1.000000e-03 - sub-iterations 4\n",
+ "Iteration 0 - Elapsed 0.27s - training loss 0.4697 - training error 0.1544 \n",
+ "Iteration 1 - penalty 1.000000e-05 - sub-iterations 4\n",
+ "Iteration 1 - Elapsed 0.51s - training loss 0.3774 - training error 0.1551 \n",
+ "Iteration 2 - penalty 1.000000e-07 - sub-iterations 4\n",
+ "Iteration 2 - Elapsed 0.74s - training loss 0.3575 - training error 0.1534 \n",
+ "Iteration 3 - penalty 1.000000e-07 - sub-iterations 8\n",
+ "Iteration 3 - Elapsed 1.12s - training loss 0.3554 - training error 0.1530 \n",
+ "Iteration 4 - penalty 1.000000e-07 - sub-iterations 8\n",
+ "Iteration 4 - Elapsed 1.50s - training loss 0.3554 - training error 0.1530 \n",
+ "CPU times: user 6.39 s, sys: 30.9 ms, total: 6.42 s\n",
+ "Wall time: 1.6 s\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "LogisticFalkon(M=1000, center_selection=, error_fn=, iter_list=[4, 4, 4, 8, 8], kernel=GaussianKernel(sigma=Parameter containing:\n",
+ "tensor([1.], dtype=torch.float64)), loss=LogisticLoss(kernel=GaussianKernel(sigma=Parameter containing:\n",
+ "tensor([1.], dtype=torch.float64))), options=FalkonOptions(keops_acc_dtype='auto', keops_sum_scheme='auto', keops_active='no', keops_memory_slack=0.7, chol_force_in_core=False, chol_force_ooc=False, chol_par_blk_multiplier=2, pc_epsilon_32=1e-05, pc_epsilon_64=1e-13, cpu_preconditioner=False, cg_epsilon_32=1e-07, cg_epsilon_64=1e-15, cg_tolerance=1e-07, cg_full_gradient_every=10, cg_differential_convergence=False, debug=False, use_cpu=True, max_gpu_mem=inf, max_cpu_mem=inf, compute_arch_speed=False, no_single_kernel=True, min_cuda_pc_size_32=10000, min_cuda_pc_size_64=30000, min_cuda_iter_size_32=300000000, min_cuda_iter_size_64=900000000, never_store_kernel=False, store_kernel_d_threshold=1200, num_fmm_streams=2), penalty_list=[0.001, 1e-05, 1e-07, 1e-07, 1e-07])"
+ ]
+ },
+ "execution_count": 11,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "%%time\n",
+ "logflk.fit(X_train, Y_train);"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Testing\n",
+ "\n",
+        "However, the price paid with a higher training time leads to a lower training error.\n",
+ "\n",
+ "We found that, on a variety of binary classification datasets, logistic falkon obtains a slightly lower error than\n",
+ "the vanilla Falkon algorithm."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Falkon model -- Error: 17.05%\n",
+ "Logistic Falkon model -- Error: 16.90%\n"
+ ]
+ }
+ ],
+ "source": [
+ "flk_pred = flk.predict(X_test)\n",
+ "flk_err = binary_loss(Y_test, flk_pred)\n",
+ "\n",
+ "logflk_pred = logflk.predict(X_test)\n",
+ "logflk_err = binary_loss(Y_test, logflk_pred)\n",
+ "\n",
+ "print(\"Falkon model -- Error: %.2f%%\" % (flk_err * 100))\n",
+ "print(\"Logistic Falkon model -- Error: %.2f%%\" % (logflk_err * 100))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Plot predictions\n",
+ "\n",
+        "In the plot we have the outer and inner circles which are correct predictions, and the points in the middle (marked with crosses) which are the mispredicted points.\n",
+ "Since we added lots of noise to the dataset, perfect predictions are not possible (there is no clear boundary between the two classes).\n",
+ "\n",
+ "Here the error difference between Falkon and Logistic Falkon is very hard to distinguish by eye. However, there may be other applications where the best possible classification error is desired."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def plot_predictions(preds, ax):\n",
+ " ax.scatter(X_test[((Y_test == -1) & (preds.sign() == Y_test)).reshape(-1), 0], \n",
+ " X_test[((Y_test == -1) & (preds.sign() == Y_test)).reshape(-1), 1], \n",
+ " alpha=0.3, marker='.', color='b', label=\"correct cls 0\")\n",
+ " ax.scatter(X_test[((Y_test == 1) & (preds.sign() == Y_test)).reshape(-1),0], \n",
+ " X_test[((Y_test == 1) & (preds.sign() == Y_test)).reshape(-1),1], \n",
+ " alpha=0.3, marker='.', color='r', label=\"correct cls 1\")\n",
+ "\n",
+ " ax.scatter(X_test[((Y_test == -1) & (preds.sign() != Y_test)).reshape(-1), 0], \n",
+ " X_test[((Y_test == -1) & (preds.sign() != Y_test)).reshape(-1), 1], \n",
+ " alpha=0.5, marker='x', color='b', label=\"wrong cls 0\")\n",
+ " ax.scatter(X_test[((Y_test == 1) & (preds.sign() != Y_test)).reshape(-1),0], \n",
+ " X_test[((Y_test == 1) & (preds.sign() != Y_test)).reshape(-1),1], \n",
+ " alpha=0.5, marker='x', color='r', label=\"wrong cls 1\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAwMAAAFACAYAAADgcaDIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOy9eXQU6X3v/Xmqq6u6pdaGBAIkQIDYmQEEAolNanZ7GM9kZrz7PXEW5yYTezzxOLGTc5Ob/eYmHns8ju0k1zfLjZP4ehkbe2yzS2ITICQQ+yJAgAQIJLR1q7urq6veP0rd6pa6JcGImQHqc86cQS11dVV11e9bz28Vpmma2NjY2NjY2NjY2Ng8cUjv9Q7Y2NjY2NjY2NjY2Lw32IsBGxsbGxsbGxsbmycUezFgY2NjY2NjY2Nj84RiLwZsbGxsbGxsbGxsnlDsxYCNjY2NjY2NjY3NE4q9GLCxsbGxsbGxsbF5QrEXAzY2cTQ3NyOE4MCBA0l/trGxsbF5dBkrm15ZWclv/uZvjtFe3T+f/vSn2bBhQ8qfbWzuB3sxYPNY8ulPfxohxJD/vve9773Xu2ZjY2Njk4R344F2ypQp3Lp1ixUrVozq7//yL/+SoqKiIa+/9dZbfPWrX33g/YguSgb/N3fu3Afepo3NgyK/1ztgY/OwWLNmDd///vcTXsvOzn5vdsbGxsbG5j3H4XAwceLEd7ydcePGjcHewLZt21i+fHnsZ1m2H8ts3n3syIDNY4uiKEycODHhv3/8x39k8eLFeDweJk6cyMc+9jFu3bp1X9v9X//rf5GTk0N1dTUAhw8fZu3atbjdbnJycvjEJz7BnTt3Yn//p3/6pxQXF7Nt2zbmzp1Leno6Xq+Xy5cvj+Xh2tjY2DzWXLhwgWeeeQaPx4PH4+HZZ5+lqakp4W/+67/+i5kzZ+JyuVi5ciVvv/32iKmff/3Xf82MGTNQVZXx48ezefNmAoEA//qv/8of//Efc+3atZjn/k//9E+B5GlC3/zmN5k/fz6qqjJhwgReeumlEY9p3LhxCRqVl5fH1atXeeGFF5g8eTJpaWk89dRT/Pu///t9natr164xb948PvKRjxAKhQiHw3z5y1+moKAARVGYP38+//mf/5nwHiEE3/rWt/j//r//j4yMDKZMmcLf/u3f3tfn2jya2IsBmyeOr3zlK5w6dYof//jHXL9+nY997GOjep9hGHzuc5/j61//OjU1NVRWVnL79m02bdpEYWEhR48e5Wc/+xmnT5/mxRdfTHjvrVu3+Pa3v81//Md/cOjQIbq6uvj1X//1h3F4NjY2No8dgUCATZs2EQwGqampoaamBp/Px5YtW9A0DYD6+no++clP8vGPf5zGxkb+4A/+gFdffXXY7b711lv8zd/8DV//+te5dOkSu3bt4gMf+AAAH/3oR/nSl75EYWEht27d4tatW3zxi19Mup3/8T/+B1/60pd4+eWXOXXqFNu3b2fx4sUPdKw+n4/169ezfft2Tp06xW/91m/xa7/2a1RVVY3q/Y2NjZSXl7Nx40a+973voaoqf/RHf8T//t//mzfeeIPTp0/zqU99ik996lPs2bMn4b1/9md/xtq1azlx4gS///u/z5e+9KVRf67NI4xpY/MY8qu/+qumw+Ew09PTY//NmDFjyN81NDSYgNnS0mKapmlevXrVBMz9+/cn/Lx7927zpZdeMufMmWM2NzfH3v/f//t/NwsKCsxQKBR77cSJEyZg1tTUmKZpmv/jf/wP0+FwmHfu3In9zX/913+ZQggzEAg8lOO3sbGxedT41V/9VXP9+vVJf/ed73zHdLvd5t27d2Ov3b5923S5XOa//du/maZpmp/4xCfM1atXJ7zv29/+dlKbHv35q1/9qjlr1ixT07Skn/sXf/EX5rRp04a8XlFRYf7Gb/yGaZqm6fP5TJfLZf7d3/3dqI81uh9utztBp77zne8k/fsPfehD5m/+5m/G
fh58rqI/792718zKyjL/+q//OvY7v99vKopifvOb30zY5vPPP296vd7Yz4D5uc99LuFv5syZY375y18e9XHZPJrYkQGbx5YVK1Zw4sSJ2H979uyhurqazZs3M2XKFDIyMli9ejVghVSH49d+7dc4deoUBw8eZNq0abHXz5w5Q1lZGYqixF5btGgRWVlZnDlzJvba5MmTGT9+fOzngoICTNNMSCeysbGxsUnOmTNnmD9/Pnl5ebHX8vPzmTNnTszWnj17lrKysoT3lZeXD7vdj3zkI4TDYaZNm8anP/1p/v3f/53e3t773rdgMMimTZvu630A//Iv/5KgUx/+8Ifp6+vjy1/+MgsWLGDcuHF4PB5+8YtfjKhTp06dYsuWLfzlX/4lf/iHfxh7vampCU3TWLt2bcLfV1RUJOgUMCSaUVBQQFtb230fl82jhb0YsHlscbvdFBcXx/6TJIkPfvCDFBUV8b3vfY9jx47x05/+FCAWZk7FM888w9WrV9m+ffuQ3wkhkr4n/vX4xUL87wzDuK9jsrGxsXlSSWZrTdNMeD2VPU5FQUEB58+f55//+Z+ZMGECf/EXf8GcOXO4cePGmOzfaD4/XqcyMzP5/d//fb773e/yJ3/yJ1RVVXHixAk++MEPjqhTU6dOZfHixXz3u9+lu7t7xP0bfO4guVbZOvX4Yy8GbJ4Y6urqCAQCvPHGG6xatYo5c+aM2uPxyU9+kn/7t3/j13/91/m3f/u32OsLFiygtrY2wUg3NjbS3d3NggULxvwYbGxsbJ5EFixYwJkzZ2hvb4+91tbWxsWLF2O2dv78+dTW1ia87/DhwyNuW1VVtmzZwt/+7d9y6tQp+vr6+MlPfgJYD8eRSGTY98+fPx+Xy8WOHTvu86iSs2/fPj75yU/y0Y9+lEWLFjFjxgwuXrw44vuysrLYtWsXDoeDDRs20NnZCUBxcTGqqlJTUzPkc2ydsgG7tajNE8SsWbMQQvD666/zyU9+ksbGRv78z/981O//2Mc+htPp5JOf/CSapvGZz3yGz372s3z961/n05/+NH/0R39EV1cXL7/8MqtXr2bNmjUP8WhsbGxsHj98Ph8nTpxIeM3lcvGJT3yCP//zP+ejH/0of/d3f4dpmnzxi1+koKCAj370owB84QtfoLS0lD/5kz/hU5/6FOfPn+f1118HUnvt/8//+T8YhsHy5cvJzs5mz5499Pb2Mn/+fACmT5/O7du3qa2tZdasWaSlpZGWlpawDY/Hw2uvvcaf/umf4na72bhxI4FAgF/84hcJ6TqjZc6cOWzbto0XX3wRj8fDV7/6VW7evEl+fv6I783MzGTHjh0888wzrFu3jt27d5Obm8srr7zCH//xHzN+/HgWL17MD37wA7Zt28auXbvue/9sHj/syIDNE8PTTz/NN77xDf7xH/+R+fPn85WvfIU33njjvrbx4osv8v3vf59XXnmFb33rW+Tn57Nz505aWlooLS1l69atLFy4kB/96EcP5yBsbGxsHmOOHDnCkiVLEv57/vnncbvd7Ny5E1VVWbt2LRUVFaSnp7N9+/ZYasvSpUv5j//4D/7jP/6Dp556iv/5P/8nf/mXfwlYC4pk5OTk8C//8i9UVlYyb948vvrVr/JP//RPrF+/HoDnn3+eD3/4wzzzzDOMHz8+ZavNv/iLv+Cv/uqvePPNN1m4cCGbNm2ioaHhgc7B1772NaZNm4bX62X9+vUUFBSMqk1pFI/Hwy9/+Utyc3Pxer3cuXOHv/qrv+Izn/kMr776KgsWLOC73/0u3/3ud2PHafNkI0zTNN/rnbCxsbGxsbGxGWv+7//9v/zar/0aHR0d9tBJG5sU2GlCNjY2NjY2No8FX/nKV/B6vYwbN466ujq+9KUv8eEPf9heCNjYDIO9GLCxsbGxsbF5LDh58iSvv/469+7dY8qUKXzqU5/iz/7sz97r3bKxeV9jpwnZ2NjY2NjY2NjYPKHYBcQ2NjY2NjY2NjY2Tyj2
YsDGxsbGxsbGxsbmCcVeDNjY2NjY2NjY2Ng8obzvC4hv3rz5Xu/COyIvLy9hYuLjjn28jzf28b6/mDx58nu9C+8bHnWtgPf/9TbW2Mf7eGMf7/uL4fTCjgzY2NjY2NjY2NjYPKHYiwEbGxsbGxsbGxubJxR7MWBjY2NjY2NjY2PzhGIvBmxsbGxsbGxsbGyeUOzFgI2NjY2NjY2Njc0Tir0YsLGxsbGxsbGxsXlCsRcDNjY2NjY2NjY2Nk8o9mLAxgbANIf/2cbGxsbGBmy9sHnssBcDNk88yqFDqNXVAwbdNFGrq1EOHbrvbek6dHUJdH1s99HGxsbG5r1nrPTC1gqb9xPv+wnENjYPFdNEhEIoDQ0AhCorLcPe0IBWUmIZfCFGtSldh127VAIBgdttsnFjCNm+w2xsbGweD8ZIL2ytsHm/YV9+Nk82QhCqrARAaWiIGXmtpMR6fZQLAQCfTxAICDweE59P4PMJsrPt8LGNjY3NY8EY6YWtFTbvN+w0IRubOAMfZbSGPT7U6/GYuN2WcXe7TTwe27jb2NjYPFaMgV64XLZW2Ly/sCMDNk8cum55Zjwe0wrN9ud8xqNWV49o4HUdfv5zQVubGgv1btwYStz2u7H/NjY2NjZjTlJbO0Z64fWGCAYfvh239cJmNNiXhs0TxZBczQ1B0g8M5HzG54CC5fHRIyKpMfX5BH4/Q0K92dlmzAM01gZY12HHDpXOTomcHIPNm+1cUxsbG5uxJmlev8NMqBEYrBf+1ZX4/FJSuz9YL4LBgdSgh/XAHgzC22+7ME3rc+3aBJtU2JeFzbvKe+2lGJKr6ZdQZRXf7KWI1RXIcSFgU1UJhkRKY+rxmKSnQ1tbYqj3YRaHdXcLjh5VkCQwDCgr08jNtUPMNjY2jx/vpV4kz+sHPYVe6LLKjp2ulI6ad1svdN1aCNTXK2RmGhQX63Ztgk1K7MWAzbvGw+6gMFg4kgnJ4Lx+l8vkbZ+XgAbu3fTvk4hFBN7+SWpjKsvwzDMmzc2hhM94mMVhdntrGxubJ4GHqRfJtGHwa8lqwHQdfplCLzruSRx9M7Wj5t3WC59PYJqQmWnQ0yMhBHZtgk1K7MWAzZgQNaTZ2an/5mE+JA8WDq83RFXVUCGRZRLy+mP7lDFon4QYlTGVZYaEeh9mcVh2tklpqUZXl0R2tmF7eWxsbB453ku9SLbIgOQLj8E1YF1dqfViNI6ad1MvPB5rW8XFOkLA1q1BO0XIJiX2pWHzjok3rvn5ghUrSOpteZjddgYLR1ubNCohSRYpiOb6348xTbYYeRjFYbIMW7Y8/CJlGxsbm4fBe60XyRYZwDvWi/tx1LwbepFsMWNjkwr78rB5x8QbV7+fmCFN5oF5WMZpsJHOzzeSCslw++RymUOiCaPd38ECE18cNtbEe5fGmve6psPGxubx5r3Wi1SLjGQpQcNFC5LpxWgdNe+WXjxMrQBbLx4n7K/P5h0Tb1zz8wdSaVKFeR+W0RssHMmEZLh9ioWAH2B/H8SL9aCGdCwMcKqcWXsqpo2NzcPkvdaLVNqQMiVo0P5EH7DfTb14Jzbf1gub0WB/dTbvmHjjWlTkoavLev3dGMI12EgNbus52DAPt0/R33V3C4QAl2v0+5tMYOJzQgeHgB/UkN5veDmZEU/Vbu5BcnRtz5CNjc398H7Ri2QtoOPt3Uj78zD0orvbqj2ILjqi+/ygD93x71VVkxUrtIRtD3d+xlovbK14f2N/JTZjQtSQxt/kDztncbSFYJD4UJ7qAVqWwesNxQxfVZU6xPAOZ9AGF4ft2qXi8wkuXpSZPVt/xw/e0eOIvq+722p7KkkkFYlk50fX4fvfd3PmjJPs7MQOScOJX7KCP9szZGNj8yC8H/QiWZMJGLDvAMuXa0MezuP3dyz1YscOlaNHFQBKSzW2bHlnWhE9lkBAkJZmUlur0NkpkZtrJLXVY6UX8cc83LZtrXh/YX8dNg+Vh5GzGDU2kcjQoi9IfC3a
lz/VQ/lggkHLMyPL0NMjaG2VKCgwYp6boV6W5PsYNcKWMZeQZWu/okYyEgFFuX8vmMtlYhjEvFFRb43PJ+jqErGWeMlEpKtLUF2tcuaMk44OCSChQ1JU3NraJPLzjaSeqfiCv7Hs9mF7jWxsbN5NvRjcZCKqFYGAQFFMhIBQKHGhMJix1IvOTgnJMst0dUnvWCtgQC/a2qwN5+YaCToUb3PHQi8GP/R//OMDx2drxfsb+1Q+4jxON8ZojiXe2CiKiaoOXwhmmqR8KE9mjFwuk4sXZXp6JNrbJUwTsrLMmMdqsJdl+vTEbhhR4r0mHo+BrluvuVxmgkCsXRsaNmw7+NirqlRMq5MdW7YE2b/fij4oismRI0qCeA323EQXD9nZBgALFoQTOiRFtz/Ye5Oq4G+swvq218jG5t3B1ovUTSaiWuHxmNy5Yz38TphgDPvwOpZ6kZNjYFimmexs4x1pRfTYo3rhdpuUlGgxGxu/7bHUi8EP/b29A8dna8X7G/s0PsKM5Y3xXovEaI9lsLFZuzaU4A2HxFAzkPShPJUxCgYFs2fraJrg5EknsgwdHRIdHdaCQlFM2tsHvCzxD8fxxIe8n302GEtLGrz/0dkHqc5JMs9NVpb1Xj1sfUZXl6C3V3CszklWdqLnZfC58HgSW6W6XAOfo+vJ2+ulKvgbq7D+WHqNbGxskmPrxVC9SKUV2dkGQjDiw+tY6sXmzSHKygbSku5HK6LnZDi9WL0qhEO20mXb2iR8vYy5Xgx+6M/IgK4uWyseBcbkNv7Wt75FQ0MDWVlZvP7660N+b5om//Iv/8Lx48dRVZWXX36ZGTNmjMVHP9GM1Y0xFiIxmiEyw5HsWAaHMXWdISHTVLmc8echaoQ+8IEgHR1WzmSqQq1IxBIEAI/HoLHRCcDVq+nMnauTlmaycWOQzEyFjg6JqVNTT3WM349ocdloPSTJvpP4906/tp+8eh/Bikrq6lR8vSDvqcEzTSayZFXCA3u0UM7nG1ovMTiUnSwcnargL9m5fhDejcJBm/cHtla8d9h6MVQvUmmFx2PZzFu3JBTF6hw0+L2j1QuvN4jTaUVwJ08eXi/iJxbfj10cjV7kn7D0Ytdu10PTi6EP/Rkpz/WDYGvFw2NMFgOVlZVs2bKFb37zm0l/f/z4cW7fvs2bb77JpUuX+M53vsNf//Vfj8VHP9GM1Y3xTkViuCEyo2XwsQwOY8YXe91vyDTqCYoW9F64IBMKCSRpoFALrAKuzk6JjAyDNWtCLF6sceiQSnq6SX29gixbOaSyTCy3M9X5SOUBGa2HJNV3snFjCF8v5NX7cDc2EAgIAtpm5t/YQ0a4nqyZi0jfEESWRcL+xIfKy8u1lJ9TURHC4WDIviUr+BsrHnbhoM37B1sr3jtsvRjZtsQ/DO/Zo3L4sML16w6mTo2wYsVAUW98h520NDOlXvT1CWprVSTJSu3cvNnE50s8F+9UK+Dh6IXPJ4YM2xyNXjyMuo8HOSc298eYnMr58+dz586dlL8/duwYa9euRQjB7Nmz8fv9dHZ2kpOTMxYf/8QyVjfGOxWJVDnlwzHYCA4+lvh8y/Z2iVu3pKQh02hrOCGsXM2RHq6jYVxJso67q0uKpdkcPqzEthkKWdu8ft1BQUGEtLSBFCPTBE0TTJhgEAwmHu9ovGajMZapvhNZhuwc0NdXojkg+1g9Ky+fQNcF9+YtZfILq5CdImFb0WN3u00OH1bo6hroKDH4c4Y7hw+ThykgNu8fbK1477D14v70orNTIhIRhMPWQ31Hh1XU63KZCR12ZszQOXBARdehqcnB3Ll6LCU1mncfTdUJBBKPaSy0AsZeL3w+QVOTVQsB8PzzwZhT7b3WC1srHg7vytd479498vLyYj/n5uZy794928CPAWNxY7xTkUiVU56KVEYw/lg8HqvYq7bWarWWkWEMKRbWddi+XaWuzvqb5cs1Nm9OHrKObu/OHYnOTolQCBQFFi/WOHJEob1d4vp1
B0VFEcJhyyCmp1vtO8ePFyxaFGbVKo2srMTc0sHHGy903d2J3SXuh4TvJN1I8NxgmugRQcdTXiYcaGD69Ai6DhNfHWrYo8fudpuxjhB5eQZ9fcnzRMfSsL/XecU2jx62VjxcbL0YvV5kZBhomuU8Mk2rjkCWTd5+25XQYScUEoDJ2bMy4bC1gPj8533o+sCU4sE59JC8e8/g+rfRMtZ6IQT09EhkZhqY5sCC7WF65m29eG95V065aQ692YUYehEC7N69m927dwPwN3/zNwnC8Cgiy/Ijfwyj4eMfh95eyMlxAMmPV9etvzFNcDis/MmeHlBVk2Ra/8EPRj3w4Pe72LjRMhIZGSDLGXR2QjgsSE+3riVNc6Xclq5DRoagqwtmzoSlS6GvDyorZerqBLNmQWurYNo0k/x8aGiAPXsEnZ2CSZMMnE6YMMEy5r298OEPQyAw9HizsyE/X9DTAy0tkJlp/f+ZZ0Zh4KJtguL+Ly7sg2AQc9Om2GvGL3dyqN7FhZMa026lMXUqzJkD2ScbMDdtQo9YXRzcbmsfPR7YssU6B7W1gkDARXo6FBZ6cLlS7070+7LOt/XaaK9nXYef/1zg90N6+iiP/33Ik3L/vl94krUCnpzr7VHQi5wcweLFoKqwahUIIWOaLjIyBJMnC1wuWLTI5IUXFH7jNwTXrklkZJhUVqpkZaWTk2NtZ8sWa5s5OeByDXy/Ua3w+yEvD86ezSAYHKW9jGpE9N8AQiBqat6RXvT2gsMBn/qUpZdgvZ6V5SE7e+QCZlsvHt3791053bm5ubS3t8d+7ujoSOnp2bBhAxs2bIj9HP++R5G8vLxH/hiiDF65J1/JJz/ewXnrQsDNm5a3JBQKkewURSLgcqm0tVl/F4mEEGLAs6Lr4HSq+P2Wp0eSNNraNEKhoYako0Nw/bqLnBwDXVe4d08nN9fA7Q4Riajcvi1YtMjqBS0EtLa6mDLFiSxL3LhhMH58mO7uAG+/PXhIjXW88edixQpobZXw+RSEMGlrEzQ3h4b1yCmHDiFCIUxFQWgaoYoK1Opq5HPncJ47R/jwYfwvv4zYWY1/z3Gk2yqF9yI0TVzO6dJK0jK3k/6Lanqv9bLXuYlAUOLiRZmZM3UuXx6Yr7BmTYjt261c1x/8IHXxXypv3Giv564uQVubisczuuN/v/J+v38nT578Xu/CmPIkawW8/6+3++Fx0QtZHtALpzOEYahMniwoKIAPfjDItWsSubkeDMNKI7p5M4Tf30coNNSGTpyYqBdLl1qDMCMRqKkZnb2MakWoshKlthYRDAJgqioiFML91lsj6oUrfTvqT2oQbT3sYDM+v8T58zKaZtXSLV+usX59CL9fUFur8OMfD18sbuuFxfv9/h1OL96VxcCyZcvYvn07q1at4tKlS6Slpdlh30eM0UxvvJ8Cp2QtQQczUkhSlmHLFqsdWyQCdXUK+/apsf0LBgWetAi6IbF7t8qFCzISBiXLNFas0HA6k39GMGh1/3E6TQoKIkiS1ZEi+hAdzf/0+UTs3GzfrtLVJZGdbbBlS4iCAiN2rCPm1ZomIhRCqa/HlGWEruNsbEToOqbDAYDz3Dnc/+t1DtWqnMlcxd1uFxNzA5yfvIGSzDA7zE2Errrou+zi8hSVRYvC9PRI9PZK9PQMzFeIr5cYLo0pVUFaMAjXrllDZoaLKthdH2weBFsrHg8eVb1wKRF8fQ4OHVK4dEkGw2DREo3Fi8NMnmzZvMHtN10uK6XIOm4Jj8egqkpl+XIt6VDMZHoBjM5eRrWioSEWEXC/9RYAgRdeiL02nF4s9oR5/dQzzLzmxrilIm+QUFUrHSpaDNzZKREMChwOK9ryIHrhcplcvQpOJ8NqBdh68X5gTBYDb7zxBmfPnqW3t5ff/u3f5iMf+Qi6rgOwadMmlixZQkNDA6+88gqKovDyyy+PxcfavIsMvtlv3pRibTpTDfGK9wQNvtlTdXZIVig23HYt74PV+i3eaL39tosl
Nd/Gr/s5sPYLnDip4EnT+UjT3zEjW2Gn8tkEYYovAN6zRyUYFMyZE2bu3DAXLjhRVQiF+ifv9oI7bcBgdXVawiJJYBhQVqaRl3cfufhCEKqsBECpr8fR3Izj1i0ikyYRKSoi8MILKA0N+LolNE1wc+E6Ou458H7Mx8YpfoSAX/7SxeWpGzBMQThkTcNsb5cwDJl796waiaysgUE73d3WRGawDHFUnKPn1uWyvq9796zzKsvWQuBrXxPcuePB4zF47TVfSiNvd32wSYatFU8Gj6JeLKr6Nj03g5z+0KtcaVZYvizI7B++gdmRxgl+hwsXBuxktONQtAPdvHlh8vMjXL8uk5trFTHrev8D/ljqRbxWxC0IYj8LMaJe9PQIjh5VaVlgvb5ACwMm3d2WTjidsHSpFtvf0eqFopjcuiX1t9E2ef11D7ouIcueYbUCbL14PzAmp/zVV18d9vdCCH7zN39zLD7K5j0i3jirqsnp004uXZK5dMlqzzl4JZ8sbDjSzT5SdwVdh+5uK2ypaYl/E79/QoAZMUg3/eSdqqGkT3Bl9u9Tceh1ZvZWI81aTcddyJtgJhTSArGR9ABXrzoIBgWHDqlkZhrk5Rn8wfKdBDo11MoKq0jLNMk4VM3sm9k0Fa4BrHB1V5d1nKMOdfYbeaWhgUhRkbUYKCpK+L3HYxncyaf30jttPcWzLA/NzZsSaWkGhmmdrJUrNRYuDJOebqIooGmW4EQ9Ohs3hmhttYrf4qMc0far0fNfXh7i9dcz0DTBjRsOPvKRPnp6BDk5Bh0dEpcuOZg3LxL7jkYjzDZPNrZWPBk8inqh6n0UN+8j75DBlYl/wJy33mDG1WquzapEmAY+nzSsVni9IVpbHZw9a6X9ZGYqPJNeha8ntV4YBvT2DhTnjspexmkFQgzoRLTerP/fUb3IP7EXbf465s6zohednQKXy6Czy0FmpsELLwTo6JAQwqoX6OsTrFqlxc7laPRCUUzCYTh3zonTaelqT49EYaGlTzdvSowbNzCLIFnKmK0X7y32+stmVMSv3HUd9u1TKS/XaG+XKC/XhhjrVGkmw93sw/Wvjhp+6yFUpqxMw+dLDFtG9y/awWH/mi+wLCCYdbmaL12tBkDduoofz/0il+oVLl22ciPjhSlqSzVNEAxa+ZPBIOTkmAT64HitScGNRq5dl5n/6irEzp2Mu3CMhcVlmJPDZGSa1B11ooWlEcPhCQbRYaJWV4Np4mhuBsBx9SoIgbu5mcALLxDyeln0y2qyvneYYllnz24vp0458fc5SEsz+K3f8qEqptVKTrcMu65DZqaZENqVZZKmMXV1iQTv3Y0bDnRdkJtr0NkpYRjWtm7flmhvlzhzxklLi8zGjSF0HbZtcxEMWn8f7cdtY2Pz5PEo6kVt5e8x7aZE6fm9/NmlajIzDQJb1/Cfod/Hd0zG4zF49tlg7PMHa0V6upVWevKkTGYm1B11ku9MrRd6fpgbLTLH6pycP8+IqVMxvUg3SD9QHduJqF5Epk3Dce1aTC/8a7yYkYOUnz7CU44wuraSr38jE59PwuUy+LVP9zJ1monLBbJs4HBYxzR+vBHrmhf9Lgfrhctl9tfFWROO79yR6OsTsQFspmml1t67B2lpBqdPO9H1gZSxHTtU2toc5OdHeOYZWyveD9hfgc2oiQ+Put3W6j83N9FwRIl6Je7csfIik3mCooY4OuFwuLzBqOHPyzO4dAnu3JFoaXHEPis+fItpGZy333ZxbNPvMf1bVTHPy5Xf/DxatYOyMo2ODokVKxKFKTvbZOlSjQMHFFTV5OxZZ38epdVC7vy09Xg8JuPO1eP+2yNIPXcQkyax4OWVTOsLoYdNmv/xIEqmQlPhmpQ9tBO8Wi6DZ53bURobMB0OIkVF6DNnIiIRpNZWRDgce19PuZd7DQppmQruo4eZcjVM2+J1dHY50MMw42IVuqyyy+eNNZzwepMb29JSLdZvG+DIESXBezd9egSPx1oIeDwG
06YZ/PEfm9TW+jlzxsm4cdZ31d0t2L1bZds2F4oCU6ZEYqFvGxubJ5P70QuXy8QwLG97VAviebf0ovnjn2f5t/eQk2MiSXD9Nz/P7D0Gsqyh61h1Bf3T5AdrxalTTubPD+PxWEMpQyHB+eLUepF9S+PoEZP5N/bg0134yktTLn5ietEHxVf2UG7Woy0rAdPE3dwMpom2dCmy243z/PnYOWies545KmQ1X6L3RxF8vVvJGWfQeU+Qf6KKzA6FvuUrqapSR60X6ekD7VIvXrSaU2RnG2RkQGurdY4nTDD4yEd6MU2Vvj4/Bw+qscXE9esSb73lJhKxHFYrVmhMnGhrxXuNvRh4gnnQvr6jze+Ldj4b3BkwfsJh1Jh40g02btLwekO0tUnkT4gk9EqOGv6+PsHy5Rrz5oWpr1cSwpbZ2Was00LXYi8SBiv3vY7i78IhgRiXxeR//TruOb9PIOAgN9cYYnxl2Uqz6emRWLvWWjCUlJgYhiAjw/IonU1bT9nZemSHCbqO0HXSD9YgV1Yi76lGb2mkpbAUtyv1IkjXiXm1so4fwpTPY7jdAITnzrX25dw5jOJiwOoUgRB4MuBuiZdAHxT59jK+7zhnTkPvtPVMObsX5VwDvtlLCWgDId14AYs///HhdZ9PEAqJBO+dxwOvveazvo98o78IGWbNsnJj79yRyMmx+lCHQgJFsbYdDg/9zgcfv50XamPz6PBO7tuR9ELXGfZhdIhezArjybC26fWGaLstyJ+YuN0H1YvKg6/jCnRBF9CvF545v08g5BiySBmsFV1dEuvWhcjIsIZZjqQXBasrmXNtH1kX6onMWYon3QASDecQvcgwSb9+iWCBE4eq4jxxgsDzz+M8fRq5qQl9/nwATJcLTwa40+DCVC/FBsy/fYTSeyoHQhtZ7d/FxOZ6jAkl+HqtbY9WL0pLtdjfz56ts3y5lYIKsHq1hmla5z8YFBQVQXv7wIyfnBwDISyNcLmsqHsgYM1pGMtrzub+sU/xE8poph8Ox0j5fdGHywkTjCEh3OjPmtafV9h0AKMvREfJKhqOR70f+1hSJhFZszL2eYO7OFy4YBU2CWF5luI7LeTpBmV7zzL16Da0zFyMP/wtXKdP4j6wnxeBm5/+PJ5MkfSYs7PNWKrMhAnGQGcij4keNjn1lUOYJlxtlnlq4UxMXUOpr7dyOIHCl0pwLV3F4gwtqagFAlYeraJYxWUTpSCuSB8ibCVTOk6cIhzQiagyeiiCM9OFCIXANJFlwcaNIcztNbhWOwkvXkjW9w6zpvUwd35qorxUQmRVBcbPBd3dJPWyJQuvx4tnvPfO5YJp04zYvjscgnDYmrYJxAx/bq7BlCkRQiFYtMiqVxjMO73mbGxs3n3G4r4dTi+i9ijVw2i8Xky+eJAiw8eFovV0dAga6p2Mb6jiXrrCU58rTUiFfDf0Il4rcnMNcnNNtmwZ+NyR9KIc6HpuKRO3DB0Alkov/FNn4TJqoa4ONB392GlkEcaI9OH4wU8xi4vQysqQJdi4IWhpxQonEa2EFVcOs7DtKOnpBuGlJeiVlbhCDBuVGawXQgx0PvJ4ElNQc3PNhP3OzxcsXTqQUmWaMGmSwfz5OvfuSWRlGaSlWe9JpZW2Vrw72Kf3CWW4fEt456vy4UK4LpfJxYvWqPM7bYLm2zqzOhq4+g/gm7+O+Tf2kHWhnuDcRTjjhqsMFpRoaNc0Lc/Sxo0h6O+04G5oYEFnPZEpOYT/229xd+l6Miq8ZAuBSE8nK0ek9F4n82RFxcP8ZQ2TrjXSvWApF4rWMz3vEHJdFY7mZquQSwj09ZVkJ9n24HNeURGyUpA+tAr9QJi2n5zA1XoDd8dNujMmc5mZXJ5QRprb4MOh2ljhWPqBapSLDWglJXSUVaK8dRJFMdE0we1FXuqqhw/5ejwmbpdVEBf9bmTH8N0sovs+eTI0NVnFZNGFXjAo2Lw5xOLFGjt3upCkge8jfjvd3VZNwuAJyGOJ7U2y
sRlbRtIKeGf33UhtJWN60S2Yei+EWduActVBtbOC6Rf3MvV2Hdcnl+Lrhey4LrQPqhfab/0W7cvW4+nXC9LSyR6XXCxSRT2i6Ucj6YUkCZxbK5KGUofTi5P/ZJJ55hjpty29uDNuMk3mTCKOOUw2ghTurUFfn6gVbSWVmN87SXa2gaYJ2pd68USGj8rEvp84vcjKMtm4IYjPLw2rFR6Pid8PbW1S/zA4oz/KIfjiF31cvy5RX2+1d41P3Ypuw+ezFl+prrl3iq0Vidin4Akg2UU/nAEeK09QMiOp65ZxKC7WURSr606v5kXt0Zlw7hgFN44hBNybY3lLUj6xQ6zAN1GkiHVa0JctxYiYfPXss/iPOSgp0dj08is4nBLV1SqqarJypZZy/+ONj2UsBa5shXtzlnJp2nrS0kB9diP68f3WG/r3Va2uttq/Ddp3j8ckTQkTvuMnLTudrKzoeREEKyrRf9TIOW0ms3pv0TNjOh0XHGwfv4WiCTob52qMa6hHPtaApkFgRQl6RQV5e6sJymas/Wfa/mo6/FvIzbMKnhO8bP0WP+3oIZ5VQrSXVVrerv7iZUVRkFetSno+otdLTw9kZBgEg2KIJ6m2VuXiRSeZmQbFxfqQgr6DBxVOn5ZxOq3uRmPdS9r2JtnYvHMG68VID+tjEWVO5YgYohdz1+I7r1PRe5jQziMIAdenl3K3xMvijOS2PMpo9OJas8T/OfUhZvRFSEsz2fA7r3DgkBv10P1qBRDVi9lLaSpaj3uM9MI0BdfnrSd75wkwZ1Ii3eSsVkx3t0Tjc6+xNriLKUcOk358eK3Iq6/i9iKv1TRiXIRAUBrQizgnXNrRQzzrDNJevg5PBpZeVFWR5nKhrVw55HzEXy/5+ZCbaySNPJw8qXDypJJUL1wukwsXrLbY48YlFm6PBbZWDOUJP/zHn1QXfdQAd3UN9ZCPVdRgsJGMz/1sarJqBSZMMDANk0t561l5+RjTp0fQwzDx1aFh08EkFSmzvytPPyFNMK1pL7sdmwGF8nKNEycUGhoUSkq0hKnuqTh0SCEUElRWhoisWcm8FSbXdgg86RGcVbuI6DqB554jVFlpPVT3pwsNNvAyOs+KXxIiiCpchNkIyGCauGqqKZqm47l9A39I0HviOkFRzBaxnbSStYhNFRjn67l82erwc2rxJp7duxN3YwOFL5XQvtRLztEqbr51AiXs4p6ksqDYj+dDqwAROy/RCceet7fhOt2I75VXUKtrYgPPALQkC4Lo9WKaJm+9JYZ4krq6rNcyMw16eqw2dfEPDd3dgvp6JTavYdmyoR1F4nkQr81oPJg2NjapSaUXqbQCxkYvUs0HSKYXfeMrCW+rQ1FMphfpZPy3oSmZyRhJL0wTgiEJz8FqmqVKiqZH2L4zjYsXnQ+kFUKAvnold4IKHhPWeYM4qw69Y70QmGxmB60FOp3HbnC3TyLTfRWlsIi1wV20l1Qinz48rFbk1VehHm+gtd6JuJROrxEi7VfWJpwTU1XRystxHj+O89w5HA4Ieb2oVVXWlON589DKy4eclOj10t0tyMz0sGPH0MjDSHrh91tpxi6XSSgk8PsTU8cGc796YWvFUOzFwGNK9OaIRBj2oq+rU4YY/ncSNdD1gR77qcKHscKj0hAzbhxEBIMEEGRPDyM5BOqNZpR/+hb+3/kdkKSUN/oQj1LUw91ghUVD/QW9K1sOQwR23tjMd77jQZKgpESLGezhiBbHNjRY/aQrK0McOOjqFwgTU3YSnjOH0OrVVhrP6tUIn896sI5ErH97PCDLCJ8PRyiAe4IH4fOh+3yY6em4tm/HeeYMAHnLCzjPXEwtzGSnYHnRQVD6cOy35gXoutXTefzxavpKVRwlVt5ntoDO5V5a6xRmpzvw3+tjafgI8oFwguhoJSWE1q7F2diIun8/clMTkaKi2ORjoWkMp3p79wpOnBjw5kQ9SVGPT3GxjhCwdWsw4buK5oxKEqiq9XOq6+RBvTYj
eTBtbGyGEm9fh3tISqYV8C7pxawwG+Qaxqt9AIhpGooqcF5vpuA/v2lpBam1AkanF1N2V7Pq24c5uB92XdrAzGKDZcseXCuqq1UaT1qOJ4cswPkO9KKrCxwOlKNHUerqKJqZxq3b07lmzsJJmLLVBnqgFueFOsJhc1it0NdXEggIgmdUnp7jx3O6nslGENlRkXBOME30efOsicZvvYXS0BBrZarPmzfs+Th6VCEYtPSirMwqOr4fvZAkYjMJurtFXCQ9kQfRC1srhmIvBh5D4m8ORTGR5eQtPlMZ/gSjmW4kdPWJdh5I1dv55z8XtLWpSW/K+BuwuGU/M7N7cUgm7m0/JvPOHSITJ6HPnoUDcJ49S/rX3yS4tJRf+tfFbvQNG0I4nQPbdDjiPUrC8mb0G/Zo/n4h8EzE5PJJA8lKdx+VcQfrmbhytR/R0039gXQajqWDJFmLidV+xNFe0DTU3btjXhM0DdHdjbpjByIUwnS7CW3ciOnxYLrdlsFXFAiFUA4cRD51CunaNfSp0zhxfQJ7Sn+XOTeq0B0qaR4fJRdO4ggFCawo4dSSzeTVVzGlpY60skWEKgbyTaOdhnx+CVFoYmRqKA0NMc9T/HnxvfIKclMTjlu3cNy6hVZejrZ0adJwdXxHC0juzRmpY0h2ttWFoqNDQlFMjh5VYn2nB18nD+q1GW2XKxsbG4vBD1Jr1oSSpnQMd08+bL3IOn6QiSLA+MUanm0/RorTCkitFRs3Wjn28eZsJL2IbKhkCXD+x05mTrf04r60ojIEkQgNB0waDjjAJVMSXUxEdOh9QL1wOFD27EVgotbUEMmfSH3zeK5PKuZC0QbmXNuDQ5UolQ6hXGtG2/ocp0pSawVCILZUcE92cb0XioKC+ecPIzfVA4laEfJ6re/jm9/EceuW9b3+7u9aryc5MbpObAZBQYH1WnR+zf3oRUmJxq1bDtrarNqC+AnQ8TyIXthaMRTpvd4Bm7En/uYIBER/666h9+1wq2NZhglnD1rDTeJaAeQcrWLShQN0dw+8JxiEa82C9naB3w9paSYd7VZO+WBKSjQWPR2iZL4P98njIAThOXMI3OjCrD6MWlND4PnnCRfPov3wdU7WWbnvHo/JyZNOtm93JXQmqK5WOXRIiW1fW7ky8YE2EkEvWcwhx5rYQgCs95mjcQboOq4d29la/z9RjhzB0dQEhkFlZQjJ7wO/HzMjAxEIILW1IQIBzIwMpK4upM5OTI8HEQggfD6QZUIbNxJauxaE4Ng/nGffW37Cs2YTychiR7CCv7/+Am5HiA9+fQVZz5fxD1e38r0rKwkuLkFfX8mGjRq3F62jd8FSpDSV+IOSZfCus3pBmwh+EdmMYQwcSkwM+sPA8ROOHc3NiWIxcPjs2qWyd6/KkSMKbjcUF+ssXqxRXm6lDkQXCdFQfzLPH8CGDSFcLpOeHom6OgW327o+fb6hubIP6rVJtg82NjbJGfwgFbWvg4tJR7onk+mFHjZx7aumqHl/wvtGoxdRb3DJkhCL5vooF0dxyKPXikBAsHevmmDnR6MXph5hT2AVN6avipnWUWsFICI6mwM/QTlyJKYXlav9CIGlAQ+oFwfPjGffz3Xks+eIpGewnS18+8pWrk8r5/de85P+wlr+d9Mmvnd9Lf5nnxtRKwBkp8DrDSEkQfOcdVy96ojpRai/sDp24kZJVC+OHlW4eFGmu9sa7FlZGWTu3HBMK2CorY5qRbyegJUylEor4MH1wtaKROzT8BgSf3MIYdmAZC0+h10dx7VdA2IpNy0/bsQxuRSB5UVqbxfU/FkDEb/GlVnrKCuHgwecLGjZTfMdB1mft9q96WGTHTtdsfHtpUs38fwicDU0ACaaK4N2YxyZHTruEydobpY5PP4juJ5eiytsjWyXJDh/3onHY8ZCsElz/6P/0HWUnbuoahhPY0uIkpcKqVyvx94HI3t9hM+H6Oxi982nLG9KXx+ip4fqPSqVFR5IT0e0tWG63Ri5
uRAOI926hZGdHQv1mm63FfqNnnQhEPc6CbgKOX4nnZO/SGdx+x3OR3SkPh9zFuRQXeNCUUyKpkfY2+wlQ/RRicaBAyoXLip4StZTXK4hSMzqiRXJpRuMq6tCE1Z7UMeNG3jefBPf5z5H+j/8A86zZ60BZ4WFEIngPHeOrC9+ke6vfCUhNSs+zay7W7Bypcns2SGOHFH4p3+yjqm0VEs6cXiw17GkROPsWSusc/26g+JiiQkThs5iGAuvjd0pwsZmZAZrhWkmn08y4j05SC/8qys5+8ZBxl1oRCpeSsXaIOkeRqUXADu2KxytUwEoXbaJ5xcbuI8fxzQGacXxEzRfS9QKn8/ab0liSMrOcHphhnUOvnGWxgvjWDHnPKtemU/1gfRRawUAvT6qjubEtin6+qh5y0/FC+nW5MoH0AsTQcgfoaFnPqfrC1iQe4tzF8JIvb0sjFyjes8yFDcUFensbV43aq2Afr0QJnNu7EHXBZpm6UX6t76FPm8epqKgHDuGfOkSpiRhTJqEdOsW6f/4jwCx6MBgvcjKslKAnnrKxOMJ8c1vevD5rOGVr73mw+VKPG2DtWL5co2+PsG9e9awuAMHFFavTt504p3qha0VFk/woT++xN8c0VHrw3l0kobU+ttYAlaqSX09wZCgpbCMrlIvZjds3+7iXoeg72oYr/soXILiT30Qd80OZip1tASsdm8Tzh0i3KHR2bXFck6YJjl11TTkpTO52UExTQiHxNWCciY3HeLej24SmDSdpjXrKU0LU7nCOpYPfSjIgQOWQa+vVxBiIPc/GcLnQwoGUDIVSgtbWLVIRnTLVK62DK2qmiMadyPdw+7WhRy/rVM6von1My6x09xE3Q+nAoW89OIzhK41Y8oyrp//HOfJk4hIhFB5OaH16xHBYCwHFLAWKLW1OJsuscW8hLnyeb5/8Sl2dc1m8rg+Xqo8hx7Ko6FBpmSFyed+p4v9O03q67NpOK5y44aDoiKdikotJt6xzkjL+8iI+EhzjmNc3X4KW+owXyqhd10FnjffRN2/32p353QiX7gAhkHw2WcJP/UUUlsbjtZW1Opq/Gu87NrtiqWZqaq1ELh4UcbjEfj9Kn19IuZo6uqSkoZmB3sd/X7rZEsSTJ0aYc2aEFOnGkkNcMrrchTYnSJsbEbHmGgFJOpFfT3GoQbGXZXpnreUK9PWUUiYqiqFG9elEfUira6W7CMGkmMzCEFXl0QgS9BX34oZjpDukGguLGf6jYOc/+FN2jJm0PRMolZ4PCYOh5UWNFq9kPw+XEaApfO6WV/UhNY9Ge9iH0TyUFVpVDUD1fV5NN6ZQdn4/WyYfIrdd5ZwZMcy5OvXWPXqfMxnniF0+TLoOuqePThPn0aEw8PqhXq4ls2OOqTs+XyvYwu/6HiGQvMGLz5zA0nTOX7YZOliP5/7nT727XfRUCtoqE/nRqtzWK0QPh8eNZ3iK/vJulBvdfB7dRXSfqtA2Hn2LHpxMcrhw0gdHeizZ6PPmIEciSDduYN87hwhr3dIWnJUL5qaZDIzBXfvuunpsdKEOjsl2tokpk0zEs7dYK2ILlp6eyUWLtQpKIhQXp66SPxB9cLWigGe0MN+/Im/OR541dxv4N3btlkPsVOLuLvQS8AnKLqwl7BDxfX0Kn4wYSM5msHT3UdZ8tMGXHdDtBQss9q9eUIYfSFcJxqY75C5FNnCgpbdTHccxdEpEz7bTK9ow58zibudKkGtkBm00NEKMy/tQVu0NiHPs7IyxLZtbnQdiooiVFRYhj1q5MrLLcOHrkMkgqkorClswnAqiKNtsZzMyg0bEc4BgxtfuJVwCpwyUtlinpp2j7KSLCInJbyZXVAnISLjEM5sTI8H109+glJbi3T3LpEZMxA9vTHDLrq6MBGQnWVFGjQNrawMqaODNZuncORfVKTubm7edVPfWgC3OimbchpvsA2xy2SzpnHi8kr0GTPRdatDRE2NmujtWhRA2bkLKRjgWdWNf2kGaWWL
0NdXxmoEAOTmZiKFhRi5uQjDQEQiKI2N+P/bfyNiSvSFXfT2SFa4PcMacrO2QsPvs85HTg74fKAqRiykHF+LEu9lcbnMhPzjyZMNli/X6Oy0JlFOnWptIFUB4f2SbLqz3SnCxmZ4xkQrAITAVBQczc0oU4uQZZML09ZRfHUvHo+DQMDL5AKDH4zfQI5uDujFnSAtU5bH9MJpBilsOclcHJydsoGFLbvIrf0Rvjt38fWYtGQUEEbhcrCQ8dpNgkHBzEt7CD29dlBNwP3rxaoFdyHUCqqCcuQIIhRis8uNVtnf+W04rRCgpkksemEy5cXL8EeWUHHiBPqNXlx6f+rP+DyUo0eROjqQT55ESBKmqg7ohcuF1NpKZEI+wu2K6UW4vAzvrA4OXh1H5Jbg5pksGi70gtPJihkNrNcuw26VzYbJyauLMWUZ3TFnRK1Q3G6WlHoIzl0UG3wWrRGQz51DCgSsSLZhgCQhdJ3Aiy8SCRv0iXQkHXx+MaxedHebKE5rIeDxGOTnDywEojZblhO1IjvbZOtWq51odLBlVpY5Zl58WyuGYi8GngAe2MtqWv2E0fVY4dCzS7YTiAgylQZqzRVc6RS8vGA3s6cGmXwthKLIzJyhkzsvjOqpRhMr+Zm2mfFCYXJzHX9ecASpyET1yEh6gMvMYFvkw2Q0nWLWtWoOpa9jb87HKA6eZsLFY0w8G0Z4VwMiNixG1+HWTYn2WwZf/1oaTy+OcOKEwpIlGlVVKi6njtf3S8v4mqaVo+90otbUWA/nPh+S34eZnW15XnbtsnI3+wu3Bie9ewO7OHBtHAeuu1g3343k91G5JMJORxo1NYKFBT6IRJC6u3G0trLv3tP0uOezSnVZnYKO1rG7ZQGOhcWU/tZsMAyEz0dkXC5VjRMRAorG93Dz7niu3vYwfXw33tJ7OO52Ypqw+84Sq9NPWKOoSEKWTerrlVj4uqREw7u4HbE3gJnhweHzoa4rJ5yZhYi6syQJ3yuvkPG1rwEQLrXC8QiB48YNIk8voe6Uh0hfmNshmZlNewiaKhMdIQrvnkMvLuZs+mZ6eqwUpA86dhAod+FbtjKWdzl4YqYZ1646mn+8YoUlvtEJx2PllUk6rdPuFGFjc1+8k4gchoFSVxfTipkziiho+jtcUpiQvBTFaZBRX8vn5gTI/fhq5v/wAIrTwRz5MjmlK3Cs16C/1mlckUJZ8xE+Jh0gs6UZIYHrQ6s455tDoL6J8Sdr2GlsoHncx1nkOM2cu3VIJ3VqXV5WrgoDjJleSD5fbAEwrFYAK5f3oezcxaEfjiOIpRfriy5gut3sqM9jwoUgSwMBjMxMHNeuIQyDmmAZPep8VjlkMl5/HdHrY1fncoyP/wory1zWeOBeH7tvP41QFWYal2hjAlfbs5m6NIv1xbWQ4UG03WHvxemx9tHTC/twyGpSrZCqAhjpHiS/D6O8HGdWluWwgljRcMjrJeOrXyW8bBmitnZgqKascvyYQcCQcTceZPbvrqT4yh4CkeR6kZVp8Gcrf0Z3yI37A2WxFKH41rEXL8rMnKlbdW9xtSrR1Kyx1AtbK5JjLwZsgCR5c6aJvKca48hxfM+9gMMB7rfeIusfvoln0iR8z71Aa2gddILLDFJ04IeWhyVzPs7rzVB/nbPlHyZ3LnTcc2Au8eKoa0CWYebMCKFlJShHjjC1soD/3DmDgJnJDVnm2vgVKJvK+a+jm1jeuZs5VyRmmAKB5c05flzhhed8OOvr+fu35/G9fxnHoTkqv/2y1XLu+HGFpbN9EPAhX25C6ukBSSK4detAZ4a4nEzh81nGvX+RILq7weFIaO9GX4CQ00P9uSz0uXOp9Aaoqs+jodFNZY6VSgQggkH0SZPpE5M4El6KvltjS5dVb1B3dzolHX7Ut3+OCPRhItiR/iINjW5K5tzDGWyluSObm3czQAj2Hh2Hd3GYqjMF1DdlsXTOPVa9OpHqA1ZnheZmB0VFkVgHi0MHcjGuFLN+RhNm
mhsj3UN1jWtgsJppdaEA67uNtoeLFBVZUZ8f/Ihx5jzcss746yeYURgAp4zL1BFBGeeJej640KSn7MNk1u7Ac7oBpaQEV64RyxmNdpDIyrK6V4E1cKajQ6KrS3D8eGJrwrHs9RzdVlqaSXu7xMaNQVSVJz4P1MbmYTBYL/Swibl9H6amE1qzBqHrOJubcd26RWjNGoJrKxA7QdaDFNw8xsxtx3A4TLhyhb6LbXT/ooHxG8ro6HTQ3uHgtHsTm7oakIsFwikTeOYZCId5yu/jG3vmkZPm5GhoJWJZOXulDXTe2YV22MWcRVKs3nWs9WIkrYABPQnKcXqxLk4v1jgxnArqwQPgcBAunkWvsZQjkVL0X9zj2V4fu3rKOXY9n4U37qLe3gt9fey9Po8j7hWUzOvBqbXQ3J7FzfYMjBYXu92W3d9z8ymO3ZlA6ZTreEvuskMpov6EGKIVRDzsvzYdzR/BWxKxuhYhhgziVKurE7QiOjWZQ8cYd9FATnOiB3TS36ynXGgEUXCZYURQxlF/DO9cgbrlRZx7d+BubMBdUkJINYFEvZBl8PkkVHVgyFx+vkFVlfpQ9MLWiuQ8wYf++DLEUI8QWkuWN6frguN1GTgiZdwJeikv15h6zCoOQ5ZpX7aOUJXEhPERws2CsA5KXMvPiCG4dFFm2z+n4/cLJp2owtNlICZbHmOpu5tw0XT2XJuLZERom7WCvUYFhiGRfdi6wVsWeFn5YhDJsPIbVTmXkhLwPtWGp+aHNFKJZCyg4/oUjtcrCIfV8rNitUBsE0g9PRiZmVZxWzBIaOPGISHehPZtqopSW4vQtIT2bqRZ3h30mRw5OY3689mgaSx9up1N6/Lp6JatKZbHj2OqKhvMywQXB6k/P44TTetx3G6ldMJlKhZEUE6dxtHaCppGxsLZLCldi4mbY62FfHjxGUxV4aRayhFfLmFHCNesThbNdrPq2YkIp0xFRYjGRme0DhmwvF8AxylHV+ZRsUFQXeOi4YhByQoT0zBx7d2DcuQI2vLl4HDg7jfwWkkJLFmC660fM/7mRSIRQX7vTTK7JMwJ4wm88EKseNz/7W04/nkHt8ZNovAlq2d1dCEQ8/BccDB7ToScHANdh8O1ThACRVFjHp6oIR+rXs+6bv3ncJjU1loesOxsJWlRs42NTSLvVC+83hC7d7vIaMggSy1j7m+tJONrX0OZWgSAVlqKr89BSJPwr6xA+/EJnDV7EVMmYc6bT2u4iLYbBu1fP8SVWeuo2qtQ3LQbKVvD6TCITJliPYRrYfZen8uFzEJupH+IDp8bcdEkM9OkzbGZyrUa3jXdSN39XnzVKhYeK70YSSuifzOsXmzM596Vchyd96wuQj4f60ra0XIE9fUTOXXuIwhNY1lhCxVcwv3TX4CikC47Kdm6BNOdnqgV7hUcDZYTUefhXCqxeGoPqyoL0cYvpkLSaTxtJmhFdbVKRQX4niql4YhAU0wqzSA1b4dpOJ9OyTJ9QC8OH8ZMSyNSVIReXIzQdas2zggjuV3ofWHGdTWTs/sMRk4O0vLlaMuW4V+5ljtf+Dbph39GYO85Cgv70JbFtfuO6kUvXLwkU1ys4/EYhIIml69YDxGDi9nHUi9cLpNwGA4eVJAkWyuiPOGH/+jxIIZ68Ap7pD69XV2Wl6C+Zx2e9AjBI4Kco1WE78rMnFGE5BDk1VcxvTmdSF8Yh0dFe+kFHI0NcPkyOBxk/vZzTDruZNs5J8vv7WBBy05ynp5IwdZCwqqCs+E4J67lUh/MpGTOPZ5aoqH/ROH8eQeTJ+uYJhROMdECRiy/0et2E9ywEbpMdl+bizANVo07Q42ZS/NlJ0WzomFFmeDWrdbBRSJWLqvLZRnr7OzEg+9v3yZ8PitlaN++Ac+Pz0olCm3ciOjoYH24mvod2UgdHRCJsDX8NlLOeli+3NrOypWWoOTkULHJQX2TRKS4mEhhIWW/Mx0toqOcbLS+JEVhTcElAsvmc+jcBBa9VMjq
pS5Mj4fVcojqnYLsbT+kIus4kXQP/g++hinLVFer6LrguecCVFZa3+3x41bIe8lSnfrj46g/b+C4fJmywha82l00zYt86pRVPKyFIdNF4IUXADBkJ/pTC8EwmPiznxOYUEhG/Q2MCfmxcD9CEAgK3B03iUyahh6G9qVeskXi9TPn1n6mGRr5paspKDTp7oK8Y3tJH6dwSaxBCBIM+UhdIEaTHxp/vYfDMGOGNaU02obuSc3/tLG53/vnQfWitdVqFSxJlei9Bs/87U4KW2Rk2WTmjCJr4nm6wfRr+4n4Ne7NLQXnOSJFRYgrlyl46Ve4Vw8nL3rYeSaNsq4drDV+zIKnM4g8vYSIMqAXxwKZzJjsZ9JUE6caYP9+FUky0XWJebNDqLssrTDdblZu3IjpkKF9lHphGNaiIy1tYAEQrxej0IrY36TSi8xKmD8fY9w4TLfbSsnZupVKVaeh0Y22ciXC56Psd4uQdmwHRQFdZ83E0/Su2sKhSxmDtCJAdbWKIpxsOvhXiF4f5hUPvV94jZrD6YTDA1pRXW2lDAFUrAuBQ6XhmEzjD1sQus6KOedYtXo+wsDSC6wW1eHFiwmtW4daU4MhOXD0dDOlNAvpUB2KXAg7TyMMA8e1a4Reew3z5/swgjqKpNOrmWgahOPat/p8gnEnDzJVDsKs9SxfEea5DwUI/bSGM8KDL2sV3d3ioelFVZVKMGjpxZo1GqGQrRVgLwYeKUZT+T7YULe1SbG/j6ZrZGebCTfN4BW3wIyNCr97R2Jp+y6KPXXcKCzF9ZlV5J+owl1fzypJJoyOubAEh2SFE0V7O+TlITB5+ncWob4SRAtLXM+YT8VHM5CCfkIry63waqabRQUTWb1hPLX7+iieYXV8kGUoLIwgyyZqpM8y7nFGt6pxIo2uSspmVGM6ZC51a7TcUYEQVbtlvBt0hMtFcOtWXG+/Hat9SJbjCQwYfV1PmkoEoOzbz75fRJDu3UWEQuB0svvmU2y92259Rn8hWHDLFoysbKoPpFtvlCRIS6Pq1GQqV/vRystRamvB6cTIy4MMDytXWq3uiFj5qabHw7o5rWRkHcfIycHR2YnU1saB1lmcOyfHOmKYYR3hD+NSMnG5TMrKNI4fVyCkIXQdb+k9JH8AcbuNyLRpVN+YiVYfYvWnxkOGByPdw8FvXsK1/yzeSC3m5ImktzQjJMstE5k0KTZ1UrrazK3cyfhyZiCbGnn1VQQrKvH5JVwuE7fLQOvRmHqzjsImDX1aJfknqgnfbaRFLcWTbuBdp+H3i4S21alylEfb5SH+eu/uFqSnm7H3PMn5nzZPNg9y/yTTi/Z2gcuVmEKRoBeuuNbApsnc63uYII7RvWgpF4rWk6vsYFxDPZgmpQtBHGnAiYwxvSimF2r9MeZ89nP89CuZqJcsvcidk4W0ugQR8BMqH9CLxQUTcXqcaL1dGC43TU0yDofl8zl/0uQDRVbdVOwBPSt7qF50BWm5lQuyoKpKtXrtyzJmWtrABPZUjEIrIIVetCzg2eoaXK2tGDk5hLxezOxsTIfl5InfftVplc3jcolMmQLhMNrKlZCTnVQrKivBcf0a0g4fxrgcpM5OHHfaaGrKxek0qaiwBp5VPN1F4/HJNDXJeL0hKipCNBwyIKxjqgrrZzSh+adimgzoRXeI1dPzEPfuEVxRZulFSMJrVoHTieNGC2ZWFqYk4bh1i+xXXiE8tYi7Lhcd2dNJVwSKAuaeatqXevFkWDVnbilI1rl65hhQ8CurSD9QjXy9gWvOFdzuta4xrzf00PRiwgSDK1egszNxGNqTjL0YeIQYTc7c4Af73FyDcNjKjZQkyMhQcDhA0xJvmuiKO/vEQUIHNdyuTRQX68yYblK2v5E+I83q9pChxVrImYqCFArhfust/BfauJ09mcIvfZFATw/OY/VUfbsTJbyBwPhCbudPoqb2Gt4VOmZGBqHKSmZ5BcVhHXVXFWbDeERLIS+8WIh3gx7zYoQcaRgu
t2X43G7I8KCmSTz9kWkE/FbO/fML/ciXqjl3LZOTP3IjRCGVFUEcbW2WNzwrK9F7k4o4z09Ct4heH3vPTaWhR2F5zmnWTznH7ltPU3d7JsplhfKiC5Dp6Z8UKcd6U0cf2qv3yDQcEhBxUfnBZ9BWrbKiFVlZA7mmkUGFzGvWYHg8SJ2dGB4PkQn5hK4IgkErF//QfgfH/99N9KDB0jnnWLF0Pm++mc2xOpm8DJmZDgdVdeOoWBxh74Ui0pvvEfaFaKh34rh6lA1TzrJD+gD1zRP5UM/PUPoOEZkyhcjMmdbsAUnCdDpRDxywzqNhkPk7v0vG5g8jdv4Q9XgDDcedNM1YjzsNvOtC+EpXkbY/jFlbT/rxBiQJJv1KCaE5q1k40WoLd/SoktJgx3t2RpsfGn+9RwUkGLR7Rts82TzI/TNYLwCam9OZM0fH4xm4X6N6YeytRTWDtDu8LF2q0dMtWH23EUlycaLIsgtiQwWax8RUVfSyMjxnTqLu389tZyHduTOZt+5p6Onh/NcP0npkKYqpExhfyPkZm5jSfg5zXE4SvfglexvHc6x1QC+qqlSO17vYE1c3ZXo8sQ4/T39kGoGeX+F4vcxLs88gIhc4c3cix+vnWA0OnupA6urCyMuzZsk8qFZASr04dqsIxemkbIULKRiwHvz7FwIxvVjtp2a7Sf3xbFjyPN5XyhDSCFqxcSNGfv4QvSgu1qmvV6jZK5N+8jh158dhiiDFHy4kEoFvfMPDsZMyEwKZTM/rYs+VYsq3eti330V68/Q4vahLohcHE/VCCJRjx6yJ9m1tzFq2nK75pYz/1Iv4fvRDWn7YSMthhbslXjZu0pj/6ir0X+hMOXEE6W+OILsgtKSEgsWrmYwWu1Yfll4EAoLSUo3yco2sLFsrwF4MvG9JFvIaTc5csr7RVkhMsGZNiM5O62Fy8BAyWbZW7CfrDMZdaGTKLJmCX1/NxMYq1CIfXXNKmLIxhDVqXgxMbTRNXNt+Sm/2ZC5FZnKGLSypuMtPfuDg+DWZmSuCfGHNAarOTeHY1cmY6ems2kysy43kHzQLYJkLRHasF7SqSmiVG61irX4XQdQ7UnsgnRVpx9nQdxK57xLrP1ROVX0ekp6Na3cVwudDvngRffZsK99zkPcmKclSiTI8KBlOSko01s5z4fvAH1IWCBI8lIFakIu4ch5x61bM06+qZmwhICI6m7VfopjjUU45EJVPYeblDfnYIcVpuo7vtdeQ2tow8vMRLlfsnNTXK1y76OD2+YlUzmth3dQLfOVrS3h7t4s88w7T3TeRXVBrrOD7O9JBknj+2VK8My9D6Dp1J4upay/GyM5m2dQbLKEJfUIRkcJCRDhMeN485CtXrKFkkyYh3buHMW4cDgdk5ED7+kprsvVZF54MYtfRkaMKR09s5dmGUxQURJgxI8IvIpsJNEi43SalpVpKg50sXWE0+aHJQsfRQUk2Nk8CD6oVMLxelJRYkUZZZkjanewwkaWg9ZBXp+BY7OWlzB3kFvkILV5CxrIQngyQZZEw4VcrLcXR1ES3WcSt2zLXPv2HpB3cRvX+NDAMXljTgvD7qb+cjxSajnd5p7Wjg/RCzVIoFQN64fVaXWckeQna/IIE7/7KlRpmWOfY319ghaOPzcGdaOXlrO+7zg4lH0V2oR6uRb50CS5dQlu+/MG1AlLrxf40XK1pyC21RPLzBxYrUb1Y7ce1exebtQAyxUjqEhifx+BvbohW9C9cUulFw0HB9aMzudmdQeWsa1QsaefNbxTy9tuWXkwr7MLpMKnVV/CDP1AAcX96MXeuNdF+0iQwDKR79xCqE+cza5GdgvalXloOKyiZCoGg1H+twh7HJvLrrHSkyZMjNMzdjLZPGlXB8FjqhY2FfSreh8QXZAoBW7cG6U95H9WFHA2ldXXFh8TMWH930yTpTePzSzTNWM8cCXLP1TPpn47gcoG2rARnZSV6RMT1hLcWAmp1NcbUKeRNDOO/2EvDf+zg
q1UVtMhbKV5+gS9UHkKYgnXzb2BmZODSQwNtPRko4F1T2IThcqNlLI61o4y2FTPDoB49muAJEbLMqqc6UO40YarZCC2E404b3hKT8DIfYl8AMysLffZstOXLMQoKiFbHpeoTnRJZpvRzT0Gvj1BGCf25VVR8UGdC7S8INjZCXx/a2rXAwGIlOoJeCgb6U3Z8hHzTkwqI6fFgqirSnTsYOTmx/TOmTYv9TfScNDQoTJthIl+8iXHtFl/9r2KuTEljwSw/RaGbONNkwn06R46n0+VTeeaZABXrdNDyWff0ERpOTLciE2lprPzyEoI7nrHa10kSGAZmRgYiEEDq7SXw3HOENmxArapCOX4ckZUFJSWILRXck10EoqllAjrvCZ66uRshrMmWPp9gfEMV90q9+PzW0J5UBnuw4Q8GxaiN9jtqh4g9gdLm0WVwm8QVK7SYc+d+75/BemFF2KxGAB7PoAcsMfCQN/VmHRObj5ExXUdfaTUWyBbWvll6QaxDndA0jKlTmR7yITSFhv/axf8Nf5DIRMFHp+1nw/QmzIiBEKBkZyCF2xK89IP1IuRZjGDANqLrKLuPDmn/Kfl9rJl2FWOOG1FrIN+4TmTyZCo2CyR/B2LvwOwXbcWKhKFfY6YXHwgy4Vs7CV2+YeXXr18PnoE0Uam7/yE/w2Ol7Dw1FZPsIZuPFTJ3dw/UwwG4XMn14qibmeIyUiSHyJ1OvvadeVy/KcfphYLWp3P0qJMun2Lpxdogoicdb+k9Ghrd1ucm04tIBLm52dKKF18k+OyzqDU1KPX1Vve6l17CkwF3S7wEglLM7vt6IftIVayw2eeTyDlahX+1pRUjFQzbejH2PEaH8ugTP9Lb57Mm+PX0WJ7855+3BnDcz0Xo8VjTAO/ckZg7N8y6dSFyc82U2/F4TNxpcKFoPSsvHUOxIsWE+hcCCTl5G4Kk7a9GOd6AtnAhzro6ZugXadqrkVMchrINvPz5CWj+dVQdzcFztsHqsJDmJhTvdRkUbj10NI1QSAwsBEyo2W7iOTuOVUu6EzwhpsczMBzGMDAVBW2d1/LYRPM5PZ6EhcBIfaJTIsuQk53wkuT3QXu71R1I161uPWvXYubmDgyy0XXrId8fl1uaSmCi3qwUOavRCZJghYqnzTBo6pjP9HGdTJ0U5NUvBqj9Wjv1Z7M4dH06xjiZOXPCvPKKD8nQUfZWsfdCEUZ+Pvq0aZCWRvWREN6KSoQkMNLScVVXIXp6kNraiORPhFAI0dFBaM0aEAKXywVCIDsTHzYwTeZf30W4tZGmiaWkv7CWGc7tFP6kDoBIiZesLDOlwU5m+Acb7YdhhO0JlDaPKvHtGTMyrE5a0RzoB0mXS6YXWVlmyu1EH/ImNh9Dlk0UBfyDOsYM0Yv6Y5imiQiHmaFfpakWni7Q6VyxjtWvzifkm0rV0RzSPQ2sKWwamo8fpxcHT+USOiDHtAKG1wrT3Z9uKsvWw2zEarcZe7gOBDBycwecNWOsF447bYjeXqS+PsStW7jfeovAJz5htbAWWA/1hoHo7k6MZA/WC1km5PWOWA8X0wtdx8zNZdp4icttM5gS1pk6VebVz/Ym14uXu3Dv2QV9AfZcmoFWusR68FcUaqp7qHhmKyIYBLcLdc8e5IsXMWQZU5JA09AWL7YGfqqqpRUybNykDdhuh0lefTVFtxupzi7n/LT1vJC+nRlX62its7RipIJhWy/GnsfkMB594i8yh8PE77fGsGdnW578ri5BXV3q/Llk2+vuFoRCcO6cjNMJmZkmmzeHUq6MZRk2bghibq8he2YEyVqHoFZX07XYm7AS31vlouCShyVLStBnTMe5t5r/6X8V3Wil967CrWtyfxFtOsdPK5QsKiW0dDpkJPGw9IdbTRNCIREbkBLLnzybyQrJDb2tsVzQ6Pu00lLUAwcw3W6c588T8vsxc3OT5nOOpk/0/WB6PFihE83q+uB0DjzIxwuJohCqqLDyPiGpwEQnTRoTJiSt
b4ga9mhuacWqAN/6PZmbXR5wOJjiVNi332Tj/FZ+sH/AO1RUFLEmUC7qoqphPHW3p1I64zqrfjeTqnoHjT9sQTncgjPNgf/pUio3bMRxsxXThD3Nc/BsO0V513aM3FxCGzZgTpoE7e2x6yV6HTn215J36yzXliyBpyvYsCGIUgvTFzgZN0dCWjcwSj7ltZfC8Eev5dpaZUitSzLuRwTGctaBjc27RXz0+Px5mQkTrIngublW+ufbb7uQJEb9wDKcXqS6H2SHybPKDsR0HUWxnhfV6mpClZX4fFJSvSiZOxepqwup6Qr/q/230SM99HSo3L4mU7U/US+Cw+iFkZVNSJcHtCJam3XCwQqnCr2+IVoR2rgR6cYNnCdP4mhrw3HrFtrq1e+aXhj5+dZAsO5uzIwM699RO6/r1oDP/pByyOsd1oElgkGrrmtwN6N+EvRiRYBNi8/wxrb53PRlE7mVRtF0g337XUn1Yt8Ok02hAHuvzbFm3DynUbFFcPCN49RvG8eFXTDr2SK8G3S0FSsQ9zrZfespXKf8rA71N9NwudD6awv7T3/cdSSQm5qsKcnr1rIwTWfesysZ9w/HKJDPom1aFTu9wz6rjIFe3O+C4XHWC3sx8D6hq0vQ0WE9/NfVKUybpqMoBtOnW8VbQgwdmx39d7KbYdculY4OidOnZVTVMtSdndLwF69pkr6/CuXi8VhfYLWqCqWhgbwIuF2b8fms7jGSGWF710ruRAwqJvj5u4svUdM8k/VzHCz4jcX8+OfwzW96mDQpwgsvBKj06iCyhz0H0bCmqUdoOGDS+L27iEiE5bPPseqzs9FCU4caYofD8kD0p7jEHsaT5HOOpk90UlJ58mUZ8yMfIdhrjZKP9yoNFhIcDsuId3UlzfdM2Lck3SliuaWLAlQuaqd6Xx7BabNYPSXIspUZaBGd+gOCn9WvprUvh4KsbqYucSDrUF/ngkgenvSrlE62BtJoGYvxLm1HOdyCkqGg9Wo0HBHgSKdydQH7rs2h/lQ6K4BIVjZSIIDw+1NeN8EuDSkYJGuqwbWwgJ01KE3H0ZYsQfGWER1uORzJDH/8tXzpkkxZmTZs29DhPDfvJLfaxub9RLxeaJrVcSU6STUaUR38wJLqweeB9KLfI600HkdbWYI/TisAPKsrY/dVgl7MDLMp++e82TCb6pvz2LAqyIKtk/jxT+9PL2JpQUB9nUzDAR1Hyw3KprRSuaANrXxtrNVnDFnGzMgg5uXqP47Y78ZCL4ZLK3K5MP/8zwn+0z9hKorVhWfw8Mtos4tgENPlSlkfcF96UdLOzroPEpgqs2quk9IVATRNpNSLY2cykR3FqBEfS+dEWLVlIpLfZxVmizmcu5zG8aMCIatUrs5m9+2nqb84jmWFAfD5MfMnIAIBS/eSYZr0FszCOHKChTf3cmXWOhx79uEwdJxPFWM4rMFkI/FO9WIkL/+Tphf2YuB9gK7DkSMKly7J9HchY/Jkg4wMk+XLNQoKDCAu39pl4HKZAxeyy2DjpgHPa3T1mpdn4HBATw94PJCTY7XQiuZyRiLWDRWtpnccqEU/fp7IU4utgVL9GC4XUprKxsr+UJ8rTNreXbjN8dT9qJCGEzO5UVjJykWdfPlvV9Hh93H8TASwth8t7hp80MmMZm0NKLW1OC6Ow9HViV44BRHSOLxfonydZ+jQsOxstNJSqxNEdnbMy5Lq4X1In+i0NKT2dstbk5ub9MsZNlTschF88cWh+5Ui5JvSiMvDdKfoJzrqXqoO4Lk2naWLSqgs7URkejCkCI3H07jj9+CddZVXP3CC6sZ86q5OxuWWUcqmxPJYtYzF1vYzPHhL7iIFA1a9hmLS0NA/ut5YQdmMvWzsa4SjKtqKFUOL6uLOs9hSQfsZGX3fceZygs6CCO4XSuhc4sUTGXadNaxnJv5avnQJOjqGbwWXynOTyvDbBWU2jxrJ9KKoKEJursHy5VpsemvsgSXdQNfFA+tFMGhNhc3Ntbbj
8Ziotam1wlRVZGdcDne8XvykkIbpL3GjwGRVeYgv/80UOrq7OH5yGL1IYc+FAIUQNw7cZXrmXRxdnVQ+1051Qz6S08PKsqHveUd6UVU1EMVN1m1oNGlFHg+BT3zivdGL0hIqS+/264U8rF6Il55m+ZIujHQPwjkwTG3D9Ausn+9mhzJrQCsoZ+mzHXgjdShHz2FebhpRLyIbKqh/28XkA8coPtpA5kqNwNKltC9OrRej8eLfj17cT4Hyk6AXj9GhPLr4fIJQSFBertHWZhXZ9PVZF1tBgRG74KLt3NKkIO2B/rSddINxdVUYsgSbymPTWFXVpLfXupBnzozg8ZisX295UrZvVzlyROH6dQdTp0YoK9NYvy7IxcMG0w5dxn85woRKL+kHa1AaGqwcS3MgJ090DRTFHmueCJrGlCIXn/s9GeF2Uf3zMEIQG4FeXa0m5HWmMppmWMfYd5wf7RwPssz0NJPmGwrNd+bzXKmK8suf4ei2jHhoy5aB/MktWwaMIsnTcGJEPUD9ufxKbS0ASvw240jlmUlgsFcpVci3/29TGvG4fRNdXUPFr78Y2fR4rHzansuIPdZnBLdupXSFwqKnVSoXeXDsDbIluA2HsgJHwQSWP52GKWcn5rHKMtqmgX2pdOg0NPb/TtPYbG6HNDciFEIrLU3c1/jvUFURK1Yw5dNl9JyrJy3NJBQS/KBnC1JV6jDtaPIvo56Yvj7B8uVaQoFkMlwuE8OwwsTxRY/DGf5UoWgbm/cjo9GL6ANL7qmDyAdCtC1+ML3QdXj9dQ89PRLt7RKbNgVJTzOYctag6HBqrQBS64WuM2WWi8/9XhDhtFprptSLYR6wDU3nxH+2cvtGNlKWyvQ0kzd+UExYSWeZfB6l5xJCsmwj0SLbB9WLYBD54kUknw/D4yH47LNDvpdRaUX8NqO8W3rRdQnpF5o142DzZkpX6Kn1YkkaZnb2gH9+0H4kaIUkUbnKj/qNE5YWjEIveuaWEalcRcGeIxiGlWn7traZQJWU0ks/mlz9+9ELj8dEUawamezsxAXDk6gX9mLgfUD8BTxhQuriL9lhojqDVtoOVtrOuLoqClvqSCtbRDBssmu3i0BAIMsms2eHMQzIyRmouAfo6pKIRKxBLeGwoLXVwa3bDpqmr2PCtRNMOlVD+mcv4pg9DVOWEbo+MIxFCKso1+Wmqm6cZfz7K42rq1WyskRij/3+vEUY6A6UymgKn8/6DKcMYR0jfwKGYxKmEIieOyj1BxAuq4BWKysbaNEZZ1xTpeEMQZbRVqyw+jLn5sbCmsOGipOEY5ORKuQb/9kpjfgw4he/LwiBiESQr1xB6ukBYNXzz2M6ZKRuh5W3mZXJRo4Qnr2UoGd58p2VB+o1qvfIEAzGvs/dLQvYMPUcqKqV5pTsGNPSUGprke7dY0pLK9clg2DIjRCCqef20rVioDvEYAMaTXXIzU09Mfh+PDHR6ZLRTk5e74BgPM7hXZsni9HohSxDdpaBrIcS0jzvVy+6uwU+n0R6ukFzs4NQSNDdLXMvfz35BSNrBfCO9EJKlSZjwr4dJmFdUDm9CT2g0+SYS0t7IWsX3WV93i9xXrqF1J+qEnz++YQH7PvVCxEMos+eHcvjH2LTeTCtgHdJL0wT5fQp63eGgVZWxqpVcn8Xo1HqRSqtkCSqD6az1QThsIZvjqQXebfvUnHgJhoqimr9bXznucFaEH0gl2WGPJwP2sX78txHHZSDMxeeRL2wFwPvA5JdwEl7pQsRG/jlbmjgJaMBTYD+KyW0L/Wid1ur2bS0gc4SLS0OZs7UY9u0VrVG7F69fl3C4XBw8qQT1Q2HKr/AlpuXyG1vQXTcRCsvR1u6NKFXtOmQ2aE8S4MQlLxkUrneHzPiubmCJUu02IP/wLwAM3bDpTSabhcuJcKLa1s4dqOAyKxiZl5rZtmk63h2XEAOXAdFsaYyDsk7SrJt1eqkQDBoGdgkIePoQiCl8R5FOHbYfUi13VSFYcN5
l+L3xeXC9fbbSD09GJmZVts+nw+i+aQeD3pxcSxqEC8e0fdHz4npkKneY42lLytswbvgLjscH+RwyxoikQK8yztjYkRnpxVViXbnaG8H08TR2op68ADTFi2hZ/ES5HQnzp/WIcV1hxh0+LFUh0uXoLRUG7Y39Gg8Md3d1uIiL8+gr896mIneR49zeNfmyeLd1AuXy8TjMejpsTy2J086UVUTl0ti/5ov8MwIWgHvTC9S2VIhQMlwUjqjDe/My/zdnlVMmTkB6m6z9taPcV/ZjTBNwgsXxmxjypbO8XoRDCI6OhKGfMX+zuOJ2eax0ooh+/CQ9IJgEOeZM3EfasbOY0q9iE+fwnqgN9I9VNe4ErVCeZb647nIab/ChoLTmDmWBomuLhjUElZqb4dIBPfRWubeuk7P0pUE/vuXEQcPUPjDgc5zg7XA5TK5eFHG55PweAyefTaY8nzej150dg7oxWDv/5OmF0/AIb7/ua+K9n4DrzQ0RLt98dOIFV5TVSvs1d4+MFhMVa3WcELAnj0qmzeH2LIlxLJlGj//uYtTp5ykpZlommDVyiBZh6vJLpuCONICgNTcTOgLX0gw7tHipKVPB6ioEAghx4z4hAku5s8PJay4E1KEILnR7A+VrprpY8/5IiJzZlvGSNfB4WBN7mkirmKEz0f4qadinXmGEN12dzdKbS1qVdXA4DG3G628HDM9PeZ90ZYvt6YUR4vNkuWPDg7rjsQoRCFmxN1upI4Oy+OTlzeyMMTtS3DrVus100wUqMGfj+UBM10u1Kqhw9hCGzfiMjRKJ19n3fxWhF/Hu6ETHFOQjFy0ddb1pO7ahXA4UCMRQhs3Dpzn7GyU+noiEyagr15JeiBAqLyMQsXBOKO/k9CgojBfL7FUh/Z2ifJy7R0ZXF2HAwcUTp+2OqGUlQ1dXKQSice1b7TN40f8tTqqVIX71AtZtuYN5ORYA8g2bgzx2ms+WlslamutHPG0NJOZM8JsNHcyfgSt6N+FB9eLVLZU1/EGdmHO9lF1cSb67NkIXWdGxh1CwoU+YyaS30eksDD1w3v89ru7UQ4cwPPtbwOgLV2KtmpVTGeEz0dozRqkjg6M/Pyx04rhjjH+HL5TvdD1obUSyT4/6iQCSyv603qiMyJMtxuX8CZqRXk7OPKR5KWEnioa0JlAAJGfD/1zG6LnWXU6cW/fjl40DceiOThDfvT1lRRCSr0IBmD2bD162hMcPQ+CrRdDeYwO5dEkWS4cDHOx9Q/6iqJpieG1igrr/dnZVhtSIeDyZRlZhitXrIs+N9fE5bJax+XnW14fgUnn9/bDhRNkzZTRysuRmpvxnWuj44vfJPcrv2sVOPUbDK+vCrQA7LSKS8nOprISxueE6WjuSvA8i2R3zCCjKXw+q6/xjfnUX89i6XM9/e3M7lF/bjyys4yKEg0hOxI93cmQZXA4rHC1LFuhYiFQ6uqQ7t3DceMG+syZyJcvJzwQv6O+0iMc32BMjwdTUVAOHwZA6c/jBIYuUFLhchF8/vkR80mjxxTrthQ9J7IcS48qX+si/eguHHVWTizPPkvleh0hrJB9NJzO5MmImzcHwvW5uQQ3b0ErK0M5fNjq1e1yY2ZkoK+vRBEC5dAhRChEqKICJAk9bOLaV0PR1TSai9bEBuHpupUKlyrqM5wR7uqy0g1UFUIhWLp0dIuLx7lvtM3jRaprddiHk/vQi2ibUk2T6Ow0KC7WY97S3FwTVYXx4w16ugWumhrC0gmYLRMqK8dxzdKKe1/8Jjl/97sIh5QQhXxHepHElkb1Ym/LfOqvxenF693UHSgCw2Dth1TCq1eNbEf79ULq7Y11pVMOHbKirjk51oNwIJDoQPF6Bx6W36lWpDjGeN6xXgyulUh2jj2eIVphZmUh3bkDEGt7vXJlF2nHBmlFdC4Q2QmpV/j9A9EKWcbMzSXwoeesNKL+AZdGugchxFC9iM6s6NeLmTfSuVy4GiHFLQRMWy/GisfkMB5dBheq
dHUJjhxRYtOCo3MBgJhhVxoa0Eqs1p/ynmoKf1iHYUDPvHWkp1sP+ls2B61CM01w9mx//3tzoJNatLiyuFhHCHjmmRBX/tnFmYsu2m5rLP6dpexc8ofktL3JhNOXafr7/Swo9lv9jaOGIiMD5eBBHK2tRCZPJrRhA+LnP0e9eXOI53mkOybarcClD7QzE06ZVa/OR99uYmT8CqFFXQl9p4exAwkeE8PjsdpiGgYIgdTTg4hEhjwQA6OrNxgL5P4FV1cXRl4eoq8P0dWFUleXKDCj2E78kJxhe2X3T6xE1y0jHk336T/eyJzZROJyYonLW42eT3p6EjxQh/Y70LrCVGzpL8Du9VFVn4d6VGLlSit3WIRCuLdtw9nYSNfLr3D264cYd/4EM3taWaLX8fP5v8e+fSpul8Gzzu1I6S60lSv7P9hMPsRokBGOXgeSZJU4OJ33133icewbbfN4kexadblM3n7bFWslmnBf3I9ebArQeksmEoGrV2V6uqx7KuotjdcLgOmtDloPqrS3a7CyjFvP/yHj2t5kTuslzn/jEJ0la/H6Bz1YpqWh7N+P1N6OMWECIa937PXitYXo83RMdSGh9TLCGZfqM4JeGNnZ1r72t2gycnJwtLZiqipmZmaCXkhtbe+eVsDY6MUDaIXw+QYWRP3RB5zOUWmF8PkgPz8hWmHphYOKZ7YihYJW2tGBdFTVTNCLaIta/+pKzr5xkHEXGslVXYR7Qlyfv96KWm0Ikr6/yorwv0O9ECI6LXt4b//jrBf2YuA9ZnChSiQCR48qsbb5UU8+YBXvqmrMsNO/mp4UgcvHVPx9Ert3qzyXVUX6yeOkz5uHf42X0mUhco7sZUrXGfLPPUVkzcqEnLj0dBOnE+b9TilvNzk40mjy7bfWUTTdQC5/jeIre5mhaRAIYmYMGAqprQ3H9esgSThaWwnPmwd+f1LPc0pDGWeUQhs3Ulrui7UzAxBOmYqtUFurUHUi3/JAMDBUJWZEBhMX+gw++yyiq4v0Gzdwnj5teTr6+jDS0hIeiIEHKgB7UMysLKtmoa/PMrJCjCwwqdrgperQFDXM/d9ZcMsWhK5b5ySujmLEnNj+82mqKqFQyHpPWMc4fJLGC+OQz99j1avzqT6RT0OjVRBo2WVBqKICZ2Mj6v79ZJ5rYpxejOx2IN0JoZw/z0RtL/eWr2PWj7+BYp4i8tEXYqtWtboaU1Xpmr9qWCOclWW14Y0uotPTzfvqPvEkFYrZPJoMvlajC4H6eoXMzERPPjBqvXip5e+Rg354+RWyspwUz9BYvvtrzLqjoMmfARiiF7JcylnZ5FAN/LxmC5ObDF74b6/QfLqKC80esmeGQAsM6IVhoOzfj+P6dWvgVloaUlvbw9GLD8nU1ipUHxADHuvR6MUWK8JJJIJy+DDK0aOW91qWiUyZkqAXRn7+u6oVMIZ6MZJW9B+TVlpqzfGJm50T3/J0JK0QPh+eoiLo6rLek0wvDqTHCshjetFf56IcO4a59xDjWtPonr+Mrk7B5Kv1pHvg4vT1KG98C/e1cwReeGd6kZFhzXYazUDLx1kv7MXAe8zgQpWurkTXhTnoWtNWrkxY/fp8EsGnvWw/kEHkgsAhGWxYFiLj3Dmc584B8LwLlLtv4ZQhGJ5FpP/9sgxnzzoJhSyjKUnwzFdK+PeP5tB5wcGt2ybl5RrpL6xl3po+zF27rHHu/WFScfMWclNTLLRqulz0jxEe4nlOShKjlNDOLI6EycRxXScGjMgIJ9nlQp8zByQJZ0ODVVy2aNFAXmj/3f8gBWCjJtlY+UG5/UkFJi7cnio0nbKQTB40un7//gHjP6hjxYjHLsuQkwO3b1uhYF1n/YwmYBb1pzM48jcquGSWzr1HxWorN9g0QUgSvldeQW5qwnXzFlM62+jKmkLvlFlkPjufwm3HmPj/6ihsPoyaB339Hxfv1fSkG7hdBj6fFDPCwYBJ2x0H+fkGLhds3jxwH43Wg/MkForZPJoM
vlZ9PmvYWGZmf6pnnCc/SjK98C/w8rOqDNQrcKXJ4Fl3H+n1+8kGNr78CupXv0FWxz40bQ2aYcSGdA3Wi/m/u5xf9Hlw1MH58zINJ1wItlDyXJiK1X7MXe6YXmglJRAMYWZkWA/+Ub1wuaC7+/2jF/3ec23VKqTeXoycHCv9UdeH6MVD1Yr+430YejGsVmzcaEUcjhxBPXgwcbEQt9AYjVYMrsMbohd/7QTZoGRFgMpKfeC9QhBavRr3//t/tN92E+pS8c10kp4RpntuCeb+48w+dIxs6SA4BgbIJehFWiTxoT3dIBgUtLVJQ/RC12HfPnVU3v7HWS8eo0N5dIkvVMnONikt1ejqsnrfJr0oB4XBQiG4etUBCCRJovuzXrIyTdxvvYXnm98EIDJpEoEXXrB6GEe7ApmJRrOiIsQ3vuHBMCUrq8iE5mYHX/iCRu3hNELKs1axUIbVgabq0gwyXFtYM+UyRlYWyvHjVn6GLOP7/OcR/cY91R0TNUQxA5WqrVtcl4mBQSfE2tElNeyDhcPrxfR4kDo6oN/TIwKB2GTg+C/joYR7U9UjDDayXi9SW1tCkVqyPE7h81keN4cjJqCpPFUjja6P7t+ohC26P/3t6kynky29P+R4x7M4+uOtm6cdQGwT7M17iZCpUlkRxFVTgz6tiMDZ2zhMnULzOn0TJyNkwcwZOublZlzZOtrKNSgNDda1BDGvZlptLc8qIdrLvXgyQA+b/PLVI/RoblqKV/Haaz5croH7aCQPTnTwnhCWl2g0od7HtXDM5tEhXisGp3pu3RpMfl3G6YXPJ2hsdHLtmgNVhSlToP0zr5CeZjkK8vfvByC0dg2+V15JmNYbrxdRJ9W16zKGYS1Krl51MH16hMrKELW1g/QirLPn+jxUaSarF7dgut2oBw9CXh4hr3dIpDIZoqvLKuAdphV0/+GOiV4YubmWXkhScr14WFqRbH/GUi9crtRRjf7PEKHQ2KRApdKL9q0onZ0YeXlsfvokNTs2EJQ9Vp0aJur27Vy9mc6dPg96WKfw8kFmzTU55lhByDQplq4iDIPQ6lVD9MJUFNIP1rBxQyU+v4Qn3cC5t5q3tmVzPKsCj8dI0AtdZ0Rv/5OgF4/ALj7+DL5otmwZeeUZ7/ns7ZWYPDmCLAscDhOHbA0ukY81YN64hcMBkaKihIUADBrpXq/wox+5uXXLQWGhTlmZwbVrDm7dcvDmmx4WLgzT2OgG8vAubaf6aA6NP75F6WQnhjsNrawMdd8+y9NjmtZCYAQDYrpc1iCXnh4MRSH4gQ/ETsiQfMb+fY0adoDK1X6k7uQPsEO8H8FgQvebYduJPgSGbQEXpb+jUrwAJM3jjIbdDx60znP/36by1pgeD6aqIt25g5GTM/SY76dwurfX6kbU1ITU04M2azY7IhuIFBbGBKhmr5MNWUcwPdNoUCuY1HiQZeF6rlxXuGssZJZxAef1Vu4ZN3Aev06haEUIiEyeRPipp2KGHYiFjEUohLuxgXyH9Zr/RzVMvn4ax/RlnO0WXLrkYN68SEJBpdcbwu8XQ6Jrum4N3qurUzAMeOqpMB/6UDA+7XUIj3PhmM2jw2CtGK2XMqoXsmzNliku1vH7JRYuDJOVI/C98grKvv1EItbz7uCFAAzYYNOEt96ytGLSpAirV4doapK5fduBELB3rzVE7PhxFRFIp2KDzsG/v0j91XyWTW8jWFGJ65Dldaajw9q4yzWkb//gA1eOHEG+cAHCYasr3GBPuK0Xo9cLrzfl4uvd0Atj4kQcN2+CENS81YdRUE9j+0wwC9js2E3r9iaOOlYyborOhLvn6LrYyaGuieR3v0UhLWQ7jeR6UVGBWlMTqzmQKytRq6vpO3Ac3beKnGkROu45YnoBA1oR7VA0+H6K6sWRIwrhMKxcqfHBDw5v/x9FvXif797jT6qLZvDKMyoCLpcZu2hVdWB63qpVGr29/dGELAN5dxV3D9/A3SUhSZB5tRm1qirlgqCh
QcHhAEmYEArjdMDXv97LN77h4epV6zJRnTqNP2zh5Pd0MHspnXYL74ouhF+yJhRfvIjQdWRZTjqhcTAiGESfORPn+fNIgQCu7dsJbt2KumePNQws2jFBlmM5nzEMg4NvnLXCjmlDDVJST3l/N4Nhuyo8JEbTSzqpAMS/z+MhtGYNru3bEcEgzrNn0ZYvt1rNdXdj5uamXoBFn4gHPxmn+txU28nIiBVhRzIy2XN9HsduF1A6vQXv4jaqjuVy7MgkmLYCb0kz4bTFnN+dTrA1DTngxzUzn7TJCsHdPUy82YhpCoJL5+OYMx1tyRLcP/4xAJFp00CSUKurCVVWDuSRNjSgNDTg1mHv1FUc9mykvcPBmTNOWlpkvN4QVVXW/eRwWJOQo+kT0XvLKtS3HnRu3HDg9wskCZ5/PoVnlce7cMzm0eB+tSJ++rYsW5O5IxErrSg6S2Dr1iCyZJD2xpt0dkpEs4LS3niTvleTLwi83hA//ambSZMi3L0jIGTwwkd8CIeDc+dkfvxjN/Nmh1hu1tKwbRyNPzHBzGbpgl7WFd1Ak6fHOuMItxtFUWJ2PhXC54vlykt+PyIUih2sumOHrRf3qxd+P2Zu7jA78HD1Ytm0W2yavpudgbUca5zB4nkqy9QWjhydQndbFrK+DM+nSqjs/SGOGzJvbZ9A/r17ZPlukLE8H23GIL0oKgIhUGtqrK51DGgFgLa6hKvqOnruOWhvlzhzxsm1azJCWNEut9tkzZpQ0mJ8n8+aX3PjhgNdh0OHFFau1MjLS23/H0W9sBcD7zGjuWjiQ7wXL8rMnq2TlmaFt8Dy5GzY0D+FMt0gfX8Vjh++RSTi4OyzrxAICNa2fA/3W28BDEkVik4TnDpZxtF6F25HMIwA+6oK+dznfHzjGx6am2WeW99JY6OOqSqIYJB1864j/BKmoiACAfTiYsy8PPR79wamKA6TfhJ9TYRCVicH00S6cQNl/36rxB9r0rAxLjch57OyMkTN22Hqt40DaQ7riy4MNUjD5XSOdXh3NCk2o8gxTSVI8e8TPh9IEsaECdDUhLp/P6aqomT3d/NJsl3h8yE0LdYabvC5Go3wxB9H/HwDpUXl6fKprFqegZaxmJXrdfjKKWTHBERmNxUbBF89vxqppQWn1MdLE08hgmGuzFlF4cldOISB614bxhUTp8MBpkl43jz0+fORz52LGfPogiDm8ZHhA2+soLipjzNnnIwbZ90/bW1S7GGppsYqChs/3iqu7OoS/eUjJtnZBqGQ1WoxL89qbTqcwX6cC8dsHg3uRysCAYGqmpgmBAKWbkQXAJ//vA9d7/d+SgaeN9/EUbOfc7PWceGFV5nz1hs8XbMXjzQ0QmCGdWq2m0wpCGOa0HHex907Bs6GM6z+vflAOufOORFaiPUzmqhvXm2lKQrB+qILoCogRKznvTljBuL27QGblMKWmh6P9VDZ24sxfnys0w2hkK0Xj6he+N1Ps6rXh/nPrbjMPlYuvceRUzLN2mSEHub31WrMsIu9l2ZwMXcu+Td/QI/hIXQ7jSlFBs5Tpyy9mD8f/8svx2oGwIoQRP8NYG6q5LUKP5cuOWJ6cefOwHyN7m7BW2+5OXfOSWamwfTpOq2tEgUFBh6PVayvadacDqdzhBpFHk29sBcD7zGjuWjiQ7w9PRKaJujrEzgc1oUcHR1vCYOwcgIXzuPy9Kc5nb+R7GwDrUxDXDxnPaAPWgg0/uAGZROusG5hK3tDRRy9VYQSCVF/WNDQmIFpwoc+FMCMuEF2WF4Fl4vtmR/GW3IX9egRlGPHrGLiuAm4I4YTo4YiErEWD6qKUldndSiKTho2zdjQmvicz4otAvn8PVx6ANJSTw9+qO3edB3u3kXdvj02kGXYkOlI+5NKAOLeFzPEgQDhhQsRwaBltIfJoR3ReCcpTBsy9j6euPkGpR4PpsMEYX2ukGVWfnkJkt9HsL9tHJrG1ZwlmE4nO53TWD/rAtMvXMR5Ox2tcg3hsIaj
pQXl8GG0sjL0efNQjh9HW7LE2v9+oY/vl24Y4Nhdw6wNFbS0yLH7Jz/fwO026eiQcDoHiisNw5p4HPUCbdhgDd7bt0+NRQ6GM9iPc+GYzaPB/WiFx2Ny+7ZEMCgYN84qvO+fHYWuxy8iJMz0dMIVazhV/Cpd7TLa1leZ1xRGpKcPWQgcfOMsjeeyKZt+glWfmcqB0A2+f2IBPzo0naNhN8Kt8MILASpXa+z92syYVkRmzGCHPJ5N7EGtqcFUFKtlZbxNGk4v4h8qdd3qlCTLuHbseDT0on8BgMcz+hSbR0EvXK6RFzbD6AUuF6u+kI3w+dhRnxcbNGqqKnuP5xOe7+Wk3M4zOdtYYZziyNRf4f+FNrHFOM/yKz8jMmUK/s98JmHatqkoqDU1ibu8p5rgUi+zZkViepGdbSCEdc9Er5nMTIOuLokzZ2QcjoEIwXPPWROPQyFBbq5BVtbwD/ePol48Arv4eDOaiyYqAj09gvZ2iZMnnWRkGDz1VBifT6AoVktSXbe2p61ciV5aTst2FboFQhIEK72E13sRUmKKkEv3URbezwbHOcRpnXULAUlCSXewT7MiBjitvMvjJ1RWTG9jQ8Fpdrcu5MjJYkQwgy09nZjj89BnzsRcsIBQXp7l8e/qGrlAWLZamolw2Mo5lyT0BQuQ7t5FnzEjZohWrkzsAhGdQSD5fYTexfBtjKhwBYMoJ04MSdd5YO5HAAZ1i0jpoRmFlyn2ufEFX6J/NH2yXN64/YxdUcFgrJjNyMoe8M6tCLB5ySmqGsZT11KEVl7O5vxfIrIyMGYVE3G7oawM97ZtyM3NCMMgsKiE9iXe2D0R3yki1nt6Wz33zst4P7uKYEhKyKO2pqgq9PVZhn7NmhAHDw50jAgGBRMnmrzwQnDUBjtZSoaNzbvF/WhFd7fg2jUHmia4ccNBWpoRK5SM1woA/2c+g64ZsNN68BcOiZ7PvoLDKSV06pH8PlxhHyuUK2z0HyFcs5h1Syyb89aJOQjTAMOwPPF7XTRemUDp1Ct4l3eywz2F+iMZOCPjWbewFeHzoS1fjjl9OqFIxNKIjg6rQDjaR3+wXrhcBLdujXVGc23fDqb5/teLuEWOSE+3jisjA6m9/dHWi/iFjaJYdRxxnflS7acA66G/u7u/wDmbqmhL6gStKMSZnctTH89kvj6LUKOfBcXZ3L7aSceyDUSOnYBIBHXPHvzejfiCTjyrKkg/WDNktkbLDxtpOaxwt8SLd11/FkVcGp3LZU3cLi7WCQSsZ6qsrMQI3Isvjl4r+g/5kdILezHwPmCkiyYqAq2tVpcfVbWM+erVlsE7ckShpkZNyCPdW+XiRKNCaamG3y/o6ZVobFRifZajhnLlMh/ph84ghASShFaxlpUbXVTX5eD48VVrle5wcF5MpmRWN+sjVzAzJrA+7Qq6PJf0pjM4ey7B+XNW6DYzE1UIq+1btEDYZw3+SlZHIHw+K01owoRYoVNk0iTLwKSlWXUO0ZZog0JzwvmQPf/DEM2bZOLEWA9tRgi/jhlxhnVYox3XYm6kTh1Roou2aMEXQPD550c+nmAQz+uvx75r32uvoaoKJYsCeJe2o7m9rCoPotXnoaZJaCu3oG1YHytyM9PSUI4cAVXFMOBn2mYCVVLsmlbi+qX7uiWaZqxnjgQB3UUwJCXcP7IMeXlmQqtRSN4xQpaJLRAeFQ+OzZPL/WgFWNd2R4fExo1BHI7kWgGwt9rNiROWXvh8gl6/gxMnLL0oL9esXv0eD6tn38TZU4+ZlWnpxYoVhHUPU1s6Ede6MWWZml2TcPnvUlpwA+8KP5Jfw7u6HSI5pG1rRu2tQ7p925pof+sWrFhhFQjX1iJfugSXLlkLhWR58vGd0To7EZqGXlSEMxh83+pFfI59tE2fUlsL8P7Ti64uyx5nZY14rmLH5XajHD5sDUTLzR15aFx/nYdy9CgAWmkpatZzlCyKJNGKCOXlOpoY0IvVlQKhtxFpmYiRl4fhC1Dz
dphuyYPbbbLVo0LcbI32pV5aDisomQqBoBSXRWER/Xd0oR1dGDxpWvGYHc6jy0htqGQZCgqs8FQgYNUGRFeuoZD1s88v4fMJMjNNJAlaWhyAQlqawb/+azqGIVi6VMMwoKamfwDL8mzCy638TSM7GyM3zxoEcsygrLAFb8ld9v2wlyONJSj3bmIuw+odneamstSPK3INTSlF2bMHYZqIS5cQEycitbZipqejz54da3kWqyOIIyEkmZYGkQhS9AExKyu5h+h9QHS/8flGHX59KKTyDMV5+O9numcsN7enByMzc2DyZPxnJMl5ldrarIVATg5SZydSWxsrl8soO3chVQ2ExCvX6wMiLYQ1QbO31yoaD4cxFYXA5JmMb6jiXqkXn1+it1cgx/VLd7lMDFNwdsp6PBngcoWSTo8c/OCUzKv6KHZ9sLEZTi+iWuHxWFqRm2uQO87Sh1Cov+agd6BGJlo03NLioLXVRVFRhMxMk8b+4YFVVSoul8nKlSTkfxvpHqoaJ9JwRlA25TTe0ntUHcnm2PfDLCu4waa+nxBpX4Q5bhxkePAu78TVaWCccOAIBnFevAi5uYju7tiDpVZWhtTRgbZixfB58t3dyE1N6DNnIjSN8MKFmDk577wV5kMgXuPIzye0aJGVkz9Ci9SHwnB6sX07Sl0dANry5SMWdUePS+rvCJUyojNIL4TPh9TZGUtBk7q6WFV+B+eRo6m1Agb0wudDvnDBan96qYnuBcvpkTL6OysKOspXkZ1lxMJCLjc0z1mHicDTP6xvJL14ErXiMTqUseNh94cd3O2hu1tQWzvyBLyo18fYW0uaFER3VOLxgNtlMK6uivHpCqdOrkSPSLhcJrNn6/h9cOiQi64uiQULNMrKTGpq4gawOOSEbglClq18yxUmXu0ujnsdbJhwBulUG+47vaiOPnyf/SxmXh7AgDHweDDcbujsxNnebnlv0tIwFcUq+uqvJUh2UFFvBbqOum8fkSlTcLS2WmPrc3PftXZu94U8MJE36PePLvz6LhKLXMj90z2FGF0a06CCryFDgFKkERn5+Rgej9XVw2NN6BQ+H1IwELtGRFcX9F83sX3s68N56RLO69cJrV5NlWM96UYvhdctUdKXeDl6VEEIWLfO6iBSVaXGIltr1gx0D3K7DDZu0pLeO6nu6Uex64PN+4d3o5f4g+hFfEpR7qmDyAdCsLrSio71QvGVPeROkNjvqCQUshbYs2bpXLjgZMcOqzd/ebnG2bMyoZA0MKwrLv/b9HhQj0qUrDDwaneR/AHWzekCn4+MkydRnFfRHYLeP/5ja4cyrIdBKdr7PhCwpv3W1iICActpMXOmVfM2QgqL1NoKWFN5ibbQHE1B6//P3p9HR3Xd+d7wZ59zqk6VVJoFYhAg5skYkJAECJBKYpBjiB2bOMm9t5/utxPn7evuOIkzPEk6yV230+nkJp3J7kzP7ST9JPftttsjNo6ZJTGDUAHGZh7EjEBDSSqpqk6dOuf946hKVVJJCBA24Pqu5WVKNZ2q2nt/9/4N3++HASXekdfs4bV7jS8kr7d3g97cjHT5MsbYsTdvcvZ6sWdlRdWebsYXpstl9YsYBkCveEigVxI10XvHcVpXF9syHiPYFaa0fAaOgzKdnYKzZxVycgzmzQtF50tNjYqJQGAOmS8S4UHniuRhoA/u1ukvVho0MhjtdqvZqa1N4tQphYULNfz+mzjgySaKFEDs81g/XlUFa+wbEcKDLDs41BBgvf4Iig3COji31zEPlVoquHhR4cUXUygoCFNUFGPA0idaECkj0sIrEC0tuA4fprrrNUSai1BwhnUi7/lS+i0GskxI06zFvqcmEBi8/T6mXj3S7KQVFw9eg3gvQFEsR95w+O67Ud4iYiNopixj83jAZhtaWroP4cc9to9uNPSUETkc+L7ylV4DHIcDU1GiEoIA9hjpv+g1pqRgCkFo/Hi0KVMJXEihTqrkkWlO5syWqEXw+utO8vPD6DqU
llpzZPrVHWgdGi3FS6KZsuz6GgxFgpWL4j7OYHN6oKbMyHy9hwKMSdxjuNtcERmLkfe4Vb5QFMjMMKwyzH0eHGFYsbwCc0MdmTQQChUSDEGDx44lPSpobbWcjBsbFUIhwZgxYashONasK4Yv4riivR37zp2s0t7C3vkuxqRJCLD6AHrWkbgmYEXBvmIF4q23MDMy0KdMsbhFluPKfRJ9MGPs2KhajulyDaqbf08g8p0pytBq8j9gmC6XlQnuMVaTL17Evn//zbPJioKZm0tw1apb4ovgqlVoCxdaAadIs3Mk43PyZPSaYt87NsMSTnUR1CTqbxSgvZfH8uVBNmxwYBiC+no7LS0SKSkmJSXDxxeDNfA/CHwxLKPw0KFD/OEPf8AwDKqqqnj88cfj7n///ff50Y9+xMiRIwEoLS1l7dq1w/HWw47bOf3dLDoUO7gMw7qtqtDSIiFLJrkjDE6dsm7nZIcTDrLIa+thwTvaKkaYdvJfqWfyQQ+SBP6S+XT6BYXH94Nd8JZWjdhUS/6lAxzLKCF9jIEsw4QJYUtdobxnce9J4RmpLoQgLkNgdW6qhObNQ75+3drs90SBIxcXebxWWgpC4JowAfPll6ORAGBAibJE0EpKeheIe2CRHDJu1sj1QUNRCLrdONavR580CeXkSbSSEkQwOLS09ECfJ6Ib7fVaZVLhcO/rORwYEybEvYa2aJFVgpYohdxDitqiRZiqiggGKUt1EdoZ5p2GanhPEAhAfn442vtimlYmTOvQyL9Uz+gTGkcdq8iuryH/Uj0pC+eix3YOMvicTtSUGTtf8/IEkYqF+81R8l7Fg8IXtxspHGwc9d2IRGr4FWWY+WK+1aC/xKWBYXBgj2D/4TQuXJBxuSxJUlk2mTAhjNsdjLvAvnxBZAMoy4hwmJC7AuV6E0ZqKkZ6ejxfBAIEqqutA0JeHq4RI3pLaGTZiixHHHNvsk715YpBTcvuNdxrfAGWeMfMmWAYt/Q7ALfFF30z1H0zPv3eO/YQtWYNZX6rr8Bz2InnsPWQWbNCaBqkpVlzcjj5IhFXQGK+iLzO/cQVd3yZhmHwu9/9jm9/+9vk5OTwzW9+kwULFpCfnx/3uJkzZ/KNb3zjTt/uruNW9WGHEh2KHVxtbYL331fQdYm5bXVMG99FY0oVJSUapSVBRh2uQdqvWopACV7b5xP4AxKtxW5GNR6Iat++FarGHxJMETYWavu4sfEwnZ2CfRmlpD6xDPkAGIalMCHL8PzzLp59xotz62bo9rP11CRUu8HSgsY4yTPT5cLMzibwyCMITcP/xBOWuswANelMntxfvWAo6dtEsnJJ3BEizXbG6NFw/rxVwnOnZVeKQmD5cmz19YiuLpTTpwk89tiADzczMqyUeGen1QeSwHsiLjJEvHOo3Q6zZ1sHAafTsoF3V2o0zVrC6BMarvc8rDU8aALMtYXoVRX9slCxc1pVLX+OWDWVvr0FEQJQFOjosMoyIo36EWnSB61e9IPCg8QXt6MlfjO+6OwkbiMSDsPJkwo+3zDzRU+Dfoo9xKPmW7xxYAlmu44rNYeZM3WOH1eifPHLX6Yya5aOe2kXji0D80WsjKX/U58i9PDD1tozGF985jO3zhdJrhh2RL0FRo/udS4ejrKrW+GLSMbH6bTcjyOlxQPwBQ4HFVV69CAAUF0dYMuW3ubfO+GL9nZLic7hiBen6Hvg93otU7KcHIOuLmhpEezYofYzL7vXcceXePr0aUaNGkVez+l/8eLF1NfX91vc7xcMdPobCEOJDsWShizD7Nk6qt1k7HvdFIf3MceuE64sR95Sh3rcg76gED1kcvmKjM8n4iSuXC4z2iOgKCZ2e49xUkMNbSVuTk2sIndTgxW1EfDu6BUoByAcFowbp/PJT/o5sF9hR40N0W3ja9P8bDs/nYZjKSwYfwVjtgupK+ZEPkBKs19NuqJYt9va4h471HRorOLCTaMRQzFtSSKOnIet7ErXcWzZggCMlBSr
gS9BY3jsbxTJUJiGibp1q9WU7PdjIgiuiZcujZgaRboaJcnavCxaFCQtrbcG1OcT1FPNX+geFMV6ic4ECzv0zulIrfX27f3VVGLhcJjRDVhODmianc5OqzRj0SKN7u4Hr170g8KDxBe3yhVwc75IS4tXvZJlmDZNR5F7+WKWotNVWkHeoRqchy0JxVvhi2CQaIO+dr2LjSdGItllJFMH0+D4cRszZoSYOlVHUUyOH1M48a6J6NCpDvvZ2jgwXwy03ifkC58PLl4Ep3Noajd9XmtIXAFJvhgC+jkXD1fZ1a3yRawPEsI6+G3ZguT1Es7IRHukt8Q11jAVux0kiZ07VZYvt+ZkLF90dgrqxdD5wu3udSOuqVEH5Apdt4JEp04pnDoFFRWW8/WhQ3bS0y2zy/uFK+54ZrS2tpITk+7Jycnh1KlT/R538uRJvva1r5GVlcVf/MVfMG7cuDt967uGRKe/gTCU6FAsacT2DLQscCPsGpkHGzjz9iF0XXBseimTFpSxYZ2TcBhOn7YchyMueO1eKA9sIs1sQKwtpKuqglP/shO1xkOWaRK8JnPjhgSYpKXB0xP+zEZzFQhLYSIUMPjqtFexHZ/FGU8OPzq/CBEOUzSzGfdDXqSuYP9oQIIUYOziYfSc3k2nE7FjB2pzc3x2YQjp0CG7Gt7MyCyJXtxOberNiLOz01ISychAijSK9/2t+vxGWnExO8+MI6C4WD7yIMIII128xLb385FPNrDgK0WWGpPqYNe/nOTwiWxKpx+j7NlZlrKVx44sWxmD9nZrk3P6lMyE49s4aVOYMUNHkiwvgoicXIKvAlkGTbt5WUcgIKwNmAKmqdLZaUV9Tp2C5mbr3/eDo+S9iAeNL26FK+DmfNH3gBF5ToQvDFnD+/oh9LcOc0kxGf2JQm485GbfJpXu7l6H+qifQMjEHdpEag9fbHauJHNfDfkn662NjlzFxfdm4pSDLJrRwcWULLSQweTJOqGQ4OHZAR7u2sPxxjQOv+PgMDfhiwHW+358EQyinDljSVEbRu86PgS+uCUH3CRfDA0JuGJIZVfDzBcYBpgm21sfRmvUWDLjMvb6nrF6bhqmCLCo2gGdPmr3Z3H49UuW6uHsG2y0r8HjcXL0qMLMmTput8UXnZ2CbVvtzLmylZMjh8YXgYBAkrgpV0TUHBct0mhulpg3z2TLll6zy4ih5f2AO54Vptn/g4o+X+7EiRP51a9+hcPhwOPx8OMf/5jnn38+4ett2bKFLVu2APDDH/6Q3Bj1kXsVn/mMNebT0kBR0uLuUxSl32eIPN7pBH/3WsSh40iS9fz35zzO6c2C99+3mlHmzzdZutRk9Gj485/T2b1bMPniSOZOd1Py5ApkRdCw6BO0nHUxr+NdZqV2s2/qEjayilVs5C8n7OUhm532hSvRQgJHKESWIfPdv7rKP/4fJ0yaCpLEY//TGriRD5I2lAUz7oP4QdeRt20jbcwY6OiwnGOzsm7rixzw/dvaELIMt/sew4xEv+99BV3vHbyAePtt6OqC1FTMVaus39Ua2IC1YKTl5SFOnsRMT0cdMQJXj8lcFH1+IyM7B8nl5cj76aTYSlhZcILNL3dy0DeBkrQmcrduQ9gUME2ylZEsLQqxctI1cMxn7eNO0qUgjqxUsrKsa0xNgalnNzPTf5D2h5cS+PoKsus3kbJ/P0Z6OubKlQkX+MxMyMsTdHVBXh4UFLgS8ldmJowZI3q+Bplw2EUgAFVVsHSpSVZW/3mexNAwnHxxP3IF3JwvRo0amC+uXlmLeOM4aWlWQHSD+Um6dwpOnBAsW2aSng4lJSZHjggOHICjR6GCkaRluyl6YgVqjUSd4wkemZfC0nkqR46m0zQpnZlT/cwptLHyEYlNmwQOh41gEByhEOXzrsGybr73p0kwadLw8MXVq4jdu5Gzs0lva7s7XAFJvhhu3G2+aG+3+lUuyLx7YzKOc7lU21U21WdzqGUUJe+dJjetAxEMkHc8j2WT
BSuX6IhOmbWVIdIVG0fOpnLihExGhkFlpcmF84LxxzYz196Ad8ZSAv/3zfniVrgi8riJE2HiRJkxY9JwuSxhpieftONwfPhqUUPBHR8GcnJyaOnRmQVoaWkhq89kS0lJif67sLCQ3/3ud3R0dJCent7v9ZYvX87y5cujt5ubm+/0Ej8weL39/5abm5vwM+g6rH/LzghPDWMvWg1anZ2C7Pp1tE2pxG63ce2axNixGk5ngEuXBBcuOAgGFY6OLCWYFWLk+VYyM00Ki6C2o5TdbwoUPcDxCUupXtBO+bJ5dGz3MtkeIPRwsyUNF9Zp3xSmZptCMBwm7PeDw8ErbwR7FSO83puf+GPvjzSlOhyMcDjovHLFisIEg3A7v1+iLzLmfdVwGHGn7zFMGOj3vS/QN4JfUoLa1GSl369cgX/7NyuSExNRy83NpWXWLBwXLli9AG1tBBsb45Q9gPjfyAgz//8aRcfLndQ0zaHm4HTU8F4WTDrPsiwPXS2zMEaNQnR0UDiuE1NIdBpOgl1dqOvWUdbcgnHFwUvnnqIr7ESWTcZMCtOlz6Fl1kJmaC3cKCxE7ejADAbRYtajvg2VpaW9twcbZpHHFRTk0NzcHH2OEIMPzw8aY8aM+bAv4ZYwnHxxP3MF3B5f5ByowXZFJyfHQAjI2r8OY4Ebv1/l7Fnr74ah0dysIgRcv26nsaQQ0zDJvdBKYaFJR4fKqw3LYJcA/Dy6Wos2Cbe2QlGRtTeK8EXHpjA1myW0YBA9HAYheOWNGIWh5uabZx/78kUwiF3XSW9royMcvjtc0fO+Sb4YJnxQfOF2UzivnY5tJrUHFHae+W/I3ZcomtXE4lGX6LooY4wcSeGYk5gIfFclTLsd1r9FWXsHi7LT2CSqqd2WTW2tRNiASbPANnoOl2YvZOYAfHGnXGE9L5fS0l6+8PnA57v7P81QMRhf3PFhYPLkyVy9epXr16+TnZ3N7t27efbZZ+Me4/V6ycjIQAjB6dOnMQyDtLSPdmTN12nVbY6/Us+FccUU/H/LyN1by9Sje1EvmciTqxCSYPXqQFTWKivLiEjzkpnVm2IWAircGj9tKLMWWsyehVrEpcEiC/ymkJuGsJPSSYeomrKHrWensO+AJa9VURFEhG+SWo1dFOx2q9koGMS02zFXrSLY3n731IDuQVm2+xV9a28xzV5lj57Bkqgu18zMjNfJdjh6taRNk+CyZb01pw4HoqWFg/+jBkeXga1jJqGFC9HHj0cfoVOTOYklOU0Ja1WF14t93z7kixfRu0PkF9i44v4knX4b0z5fjCKbuNIiOtGiX8o3UUPlUBFb/nG/qULcy0jyxe0hyhfX6mksKsb8v5Yw4cQ2bK/XI3mgeIGbRYtDZGTEu227XAa6bgn+RA6zFRVBPAcUq3nAbsftDsYFRiP/jvJFoJzDjc0sKLiKe8570XIMgIolVlPxoGU4A/GFLGMuXkzQ6bx763iSL4YNw8oXGzZYKnPp6WiLF1vrfleXNeB0nYP/UIejU7f4oqQEE0F4jMKOlvEsmXGtH18QDOL61a8AkBsbqXZ7OXRmFeHJk1EUiSdfmEeXjwH5Yri44n5WnLvjy5Vlmb/+67/m+9//PoZh4Ha7GTduHJs2bQJg5cqV7N27l02bNiHLMna7nS996Uv9UsMfBcQOFFcayKl2Lowp5kahm3lZGsrqcgyXyXxFYsxMywI+dkCVlmosWKAhy0SlrqC3kUY+cwah65iKQu3W/B4HPxF3AY4tm3Edz2ahJFE5sRHSMqiadBrdPhO7mhqVixusQSv2fun6dQCMnBzse/ciQiHsDsfdVXi4F2XZ7kP0q72NbQAcTNmjR7Y04ikgAgFLH/roUZRTp7Dv2UOwooLgqlWoNTVIFy6inevmJW0thHTyu7s5rczg9I3JPFGuEagIIHX5+teSCoEIhRC6juy04cSP1tKFMycjbvzHPj4WfZs129sF+/fbh6wLr+vw9tuCpqbBG46TGDqSfDF0
DMQXLYVuCvM1zAkV5MuQEZKYWGIdBCLj0+0O0tQk8cgjAXS9d3MyOFf0vwDHls2kHUmhlCaWFacj+f24FzWDnIeqmlbz8E2aeQflC8NAjfDFXTwQJPnizjFsfOH1Yt+5E2w25MZG5CtXCI8ebTWVB4PQ6SPYIfHSlUrQNPIDAU47H+L0lek8sXYQvgBEKISphdh6cRZC1y11FYeDnTvVeK8MGFRO9Fa5Au5/vhiWSy0sLKSwsDDubytXroz+u7q6murq6uF4q/sCfU1jIn/re/Kc84VifJ0wL01DkS2t22BFBXpYUL/Jhj8g4XSauN3BqIKKEESzBdCzuNeqePYZLMy/RMWCVmoPZLN33ziQVcrLgxFTweiiXDa/HbztCEmAz4fpUKkoaYcsATpWxCgUsgyr+jrQ0qcZLDMz6m4LQF4e4tq1u2OxnlSFGF4MEDW7qbKHrsc5LgeXLsX2/vso772Hcv06IcC+Zw+hOXMQfj/hsWMRylnoCIBiw3SmWAuxzQ5yuJes+zb7ud0EFy1C3bMHQ7ExvSSD7MU2XJlBZPnmH69vs6Zpcku68D6fVQv6oDpOflhI8kUvBookDpUvAuUV1G5x4K8FZwrRiObWrSptbRJZWQarVgV7DwIxXOEubmXb/mz29XBFRYX13GhmoIcvFpeALfw+4ZapGDk54HTgntcEaS7MQI8qUI/RZKINWpIvHhAMB1/Y7YjOTuQLFxCAuHHDOhA0NqLPnGlJ0QY1UIS1D5GkofGFqqIVFSFa26i5sYAD5/IomtlK2ZdGUbNDikpV9zsQ9OBOuQLuf75IzpBhRt9F/DOfsf4+kKRcZhbYd+/G6A7SXOTGlSaiKWF7up3T+UtpapIsBZXTCh0d1s7+8cetA4EQoKomhaUmtndlag9kUzH/BppqYreb1NWpqKppOUXGnuzTXATcbkR7O2ptLY7amqgpiN3jAcMg9NBDBN3u/gtp30UBoi7EjrtlCZ9Uhbg7GCxqFntfIIDU1ASRCGBMNFBqaSH00EMITUN0dFidUzYbZmoqpt2O7G1DlC1gbfsl6i/lw/lGJkyaQlGxjsNh9tt8RKOMgQDBRx+lVl1FMAjljyhk2uTopiYyrgf5aHHKLLpuiVW0t/dEW/uoPPTdmLlcJqmpcOVKf73pJJK4UwxWmjBUvtDDEv5umN64FZ/uwLeomHAY9u+3I0nWeF+4UCMnx4zjCrd2g92HsjFVO/Pnm6iqNbZj51WsNHGopBituBhMSx444kJvO3wYyefDtNvp/L//78RrcpIvHhwMhS90HdHcbG1OMjP7Z4bCYfSHHkK+fBlhGJYDa4/RmWhvx1QUjE89ytrt+6m/MBazsZGCKVMoXDA4XwTLy9ndkIbnoTQenuij7JEx0d/c4TBQVTPhQaDn0vupeKmqyfXr1oE6kSLQg8YXydkxzOi7iHd2Wn8fUFLONDG6g1x65TCX9tq5UejmY/JG8i8d5lJ+MU6HQV6e1SzW0SGRnm5gmkT1pIWwLOENA2rNYg7uF2iqSXmlTl2dSkODnaIiyzJeKAqB5SviUmzqjh3YDx3CSE8nPGaMlaYDRDhslX8k0gSGfotCxJLcVFWrSWuYF95b1pZOYvgQCOD6yU+QfD7EyJGYTz8dly428vIw09PRCgsxbTb0SZOsv2VkgBDsPDUWLRBEXL6C3JaC2dbGeWMSzlSFv/3brujbJJIMNGWFgJqK54gCkpfyaj0qN1pY2DOuB6kgidRz6rqlFx15vNsdvGkkVlFg1SqTf/s3bqo3fbu4n2tMk7gzJNrwRzBUvmia52bSqa1knG4gPL0IV6qBt12Kex+z96nWJt+EoL6CTt3EcyKT+bLOokVBamsH4YueMhCppQXl1Cm0RYuQL1ywjAxHjEC6ccMyihpoU5/ki48GdB11wwbs9fUAiKoqzJKSfpkhMyUFfcoUTJsNqasLIyuLYHk5B144QjAoqBz779Q22pA6Ujl30YYphSlcAIsW9QZ/+vKF
kZ5BwJZKQFOgZyrV1qocPGhn/nwt7rmJ0Lf2P3beJPiYDxxfJOlnmNF3EU9LswQP+noNRP4fCEiE57m5tNfO+Cv1jGo8gDJRJ39tIcrcMiZKIRTFKg0CK9LjcpkcOWJD10W0DEgIQJZxZCp4Dkt4DsPFizIFBTrl5cFoM1jtzlRUNYXFizVEj2qQYbcjeb3WJi49HfvOnYhwGKOn2SeKm6VdFcWSbbsLagm3pC2dxLBCampC8vkwsrIQHR1ILS39UsLBFSsQ7e1WU5YsY/ZEhOjqRrsW4pXDDyG1p/HU+B0IReaUuZTjx23U1Ki9DYwJ0tACq0nRduAoDeuyOfS2QnhyBoULtAFTvokQ2XjFGjIFAkQX1YEisX4/Q9Kbvh0Mxb08iQcXg3kO3ApfTJig07WmCGfZEhDWhqa4WMPrlcjIMMjMNNm923LPjvCFqSiYLhVHChw8aOfgQfugfFE26zrC77c2c8EgUlMT4VGjkK5eRTlyBGw2bO+9R3D8+F5uSPLFRw7C50PyeonWJre0WBnevpmh9vbehuOIuVmnj2BIpuGgjffeGYneHcKe2cKkdBNjfAEejyvaAJ+IL4SixHHF4dvkCrD4QNMEI0ca+HwCr1dEo/8PKl8kqWeY0TfdFKsjHRlMmzdb9f+xBjFinmUXH3GJbC+voH5L/A+fl2fg9UpUVQXZtcuK4hw+bKO4WEPTRPQEHKmP03XQdUFdnVUPWlurxkVUcThQzpxBCgQw7XYCjz6KCASQOjowU1Mtp9hIZuB2067DVbeZVIX40GDk5WG4XEhtbZgjR2L0uMf2hX3//rjxYbpcCEmwMmsf748awdFABvvNEsDGE2vaMdPMuLTvQJC6fFRNOs2Bc0sQmgaaRkVFqPd5PeH+aNQk1UCxJbaY9/kEdrvJvn3W5igytwbamPV1hB1OA5mhuJcn8eCiP1f0v3+ofLHethL/dik6nqurg2zbpiJJ1uYkGBR3xBdGqgvTbrcivjabNcdXrkQrLcW5bp3VFxSRmE7U/5Pki48ETJfLOjBGZA9zcixFudjfQdf7cQWKAmkuKmdeQDnWxat6IbpNZpKthcLiMIu+GqJulxZf6pNgrNyUKwBMEz3c69Ad6b+JxUeRL5Iz5BYxlDRNbLqpLyI/qHW6lFAU0ILwMWUTaRP1iKs25oY6/NoqXGmRciOBrgtOnrThcpmUlwc5fNjGjh0qp08rFBSEKSy00mCRcV1QEEZRTBoa7NEFv7Cw95QsAgH0adOiE1TouiUDNnJkrwxYTI2n1NISlQgbUtp1uOs2k6oQHzx0HREI4PviF5FaWrDPng1eb7/fdaC0fGD1agD+du5FfvwfKRgZeZgOB+7FnZiZsmU2FvNeicaLkepi69kpENQwbQrY7dTWSpSXB3Hs3Y0IBulaUsHmLQ783TDl7FbmL5QIL10cN18jG69wGLZtU6MRnsiimmhjdrMN251gKO7lSdy/uFOugKHxhaZZPWatxW58XVK0hNRmI+refed8oaAtWmTJQebmIrq7rUBRTg7hceOSfJFEdHMeXL4cbeFCEAJ1wgTUl18eElegKATXrGZZeB37/2RVQoRHjmfxV3KQun1ULKGXL26DKySpt9/mHW2VJdDiMFhj34iUoqIt/mjzRfIwcAsYjjRN7A/qchnoIZPp57eSQwPB0kIuF7nJbagh82ADU1A4PakK1WFZyy9ZYtXzezx2GhqsxXr06DAFBeHo6x88aKdwrh93UTM1Dbk0HHLS2ChTUBCOT7FBVCkoMjEjC7lWXGzV9WVk9J7k9+1DOXUKTp1CKy4eUto1Wbd5nyPRgtsT5en3uw6Ulnc48D/2OHUbTEJl6aBpyJcuseu3jbgLb6Ct7CX8fq/b3o4pydQ25LLu4hImTu3iC88FqdulW1HOQwqr7WHmBzzWpklbxfTGrWScaCAwYy5hv8n6t52YJtHFPTPTJBCAkycVfD4Jl8tgzRqLePpuzHQd2tqsf9+NiP3dJI4k
PlwMV0p/KHyRtb+G/NetGu1woRuHw6S9XUT5IrK5N8075IuMDGtz390dN8eTfJHEgAc5v3/oXAGYqoMNuf8VbWHPemu3s+s3+6iadBpSel830VgxUl3UbTB540IZE6d2x3PFYRvFC4JUhoKIfR5GmHZai91k19cghAexuJDAR5wvkvRzCxiONE3sD7pmTYBAQJBzRCIYLOQtbRX+GgmnYxVr5sN8VWLsLI09e+xs325p1y5fHmTdOiehkBXRiSzsjWfgxpkQq1b7WaVtRKrxs1J1clh6EkWRowt6bW2M3m4ClYd+E5qeRToYtCJDzc1oixYNKWKTrNu8v5FowYUBfteBZOd66o49J+0Ulmi457Ww65eXqb8yHoCyRT7Iyuz/uqqKfc8ehKbhOj+RggllhMwU6nYp0ShnY6ONix+vYhY6mQcOsPjsIXRd0Dq9iNyqxax/20lDg530dIMpU/TofA0EBNOm6ZF9C4GAiCo/RCJDDodJTY2KLAvC4eFvBIvgZpHhJO5PDFdKf6h88bFPQLaQMJZp1NT0HkJSU00aG2UmTAhz/ry1yW9slFFEmNSgj/nz5CRfJHHHGPAgl5Z2a1xRq+I5bKdwiZWRqlvfQcO6bJCmU1VwIvq6/caKw4Fjy2ZS389mijEOv5gaxxXnzinMnRsiUF6BLQz5r9RHy+zMtYV0Lalg/bqPNl8kDwO3gOFK08T+oA6HSXjpYrxt4K+ReshDonmRm8wskL2gaRapdHYK3nnHQSgEx48rgGDKFJ0507u4vP4imq7j6N6HeASMrCxq67MJE+Kxx8K9NaAHrFN1+fIwUtBq3IlEX4TXm3BCRydedzdGTo4VARriB03Wbd6/GJCcb+F3jcoZ9pQbEHZZGuXBIPYUJ6T1MafpeV10HXX7dmssahoPz2wHlyuaFTt3TmbWrBAVS7sRm/3YT51g8uSpBEIyjr8sg811jDnp4lR6BR0dEkIQna8RWdHIZi3y99hormFY5DRunCUVl6znT+JWMJwp/aHwRZvb4guvV9DdLUhLM+noEBw+bOfyZZmmJgnDEMiyyYSxQVz7d6Kf1nGeakRUD8AXWxU8uwWEFSrKe42eknyRRF/cMlcoSnTcRMUi+nCFEFC+PIzt0FXUYAhSBn7dyGFkSWE7pgl7zHE0NKTg8dgxDJg4Uccu60itLYTnzWFywwE03Sq7u1rkRm8XmCakpxsfWb5IzrZB0Lfm83bTNEOpHXX1bTzp6TuOkEpnp+DsWesA8OSTfo4dU6itVdm+XeXM4SD/JX87pLlI6e5G0mTMLh/21BEUzjGjEyvSae844iPt3Rr0adMsS++e1JvpcIBh9Dcbu51FOqa5J6o97PUmF/n7CYP97n3rcQep911c0g2dPgj31BNjUjXtHGZWJkEeBuiVCO15XTOkW74XnT4C0igOnshkfpGOaUJjo8y1azJPPu5D/fOfSf3X/41y5QriRjNnRy4h8O1fEUIjZVIxUwqCCEWmujoQ11ycaB7HRnPb20WPnC/Jev4kborh5Aqv9+bPGYgvjhyxcfaswsSJOufOWRus8eN1rl+X0XWTixcVJmW18NWpr7DVtwhn1wB8EdZZpb2D3RyB/bDAEbxmeQvEzO1h44s+jaB9N4lJ3Ae4Fa6AAfmiL1c4amuonO5DSIKAe3VcFkHEvK6R6oryhaaMImQ6ouVu58/LFIwPYe5uIHXzayiXL4HNBgWTaDxhcuGF7VwrXI7TYTBlcgjDFCxdGuzRFBUDzuUHjS+SM20ADFTzeatpmqHWjg404CJ/93oFqanW393uIG53kO5uifPnZWThYHneOcv8Jd+F74kvInSdYpcLU+61mRc+q9NeSAJpvw9TVnqbu1wu1Jqa6K6sn9nYrTRjJXCSjXOsTRrA3D8Y4u8+UB+BaG+PlvuYTidacbH177yR0cftOjqSYFBED61RSUPXaspKrrNkmYl2KMhrr7m4ekWCcJjRY3VEdzfqvn1Iuo4+ZgzhYIiR
x3YjK4JzU93MeFhQ9O4/Ey5dwDs7KuOai1m6uN88jhy8Iwt7dXWAjAwXweDQrOiTtf8fTQwnV7z9tqCpSb1pn0EivjBNSw0IBKoKpaWWUtC4cQbhsGD8+DAXLsgsKFcQu12skPZgjk3MF5H57C5uRb5xHdogPGIkUpdvePkiyRUPDm5hj5CQLxwOHOvXRyVHteJi6zGZGdBjQmk6HFGZ3H584XyEJbPOs/iRPA78P4KrVySuXjRBlpgyNkDVmCModZdQrl0jOHcuWsAgpekCD7VeIOfKUUZXT8O0C46edbFr55I4IYpEc/lB44t75DLuPQxXzWdn59BtrRXZJG4uxTgq1dfb6e62yEbXYedONdozIASsf+jbVE5vxByVBw4HkXeICGbt3m0n2J3LKqcT0eUjnOpi23tjsafZKI51lc3IiLq/JjQbGww9HTSivT3eobapKdkY9oAjUQ2nunlzr0nRwoUIv99qNIw1ikl1EQyKOLv4qKTh3DC2fZYEne3MFK5eLidfXEKWQkw0OmloGMu4Yy5KDJMNoSqKgvtQJI2QKdM6fQEzj+/BWb8DL+Cf4mb6+W3R5mJbAreyyEH7zTcddHdbEov/9b9CONz/88Yi6Rfw0cZwcYXPJ+jqGrpGeV++EFhRfbCahiMbJUUxmTAhjCRZfKGh0vncV5CvN1kywQn4YteRHIyzU6iadBojMxMTQU19NvbUEcPLFxcvWmtBz+skueKjgYQ1/+vXY29owEhPR58ypR9XmC6XZZiXiC8OKJRSj9J8mu3npqCHs8jnEhNzW9nXOIZTp1PYEp7Fx2/8J0bYYPPlOYyyt5KnSOhaGNUMMHLza4R0MPI/zfRzW8g44RmQK+DB44t74BLuTdxpzWfk5Jef35vOVVWzR8u5f7DDvmsXQtMs0yYhwDBQ6+owVZWW6WXs328nJcVk4kSdDRscnDhhQ1HMqGa0x+MC+zQqJgTpO2yjE+iwE+auwV18jVr5URreT6NwjsCUdbjT5q1IhEeWsWsapqrGO9QmG8MebMSmiR0Oi9R9PozcXDh1KiozaGZk9DeKqehVyYpKGs71UznlDFK9DyM9g2Pn0hiT6WOi1Mq5tmzOHg0zufM4ndnjeGvOd+k40Ih38lwm2i8hLl2h4uyfMAomEFy6FIcWYvGWH0Wbi0dVlw1oW9zVJXj/fRuSBGfPwsc+NrjDMST9Aj7qGI7+gAgvOBzQ3Gy9jsNhDlgyNBhfyHIFJ08qTJmic+GC1Txss8VyhR3IoKLCkXBsWw7FCh4WodtnUl4Vpm4TNJipw88XgHLyZLRkNckVHxFE+MLrBSEQXV2Wu3Z6OlJHR1SdKqEJZV++MAyKxl+hSpyA9AzUsB9boJOJWa1gUxgVusSVk6M5Kmew4vFPsPXyQxw4l8eC8U3MGudFnL/EyIka4hLYFMi/fGBIXAEPFl8kDwMDILY851ac6yD+5JeXZ9Wf3bgh8e67tqgqUOxp0L5rF/b6eoSuAxAsL8f1/PMojY34Vj/G7l02zp+XuX5dxjBgUYkfmx4ipNsJtXZRvsq6QLs9sYFTRCIOwHNA5fArOkJvpXT6acrKZyGEcsfNW5FIEWPGIK5cIbhsWbT+M9kY9hFBz++tbt6M8PmiJK+VlKAVFVl1mj2+BbEGNJLPR8USF54DiiWariis0t5CqvchnzjJVmMF3aaTT3xKp9o4Qs3udF7ZPo5T3eMx2nxMvXaahY5D5H1qJR1VXyfz2WeRr14lLMD7i1+Q9vOfM3lyGE2DUV8qQ7GJAdO0iaznYfC0btIv4KON2JKdiFvwrfYJRPgiNxfKyqy1essWFU3rHz0cjC86H32Ml153cOKEQnOzRH5+mEsXBONGdKPd6KL8Y9aLqOrAYzSWLxoOZHLo7TMIXad0+sm7wheRNcIYOzbJFR8x2OvrrUyQ3Y6ZkhLNCASqq6OBpThE+GKhA8/uNFAU5HPnWDVhB7ZTJwlNnUZQHkXIkcbCGUdZPupdajU7
/+5dzdGrI/jhrhUISVA0q5XFf/cw/sBUXP/6ryBJhAsKAJhsxnNFz9s+8HyRnGmDQNctqatY3dm+P2yiHz3+5AcbNjgIBASnTiksXKjh98fYW6caqJpmGX4pCvaGBpyvvop89SrBpUtpLqwgVCuxdq2fPXvsaAETz0uXuXDVwVTpDCsnnkVsdVFetYK6Xans3m1n8WKt32eJLPCe3YblzCdJVE04gdY1vjcNeyt9AX0QSfvR0WFFdHpMRKK4g9dO4v5BbPmAPm0aweISzFF5Vh2wz4f8/lHMUXmER48muGqVVXfc7Wfr6cnIZ0ciwmEwTWqCObhLZZg+DVmZytzFI6io0tHCK1i8wIvhuswb+zIwxuUTFGHynhyBVlWJWldnLeqmCbKM64UXwDSRBDgcAmlnba9BWYI0bWamFUH1eiXS0w1AJRAgTq6x7zpwu82iSTw4UBRL6Wf9escd8YXfDzt2qLfFF4GlS3mjexWBoMycORp+v0TQb9J6qgOHOI161oNDTqGiuhoUS4BCVc0Pny9cruhB4E5fN4n7B337BoLLlmHKCjgdg/NFp4+6rQpKqIhILdzWi7Oomgqh0hKk0ZMo0g3Klswi2D6GpZn70A8d5bXDMwhPnwYhnbIvjUIoMva9eyyXV8NAPn/eurCCAhwqSDtrCVZUoIfFgGU9Eb5oaZFwOEzS0lS83sHLgO5VvrhHLuPeg67D+vWOhLqzsY9J9KO7XCaqanL9ukR2trUvyc01OHUKWlokMjONeHvr5RVgmtg9Hux79gAQHj0a37PP4jKsx/j9gkWLNN47YEBIJ4yM7tfZdmoSVTPPs32jaWm5F2q9rQaxij6ytfijSEgtLYhgkLp3DEofUfqVFd0WeiI6pqoSDAaTEZ2PKGJrQXdcmoIvazLujGZr0TdM6jaCPb2NZaP2EJoxwzoINE6n4WgKxePP4l7SRc2+TA6cHw2ShLswzMKVadHGxt37UwgEUjELRzEmLNBtdrZcmMnl40G+LP8Z+0EPhtOJ9olPYHvvPdQdOwguXUpozhyU48exezz4fODXqqPu3r5OyMyyrl9RoLo6SHu7YM8eO5s3Czo6rA1eRobVLHb5ssTYsUa/Bf5eSPUm8eHgTvjC4TAxDGhvF6Sn3z5fdD37LJm7YMaMEBMn6mzZ4mDi6G4cV9qYYrtBQ8tkjH0BlizxUXMoz+rLSfJFEh8S+vYN7DqWR1BXcM9rGpQvtp2axMELOguW3KBy6lm2nSjgwLHRYEyn7BNjWTwhjGmGEUJh18kxBByPwVw/Yx2ZmEKi8aLKr35r48uz3rb4QlWjWTYAbf58bEeO4Fy3DgDv3Ir4sp4+fLF8eTAaBNi4UTBjhnW4T0kxaW6WaG8X5OTEc8O9yBfJGTgAfD6rlEBVTbzeeN3Z2Mf0rf2KyExFxpbTCaZp0t0tKCnRKC3VEALq6tTo84yavSCbyI2NvS9umqT++td0PfMMK1YE6ewU/OlPKVxoUpisKEzMaUNuVXh57xRePzCZMYvSKSzp1eeNVWkwHE422tfgOWynaFYrq8afoG6LzIFL4wn/8CCLv12McPak4/rIvN0SFAWysqC5+ba/9yTuc/SQPJ0+fA25Vp8KuaxyOKndY+dQ98MsyLmMqYWsiEyKE4fuo2iWjntWG1KXhntBGK20FEnkoVVKAEjtXoxUF4GAymuvOQF44gk/oNN4wcaxEyoHzRQKp04FpxP74cMYDgfBpUutCOqhQ2jz56McO0Z641Gck1fh81l29LkNNVE7+p6PgCxb/h65udDebm2W2tsFJ09ac2KgyG8SH03cLl9EsgmRPrLHHjN5/fVb5wuzhy+WPvMMpaUaGzc6sNvh4vUUJo0JMCd8ESNs8tLhal75bj7jCojTc0/yRRIfOGJKwoxUF8GditUDEB6cL1QRZMH4Jsqn6pDmYvFX5xDaIiNlTkCIoNWH0HOgDQQEr61zAa4evoDGRifHjtk4GJCZ//DDCEXG+eab6FOm
RINGQtfRCwpQTp0iNwxOx8B8EQgIJMma7z2tD6iqyZ49Vv9bZqad6up7nyvu8cv78OBwmJw5o+D3C+x2g+rqwE1rvxwOk82bVVpapGiKNxSyJN4iGYOeMune5zkMUugm9de/geZW9FkzERJ0nrxOV9MJ0mfUYLrdeDwqO3aoSBI88bl8hN/Pq2/P4niDxuzRN5DOnqPiuVyrnpOYFFxKCnJzM45cH4WFMuVLBGKdwoqsfUjXrqFeTMX5dhOB1asRXV1xMpD9ZN3uZOHHIqxAIIBhGIhbbcS4R9HU1GRFtu5xmKaJJEk4HI67/90rCmRlUlGlg6zh8TjxGGvB1U1Z8ctUpZ0lnDMLY9w4ghMnUlzsxUSguaZH+wkqZBMh7P2kB93LV3DsmMKxYzY8HjsirPPkkvOY6Wl0H1OQ2tsJI9DmzsV++DAYBuhhtKJCguXlANgPHmSNfSPNi9zkNtTgPOxBKyyMU++KzO2IbnRxsUZXl3VfRsa91fiVxIeP2+WLvtmEUKh/CcHN+KK5RUL3XEc0nSBteg3bRDUbNjhobZUYOdJAmjyZnR1jcFw8yxVvGmMuN2GOG9F7EOAW+eLNKwSXL8dMTR1cBjTJF/2Q5Is+6CkJi28MHoQvxo2jZFYT4eyH0cJ6VISifDWIcLCfd4HbTX++WHwO5dw5/JqC8DYRXLMahMDe0IDt4EGEEGhFRQSXLUPdvh2nx8OauQzIF7HzOjfXopCiIo22NomcHAO///7giuRhYAD0taC29Jv7p3piF+5I5Cc31+DECbh4UWbGDCsdFHmdiDpE9HmpBtQKWrwKBrncCE9mGmfozhjFcWMGuieNucsEx48rjBxpMHWq3hNtddJ0XTAyrZuC/CBSSKdug2lNCtGTglPVaBq5InMLgapqhE0hUF2Nrb6elc7tYM8gEHTjWL8eEQjEyUDGyboNYio19O80gM1mQ7nXj8i3AEVRkGX5w76MIUHXdQKBAE6n8wN5v2jdscfeE9VJYeH3ltDlm4uZlhYdP9EmsphxFaEf4fVGlYiEz4d85TLPfB5+8ossRFjHvmsXH7/2fyz1Kllix8mHCIT8LPzhQuyHD1tp4UaZk4tWslgKWXroQuD0eBh32AOAVmjZ0fvapX6mUbJs8uc/w65dKorSW84R60SZRBK3yxcR11OvV+opH7KkCSP3R8bjgHyhTyZDb+SSMYbLbVMZcdzFMWHj+nWJESMMJk7U0XWZxitOWs+OY8zIIBMzWwmHMqitVXu12ofKF7Z0wu+OQoRCVnOnafbKiyb54qZI8sXAiOMLAEWh9MfL6b40nfDEiaAoA46pqDeGz2fxh89nOWQrCs98Xo/ji8fG/R7h9VKbtprtxw2WibcJPv4YtgYPjY2yVcLzXIXln1FRAXBTvoiIzRw9mkZdndWLk5ZmRMv87geueHBm2TBjIAvqvoit/Yp1C1ZVM06xIVG9qPU8Qbfp5N3yv2X85Xp0XcI/roCUTxRy9q1M/nyiCuULJpMnh3nyST8VFUF++tM0GhtlDCHx3x85yvLJp9l6dgr7jk/HdOnWAq8oVqlFW5u1kfL7LTv5zEyErhOaP9+SgAwEEOEwpmnGy0BmZlqs1JO/TmgScotNXoZhPFAL+/0GRVE+0KiUaVoN+ADoOsrJk+z67WVWKlsJT58WbyyTaFzpOvZ9+1BOnYITJyzFCRO2n5+OYBGisxOpuZmt5mRWqLUYY8YQCNuob5pA1r8epMhuuRVfvSqTXV+DuagMIVkLvN3jiV7nQA3FkXKhSK323r12Jk/WSUkxcbvv/bRvEh8cbpcvXC5LLvr9922oqsnGjYKiosQN6wPxRdq8cehLSjm2J50XaioBGDUqni9QbISw8cl5h6gsusFGe0GcVvuQ+aK9HREOY+TmIjo7LVnInjq6WOWXJF/c//jQ+MIwkE+fRnR3s+/rhyhfYaBcujQ4VwCmw4Fy8iSSz4eRkoKpqhDS2Xp2ShxfbDFmsLLrdbTsTvaH
i9GPBql0bIhyxejRYew1tWjuiuiBYKh8EQgQLRWPKA3dL0mt5EwbALfT8R15zuXLVp1zRoZJIGDVigIDNpUYSxdi7t1FQJOw203sqsDrF5jLFmHfZqJpAk2zFu26bQoiGECRnIweA6H5hQQXjKfs4y5CO3VUtVde1MzMjC7ssZrNpstlqb089FBUxkvdsQPR3R2VgbQ3NFi61T0n8H4mIbeh//ygpHrvZ3xQv0FkYbfMw/xUt73I9vc19jdMR8mcwLJZSkITsthxJXw+RDCItmgR0vkLmKrK1ouzaDiRQeHHvbhLWtl/8AT1rTMgXWfZbCcVSge5uw/gr9d4cWwZJwqW8+iUDSzQ9xGqCxEsL0etq4u+h2FA12t1+MQqMjL7l/+kpVklGi0t1pweOdKgu1sQCFj13kkkAXfOF7Js8UVXFzQ1SdFDRaKG9b58oToEcx8O8bZeBnutx0ycGI7nC8VJ3qx0QvPmElwhqFCsEr5b5gvDwHQ4EN3dmC6X5eOxYQOYJmpNTTRam+SLBwMfBl8UTbvB6s7fsPX8DA68n0N4fBaVsy4OyhUAIhBAnzbNmlgdnYj2DrZcn9+fL9pmQNoalpUH0W900dFwmQOHrnJ+7EJS/3YZRWxEPehBCG6JL1wuk9RUaGrqlaMfOdK4b0pKk4eBQXA7Hd+KAmPHGtE0b15ebyNZoqYSTJMTz+8i53QDlwoWMPmZMoLv1ZDh8WA/70QLVqPYTGw2eP7nKXDyLAvzL/HN6htssK3h4LtOhJJHRUUwrgY0cjEDaTZrJSVWirdHAjTOCKpnE9b3BJ7Uf05iqBDCGu+FhRruec2ILSaVM87DMQO7CqKn3rOfsQxWaZDpckU3FLsOZxFgNJWzLuJo8lE4LUzIWcDuU2Oo+PwEQjskbJ0jwWWgHD/O3AKVl9tmcWJ8JQjB9GfLCNWFMO121Lo67B4P2vz5dC11c/Tnu8jc2ICiqLQvr8CV1hvV1XXLQdzttjZ5WVm9LuAul3lPWson8eFhuPgiL8/A6TQTNqwn4ovAkRqaXj9MVpMLzGoQgnPnZJ7/WQqciucLz7vZ4NKGzhc9df9Bt7vXHwTiuAJJ6h+tTfoFJHELiOWLijldCA9UTTiOaM5F1h+CFGdCE7I4Baye/3a9m00Ah8UXV+L5ovzpCei7JGwdeYgcgxVNG3hJFHJRjOPE+Eqec3ejUWGVzt0CX0S4YNUqk0uXgjgcJjU1apyXwL3OF/fgJd0dfJA/RGyUqKDAhddr/b20tH9TSTgseO+0C5tSytuNq6japJGbs4oCxUZIdvDY4wGql/vYtcXkjbfTmGoEqVjQitTtp3JRM0LJi4vuRD5s7ISJS88mquXsuejI4waM6iT1n28b7e3tvP766/zVX/3VkJ/zk5/8hNTUVP7mb/5mSI9/4YUXePHFF5Ekie9973tU9NQ7flhYvNiSLSRsLdLhqVNYNlUQfKSaoK7HbRLMzMyEYzOwfAU+3aTheCYhZxD355vZdiCXg+86KSzUqMn6BKEFrSyUNln2kZrOj64/zVkjnTGhEMgytXUOlpRV0NUtkXNkF0pPOYPPJzg9qYqpmMw8doQRsiBjxcJo3ffmzSqyLAiHVVasCLJqVW/kF+5NS/kkhgcfNl/0zTBHoot9+aJyo0bTtdVkX0slJDv42//eBeEwr71iZ+8umSU5N+GLvk2+sWv8IHX/N+WKng+W5Ivbw0ebLzLQSkqQ2tpYUphGaMkcgjG+RYONz8H4wuEwCEx7gspnzuA40IohKfzs0COcts9mfFYHhELRPpquJRX4uvrzxYkJVUwKwpwLR8ixCdJWLATiTWZLS/v0+dwnfHGPXc7dwUD6zncTkShR7PtkZprRg0DktOj1Ck6OWUpIMwmdt8qIAkGJS7MryZovqFjiw7FlM6s0P0rBJOw2A7nbWnhJc/WP7tykcStay+l0WvrRLS2gqvHRm2RUpx90XY+rX+17+2bo6Ojgj3/84y0t7reCkydP
sm7dOrZt20ZTUxOf/vSn2bFjx4ferCYECcdTovhpojpjMjMpXw2mS7dUJg6PA6Bwrp+Kuc3UenLxXByDwnQqx5/gxyfXUtuST/mcazzzNT91uyQOHLBz4ICNSZN0UpwVrJkWwHnwILmGQFVXceyowqQbQQKndDJMqy47IgYwZgxcuRKfCrY2ZRY5KAr3TRo4iaHhXuCLvhmDgfgiNdXEROAtdVNYpONe2oW6eTP2/BG8bxvJ/IktA/PFULnC5UK0tyNdvoyRlxfvIJ7kioRI8sXtIcoXq1bddEzdCl/Mf9iPCPjxHMpEiMmsTDnKz1+fSV1TLuXzmvnSJ46xSZ2Ix2MnHAZNs0QBnI5evsgKCU6eXE3ohMLMjiBTU/yYsom3vbesr6uLKBdEFCTvF774SMzcRPrOH8YPkaiuNNbBzpli1c45nSaVVRqyDFK7NeANl4uqqWfRypcRVCZhpFqSWn0r+gZt3LJkLizd9b17wTBIPXcOfcYMq/4zlgwe0KjOyy+/zG9/+1sAZs6cyQsvvMClS5d47rnnaG1tJTs7m5/97GeMHTuWL33pS2RmZvLee+8xZ84c2tra4m7/5V/+Jd/+9rdpbm7G6XTy4x//mClTpnDjxg2+8Y1vcL7H0fAHP/gBv//97zl//jwrVqxg2bJlfOc737npdcXid7/7HX/6059QFIWpU6fy61//Ou7+jRs38thjj6GqKuPHj6egoICDBw+yYMGCu/VV3hqGMJ5MhwMMA9HeHk35QgKVCcNglfYWUq2fVQ4nzF/DvoOL2He6iIv5DpaUdvPMcyOR7AoVFdZ8O3pUIS3NxOeTaF5USZ5iKQqt9nk4EbTRvaSQs1OrKOjSopv+WGlRV6qBrvc6USqywfHjCt3dEi6XwZo1gbv8BSbxQeF+44uHHw6xfLl1YJHafUgBP+7iVqq6LgzKFzdr8o3Ox9ZWlDNnwDBQTp9GnzYtni8eUK6AJF98aBhmvqgOvYXQ/NiYwr6Di/Cwlot2k8WfNnjmr3VC6SuokK0+mlDIOghY87+XL5TdHv7bmXcxJGiav4C2kjIyRbxkcF6e5RIOojeo0A2KjXueLz4Sh4G++s4fpMxTrJxoogOuoliOpxEDmsggVBRroTddLnacn4jWFaZiftga8P4AtXUO1BSpn5V8XNrWbu9VBIJoFIhQCH3yZMyUFOwNDZZaUF8p0XsEw5muP3HiBM8//zzr1q0jOzubtrY2AP7+7/+etWvX8tRTT/Hiiy/yne98h9///vcAnD17lpdeeglZlvnSl74Ud/upp57in//5nxk/fjwej4dvfvObvPzyy3znO99h4cKF/O53vyMcDtPV1cW3vvUtTpw4webNm4d8XbH45S9/yZ49e1BVlfb29n73X7t2jcLCwujt0aNHc+3atTv7wu4Uum4pjcT0pgyIQADH+vXWeJVlSwI0khaOVSUC0DRqPCNwF7cidflwL2q2IkAOB+MK4AtfDiNJvZJzluZ7TP1mGgQrKlAOeLhyRaajQ/BWdzXFaii6NkQ2YqpqQm0Nys4gTfPc1iYx1SB1Zw1LJAfekjJ0nWRD8QOEjwpfDMgVPXVyak0NmCZC09CnTAG7HSki3ZjkiyRfDDeGyhe6jvB6se/bF9X6HypfVE06zb7jRRZXTIZnv9yNkDIBol4H4bC1ie/LF879HlpbJTRNsHXqSuY5u4D4Q/ukCw107bxOsKICn0/C3w3TG7fS7HMyvmAJ6en6PcsXH4nDwO0oPQwFN1t4dB3eflvQ1KTidFpyhIkk42IbzyIDZPduy36+vBx8c4pp2CuoPyZTdnIPWleYw5cCzF2bH+uTFP2wwRUrEC0tqLW1qNu2WRKO8+f36rWHw5gpVljJcLmsbEHMyfpewXCn63ft2sWjjz5KdnY2AFlZlqd4Q0MD//qv/wrAk08+yT/+4z9Gn7N69eq41GnkdldXFw0NDXzuc5/D7NEQ0zQt+j6/+MUvAJBlmfT09IQL8s2uKxYz
Z87k7/7u76iurqa6urrf/ZFriMWHqsah66gbN2Lfvx8ArbiYYHV14gVe13G89hr23bsx8vLQp01DunIFMyMDIz2D2p2plipRj2Nq7VaF+lfyAaiYH6amITdOxq2uTqW83CqHEAJstj7zXzZRa2vRNAiHBePGhVlpbmTGwrK4y1MUyMo06dSD2D2eqBNldn0NY6/X0zRxAW2GicvV3202ifsXHxm+iHCF14t91y4c77yDkZVllWjE+HsA1pN0PckXJPnirmCofNFT2iZdvYrtyBGrsd3vRz51ivDUqZiqo1fFrg9fmCaY6ijokagGiysiZmdgDfN+87+HL3QdcnIMJAlWGJsI+MuIqOkqCmRmGMihQFSG1LWkgilnt5JxogF9ahE52WG0kHTPetR8JA4DcHtKD4NhKAuPzyfo6uqtG4uVjBss/WyalrZ5JM1VXhnk8HsudtTInGMKBWP8lORfoKzIASIz4fWpO3ZgP3QIIz0dfeJE1O3bLb32U6fQioosNSGbjcCaNfE1oAN82A+jJnS40/WmaQ5pwYt9TEpKStx9kduGYZCens62bdvQezIvd/O6/vjHP7J37142bdrEz3/+c2pqauJqUEePHs2VK1eit69evUpeXt4dXdedQPh8SG1tltkYIHm9A0YSRXMzjnfeQWpqghMnED4ftvfeA0lCKylBTf84hXPDuOc1Q9iF3WmHaZNgVjablBTeeMvF9esSZWVBZs3SaWiwc/iwDUUxKSwMsXix1jv/TWtht3s8+EsLOTJ/FSM8NUy+VE/eIQ29qiLudK2HBU3z3NZB4LCHtYYHTYD5yUJGlJdR0KVFF/bBIrpJ3F/4yPCFoljuqx6PNVcNA23BAuwHDiT5IskXHxiGyhfC50O0t6Pu2oV8/jzylSuYDgd2jwcjPR3fV76Cqlpy1rF8YU6dxGFzLAHTgSMFTp+WepZ562BgmlYZz/z5g/PF+z18Mf5SPbkN8XyhhwWtxSsRbR04PR7sHg+LAO9jRYyqLmO20OIaiu81vpA+7Au4HxBJ3cbO4b4Lz+XLEn3neER3NpJuikjGDZh+jjmtV1QEKZxv1b399Kdp6Lpg9BiDiTlepGAAd+ENSOsTmelJn4lr15CamzFSUpA6OhCaBkKgLVqEPnkywu9H3b3bOoXH1OYJr5d+H6LnJK5u24a6eXP/++8ihjtdv2TJEt566y1aW1sBounVBQsWsG7dOgBee+01SkpKbvpaaWlpjBs3jjfffBOwFuj3338/+j5//OMfAQiHw3R2dpKamorP57ul64rAMAyuXLlCWVkZ3/72t+no6KCrqyvuMStXrmTdunUEg0EuXLjAuXPnmD9//pC+l7sB0+XCyMqyhJkNAyMzc8BIovD7IRzGHDECMzUVY+RIaxMhSUhtbZTNbGKV9haOmm3YNm5G8xvohoympmFzSJimSVOTRF2dA8MARbEkfI8ftxEICOKCYEJgqipaYSF6VQUrVmpM+dsy8tfORUpR4w8COrz5pmD9207e8K/CMCyucjhAr6pAsfVuNjZvVtm8WeWNNxwE7r1y0CQ+ICTiCujli5QUy7Oivb3/Zu6W+SIGFRVBCgu12+ILOjuhoyO6touurqi/R0K+iEiKJuKCJF8kRJIvBsdQ+cJ0uRCahujoIJyfj5GWhpGaipGTg+TzITU1sbikux9fhE0ZnA7mFeoEg4J337Vx4YKCLJscO6bw+utOjh27fb6IHPbXvy3xYlt1dNhLEthWl6PYRFyAYcMGlfXrHWzYoH6QU2RQ3CNnknsXA0V0IgtPIi3oWEWIRx81aWwM9reVjzkR6joY2/aQIgXY7lxJUJOoKA+wio14L2SxNVzBjeuChVnHEFh53o3yx6iQzd6GsIgyRHs7jk2boLsbTJNgZSX+j38cdc8eyyimpzwormnM5RrY5nsYnCRvF8Odrp8+fTrPPvssa9euRZIkHnroIX7+85/zve99j+eee47f/OY30YawoeBf/uVf+Na3
vsVPf/pTdF3nscceY/bs2fzDP/wDX//616OybT/4wQ9YsGABxcXFVFZW4na74xrCBrquCMLhMF/4whfo7OzENE2efvppMjIy+n22NWvW4Ha7kWWZ73//+x+uMkSPIoS2cOFNa0CNESNAVaG9HTMzE33CBOxHjlj3ZWWBEEgBPzsuTUHr0Kh4phnkPBrqFRrPmEh2kzVrApw5o/DrX7swDZOxIwJ84jGNigq9nwOktnhxtNZUUSAzi34ZAbA2dbt2gRaUmXmhhsBEiAT+1NraqPxcpPzj9GmFjg4rvvL444F7JuKTxAeDwaL/LpeZ0Gemb1naUPki8rf9+63yoIhvQEODncZG+db4YsMG6zAgBP5PfxpjzBjMEycS84XXi72+fmgKREm+iEOSLwbBUPlCUaz9zObNiK4uSE0lnJ9vOWe7XJbalc83IF+89pKM4pBYvTrA6dMKO7bbEUaY0fkmn3gigNsdvC2+iGSiLl8yGXlwOydtCjNm6EiSxRXBigr0sPWYQADq6+2RRBwLF2rk5n74ZUPCTFQ8dg8hNpX1YcDrFWzbpkYzAJWVwei/HQ4rIrl/vz2qBV1ZGYxLTebm5tLc3Nz7gn2K/PWQyebNKiM8NeRfqqd12gLeMar5mLKBEacPsK5pMf/e8jEwDD4x9SBffvIEtQey2WuWUjQvQHm1QNgUhNeLum0bwu/H8cYbmCkpyOfPo82di7ZiBcHly630rsOBWlMTt5gLny/aWyB8PoKVlUPSmk6Efp83Bt3d3f3SqPc7FEW547TvB4k7/Q0G+31vF6K5Gcebb1pRR1lGW7iwV4K0h8TsmzZT4xlB/aV85q7Np3xZgC9/WuNKi5MxOX5++h92nvt6Lu8dUcgyWlgy+SJfe+IIm9Q1CRvt+yK6wUo1UGzW/GxpEfzv3+Yw5ex6Cq7VM+fpOagfL0etrUU54GG3UcqRMctJS7caOQ8dspOebjBlis6KFcEPRIFmzJgxd/097hfci1yRmdlrNhQIwJYtjqi8dF+ugCHyxRaHdeCwhbAbAQ6dzGR+kbUGvfaaFeEkHO7li/2Z7AsVMb9cpaLKOhxH+cLnw/n//r8IWQZNo/sv/gL/X/4l0GMs1ocvtJISy5k+EVdAki9ugiRf3CF0HenyZew7dyIAE9AWLAC7HWP06J60rX5TvvjkF9P4j5dcNB3tQJgGi2c0MXdtPo7Um3NFz2XE8YWuwxuvq7BhD1NadqEVFjLtC2XkHaqxSozmFrIusIo2r4wsmxw9akNRrMPAs8/6yMn5YLbhg/FFMnbVB32bvGIzAFZziRkX/XG7ew8HN0tN2nfvRgSDBCsqQAj0kEnXa3VknHHRssANwMLuvYy8doBjx2ysZynnZ1TyiXI/etBEP6VSeyCbirlNmO834DjSjTN4hcDq1VFlCDTNsovv6LBup6db9XeBQHTR7qf57nJhqirS9esYWVn9jGOSOtJJ3DXoOvZ9+1DOnwfDsFRN7PZ+UrfayhWULfKhNeTScMjJay/ZuHa9kzEj/RRktvL8TyZw5YpEZwcIbJxpy+Fnb8wiMEFQVCaiJT4DXAKbN6tkv7sLpxRg1pfKUGyCjHSDJ1ybCMvH0ZbOR/5YmaVcUVFBR4fgvfUuzgRsGAZ8/vM+ZNnau92rDWJJDD9i+aIvVzgcZly2QFVNMjPjfWYGw2B8Ic8rZcS+TTw8pQO7OZmXXlnK1SaF0aPDfOIT3b18sT+TFWIzyAZqvYlYMh8cjl4lofZ2hCRZZXpOJwgRjeYn5AuIPg8hLHnHWCT5Iom7hchB0+dDaWxEnzzZkryN8MU4y1MARYnnC4/Ka/8/wbVmiTEjAkxIb+U//yOH48dlhM9GZprOnuMjOPOynSc+HY4ToxjgMhLyxerVAVp2HiFoUzmxwI0rTbPmLuALOthfryJJ1vNnzQoRDguysgwyMu4NrkjO1BjoulXL5fVKZGYa0TSu2x1k/XoHpgkbNlj/j2QCAgExtNSkaSKCwWin
edeSCsva+thh3m9fxNWQnQUL3KhH91JQEObAATsNE5azYKLO3/5tF6YJddvykYxs9MIOqvw12E6dRNrdAuEwgSefJOh2IzU1EVi+HOf69VYDmCT1r79LpOEbmQGJEkUPsI50Eh8CYhoMhc8XrU+WL1zAVFXMjIz+JQaWtA/llTqvvgpNJ7rIF5f57azf87+an+aNLVlkZZt85jPd7P9zkPqTuRx1ZrFmhsSyZd3U1amoqpkw6uPzCfzd4FICZBxrwNygw+pyUnfWUiYf58rqmYhVi6MZA4Sgc3EFJz1p0aYrVbVKg+5lu/kkhheJ+CKWK2pqVIqLtbjesvLyILLMzcfITfii2RfmCaULx4hUqpyn+eW+JdhsJgUF4Ti+kDtVjOBZVp6tQfZ6Cf3nKfxPPQUOh8UXV65gpKSg7t2LmZKCMXJk/1rtPut/0O22ZIBNE7Wmpn/0P8kXSQwX+nKF34+ZkYE+bRqhGTNAlgfni8U+3ni+lWttqeTLV/mXT+7hp9vLeO9YKpIMs8Z5ueG1c64tC6PFhmn2zJ3b4IuMvbWMm9bNlfEzGL8iiKIIwAoe+Vok2GM9X5KsPaXDMYR14APEPXIZ9wa8XpGwlisQEEiSZT50/bqEw2Et7KpqRps/YtO9sdGiKHoiioDV+b7bQ/Y5Be+sIq52VRIKCQpObEMI2LfPjmHA8vBGvGZl1CLbSvHaMfVMhGmgvP8+KAq2hgaCS5Zgf/fdaHrW/+lPWzV1Q9B4Fz4fQtMwRo78wOs8k/iIoW8ZgdttRRq7uwmPHWttgnyWY2p0U9JDCEaqi7o6B+rVi+QbXibmeNlsLqfw47mc2xJi8mSdv/mbbr54JpPruoxsFywo7WT79l6puX5SvPRk/1LgREEVUwyYfnwf0skG686KCmyFhf2elJkFxcVadCMYcZy811wlk7h7SMQXPX3vuFxWhqCzU2C39/JFJHOUqP4/bom+GV9ocD21AOE7zU9rFhIyFUblGADxfBFOgzcU5HYvUmsrtp5eHP8TT6Du2GHNw9RUOr/xDWtjdTM/EEAEAiBJH0pfQBIfIQzEFT2Hg/DUqSiXLg3MF6qDPT85gr0tm3ylkwkTwmwWK5n/qWzOrQsxaZKOv2sExhmTJqEyfkIIh8Okrm6Y+UKIqFlghC9yc++dQ0AE99jlfLjo+6NHbrtcJna7yd69VvNXYaHGwoUa9fV2tm9X45rF+jaRfeYz8S8YMTsyDJBlk5MFlSxwahSc2EZ410F+IZbwZ+MRPrvoz3xp3A4OKBpvN1g6wVE9XEUhWFGBraEBuakJ+dIlHO+8AzabtaH3+62yoIhGNAwq9xZnPhM7qZJIYpgRje44nUgtLVbdcZ8yhLhx2kMIdPvZenYKnsB8nig8yQp5KzUnJrD32lIetrv48Y/box4DBZMMkARXrsi8+GIKBQVhioq0qPdAIpSUWAt/5sfLkJ7fF/27uXIlelNLvw1brPnTvRTdSeKDQyK+SCQskZJiUlYW7McX0L/huO8Ldi2pwNjtsfwFZIOTE6tY4Axht8O+hoW8tHkhV284WLpM49lnfdGNDPTwhaIQWL0a0d2N7cgRpLY260DQ3W2VAmVnW8pAqhrXJzZYmU+SL5L4IHBTrkhUkhbDF9uOjuPA2ZE8OfsgyzP2sdH1BHsvzKJwpM6PftTO9u0qBw/aKZgG2C2VoT17VISw9niJ+CJyeHe7g5bh3wPEF/fgJd05bteFMCPDpKREo61NiqvlUhRYtMg61eXmGnR3W+VBmtZf07iv5GhnZ8wbmCbK1lrOnJHRdUFG23nWnvwx2le/gDNT4vUzC5CaBcvlWqp/Wkpou585domrWghVNeMGppmTQ6ioCLFvH0ZWFsrZs1Z46uxZtOLi+AVa11E3bEDyejEyMxOaeWglJUNzik0iiTuA6XJh2u3Y9+4FwJ6WhlZWFjfuYqOMUUJIc+Ew/BQ93IXb1oLRNYVl0wTd6eNRUyQkiajZTFGRxsKFJr/4
RRpXr1oKGV/+cpC6OhVFMZkzJxRdG+IO7w6DNfaNcddrvLOJzc2L8AekfgoxyUzA/Y87caxNxBcRRZvLl6XoYyLlpH35Auinix93bSGToz/fRfY5hUzveaaO0MixbUCsqECWIfDGPtI7U2DMMp591ock9QaM4vjC4bBKg8A6EHi92M6exVBV9IcfjjcQi6nLRggCq1dD374AknyRxN3HULiib0laLF+ocogFk65TPj2ELhWx+NE5aHt17HaTHTusg0DEmOyXv0zl9ddTEAIKCsKUlwfZtk3FMKCyMnGgd8XyAKk7a+Ou+X7miwduFt+JC6GiwKpViU9vGRkmOTnWQWAwDei+WsdpaeD1EjWvMPZ5uJS/kNYFFUx/7efk792K/iuT9XO+BmIHC0J7OT+mpMcZrwIhCSrMmBNqRBtaCAKPPgqyjOjuRjlzBq2kBKmtDa24OO60HJGDi5rKLFyImZsbfb1+6g9JJHEXEZo923I3zc3Fvn8/UmcnRk5OQuWR2Ob2sodCBFYqaKI3GlQhmwhh1XWqik7RNB9LFwte+HVmtGlYluGF51PRQhJCWApgqSkGK1ZqvYf3VIPs+hqE8KAtLiRYUYFaW4vYuZ8R3QFai934uqQ7NjJK4t7BnTrWDsQXigJjxxpxwhID8cWAuvimibmhjuwTh2mfUcg1fxEjL77EiI2v0J1q8u4RGxOuHoJRC+iYoEedVIUg+v9YrjAzMnoPBO+/j5GZiT5xIlpJSVSOMbbsRzl9GqmjA4DA44/3zsskXyTxQUFR0IqLka9cITxqFPaGhkG5AvrwxdwQgcoqtOBUa2wrSnRu7N4hUzTNR/kSgWkq6Lq1wbpxQ6KgIMzPf5bKiZM2xo0LEwrBqpUBfF0xJoCdYG6ow37Sg1b4YPDFA3cYuFMXwoFOb4k0jBM1Dvf/e5p1R495hVlayA3Nja9T4kpBCbNtJ2nefZ6c7T9lek6YyWUgOWQ2HLQMLaILO0Qj/Pb6esCKzgRWr0Z0dWHfswcRDGJkZmI/cAARDPYu1gPVP/Hh6kJ/lNDe3s7rr7/OX/3VXw35OT/5yU9ITU3lb/7mb2762NbWVj7/+c9z+PBhnnrqKb7//e/fwdXeJcREHeVLl6Lj0MjJsUrbBhp7Mc3tIqzHOaDG6qa7fe9gdvuo+VEBum0ZS5cGOXtWIe/YTkLvBTGWlzN5cojzjQpTztRiKCYu90JrQ9YlMSLVjjmnkGB5uVXSV15Oamo68i4DX5c0LEZGSdw7GA7H2jvliwHFJ4TAkWmndXoRpydWoaom3eph1H21eH/zFn5tEuqyeTwxz8/B4zVs8FQC8QeBvlwRXLUK/1NPYfY0/5o9uuxxUtNut+Xr0dGBkZ7e28MTMaZM8sUHgiRfYCnN1dcjX76McvYsps12c66AXr7QdaTuLkueOuKb1DM33L53wO+DNwQb0j+JrgvWrPFz7pyCtmkPkhbEnFpJUaHl3VFwfCsPl0g4HRX4fBJOp4EjzW4dBMrLAQhWVNzXfPHAHQaG24UwFn0X/sjtiOtkZEHv+7hoKrpkMYps4g5qrH9LRdY1TusFZJmNYJq4ms5xTp7Iw0/6uC4HUdXewSt8PggGkSNa2j3urJHegGB1dfQxjs2bMXJzEd3d1qTJyIhmDYysrKh2OyTrP4cKXdeJtXPve/tm6Ojo4I9//OMtLe63AofDwde//nWOHz/OiRMn7sp73BL61h336ENHxqM+bRpaURFGdnZ0IxLXANYT0cQ0e5vb29stFRNJSmyO5/NhO3sa19luFk2ChV+ezxefy0HRgzzUtY+ilnr2XZxLyiWZiUo9Tmaj7KhhtcuBXxakzAkgOWyodXUEy8tR6+qQc3MpntPM7PA2pKrFvRu2RJ1lSdxXuJtcAUPni76HiShfLFrMrIUmYzs09uy2cSA4lwUttbTLJqRar2PzHGR+USHXiecL6fJlpBs3EKEQps1mcUXPBirw+ONRDwGpqSk6J4XPhwgErNIg
iB4YYjkhyRdDQ5IvbhGxfAFRh2uhaWgLFyJdv25Jpg/CFdExrGkYOTnY9+5NmEmIZr/OWNmvtPSxFBUvZZlb54vPZqDoQeb497Gy00Pj63MY2SwYOeYA8ryHWWPbgHH6DNKsKehLKpB370atrQWw5tPq1RS3v8JsswapcpH1lvcJVzxwh4HhdiGMYKDa0pulmhPdHwgIJFngLakg77VDZHdfZlb7FWw2aDOm0VJUQUWW1hvhiWjrHj+OCASQL10iPH58vCdAj1+AunOnJSl66hRaSUlvo82qVVECiNukRZpwIpuvBxwvv/wyv/3tbwGYOXMmL7zwApcuXeK5556jtbU16ig5duxYvvSlL5GZmcl7773HnDlzaGtri7v9l3/5l3z729+mubkZp9PJj3/8Y6ZMmcKNGzf4xje+wfnz5wH4wQ9+wO9//3vOnz/PihUrWLZsWZyj5EDXFYvf/e53/OlPf0JRFKZOncqvf/3ruPtTUlIoKSnh3Llzd+urGzoSqECoNTXWGD55En3yZFAUjDFjCI4f378BLDaiOW8ehEKI1laEpmHa7b1Nj50+yMoErIWYbj+S10vZNNAmpbBp0zwKJho0UglHTabs+HeqzG0YY0aT8TePoShgP3gQ5s8nTQHlgIeAYcMhhbAdPozQdUR6Oq7338cxcSK+FQsBKVryZ6qq5U6ZxH2Ju8UVMLx8YfmBWbfbUscwov0yo/TLdO46g/fpx7G5K6igD1+0t6Nu324Fi2w2tMLC/lwR4ZWTJ9GnTevd+CtK3IEhyRdJvririOULu93yuggGrUoKu90af6mpBKqrEbo+MFeUlFhR+lAI+fp1MIxoUHQgrjAyM1ky5RL+wmZqt+fF8cXkc//OyvBmlILRpD/xGEKA87AHU1EQhxrwmyZdAUHuhtcQwlLokn76U1wHDiA/8QRB2QST+4YrHrjDAAx/o8ZgC/jNUs2dnf2bxFwuE6fDIPtALXY0mmxjOZ45hZEdZ7GjkeupteyuEdG0bCQToC1ciD5tGsGlSzHGj+9vBR85Sbe0oJWWxjfaRAgggTvkYBbzHypuomxxKzhx4gTPP/8869atIzs7m7a2NgD+/u//nrVr1/LUU0/x4osv8p3vfIff//73AJw9e5aXXnoJWZb50pe+FHf7qaee4p//+Z8ZP348Ho+Hb37zm7z88st85zvfYeHChfzud78jHA7T1dXFt771LU6cOMHmzZuHfF2x+OUvf8mePXtQVZX29vY7+h7uNvqWEkhNTb360FOmWAu9LEc1yvs2gEleb7S/Rd23j9C0aSjnzhGaORPlzBl0WWbHpSn4GnJ75BN17Ntq2HpuGqktEqXzbWy7MJ0GI50Fs1p57lnBs1+u4tT+fSDBQvk8wXc9CCHwPzyf5vmVOBwmJxvsZJ+sJ9t7kXwuYYwZjZg5E33iRISuWxmDigrEplq6dx5EWTL/von6JJEYd6Opb9j5ItVgytmtZJzw0Fi2FnPvQbJbz5F75TJ2m4lJ7xCM8oXNhpGRQain5E0rK+vPFTGa7VpJCcbYsUm+iEGSLz4YxPKFdP06QFTmPFhWhrpjhxWA2bGjX4Q/yhWA1NKC4+23sb33nlUynZqK6OwclCsWFtowUl3UenLxHFRYMKOVL39B8MXnqji9fx9ChlLjPKGDHhAC/9xCmovcZNXXcvXVw+iaScgrM3p0GHtDA6KpKf6z3UdccY/M4HsbkQXc6TRpaZHwegW5uYkbhvummtPS+jeJKQqsWKlhyAJVkrgYnki+LYy/u4AJBSA57dFBE2mIse/di9TSgu3IEbSFC/sdBKKPdToRfj9GTk6/mrqB6j3v2TrQW7S2vxl27drFo48+SnZ2NgBZWVkANDQ08K//+q8APPnkk/zjP/5j9DmrV69GluV+t7u6umhoaOBzn/scZk+NoqZp0ff5xS9+AYAsy6Snpw+6IA90XbGYOXMmf/d3f0d1dTXV1dW3/R18EOhbSmDk5UVvI8tWmjeR
UUzPc43MTDAM69Bgs2GmpyN0HZxO6yBcXIIvazKew06QNdzzvGxtGEHD9fEU50NoVirkzKL00D4qg6d54SvFzLvmIS0drotJeL0X6dp3ESHLnAiXcLpDxTAF5vhKVu99Gc2QCZtgFBSAJOF79lnUujrsHg/Sfg+7d9s5llHGObWSL5Z1oev3rlxcEh88hp8vBPMXSgSmP0yuoXP9tIk0cgLGdQPp/eOEV7mjz4/ji7Y2S1hi4cJ+63ncHHW54g8CPUjyRZIvPgjEjkUjM7PXBdvptNQfBvC0iOUKAFNVLQ8MRcFUFMIFBQm5YlvDCA40WVyhzUwlPHUq9nqTUvZgf8/Pr9/JZ5VZT1o6NDGJy5cvodRfICdP4AmXcLrDgWFUU6IdZkR7I5oh0z16PA4JmDQJ/4wZ2A8eRKo/GMcVXynvQlFuX73sbuMeu5x7E319BrKy7KxaFYz2B6xYbnWaRxZ2bxu40ui9P1EquqcW2iGFaJ2xkNOTqphydisp5l50TbNOkeEwwudDKyqysgKlpUheb3zEPxZ9dXcB4fVGoyQD1Xveq3Wgw006pmkihnAyj31MSkpK3H2R24ZhkJ6ezrZt29AjznN38br++Mc/snfvXjZt2sTPf/5zampqbqkG9QNFAv3n6G2HI1oylHCsKQrB6mq0hQshHI5GIA2XC3pSxGb+WCom6IAfz27B//5tAeHrmfzdsv2sVLYSOjaNw1vScdqcmEzDfuQIUzhN7uq5CE3j+Lqz5Ny4TFYOFOx4BVmC98ZWUbjhZ6jXL+O0yciZIBobYdasaA+B3ePB57MkIi/NrqSjVea115zRDd6tqtEk8WBiUL6QzTg+wDTxeqW4frO+fKHr4H1oMbkHtqEeOsjJmaUWX5zZwmhjP3ptLcElSxBdXZgul8UTN+OLm3AFDMwLSb7o875JvrgzJBqLMf8ecKzFcoUQmKmpqFu2wNmzABi5ub1cIWt4Diis+8/RNJ1Mp2ziBVbIWwkfn0bdBpMjYi4LQn72Xx5PYP8xJo07xLyvz+HN122c2n2OqY0XIWgysfEVZGHS2SXhunYOtdPii5QrGsakgt7Iv2ni80lRrvB5Za5elTh+3Hbb6mV3G/fQpdy7UJRen4GsLIO2Non2dkFOjol9927UYBClogI9LNi8yc4ITw1yqp05XyiOPr9vQ/HmLQ6yj7pwyguZ9ndljA9quD5ehr4zhKmqEA73RjlUFSMzM9oYY7pc/RbuyAvHTqJEUZJ+Jh09F5jw7x8yhpt0lixZwmc/+1mefvrpaHo1KyuLBQsWsG7dOtauXctrr71GSUnJTV8rLS2NcePG8eabb/Kxj30M0zQ5evQos2fPZsmSJfzxj3/k6aefJhwO093dTWpqKj6f75auKwLDMLhy5QplZWWUlJTwxhtv0NXVRUZMI/g9hz76z7F/v+lYU5So9G2kMT6wZk28ipCus0p7h8Nn5xC+ns+5zhE0mIVUTT3PT3cspe7ESCqmX0TVW1n0cCtm/hzO7WpmQsu77ElZwdhFaXx80mH8tSfJr3uFjI5XGR2+RGfueEYtHIPmsN7DVBTsBw5gO3zYavJ3gd1ukndoGx2TqrDbzX4lH33rxe9Eyz6J+w8D8cXoE7sQwSBUVJCZ2esj4DcctD5cFjUdi+WL2JKjiefTKJ5fyKzyMsZ3abg+vgR9pzVG1S1b4np0Iqorg/JFBLoerygUiagn+SLJFx8U+vBF5EBnulyDj7UYrgDiDwc9KkICqFjSxeGXmjGvpHKjI5fj2mQ2GSswL6Tyyq7xhEc6mDPVSUn+BW7c6OJcdgnnf9PEhJZ3uTR1ESnL1pAt3keuPcH0t35JoMsgPHo0F6dX8JDtOEICrbAQR3o6zn/7NwBc+QXY7SZj3ttG54QqbDYzYYngvcIXw/JWhw4d4g9/+AOGYVBVVcXjjz8ed79pmvzhD3/g4MGDqKrKM888w6RJk4bjrT8wZGSYZGUZ7N9vRXsyM+1UrwqgBoPY
PR4AvPPcjPDUMP5KPRfGFOPrBEb1f61IGrl9fhmXO2F8UOtZ/Hss6IVAeL3W4pySgtTcTGDFCssl0uFA3bKlv4FYnxSpVlycOEoyyCbtnkj1xmKYSWf69Ok8++yzrF27FkmSeOihh/j5z3/O9773PZ577jl+85vfRBvChoJ/+Zd/4Vvf+hY//elP0XWdxx57jNmzZ/MP//APfP3rX+fFF19EkiR+8IMfsGDBAoqLi6msrMTtdsc1hA10XRGEw2G+8IUv0NnZiWmaPP300wkX9tLSUnw+H5qmsWHDBv7jP/6DadOm3dF3NixIkL4f0ljrU/9rxpofdfqo8YwA1c6iiZcwWtN5a8codtY9hTDCVEy/yH//SQ6ykYGRWoTo7OT0nu2cHzGX9oyH+P9810Ugq4z0mdvgpfWYsoQmj+X4gs+QMrsL26PLUOvqcOTmYm7dinS2kZYVjxNeXo4Z3sWi9/bxUEqIq6mVPVJz1qGgb7242x2kpub2tewfRHwk+SLDxuPOIM7DFlcEKyp6fQRmFnG5m36mYxDfY3BuwlImFgXItBHHF6K9HXXbtihXiK6u3nVTUSwFrh5loOhGP2ZOYhjW/YlK95J8keSLDxp3yhd9jPBME+o2mAhdZ+JYP9DMlY4x/LymFGEajMnu5slH21iyahpSMEA4pYif/cjBtKZXOT9iLo9/2iS0fC7+zHLSZ27D9qvf05WRSzC/gGsTShkzezauVNMK4vZkBoLTZ3L1v/0tD82uIePlvUxRdA4fcmO3x5cA3kt8IcxIAdttwjAMvvjFL/Ltb3+bnJwcvvnNb/LFL36R/Pz86GM8Hg8bNmzgm9/8JqdOneLf/u3f+Kd/+qchvf6ViJTmPYDmZsGGDQ5ycgz8fkFlZZDMDAO1tha7x4NhwJkzMpfyi7lR6GbFSo1Ro3Jpbm6Oe50hmd3oOurGjdj37wdAKy62oqReL64XXog2WPq+8AXM3FyE12sRQs/mP1hejn3//uFv8rpJg1Zubv/PG0F3d3e/NOr9DkVR7jjt+0HiTn+DwX7fgdBvbFZWxkV+BoxWDlD/a5pQu1Xh8CuXKM6/hLvwBu/Y1vDNv88mLTVMln6DN//uVUS6tfkxJIW6bQqdv3kDm9bNidFLmfvJcVS4NdKe/wXSufOc1ArQQ9A6YwGzvrgYxW6pBmVmjeDaf7zJ8cMGZ6dWWb0Fhsn081tp63Yy428WoKrgcJgEAoJwGOrq1Gj0p6hIo6HBHr1dXm6pwwxX1GfMmDF3/iIfIO4mX9xLXAEJ+MIdIO9QTTR4ZBiwB6vkx5lilQf15Ys74QoAxxtvYG9owEhPR58yJbqxipuT7e3WJiaBZO8dI8kXcUjyxc0xnHxhmj3O9AcUStlD1aTTbDkzhZ/vrcDrlchI1Vg6+gTfWLUHUpz4q1ZQtysVzwGFqftewqZ1k7ZkOmVfno2QJVy/6OGL4AT0sETr9CKLL2wChCD73fe5eu46m6VVaCEJI2yS//42hFPlwqQlVFZa6/9Q+KK9XVBSojF2rDFs03Ewvrjjtzh9+jSjRo0iLy8PgMWLF1NfXx+3uB84cIBly5YhhGDatGl0dXX1S23dD8jMNKMLe2zzV7CiwmoulGDyJB3H02XMS9MG/AGHJGmnKNHaz1ijjYEMxPqlSDMyhj+VO8wNWkl8NNBvbDocCV1M41yz+9b/er3RvhehKKgpEnPX5lNW5CCQOo93f+kgLc3A3yUQuoOf7FjKVyp2Y3b4eP6P+TSek/mbxekUdx9mn2MUr++Zwqx1Pyf3yk6CS5eS+8yzmBu3M/34PvRdIYI9ZX9vvy0411LFqW6ZhSkhfD4wTMGrndUgBL4DGlVVvdEcu91EVRM7z6qqyZ499qhM5EcxS/CR5os0or0nYMVzZn1hMeO7tQF54I64AsA0MdLTLTdhIRLX/btcBN3uuDK8YUGSL5K4DdyULxKM1Ti+aG9HunwZY+xYiytUk8IFOmVLZhH0jUdLy0WqB39A
AkOhUbjYcm46lRNO8MJPVc5dc/LYx7tZNc/B1ZePsvvcONb/OYWnTv8IdfcgfKHDW50VnOvo5NRpGwsXarS3C97WV6F7JVwnDdasCaAovQf8gfiivV1w8qT12VyuD4Yr7vjlW1tbycnJid7Oycnh1KlT/R6TG1PXlZOTQ2tr6323uPddmFP277a61yMwTWwXGhn777+k65lngIGbfOL6CGIlp3Qd0dmJmZaGmZkZXdxjayATGogpCfSfhyuVGzE9C4fvTRWJJO5t9Enf99vot7f3y2LFEYLdjn3fvjhX7cWLrWljmJk8/7yLnTtVHn00wNyHAvw/P9RZt3c8ZthgdnoujY0KBRM0pv/lHPwHNLJf9/Cxq4fRu8/jf2Qp3c8+iyJJsLoc3dWb7vX5BF1dkJtrcOqUQkuLRE6OQWGhRigkyM016O4WNDVZtvOKYslCut3Wwt3XeVbXoaZGjSpK3E9W9cOFjzRf7NtlaaJH1nvTJPNXz5NSXGxJfw7yOgm5AiwPjp7G4URcYbpc6FOmgBCWmVhs3b/bjdTUhJGXBw5HfBnenSDJF0ncCQbjiwGMJ6N80d6OcvIkQLQsLsIVoLDtYB6vv+lk5EiDRYu6OXta5trxNF7ZPp6DY7M4q6QycaJGxbzraGnljERmwq8PYP7bAW7IFxhZNgS+GGFy6jS0tEg4HCYPzdGx260sXyBgzd2b8cXlyxLhMB8oV9zxYSBRlVHfTvehPCaCLVu2sGXLFgB++MMfxpHCPQXTRKgq0quvghAYn/60ZeP+7/+Oeu4cKQ0NmKtWoSjKoJ9B1NZCMIi5cqW1eL71FmL/fkhLw/j61+Ezn7HEp9PSSAPr35/6FPj91t9ij4uBAGLrVuvfLhfmo4/eeSRG1xFvvw1dXeBwQG4uBAKQl4eroKDf6w/2eZuamu5dRYM7wP30mVRVvaM5NeDvq+vRcXrTMafriLw8a0zl5eHKzkbIMowZAx0d1uKaldU79nUdsXkz5OTE39+DkSMlpk8XLFsmsXKlg/R0g5/+s2Djhflc3gp/8V9CrAxuRmrowmemIGFj4kQJTZtM8Ct/T26O1Htta9dGN1uZmXDsmExnZzpVVbB0qRl929OnReTymT7d5LXXBB0dgvR0k4ICF4n2VYEAXLrU+7j8/MSPe5AxnHxx33AFgGEgvf8+0t69GJWVGN/4BtIPf4i0bRtGSgrGmjUgSYOun6KuDvx+zFWrrDEaCiF9//tgGJhFRZif/GQ8L+g6RGQls7Jw9RWbePNNaGmBnBzMj398+MpIk3wxKO6nz3RX+OJWuKLn8VG+SE+36uuysvpzwWc+AxcvItLTISMjIVfk5QkWLBCYJsyZY2PpUnjx30fSdCWH1k6FqsowX5n6DtKBLkhNpbXqcfJeO47DAd3BqbfMF2lpsHGjxRWpqVBQ4ELX43kgEV9kZhLHKR8EV9zxqMzJyaGlpSV6u6WlpV8EJycnJ65uLNFjIli+fDnLly+P3r7VerMPFIWFpO7di+3YMcK1tZYpRfWj+P0Ch18j3NIyaM2cfZcVKRK6jtbRgTZ7Nhm//z1SZyehefPoPHsOsnu+p+Zm7Js2IwViUmUtLXFufP3qQxsb7zgSI7xe1KYm61Te3Exw2bLoSRyvt9/jB/u8wWAwToP5QcD9VgMaDAbvaE4l/H1vpxygtLS3LCgcRg2HEVeuWM8PBiH2PQa6vycC+ZlPutiWl0pdnZ3OTo3y8iDZeZlo16CrK0zhlEa6apswU1OR99ciQgo+3Yksm3S98RrBj5VbNZ8J8MgjuTQ2tuBymQjRO+RLS3sVH65dE+Tnq1EliEuXgv3UwyKZgcEedzu433oGhpMv7iuuAOyzZ2Pv7kZ0dMDf/z2YJuGShXgnPIR0vRVFGWT9NE1Sa2uxHTuGv6OD4NKlOF59ldSNG9EnTyY4diyBi5csl1WvF3S9P1/EqnG1tODaurW392z2bMyYjM3tIskXg+Mjzxe3WzoW4YuINPVA
XOF0ohpG4vt1nYfG+pg51UXtTosv5s/XGDXGTjAkc/UqTBhxDd91a/xK164hvfV/MM0g7e0CIcDY8CrNKysGNA/ryxc+XzxXeL3g9d6cLy5flhg92k5BwfBxBdzlnoHJkydz9epVrl+/TnZ2Nrt37+bZZ5+Ne8yCBQvYsGEDZWVlnDp1ipSUlPsu5ZsQQtD1zDOk/eQnVnbAgLdC1fgCAtEkWB1bQtQXPT4DQtcJSwrGjnqyfvE8ysUL6GPGsE1dhc8zgvJK3TLY61FPsafbWTr6RHyqrCfdi673rw+NbeDS9bi08FDQr36vT6d+Eknclr53nxK2m8nH9bu/D6lULl+BJEFDg51XX3Vy7ZrlCllQEKbWk8sq1YFaX4985RKjn1pLU9FyLv7rTkLrGmh618acry7G4ey/wMeVaPT5e6Tpy+HolRcVwmoOiyC2AVRVLf35zk6JzEyjn+HURwEfZb7QysrQFi0irUd9xjAFr07/Gr4OGfEGrF49CF8A+syZKEePIf/Hq2S8+BLK1SuIzk6MrCx2nJ+IryF3yHwhvF4rwtrj3kokGxPhC0VBamm5Ja6AJF8kMThu2wsihi9umSugH1+4l1t9aq+95uTqVevAOXp0GNPhZMepiYS6dFaqh7EJGPX4Ar63fw2zL22B/9zHeMAc4ECQiC9iucLlMqP/DcYXPp/gzBmFyZN1FCX+MXcLdzxLZVnmr//6r/n+97+PYRi43W7GjRvHpk2bAFi5ciXz58/H4/Hw7LPPYrfbeeaZZ+74wu8JmCZqT0YAQNMg50ANh5SVdHRaA+xznxvgucKShQuH4dIrhxn13h6avV3oI2aS9uRKurIepuGQk8PvmRQXa2j+XA5fClCcfwlztED0SMVFa+hME+XMGfTJk0FRrPpQYrwGFAXb4cNI3d0YLhe+r3xlaIv8PaopncS9gzvW9x5McSTmvljSiBBJpNlY6vJRXq7w6qvW4j56dJjnn/dSV6fi8Thh7sdZXqQQmLMIUVmJ3CU4NakKW6NC+1kn59928vjjgSEP774qL0uXBtmwwYFpWn0BkYavWGnI9nYR3XPdw670dxUfeb6oq4ve1DTIaagdMl90LXVTf8DGw2/+lBR/G37FwdE5n2LKXzyC7+yUW+YL024nPH58r1t9ZMPU3o5j82aM3FyM9PShcwUk+SKJQXFXDesG4ArozxdWg31qlCsKCsIUFmp4PE4UqQxdhMh0Gsyf3c1bvmrONdq4bFuJXxNM8sgUV4nb5ooVK4K43UHWrx+YLzIyTKZM0QkGBbJsxj3mbmFYXrqwsJDCwsK4v61cuTL6byEEnxtwlbtP0XMQsHs8aIWFBCsqULbWMvo/DjBOUzg9qYpw2CqNGxBC0FxYQeoLbyDZZDRkTmiTcNbrLPuRweFj3WzfncHp0woFBWGKetRTgs6Hoi6ukUY0MyMDfdo0tJKSqLV81KvA5UJubLSajkeNQmprszIEEybEX89Am7J7UVP6AcSXvvQlli9fzuqeg9xgME2T7373u2zbtg2n08nPfvYz5syZ8wFcZQLcyQZgoLSxriO83n6Nw1F5UYcD5eRJJJ8Pw+XCv3oNdXVW6jWyuNfVqZSXR4ycJNaH1uDXwbkF3O4gQhLsy1xBeobJFFO/pSat2E2+zydoaZGQpN4IUOS1Iu7EkSgQwMiRxke2gRiSfBHLF6P+/QBj/ArmQ5WY5uB84fMJtKBAssmE/RJ6yOBky0guniygoqSZw++OYccO59D5YsaMhHwhwmFrLk+YgOTz3RpXQJIvPiDcl3xxp4fFgfgiEEjsp9GDvnxhrl7DsWNKlCsia3NhoYaqmmiaxAbPcjZ4TAxTMHGiTleX4MKESuxTwsz0Db1spy9XRPxEbsYXsgxOp0lGhvmB8EXy2H67EAJTVaMLO0KgV1UwVgNeU/EHJE6fVnA6rbqxvtB18HWYjPr9Lwh2XKE9fSxdUycwUphcv9zBhr94BzOzmHxyGJefgwjqVJQH
wJYJxKTKIjV0PZMrsrBD/Ck8PGoUUlOTdSBwuaz0b58Lutdk4PoKZ/S9fbcRDofv2ZrVbdu2ce7cOXbu3InH4+Gb3/wm69ev//Au6DY3AAnTxi4X6ubNSC0tKKdOoS1ahOjujlOXIBxGnzbNet9AkLo3g3guKDz2mJ/y8qCVEThgRYHKV+l0dNvYtk3FlWYtrIGAiJZlmCbR1O1QEbto95UQjZUdjlWUcTisCE/fxyTxEUACvgiUV9C834Z50uKLlBSdtLT+pfW6Dr5OyNq3jVkH19GePpbuqYuZZh6nsrWGbb+F518uAdtZRo+aTsF4HRHUbpsv0DRrnnV1YaSn3xdcAUm+GAz3FF/cwWFxIL5wrF8f1y8ZLT+KUbbSp02zduCdPuo2CQIBiSee8ON2B6NeBIUzvCxeKkBR8HjsliiMgK9+tZO333YMC1dEnnuv8cWHP4PvM8RaRUc1qyIrjhB0LHKjdjlYqGjoOvi7+0jCmT2uc5vsjDhYi3LoArkfKyP82S+S76nFcaiBq9dl9LDEufZcJmbcQJxqASHY9dMWlvz1WKtJLILYk7bDEX/i7nMKD65cOWDPwG3X8t0l7N5tJxgUVFQEI18btbUqqmqyeLF2W6/5q1/9ClVV+exnP8v/+B//g6NHj/Lyyy+zY8cO/vM//5MXXniBqVOn8vnPf566ujq++93vcvDgQV566SUAPvOZz/D0009z8eJF/tt/+2+UlJTQ0NBAXl4ev//973E6nRw6dIivfvWrOJ1OSkpKqKmpYdu2bQmv5dVXX0UIQWVlJd/61rfi7v+nf/onNm3ahKIoLFu2jO9+97tx92/cuJG1a9cihKCoqIj29naampqi+u33CxKljSNj0cjJgVOnkJqbrVKGGL1pw2bHTElBdHdjO3uGVPE+pdI5lswZB0YGFUt0bAeOor7vx6m0YrhX4HTa6ewUpKSYURm3xx8PJNRwj8zzgaZAIv33gfTgY+tIb6oZn8QDhcH4wtcl0Ti9irz5oLTolJZqKLIZ9xhdM9i8xYHfLyipO8XMvDCtH3sSZ3UFgR01jHrxRcacOsHJvCWcu5pCQZEP5exlhK4Pzhft7b19An3v8/kIrF49YM/AvcYVkOSLjzpfJPTTiDm0Gja7JVHd4/GRev1diqY/TPlSGSEUKpZ0RflClVvZaF9jvV/PVNy7V+WxxwJ0dSVeuwfji4G44V7jiyQd3QISu0HGhx5cadaJz+uVmNm0nYy9Er5QAKFpBMvLSf31rwn4IadrLlnNpzg65wnmT9cYeXwPwcoK3ntXpjHPTk3THK40O5iWbfLl5fvYdmEGB+tCCN8J3CVtCEnElU8YqgNnTJossHwFwqbEn8IVJT7dG1tj53CAYSDa263bw1nLd4swTQgGhXUyByoqek7uHjuFhdptR3xKS0v57W9/y2c/+1neffddNE0jFApRX19PSUkJYDkuTp8+na997Wu8++67/Od//ifr16/HNE1Wr17NokWLyMjI4Ny5c/zyl7/kZz/7GZ/97Gf585//zJNPPslzzz3H//pf/4vi4uIBXVO3bdvGhg0bWL9+Fgq1OgAA35ZJREFUPU6nk7a2trj729raeOedd9i+fTtCCNrb2/u9xrVr1+KUAUaPHs21a9fuu8U9Udo4uuD7/WjFxWiLFmFmZEQ3ITsuTUHzBljyX3IQklWHHzzvIvXEEZwb3sPIyUErLkYN+wnKTkRzCzafl+XLFTZscMQtrIkavnQdNmxQ8Xolxo8XLF5MwoOCy2XGPXegZuM+H/cjWRr0UcTN+MLlMnGmEI0C5r63C2n9SdTx4wm63dh370beUc+MRife6YWcyS5inM1ParYdzSYILFnKu3tCXGjROHc1hSsd6UxSTL62fDfbzk0dkC8CFW7UvXujXjWBlassroA4vjBiOSCWK1wuTLsd6fp1jMzMD5UrIMkXSb5wJfTTiJS9Rfhi6Ro7ktdLODuH8OvncHV347B3E1yxAqnLhxr2
E5Cc1OxOx6OazC+zDpHHjilxY6vvWAoEiNb/jxkjKC0dmC8G2vQP8nE/ML5IHgZuAYlqvxL9UEJgNfTqQeT972LXLdUg2XMI88QZXM1NjGk5xfbxn8YZMCh3HCRcXEjtVoXDJ6ZSPO4y/3XqCfYEi/BLM9h2oZ2qvMMoI+3I2VOR288BYIwcifD52LNFx6g/QnVXA2ZGOqHJU6j7cxjVabCoUkqcwu2xsJfa2jDS0kCWo8fgoNv9oaZ9hbAmHYDHY49OxMJCLeFkHCoefvhhjhw5gs/nw263M2fOHA4fPsy+ffv43ve+B1gNjo8++igA+/fvp7q6OmrH/sgjj7Bv3z5WrlzJuHHjeOihh6Kve/HiRdrb2/H5fBQXFwPw+OOPR3XQY7Fjxw4+9alP4XQ6AfoppaSlpaGqKl/96lepqqqKk0+M4Fa8O+559E0b913wIZr5MhxONG8AzwEFgldwl7Sx9exUGo6lUSrbCOekI/m7MREEUTlYp6GMtLMsax/bHI9x8qTtphsEr1dQX29HkuDiRZg1S5Cba33fiTd4t/exByKJJB4M3IwvFIXeRkLD5N39YZZfO4LzwAEwTeRDR0h95y3GS7nUB+aAJHFFMRnn1zC1ELt+cYyGs7MoKmnlU3MU9h93EtIF285NGZAvdh3MwOf1s/rYfoQsYYYNNurLsWerlM2JkaqORSTC2tNzEKiujsuGf9hI8kWSL/rxhdfbny/87ax0XmDLyUl4ro9i/kwVulsQPh9GqosgKoe2azgVQclCD6FwEQffdTJ/vnUoUFWz31jSdesg0NBgJz3dwOUibp7fT3yRpKBbQGztl6pa5T66Hr92+nyCYFAwMs/kTEoVC3OdKPU1SOca8R1toiNtNKY5hrR0k2XqXoywoGNWETZ3OY7NforHXcZd1Izc0kzxqgnUvjcGSZmPNmssy7L2IIXOYdgzQQhrEDucBINw+Fwesr2UFcYetjXkcrjpKgsmXceut6BVJrDv7nF9RZIQfj+hmTMxRo+2oq+BwPC5Ud4mIgt8ZGGHxKfyW4HNZiM/P5+XXnqJBQsWMHPmTHbv3s358+eZOnUqYJmsROo+Ey2gEaiqGv23LMsEAoFBHx8L0zQHXYgVReHtt99m586drFu3jj/84Q+8/PLLcY8ZPXo0V65cid6+evXq/RPlSdR8GAjEl7BFFvy+9cmVbpZMu4x87gD1707Bc9iJVjyfoif8LJNTrYOA0wmZGSz5HIiu4+xtLmffa0HCY4MUlkNFhT7oOIq9T4jext+I/rPPJ+64qWs4SSKJexMRvoholCeSBwwEhNVImA6nJy+ntCQF9ZU/kPrLX9HWKugUufhcoygK1+NymlwcW4KzqIysbi8Ow0/RDMHyUUcIluazsNpJXZ06IF/Q6SMgjaLhZDaOS7NZnv8+W85OxaOZlGQ2YL9+FlISeBL0lAIpp08jdXQguq05FglG3QtlQkm+eED5IhFX6Hq0zC0qXTsUvnhvCh6jmtDDD1Oy9DhVk05BSk+pkaKw5HPjEF3Hqb8+iYajAcIhk8IlWvSgmegn8PmszHR6ukFHR6+IRM9l3ld8Id38IUlEEKn9Ki8PYpqwfbvK5s0qsR4icc0iKaB+fCUIQffoiRgG+MdOxJs7kY4REwGB3W4iqstBCBZVSrjnNaHu3YNy6hSOfXuoWNLFoiVhzJwctEeqCVZWElhVTXDVKoKVlWgrV1D+MYWimV72BefzT+89ieeQg0XX3mCltBmpw5KSU7dtQ928mejFxixEps3W228w3HJft4lIzWcsamvVfmWut4qFCxfym9/8htLSUkpLS/nTn/7E7NmzEy62CxcuZOPGjfj9frq7u9mwYQOlpaUDvnZmZiYul4uGhgYA1q1bl/Bx5eXlvPjii/j9foB+ad+uri46Ozupqqrif/7P/8nRo0f7vcbKlSt55ZVXME2ThoYG0tPT75vFXd28OX48BgK4fvITXL/5Da6f/MTKu/Ygrj7Z70cE
ApCVyfJxMd+JJFH+cQdatTUngitWYMoKZGbgXtSBCAYRzS3Ily+zSnsLER7c9Ccjw6SkRGPyZJ3Fiy01h8hivH+/nZMnFdrb76ypKzZq7Pf3Kkwk8eAgEvmP1LDX1MRzBSTgi8dWEi4oIBy2fAhaZpTSPmIidjuEQoIbhW5caVbtdNlDzVR3voLt9Cnse/YgGTrl5UEWL+3li4C7kmC1xRf/f/b+PLyNO7/zxF/fQqEKIMGbEnVQEqn7sCyRFG9JJEhJlA/Zbrd7kt7N7maTySRx0k6305OZTp7tzqRnMplJ+rD7SibZJPvL7M70YXe7Lbsl6yCpm6JI6rBuWvct3gQJoFDH748CQIAEqduiZLyfp9siCYBFoOr7qu/neH+0+jqqv7yU4moHBxzV/OeDz9F2Po/qKz9jo+8dSLFrsUfzwvJ47EbKgQHM9HQsRYkGo5K8iH9skhcPUYlYEa5o8Lz9Np7vfQ91yxZiL6o78kIIrJQUqt9chlZfF21+tyxsXpT34bh6Fam7G8eVK9SuHkKI8RNgkWbi+fN1Sko0Pv95KzpM7EnjRTIWdY+SZbuiRtMSp3/jmkVSTZw792FYFinXL6BL4L56Ho8D8qaZBGYWoqig72myHSZkmVBFOY6+XsycHITfz4FGk4BTtaMcsoyZ6qF5i4WS6aJqTTjKD1R/eSkH/0LYg8V6e1mf3QE30jBnzojW1MVGcazMTLTSUqS+PszMTILr1o3JHjwuRRb2SM1nbA0oPFjEp6ysjLfffptVq1aRkpKCqqrR+s/RWr58OV/4wheiaeAvfvGLPPPMM1y+fHnc1/+bv/kb/uRP/gS3201VVRVpaWljHuP1ejl+/DjPPfccTqeTuro6vva1r0V/7vP5+K3f+i2CwSCWZfGNb3xjzGvU19ezc+dOqqurcbvdfPvb377Xt+KxKFHzoejvty3fsrLG2N4mahizQjpbHc/b9copKeBy0dSkU1sLZGZGmwlramCrsgl9RpBLtzNw+Jw0tk+hutIX31Q5SrIMDQ32NVxQkBqdGhnxf164UKesTGPmTBOwf3avqdvxHCaSeroUjfxPUCo0mhdcuIDkAElY5JxqIZidR07ZbHRDUKBsRXfUgpDRysvtMs8wK/bvjGGFAMsh09SWi8scprJOwsrMRAC19Trt+2cjDQ1hyjL1BbuQhnxIt25hpaZG+85ieRGZWxO1bvSOzTY/LiV58XTyIhErAKTe3uiwPKmvLy4zdde82CNTWysjBHG8+Miox8i+znlfHvKtEM1bLGpeHH8zMLo52OXy4PPFzwt4UnghrLvNVT0mxaa2JovuKmUT9pXOPHmSocikYeGA05/g6r6BOX0a/ldfBbC9p0tK7A2BYYx0wLvcbFU20X7ETfEKP96VN9nzD5dp68ylZFEP1V9eGl2II9ZY0tlO5KtXKOcAdUsvE6qqBEVBDA+PNNdESoAiKTiX66Et7BONlx8eHo7WU95Jj8Id4lFo9Hj5oaEhUlNTAfj+97/PrVu3+Iu/+IvHdXhjdC+fQSJN9PnelRLZEuo6nm99K+oBPWbIUcx5yqCPPf94lbbTWawqvEnVV5fTdCDDvhFY4ae2uIum9lzaDruRZQtdF8iSAWfO4TSDaEJl5Rfyqa2fuFRo9N+b6JqHB0vdPowa0InGy3/W9MSyAmxeNDaS+cEHBINB/J/7HI7Dx3C9/z7kZDH0+u+DECgdHWhFRXZf13isKNaoXT1E84cGhz/ssUtPi2+jbViPKclR211HZyfS5StUiAPUrbyJ/uxyAi+8gLp790h/wGhe9PXZ0dWMjIeyCUjyIsmLcZWIFYC6datd4gxopaUEN24c07EbKSMyUz3s/f4Z2k5m3h0vNAvXxbPoARPZJRGYs4CSUv2uN5RPMi+SmYH70EQ2glGFfaXN8nK0wIibUPq///cYntkMbXyBYd2NVFuO8+hR5LNno427wfXrEYODWGlp1Dp0wM+Rn13h6L/4kbp1Smp6qJ/b
SVPjYvyynRrt6FAoKtFg5SxOHchm/5lstKlD1JudaKtWoe7eHQVO1Bc63Ik/GT2jq6rimzwjNaGTvedp+/btfP/738cwDGbOnMl3v/vdx31Ik0uJhs7IMr4//uNxbW9jz1OpuxtPp6BkybPUzb2MNlSId2UAtCw8x9pxD5ynweWG5c/z3ntudOGkoBBKXsuPLvxqinTP51Giaz6SLbiTocBEr5l0Fnq6dVesAJsXLhfW8uX4I25CLhfG1UuYpsWg7kaqq7Rf8+RJe2ZBdfXItZSaSq2sg0Oj/ZDMkR93Ifx+KhytrC1OQwr42bfD5OBJD7ouKFmlUfNHU/jef8zhx+2FaJnXqJc7EbpO0OuNDnCK4wWgtLZOOlZAkhdPpcYZUBZsaECrqIjvGRgl5eBBhN8PpokrNIOSZQ7qCu6SF3MWsGpJH2sbBM179YSNw3dx6E8cLybHlfwE6m4+GK2qyq7d7O5GD1n4+gXy+o0429rZf0DlwuJaFr21g9WXz2POLQTTtNNfDgfK4cP2gl9VhbekiyP/Q7dTXN3drJt2FMOVQ0BKoaNDweUyox3vHUfdFFWAUx3CZfohxW3XNUlSQl/oRGO6H3czWESjL8DJvrADvPzyy7z88suP+zAml2KbwCDx9EmXa+yU0xjFzh5YM+MAoWkWlpoZnVC80TAR+RaWJwOpv5+N/JSjWgWWLGNY88KZgMyEGYE4L3hGrB4DAYHHE5/Wjb3mk6U+Sd2N7hbiWlUV5qZNBLu60A1B39Jq0vsCXH3nGNdbLLoNlU1OcJ89iwiF0Cor7Y1yRgZqUxOWqlJbW0X7PhNCNi/WTTmO0bMAPSuHoCOFCxdkCgr08GC+VEIemcI5XaiGf6SZ0udLyIvJzApI8uKp0Z0qFmQZKydn3KfHlRf197N64TUscQNU5e54IeZR86Jz3A3leLyINBMbRjTG9UTxIrkZeNQSwk4ZhQfHqMpGsoZVONDB/LYOMnMMhhbOJUUPoTY3E6ytjRtbb5kWP/zv0znf201hVg/GzJn8Ku0ZLMWD6pYoLtaidmpCROzUdKhdijTkIxi+ARtdRxfR6DHdgU2bHtc7ldTTqNhUb6TxMGY+xl1FFnUdDAPL4UB0daMVF6OtXg2AsmMnOGWEFkQYhr2hDgTZ+ck8AISmgabR1DRSS62HLHxDEi6XxdCQYP8+J1pIspv5BQwPC86ckZk3T+f6dUF+vorHMzatO1HUV9ftTYQQdkPyJAmgJjXZJQS6Idi2zZ48euTwy8wb9rCy7SDLLhxCzNXRCwoQuj6GF8GiYpoaFS7fdGH1ZlOY1cOW1Fep3jCTpqPTUNwSL7/sp61N4TvfsWvTS0o1ar48Fcdwis2K2Dkfo3iRZEVSj1wxNrbymTPoCxfaPSr3kIWKzEyiuwecMoGNGxG6jqUbuBp3giQhDQ5ENwsM+x+YF6dOyWiaIDVVYvlylYaGsSVA4/FisrAiiahPQbGd4NevO2j2N9DgP4zPJ8jONgm++SXkvc0o7e3RCXlacTGBmloam1ycPK3C9OmsWO8ALN75KA8kwauv+qldPUT7PhOhKCCkkZ2sM96LN+j1jpRhxJxtIhCwx3SHW+Ang61oUk+PYqM00q1bAONbEo5jI6du2YLU3c2+/SrDOXPwVg5gZWRghXSad8i4A33UyHsI1NTgOPYxH1kbOLw7SMWcJuoWXWRz0ddo36eCIVOv7OJEi8npOfWcOeskf6aOe1cTC5Y7OD5lLQCpqRY+n4RhCAYGBLJsX8NXr0rMnGnecXBMZGhZa6vdwFhWpiWEQ1JJJVKEF0LAufMy56UNzLx9iOxsE0UF3xtvoDbH8yJYVMxWGmhrVymYa6LnT0HoHlqCczj0Lyq65aCkRKOm2uYFigJShBcylpI5cgCynJAXSVYk9agV4QWyjBTJQvn9ibNQ41hUuzZvZs+Jqejnr1PTYKDu3k1g3XqadjrJ3Gni7f85OJ34N20C3WDnJ/M43PRgvOjtlXC5
7MKO3l6J/n6Bw8EdB41NJlYk8fQINDqN5PFYqKrFrVsSitOkpKuRjAwTXRdMn27g2tVMsK4murADBGtrEULwyScy8+frLF8a4Mi7vViazu3zKgXLXdRW9LPvrTPI57Pt9Na8eXE72tgDUhsbE9Z6Rp2Gwjdsk8EmLqmnR7FRRjMzc3xLwkTNYuEpkkprKwSC6J9M4VCwEg7fprraR/OeVI6ESlg15zJG4BTC4cBh6LjcFqvmXKOmQWA457Nx6F0UqwDlqERw7jDZp48zVxO0D25k6eXtiFsdDPcUkznfQEiC4WGBx2PicFikp1toGnR22tfL6AxBoqYun0/Q1ydFDC/o7ZXu2186qadf4/Hi8mUHDsniRedW0tMtpk83AOyMQE08LzRvLep+cLtNFi/WEabBkZ9d49L1VG4NpbJmo4Oacv8D8SLJiqQeteJ44fFErW3HnGvjNBe7Nm/GeagN/eZKDl3JxzirULf0Ms1bLNqPylRkz8DILADFiaOnByslBTVVZtWc8w/Ei6wsE00TmCakpZns36+gaWMbhUfzYjKxIrkZeMga3UX+xS/a37cs+/8WXdhBnnyIcytKaVI3IPERs3/WSsHhIwT8JorLPjHUpiYCa2uYP1+nrU1BCvghpHOhLxuz38eLafvY9zcKbRfyKFnWT33BabYqObS1ZwPxtW6JLLqiu+xxmnSSSuqhaLxpwqPOtXHPUSHANHHcuM4G2jFCeRy4XM+B/zYdNI3yRb3UFV7DOpuO5XBgejxUz7uGQ7qIoS60Nx+Whbe0B6WjnSBL6VlYzJRTh/hXXW3M4xP6C5fifqOazCy778bnE2x60U8gKJGf7+H4cQ1JWGRkENf4NZ5TjMdjkZlpYtpOcmRlmZOuPjSpyaGJeOF2mbzs2srKQAtHZpVzeWUNiy7uoLzlAI6Wo5jCRJLtuwi1qYnKtTUEAoKODoXiBT12aYTDgdE3xPK+ow/OiyQrknrUijnHAps2jetyOJ7tKJaFleah4eJ7SI4yWg5U03qlEmNuOiXP9OK1eqDDCYAxbRpIEqvVSzhCD84L35BETo5Kd7fGrmYFT5p1R15MJlYkr+aHrNEj6AcH7e9pmj2VOHTNRc6LK9FXrGZxq0FfWi1Tf3aYgQ/3c2lOJadf/TIvuz/CdagN55EjbCgtxSqu42c/y+XWOQVCIfI91yAzHTU0yKrCm9QVXIYUNzXrBJZHG9P9Pl4NaGyabTI1gn1W9eUvf5l169bxYsTTewJ1dnbyla98hY8//ph/9+/+Hb/3e7/3KRzhfWrU+PhE59q4dcoZGYSWL0cMDWEuWEBtvsHeizNwnD+P0HXqXr6MVlVH8CUbHIFI01nMf9XGRpSOdhyXL+OSJIqLViG0ECv6WxC9fQy89DJ67ogVydQTexHBIK7aWlwumDnDYNHFXfjPuzCerY5rHEvkDiHLsHFjkIoK7bHXgSY1uTURL6bPsPDMlUmdvhIztYaMTDht1ZG27QjTO/dzaUkFeW+9TureZpxhXjSUlkLRKF6kPnxeJE/ox6/PAi/GK0MblxceD8asWYjBQVb/+lz2v5uLMcPmRcP8faAq+F5/HRyOOBMVK7LxeEBe5E4BQzeZf24HfvPueDFZWJG8oh+CYlM/ozvG09IgGBz5nvFsNfK6ADOFheekhW/YwbWCUm7clHB26Qz9fDebX9tA8ZXjLKAT8ewKLNPi1GknuVMyKX1mgHKll/bTORQvtKj+8kI0/1yw7AW9dvUQ0pAPS49ZsBNFdMYpy5hUivWKS/T1I5ZhGNFR85NNmZmZfPOb32TLli2P+1AejsaLOsoygZdest1NTIsdFxeBJNlRT0Wh8cg0qlc7wOWKgmP0f4Pr1iFCIYQkYTmdeN57F+fJk4jBQYw5c3BIFlHnb8tCBIMjJRivvUbqniYqaadvaQlGbQCfTxpzrauqFRmQCRC3MUgqqVjdEy+KqsmqDeBpsp9jCcFes5JFGRLisk7ur3bRmLOe+Xfg
RdupHFYttqhK8uKRKcmLT1Hj2Y6uX4/o78eZlU3jkWlYbneUFzsuLKK+4DSoasLg1OPkRSSj/DgvqUl2NT950nXYulWlt1ciK8ukoSE4qmM8LUEXub1ArV8fpK9P0PNMBf/i97L8+g5mX2sl/90WbvZK9Kx5jYVrq/jgT9wMD0vomSaW6iK0cDHLF1kcvbwAR5uG1xeu71RV++T0+8cOjIk5YOHz2f8dr3RoEkjZtw8RDNqD2MJTZCL2eVpV1X295g9/+ENUVeW3f/u3+cY3vsGJEyf46U9/yu7du/nJT37C9773PRYsWMC/+Tf/hubmZr7+9a/T0dHBj3/8Y8CeKPk7v/M7XL58md/4jd+grKyMtrY28vLy+Md//EfcbjeHDx/mq1/9Km63m7KyMhobG9m5c2fCY3nnnXcQQlBXV8ef/umfxv38L//yL/noo4+QZZm1a9fy9a9/Pe7nubm55ObmsmPHjvt6LyalRmUQIMxzlwv/y6/QvMXikJVJSWWQhpJjNLZPofVKPlpbrm0ZaowMm4n1n1ZaW7E8HrSSEtw//7k9sEYI9IIChn73d1EOHwZJip5rwdpa+3nt7ThOnUIZHkZbVYxYXUNj2BUskuZdvz5If79g/36FXbvUqMNEMHh/g2WSerp1v7yIfO3zwffbazg7cw1Lr+xgRcteFvQf5sYNR5QX//h/pOLzSSxapIPLhbZgMRguTqTNZ7XiQ2k+ODEvYq0dwzdbE5aaTgIleZHkBdhTt63sHLa6X6FdCIo+b+GtDbD3uz20ncwGcz7VL3kQEDecLMmL5GbggdXfLzh4UEGSbDerigqNnBxrTAPIeF7Tra0KPp9AVgRDusr0oXM4pDlMm2bw/2gNiOebUQZSmDOnmhxPAPVcJ7/Yk45TsQhmgQia1Bb6IT3s1mIYOK5dQxoYACDwyisAY+26UlKwFCVxM+fj1qjd9mi71fuN+JSXl/N3f/d3/PZv/zZHjx5F0zRCoRCtra3REfPDw8MsWrSIf/tv/y1Hjx7lJz/5CZs3b8ayLF588UUqKyvJyMjg/Pnz/OAHP+A73/kOv/3bv82HH37I5z//ed58803+y3/5L5SWlvKXf/mXCY9j586dbNmyhc2bN+N2u+nt7Y37eW9vL7/61a/YtWsXQgj6+/vv+W+dFHrAsoLRU0UVxUKWLRS3hFa7nupKH1pbeIiYoSeeTGlZSN3dOE+dQisuRrp9G0sILN3EyJ1qP7aoyL4xiplYFKytHdPQ7+uXEqZ5HQ7QNPv7t27ZNdxTp5r3NVgmqadb98uLSORw714VpxP8QQnTqeD/+BLG9MI4XuTeTOVmdhULCgIUGQf50VvLEMKgYl2QvR/p1Adi3L1G8+LFF+3m4dHWjl7vuPbUj11JXjz5vHgIJWhRXqwewmVqFJWlgMPB/tYUqr+8FH2LhZQ5B+E07EzXaF6sW2c7Fw0Poxw5grZyJeLWbUxLIFkm5tSnmxfJzUBY9zvq2bLGfq3r9qJvWTBR8CRSQ5aRYVEwO0TZ2X0UyFcYuGmSVTqbdS1/zc1eg32iiqrMj+m8nMKHl6bSJ7LItHpZOPsay+ddZEfnPNwiQPUzIYTPhzQwgJmebi+S4caaMXZdwSDBmhq7dm6y1YCO2m3H2q1GIz/3oWeffZZjx47h8/lQFIXly5dz5MgRWlpa+OY3vwmAw+HghRdeAODgwYNs3LgxOo79ueeeo6WlhQ0bNjBr1iyeeeaZ6OtevnyZ/v5+fD4fpaWlALzyyits3759zHHs3r2bX/u1X8PtdgOQlZUV9/O0tDRUVeWrX/0q9fX1rFu37r7+3seq+ykriO1hccgEh03aWwRoggZjG1b7FLiSj/ZsPpZDRmSNDBETfT5ETy8RWwaprw96+1APtYKmYZkmqT/6EfKlSwxYadyeuYxBFrCgrQN9VbE9wClyGCELa0tztKkL7AZNz+rahENjYtO/mZkmQjBpB8sk9XD0sHgRCtke45Ehd3fiRTAo
qK7WuHRBsGr/fjJ91xA3iOGFzqBZzDNp5/nwJ1P4FcUYQmaGuIZ1qp+OK06c8wtYW3gBMytrDC+kmzcTWzsGApO3eTjJiyebFw/ICmQZK6SjdYdoP5GO89AJ6ud2sv2T+RwUlRSv0kGWqXkRhLAduYRvFC+6u1Hf34xwSFguF9ry5aT+6G8Jnb7EkOKhb/YycgvnonR02HNuwrzQdfANQm5bU9zhPYm8mERX9OPTeK4gd6PMTIvSUo2+PonMTLsTfOtWlYMHbd/Y+npBVVXiczt6UgzCips7yEwNcWFBLarQcHacZ9GZ80jZpfSlzaPvmsBSnFz3Z4Ik4VEkvlBzGaHptFBOycoAgQ0CYej2xGHLirPkSmjXlZqKCAQmZ2PYOLvtB6kBdTqd5Ofn8+Mf/5hVq1axZMkS9u3bx8WLF1mwYAEAqqpG6z6t0eSOkaqq0X87HA4CgcCEj4+VZVmICf4OWZb54IMP2LNnD++99x7/9E//xE9/+tO7eu1PVROcN/dcVjAaCF4vDVojijWFQz/J46i1DMvlojT/EtUlLhD2a0Xexr3HcjCvLWejcRYhwMjIpGlfGp5T2VSvlBCahtTdTWDGHDqzq+hfuoqMk20ETBlHTJRHD1mc+O5esk8f4fj8clb87ss4d/4Md/g8XL+uFt+QFHcTOLqsA+KnUj7uWtCkHq4eFi/S0kza2pTokLuFC3VmzBCUl0/Mi+EhKOvbjscV4vx8mxdyhBeZxfSr+RiSwoBfoT/gItPlZ9WcczjSZ6AHTIbnLyWwdi6k2SyI5YWZl5eYFZGSoZjSoUl1Uid58cTy4mGwwtXYSIPmx6nN4tD5PA6dX43QNEpe7qOm1jnmNBjNC1N1sfPkbJRMF2tmngWnE6u7l76cAroXV3BpZimVwweQ0uRoVkDXYdtHClPaG/FfPkL28yXk/e+fR/vFz6Ln4d3yIjJ07HFqEl3Nj0/jdXnfjSLOIZEP1ucT9PaO+MZ2dxP3s/FOipypEmLVCm4X1aJ//S3OhgpYnNNF5SsZDPSf4a0dq5CEheqW8GQ7GOzN5Kd7C5k7Y4ji1wQ19c7osLHAK68kbqyJtesKd80/yKS/R6pwzWes1KamB17gKyoq+Nu//Vu+9a1vsWTJEv7Df/gPPPvsswkX24qKCr7yla/wh3/4h1iWxZYtW3j77bfHfe3MzEw8Hg9tbW2UlJTw3nvvJXxcTU0N3/nOd/jc5z4XTfvGRnuGhobw+/3U19dTXFzM6vC03UmlO0RzxnUkGUexQGDQh3TzJlLAj7e0h9ZzUxFCIDQNb/FttLSVcc+1LAjqMu1SFXrJYmqrBmk8Mo32DpUy3AzfvIp66zbmlCmInCnIln29GwuLWeA+ihHZCOhw9ZoDX8gFi0r4+eBGzr8n4VIb2LQCJFVFdiZeG2LLOiKNYTt2qJOiFjSph6uHxQvDgOZmNew3LiHLMDR077y4/vvf42zXPCqyu6h8JRPfkYO82fqbCIfA6XZgqCm8f7uaF6Z9QumSHqqfnwbOzOiLjubFE8cKSPLiCebFg7BC+HyIG3Y2y0rzULfoAq3npyE0DUuWqdk49iY7IS/ap3Dk2nVKuIJvWgpZ+3YgcrK4nV2OrtvThp2yAyslBa26GrBv4Lt7HEx1K+wOVaAN1FL4oUT56lr799wlLwIBaGpSsayxc2w+TU2SK/nxarSjw72mamI/WI/HIitrxDc2JwdcLostW9Ro9mDjxmDcAp+ZaRGsrMI3YDGlvYkuJ0yfJZiip6H1D+K3XJg52Vy+KpM2FZYs1TlxXOX4wByMfIM36vrHHJCZkRl/EYyy6xJ9fXc/6e/TVnhhj9R8xtaAwoNFfMrKynj77bdZtWoVKSkpqKoarf8creXLl/OFL3whmgb+4he/yDPPPMPly5fHff2/+Zu/4U/+5E9wu91UVVWRlpY25jFer5fjx4/z3HPP4XQ6qaur42tf+1r05z6fj9/6rd8iGAxiWRbf+MY3xrzGrVu3
eO655/D5fEiSxN///d/T1NSU8Pc9Ck0YzQlHgIJe77g+0aMVGSG/95CHgHMa1c/nYbrc7DyYzfm+XMTMPGZNC7BVKaDWoRP76Qth+6QDtB3Kpu1jDygKK4p0fMMr+bj1OtOCcygsn4vD0pkrwazhAzhlGYemYwaD6CGLbdtd+HyCM1Yt+VN08Any8uDGDYmuSi+ZWYmPPVaRqHF3t8TZszKVlRrDw8negadJD4sXuk70dTweE12H1FSQZYtf/MKV8OZgNC9y25q4IQCHg8GMmWQMDvHT/o0Mi1RSU2HBdIOrVx30mxns61nKG2/0IJxyfBn9aF48SayAJC+edF4AWlnZmEbe8WR5PFiqinTrFrtuLcWXWkCD6wRi0Mf2C4voVJciC4NZhYKmPfqYoXrj8uJz+QT70zly4iBLzoeYPi2P/M+txO+HZVt/jBDgf/VVsCx0Q9DSonD2rMzHwTqcMyyqp4TszfyQhHyX55yuw+bNLtraFNLTTebP1x8bK4R1t7mqx6Rr1659Kr/nfmtAx3utSM/A/Pk5dHZ2873veZAk+2e/+ZtDzJljxk2li6Sb8q+0kv/aCkJ1tcgf7eDID4+w21rDTrWBKVMtFizQ+eQTmZs3Jbq7HUydarB6dZClS3W83mDESIGmJhVVtaiq0hIeoOjvR9m/H+H331205y5LiXJzc+nq6kr4s+Hh4Wg95Z30KNwhHoVkWUaP+INhR2lSU1MB+P73v8+tW7f4i7/4i8d1eGN0L59BIkU/3/EiPfdZ/6lu2waDPnaeLuCAspaiUhMMg5//VMFyOnn18wEAe6BSsTZ2aipghXTe+lKXbTsqy/wf35xC4+5Uyvb9EGPAj+c//j7pLbvIPNGKeuggOJ0M/Nbv0FXiRTcEu3apeFJN+gckSko0Tp504pTTMMzBu47W9PUJdu5UcbstDhxQWLBAJyfHfGTRnhkzZjz8F31C9WmxAh4eLyKvE+kZyM/P4Z//2Re9OSgs1Kms1Jg5c3xezPz8CrZJDQy/00zGiVZ2BKo5MWsdq0p1rl6VuHxZZmBAAIIXX/SzapVGKDTSmD8hL+6HFZHnJXkxRklePCRebNmC6O1j+9VnaHFUU7QyiAj4eWdLLgjBq6/6gXvnxZl/7GDW9Tau5Sxl+XIdz8ftpFw7h6Onm+CaNfT/m9fxDUnoOuxqVkhJhZs3JdxuC9lhkTctnfLy23e9JvT1CbZtU+nslBkIc+eVVwKPLDMwES+SmYGwxnP7uRfFAiInx4q+buQkNE24eNHBtm0uZs0yaGgIhlPEAn9AQklXuJJfiqukmkwJusvX8cl7HqarTkqmhVi6VCc11eKTT2Rqa4MsXqxz6rjEqaOCUydtC1GvN0hTk0p7u30RjDFSiL34VJWg1zvhpL8xz/mUPKa1qqp4F4hIk9jjLqy7g7Zv3873v/99DMNg5syZfPe7333ch/RoNI7P8/1YEEafk5lB3dLLhOQ+2vZ7uHwzBUuRePVVP16vHckRpoErNIwwpPiyJAuat1jR+QNC02jfZ+JSTW5kLmLmwEEu/+M+zs3fwMaWn5NvGRhT83g/uAF/owNVtZhzfjeWX8Mo9jJ7tsnsWQHS9u1Dk4IY8t3dUHg8Fopi0d0tUVyssXq1lhw69hTqYfNClu0Mst9vn8vp6SY9PRK9vXb5Z3r6SIZgDC9WVePN1Ph2x3qyexzkWU7q/3CItjaF/HyTwsIgq1ZpHDooc+E0YMropl3nXls7AS/uhxWjn5fkxYRK8uI+eaFpWHlTqVfPoluFtB2eweWrHhCM8ELXkYZ8KLIrYanQGF7sNZgiApi+AM5psE3aQE3rL9AHbpG+ZCr9v/P7bAtbhRZc2M1cLci5BfVMnWrirQ3g2N5MXtoUeuRld/22RLKMM2YYzJ2r8+KLj24jcCclEfWQNFFTWUaGRVmZxpUrDs6dc3D9uv2/iK1c5ITozF+D22WyMs2OznjSQNm0lmG/xErVz7qKbg6ezIk72Z8L
bmNn/xSO355GR9siOjrsxuXxdsOjL74whcad9Acg+vqQursxc3I+3fTwmIOf3As7wMsvv8zLL7/8uA/j01Ein+d7rP8c8xyXygZzG0fOr6RAltHnzotmvNB1GrRfIQX8WNtGbjIikc32U6mULzpJ/dxOdpybT8uZRawo0pn3ejXu3SGMXx5iYetPUW5dw79kJvr0AqZ0NNFV4qWrS8JbOETGyTYsRUN32KUG6adO0bd4McY92BNGHibLyenDSSXWeLxIS7NvEObO1enrc2KagnPn5LjygdG8WOHRaGqymxp7VtVhIdCDfkoX96DJKdTU6Uimjrd/G03DU1CEA9/KUtrb3bS3j8+L+2EFuo509ap9LWdkfLozCZK8mNx62Lzo70c+e4aGBXD4kzXMmjcPS0g2Lwwddfs2GjQ/+NwE9ZgehfF4cXYRK4rqWLJEZ05HC3P/75+RPnCNnpQZSNNnY23dhV9rICXFxN+rsVpuYZmiI9bVkLqnCeVMO44ZtfdsZSuE3cP/uI0mkph6SJqoqUyWob4+yOnTDi5dckQ9piMFWmOHzABWeNjMBg1fn0Hu/q3IuwJ43W4C69YDMtKQDyngp66sh3rfJf7yVGF0yFiijQDE19uZWVmJL77YFC+gtLQgnz0LZ8+ilZbGP+cenIgmeUXaZ0KP/DOQZYJeL9LNm5h5eXcXEYyNGoV09v7dhWi0RoTsG53a2iCSzz7fLbcbqbsb0deHlZuLEKCqFsWrdKpXL0Ubmk31Sx5Ce3RU1SIrG/Tn1pL1zz/HuHSdq6n5HNv0fTapHzHz3VbOnXNwNn8dO0s38Eq5hftIOxyx643N2lqCxcV3vbhH7B8ng290UpNX4/FClu3s7tmzDnQdLl2yyweEINqbEMuL1FSLPXtU2tucFJfYN/RNO2SO/PQypbOusqHoNiFzPQz6cATthnxpyEegpJD2I7Oix5OwjCLmpgshxt8ExAwpS9RonOTFk6tP4zPQSkvt8ysj4554IV29imXBjstLEboOmoZwuWhqUvGutPtcrJR4VgB34AU469fi2fIzjP6bXDDz+cfi/8bnnFupPNHCPGTeHdoIooFU1aL6VAvSmTb77yguRt2wwXaNuUtNJl4kNwMPSRM1lek6NDaq0drQ2bMNcnLMMUNlIl/H1T8iOLTLYM6HV1i5PIAxKx/h89F4OA9VzsEb3lXvODcfFCX6epEbqIT3MLqOGB62w1AJfhab4tXKyhDBIFpZGY6rV+0LN6Z4dUw6eAJJkoSu68jJUOljka7rSBGbq0f3S+wbgnstEQg3MTbtkDlyRac0/xLe4ttsVQpoPyQjfD5q1rmwFAXn/gP2ELKsLIINDVgOmaqqSImDHX0SxNzgWBape5vx5Amua9NR82cz7WgTva/XkhUQ5B12klYWwu+X6FrtZdaREXtCK7y4jy7pGK9m/EGbS5P6bGi88ySWFRcuyMybpyPLjCkfiOVFfucepstB5lVX098vIWtDLL25i+meEI5gCqFBH41tuXguFrJmznlMl5vGtty440nIC1kmuGYN7nffxVIU1MbGsddzDAMiES7L48HIz0crLsacMyfJiydUj5wX93g+xEmWMWbMZNfFRbSdzqBkUQ/VX55GU7NF+z4BWhYNTgUlASuEYHxeYKE2NSOcMu4FeZjBOXw+fSudhXUsU3UWyg4WdNv3b+eG6yk91RKJvxKsrSUtxplutNV0osj/ZOJF8ip7SEoY3Q8rdrjY4sU6ZWXxDWFxipmmaBjwYbCB4If76D58g8NMY9l8F41tubQfUSguBn/9enZttWgjk+JVelwNKIyN+Ii+PttlQZLsm/vq6uiOGUalhvv7Ef39WA5HdFKfcugQwYYG21EiQb3fRHK5XAQCAYLB4IS+yU+SVFUlGAw+7sO4oyzLQpIkXBOl+B+C7qoGdDy/aQFqisSK1/KpLnGhpa2k1grg7GjHdcKPym2a/WUYRgbe1UP2hnbQR9PhPFRZp3p5d3w9
angjEHEW8b/2MgeCG5jS0UT+lVZy2zUCz9UwrLjtcg2XSW5bY/R4TBOG3v2IQFkxjU0jY+W93mD0hk0I+0Yt8rZOtA4klVRE450ng4NEWbFw4R1YAWBZPDPfh6O1nQNvwdnCepQdeyjsO4U5OB1TzQ7zwk3xilL8xYU0tYe/DpcGjcsLXce1ZQvOkycx09PR588fcz1Hr3e3G+nWLSynE+XAAQDM3FyCc+aMfWySF4/7MO6oT4MXd90vMB4vnDJSRRErFgeo3jgNIaBBex/FmoLysaB5VhlmmBWSfxhrwEfTkTy7Ub5sGGl0/wIxrHj5ZYaqaxh8ax/Zp9uYD4g/WotTksjZZuIfhvnndsTGX5F3NNG74TUCAXtD7/cLFMVCCAgG7X9XVsb3kE0mXiRRFaMHdYgYr6nM47FQVXv0dFaWOeHirhuCvpVecg0QLe08e/4oTqfFvpXr2DltLb88oYIkxdR4yig5SnQjEGubparW2MxA7DdMEzE4GGfnFVePd+aM/TjLQp87FzMvDzE8HL1o79kfWIjoFMWnRRO5YXwWdcdz4g7NhVVVmm3LK2UC9iThusJO9l4upPndYczpp2k/7sFSVCz3DI5em05gGCq0AyjXL0DaKJcTIWxHkeJi9Npa1hshfFXV5LZpSCkqsiLZi/Eg5LY14j5i2xMOra7lxHf3Mu3HB7m4K4BvUR0ZmXYE5+pViVu3JK5cceDz2ZGzWAeIh9FcmtTk1sNwE0p0nqSl2Xaj/f32RjMvbwJW6PZ8As/qWiyfIPu9NtacP0RQE3RvqOM908t7cbzQESITNUWK6xEYjxfC5wPLwkxPRwrbi44uFbI8HixFiW4AQosXJ2RF9LFJXiR5EdZdnQ934sUaA9N0IqRwb2PAT23RbQ7+vIeDR9Mx+jygqtSWWLz1zzO4cEnh5ef6UX/+CwRWvCtWDCuCtbXIQrD0y9VYW3TmZEoYir3Wr18XwNrSTCZt6KvCj93RxJWfHaH7SAYnZ1diYW/ob92SMAxITbVoaVHo65PGuMtNFl4kNwNhPchUybtRpPRuohK8uGNwNfCq3I4sW4RCAvVztThPqBDO2tWuHkLqt3e2CYwUxu8ZyMhAKytD6urCcfkyyqFDWCdOoJWXRxdtrawM0dcXfbzo77enFQ8Px1+04zgEJPUZ1h3OiTtFg/btUwgGR2wPzVQPO87N5+QZF8Hbc1m5RKWo2M+PL1RzrT+N6dMNfj1vOxuH38X6xI5eGv0+Bh1Z0Ru1RE4jgZpaZKeIHDIej8Ww6YIV9qbB1yfonFtPmieFUK+JkAQ+HyiKxccfOzl5UubSJQfLlulYFsnegM+QHiUrIj0DmzfbcwYaG9WErz/6GLzrapA/OIymCZxOi2VfXc2O743cSN8PLyL1/nphIc7jx7FUFXXHjhFWhA8qtGyZbTCRl2cHl5zOsawI/3FJXiQV1V2cD/fCCzweuwRuXzqnutLRpqWjOJ0cCM3if3yYzfUbMmuq/TT0/xS1oy2a7YrlRezFoev2zADPxhoM58jFITsFjgwnfYtLEKtrkIWgq7iWKwcUMjNcWNgbeZ9PkJZmcuyYk54eia4uiYoKDb9/cvaSJa/GsB5kquTdvLam3blJJHoMqSbZrY3oAubNMwgGYeD0Tk6KjYCwhzN99wT1czshxd4ti9E3XeNlVWWZYEMD0tWrKAcPYqWloezfj9Tbi5mVBYaBNDCAmZY2smv3eMYfIJXAISCpp1x3agKc4JyIRBKlW7cww9ml6M8sO50aW7LQtCeVdlFJ8YvdyKdO034mC9Mxxd4IzDApnDZM/cILWJ+k2+etJdi2P4dhzRl/oxZe3MfcxDksHHv2c/SASefcetwuk/W6Rm5bI4WXPJxd+hyGOciL3gCBwMjU2DVrNHbvVpg1y8DjSfYGfJb0KFkBEAgIJIkJXz/uGAbBsb2ZefMMNA2cTmj/4V6wNtoguF9exDRq4nDEsyInh6DXi7pjRzSwZKWkTMyK
8GsmefEZ00S8uMP5EBlGKfr7xzSjj+UFbFU20a5alFW0Q+gyh67ks+/4FBCC6dMN/ug3ryHtCGe7xuXF+KxACBy799HRYtFZsB73don16wLktjdxPtXDudk1eIwBvN5glBeaZm8Ojhxx0ttrZwYmIy+Sm4GwHlYjR6L08d2+tsdj4XbZG4H8K61YrxXj89Zw+nv7MHZ38MIaWPSlKpo/NGh7LxukRdQXnLaj+LKMmepBOGO810O2z26iG3hz5kwsjwcpnLI0c3KQbt9GPnEC3G4wTXyvvw6qGn3+hJZySX029DA8xGNDkqO+HSlZaG9X7EXeNClZ3IczI5VgSQmhkODCjZTocy/fcrNDXUT9PBCS4NbqTQzudkb92GNvpEbfxJk796PKfnxDguzTR1kkLPx+gXrqKG78lK4oxtpgEdSCUR/4yNRYv1+werU2pgY0qadfj5IVd/v60ccM2rXLmbShlxSh1Xo59vZeQrs7eGGNxaI3VtP8gX7/vBiHFcLvR7p+3e4lC0/T1Fatwpw1K8mKpEb0ILwIm1FEIvVBr3dMj1lCXqzsQ/KsIDgQ4pPruXGc+eH/O52vzPOgz58PYnxeJGSFM0CwpoZAn0b2qcNUXTzMxfxyLH0I95n2MC/MMbzweGxelJdPbl5MwkN6PHoYjRyJdpP38tqyDOs3aJiyRErFCiS3gmtXMz2r1pMNLC8VSLuaWZ8hIy/KwKX7QVVQWlrY+3EuAclN9ZeX2uPmQzp7v3sCl+mn+tmesRdhxALy2jXM9PToYBmcTvvnponw+TBzcpLp3KRGNDh4z0NiYiV8PkQwiDl1asLnRxb4yMLu+OQTNhTuY8eB+bxzYQ23bjswTYEkWVgWFMw1OaBVoruXULNR4DJkTp2S6e0RZGVbbNpkTy7GsvB4GLnJcpmkSAGUjg7czxbRs7CYwt22r7RnaR6Bz7+K7q0lN1sQW+I7mRq+kno8elSsuJemwtjH5EyV4Kh98y0E9JR6ycaixHmU0AEHNRvLkE/13D8vErHC7cYaXc9vmvf+RiT1dOsBeBEtEYrMqwgExmwyx+PF9hPz+VFrLT29DpYsCWFZ9qRgUNmyeBN167ogzYNLlzl10kFvn4OsLNPmxXisaLdd5vSGGqQtH5N3rJn0a2fJlGahlZag1z7ZvJjEh/bp60EbORKlj4HozIDoa08wlEKWgQ2VBDQTa+suMk+1410FgS/VIO1qRmm3Gxyr/2gJ0vAQmjEfpamZgJTCoWMe9A8Nap6H5neHOHwik1XPOGD46tiLMBDAtXmzbQXndhOsqcFKTcVKTUXq7sZx6RJKezvWmTOfygTJpJ4QxZaPTdQEOGpWReTfd2oaiwyEAWzfaF1nx4VFEPQj6SEkyUFenhG9hJYvD9nWca5UhFNjaEBQeHEP87UAJ1LXMTQkcKkm8vZGNMuN11tJIGBfowGrFr9fkHm4jUosZK7A0umYhQV2FGqCa3Sy1Xsm9enqUbAiM9OKNpVFX9+ygInPw2BFJYHeIJlt7SAEVbW1KIEQ0uEAIhhEyA6qv7wUach3f7zQdbscqLcXMz3dZkVGBmB7xCd5kdS4ehBeuFx3fO54vBCaH4elk50Nc+YYXLzoYOpUk/nzddQUCbIy7eds38/C84IjeevQNMGQDzx7mtAMFa+3aiwrDrWR1d5OxhwLKzQNz/xZSI6Jp10/KbxIXrEPUYnSu6K5GfXWrZGTJWx1aKmq3dg4WpaFbgi27XDj1xqYbzmoPHSQ9LY2EAJt5UqGVtfaO820TGRTA5dK6pmjqL3z6discvhDAVoIte82ajAIKaMuJF3HtXkzSttIEw0OB7hcBDduHOkn+LQnSCY1+XU3TYCxqWFVte1yNS2aJh7v+aZp1+O3tysUF2vUVPvZ951u2o6l40pP5+XXNA4fdUQfX1Sk4Xbbdm2RddgyLeZ2HSK/5zgOh4VlViJvb2TgR+9xM28Zp/Qa1m+wJ3xv2+7CrzVQ1dnBIvkTu9m4
sACEQG1qCs/5uHs9DIeZpD4bSsQKZd8+hKpCZMjdnVgB6CHLPo9DG5mPk8rWA3YE07LQSkoIrl2Lbgh8Pvm+eSH6+0fKgUwTrbo6et0meZHUhHpQXkzQfzIRL9S0dH7n94ZpP+xGkuwNQUmJhqpaVFfb63+46YCyc5uZ29PB5me/intXE1e2HiZoKfSccLL8S6VADCs+Ocy8uTrOSxcwFhZENwBPAy8mwSE8PRqTEnJYEBhJLwVra1GbmpAPtdud6CEr6mgCI8PG+lZ6w1EjEy0ocBxrQ5qajdTdjYGDbf4GphxuxpHipJp9CD1EIOUZgtPyuXDNTWFOH+eHplGYYTE81UGwyBl3nHGWcQMDtqVWjENQpEY0uiN3uewJfkkHiKRgbNPXqAaxWAcI6dYtgDFlQdHzK/yciCuEolgUr/BTu6KLpsYsUh06q2Zf4/TATD4+ZjeKhe+TOHbMSWmpFheQycy0SKlYRNruY7zQ9z+Z+k97sT65gGE4GJ63JOrkAOAfhkXnt5N64wJwHWPWdLTiYoDonI/eDa+h63c+7R+1G1lST5cSsUIEg0hHjqAODERZobS3419RTF8veNLiz0Nl3z5C3Rp+rQFPGnQW1lGx7X/iHOzBzM5GTkvD0XGEdxZ+9YF4McYCL/br0bxQFDAM7uqiSeqzoQfhRSBgP1fX4+5BJuKF2xnCwgGWvX+1LLh40UFKiszrrw+NHIcQyM+txbnlY4qO7WTpx6dIc87ituXG7QhhDGn4Bu3H+Ydh0YUd6CGwPrmAo+s6+vz5+N54A7W5+angxWfqav00dmLxKSGBtWED2sAASns7Sns7pgn7KadTq8e9nWjXuSfVRA0PG8s1wO1qIPPATha2/RTJGcJx9SpicBDXL9+nKusiwWmzUW9cxmmcx+GwaFDaOHL9/+SatIFrQ1lgmiw2bvH8+V8i/UhCKy0luHGjfWFGLOPCTTSBF18c208Q2c1HRsw/SMNoUk+vEjSIxZYCmVlZdqQnNtU76jmBdesJBsMRnhV+NgTfp+mHUzh6Po9VcwzWVg5x+BeC3bucrKkJ8Ue/eY23/nkGu3fbNcuVlRqRQZmyU/DMH6/GWqaT90/fR7RcwbLgfPWX6SxchzvFjspiWcw/t4OM0+30z1lK1qaXcThA6ehAKyrC/2wRrcc83DYlDCOxvWOsHrXDTFKfrh4HK4K1tZjp6SjhTQCAf0Ux72sN+BslVNWivFwjM3Nk85B5qp35yJyeXUfZ9u+gdl0H2R4oKd2+jeHXqdp1ieC0WffPi8xMuxyor892ABsd9Y/woq8PpaUFtbk5yYqkEusR86JktsHiRSHe2TOdY+8ovPqaHxHwc+F8LidPOmlsVPF6R2x0ZUVi2luvk/6lU7huX4HDV1CnruHSzFJuF3tZmaaBZYZZ0UbP4lUEy0uR2lsRuo7a3EywpgbD4InnxWfmSn1sOzEhGFpdi7mvHUUBTYPOxfV40qC3V/CTn7ijaeL162oBcLe382qojZttl7Es2FX2JYqLdTy/eAfnoUMUnNrJ0JUpaJ5MhipWoOp+dgxWEOpKYdosmTnLXFw4oRE0cthxcwXr8o8jevtG0rfjpO7iWhlkGTMjE6m/7/4agO5kP5nUU6HxfKCDXi/SzZuYeXnR6E80EtQXf05JQz5qV9uv1bbfxdGLy7EUhdI5F6hbcgkRkChd3ENoYRDOXODtP9FBPsea6rmUlupIIr6uWnZYqKkjA5SEgOKiELNXBWOiq4KiConA4hWIjTXoMgTCvtIup0J3URXnB13MSIdr1+68WMeWfSiKlQyOPsF6nKywNmzA3NmEpoGiQFeJF3+jREqKxd69ClevOpgxw2DjxqDtpQiUtxyg4P/9KZ6+axxf7mXqt3+fzB99D3XXLpxDA+Tf2kvwStoIL0JD7OgrI9TjYVq+k9lLVC6d0gjqNi/qZx63h4zF8mLjxjvywnLIiGAwyYqkxlVCXng8aKWl9vkf7kV5EF4gSZy8
nMGx0AyOvnMFoev82sKThIqKcbkkBDG8sCxS9zThWFCA6LoOwCJHJ67X/4iV6doYVkzbWI3hFPhWVyLvbGY45EIyJfpKvJwfeLJ5IT2+X/3pKnYnFlsq8KilhyxOfHcvn3xiu5xIkm0F198HH3/s5PhxJ52dsr07HJIYWl1LIABaSGA5ZK7UvEbn/PV0rarDKCwktGoVigquGRmA4GfPfp0L3Rm4L53FGRpmnnKJK1cckJKCI1VFJQimyfarz7D3WM7IgUVSd+Gzb98+haYmNW44WlOTyt7DmSM+vxNMCRR9ffbZHP5a3bYNdedO1G3bRr6f1JMtXYfbtxHd3dHPNGFDcNgSTmlrs63hIH7Kdax3dLgMzbV9Gw3a+ziuXMFyOBCahndVN8FNLxKsq6PsjeX80W/dQOg6lqIgdJ0/+s1rrDWbCG1uRg+FT1zTxPPWW6T+t/+GMX06WmUlxvTpeN57l7yOnXbpXljByiqGVtfaMwgMwbbtLt7XGtjs8+Jy224SAwPclX1kpOxj7Vo76tTcrLJtm5o89Z9APS5WYFmYv/qIzk4HZ87IdHY6yDrYiNtlcvOmxKVLDq5eddDaqtDXJ6KBJt+QhCnJBKfM4ID3TXx+Gd8bb2DMmIG+bClutxXlxTvL/pSeU324L3XiDA0jpapcvOxk9mIF2aOiWEF2XF7CrptLxgwMuxMvGg9msef09CQrkrJ1N7xwuezPf9cuuy8F4s81XQfDsNf8mOdMxAutvo5/861pFM4MRHmxbl4ndau6WGPE8MKyUBsbcb/zDtK1a1FWyDeuk///ewtZGnHHimUFgG5KUVZs26Y+Fbz4zGzDH5Y39D3Jsgj+8iMyT7bRrpTTkrmeV/UtrJUOMMfSMZY2cP6Ck4EBCSHApZqc+O5ess/LOBwWTJ0VnkZskttm++1Kt25hqSryQB+WmU3Fvh/SWvV7uD8+TMC/hJL8q6womso7W6dwmqUsezGX97FoP5tDsa5jWcbYSZOJhj01qbQfkimnFWuOhcBCW77cXugzMka2sLGuRB4PfPGLd5wamNQTKF1H3bIFcewYnkAArayMYENDwixTXCSnvx/p6lXMmTOji/to72gRCMCw33aBMAz0OXNAktiqFFCr6uCyp7E2tediOfx2c5kss+NgDlOPGGSfPULPKZmlX64mdXcTSksLwjDwf+5zBOvq7AX/3XeRjp+ka3kdQhKkplo0No5Ef0tL7cmQnjR7jQgEBOvXB1FVi2Dw7iLDsmz/LxicXOnfpO5Nj4sValMTWtspdocqOFm4jiWXtvPFg3vZVAln19TR2SlHengRYiTQlHnVQbdVQE6GwfzzO/GkVKE2N2PMmYMzfIMV4UXVnrc4u2gDXfJM/DdnUjgrgC5LyLLAX7CQXcYsDF1QvEJgOfSEPkYJebFD5sjPr1A6w4FljsOK8BoSKTcKbtz4wFbFSU1S3SUv4u4VErEiprE4uHYtVmam3fM4AS8s1WU7DKkyliwjNI0d5+ZT/lwqRw+YZJ+O4cWJE/YAzLw8tOJigm++ieftt5EvXEDe2cyNFV4MU3DokN2jEMkU+nwCf0CKrvNPAy8+M5uBx+L3KgTuLBc3C1fRMrSe9AyLC/PqWOkOkZ2pkOGD+fN1hIAXnx+Gj3aR9vERep4pobOwjo3SR0ztaCH15CGcpoZ09SrCNAlWVqLPyGeo9RpTjzSzzG9yfWE1KRcC4FKoaRBYqX7efdfN3/2P6UyfbvLyy/6EI+fDhzl2eAdQsriHeq0TUtNw7t2Ls70dVHXkwoaxrkSDg3e0j0zqyZPw+ezSAYcDJAmptzc6vMjyeOIAHv38+/uRz5yJfi8CgdHe0Waqhx3n5tN2OoOSRT1Uf3maPXm4XQGHRk2NvTkd2NzCxkUDLP7N5exsm8L+FhdLLjopT3WRfboN939tQXFBcM0a9MWLCdbVRTcchgEtR9P45ffTAFi2zLYkzciwF2EhGHMDKMuQlUWcb/RECgexUJRP
+UYyqYeqx8UKS1XRiss4PlCDJATH89cx9GyA9BSFWbMtKio0enslsrJMMtIM9A9tXvQ9U0Ln7DrmmB9ReLYF9fuHcOiJeZFzogU18zK+si+SEnAwv1ggZI32doWLl2Vu31ZZuzZIbf3QuFPsE/IiEKAi/wrekh6UfcdQOkaxIhwkUFpbR1yJKipg/vwkK55CjeFFV1fcjX6EF3fFivCmIXL3PBEvLMl2CmpvVyhZpVH75Vyat1gcOLmI3h0GWmE9iyXIPmnzQnLZxi76kiVRO2nfG28g72ym5YiHX+5OIxi0xy9VV2sMD4vouvAgvIj0JLlcjyHwMI4+M5sBeDx+r1JdDcsX3ObiB/bgC48HxLoaDKdgvR4eGnNsL2JfkIOdHjod5RwfqOflM1s5paZgqhVMuXqaeS8UomZmYhQW4vvSl1B37cJT6uTi/ziCpqTQu7SMuYsCHDybib5Xp7Y2yLvvutF1e7R2TU3ijUBEccM7wqrZKGC7G+nmzZHIb+RG0OezHzTKlYi0NDCMxHZiydrQJ1O6jqUbmGlpWIYBpomRlm5H4IPB+GbB8Gcc6RkA4i0HE2wUhSwjVRSxYnGA6o3TEE45erOhqhYHDiicPOGgrsDHKr0V7bABUgOrurcxs7eV67OLyQ+0ooRP3aHXX7f/EdM00LWqjpM33Ujh09bvt7MD/f32RiA11XqgG8DYOnNVtVi7Nmg3eiZP8ydSj4MVWlUVmRk5lA776OuTyMw0kRtq0JwCGWhoiOHFrlG8+GQrF9QUzkkVTLl8B17ILnyLVzK3NI3Dx9wUFWnRY9B1wZIld65VGMMLRcG77DaO7m4IBhGShAVRVliZmWN92IWY2HoyyYsnT+HPzFRddrP55cug60iXLtv2s+Eb/ftlBYBwTsyL48edyLJl3/NIMmsbdI4cD3D1spM5hXC6oJ6qs4fG54Uk0VXi5eQNmxeqCsEgdHXZ16Rh2A+7X16M7kmKmsg8ZovR5BX2KcjlFrzySiDmxLFPOlmGzAwTWQ9itrRjWhWkv1bLhv0f8ezwQQ6b5fSsquXScD0ZJQEy13ntHLEkEaytpa9foq2nDk+6wO8TVHhNHBk6bW0K77zj5vp1B9OnGxQUGDQ3q9SuHho7bj6suOEdYTXtSaW21ov7/V9ipaTguHQJY/ZszKys6IU52pXIEzO1crSd2H2PJU/q8UnXaf3eMbQhA+9KGeurX2Wwu4em7Q7SOj+mqsKBGB6OLt5xn7HXG29RGz7v4uAPiO5uqpdYmBmZCKd9TsRGH5uaVAJBB9dX1qGJENfePcKU6x8zfbpB3u8+S8AfItNlRh2F1KamcC31yALrSYPMTDM6JDU316SmJsiWLXYJUmOj7QAx0Q1gIoeZyPd0nTh3iEgKOKmk7kWyU7BxY3AMK+Dh8sLnE3jLgzgUjXfftVkBMH26MXIwE9yMj+GFJLFV2YR33VU8n3yCfOYMlsOBVlw8woqMDLSyMnt4WVZWtFl0DCvCvzvJiydM4c9s79Fse7r1H64j7YUX8J08xd5/uobzchZrCi7cPysiG4j+/gl5EQwK2tqU6D3PvrdOYJ3N5pnFPZTXLIEde8mcZ9wTL6qqNMrLNQ4dsl83Ui50L7wYjxWBwOQoJU1eXZ+SEkWa7JNDwrO6FpcB+T9rZdqFQ8iyhfNzRdw2vPiHJTt9lBZ+kjTS8+3xWLhTRXRkdlqavRuO3Qi8/XafPZjjkIzz0Anq53ZCSvziGlnYI8M7oj0D7QrC56DBIaOtWYN08ybBtWsxZ80aGTpzp4EiYSX7CCap7hR9G/ShDRm0XpsNwEvPy+z55+sc2x2inGM49VuEKivG1n+GS4ASnh8R+AcCuH75S5zHjoEUY2dIeBZG+DkjJQkqHdYmNoQ3AgUFOiEHZJ9tR1tVHDfH48QhJ51z63GnEHWD2bgxSEWFhmHYhxIICCSJu6rXTOQwA/HZgGR5UFIPQ+NlJe6bF+H+nCgvBsGdMnKOxgaNios1
OjoUhGnQoP0KKTD2Znx8XrgR/mwalixFX7YMMTQUN6AMWSbY0JDkxZOsCXgRqeUPyB7aTmagb3fw+V9PY/93TtBxPI3SrL2YvzP7/lgBiXmxbl10KJkIPyeuhG2fiXw+m1VL+6mfcxax4yLOM2fuixeRXpn74YXXG4z2qE1WVjzQZsDn8/Gd73yH27dvM2XKFL7yla/gSVDz9wd/8Ae4XC4kScLhcPBXf/VXD/Jrn3jpOnR3C5qaVCTJrlWurPBS2NaOrtuWckMballvaAnTUMq+fZjDQfpKvHaKyQ+5bY2IFpWPNC+yDNOmjWQEamqCMOjDddIPaR4YHLu4qqoVXdhjd9mK7AKfGzE8jDl16shGILIouFx39TfH1gYixF0/L6lHqLuJvqV58BbfBqD1Sj5Hv52GdjKD8ult1M3owsyfhVZePtI7MDqtmyjqF/7drs2bUQ4cQOrpwZgzx64xDdcVxx6TCG8I2tucLLqwHYCCAgMhBPLJk2hFRdEJ38Faezq3/4QLTxpxi3bkJutubuADAbh5UyKynCXyhYb4CE9NTRCHg8ee7p2sSvLi/vQgvIgMsoy4DnlrAzi2N+PyKOhyFSdPykyfbjBnzoixRFGRhhoatjcCnvvghewBvx9z6tSR50RchCL2kXe4QCyPB0tV7ebOmEx0Uo9Rd+CF5fFAipv6gtNgzqfl1CJO/pmGcSWfVStuUO86z9Dy5++dFeHfPYYX3d22eYkkxR1PXAmbomDJsn1MKSmYHo/dKPwQeaHr0Ndnl5tGDn80L27elCY9Kx7oMH7xi1+wfPlyXnnlFX7xi1/wi1/8gt/4jd9I+NhvfOMbpKenP8iveyqk67Bli8qePQqXLjlYskQn4IeUXY1o/Q7mz7fTV2pTE9TWMubasCzM4SBXfnaEKwcUbhd72aRsxXW4nUNyOe26kwULQixebDcmt7crWBZYbhlLUdjbkUFAmkb1Sx4E4SjPDhmX6aPWKyFEfNpNCAjqY1N16rZtCJ8P+cwZ9IULsdxutMpKRh9w1Italgl6vajvb0aELb3iFpNkfeinrruKvsky2ob1VFf6OPAPMyFs7bYu7xhCgDFz5shz5JHhQwkbVAIBpGvXsFJTo6EWMzsb6fJl++YhXFc8+pjMjEyaGhUWXdjOnOutXJxeSlfRGhrER6gd9nCm6IkmBGJjDT2yC3+Cm/zRi3SiRTkQgG99y4PPJzF1quD3f398h5nY72VkTJ6FfTIqyYt71wPxwrIHk8VGPuef20ElbYRKitneqBAISCxZEor2CXR0KKxcqRGQUth9sRChaQ+HF1u22M3DYDcV19fbLmLj8SL8hWUxdgJy+I1J8uLT1R15EVPWU/2Sh5a3JUhLw1IU1mccwErLsRuIRz3WcrkS9xd2dSH8fszp0+1zZRQvLJcr6mIYN90+toRNkjDmzmWrkkPNOmGXFUXsuOCBeRG5Pltb7QaE+npBVdVYXuTlmZOeFQ90OK2trfz5n/85ADU1Nfz5n//5uIv7Z1mxtWM+n6CvT0JV7YjOjesSC85tJ11uY/fUCpy/WcHMjxujEygjO9iohKCrxMuVAwqzr9lpYlGgE6oupkfxUhQMgWXRcVilqEijqEjj5EmZQEChaFkxorub9s4cQs0Oaut12xLuZ1cozb+Cqt9G2zBygx43UCbmoo8sCsgyks8HQqC0ttqR3cJCCEeKIyPDoy5G/gA7TxegpCusye8cWUyS9aGPRYk8n2NHvkcf55BpOpxnfxEMYsydy+bFX6O2eggyx0b5lJaWaF1wYEODvQAHAnj++q9xnDiJUJz4X3nF9oceGMCcM4dQURHBdevscy0mg2SGbeLaOxQ2LnFQ9OozSCckzv98N1s/t4GGIkBV7KmnqopWVYXsFOM2d41epBMtyjdvSvh8tmvLwIAd1Zkzx0z4mp+668wTrCQv7k4PjRcxkc/s99pYff4Qmiboe7kYp7cWdb+dBcCy6Oh4tLyQ+vqiJa5SV1c0
oivy8hLyQvL5IKix/VYRrms+Slf74rIMSV58+rorXsj2sNKmJtW+6Q6F0CoqeH/uXGpeUhGxFQHhDIG6dStSby9GZhbaRtulUP3gA1zvvIvQQ+hLl+L7wz+0Z9MEApiFhYSKigi88ALq7t328aiqbXQR0qPOQsXFGhuURo61mnxweiOWJ0RtTQBXDCuAB+JF5PqMVG93dxPNLIx+zcnOigc6pP7+frKysgDIyspiYGBg3Mf+p//0nwBYv34969atG/dx27dvZ/t2uxTgr/7qr8jNzX2QQ5wEkmlpmcLQEKSmQkODxezZgosXYfZswZIlFsJM45q0mnOzNvCFXIu0115DpKfjdLrQ5Smkhes/Bwdtsx6PB1pWvcys944ydfgiWT0hzM//J16SJCzTQmz7iLk5bn512otl2Wtw7ZoQDf5muLiXzBtLOfDLNZw6vQiCQdYu6GZDlY4YdNgXVfgzHa1o1CYzE5GXh9U/gJg6FVWSEG431ty5OPx+clUVKzMLVRUcOSKRnm6yYYPFRwezOXrbotx9Dc/UPDwFBfYi0ttrW5DNmAEDAxMew2STLMufzjmq6yMnwMNcSb74Rft13W7St24lcqJaL7xgL9YWfPSR4NQJ8Kbs57lFF/nV6Tm0XKkk7SJsWGTFJwFu30YK13Q2H3Ax7E5hwxfSERfOI4b9fBSsxeUfZu3QEKiqXec5ZQpqdjae1FT7c//CFxDvvAOmSVp7G1NzNlFb62DthucQlknNtm3MPt/G0GWF9Dc/j9i2DenoUcyyMqycHHRDMDgIubmJ36rIn2y/lWljfu7xQG6uoLtbkJsrsWxZNsmqtgfXw+bF08cKSMSLmTMFnZ0wf75gwQIL6x55of/65+n58CSWBVOHL5I3tRopN5eXXhKfHi9mz8a6dBlJElhTpoDDARkZOIaGEvOiLpNtN57l2CfplC9TyZ5TEG0WTfLiDnpUrID750VXJWknYMOGxLxovraQQEhn3fMO+3Me9LGttwyXalEzdBX3zp3gcCBkGauhAdXvx5ORYR9Pby9i925ob4fU1CgvNqxXkLaprFUOkpKSwuCUDUzt2Id06tQYVmRmwrRpE//JiXiRmUn0fi4UEuTkSBQU5EzKm/076Y6H/M1vfpO+vr4x3//1X//1u/4l3/zmN8nOzqa/v5//+B//IzNmzGDp0qUJH7tu3bq4xb/rbk2+H5MSOYzEyuHI5eZNHx6Pxc2bgitXgpSVWVy/7iIQEKSkmPDcSgYGBMuzBzCMIF3doD9bzJatLvqODpGebjulaJpdt2YZJlMPN6EFg+Rk+DAuXif49a/je+MN1OZmlPZ2FhYV8077UDRcU7LgMr7Nl3CGNNbkHab53LPovX0IVaFi2SUGr/ntxmLbQ2vM3zE6ym+VldO8xUIpeo7qZ7pQVBVx4wZpeXl0BYPQ3UVxMQwMqDQ1KTQ12a9TvGkKxSWCrrRFEDmvdB3VMBDXrtmRnnGOYTIqNzc3/hx9FOnrTyESJq5cQb1500653rxJ8MKFaNZG6zZZkm9RqR/F9MygctpRBpQ5BIOpdHdr0WMUPp+dAfD7sYRE/7DJ3r0GA2KQ2gonLV0ltN1IozTrNANCQhgGcloa0q1baLNmEQgGsW53IfX3oQ4N2e/hjZs8s+wcZkYm3d3hgy0uJqd/gBkdjQT/zJ5wrBUXEywuRr/ZPabRd7y3KsGyFvlTmD9fJS3NwYIFqfT1dU3axX3GjBmP+xDi9Gny4kljBdw7Ly5cCDI8rFBYaDcGP/98gGbPSnp77pIXToOZxxvJDmoILHIyfJj/8s8MDQ3YZZtNTZ8OLyqraO4tQ1WhshbUxkbEtWsT88JcQclzfRRtzKa7vy/uTXwqePGEsgLG4UW4XCc4nMOS2QEqtbvjReqwn/5hk9Ybs+l/P0jN8wZ7Ly3hSG8PpZ6PGZYc6JqGlZGBYlnoFy9i5uQQCAQRhoHo70ft6sJM9SDdjOFFD1BcjDowwNK2RsTWRgLc
HytgfF6UlcH16y6CQQF46Op6Mnlxx0P+v/6v/2vcn2VkZNDb20tWVha9vb3j1nhmZ2dHH19aWkpnZ+e4m4EnSYkcRkbZJGNZY4cQ9fUJQiHB1Kkmfr9g7Vr7ebGA6OuXaG1VkCTw+2HJEp3p003UfXuZebGFdFeIS3NLSfv9N5jzF1/B/cEHyJ2dGAUFBItL2EpDXHnRD/77dJ71PcNG/SxvHWvgHFMxrqXgVARbVm5CGH5Uj4sqgmNSfwknTu5Jpf2MnYozc3KjLhGegoLoVZNodkFtvQ4iM/6NnMhr+knSI1qIPw1njYQNXeG/x+v3YzoVcKkwMAAp7nD95cjCHukhwbLQVqxAGh5mbUkKfrcIDyaagjXvN1hVd5Uy7wqCmRkoOxvjbGn3HUyxbyBW28fCoI8d5+YjTc2hak2M5aEQaN7akX4BRsojEjX63qttm88nMAxBQYFBKETC17jTTd1nVUlejK/74YUQMDwsSE+30HV7DkBk3sCdeFHU10zK4VYchs7gihJOz6mj4Ox/JePwftzvvmuXFglBsKj4/njhdlLtS1BWOB4vOm1eWGowut5PyAtJouZF59j2o6eBF08wKyBxyVDk7/G63QTWeaHJPT4vIpOo09MJlRRTu2AQ/apMy5kc2jolcK6h+Cu3KS+ah2/qFNTdu2HYj1ZailZZyd4TUwnuCTvNeTyYLjeNrdkoqVMo9XhGzplwqVykjA4eLivAdqVzOiEryyQQeHJ58UCHtWrVKpqbm3nllVdobm6mtLR0zGMCgQCWZeF2uwkEAhw9epTXXnvtQX7tpNFEJ1Nk4Xc47C7zyBAigJYWhbNnZc6ehdJSLeFwotgF0OkEl8vCNwh57gB5A5/Q45zH7ZU1VHRsw8zPx7h2ze6qR7CVBto7VIqLgtR6NRobVd59181Js4ZDM4r4JNfFzT43eZLJnDk6777nATy8+rIP5aNtCe3kYu262toUhCDOTSJaJzoKCmNmFzSpiSchR54fdp14Ehf5R7UQfyoTnRMAVvT1Rf8eyeezx8Hn5REMBu3ynkh0J/xfubMTaWAAraiIwMaNWJmZ1Dp02o/A5csOdN3Bm3+SDWG3w63KJlzyMJV1EpZDjr+BWLee5i0WbWRSrOtY1ojbiR6ysLY0RyzUAbuBMlhbi8czdpLwvSq2TjQvjzGvcaebuqQSK8mLe+eFrsOZM7JtKeox2bQpkNB6dAwvVBNtMEThwCf0T53L6Tl1zD+3A5dDR6usRD5/HghfhzTYNdYl9np+V7ywUvm1ObtRusdaVcOnxIuIL32SFVF9KqyAMbwY/fdIQdsq1FLVsbwIBOImUftefx1UlerUcNMxcPmqk6KSLMzCFISAQJgHSoaLymyDoC7HbDZtlrQLQfFyC8uhEz1dLAt5RxOBgN1zE2m2f1isgKeHFw90SK+88grf+c532LlzJ7m5ubz55psA9PT08Hd/93d87Wtfo7+/n7/5m78BwDAMVq9ezcqVKx/4wCeDxnMYgZGFf8YMuHZtZAhRX58gGBSUlWlcveqgtNTeLff1xe8aMzIsyspGxs/X19sWop6XqnE1h8h75xc8++HvYRbOwXI6MadNQ7p+HVkIZrt3wMo6GvgI9qt4vXajzMmTMmfPZmCpkDfNnqZhGPZls2RJiLpVXUiN8QvU3hNTo+ne2togbW0KFy44kGV4882YRTpyoUfGjE8wuwDDwFvSZducjgqNPcmNYY9sIb7XSNj9pp9HNf5ZHg+Wotj2fpmZ9s8i89ZjPyvVbhaTBgYw09Ojk0Uth0xTk4pl2Qvi9esO3n7bwxtv+OzZF0cUilcAg12INA+1tfbvtTMJ9kJfvEqLuxnQQxYnvruX7NNHOLmonKVvVJO6p2kk8lNbe8+NWqOjNrHNXgUFnjHp4YcVUfqsKcmLe+eFzydYuFBHkux/Dw0JZNkac34n5MVwFVntGtPa21i483Wc6AQ+/znkkydB13FcuACyzGz3DqY7YLniICSq
8HrtG/mJeLF0ro91BZ2QFn8zG1seNC4vYtensO6ZF0lWJNb9ZE0eAi/GsCLyWgl4ga4TF8mJbTqGKC/efdcNgNcbU4mwwo/o66N2tf1+xbGiKp4VkY3AlZ8d4Up+BbeX2c6L7gdgRfjteip58UBXT1paGl//+tfHfD87O5uvfe1rAOTl5fHXf/3XD/JrJq0m6hCPLPwDA8Qt/B6PhapaHDxon8AHDijIsp1Wjd01yjJx6eCUg/vwDAfpKvHC2ho8v3gH58mTWJ1n7B24JCFMEwtYOXyAZ4+1Iek6WkkJAnvohdcb5NvfTotzahPC9mx//fUhMOIXKDPVE43WRp5z4YIjOqSmsVHF6w0ijJELPeIOIWQ5sRe1YeA5ehD3jROYWVkEGxpG3Cie9EEzd7JLm0h3WpBH3ahP9DoTQnKi35PoZzHOJLGK/awY9NmpV4fDtn9L9WCmeqIwLynRePPNIG+/7WH3bpXOTpk5cwxWFflp0N5HavRjutxoG9ZTW0t8WdmoqKBvSMJvuuhfUkJnQT2zhzTk8C7CUlUQImHkdIK3K2HUJvIaiT6KiW7qkhpfSV7cHy9SUkZ4sWfP3fFC3b8PR59GoKEG9+E2HJKO8+RJ2OxEaJp905aXh15QQMmJ/4klBIEVr9p2pELckRe//0YQtsffzMaWB43LizVDuLaPrE988YvR170XXnymWQETr+N3y4rw6zwUXsC4rIBRvOjrJ7R8OULTMLOyMNIyaB61EYxkp959101bm13+VrzC5oXYafer1K5bPyErEIJh08WV/FJ6Sr34hyS6Kr3kOe6PFeE/+anlxZOzlZ6kGu9kiiz8qmoRDI6khWQZysvtCE5Ojkl3t707njrVHLNrjL527GyB/U4U1WKNGfZg7hvEeaCFUEU5wcJCRCiE4+JFHLKM/+WXR6zmRqVgL1ywx8/bw5si6VgIer1IN29i5uUhnHZNnmURHVk/fbrBH/yBD7A9qYUA78qRchKGhqILc1WVFucbbT/2Jmm734NBO0WoVVRg5eQA49etP1Gp4HD6+p6iVg8a5Yp5jyaE5ES/J8HPhM+HCAYxp06NvlZEkc9qZG5FDoFXXoFBH41tuait0hi4v/GGj85OmZ4e+5z/6m93ITX5MVM9NLZmIxwmQSVxmYBh2BEWl8ui59lqrg6DOyWckg3XhSacbTD+WxbuDeCeozYT3dQlldRE+jR4oYcsOg6YZJ86jLTlYzLmgCPs2S8dPY65aAFmnm0VLEIhEAJ9yRKCXm/cNTQhL/akUrtuPdJAf3QOQOQGfiJeSEM+GjQ/VjijwOBg9HfcCy/Gjaw/Sby4H1bAg/Fi1PvzsHihlZWNYcWYLHMsL/5wIVIwgJnqoXlPKp2dchwrItmpf/iHVMA+77wlXYidfnZcWIQa8jGkx19HsSVlkfXdtbaS27qCf2hkMve9siL85z71vJikh/V0KDZLFqvMTIucHLt5ODPTRAgm3jWK8GyB/U6WHf4prq5raAunsbvhGxTufYf07qukX7hI/9tvkfbd72IUFMTdIMWmYIuK7LKkCxfsFFxxsf11JB3boDXG9QwIWcbrDfLLX7qjI+u93iDoOtKQz544mTayMJOXF3cTj8sVHReOLCOwov7DkUhy7BsWl+KE+MXI6417rcmqe41aPVCUa/SC7fWOm36O3tBHIBDzexIew0RTQGWZwLr1+HSLthPp6FsGqNkoaDqcZ5f/hBd2CO9FLWhuVpkzx24Evn7dwVv/PIN/u8hNU2s2By/nI1Iy0U0HJSXxZQKGAZpmN2pFRrsHAqMW1nvcCESiO4piZ+ruNWpzrxGlpJK6kx4WL3xDEp2FdVRdPEzesWas0DRC8ws5YS1m+se78N9wMKVitl2lIQRGQQFDr78evYbujRfb7o0XaU7wj6xPpKXZDcThXjEh4icUCz0EwSCo4Y1JhBeJymFi10JFQausvKtpx49T97P23zcvEtzcT1SudC+8wLLi5sNYo72YI7zQ
DNqOphDaK6itd8WVhtXUxEf2LQuys02uX7c3ojsP5eL8ZD5tpzOQXVkEpExKSkeVlAGrVwfZvn0keu+tG82Le98IfBZ4MXmvkqdYo3eKcOdOc08a3C6pI9j2c0IZM5BmzkLTJPwzC0GA2xR4vve9uBGOkUYZIURclHb/foVXX/UD4HLqVD3TjTBzcZn2CHoz1YM0NDL9tbFRZdYsI/rSTTtkGrRf0aD5wecmyPp4d4iurrETij2ekcVHUaJDqcbUSsakOGMbWEV/P6733kOEQmPKiyab7rUe9EHqR8eAIRAYt2bUcrmQz5xB8vkwPR4Cmzbd+Rgi8NV1RHe3vYEzDBsQAmor+pFPnKDtvRwOfyBjzMsYU+cfe3OxapXGH/9xpGTITecnv07h9GGKv2ChuHU0zYgvE8DeN0YWc59PEAg8WM3l6BrORG5eSSU1WXSvvPB4LNypgov55aRdPYucPxsCFtemryTrZich04HVeRGHYkUDRxFWIMSYkp0oLwwDtz5MhVdCCHCF7oMX/vigTlrkJn70hOIGe/iU0tZmd0QHg/bN/aiJt4mGYVpuN8qBA0jd3VipqQRefJHJOijkftb+++XFeJuIh8KLzEyCXq89TM6yULdvRysttRf/CC8MnQZtM05rNq0/m017xzyQpPjGckZ4cfjwyH3Ku++6+cHfZjB92ho+/0oXSoYLzdDHsEJVLYaG4tf3JC/uTk/Rn/JkafROcaKTVdehrxdWD39ET9ZsBCaOIxdYwmVOFn+B2y98lc+f+RvU3bsJrlkTN28A7LRYbAq2qkrD0kK2P+/BFkRjkAbVhVZfx+4fzEHrC+At0+zyjXDt3pIlIV5/fci+qdsnUKwpeEt7ohCwIg2mMalH5PCEYlm2+wnCZSb64sXRSI4IBMZGESKpTJdrJONgmjg//th+3qjyokeq+0k732sT1/00fYWVEAyJakZ1HenmTfR58+wo2+j3PsExRDdjLhfKwYMo+/cjZWaiLl9OsL4etbERqbubjUNnOeR83S450DRqa0Ojh2ajqhZFRWNLhmRZgEultt4XzSAILEBEF3nDsCMzsdGYaBNXqonsHBVOukOWIFENZyBwb9GipJL6NHUvvAAoLgri6B6i75NCei9CVs95lnCZ40Wvobhg9sUfA7bnOjBmgvEYXgz7cX3wAQILa7uH2vp1CEli91v3wYtAYOIJxb29UVaIYBCtuhqpqwtt9erEa+MoXkjd3WCaOK5eRQqXIQVeeeXRB48+DVbc73OYYBPxsHgRzhAgBMr+/Sh79sTxwrV5M0p7GxvSznJI+1/tdK/LNabWP5YXkXKhSNRflgW1L8gIyRhxmAv3ucSWCI1e35O8uLOSm4FJLl2HLb9SGP75LuZc/5gL08pJ/8JaFr//fZbqR1mxIoRoCKFnlRKUsHfjkmQv6ow0ygDRuuucw7twtx7AmDYNubMTraICta3NLi3SZ9B2cTo4HFSt0zl50j5FlizRgUhTl4xyzIE0lGAsOYyUl9y8ieVwgKZhpaePNEqlpCCCQftxo6MasZ71QhDYuBGh6xAM4jxxYuRx1qeQcnuQ2sx7aeK6n8fHPG9CMITT70pLC2J4GPmTT6KZmomyMhATGeruRrpxA2OeHcmRenuRbt5E+P0Y2Tk0XlEQxrCd9lcUmpqksc1cMYq1EFx6axcz9GF2bF9LyaoQaR6T1D1N0XHxYWOiMZHRbdtUso/uxUWAmb+1Gk8aBPyQ29aElKIyXFY1bvQ09vVcLovGxslv+5ZUUnejsbyoiOfFSh1XpkJg1auAfY1rlZV2aUcMKyCeFym7mxBDQ5gZGejz5uHeugUjI5NgMJX2T6Zgud1Ub+S+eWGmptrfS0kZyRiHXWfE4CBmTo5d8pPgDx7Di0AAVVFQOjpsdzPLevQNxp8mKx7gOXfcRDxCXmBZGGnp7Dw1B5zC9vpkAvtYRspLI1p6axcnvzvA9N9cTVo6yA4LtWmEF+HDmpAXef/7apyK
ICN9hDVJXiQ3A5NePp+gr9+Bw+ni4vRVdExZzzM9Oqc2/SFzpS1YkgshBFp1NVplJbop4esTeDzYBrzhKyxa9zYMS04MUXH+tP0LLAtl3z4ct28jLV9OfcFp5PMKrQemc+gPb6MvzuHVV/22a1D4Yq2t1xG1ywn6Cu1hI42NCL/tCMP/YrtDoOs4Tp8B2WFvMtasiT7OcjjQioowp08fsyBF0pcRz3oIR3U8HrTiYhw3b6JPzYtfhO68ub8vPTGOFeOBIQwoqbsb+exZtLIyjFmz0EpKMGfPvmNTs3Tzpj0UbNEinB0ddr2OaWKGN3amU6GxfQqtzpWsfDmbmucdNO3R44YNRaL9waCgo2PE+eHdd91gWawpGyDnzCF+8SOJA0V1vKRuoVK0o68qjvtgYyOjfX0C/zCkSAGCzR2cHBDscm9gvfkRgWtHmPrSCn7xcxULe3FPtGBHXq+v78mwfUsqqbvR3fAiWFWJ4QjPNzAEvn6BZ3VtXMR0DC/OnQdVQQLkU6cRbhcsXMhG/zYc7mc5tK+Alr9SIUW6d17oOs6PP8YyLSwhCNbUAPaUYnQdoWkEnnsu4Xo1Hi8Czz+PCATsYYkxN7JJVkywibhfXkT6Aw0DfeFCECIxL1xuduj1HHJO59l/NZvaDUNxtf6jedHernD0qBNdt+10C+bozBTD3H7/CKdOybheXstL6laUI+12husueBFo6mDHxzJnCtfxauoWqqR2hp8tTvKC5GZg0svjscjMNGmdtgYsi/IijWefDZGXZ/Lhro34hyXc2+wTGKRxh1tE697SLE4teoFnOYbn9Gksh4ML0lxuLFzPoucqcb33HuvS9vPj4T9A6vIwPajh9Y4t+4gsKpFSkt1X5qMNaLz04qC9GA4Msr17FS5HiEpNQ+rujkZvlI4OpHC0Z3QExfJ4QIgRz/pIVCccydh9fRH+3kyqX7DbgCJRZlW1qKrSHup7/6kNcLkXxaTExzRTj+MUYebkwOnTqLt3Y6kqZnY2wdmzJ/wd0Z6Pzk70hQvRqqrQSkpQc3Lgo49Q9+61sz8ly1hRkUZNvT6mdjPqCmLoeFf2gZFLe4eby5fthrBXPx9gcU0V+oc6q/6+BUd7C5mZBn2vluCsrRmX2h6PhTsFjkxbhzJVpvJGC8q1DmbMNLiQX8p7l9Zx7ryTzEyT+fP1CRfsJ8X2Lamk7kb3xovxhyEl5MX5k1w6FSA4fQozvlCM9swzeFoPIUIhLg5kM82y1+TYjQDcmRfSzZuIoWG2hepwX+2lqLsbKyMD0d+P89Qp+yY7JSVhqU9CXvT1obS2svvKfIKGk6qvLkfI8mePFTA+LxKUNN0XL0Y1bUey/uPyonQ5z1al2RvEu+DFe5s96LrtJlSySmPF/1lO05/Cwp6DpP6kBWteCG1NyYQOQbG8ENkyi3pbWdzXSnq2ye2XS/jZjY0cP6F85nmR3AxMcskyrFsXZMmSEC6XRUeHwpEjCqZpR29UlegOFca3vYo7idNk/F/9Mq5v/1csp4J+yckH5vPs+HuLxQWvcvpkPtcGM5guBrlw1cUPf6jw+utDcQ0+0X97PBiqG21Ao/VKPq4DGRQXBdl+9Rk6buiUTj2HkZmOmZMzkkLs6kKrqIj2EYxuCgu8+GL0F1kpKRAIIPX2wtAwfk8BbScz0LdY1LwYP6jmoUd9HqCWP04Py+4u9iZ9VGM2ML5ThN9P6Jln7EhZ2PpNunoVc+bM8aNtfj9WRoa9ESgrG3msZdnN37m5iOFhquqCmBnuODvAuJRvDCwaXG7azdeYNcs+hxrURqRdQfrraxD/cIyAH6Teizg9pRN+kJG0bX+/YH/mWqyft6AoFoYBH+gbCHzioLdXih7PRAv2k2L7llRSd6NHyotv/VeC7hSu35D58YkXWKL5cV1fyU/PrsRyOrlwPQVZdYwp+7gjL5bnsa23jEOX8lg128CYmmeXBx4/jnzmDFZ6OhhG4mh7Al6Inh7ErdsEnPm0ncsi
tN3x5LACHj0vvN6RLH1MSdP98GJ0RiS4dm30tR4KL2a+hiXshvUNSiPW4SDOFzYQ+PtWAn6YOnCZ9NrKu+bFnrS18A+tWBYoisX7WgPHTyhRy97PMi+eoj/lydboqXax34/UqJmmvbBmZFj09gqOH5fR9fgx9ePtWuNO4lST1D17bNtP7F338/IWfnDiBd79aS7Ct4ZXSj9hxYwb/OTaQk6eVKIDxvbvH5kwKQRYDpmP1E2opcOsqEjhYJuTpt1Z4Kim5He6qaguRMvMQPh8IynEI0fsBSInJ75+NPKHu1wEXnkF0d+PsmcPnh/9yH4jhKD+GRPMJbScWkTbGfsCHu1G8FAlP+DY+4c4KfNOjdkTOkVE0vNhMABxG4m46FFM47bl8YxAQNcR+/Yhnz0L4TSy5fGMed+FIC51LPwjswQQWvi8s+jYb7Kkrx3X3qPkZFtk9ZwndfgajpZDGDXhkrcJ6jhzsk1ecW9FzNNZtMi+fl5wfERjygYkCZYtC/Hii4G76d9+alK9SX029Fh44XZRUGiCMHA1N/GDj9Yh979C3pRhFk7tJjgzgMOj0tY2UvZxd7yYAnP/F0rqblD+0gqE22aCvmwZwuGw1zyHY/y5M6N4kfov/4LjwgWem/YxzP31T5cVmZnRuvvJyotIv9eYkiZZvmdeYBhYihLnKhSXqb4bXhhj5+TE8kK4XJiGRUuzRanezibpKOezLfKGzuPuugb7DqFXVOIbdox7gx7hxec9W7HKR7JChWd2ci5jA5DkRXIzMAk03lQ7GEnXpqRY3LwpRRdvhwOWLdMjjf5R+6zRjTN9ffFjszMzTNSmJpR2u84uWFuL2tREaVsLG4VM15SNdA070A2Jthuz+dwXNEDDHRxEdA8THM6j/YjtOR3x921rl1m1xE9NQ4BTZ8ORIEmi5nkZMeTAgmgDkvD70crLbZu41NSEEYrojaRlRd0gHFeuYKWk4Ojtofr35tLy36To+/fIFvfwh/Mgi/PDrCWNTUWbYTDGNnbdySkiuH490tWr9mMz7A1apFlM6u7GcfEi+uLFWCkpaCUlWLITMmN8ugd94Pfb0yOHhtDKyxOXKIWt5SKpY9Op0NiaTeuVfIpfs6itH+Sjj1T+7PubeL3vONX+HWTnT6cvt5CBGQuYrQUQ25rZZjTgD0iJG7Usu3FMOdKOVlWMv7YWdUcTBT9rpX4GXFpVx4ubgpPVUTCppO5bj5sXMxqbKPtBCyjwY/9qpmUMo8suiissaup8NO+UcQXG4cUOmbYDgpKKFGrq9Dhe1G6wwGn/IZbHg5WRQWjZMrsxOBz9T7gWj+ZFxJWor5d1izpo6a6Ofu+RsiL84Ux2Xph5eeOXNN0PLxQFbcUKjBkzEbF/a18/dHejlZUh9fYm5kVMD0lkTo7pcsfxYnXNIN/6loev73mRN3zHWC92MDV7Bj3ZhYipC8gPBDj+nX18Mr8edwqJm3pjebHaPo/lHU3MTvIiquRmYBJotI/t6HStqlrs3asQCkFZmUZNjUZq6khXu8cTb58lyxYXL0p8/LETwxgFjLBrRGRhRwgCNbUcOeJEl12sKjfYb6TwiWMhc+YKvLX9qFu2oB5qhQPQUFoGK16mvd1Ne7vC5YuC+cYp1hceoPGt+VjOKVw8HsCRorD3uyeon9sJKfaiOK515ahBV9HFVFUx09LsMqFr12yP+94+9t0qhpS86ALf1KRSu3rIdqu412jMHVKyD7o4J6wlvd80cEzkJrBp05iegTumqWUZc+bMkUyH2w2GgXLwICIUQrpwAX3RIpSDB9l3yIM/JYuqP8pAItybcTCLqb8KUOluw4x1l4gBoOPiRYw5c2znkXBqWMgyU9OHWfH5mdTWhRBCsGhRiIxMiwspZSy6eor8ZfnMcRm8t+Lfcv1oE6E2Bd8CiYyM8DUxCJlZsR/M2PNYr68lH8g2JYrrJnZ5iE6odFljB5glldQk1p14IcsWTU0KTiesWqWxevXD44WFYCsN
DE13k+1QSDM9GLky+lxBbf2Q7SXv35yYF4dkrrbcZF52N+tXnmDbtuexhoa4eErH2dfF3mAn3uLbaBvWT2x1fDe8uHEDS3Kw69JS5Jmn0ZcsAUkaKWEy7nMNfkp4cVclTXfLi/Z29h3OxJ8yRNUfLUVSZKyQzt5/uEzWJZ2KKYftrECCycax9WvCZ9vOahvWIzkMVpSnUFuv098vmDlDZ9p0B6d7KykNniS3eDbp2Ly4cLCRC50eclbA8HACVsAD8SI2Cwd3ngn1pOop+3OeTI3XlBI5CVes0Ni9W0FV4cgRhbVrNVyuxPZZAwOCjz5ykZZm0tMj8Wu/5sfvjweGVlUVtea0LGhqdtGub6Rok0ax0LhwwcG1624sxaBpi8VzMR7Qjr5evC900X5kFpYFelAnFDL5TnMV+lCQ1P7tLBzWkZ0Wh4ang7SI+oLT8bMIwkq08I2pQaypIbRkCamBANKt22wbrOJQZy4l/6qPmheddh3oIRnnofiNx11dqXcRxXngxrDRCy/jRLfu4fUi72GiKY93BM+o4xFdXRAMYjmd4HQi9fRgWeBPyeLn++bQZqh86U8MmptV2lpNMkKlDE+fxpq5l+yaUtWFFPnMUlMRA4O4N2/GAoz8fLuG1+FgZUoKK84cw3AsIej1Mi3P4BX1Q5Zf/CWS00Faun2+TzvahOJxYg5oCMLXhMskt60RKWXEPg5izuOYAlS9vhblDqG/SGTV5xOcOSOzcKE+rotEUklNNk3Ei74+wdCQQNMEaWkmmmZnBR4GL2xWqLR3KBR9bi0A039ucv26GxSDxkaVuqI+HP19iXkR1DBCFprk5q1fLCFkXMQz0MWCoRBOp0XrlUIAqit9kJU51rryHniR4g+w80whrUPLWTX9ItX/Op3Gw3kxk5N/FTc5+TPHi7thxejjcbnsoJxp3pEXu7ZYHD6bS2rGLPxiKqXly+zfaTHCC48HenpxHj+OpGmYikLguedQDh5kXWcHocVL0PDiSbVYfm0bc7tO2cHL/EJcLgNNg2W/+gGDBUs4I61mQZdBTraRkBVwf7wYPX1YCNvp6GmzFYXkZmBSKFFTSuxJqOt2UDwUsv8bOXdja9cille6bi/ks2aZ3L7t4OpVB7NmGWOaYpT9+xHBoF0mFB7wMf3YTk5f8PDqq+WA7RndfioTp/EMG42zCAFGZhaNbbmAfRwF8wTOixLnrrgxQgqLuU3p3JusS93LVuVFVF2BlPEXRa3UbhaNjI0fs5hmZGClphKqqMDZ0YHSlc6qOV1UrZuJEE47yuPz4Trhh7R7i8YkjOKM7g94GI1hMQtvwujWp21BF1PbGp3wGQrhf+UVtKoqlNZW6gKdHL6QS+PBQs6+YVJQYCCnGOiyGy3kwEz1sPdYDkFdpna1/ZnhG2Kr2Ei+s5UV0sfIFy+iz59vf8aA++c/h5MnAVAti9/q/Vssq4f+uk0MfuUNXLuayf/pQYKWwlQpRHm5Rm+Zl9y2RtwJ7OPsNzRRw8LEikRWZRl8PglZZswNUFJJTVZNxIvubonTp2Wysky6uyVMc6Qh8mHwQlU2UFSkgQXDP9/F7y9xcPPV1Zw8KdPRoSDMXBoyMpFNExjFC1WhYNoQihnkk65MDM1ksdpH2YxO6nPb+UjdhJKaAmkJWBGOjsdOL56IF3plBUp/gBJuUfOsg6C7NOpcE5mcfK9r8GeaF5EMjM+HpSgYs2ejlZSglZcn5sXsEC7XbXS/QWB6FmZGJvv3hftHIrwY9LHzTCGqczpex04kvx/Xhx9iZWXhPHkS58mT9gAyoObiO5jiFqGcPPqe+zz9L9Tiamokr+09cq8d53OVMGvjGvIOT8AKuGdexGbhbt2yN7hTp5pPna0oJDcDk0ajm1JiT8KeHnsaq98vyM42SU0dewJGokWhkP1vv19i8eIQL7/sJydnVErLshDBYHTqZFVtLcrORm5dOIooWMWi2gAAXq+gqUlFkksYWpKPhbCjKx0qJYt7qNko
aNqTShsL0EIWc+YY0H6V9WoTZloWVV9ajmOgn2DOsrGLY6IoS/iNGBMZaWwEWSa0ciUVQmBxHvHBZQIvvohwuajZKHDJPQiff0w0ZvR6EP1a1+0aynB6MjIMJ2EU5m6jKHehR2ZBdx+pZOHzxU/4XLsWKyeH4MaNCJ+P11/ycOpNk+vXHVy/7qCyUmPN/zqPkgWCoGclwT1yNNLmLS2jaZebvqtHyBOKPWjO7bbtS4SwLUrnz0fu7MTzgx9gWWD2DXIpr4gdS/4E9w7B+nV22nbYUElxBHEfacfzsX2OxqZ3H1SxkVWPx0TXiZZOJJXUk6DxeJGTY0a/VhRwuSa2SLxXXniLIVBTw7m/3UsGbcxYsoJgjR+vVwrbdkpotRsJVVbYvGjLpb1FUFzup7Zep2lHPm37BZrDxbzQacTNEPXTj6GXVVJdPh8hO7BGr2UTReQn4EVFg4plGEgBDXX7doIbN9qjdwwJa1viNTjJi8SKc5hbvDjOYW5cXpQvwFtuUlyVbT8uKGg/JCN8FjXrvJz8/gF6Lw2TNVOFYBAzIwPnqVP2YLsYVgBYQiIwu5CWGZs4Z6zDvR3Wr/OSDlhHT1FtHkT6fw4Cj44VmZkmQvBU2opCcjMwaRV7EsoyLF8eQlFGmr9GL/Kx0aIXXgjQ3S2Rl2cmbogRIjqhWGlvx/3ee6Dr5L36Khm11fZuPDzVr7bWngJrYUd3VNWinP3Ua52w3U1N/Xrefz+H2/0OCpxBmD6dLY7nsTQ36t+fZU3hhbE2mLI8NsrS3x9dea2MjLGRkbQ0HJcuYTmdyDduxA2YEU6ZoNeLdPMmZl5edHHbt2+Uk0XEZ1rW8fp+FW1wDdbUjDRIPeoozL1GjsZbtGO/z6hU8qjo2XiKgmZ4eMyET8uC5l0uCgoMrl+35wJcuOBg3X+S6O3NRGA341m6wZGfXeHo/6chdV3kuSmHKev5CCs3F33uXByXLuG+dAl93jyErmOZdoYreL2fISWTrZ5XSUsJR+aHJORw2la3LDjSHj2eh7W4Q/y1smlTINkzkNQTrwgv/H7BM8+ECAYFU6eadg11ggjm/fLC/d57uN95h5UFBQRfLUGrrcHV3BzHCpCxcnNB1/GcaKfCMvBqt9GM9ThdCpe73DicFsIUWFOm8F8v/2+sXFuAt30H0tBY2+RxI/J9fRPz4sIFHOfPQ0oKnDuHVlEBubn2GpzkxT3xIm5TEuswF/l5Il5ccrL+r2R6evwA1FQP4Tx0grb3sjn8vsTC291Um/vIn5tLaMkKnGfO4Lh61X4/DQMr3E4gD/TRa2WxZ/FLfDTUQEWKPsKLdV5Y50X69rejx/KoWJHsGUjqsam0VEMIEjZ/JVIkWqTrtp3chCdreIFX2tqwQjpcuY5hEN0IRNwjBBb2KBlb1cu7UW512qncQR/NWy00TSAsC4evn7L8a/z3rUsgEGB+xg1Ym0+9rz3OBtPKzIxfXFQVZc+eaKZCKysj2NAwkgZWVZT9++1aRctCCoUwMzPjBpKNdiWyHHJ0iiGMOB+1tyuULPSB5scKlxXhcCROOd9NFOZ+bOTuFDkKBGxQ5eSg7t6d0D0jdjHXyspGoNTfj2vzZpCkO9eYJgKNrqN8tI3G9ikcvhxAXjSXykq7j+T0aSe/8zsS//k/228Z2Haml2+6mZ1jTwpdWdCN6DIxJSlaNwwQWr4cx5FjhA6dwjE0iBUSOGbmgBB0d0vk5Fq4XBZ9/VLYyrAp7lDVpqaHvshHbpDGi54mldSTpNG8GB6eOIIpy0SbkGfONO/Mi5oaXD97B65cR7JAe/NN1ObmcVkhfD7WzDmPmepBGvJjDfg4dCiTK5cdFE7pp3jqRU6fSWH7mTnw/13EKMghJSWbutG8GL0uu1yoW7agtLYCE/DC78dx/TrG3LnRvwGw18/HwQtdh97e8B3u/dX9J3rN6KZoAne+B+bFeJuScXjR2urk5Eknf/VXgn/9r+23ftdWi1TD3lCJoSHOphXx
hWcvoJw6iW7YtdD+L3yBoNdLyne/R+jQKeThQQxAz8klPd0EX5gXOabNiz5Bbltj3KE+SlbA02krCsnNwKRUIuu4ux10oevw4YcqN244mDbN4Pnnx2lyCVttmZbgrD6PVBw4fvQeUzrakRxi3FSb5fFASngBTHGjelx87mUfv/q7bj54P5vLadlgaChuB2d9+Tg7/dTOHWuDGRudsVQV1/btI5Zwvb0jURZZRisvj84lEP39iHAJStSuNEGEhszMaI1oe7sSXeSLizVqVgvY7o7WP2IY0QX6nqMwH3yAevPm3TV3jYrOJPw9gQCeb30LyefDkmVCy5ZhZWfHRZ5G/71YVhRKkZDW3USrLAt7MmekwSzc3CUF/DjTFBQrSGA4REm1wVe+EuTNNzM4etTN995K4Y/+t8s07U3lna1TcIgQlmliqSrtV2dQVFWNJAAhMAoK0IqKkE+dwjr9CaYJPYvKuX1bIj3F4oW+/4kjLQC1tTQ2uvAPw/xzO6ikHW3ViPVtZKP4MBf5pJJ6GnS/vIg8r7/fbjh+9VU/497PWhbyzmZOh1mRfuIaGV96A7OwAK0k8QTYyM2yNGTfLIt0D6XFfhynT7H34yl8c08py7OuUlPQiaRk8E7HYpbN6KImfxQvRkXyRSCAFGNqMS4vMjNRWlowZs7EmDkzmvl8LLyI3JA7HKiGcXeNwHfiha7HbYpCS5faN/WRrMVD5EUiVghhH9NoXhRXGRQVafzt36aye7fC/NkC0ddH27mpuK7OgqAWbWb+yL+WF+Szdoe7JBGsq0NtaoLTnZgm3F5Yjhayy6QL9r7Dq5WCmQ1r8KRB406FKe2NBK4cIf+1YvT6JCseRMnNwCTUeNZxo3ekiQbPdHcL3nnHjWHYAYzyco28vFE7WctC3tGE2dJOzzMl7JvbwKLz21n6/ttYnSYsnDNivzX6d8gyQ971DN8cIiUvlSqXgXWri5S0nRwJvcbHPTNISzPITx2GDJkFLznwNfwxUjCAmeoZ8SEOBFDf34zAnhpppqXZNmOAmZUVX8eZmWlvBMI1i4HYtCaMHXwSWTzDY83bD80c5TMdXsTDnslqc3N834JhIPr6MDMyowue5fFgOeT43lWfD4aG7i5NHBudUVU7qxHe1EQHufT3I125gjQwgJmTg9TdjdA0GPV3WR4PlqIg3bqFmZ4OMJLqjRkWM1G0avyUeA5et5u1szoRZiG+5Vb0Md/+dj//9987ObXtJt/bcZ7zg1Nw5Bi8/Lse6so1Gg/O4f3WUkx5B6usQ3aMUAgQgtDKlaidn9A1ayk9WXNRci2y5whcaU6MqSq3gpJ9zqdZ+E0XfUtLcNbWxJUoWOH+g6SSSmpE98sLn0/Q3y/Yvt3F4KDg7FmZP/uzwbGlQmFeBJs7uDSznL7P/QmVP3qd9CtXcAgIvvlm9LqM/x3xrJBlmdVLb7I+5+/5XfHbdDOTjwPzyE+Hi925GDPzWPC8G9+6UbzQddQdOxA9vVjZWQTr6+3M8F3wIjrTJmxQga4/PF7ErLnC57OPdxxeRG7ImTEDce3anUuK7oIX0uXLOK5csR8vSYhg0HZvG80LlwtME9Hfj5WSAtwbL8ZlhWpRVWZv+OJ44Q1Gn/urzRIf/pcLzFFvkOruZKi0ilXLfXa/YZNK78866ZQXUSBMBNjvrariSFWirJBli4L5Ak04KS53YEwJN8AHJJR0hSv5pbhKqskUJFnxAEpuBiahxrOOi9V4g2eGhgShkL3RDgTsr8GKW6RB0HHMQ8io4Ly/DkWx6OuTGM6eQYZLYAmBvKOJG8/W0tLqGrHSWhcAIdjWmIrf7xn5vZJ90T2TfYUDNxfQp3noG7T40z8boHadjhCwtyOd4LCJt6QL3C7U9zez630NJc1JRc0wWn0dWnU1CIGZkmpHxidwaLDCf6Br8+boGPrg2rUjExBj0peOKwGMefPifabDryuGh0d6GMKLvXLwILuvLcRXuIS6Z68jaUFMl5utyibU
FImqKnuCoeXxQGoqIpwZmChNHBudkW7dArBHvff3I126hPPjj21nH10f+Xl6Ov6XXkLq74+rbbVfUIBp4jxyxIZATD/GnaJVlsW4KfHiYgisW4805KPU48Fy6NEGOtnn49//jsU39nchNIFDsnht2WGqy5dBVia1602mn9zNtAsdhF6Oieq3tWHJMsyYTvrzJWir6shta0Rtb6OvYAWitBKPGDnnjWerEeFzLfK3ThTlGW8aa1JJfRZ0v7zweOwSz8FBQUaG/e+bNyVmzjTjnYoMmxdBRzmbAxtY3/wRvmmFIBsgy8g7m+kq8eJyWTQ2uaK/w1sboLFpFCsQ7Ly2jML024DFgUApH1xcSWY2/MEXhqj1OhHCyd5DqWh9AWo26kgD/ThbDrL92nJU0UdxxRDBjRvRKuwmZTypd3b00XVEVxdKS4u9Xqrqg/EiUl5jWchnztAkvASkFOqeuZqQF5EsCQMDd1VSdFe8aG3FceECAEZBAWZODsGaGqTu7hFehEuisCz7f7qOumtXXCZ7Il5MzArNLq8KP380L+qL+zi9z8B03cByu6lM76Bv3nxqXsxAYNHg2M415xH6l6zC9/rqCVnhPNyGvuJZup6pwqP//9t787C40vvO93OW2qDYkUAIJJDU2tWSQCwCSYD23tzdcsc3zk0yycwkmdiZbrvtJONxMnnuk7HH48RpL+OOnVzbN5Mnkzh2d8d2u91qLYCWRhKL9g2QBJIAIbFTUNtZ7h+HKqqgWLR2C72f5+mnBZyq857t9z3v7/0tY/d8S/ZmXE6DdQmjXYWFVtwz4nR8DIlVOm48k3mDsrIMVqzQ6OuTSUkxyMoyJghBYWGAluzNNF1WuFMrU9y/j0yjnubiV3B8YRPOmmr6v/cL+hMucCL7T9hYGmRkRMJ835q1e0cqcCdY+x0akjh1KoPT9s0sXjRMp03njikx5FE4c85J5TYPAP4Rg9M/vYn92E22Lmvl4PlsGgedxPUNMXzJSelLSWPNSv7mPE5vP2WFnnAs6IS4SU3D+e672BsaMBIS0DMzkYaGxkq9BTWqGudQ17GAwuzrlP2OSvWxJBrqre+o2DSM5PejXrqEPDKC4Xbj37IFua/PqlxgOmg850YaTmRrQTdV9Wk0KhL5pdJYhQlVxXzuOfytrbFfvCOWeSPjS42UFMvTMzBg7b+3F+XKFXC5wG7Hv3kzgcJC9Ly8mDkDoSpAZkICckicIvIxpstJkCQmXRIPecLCCXkQnnSZhsn+W2sx7fFgmix0d6PHxdOnJZCggapKrCmUkdeuDRvkkKdGbWkhuGQJWkUFyRL4yitoPGnDe9FJr80ZI7RhZiXgInsGSBI8/7zvie4iKXjyuB+92LPHS3OzSiAgkZhokJZmTJg0eDwSLdmbuTQok3yiCoVGbm3bwJwvWOWAb/70NN63mumetxLP8m0kJVmTCf/Pa0htdTOwrhTPsDymFbbNFK67yrr4fi5UyfT1q6CYodY3mEEN49gZTl9ORb3Uy6bfymD/zVW8dXUDixK6WKuZyKqKkZZO9QEV9+kTbMm4gJGSElsvRr3sck8PanOz1RG3u3vmeqHrVBZ0Y9pU1KYmZI8HQ1XRVq8Gux1pyIM/wUHD+QTkXhcVlYpVRUmK0IvRl27T4cDv98cOEboHvdAXLUKfNw/fzp0YWVkTcgYiqwDJt28jBQLWpCJyJXsKvZheK4guhQrhVf+DFxcg3dYwbTYkrxffnERKn7HC0txusDsdZO1ZS3rlpim1QttWgReoO+vm2pBz+lC4abTC65WsQijFAZKTxaQghDgNH1PGJ62MZzJvkNMJf/zHHrq6rOoQqgrt7TIejxTu5jrqVOb8BRuSJHG9N54lpRu48dRWcocDePIrGEi9QGbvFXad/RamuYKsRINkqQH/uvUsubIfr+lCf7qMhAQTu0vGXLqYS5dh/lo3G7K8HD9u5/hxO9XVDior/VQWdGM/dpO6jgXUX50LkkTB+k6QnPzk
9lbq3zR59VUPh97TOXMoiE2NQ7l5k/ySAcy0tAnHL3k8VlypqqKePo3t5EnU5mbMuDirRXpcHHZXJoVZ16lcewvpRBe7An5UliBLT+Pcvy8chhMoKLAM8fAwRmIiqmmwPessmj2Xxjo3JxvtmHYb+Z/RqKgwo23NZIY0Rjm88SXw5LY2bGfOoHR0WIluGVZXZdPpRH/qKSSfL2a1iiihcEfE1zqdM05mliQoL/eHjTtY1R7kAau5TGQYlvPdd7HVN7Cvv5iTUhLrnk+ioszJgUPx/NOv5jL32yZFRQGeecYPZRMbu/grKhjeXIlnCNyWMxHPsEzLom24E8A7RWjDdHg8EoODEhcv2vB6rX2+9JJPGHjBE8W96oXbDV/+8lBYL4aHrQTN9PSxSkRut4lhwNnzdp7yxnEyuZjEeVvIG/HjKajkZq2NrJ7zZLQ2kNZxgd75K8l0mGSaDbRJRSgnDjLH7SAhoRBHnMzTr+TgH0nn7Q/SWb3GxO8PcOeOzDvvuJAk2Lq+32ogKS+j4WIqx/8+C0XdTG7eME0DK/n2P2Xw6udGrEaIH2rE1wVR5ylsmXfCWi0Ypxdh2+l0gq7jqKpC7uiw8pim0wtjMY7TGs7BNjAMtMWLrXLJIyOWQ8Y0MRPcbF3UjNJqo+78AhouSOjz55P/a2MhluGLlJIC3d0TL9Dd6kV7O0Z6ulXZJyUFIysrtl7EmlREJGLPRC9iakV5RBfncXoRWvVvHLRTtukW+X82n+oaJ4evVlD9psL8+TopKQa7d5eiKhO1wl9ZiRY08QzIYcdQd0El1wad04bCTUVoQuxwmBw+7KC7W2buXGPWNQ+7V8QpeEyZyhvkdMLChUaU1/TSJZWFC3XS0gySkiwjdfKkDZsNziubyczxMTfODIcR1b74OeY0VvF0w0/IOPk+8oJMAr+2BwUoNk9wK7cAR4UPVZUoLQ1w6pQN7DZ+4/82yc8f4uBBB++84+LiRZXKSj8kuKlce4v65jTrZXfRIjb9fgJHzqTB2zKHD6u0tKjkzh3i1lAapiSxPu0aphH7YTedTtQrV6xl09u3MdPSUK5fx5gzx/KU+/0U/U4upmGiedKw19djJiWxbVELwbwkpAYvRno6NDcjDQ+jXL9ueU2SkvB85jOgKGxCouHqsBU/ahhUFvWBlDyj6zNZ2bmojprJydZ3yzJ6Tg5abi62lhZsFy9i7t+Pf/v22NUqIpZ2fS+8YE2MdB3HBx9Y+QaRHrJJOHrUTl2dPfzebuoGb37+NsXzW6k0q8Kl/QKFhVYYVlIirq4BilfdYf2zaWBL4+nNEv/0rszt21Bfb2fjxgDp6RMbvWi6FDNEwRU3dc3mmSzpOp0m587ZaGpSSUgYC4ebrRUfBIJ7YaZ6UVtrp7lZ5fJlq5y102ltW1Hhp6HBxg3nJq4FTV6K84ef2TsFW7nu3cbi5v0Ut/wYe91epJx5+D65h7WBINrRU6gr12MqJqWlAQwD/vZvk0GS+I3fMFi/vp/qagdvvz2qF5utIhXbci7QcKEE7Hb0p5awYXUfTe+lc/iojZardnJzdW60S6gDc1mfccNaWTAnPvem0xn26JuA6fMh6fpEvfjNbPD6CLpycBw9ipngZnvWWct55rZCdEKhN+rVq+jZ2SBJeF57Dbmnh3LlBMcO5iB5vejZ2VRs02Yctn5XegEgy2hLlqDcumUVA6mqwl9ZOVEvxocBaZqViJ2UNBZiGxFiGosJWmHCt78ZR5l0lM3ZLVGlYAOFhUiY2BNsFHGBnavT6E5fTvmnVAZ+bvLuu1bVJsOAkpIA6enjT4SVp7hvvzNaLxKYUSjcVHrhdpvhicCNGwoJCQZxcabQi1HEZOAxZjpvUOTScCAg4fNJobBBVBVKSwMMDsps2BCgrCx6yWzHriADxZsY+q91mF2QeKmDuPpGkCSOmsWcC2wn+YDJ7t3WrHr9+iArVmjs3KnS0wNbt1oeEafTDBuQgxdyxgYnyxw8mQFAdrZB
Rwd0dshcPJtIipRHRV4Lm1+KI5iSbA14yGOVMx0doOTzoS1dipSVhXrhghXeI8voCxeOecpDpdY80bWrjYyMcH39QEEB2vz5xF2+jHL1GophENi0CSM1jar9Kqb9plUf326n6kSKNSFImMSTMskyb9SLfOQ2SUnWknVfn7WU6vNhxsdbp6e/H8nnmzyeM7QioWnY6+qQ79zBsW8fetZ8sNvCHrJIJ71pWkly5qCHuuNZHDriYMsWP6++6uE7X1eoOTcPSdOoTPaEQ4+QJEy3G23JEoqfkoj/dyX0DHtCl5DsbB1FsVaaJhO+yPtwYECivd2KS66s9NPRIcdsojdZTsx4fD6JVauCyLKJzyeHyyVOJwwidlTwpDETvQgEJAoLAxw5YsfrlaiqslZ2VRU2bQrQ1yfjcpl84hNjq287dgYYGJCotW8jrbmBVFUhu/Mman0jbVdVrmcVc31wK8/7/aGiMRP0onI06dTpNJFsKr6KSj7867NgmijXrqEtWkz95VTm55h03ILOToWLF1SS4iUqV9goL/MRTC20XqCDmlXBaNRmhrQCVUXq7sZx8CByfz+mYYzphcuFvb4+nE8QTjBOTcGM8KYHCguRBgYsD33bdSsxd/t29Kz51LQug6D1XTidVB+wVsQftF4o7e1gGBjz5qF2dIDDYYWJTqYXEVoR0kPbmTNIPh9GXDzayhVjk49xemEGNeoPKxw+YWfT5gCvvebh2992c7jKhi0jlbIFKrJnol5srPBgYodXPgkeDxJQVuansdEWPvxHrReqahVU6e6WSUgw8HhkJEnoRYhZelgCGFsa7umRkWVYsEBnaEji3XedyDKoqklpqZ+srInNZlTFJLG2hi5DxpuVi7u/A7OlDX92Lu9ou6FFwu+HDRsCZGZaHh/LkCQA1oNeWTlWfaDmfZPTzWkUPD3Atpx69ppxNDRksS5fQ1VNDN3kZnPQSsiKt7HixRz8u3ORIZzYZY9XKPzPa8ZqPLvdVsOsefMwsrIwTdPy6o8vOZqUhLZ0aXTXxMpK5I4OKxHr3DmOnk3jhLKR1ak3KdatOt1v/8yF3fUUr+zuwY+D0+90Yq+7SWX+HQI7x3lSNA3H3r1WSbtRz3ysJLZQS3ckCd/zz+PftSu81Oo4cACuXgXAGA0Hmjb+f2AAuacH02bjyNUFjHSlsiPzDASDUVUf0HUCvSPslPchBwNsNPO4Nr+ca9dU3ngjAVM2KF/dTFHWTSAi9CgpKeo43C4njE4GkpKsuMtQfkpS0uTdTu12k85OmevXrQYFcXEmug4NDdbSc2FhIDyx1LSJoW0hQzzeILvdJklJJqtXa+GcAZhaGGY60RAIniQi9cLhgIwMY4JebN48US9UFRTZJPN0FXYH9KbkMa/7JmZLGwFtMQfUnfTXK4x4ZT71KS9OJ9PrxX6F023zKFg1wLacC+w1XdS3ZdHerqDrcPOmguT1YvN4eXpjN/5nn7EqBplw9JsXcBpeStf0Eti5Y0wrvF7MxET0RYswFi7ENIywXqDrViWbUd3wl5eDoox500e1wnH0KASDHO5cRn3PYlan3LD04nA8b7duxuEKsOdTAQIBfzhH7oHrxcAA9tpaJK83Okx0Gr0Ia8VoWM+RS5n4/VCpn8P34ouT6kUpc5Azc7jcvILXX08mO1tnc3mQQnqRDS16DOP0IsHpBI+lF8nJVjjpg9QLsF7SdZ2onJj+finsGIo87cnJJnPnWisCQi+imYWH9OQw3Ww1tDTc3y+RkmJnZEQKG1uXy+TYMTuDg1YDj6gbfLQHQfylRi4uLSLgl3B3tZLWfRPThJXB/fzK3EUgKHHokIM9e6wHyuORSEkZ239EKCD2ZCcFy3rZlnMZW3MTu54CxVzGmQvF6LqEpGvkpAzS53Oh6xJf+04mZ1qDvPbbN63ErvYF2HQvgf0GZbsZW/7s78dISbHCY5KTMTMzw0Z30q6Jox4SuacH9fJlAqtWM5ySxbnbSznbt5aR00lc
bFLp6pKZOxd8tgQq13Vhr7uJPdGO7PNaS8aKAqHEqYEB7CdOhEOKwp75CMMcWv5VW1qiOiiHtvHv2kVgwwak4WH0eVljZViJDsOPvAHstbWozc3gGcYbP4/jchmGkk2JagtXfVj/tBfbyZOcPp+AUxlhyysJaF6dnAwvrbfire9WZD7zxlyUkTg8zvyojpSaBh5ScGNG7hqPR2LbNn/MDr7hezPeWjKQJKsqhc9rnaLubhm/Xwqvevf3y+F7ORTa1tSksnSphtttNSSLZZBjhT/0909MloycSEyWTCkQzFZm4tm8H71Ib6jGd/M01+dvwO4wwdmCs7ODZE0mJ3CAK45dnD9v4913TV566e71YucSONdWBspyFMVkfoafoU4/GipvfrCGM6qdlQUq8oiHM5dTyV8+wIF6BVU12LjTPvaC6nRixscj9/dH68Xo6kDYMx9RijSsFc3N1srA4BCeeU9xtnM5Z/ueHtOLOwpz5zrwawaV+V3Yjz0kvUhLw797txUm+swzyD096HMzptaLSK0wDAxFxetK4YSxhoCqU+b1UV2bNIle9OILKlTXr2De6OL+q58bQTbW4PfkWaGqo3qhoY7eZ8nh+yPy3tu1K3ao2r3oxcCAxIkT1gqW3W6FAHk81r+PH7ePVUGMuFeFXkyOmAw8psx0tqqqkJ4+9hA6nZbXu6fHeqoiE8XCN7gkYTocaAXrydeDSCdOYv7hiwwrIJ+/yMaTx/F5JdpXb8U04cYNmfPnbQQCEhkZEsXFE1dFSzfrmCUrCXYkWUYqKYnti1uQ3cv5X/9fBr2DCqvTvJQmt9M2kMyJjiT+9/+2c+TwIjbN8WE3vQQkB3UXkiFBo6wsYHlB0tPxP/vsWCLTdCXmGIvPNFJSUNracAwPs31JCr4XX+Stg/P47ves7TIzdT75Sa/lsRpW2ZZzAZwOTIfb8swEAkgZGVBcPDFWdfzPo/WtMQxr4pKYOJbMFSEA9sZGjp5JxSd7KPvcSiSbGl3XebSsafg4AgECRUXYa2qoXNOJcfsKtWoFR3+0AGSZ/PwAleu6cQw0I/EUdYcWcfzHVt6GulRlwQIdvx/sdqg5Gk9FhVUbO9QUZvx99ulPT3/vhf6eeuYoLtlH1u9swu+XyJqn4f/ZEfquO7i1vIxVq4K0t1uen+RkA7fb6igZ6jC5dKlGUVEgXOpwMoMcGf4QOs12+1h86fiJRGWlf9r4U4FgtnA3ns171Qs5zkH2J58mzRck+VIjvk/uAWDO2YuUNhyHgMSNpK3o+r3phZSURNH8GzTfWExfn52UZHg2/zrXu1zcGkrk/7yTgvm2TMF6J7++9CKMBPjpqZUsTUmiZIc3qmFW6EV6vF5MqRXp6XD5MvYjR8DhYOuGFIIFcby1b+5Hoxejq+OOfftG9WJgSr0Ia0VJiZVjpyhsM89h3o7nWOcWjv2/86P1or8JSVpGXc0ijv9LGlcH5zAvm7BeHDzoYNs2wpMb0+mMeZ/F+l2s/hf3ohemaU0gVNVaFQiFs+k61NQ4Jn15F3oRGzEZeEy529lq5AMw3vsT6wYPlJaiBU2MqmPEFefjK6+wZsqbK1l4sJqljQrORTotLSo+n8TVqyolJQGGhydP4JRsKsb8+eGlWMPlwi/HYRiS9fK3JQ1tOIFgaxzzNR83O200Ndto71jDK8/1o8bbCOoKgYAe9nwYBsgRhpERLzgdBIqLJy2bFu6M2dODvnAhwbVrwTSp3DJCw8UROjsTQJLIy9Mtw+734f7WtyyjbLfj+f0/wFl3whINjwfpZjtmZgaBwsIxj1NEWbtQ/wJpZMTyAq1ZY1UNQgq/dANWXsSQB38gnYameLT3dMo/oY7VdV7rhb5+SLAa2hBxHKbTRbCokPK+fo5cWxDVNMd2+CzOQzXsnnOB+qz/gD4/m+Qztazt+yXXMwswyzdy9apKff1YLWm/z6TrtoKqWmED6Wk6I16ZoaHp7z2PR8I7Am7VR9LFBhI+1HA5d5JY
W0UGjbAsn6SlQbZsCbBlixUuEPr88eNWAmNzs7UUPH++EV7uvZta6g6HyZYtlvCMH6/PJ824o7dA8LhzL57Ne9ULs+oY/vX5+DaP6cXig9V0NSrY7kMvGPIQsGViKjZWrQqSk2Nws30pwTkac+ZBa72BIUF9owNF3YTp1zAybaxY5Qt/n2GMmsVRvbB/sA/ZZzX1CunFZFohjYwQXLPGmhhkZCCPjFC5eYiGc+6HoxdHjyIPDWHExxNYt85K9B2vF339SN09+Mii4VzClHphxI8eh9eLGR+PaZgEK8qp6O7haGt2lF7Ef/9fUM+eZVsF1LeUcUXLZWfnd1i/ROXM6j+ksdHOT3/qAmDbNiu0y+c1aW5R6euTcLnAM6oTDgfT3nv3qheaBk1NKh6PjNtt8MILVllpTZs+2Xj0VAu9iGCWHtbsZyYvR5Mx3vsT6wYfy+ivxG4zkD6QIpqPVVC4TSKrPYAsmSQkWqHuPT0yebnTjGWcB8Z5Qua/bdmL5Pfz5uXn6Ox0ofR289LwvzBsuHgz8LuMSA7eq0pi9aoAi+cPU15m1cIPVTkoLAxQtrIfhjwcvJxLfNtlSnrfx0xPi6qSENkfwL9jB/QP4EhOtqoI2ewc/kE7N+ocyIMaRkoKra0KVVUOti1qQx4cxIyP5+iVHPqOxLMzyYU8MIB54yZHjqdgT+il8A+3R4XXRNW3vnwZ0+lE9ngIPP00R67k4NdtVLoOECwpxkhKpvpECslVBs/2v4FtoITadz5Bw6UVoKrkr/WyK/AL5Covh9vy8KwppGIb4eM4cnM1jnN+GjvWcU13k5tnIElWy/aMk5B3E+TrnSzJaoXbbSzv+Akup4m5ejl9boNFizTsdnA4TKSaD/nVP8scS9pB122F9NQghX1VPLVGweV6Bi04tiQb694LVQq6nLuNJQY8dek4zwcaMFQ4taWYK0u24o6zcg4i77v+fuse27jRSvLauDEw5fLueMYb8VAYUaxnZbpkSoFgtnA/WgEfH72Q56bx8gY/W7b4qalx0Nrq4tIVF7aeLkoSztPpT+d0/xKOn3Dgdtt54dkRKtd1Ielujh6PC2tFaWkAachDVX0aDptOxfB7Vux+2pheTNCKIY/VLLOqyspTe5h6cfs2jgMH0PPyAPD8/u9T97+v4w8qY3oR7+boD9txN8Nzgb9BlXZz/N9Kabi0fHK9GG0maTicHP1fTTh0L35lPpEJIAf22djV7yepo5ObB9vQF/0Wrxx4gzXeKkaGt+AdtvoUgX2saES1pRdH4ndw7ryN1SsDbBrZT/JcA9fzn8DQzdHeArGv9b3qhccjsXSpFs4z8/mkcPWrmby8C72IRkwGHlNmesNP9x2T3eCRD8rt29bS3Ny5VriGZ1gmOdlk0fUjDLQZtCzaRlFRgOIiP0uvHcFzwk+gtHTKHQfdyXg8EkWFfuJ8HuyNjVyVVN5PqyT98hHSg63ctq+lYtFVzo4sYWhI5cIxP3lr2vjwW72UvraSEyfcHDni4Pp1Bd9gCnH7VU62Bimy9bGvfQuuDh+FG61l1Qkt1RWV6rNZOJJepHR1D1U1Lv6t1go9em1rHcG1a/np3jm8/bYLns3h5TvdSFdb8fvTaWjNxCx8ga0rr/Bh6zLqbmVSKF8Hrw8zJTl8mOEl5rQ0CAaRh4etUnb+AH4f1HUvRHm7lfLe9znQ9TSNvvWUpGahJyxgh62BEx0bQFHQli+nsqAbucqLEe/G79FpPC6B4qCiAqrOZNEoL0A1fDRrbiRZIj/fWh5+6604Oto/wS7TztbOH/OJwTdISTHoyM5mb/qnuezcxiKPTlycyfbtfmyqycC/Bsi6fo4NWdB4yc0ziYdxyEGeeu5p9v7KxFFzlEyXndzfKSIp0SCqQZhp1YUO3ZvOZ8u49vl6NE1CVU2WvVHGQn8g5j0bMsIjI1K4BO5M79fIz49/6XkQz4pA8LjyoO7/j1ovVq4xiI/3oyhWsnFDnUrXFS83vC5IczCP
HlrNhei6ytAQXDnczdGRFirz71B36ZMcOjqqFT4JKZjC+QYbha6LHBici5y6ks1xrUgeD0cvzJ2oFacyrLCb0YnBw9QLMz4eSdeRgkFMmw28Pvy6jbrOnLBeHGxbRsO1DDbkLUO/U8/2xAvUNyyySo5OqhdWGGhVlYNGcyM2fARNJwWFGps2+XnvPSc/eSue2qz/yvOynWUNVfxJ0/MkJxtcWFHJP2f9CbRKLFqkkZ8fYOtWP5gmA10B5rWdY5FNRumN44ULh1kwz49/YB1Hf2Wy4OJBdJuD9Z/fYPUV4MHphdtthu+7yInGTF7ehV5E84Qc5uxgfBLYdDf8/ZTDinxQkpMtL3PUQ2OaqJqfjTSywq4h7Swn/kg1tkuXkJYvnyTjdWxckTF527dVcOaMjYWddfyBeYIrusEJqYiztiK2LBwkO8/Pv70j0d3noHUgFekS1P5PF8dP23E6DQIBiTe/l0i2uY1f23QZ/epcGi8mUrBawoh3w5Qt1a36zY40hZUL77Bqzi22Ftyhyu3kqaesSkcuVcO3dStVZ+Zhi1cpWO2h4XQqjSdXEnfzJoVZV6yKEQnrwqXgjHh3OIxH8nrxbyxF9nnD56WyoBsa4KeHCvnn/gzyUnop+MQwFfIg+/9lNReulWCmO63tAwGqGtLZ5XTx4akkTIed9etNGhvtNDTYaW1VWLEiyLp1Ck/7rWXxkyft4TKymfNM/MsqGT5wHLfUhqZBSlkOCQWbKbEHWbEiaMXlSoAk4fhEOR0HnWRdqWPxYCtLAje4OL+SztUVON/ax/ybddzMLiSh/ijxdv9Y+/fRxHPT4YDSUpKTDILv1qBpVlJXICCh7K8h+fny8L0x/h69HyM81eefBM+OQBDJ+Gdruvv/sdGL7X6O1KjYrl0hx+ZgQIrjVr+LnuA8CsqC9PTDjTaJSzeT+LG8irprPTQrVlx4e7vCm2+6yUrz8n/lB9GklZw8Dvl9XozlLuvleVKtCGAqKlLKNHohB/CXlXHg2lLsDibXC9fqcNOvyLBPyeMhsGw5RvZ8K6QnM4PK/Cbw+8f0IqmbwkVdVC65yv5LGzjfkm3Z3bvUi0DAmgjs3+9gcFAiGAR3Inyw4Qus7zlIUpLVfyL1659hw9Egkgx5eVq4oSlYetH2vpNF509Q5r1GbtdNBhZsoa+wHOeP9jG/w9ILueZDHEIvPraIycBjwt2Wt5rJ9lMZ//EPCozfdqx9eGpjAzQ1AGBUVODPz5+8iDDRXqShIYn397po0nbz7+bVA9Dfr3LIeAWQuCQZvFzgQ1W9/PgfFC7dSMTQ4VZ/AtdvqHz60yNomgSmTMeFJOrb5oEjm3XPplL2rIJksw5supbqpZt1NpZkwpAbf8I6AkdUNE1izZogJaUGNZ+Lo/F6PBsWdLDlj3XqL4Mky7B0KWW/IeF3r0OCiIQuF2WfWxleYq5qSMeRpLG1+6eg68h+H6W/n8u/nkumo8cFpPLqMyp7Dz7Pzwb6kKU+PpV8jIqdzbyfspDG027Mp19AWuml8WIS6xUrb6K1VaGjXWbnTp2iorGwmpMn7UgSLF6ss3qVnxW/+A65nqMMmjKmCd5j13lW+SsChYW8W7c16j5xuiSe+WYx6lc+5FZnNvotjZxMP+Y3vom320lNcgks3kIhe7E3NgLgr6jAUV2NvbGRQH4+GAaOmhqrItWyYloWbWPJ1QMsu3QczW3ir6iI2Yzsfo3wk2jEBYLxzGq9eN9J0xmd4vk3KVjiQjOyqevIw4izsWSZl5fXDPN334vnRpOTphsmkmlyeSCO5Su0cHWkjk4nddICZF2jYHM3Zf9xEYHkJCRVnVYrYAq9WOWj3Lef6qNuGm87Wb/ZxpZd0kS9cK7GWVWF5I0R9jnkoSr+kyScPs6mJTdx1NQQKCigdL3Ev55zj+pFOq++lsrBb8JPBpYgDw3yybJWtm5qZ69zEY2nXTH14to1hZs3ZF58UQsnFw8MWOd3zhyD7GydOWkaL777
KmlSL719qRgG3Pn8d6l8fiUnLyfS0Ls56h5xuiSe+3Yxtz97HFnKQe7TWJrnpfX/+RbqbaEXjwtiMvCYcLdJYNNtP9MmHZGfmbC/0fbhoQccwNy5E3p6Jh3X+Oz9uDgTd7zBs+p+wGoms2KlxncW/4IfdTzHiNfqkfDqa8NAPP/yL8kcupaGw2GyYIEWGga5eQadnSm0qC5yF0uUf2I4Sl8kyZoQhFuqGwaV67rAP9ZKvbYuDr8/nooKPxUVfkwT3n7bxS/eUsgLFrBh0x22LNH45t/G09qqkJurh5unGQbEG0NUjHjxqW7qLyShvW9S/ryN6lMZNJ62U7C0F9MwsV29ijQwyIGmReQUL0G/JnHzThqvvm6QlznMqpw+VmwaZEffaXT/Cnbp78HaF3DEyZTH1ZFpN/juW8/ReUvFNExKBvYx8p7CPrWYHTv8HDniCB+3IpvkXtzP2lv7UYxeTi19Gc/Sp1n+y+/i+sV5AgHwPlWJO5Gx+yTJIOlYNfYUq4Z/IC8HpfUKF/xLmDvX5J8St7M6qPOevosX1oKrsTF8DwTy88OGOxB0Erc+n6VlZSTdDpLxbBnasaDlCZKkqMpBXu/dl2x7EhrBCAT3wqzWC7fJ+iIT86SdxktJ/NqWNpY6MrjaZmIYEooCv/8HHv7n/0zk1h0Xfe2pozbCJDvbWrW4dUvhirqM3JwRyr6QCbaxg5mJVqCqMfXinX+x896dzeRkeClcf4XS383lzb9PitKLA40ZSB4PiZdSKV03MCHss/pUBo0nDUpMG4Y7AcexWqTePvbfepqc4tywXrz2RR+L9X5WLh7m6YFjbM0bwCCLinIfKAoOR7RedHSq9PVK7GIvQ7+Q0SrWY7NFh2cWF/p4/sxfkdbdSCA5lbr1r5J9+xS5x36GeuMQeuGruOMNPMPy2D1imqTVV5OxRiMQAFXJIXipjRF9CdnZJv8ct52VQi8+9ohT8phwt0lg023/QGrnji7zhTAMGH77A7TSfFTbRE9PpKAoisnatQHmZ+kkHavGpjVyekUR3Xu2UXzxDZZc/DFrXw7yPrtxOkxch6rZ4XDzz9JzpKUZuN0GJSUBjhxxsHmznzVrgrS2KnR2usCmhztnRnZTrK52hAeqXLnC0e9cZ6e8D33ZUmt52P4CjaetKgkh71Bnp8K8TBum3U7lsjYOXF1GTX0ihinx8stekpOdfP3r8fT0yLz0gkz5Uy62LrzMqasl/Nu+NBqarHOanx+gfJOE9DMJaWCQff3F1N/OYMPLg7z+JzZefdVOZ6cCZhzff+4iSl8vkteBkZ6O0tNN5TNdkJaKVOVnTstpFl5ywfKtbA3uJVevo+FOMc31NjRNornZFvZkVVc7aPpZIqmLtpE334ejKYhyqRF/SgbB3PlQVohrWBq7T+KNKI+Nv7ycuG9+m+HmW3gHbTQ7nyI75SBp5Zvw+mS6N1aSc3pM3KM9OKPJhAejkwlVm9VyfnzloJkkNoYMeqjk4WxvBCMQ3AuzVi/mWw3PTBNqnetZu8LH1qRrbNN+he+PKqg55MRuM0g9U0WFlEzVnEoSE60ylNevq+TlRWqFClI8VYflu9IK0+3Gt30Hfr8jKpwIoKPbSbZkg+AQFSWDvH86k0OHHBgGvPyyl+vXXfzt37qRzHi2Zc2ndHE7piMTp0ulsVEdW4ko9lIZuIPS04Npwv5bT9NwOZUNL47pRUeHE9lM4Xvl72BraSa4oARpaAilo52KTfORVCVKL4KLt7Jd289m2zGOtJXw/q8cPP+Cf8KqTtyteHyfeAHfyjVkv92AFjAJJKZhW7qQOxu24R2Wo8K/wnqxIR9vWTldr71JwpkufKaNq1lPsX7ufpIqtjDiFXrxcUacjseEu42Pm277+60wEWUE8vMZ3lTBhW8eJfPHJ7h1wsvKz5VNMPAhQYmLM6mttTM0ZM3yn3c7oCCfxRWbWGT60VkBFy9iu3SRyj+sxFlTja2hgYbGLbjjDUxTYqDfajiy
scSHqsKpkzb27LFi8i9esnGy0Rb28ABRcZ+V67o4+p3r1F+Zg2osZMtKFdnnpXJjNygZUbGV8+bp5OYa6OZiPrAl02RzkjlXJ2jIvPOOi2XLZNrawOuV0UwF3/YdHNpr4luYjHZdxjT18DgkScX3/PPW9WmaS4Gtn9JdWdTUOMhdEEQKBFjTW8vZJjfLf6sIszYR+/HjKDdvIt++zfBrr+GvqKDxXSclHGP3rQ+tUKHsQm5lbsWdYJKSYkQtaVdU+KmmmKv2QjI3+ln8jTcIBMC+bAFDX3wdZJkdmp+BAckqdT3aYyKQn89wWTnmLw8xPKBz2LGV2owy4hSdDcEjmMeDDJZUkN5QFXWNHdXV9K+rnDaZ0OOZvHLQZES+HBiGJdqRHYrFcq9AYDGb9SL0Ile6Wcc0VOQaDfvJRsvelVfgqKridO05nJSSlKjT1yeDJJGaoiFJJqdO2dnz8giApRUn7UiYVFRaYTMTtOK7N6m/Ng91JJsty2Ukrxd52ENFhXWSovQiyyBnwRz0YBLfbFkMgSAlRT6ar9h5+20XPT0SLS0qc+boPPWfctnrzaBRSmb9Ko3GxnAoPRXbNAL6DqSBAWxJyTgveClY1kvprsywXqzuPIxDCbA38RV2Fu5HHhrCceAAaksLwfXr8e/YEaUXu7o/pG9IpmlJIUNLKkhOCYYnQJGrOsO/93uESgUtPn1qVC82MPwnX2SHHgi/XFv3Ctgj9GL4HUsvTru2cjZhI/NTdDaZh+lu0LmzXujFxxkxGXiMuNv4tqm2v++M+YiXRn9FBZ4BmZZF20hwx+EdNMIPcSQhQenutmoah5b7ejaWkZwU0XmwshIA+8mTJL7xN8jXb3D41jJ+HtjFzl1+lizyM/CTI0iXrpCUlMf8PJk4NUDO2jLSG6vZtc7O6eMm2lkH+uZCVNUqmRl6ScbvZKe8D9VYiGuoGylow0xMhAQ3FRX+sGHv7FT47Gc9VFb6qd6vcOqnfdjMANxJw5Y1jxvtNpqbJTRNYvnyIFevqrzxnRRME1S7SW6uHja21dUO6wXd6cT30ksUejzocW5qjsbTWK9SbNbyubIWrh3rx3fEwyVZZvlvbESprcOvOVCGvDA4RPWpDM5m7eDfBY6Tm6vT2qqwd8F21i7XeOYZHzZbdC5eeCKCiaO6BlkOVZKTcNTUjMbxjnVytLwmpWCa7NvvJOmsm+ttZfxb8i4GhhTmpikU5I5QsFHCbfsA1+nG8D0QEvt0HVzOXXg8MsnJBroOt29bLehDLxGRS9MpKUY42Xmy+1DToL3dWppOSrJK1U1IUhQIBGFmq15EvshJ8lgugr2xEee//YyuDpMPzd+gddk2fnfXMIGfV6Odv8rVnuV4R7awSTvIRt8Qhi6xfY2DA1oF2ecPENRU5K0bo7VCd1O5rgvlyhVcnm7sp1usbsFud9i2xtKLqv0qP3uzFwyDZTnXeWrxUn7xbhwjXhmbTSc52eDn77pZsMDF+oJA6BRx44aCpjG6sg1mahp7XS/hXD9CaYU8phfGh1Qu/xW3Lwzw4T6TvS+9yE7P/yE4rOHXHchDI5ZenM6M0AuNa9dU/m7edoryg2zaFGBSJAlHdXWEXsjWqk5FBW73+JCxMb1IbHFT37eRn9l24pBkypYp7Fk1SFa8RJwyvV4kJlrXeHwpUqEXDx8xGZilzLT9/P3MjkONZjwDMqpqYpgSTXk7Mcwh3G5/zLHs2GF5oZOT7XhHwDUaAxp2iYyWtfFXVmI/edLquqhrOJUg/2HBr3h+o87Nfz2J7g7SlOYicKqO9Itt+H0S/ZdP47cFyV0CKU0mVzOK2Pu+nV27rdrS4a/3etGXLWXLShUpoBIoLsbIzrZKyFU7kCRQZYN56QHQdewffsjzp2vJMdz0rizBNEzuHL/Iqv4LXHCXUOvYTHq6gWFYDpW2Nis+tKAgQEW5j+oaJ/X1djweid27vNjsY6VVVdVk3dJ+5ja0cqY1lfh0BTUnh8zWelw/
qKXlaB+3nItoVwt44eQ5Flw8yzxVsiYamCyhhd+69Q2ulv0RNpt1vsfn4lkTgWivnPl+Dcn1VhJfpGcm5DUByyvnW7GJk/02nnLoDA2ZlJUp5D9fitMlIX/4Ydiwh+KBAWSHgx0VYx6k/futJffIJpuhl4uBAYnaWjuHDjmmTFzct8+BxyPR1KSydKmG2211hvT5RAyoQHC/PHZ6AdF6UVGBvaEBSddI6O8iZ7VO3vNenj3/Br09lzk/dznzhmq5dDyI5DvJ7X89SlxuGhc3v8KWVe/Tceosp/o20KPZ2bEzgKKM2lFFIVhSTEXf+5gpy9D6+giMtkwOhRON1wuA3cZ7LEhvZb/rOS62JnHujIOnb+8jPs1FbdwmenoUMjIszQCr4MP6dX7Wr5d4+22XVaJ09PgaTtpZvlxFl3w4HCG9aOOErZSclGMUBw6j/OI4170GHX1Pc+vCEtZ1XGVhUgOO+N08px4gN1dDbW1liarySff79MZVTp6zHWMVZ8Z6sXITN/tsFCoafr9MfoGCfUc5qm16vQhpgWwt5IQ7CofuPaEXDxdxSmYhd1tJ4r72s98ZfugWL9asspkVY/uLNZa0NJMXk6rw9QeQtpdbdeojyowFNm4ciy2VJPTcXPKXGOQHDqP+pI3VnZ0MFG3B96d/wJ0vfI/MW9cZ8irMuXSN4dT59LgXcChYxiVlO0adRElJgLT00e7uB2uJMzxWLWefDzMxEVtLC0Z7Bx8EKmlstLN2tZfXFrzLhxfSafzpfLKWBim5epWink4CQ7dJVrPo7b+IrsNVcy3xcQaXL1tGR5IYLftqstNehVzjZ1NZBfX1Ni6cV1h46ShPF8rs81bi8VjeirJimVuN8SQwiEd2k/sfy8n5xzP09yvcjFvK8Y3/iVvDSWy58x4Fl94CwPvyyyBJuFpbWR44y0Lf+/jNrbGrckSG/myqsJoDBXaxBJX1qow7IXbHxtDvEpNMFi+2mrv8zu/YR42/Je7jlyFChl7FenHo75cIBKSxZd8Ir56qgqJAIDB1LHJk4tjSpRpFRWOdiZ1O4eERCO6HWaMXo1oRD3wq+H/QGo+itrUxLwtsO5czOCSR870fM8fbzpCuoOsm8WcaaD0rcVwqoU3dyhKPFp6IGAdriZO8aBWbMdPTkEZGUG7dwnbxIv5Nm8PhRFF68VY2EvPZJctskBpYl3CD3+VP2TBwgDXaCbriSxjul0AKR+Jw8aLKbudB1kue8Avy2285afv7IyTNtUPOJgIB2LvXwcaNAYw8mZuN8SRIQ9zMLqRQPYmk6DQ1qbxX8Kc4g0Mky8fIrTvOTvc5CGpgU9FzczFVlQ3BYwQDQfxmxUPRi+SUMb341KdmrhchLQiF8oSaiIWYiV6MXxEQejFzxGRgFvJAkr3uYj+qCh6PzGjSf9RDPDBgvcilpxuMjIxVq1E1P6lNjQRGy4aFPRHr1+OoqsJ+8mT0cmJDA8r1NvSFCwGIU4M8/bO/4UPVxh3nfK7FLWKOeYgmfTFJHSbvqbtYoBssvnEY16F+tBfK2bfPwZwGnZUn3yWtfAm+3/5N7A0N2E+fJpCfj8NusHZtAPqGuNgYJCVhiIL57fStLsG7Xsf11ls4GuqZ2+pgUF3Ar1I/zbXcXci9OoZhHfPrrw9RU+Ogod7G2TqDDcEG8EgsytvF8rYDJF2oZ2DBOgYMuHTZFo5njM/fCUPDKElxFJ45AFhLozaHRNqVkwwu3YrjxQqCPRdwHD6M6+230fPy8O7ZY9XwvnQJ0+WatHlPyBB7BmTr3kgwaVm0jQVrAiSrZswQgNDvXnjBF/aoOJ1uPJ6x79V0Kfpz48Rluljj6f4eK3EsZNgFAsH9M+v0oqoK93e/C4C+cCGBggJST54kMWhyx9fOHed8rroW4TZMtC5rPFdLt5J54UNyAh6cL5Sy74OQVvyMtEsX8f6n38f5/vtIgKRp1mqrw5yoF1k3Uc1UApXl2BoauHm4g+dvfI9hM55TacU0
p+7CrRukpBisWhVkxQqNk402ZNWPvbERCZPKikpG3q5hYbCOFt8GFuUFcTrh8GE7vb2WU4T8nTDoYUH7CRyKFUbldJosu1nN+ezt9Bdtw3Rq2H/49xAMEiwqIrBhA/4tW3B/5zuoLS3hcNxYPAy9sFZ85GjPfIRezCQvZaptJlsREHoxM8QpmoXcd7IX0y8ba5r1n8NhxfG53VZzkvh4wvvTNKitHXuRKyoKhEOCIuM8x5cZs9fWRi8nlpdjO33acg3IMnpuLkprK+TmsiWrif41uay6fZnBC5A+cI1+NY9dyXupG9yOS/IxsPcUKQp4tV24XCa6DtL5Jtw/+AFIUnhfpVKQvj6J6v0JZCe6CA56KFzrx1egMpxUia2xkaFLtzGR8M1fSMLucrJvQU+vxJw5Onv2eJHlsaRlpdkOIyrJF+spu3oKLQiyU8WdZuN8jY2mJpXERBPThJJNBorsJL3hQFRcZf4H1Tx95ChS/giDvkoGfu8PSb9wAeXWLevcvP66Fdvp8yH5/VM270GSou+NuOiui+NfACJ/F8uj4vPBu+86MU3CS/rT1R+/27/fS+KYQCCYOferFzMJMXpkejGKPm9eWC8AayXhZis5OTquVfNZ2t5EU7NKd2Ie/QMKCy4cQFEgp70OPghGa8XZSyR861uWVmzYENal0tJAbL3IV9AGhvgguJVk76/ITetHS5I5OL+SvitWzHtZmZ9164KUlgaQJOizV2KeOo3z7bdpf+s0C2+ptGVuQDUlAjXHOBS3hRs3FBISrGu1tVInsfY4ybca0DZYx55zoJpP1x5laJUPb0kFvsRy3P/2FkpHB0pbG/4vfAFHTQ2SphFcsmRqrYAHqhcPQium2yY02Yy1IiCYHnGaZiH3m+wVOcOWJHj+ed+oN2esoUxoKdduN9m61R/2BuTmuunvt77H47FCREpKAvT0yBQXB6I8AuNrToeMbNRyommGDZj3xRfxl5fj/va3UTo7Ubq6wDBItSuM5CzkprGCgW4db9BGkX6MHLfO4PYKrp7Q2XTqGKWdp9A0iWtlr5DJ8bAhDHdEBBISTJxuhYtLdhNvDHHD5cZXo7Lkyn42t7UiSZahW8xV0tVfUfrDl/hvf+Hn2jWVYFAKD7ui3IeTYcw6DfNKK3l5uUjXWpEW5tLvDbJqZRBFkUbL5llVDlQV5DhHVO1lT1ElKUDjBTfXvA6WXD3AxoV5IEkonZ0kv/oqem4ugYKCqON4EPfGVCKvaZZxb2iwk5hosGSJNqlHcbpY46n+Hpk4lpZmkJQklnkFggfJ/ehFZFiPw2FSXBwIP8uPXC+wqtPYT57Eu2cP/spKHFVVuN5+O6wV+vx5JKWr6LcMFBna5hXhSzEpNk4QWLue9pENZNSdYOOdU+j61FoBk+jFIRtxNhvLb58myT2AqSq0o/LZp37J8O+8xPETPrxemUDA+p5QgQffyAr0n1QTkG3MW7GAgj1eOt45Q42vBDXBZOVKDY9HRpIgKRni0uzhiYCmS/QXVJKiw+UL8VyrGdWKBbkA96QVd3tvTKYXD1IrptomcuIiVgTuHnGqZin3k+wVWipuaVEZHJTRdetBC9X+LSoKRC0rK4pVccDptIxAZG1fl8vE67Ve5ELj0TTo74OED2uIN8acN47q6jEjFRFXaDoclgErLw9PDPybN2PGxSGNjKBeu4Yt3sbFl14jrbEGSbGxaP0wg5fs7K93AjtZ1FHHokU6WtAkd30Q+cyYIYzcr6paiUsdHTK6Hs+pkyrL2/aTXfNTAlmg/Nc/pP2CjZyan7L8yo9R9rl49T8XUHPIicNhjg1blhguK+f2T88z71IXclMXKSkGgRVLkHZtIekArFoVDE+2wIqNdxeVoipmVLdFQ9+NOR9Wth4g6XID/S9uwPbF10l+9VWUzk4A/K+/PiFkZzLjHLo3NG10n5O87E8VR+zxWBOfxESDwUFLoB5GhYb7rmIiEAim5V71Ynz5z74+q4KYJPHA9MLn
g65bErmXJ5alHK8XkRWLItHy8vDt3g2A6+c/x1jxFH2L85kTsDGwtgTfZZ3hoJMqpZzM1npsdli4IDilVoQoLAyg6zA8HE9Dg40Et0HCh4dZnN6HurOCqqQXSW08xAbtOE7ZRcEf5lNdE60XWhDOnLSRJmWT4mknu/M6xjtXyHp5D0nGFtLPmWRn61F6cXtlGe54A+5BK5CkCfrwsPRCaMXjgThdggm43ZaRGhy0Sn35fBLB4FgSqGnGTiCCiZ6igoIANtuY51vT4P1f2Rl55xCLbp/FtrmYVZ8rI/5IdVSr8khjG7lSYDoceNcV0J1fgTtRQpWtNuam3c6OYg1NkSAwgrG9nAVFCk+9F6Cy9hvYO24QmD+PhNut8LPWMc9RdfR+NV1i/34HdXV2DMNKBPbfSGDQtoa2RctYvXkTSyrBviKIdukidpcLWZGIbFUPoAVNht85hDai4UvPoic5D6d8hcCghv6rQ2zeWk5Pr1VRQlUneteAsIAODEhIMng0J/qyAjJ3leKoqUHPzbV2pqqoB2v4VWAXXp/VEKay0j9lk5WZvOxPFUccKvu2ZIkWtXr0MLjfKiYCgeDhEKv8Z0+P9e8HoRc+H3zjr+PJazpI78A58j+Tj7mzYoLdDhnf8UmqptOJ58U9dG/YGpXXZNrtLMsvxfv+cVJdHxD8fDntHQo7/mEv6YPX8AYU7EYAx+2ptSIyRn3xYo2WFhW/X2L5rXjiF29k9e4ySm0G5qYygjVBXE4nkjxOL0wT7b0aEs6fpueZPRiXG0hXruBo7+DOHYmyT/jZtDlAV5ccpRehlftNm/x3pRWOmpqxxOBR+/8w9UJoxeOBmAwIJqCqY96HkCGPrNGbnBw7gQhgaIiwp+joUTvt7QpZWTq7d1tx9B6PRP+AgmJz0ppZiC+rgoXDftRRT06o9XiIMW+FNa6RolL2fWDHWy2HjRIhMQiaNJ0zSL18mt4mlaWfLaWy9hsknjjEhaxKate9yq+1fRPH5Yvh7/eP26/HI9HfLyPL1opFRobByLxSBrOL6PJK5A5by+CBrZUEtlbimjMHurujJwKjVTOSWuJx9DsgJQ+bCleMxdyoc9DclkTr2QSWLbMSnEKes0jvWkqKgd0+tuRplUUrxB2nE3+0xkqeKyiwcgZqapA+bGSOaae3sBLPsExXlzzly/xMXvaniiMWXhiBQBBZ8jE52epVEloZeBB60dUl4xlWcCbbuUgRqctKWCiZE+x2iPFJqmG9qIrWC02XqPrAzpzzOv6bp8mWTRYD6ad+RnePzLEln6LEfoo15rnwd8fSivEJ0Tk5On6/RGppMVdHCOtFqB+CGUsvdIkLV+JpkYvgksRmm5X4fLZJ5do/XaW66XnWrtPQtLGVlsiVe8OAuLiZa4W9sdEae2AX7gTrcw9TL4RWPB7c12Wpra3lJz/5Ce3t7Xz1q19l8eLFMbc7deoUP/rRjzAMg23btvHSSy/dz24FjwCnE156yRcV9zn+YY41A08YLTvW1SXT2qoQDErcuKFQUhIgPd0yEsnJBnWZm8E0KUoNRieJjTPs470RHo+E1yeHvSDt7XI4NtAzbDWyWSZD6sUGEt44TrJ5g3NPl9P7yqsE/Aqdv/lZMk4exHQ6Y8aBhsYXqv88b56OomB53CNrXE8Rb+nxSHhHYIHTT0KKn6Rd69C2VXDpW0dx3mpE0QL09lgi4vWOec7GN9cpL/ePLqmbEfWR5eil8NHzpuqgnLXjGW0Vn5Fh3FcVn5AB7++XJj1U4YUR3A1CL2YnqgppaSa7d/sfuF5kZBi43QZ1lOOeq7Mnc9j64F3qRVycZV8HBiTS0gj/vrfQqqiTc+IY8XduYEs3OLXqUySUbqHeW8482/vEOx2TasVYjLqVEJ2ebjXE8nqjE25DY46FxyPRunATK/T9uM81kvriOprWVnL0Yi2rh+tYcPEgtzIqyM0bW2mJXLmXJNi40eqNELU/ObZWADhVOy7P2ITtYeuF0IqP
P/c1GcjJyeGLX/wif/d3fzfpNoZh8IMf/IA/+7M/Iy0tjS996Uts2LCB7Ozs+9m14BEw/gGeycMcMgrXrsns3+/g5k0FWbaqMoS8/Nu3+ykpsaoohJaDgQnGMpY3ImSUBgaspVkYq07gdpu44uBy7jZKm+ux24HFOTSv/Bxev2IZsQQY3mx5z93axHKYqgq7d0ePLzSWmXo1QuPwaE70FRvI2lMGEvQVVTLSrqDLDlLTrBhMa/JhTvCuuVxmeN/jBY6I5j3WmCS0bRWsqZDI8/jD47yfKj4h6urs0fsWCO4RoRezm4ehFz6fxGuveejpsUJkrG64o8xQLxwOa8XVGpOd3bv9Yy+3wzJ6fiX288fQc3KQdZPB1ZVh54+0vYIRScITjpWXJox9fDnN0FjuVi9GDBfm6gKUZ8vI0E2uLdsGTRKOODsp84yolZbIlXu324wKw43VHThSK0ITgx1atP0XevFkc1+TgZkY6JaWFjIzM8nIyACgtLSUuro6YdxnAVMlHKWkmOTm6ui6hKLEfqmdzlDG8kaEjFJ7u+VFDzUoCS1b7tjus7olLtbDickvOD6gu7QSd4L1c2Ss5GQlztLTo4XsbrwaYcO5sRB3vIFqswRk9zMB+ktKkGSJ+HjPBPFISor2rqmqlbAVS+AmHoM0QYzvp4pPaEwTO01OzUzKDD6K7xB8/BB68eQy1TP9sPWiuDhAX58cXnENa8UOP54hSG+oCmuFrEi8YN9L98aQXkhTjmOycpr3qxeqDb7wxWG6bpWQkWmiqtGrLj6fxPPP+yZ01B1vs/v7Jerqxo9fmjD2WD/HGqfQi9nLQz91vb29pKWlhX9OS0ujubn5Ye9W8JCZLqEoOdlKhO3vt6pLKApTxhzG+n6PR4rZQlxVYf58I/xd4WVL07QSkZsaCWwYa1jmamwkQ7GWePsHrO6EqgqDg2NhRjCzWtkzNTpjhjPak5Q+ByBUq9kMt1cPBMbOY2TVJU0DRTG5fdvKIwgd81Tn8kEZx+mWhsfzIDqZPqpuqIKPJ0IvZh8zeaYfpl4kJ5vhiUCkHVMVk4xT1dgj+rpMpheSBHfuhMKMzBn3VrgfvXA6YWFudFx+LL0IfXcsvZCk6c+j0AsBzGAy8Jd/+Zf0hwoBR/Drv/7rFBYWTrsD05x4Q0hTxFvv37+f/fv3A/C1r32N9PT0affxcUZV1cf+GGLR1weKIpGVBYODVjOZlJTo4/3N37QSxBJGPfKXL0sMD0NGBuTmuqc0or/8pbVtfDw891xsI/XpT499v6paO5HmzoWKChw7d5IgSfDKK0iJidhUB5o6h8xMuHlTor8P7nRbn21rs77P67X+/8lPmtHL0VOMKXS8mhY5lunPX+j7bt+GS5dgyxYYHh47j6G/ezzQ1GSds7g4a8UiPR0yMmKfy5meu5kSeY4hgaEhleTk9Jjf2dcHsiyRlDTWYCglJfqYpztHk91XHxWz9fl9WDxKvZhtWgGz836b6pl+VHoRSytgZnrR1iZx7izY7BI5OW5eeMFk717LNsPHSy8GBuDcOViyxPruhQvNSbVipufubnhQejHT8yP04sEx7WX/8z//8/vaQVpaGj09PeGfe3p6SJniam3fvp3t27eHf+7u7r6v/X/UpKenP/bHEAtNg0DAQUuL5cnx+/10d8c+3tC7QXHxmAcixvtCxPYSXV0O3G6Tri6J1lb/lF6hqO9atcoKpIy455RhHyePe2lpGcIwJeZlmhT1HaRp2I3fX0xvr0QwCNeuKQwOygwMBNizJ7r82WRjSk9P59at7rv2ToS+Ly7OxOezc+2aRlra2Hns6ZG4ds2JzWZy6pSLhQt1rlwxWbPGQ3q6Oem5jBxnR4fE6dMTuzDeiyeou9vywChKIro+FPMYfT6or3ePVvMwKC/3oOtj+5zJOdI00HUHHR3WdqHz8VHxcX9+s7KyPuohRPEo9WK2aQV8/O+3eyH0TN+4YXnYh4d9YbvwqPViwvfMQC9SU2AX
1Rg4aW/fyPnzfm7csHPhgvqx0ouWFictLSpNTSrBoIYsB7l5009xsTnpeQx9r8tlcu2aTEuLb0KI7KPWi7vx9gu9uDum0ouHvqCyePFiOjs7uX37NqmpqXz44Ye8+uqrD3u3gkdARF+wKKZrXjIdd7vcOOnAAEwT30CQ1MunWSbDhZxtLG45SMbNem47i9GCJgkJJseP2zl+3IbdDvHxJtu2+aOWg0MNcWKNKVacpqoypfEMHePIiERRUSDcuTOUBFZba6e5WcXrBVk2sdnM8KFNZZynSrCOnWA2s2XV0DFmZUFHR+zlZp9PYulSLbwfn08Kx9FOF9oUQpShe7IRejH7UFWrkeO77zoxTaiqckTZo8kaWX1c9GLljYOoPY1cnVtIfLJOWprBuXM2ams/Xnpx8aLK9esKbrfVuE2SrDyGqWyp221it5scO2YlWKek2Nm1Kzr06FHrxUy1AoRePEju69SdOHGCH/7whwwODvK1r32N3NxcvvzlL9Pb28v3v/99vvSlL6EoCv/+3/97vvKVr2AYBpWVleTk5Dyo8QseMSFjp+tWh8lQY5nQA/sgYvju5gGf6sU4VHPaub2c3ksqqRcb2NxcT16ezuDLBTyzvRif3+oe2dMj09amYJqg65azKHQsoZjR3bt9aNrEfUWKkd1uTSxGRqRwg5XQEvL4sUYeI4z9zeORCAQkSkoC3L4ts359EF2XSEkxiI83p01omyrB+m4MbSShYxwcZFLBdbvNsBCF/h3rHE0n2DN9CRA8Xgi9eLKItHc+n4QsM6EQwi9/OeaZvtd475nqxfRaYfWzkXZP1Iu+3ysgr6yEpGSrxv+iRRrXrk3Ui/ffd4TzHrZvn5i/APevF6FS0yFCerFpU4AjR+wsW6aRkGCye7dvykZioXO3caOVq5GebjAyEq0JH4Ve3O3kTujFg+G+JgNFRUUUFRVN+H1qaipf+tKXwj/n5+eTn59/P7sSfAyIfNG3262SbeMf2FATmbs1HuOZ6gGP9L6EjF2oc2+ktyTypbnyj8pI+Jvj2O1WMzHb8+XYJAmny5rAZGQYLFqkEwxaxjH04uzxjDV3Aav3wlRl13QdDh50TPgMxJ4kxZpAVVb6cbksIzl3rhGVFNffL9HTM7EyxvjxxEyw5t69aKFjdDispdhYgjuVKAsPjkDoxZPDZDYt0u54PFas+v1qBUyvF+MTbyNtKky0zSs/V4br62N64fxEOc7R92+324ypF1blHjuyDIZBuFdCrLHeq1643ROdQSGb7vVaE4KNGwMkJc1MK8ByGKWlWROB8ZrwUeiF0IqPBnGaBTNmvJdgy5YxAxV6YENNZO55yXYaNA327nXQ1ydjt1udkRMSxjr3pqUZY81mQmMdAmV/TVSSl6O6OlxvWVVh1y6rt4BpEp5QuN1mVHMX02TK8JbQi32sz8Dkk6Tx59XnkyYYQ6fT+u7jx63woeZmKCwMTHp+H4ahVVVISWHKmMypRFl4cASCJ4OZ2DS32yQ+Hrq6Ho5WgBWX/u67TkZGJK5cUSkpsTz7777rRJYJd/SNss1DWFWG7lIvxofLTlEn5Z71YjJP/fhz+yC0Yrq/Tcf96IXQikePmAwIZsx4L0HICEbysGf1AwMSJ05Y3hdNgzVrghM694b27XJZE4ElVw+QTENUuVF7YyNAlIFPS5voYR/f3GUmS5aTfSYUyy9JVlnRUKxsLO9LLGPo8Uj4/RIbNwbo7pbZuDEw5fkVhlYgEHwUzMSmqapVvaa11f9QtELTrIlAQ4Mdt9vqKt/TI+N0mmHb7PGMdYD3eCRcToP0hqoJ5UZnohdJSdbEoq/PKusZaho5FXerF5PlIow/tw9KK6b7m2D2ICYDghkz0xf9h2k8IisPyjJs2eLHZiOqc+/4GMu0uTKaNrElu+lwTO2+GT2Wigp/VDfiyRLeQjid1lLv+PMUSqLTNPjWt9wsXaqFk3tncl7dbis0q7t75mIjEAgEj5qPg1aEXvQTEw0G
B2XWrQuwbZs18aiqckQ5tSLHKp9whCcCd6MXqgrbtvnp6rK6JYc89NOdg7vVi1i9FMYTmowNDVkTiPh4oRWCqRGTAcFdMRPj7fMRNojj6y/fL8nJJoWFY81p0tMtgzi+c2/kWPXNpeimGVX+KGTopyJW3Ov4hKzJPhdLAEJJdA6HldisqkTFcs6kGUxoMhSjHLtAIBB8bJjpi/7D0ouQl33JEm1Ccm6siUporIHSUsvA3oNeROrDw9ILn2/6xmFTVXASCGIhbg3BA8Xng298Y6x28Be+4InZkCWW8ZuJF2X8iz+MeeqnFJ67CegcJTI+c2BAorlZweORoir0jD+uqTpERi6du90GmjZ56FGsqkyhqhHjKzg9aER7d4FA8Ch4mHoRq2Jb5KruR6kXsYpg3KteTFbBL1YFJ6EXgskQl07wQOnstLwYKSkGfX0yXV0yCxca4b9PZrimKkkay+sxvgrP+GpCD4KQMQ7V7DcMaGlRw8u1kUY5NJaeHpnmZitRbXwFh0hxeuEF35RLvbESxe61ssPdINq7CwSCR8Wj0IvJKrZNF2pzt8xULyLHYhjWIsT4EtAwc72YLKlY6IXgbhCXTfBAmTcP3G7LsLvdBhkZRtTfJzNck/1+KmMT+kxc3MRqQg/CIIWMcWTN/qVLNYqKJnb1DY0lPd2guZlwSbfxBjjSIxVqyBWLyRLwHnbJtXutKy0QCAR3y6PSi/Fe+8hqQo9aL8aPRZKY9IV9Jnox2Uu/0AvB3SAmA4IHitMJX/iCZ9IY0MkM12S/n8rYhD7T3S1jGGPdFqcySHe75Kyq0TX73W5zwkQgciyxOkTeC1OVBn2YxvZReJMEAoEAHp1eRG4f6uJuxeI/er2IDv+5/1WK6UqDCr0QzAQxGRA8cJxOopZ6I7nb+vdTGZvQZ3p6JK5diw+XkHvhBV/MfU8eW0k40SpU3eduay3fqxdmqnjLWIb8YcdnPgpvkkAgEIR4FHoRub2qmnzrW2N5Co9aL2L9fapV4vFjupuXfqEXgpkiLp3gkTOZ4Yr1+5kYVocDli+3KkYMD0vhxKzxTBaH//OfOzl2zE56usFTT2kMDEgoChPEZzoPy916Ye423vJRxWc+bG+SQCAQzJQHpReh7fv7JZYu/Wj14l5srNALwcNE/qgHIBBMR8jYTFdT+exZG1evqhw/bkfTxv6uaUzasGVgQOLsWRu9vTLnzqloGtTW2jl40MG+fY6o73nQRIpNKNn4brYfGJDo75ce6hgFAoHgceJ+9CKkFaHKPUIvBE8KYmVA8NijqrBxo9V7ID3dYGRk8oSy8fGZpmk1L1u4UMfvh4KCAOfO2R9JQtTdxltGbu9wmJOWMBUIBAJBbCbTC7fbnOBJH7/KIPRCMFsRt4NgVpCUZJKWZhn2SEM5MCDR0zNm9Mc3bBnfxGzBAoMrV8YMrtNpTug4HBmHeT/cbbxl5PaaBocOOUQVB4FAILhLYunFeK0I2VShF4InATEZEMwKYhnK0BJuc7NKczMUFQVilm4b37049D2xGsJA9ErDpz99/+O+G6McWTdbVHEQCASCu2e8XsD0WhH6nNALwWxETAYEs4bxhtLjsTr2lpQE6OmRKS4OTFoJKPJzkYlm4xPIgKjfDQ099MOKiajiIBAIBPdOpN3v75+ZVoz/XOTPQi8EjzPilhDMWkIxk16vRFqacdfLopPFaEb+LiEB+vsfwuBngKjiIBAIBPfP/WpF5HcIvRA8jojJgGDWcr/ekMk+H/27hAc/cIFAIBA8Mh6E51zoheBxRkwGBLOa+/WGTFbLWnhYBAKBYPbwIOy60AvB44roMyAQCAQCgUAgEDyhiMmAQCAQCAQCgUDwhCImAwKBQCAQCAQCwROKmAwIBAKBQCAQCARPKGIyIBAIBAKBQCAQPKGIyYBAIBAIBAKBQPCEIiYDAoFAIBAIBALBE4qYDAgEAoFAIBAIBE8oYjIgEAgEAoFAIBA8oUimaYrWeAKBQCAQCAQC
wROIWBl4yPyX//JfPuohPFLE8c5uxPEKBA+PJ+1+E8c7uxHH+/ggJgMCgUAgEAgEAsETipgMCAQCgUAgEAgETyhiMvCQ2b59+0c9hEeKON7ZjThegeDh8aTdb+J4ZzfieB8fRAKxQCAQCAQCgUDwhCJWBgQCgUAgEAgEgicU9aMewGyjtraWn/zkJ7S3t/PVr36VxYsXx9zu1KlT/OhHP8IwDLZt28ZLL730aAf6gPB4PLzxxhvcuXOHOXPm8PnPfx632z1hu89+9rM4nU5kWUZRFL72ta99BKO9d6a7XqZp8qMf/YiTJ0/icDj4zGc+w6JFiz6awT4Apjve8+fP8/Wvf525c+cCUFxczCuvvPIRjPT+efPNN2lsbCQpKYlvfOMbE/4+266t4OOD0IvZpxdCK16K+vts0gqYxXphCh4oN27cMNvb282/+Iu/MFtaWmJuo+u6+Ud/9EfmrVu3zGAwaH7xi180b9y48YhH+mD4x3/8R/Odd94xTdM033nnHfMf//EfY273mc98xhwYGHiEI3twzOR6NTQ0mF/5yldMwzDMy5cvm1/60pc+otHePzM53nPnzpn/43/8j49ohA+W8+fPm1euXDFff/31mH+fTddW8PFC6MXs0guhFbNbK0xz9uqFCBN6wGRnZ5OVlTXlNi0tLWRmZpKRkYGqqpSWllJXV/eIRvhgqauro7y8HIDy8vLH9jimYibXq76+ni1btiBJEkuXLmV4eJi+vr6PaMT3x2y6P2fCypUrY3onQ8ymayv4eCH04vE8jskQWvH43pszZbbqhZgMfAT09vaSlpYW/jktLY3e3t6PcET3zsDAACkpKQCkpKQwODg46bZf+cpX+NM//VP279//qIb3QJjJ9ert7SU9PX3KbR4XZnp/NjU18cd//Md89atf5caNG49yiI+U2XRtBY8fQi8eH70QWvFkawU8vtdX5AzcA3/5l39Jf3//hN//+q//OoWFhdN+3oxRwEmSpAcxtIfCVMd7N9+RmprKwMAA//2//3eysrJYuXLlAxzlw2Mm1+txu6ZTMZNjycvL480338TpdNLY2Mhf/dVf8e1vf/tRDfGRMpuureDRI/TC4knQC6EVT7ZWwON7fcVk4B748z//8/v6fFpaGj09PeGfe3p6wt6SjyNTHW9SUhJ9fX2kpKTQ19dHYmJizO1SU1PD2xcWFtLS0vJYGHeY2fVKS0uju7t7ym0eF2ZyvHFxceF/5+fn84Mf/IDBwcFJr//jzGy6toJHj9CLMWa7XgiteLK1Ah7f6yvChD4CFi9eTGdnJ7dv30bTND788EM2bNjwUQ/rntiwYQM1NTUA1NTUxPR0+Xw+vF5v+N9nzpxhwYIFj3Sc98NMrteGDRs4dOgQpmnS1NREXFzcY2EAYjGT4+3v7w97QFpaWjAMg4SEhI9iuA+d2XRtBY8fQi8eH70QWvFkawU8vtdXNB17wJw4cYIf/vCHDA4OEh8fT25uLl/+8pfp7e3l+9//Pl/60pcAaGxs5B/+4R8wDIPKykr27NnzEY/83hgaGuKNN96gu7ub9PR0Xn/9ddxud9TxdnV18dd//dcA6LrOpk2bHrvjjXW9PvjgAwB27tyJaZr84Ac/4PTp09jtdj7zmc9MWibwcWC6433//ff54IMPUBQFu93Ob//2b7Ns2bKPeNT3xje/+U0uXLjA0NAQSUlJfOpTn0LTNGB2XlvBxwehF7NPL4RWzF6tgNmrF2IyIBAIBAKBQCAQPKGIMCGBQCAQCAQCgeAJRUwGBAKBQCAQCASCJxQxGRAIBAKBQCAQCJ5QxGRAIBAIBAKBQCB4QhGTAYFAIBAIBAKB4AlFTAYEAoFAIBAIBIInFDEZEAgEAoFAIBAInlDEZEAgEAgEAoFAIHhC+f8BuntVzwq74/YAAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "fig, ax = plt.subplots(ncols=2, figsize=(13, 5))\n",
+ "plot_predictions(flk_pred, ax[0])\n",
+ "ax[0].set_title(\"Falkon\")\n",
+ "ax[0].legend(loc='best')\n",
+ "\n",
+ "plot_predictions(logflk_pred, ax[1])\n",
+ "ax[1].set_title(\"Logistic Falkon\")\n",
+ "ax[1].legend(loc='best');\n"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.9"
+ },
+ "toc": {
+ "base_numbering": 1,
+ "nav_menu": {},
+ "number_sections": true,
+ "sideBar": true,
+ "skip_h1_title": false,
+ "title_cell": "Table of Contents",
+ "title_sidebar": "Contents",
+ "toc_cell": false,
+ "toc_position": {},
+ "toc_section_display": true,
+ "toc_window_display": false
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/falkon/VERSION b/falkon/VERSION
deleted file mode 100644
index ac39a106..00000000
--- a/falkon/VERSION
+++ /dev/null
@@ -1 +0,0 @@
-0.9.0
diff --git a/falkon/__init__.py b/falkon/__init__.py
deleted file mode 100644
index 048f8d87..00000000
--- a/falkon/__init__.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import os
-
-from .options import FalkonOptions # isort:skip
-from . import ( # isort:skip
- center_selection,
- sparse,
- kernels,
- preconditioner,
- optim,
- gsc_losses,
- hopt,
-)
-from .models import Falkon, InCoreFalkon, LogisticFalkon # isort:skip
-
-# Set __version__ attribute on the package
-init_dir = os.path.dirname(os.path.abspath(__file__))
-with open(os.path.join(init_dir, "VERSION")) as version_file:
- __version__ = version_file.read().strip()
-
-__all__ = (
- "Falkon",
- "LogisticFalkon",
- "InCoreFalkon",
- "FalkonOptions",
- "kernels",
- "optim",
- "preconditioner",
- "center_selection",
- "sparse",
- "gsc_losses",
- "hopt",
- "__version__",
-)
diff --git a/falkon/benchmarks/.gitignore b/falkon/benchmarks/.gitignore
deleted file mode 100644
index 52b0e6f2..00000000
--- a/falkon/benchmarks/.gitignore
+++ /dev/null
@@ -1,5 +0,0 @@
-*.json
-*.txt
-*.png
-*.npy
-logs/*
\ No newline at end of file
diff --git a/falkon/benchmarks/README.md b/falkon/benchmarks/README.md
deleted file mode 100644
index a435cd81..00000000
--- a/falkon/benchmarks/README.md
+++ /dev/null
@@ -1,15 +0,0 @@
-## Benchmark Scripts
-
-This folder contains the code necessary to reproduce the benchmark results of the paper: [Kernel methods through the roof: handling billions of points efficiently](https://arxiv.org/abs/2006.10350).
-
-It contains code for defining [GPyTorch](https://gpytorch.ai/) and [GPFlow](https://www.gpflow.org/) models,
-for data preprocessing (see the `datasets.py` file), and for running all standard benchmarks (see `benchmark_runner.py`).
-The individual bash files are used as drivers which call the benchmark runner with different parameters.
-The [EigenPro](https://github.com/EigenPro/EigenPro2) model code is missing from here,
-but is very similar to the publicly available code, and is available on request.
-
-
-Other benchmarks are also run with scripts from this folder:
- - The out-of-core operation timings can be run with `potrf_timings.py` and `lauum_timings.py` and their respective drivers
- - The kernel matrix-vector multiplication experiment can be run with `mmv_timings.py`.
- - The experiment to measure timings with different features turned on is available in `time_improvements.py`.
diff --git a/falkon/benchmarks/cmdline-args.md b/falkon/benchmarks/cmdline-args.md
deleted file mode 100644
index 6309432d..00000000
--- a/falkon/benchmarks/cmdline-args.md
+++ /dev/null
@@ -1,272 +0,0 @@
-## benchmark_hgrad.py
-
-First hypergradient runner function, supports several models:
- - nkrr (probably not working anymore)
- - gpflow (3 possibilities: SGPR, GPR, SVGP)
-
- ```bash
- # Useful additional options: --optimize-centers
- python benchmark_hgrad.py --seed 1234 --lr 0.1 --steps 100 --sigma-type single --sigma-init 1.0 --penalty-init 1.0 --gpflow --M 20 --dataset ho-higgs --name boston_test_sgpr
- ```
-
-## simple_hopt.py
-
-Latest hyper-parameter tuning runner. Supports 2 different experiment types, both with several models.
-
-The supported experiments are:
- 1. Hyperparameter optimization via gradient descent
- 2. Hyperparameter grid-search
-
-The available models are:
- 1. LOOCV (`--model loocv`)
- 2. GCV (`--model gcv`)
- 3. SGPR (`--model sgpr`)
- 4. GPR (`--model gpr`)
- 5. HyperGrad (closed-form) (`--model hgrad-closed`)
- 6. HyperGrad (via implicit function theorem) (`--model hgrad-ift`)
- 7. Complexity reg with penalized data-fit term (`--model creg-penfit`)
- 8. Complexity reg with plain data-fit term (`--model creg-nopenfit`)
-
-An example for running SGPR in grid-search mode:
-```bash
-GS=grid_specs/tuned_boston.csv
-python gen_grid_spec.py --out-file "$GS"
-ENAME=test_tuned_m20
-M=20
-PYTHONPATH=.. python simple_hopt.py \
- --seed 12319 \
- --dataset boston \
- --cg-tol 1e-3 \
- --val-pct 0.2 \
- --sigma-type single \
- --sigma-init 1.0 \
- --penalty-init 1e-4 \
- --num-centers $M \
- --dataset boston \
- --model sgpr \
- --grid-spec "$GS" \
- --name "boston_gs_sgpr_${ENAME}"
-```
-
-Or for running it in optimization mode:
-```bash
-ENAME=test_hopt_m20
-M=20
-MODEL=sgpr
-
-PYTHONPATH=.. python hgrad_benchmarks/simple_hopt.py \
- --seed 12319 \
- --dataset boston \
- --cg-tol 1e-3 \
- --val-pct 0.2 \
- --sigma-type single \
- --sigma-init 1 \
- --penalty-init 1e-4 \
- --lr 0.05 \
- --epochs 100 \
- --op \
- --os \
- --num-centers $M \
- --dataset protein \
- --model sgpr \
- --name "protein_hopt_sgpr_${ENAME}"
- PYTHONPATH=.. python hgrad_benchmarks/simple_hopt.py \
- --seed 12319 \
- --dataset boston \
- --cg-tol 1e-3 \
- --val-pct 0.2 \
- --sigma-type single \
- --sigma-init 15 \
- --penalty-init 1e-4 \
- --lr 0.05 \
- --epochs 100 \
- --op \
- --os \
- --num-centers $M \
- --dataset protein \
- --model sgpr \
- --name "protein_hopt_sgpr_${ENAME}"
-PYTHONPATH=.. python hgrad_benchmarks/simple_hopt.py \
- --seed 12319 \
- --dataset boston \
- --cg-tol 1e-3 \
- --val-pct 0.2 \
- --sigma-type single \
- --sigma-init 1 \
- --penalty-init 1 \
- --lr 0.05 \
- --epochs 100 \
- --op \
- --os \
- --num-centers $M \
- --dataset protein \
- --model sgpr \
- --name "protein_hopt_sgpr_${ENAME}"
-PYTHONPATH=.. python hgrad_benchmarks/simple_hopt.py \
- --seed 12319 \
- --dataset boston \
- --cg-tol 1e-3 \
- --val-pct 0.2 \
- --sigma-type single \
- --sigma-init 15 \
- --penalty-init 1 \
- --lr 0.05 \
- --epochs 100 \
- --op \
- --os \
- --num-centers $M \
- --dataset protein \
- --model sgpr \
- --name "protein_hopt_${MODEL}_${ENAME}"
-```
-
-
-```
-function run_exp () {
-
- PYTHONPATH=.. python hgrad_benchmarks/simple_hopt.py \
- --seed 12319 \
- --cg-tol 1e-3 \
- --val-pct 0.2 \
- --sigma-type single \
- --sigma-init $SIG_INIT \
- --penalty-init $PEN_INIT \
- --lr $LR \
- --epochs $NUM_EPOCHS \
- --op \
- --os \
- --num-centers $M \
- --dataset $DATASET \
- --model $MODEL \
- --cuda \
- --name "${DATASET}_hopt_${MODEL}_${ENAME}"
-}
-
-
-M=20
-DATASET=protein
-LR=0.05
-NUM_EPOCHS=200
-PEN_INIT=1
-SIG_INIT=1
-ENAME="test_hopt_m${M}_lr${LR}_pinit${PEN_INIT}sinit${SIG_INIT}_meanrem"
-
-MODEL=loocv
-PYTHONPATH=.. python hgrad_benchmarks/simple_hopt.py \
- --seed 12319 \
- --cg-tol 1e-3 \
- --val-pct 0.2 \
- --sigma-type single \
- --sigma-init $SIG_INIT \
- --penalty-init $PEN_INIT \
- --lr $LR \
- --epochs $NUM_EPOCHS \
- --op \
- --os \
- --num-centers $M \
- --dataset $DATASET \
- --model $MODEL \
- --cuda \
- --name "${DATASET}_hopt_${MODEL}_${ENAME}"
-MODEL=sgpr
-PYTHONPATH=.. python hgrad_benchmarks/simple_hopt.py \
- --seed 12319 \
- --cg-tol 1e-3 \
- --val-pct 0.2 \
- --sigma-type single \
- --sigma-init $SIG_INIT \
- --penalty-init $PEN_INIT \
- --lr $LR \
- --epochs $NUM_EPOCHS \
- --op \
- --os \
- --num-centers $M \
- --dataset $DATASET \
- --model $MODEL \
- --cuda \
- --name "${DATASET}_hopt_${MODEL}_${ENAME}"
-MODEL=gcv
-PYTHONPATH=.. python hgrad_benchmarks/simple_hopt.py \
- --seed 12319 \
- --cg-tol 1e-3 \
- --val-pct 0.2 \
- --sigma-type single \
- --sigma-init $SIG_INIT \
- --penalty-init $PEN_INIT \
- --lr $LR \
- --epochs $NUM_EPOCHS \
- --op \
- --os \
- --num-centers $M \
- --dataset $DATASET \
- --model $MODEL \
- --cuda \
- --name "${DATASET}_hopt_${MODEL}_${ENAME}"
-MODEL=hgrad-ift
-PYTHONPATH=.. python hgrad_benchmarks/simple_hopt.py \
- --seed 12319 \
- --cg-tol 1e-3 \
- --val-pct 0.2 \
- --sigma-type single \
- --sigma-init $SIG_INIT \
- --penalty-init $PEN_INIT \
- --lr $LR \
- --epochs $NUM_EPOCHS \
- --op \
- --os \
- --num-centers $M \
- --dataset $DATASET \
- --model $MODEL \
- --cuda \
- --name "${DATASET}_hopt_${MODEL}_${ENAME}"
-MODEL=hgrad-closed
-PYTHONPATH=.. python hgrad_benchmarks/simple_hopt.py \
- --seed 12319 \
- --cg-tol 1e-3 \
- --val-pct 0.2 \
- --sigma-type single \
- --sigma-init $SIG_INIT \
- --penalty-init $PEN_INIT \
- --lr $LR \
- --epochs $NUM_EPOCHS \
- --op \
- --os \
- --num-centers $M \
- --dataset $DATASET \
- --model $MODEL \
- --cuda \
- --name "${DATASET}_hopt_${MODEL}_${ENAME}"
-MODEL=creg-nopenfit
-PYTHONPATH=.. python hgrad_benchmarks/simple_hopt.py \
- --seed 12319 \
- --cg-tol 1e-3 \
- --val-pct 0.2 \
- --sigma-type single \
- --sigma-init $SIG_INIT \
- --penalty-init $PEN_INIT \
- --lr $LR \
- --epochs $NUM_EPOCHS \
- --op \
- --os \
- --num-centers $M \
- --dataset $DATASET \
- --model $MODEL \
- --cuda \
- --name "${DATASET}_hopt_${MODEL}_${ENAME}"
-MODEL=creg-penfit
-PYTHONPATH=.. python hgrad_benchmarks/simple_hopt.py \
- --seed 12319 \
- --cg-tol 1e-3 \
- --val-pct 0.2 \
- --sigma-type single \
- --sigma-init $SIG_INIT \
- --penalty-init $PEN_INIT \
- --lr $LR \
- --epochs $NUM_EPOCHS \
- --op \
- --os \
- --num-centers $M \
- --dataset $DATASET \
- --model $MODEL \
- --cuda \
- --name "${DATASET}_hopt_${MODEL}_${ENAME}"
diff --git a/falkon/benchmarks/common/__init__.py b/falkon/benchmarks/common/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/falkon/benchmarks/common/benchmark_utils.py b/falkon/benchmarks/common/benchmark_utils.py
deleted file mode 100644
index 2677c215..00000000
--- a/falkon/benchmarks/common/benchmark_utils.py
+++ /dev/null
@@ -1,118 +0,0 @@
-from enum import Enum
-
-__all__ = ("DataType", "Algorithm", "Dataset", "VariationalDistribution")
-
-
-class DataType(Enum):
- single = 1
- float32 = 2
-
- double = 11
- float64 = 12
-
- def to_torch_dtype(self):
- import torch
-
- if self.value < 10:
- return torch.float32
- else:
- return torch.float64
-
- def to_numpy_dtype(self):
- import numpy as np
-
- if self.value < 10:
- return np.float32
- else:
- return np.float64
-
- def __str__(self):
- return self.name
-
- def __repr__(self):
- return str(self)
-
- @staticmethod
- def argparse(s):
- try:
- return DataType[s]
- except KeyError:
- return s
-
-
-class Algorithm(Enum):
- FALKON = "falkon"
- LOGISTIC_FALKON = "falkon-cls"
- EIGENPRO = "eigenpro"
- GPYTORCH_REG = "gpytorch-reg"
- GPFLOW_REG = "gpflow-reg"
- GPYTORCH_CLS = "gpytorch-cls"
- GPFLOW_CLS = "gpflow-cls"
- GPYTORCH_SGPR = "gpytorch-sgpr"
- GPFLOW_SGPR = "gpflow-sgpr"
-
- def __str__(self):
- return self.value
-
- def __repr__(self):
- return str(self)
-
-
-class Dataset(Enum):
- TIMIT = "timit"
- MILLIONSONGS = "millionsongs"
- HIGGS = "higgs"
- TAXI = "taxi"
- YELP = "yelp"
- FLIGHTS = "flights"
- FLIGHTS_CLS = "flights-cls"
- SUSY = "susy"
- MNIST_SMALL = "mnist-small"
- SVHN = "svhn"
- MNIST = "mnist"
- CIFAR10 = "cifar10"
- CIFAR10RGB = "cifar10-rgb"
- HOHIGGS = "ho-higgs"
- ICTUS = "ictus"
- SYNTH01NOISE = "synth-01noise"
- CHIET = "chiet"
- ENERGY = "energy"
- BOSTON = "boston"
- PROTEIN = "protein"
- KIN40K = "kin40k"
- CODRNA = "codrna"
- SVMGUIDE1 = "svmguide1"
- PHISHING = "phishing"
- SPACEGA = "spacega"
- CADATA = "cadata"
- MG = "mg"
- CPUSMALL = "cpusmall"
- ABALONE = "abalone"
- CASP = "casp"
- BLOGFEEDBACK = "blogfeedback"
- COVTYPE = "covtype"
- IJCNN1 = "ijcnn1"
- FASHION_MNIST = "fashionmnist"
- BUZZ = "buzz"
- ROAD3D = "road3d"
- HOUSEELECTRIC = "houseelectric"
-
- def __str__(self):
- return self.value
-
- def __repr__(self):
- return str(self)
-
-
-class VariationalDistribution(Enum):
- FULL = "full"
- DIAG = "diag"
- DELTA = "delta"
- NATGRAD = "natgrad"
- TRIL_NATGRAD = "tril_natgrad"
-
- def __str__(self):
- return self.value
-
- def __repr__(self):
- return str(self)
diff --git a/falkon/benchmarks/common/create_weather_dataset.py b/falkon/benchmarks/common/create_weather_dataset.py
deleted file mode 100644
index e1b2e1e9..00000000
--- a/falkon/benchmarks/common/create_weather_dataset.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import pickle
-
-import h5py
-import numpy as np
-
-horizon = 6
-memory = 72
-input_name = "smz_CHIET.pkl"
-output_name = "CHIET.hdf5"
-
-o_index = horizon - 1
-
-with open(input_name, "rb") as fh:
- data = pickle.load(fh)
-
-otime = np.array(data["otime"])[:, o_index]
-zout = np.array(data["O_zonal"])[:, o_index]
-mout = np.array(data["O_merid"])[:, o_index]
-sout = np.array(data["O_speed"])[:, o_index]
-
-itime = np.array(data["itime"])[:, -memory:]
-zinp = np.array(data["I_zonal"])[:, -memory:]
-minp = np.array(data["I_merid"])[:, -memory:]
-sinp = np.array(data["I_speed"])[:, -memory:]
-
-X = np.concatenate((zinp, minp), axis=1)
-Y = sout.reshape(-1, 1)
-
-time_thresh = np.datetime64("2018-01-01")
-tr_index = otime < time_thresh
-
-train_x = X[tr_index, :]
-test_x = X[~tr_index, :]
-train_y = Y[tr_index, :]
-test_y = Y[~tr_index, :]
-
-with h5py.File(output_name, "w") as fh:
- fh.create_dataset("X_train", data=train_x)
- fh.create_dataset("X_test", data=test_x)
- fh.create_dataset("Y_train", data=train_y)
- fh.create_dataset("Y_test", data=test_y)
diff --git a/falkon/benchmarks/common/datasets.py b/falkon/benchmarks/common/datasets.py
deleted file mode 100644
index ba8fb586..00000000
--- a/falkon/benchmarks/common/datasets.py
+++ /dev/null
@@ -1,1112 +0,0 @@
-import os
-import typing
-from abc import ABC, abstractmethod
-
-import h5py
-import numpy as np
-import scipy.io as scio
-import scipy.sparse
-from scipy.sparse import load_npz
-from sklearn.datasets import load_svmlight_file
-from torch import Tensor
-
-import falkon
-
-from .benchmark_utils import Dataset
-
-__all__ = (
- "get_load_fn",
- "get_cv_fn",
- "BaseDataset",
- "HiggsDataset",
- "SusyDataset",
- "MillionSongsDataset",
- "TimitDataset",
- "NycTaxiDataset",
- "YelpDataset",
- "FlightsDataset",
- "FlightsClsDataset",
- "CIFAR10Dataset",
- "CIFAR10RGBDataset",
- "SVHNDataset",
- "FashionMnistDataset",
- "MnistSmallDataset",
- "MnistDataset",
- "SmallHiggsDataset",
- "IctusDataset",
- "SyntheticDataset",
- "ChietDataset",
- "EnergyDataset",
- "BostonDataset",
- "ProteinDataset",
- "Kin40kDataset",
- "CodRnaDataset",
- "SvmGuide1Dataset",
- "PhishingDataset",
- "SpaceGaDataset",
- "CadataDataset",
- "MgDataset",
- "CpuSmallDataset",
- "AbaloneDataset",
- "CaspDataset",
- "BlogFeedbackDataset",
- "CovTypeDataset",
- "Ijcnn1Dataset",
- "BuzzDataset",
- "Road3DDataset",
- "HouseEelectricDataset",
-)
-
-NP_ARR = typing.TypeVar("NP_ARR", bound=typing.Union[np.ndarray, scipy.sparse.spmatrix])
-
-
-def load_from_npz(dset_name, folder, dtype, verbose=False):
- x_file = os.path.join(folder, f"{dset_name}_data.npz")
- y_file = os.path.join(folder, f"{dset_name}_target.npy")
- x_data = np.asarray(load_npz(x_file).todense()).astype(as_np_dtype(dtype))
- y_data = np.load(y_file).astype(as_np_dtype(dtype))
- if verbose:
- print(f"Loaded {dset_name}. X: {x_data.shape} - Y: {y_data.shape}")
- return x_data, y_data
-
-
-def load_from_t(dset_name, folder, verbose=False):
- file_tr = os.path.join(folder, dset_name)
- file_ts = os.path.join(folder, dset_name + ".t")
- x_data_tr, y_data_tr = load_svmlight_file(file_tr) # type: ignore
- x_data_tr = np.asarray(x_data_tr.todense())
- x_data_ts, y_data_ts = load_svmlight_file(file_ts) # type: ignore
- x_data_ts = np.asarray(x_data_ts.todense())
- if verbose:
- print(
- f"Loaded {dset_name}. train X: {x_data_tr.shape} - Y: {y_data_tr.shape} - "
- f"test X: {x_data_ts.shape} - Y: {y_data_ts.shape}"
- )
- x_data = np.concatenate((x_data_tr, x_data_ts))
- y_data = np.concatenate((y_data_tr, y_data_ts))
- return x_data, y_data
-
-
-def standardize_x(Xtr, Xts):
- if isinstance(Xtr, np.ndarray):
- mXtr = Xtr.mean(axis=0, keepdims=True, dtype=np.float64).astype(Xtr.dtype)
- sXtr = Xtr.std(axis=0, keepdims=True, dtype=np.float64, ddof=1).astype(Xtr.dtype)
- else:
- mXtr = Xtr.mean(dim=0, keepdims=True)
- sXtr = Xtr.std(dim=0, keepdims=True)
- sXtr[sXtr == 0] = 1.0
-
- Xtr -= mXtr
- Xtr /= sXtr
- Xts -= mXtr
- Xts /= sXtr
-
- return Xtr, Xts, {}
-
-
-def mean_remove_y(Ytr, Yts):
- mtr = np.mean(Ytr, dtype=np.float64).astype(Ytr.dtype)
- Ytr -= mtr
- Yts -= mtr
- Ytr = Ytr.reshape((-1, 1))
- Yts = Yts.reshape((-1, 1))
- return Ytr, Yts, {"Y_mean": mtr}
-
-
-def standardize_y(Ytr, Yts):
- mtr = np.mean(Ytr, dtype=np.float64).astype(Ytr.dtype)
- stdtr = np.std(Ytr, dtype=np.float64, ddof=1).astype(Ytr.dtype)
- Ytr -= mtr
- Ytr /= stdtr
- Yts -= mtr
- Yts /= stdtr
- Ytr = Ytr.reshape((-1, 1))
- Yts = Yts.reshape((-1, 1))
- return Ytr, Yts, {"Y_mean": mtr, "Y_std": stdtr}
-
-
-def as_np_dtype(dtype):
- if "float32" in str(dtype):
- return np.float32
- if "float64" in str(dtype):
- return np.float64
- if "int32" in str(dtype):
- return np.int32
- raise ValueError(dtype)
-
-
-def as_torch_dtype(dtype):
- import torch
-
- if "float32" in str(dtype):
- return torch.float32
- if "float64" in str(dtype):
- return torch.float64
- if "int32" in str(dtype):
- return torch.int32
- raise ValueError(dtype)
-
-
-def equal_split(N, train_frac):
- Ntr = int(N * train_frac)
- idx = np.arange(N)
- np.random.shuffle(idx)
- idx_tr = idx[:Ntr]
- idx_ts = idx[Ntr:]
- return idx_tr, idx_ts
-
-
-def convert_to_binary_y(Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- labels = set(np.unique(Ytr))
- if labels == {0, 1}:
- # Convert labels from 0, 1 to -1, +1
- Ytr = Ytr * 2 - 1
- Yts = Yts * 2 - 1
- elif labels == {1, 2}:
- # Convert from 1, 2 to -1, +1
- Ytr = (Ytr - 1) * 2 - 1
- Yts = (Yts - 1) * 2 - 1
-
- return Ytr.reshape(-1, 1), Yts.reshape(-1, 1), {}
-
-
-def convert_to_onehot(
- Ytr: np.ndarray, Yts: np.ndarray, num_classes: int, damping: bool = False
-) -> tuple[np.ndarray, np.ndarray, dict]:
- eye = np.eye(num_classes, dtype=as_np_dtype(Ytr.dtype))
- if damping:
- damp_val = 1 / (num_classes - 1)
- eye = eye - damp_val # + eye * damping
- Ytr = eye[Ytr.astype(np.int32).reshape(-1), :]
- Yts = eye[Yts.astype(np.int32).reshape(-1), :]
- return Ytr, Yts, {}
-
-
-def rgb_to_bw(X, dim=32):
- img_len = dim**2
- R = X[:, :img_len]
- G = X[:, img_len : 2 * img_len]
- B = X[:, 2 * img_len : 3 * img_len]
- return 0.2126 * R + 0.7152 * G + 0.0722 * B
-
-
-class MyKFold:
- def __init__(self, n_splits, shuffle, seed=92):
- self.n_splits = n_splits
- self.shuffle = shuffle
- self.random_state = np.random.RandomState(seed)
-
- def split(self, X, y=None):
- N = X.shape[0]
- indices = np.arange(N)
- mask = np.full(N, False)
- if self.shuffle:
- self.random_state.shuffle(indices)
-
- n_splits = self.n_splits
- fold_sizes = np.full(n_splits, N // n_splits, dtype=int)
- fold_sizes[: N % n_splits] += 1
- current = 0
-
- for fold_size in fold_sizes:
- start, stop = current, current + fold_size
- mask.fill(False)
- mask[indices[start:stop]] = True
- yield mask
- current = stop
-
-
-class BaseDataset:
- def load_data(self, dtype, as_torch=False, as_tf=False):
- X, Y = self.read_data(dtype)
- print(f"Loaded {self.dset_name} dataset in {dtype} precision.", flush=True)
- Xtr, Ytr, Xts, Yts = self.split_data(X, Y, train_frac=None)
- assert Xtr.shape[0] == Ytr.shape[0]
- assert Xts.shape[0] == Yts.shape[0]
- assert Xtr.shape[1] == Xts.shape[1]
- print(
- f"Split the data into {Xtr.shape[0]} training, "
- f"{Xts.shape[0]} validation points of dimension {Xtr.shape[1]}.",
- flush=True,
- )
- Xtr, Xts, other_X = self.preprocess_x(Xtr, Xts)
- Ytr, Yts, other_Y = self.preprocess_y(Ytr, Yts)
- print("Data-preprocessing completed.", flush=True)
- kwargs = dict(*other_X)
- kwargs.update(other_Y)
- if as_torch:
- return self.to_torch(Xtr, Ytr, Xts, Yts, **kwargs)
- if as_tf:
- return self.to_tensorflow(Xtr, Ytr, Xts, Yts, **kwargs)
- return Xtr, Ytr, Xts, Yts, kwargs
-
- def load_data_cv(self, dtype, k, as_torch=False):
- X, Y = self.read_data(dtype)
- print(f"Loaded {self.dset_name} dataset in {dtype} precision.", flush=True)
- print(f"Data size: {X.shape[0]} points with {X.shape[1]} features", flush=True)
-
- kfold = MyKFold(n_splits=k, shuffle=True)
- for iteration, test_idx in enumerate(kfold.split(X)):
- Xtr = X[~test_idx]
- Ytr = Y[~test_idx]
- Xts = X[test_idx]
- Yts = Y[test_idx]
- Xtr, Xts, other_X = self.preprocess_x(Xtr, Xts)
- Ytr, Yts, other_Y = self.preprocess_y(Ytr, Yts)
- print(
- f"Preprocessing complete (iter {iteration}) - "
- f"Divided into {Xtr.shape[0]} train, {Xts.shape[0]} test points"
- )
- kwargs = dict(*other_X)
- kwargs.update(other_Y)
- if as_torch:
- yield self.to_torch(Xtr, Ytr, Xts, Yts, **kwargs)
- else:
- yield Xtr, Ytr, Xts, Yts, kwargs
-
- @abstractmethod
- def read_data(self, dtype) -> tuple[np.ndarray | scipy.sparse.spmatrix, np.ndarray]:
- pass
-
- @abstractmethod
- def split_data(
- self, X, Y, train_frac: float | None
- ) -> tuple[np.ndarray | scipy.sparse.spmatrix, np.ndarray, np.ndarray | scipy.sparse.spmatrix, np.ndarray]:
- pass
-
- @abstractmethod
- def preprocess_x(self, Xtr: NP_ARR, Xts: NP_ARR) -> tuple[NP_ARR, NP_ARR, dict]:
- return Xtr, Xts, {}
-
- @abstractmethod
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return Ytr, Yts, {}
-
- def to_torch(self, Xtr, Ytr, Xts, Yts, **kwargs) -> tuple[
- Tensor | falkon.sparse.sparse_tensor.SparseTensor,
- Tensor,
- Tensor | falkon.sparse.sparse_tensor.SparseTensor,
- Tensor,
- dict,
- ]:
- import torch
-
- # torch_kwargs = {k: torch.from_numpy(v) for k, v in kwargs.items()}
- torch_kwargs = kwargs
- return (
- torch.from_numpy(Xtr),
- torch.from_numpy(Ytr),
- torch.from_numpy(Xts),
- torch.from_numpy(Yts),
- torch_kwargs,
- )
-
- def to_tensorflow(self, Xtr, Ytr, Xts, Yts, **kwargs):
- # By default tensorflow is happy with numpy arrays
- return Xtr, Ytr, Xts, Yts, kwargs
-
- @property
- @abstractmethod
- def dset_name(self) -> str:
- pass
-
-
-class KnownSplitDataset(BaseDataset, ABC):
- def split_data(self, X, Y, train_frac: float | None | str = None):
- if train_frac == "auto" or train_frac is None:
- idx_tr = np.arange(self.num_train_samples)
- if self.num_test_samples > 0:
- idx_ts = np.arange(self.num_train_samples, self.num_train_samples + self.num_test_samples)
- else:
- idx_ts = np.arange(self.num_train_samples, X.shape[0])
- else:
- idx_tr, idx_ts = equal_split(X.shape[0], train_frac)
-
- return X[idx_tr], Y[idx_tr], X[idx_ts], Y[idx_ts]
-
- @property
- @abstractmethod
- def num_train_samples(self) -> int:
- pass
-
- @property
- def num_test_samples(self) -> int:
- return -1
-
-
-class RandomSplitDataset(BaseDataset, ABC):
- def split_data(self, X, Y, train_frac: float | None | str = None):
- if train_frac is None:
- train_frac = self.default_train_frac
- idx_tr, idx_ts = equal_split(X.shape[0], train_frac)
- return X[idx_tr], Y[idx_tr], X[idx_ts], Y[idx_ts]
-
- @property
- @abstractmethod
- def default_train_frac(self) -> float:
- pass
-
-
-class Hdf5Dataset(BaseDataset, ABC):
- def read_data(self, dtype):
- with h5py.File(self.file_name, "r") as h5py_file:
- if "X_train" in h5py_file and "X_test" in h5py_file and "Y_train" in h5py_file and "Y_test" in h5py_file:
- X_train = np.array(h5py_file["X_train"], dtype=as_np_dtype(dtype))
- Y_train = np.array(h5py_file["Y_train"], dtype=as_np_dtype(dtype))
- X_test = np.array(h5py_file["X_test"], dtype=as_np_dtype(dtype))
- Y_test = np.array(h5py_file["Y_test"], dtype=as_np_dtype(dtype))
- X = np.concatenate([X_train, X_test], axis=0)
- Y = np.concatenate([Y_train, Y_test], axis=0)
- elif "X" in h5py_file and "Y" in h5py_file:
- X = np.array(h5py_file["X"], dtype=as_np_dtype(dtype))
- Y = np.array(h5py_file["Y"], dtype=as_np_dtype(dtype))
- else:
- raise RuntimeError(f"Cannot parse h5py file with keys {list(h5py_file.keys())}")
- return X, Y
-
- @property
- @abstractmethod
- def file_name(self) -> str:
- pass
-
-
-class MillionSongsDataset(KnownSplitDataset):
- file_name = "/data/DATASETS/MillionSongs/YearPredictionMSD.mat"
- dset_name = "MillionSongs" # type: ignore
- num_train_samples = 463715 # type: ignore
- num_test_samples = 51630 # type: ignore
-
- def read_data(self, dtype) -> tuple[np.ndarray, np.ndarray]:
- f = scio.loadmat(MillionSongsDataset.file_name)
- X = f["X"][:, 1:].astype(as_np_dtype(dtype))
- Y = f["X"][:, 0].astype(as_np_dtype(dtype))
- return X, Y
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_y(Ytr, Yts) # Original
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
-
-class NycTaxiDataset(RandomSplitDataset, Hdf5Dataset):
- file_name = "/data/DATASETS/NYCTAXI/NYCTAXI.h5" # type: ignore
- dset_name = "TAXI" # type: ignore
- default_train_frac = 0.8 # type: ignore
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_y(Ytr, Yts)
-
-
-class HiggsDataset(RandomSplitDataset):
- file_name = "/data/DATASETS/HIGGS_UCI/Higgs.mat"
- dset_name = "HIGGS" # type: ignore
- default_train_frac = 0.8 # type: ignore
-
- def read_data(self, dtype):
- with h5py.File(HiggsDataset.file_name, "r") as h5py_file:
- arr = np.array(h5py_file["X"], dtype=as_np_dtype(dtype)).T
- X = arr[:, 1:]
- Y = arr[:, 0]
- return X, Y
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- mtr = np.mean(Xtr, axis=0, dtype=np.float64, keepdims=True).astype(Xtr.dtype)
- vtr = np.var(Xtr, axis=0, dtype=np.float64, ddof=1, keepdims=True).astype(Xtr.dtype)
-
- Xtr -= mtr
- Xtr /= vtr
- Xts -= mtr
- Xts /= vtr
-
- return Xtr, Xts, {}
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return convert_to_binary_y(Ytr, Yts) # 0, 1 -> -1, +1
-
-
-class TimitDataset(KnownSplitDataset):
- file_name = "/data/DATASETS/TIMIT/TIMIT.mat"
- dset_name = "TIMIT" # type: ignore
- num_train_samples = 1124823 # type: ignore
-
- def read_data(self, dtype):
- f = scio.loadmat(TimitDataset.file_name)
- dtype = as_np_dtype(dtype)
- Xtr = np.array(f["Xtr"], dtype=dtype)
- Xts = np.array(f["Xts"], dtype=dtype)
- Ytr = np.array(f["Ytr"], dtype=dtype).reshape((-1,))
- Yts = np.array(f["Yts"], dtype=dtype).reshape((-1,))
- X = np.concatenate((Xtr, Xts), axis=0)
- Y = np.concatenate((Ytr, Yts), axis=0)
-
- return X, Y
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- Yts = (Yts - 1) * 3
- return convert_to_onehot(Ytr, Yts, num_classes=144, damping=True)
-
-
-class YelpDataset(RandomSplitDataset):
- file_name = "/data/DATASETS/YELP_Ben/YELP_Ben_OnlyONES.mat"
- dset_name = "YELP" # type: ignore
- default_train_frac = 0.8 # type: ignore
-
- def read_data(self, dtype):
- with h5py.File(YelpDataset.file_name, "r") as h5py_file:
- X: scipy.sparse.spmatrix = scipy.sparse.csc_matrix(
- (
- np.array(h5py_file["X"]["data"], as_np_dtype(dtype)), # type: ignore
- h5py_file["X"]["ir"][...], # type: ignore
- h5py_file["X"]["jc"][...], # type: ignore
- )
- ).tocsr(
- copy=False
- ) # type: ignore
- Y = np.array(h5py_file["Y"], dtype=as_np_dtype(dtype)).reshape((-1, 1))
- return X, Y
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- # scaler = sklearn.preprocessing.StandardScaler(copy=False, with_mean=False, with_std=True)
- # Xtr = scaler.fit_transform(Xtr)
- # Xts = scaler.transform(Xts)
- return Xtr, Xts, {}
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return Ytr, Yts, {}
-
- def to_torch(self, Xtr, Ytr, Xts, Yts, **kwargs):
- import torch
-
- from falkon.sparse.sparse_tensor import SparseTensor
-
- return (
- SparseTensor.from_scipy(Xtr),
- torch.from_numpy(Ytr),
- SparseTensor.from_scipy(Xts),
- torch.from_numpy(Yts),
- {},
- )
-
- def to_tensorflow(self, Xtr, Ytr, Xts, Yts, **kwargs):
- import tensorflow as tf
-
- def scipy2tf(X):
- # Uses same representation as pytorch
- # https://www.tensorflow.org/api_docs/python/tf/sparse/SparseTensor
- coo = X.tocoo()
- indices = np.array([coo.row, coo.col]).transpose()
- return tf.SparseTensor(indices, coo.data, coo.shape)
-
- return (scipy2tf(Xtr), Ytr, scipy2tf(Xts), Yts, {})
-
-
-class FlightsDataset(RandomSplitDataset, Hdf5Dataset):
- file_name = "/data/DATASETS/FLIGHTS/flights.hdf5" # type: ignore
- dset_name = "FLIGHTS" # type: ignore
- default_train_frac = 0.666 # type: ignore
-
- def read_data(self, dtype):
- X, Y = super().read_data(dtype)
- # Preprocessing independent of train/test
- # As for https://github.com/jameshensman/VFF/blob/master/experiments/airline/airline_additive_figure.py
- # 1. Convert time of day from hhmm to minutes since midnight
- # ArrTime is column 7, DepTime is column 6
- X[:, 7] = 60 * np.floor(X[:, 7] / 100) + np.mod(X[:, 7], 100)
- X[:, 6] = 60 * np.floor(X[:, 6] / 100) + np.mod(X[:, 6], 100)
- # 2. remove flights with silly negative delays (small negative delays are OK)
- pos_delay_idx = np.where(Y > -60)[0]
- X = X[pos_delay_idx, :]
- Y = Y[pos_delay_idx, :]
- # 3. remove outlying flights in term of length (col 'AirTime' at pos 5)
- short_flight_idx = np.where(X[:, 5] < 700)[0]
- X = X[short_flight_idx, :]
- Y = Y[short_flight_idx, :]
-
- return X, Y
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- Ytr, Yts, metadata = standardize_y(Ytr, Yts)
- return Ytr, Yts, {}
-
-
-class FlightsClsDataset(Hdf5Dataset):
- file_name = "/data/DATASETS/FLIGHTS/flights.hdf5" # type: ignore
- dset_name = "FLIGHTS-CLS" # type: ignore
- _default_train_num = 100_000
-
- def read_data(self, dtype):
- X, Y = super().read_data(dtype)
- # Preprocessing independent of train/test
- # As for https://github.com/jameshensman/VFF/blob/master/experiments/airline/airline_additive_figure.py
- # 1. Convert time of day from hhmm to minutes since midnight
- # ArrTime is column 7, DepTime is column 6
- X[:, 7] = 60 * np.floor(X[:, 7] / 100) + np.mod(X[:, 7], 100)
- X[:, 6] = 60 * np.floor(X[:, 6] / 100) + np.mod(X[:, 6], 100)
- # Turn regression into classification by thresholding delay or not delay:
- Y = (Y <= 0).astype(X.dtype)
-
- return X, Y
-
- def split_data(self, X, Y, train_frac: float | None):
- if train_frac is None:
- train_frac = (X.shape[0] - FlightsClsDataset._default_train_num) / X.shape[0]
- idx_tr, idx_ts = equal_split(X.shape[0], train_frac)
- return X[idx_tr], Y[idx_tr], X[idx_ts], Y[idx_ts]
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return convert_to_binary_y(Ytr, Yts) # 0, 1 -> -1, +1
-
-
-class SusyDataset(RandomSplitDataset):
- file_name = "/data/DATASETS/SUSY/Susy.mat"
- dset_name = "SUSY" # type: ignore
- default_train_frac = 0.8 # type: ignore
-
- def read_data(self, dtype):
- with h5py.File(SusyDataset.file_name, "r") as f:
- arr = np.asarray(f["X"], dtype=as_np_dtype(dtype)).T
- X = arr[:, 1:]
- Y = arr[:, 0].reshape(-1, 1)
- return X, Y
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return convert_to_binary_y(Ytr, Yts) # 0, 1 -> -1, +1
-
-
-class CIFAR10Dataset(KnownSplitDataset):
- file_name = "/data/DATASETS/CIFAR10/cifar10.mat"
- ts_file_name = "/data/DATASETS/CIFAR10/cifar10.t.mat"
- dset_name = "CIFAR10" # type: ignore
- num_train_samples = 50000 # type: ignore
-
- def read_data(self, dtype):
- tr_data = scio.loadmat(CIFAR10Dataset.file_name)
- ts_data = scio.loadmat(CIFAR10Dataset.ts_file_name)
- X = np.concatenate((tr_data["Z"], ts_data["Z"]), axis=0).astype(as_np_dtype(dtype))
- Y = np.concatenate((tr_data["y"], ts_data["y"]), axis=0).astype(as_np_dtype(dtype))
- X = rgb_to_bw(X, dim=32)
- return X, Y
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return Xtr / 255, Xts / 255, {}
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return convert_to_onehot(Ytr, Yts, num_classes=10)
-
-
-class CIFAR10RGBDataset(KnownSplitDataset):
- file_name = "/data/DATASETS/CIFAR10/cifar10rgb.hdf5"
- dset_name = "CIFAR10_RGB" # type: ignore
- num_train_samples = 50000 # type: ignore
-
- def read_data(self, dtype):
- with h5py.File(self.file_name, "r") as h5py_file:
- x_tr = np.array(h5py_file["Xtr"], dtype=as_np_dtype(dtype))
- x_ts = np.array(h5py_file["Xts"], dtype=as_np_dtype(dtype))
- y_tr = np.array(h5py_file["Ytr"], dtype=as_np_dtype(dtype))
- y_ts = np.array(h5py_file["Yts"], dtype=as_np_dtype(dtype))
- return np.concatenate((x_tr, x_ts), axis=0), np.concatenate((y_tr, y_ts), axis=0)
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return Xtr / 255, Xts / 255, {}
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return convert_to_onehot(Ytr, Yts, num_classes=10)
-
-
-class SVHNDataset(KnownSplitDataset):
- file_name = "/data/DATASETS/SVHN/SVHN.mat"
- ts_file_name = "/data/DATASETS/SVHN/SVHN.t.mat"
- dset_name = "SVHN" # type: ignore
- num_train_samples = 73257 # type: ignore
-
- def read_data(self, dtype):
- tr_data = scio.loadmat(SVHNDataset.file_name)
- ts_data = scio.loadmat(SVHNDataset.ts_file_name)
- X = np.concatenate((tr_data["Z"], ts_data["Z"]), axis=0).astype(as_np_dtype(dtype))
- Y = np.concatenate((tr_data["y"], ts_data["y"]), axis=0).astype(as_np_dtype(dtype))
- X = rgb_to_bw(X, dim=32)
- Y = Y - 1 # Y is 1-indexed, convert to 0 index.
- return X, Y
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return Xtr / 255, Xts / 255, {}
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return convert_to_onehot(Ytr, Yts, num_classes=10)
-
-
-class FashionMnistDataset(KnownSplitDataset, Hdf5Dataset):
- file_name = "/data/DATASETS/misc/fashion_mnist.hdf5" # type: ignore
- dset_name = "FASHION_MNIST" # type: ignore
- num_train_samples = 60000 # type: ignore
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- Xtr /= 255.0
- Xts /= 255.0
- return Xtr, Xts, {}
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return convert_to_onehot(Ytr, Yts, num_classes=10)
-
-
-class MnistSmallDataset(KnownSplitDataset, Hdf5Dataset):
- file_name = "/data/DATASETS/misc/mnist.hdf5" # type: ignore
- dset_name = "MNIST" # type: ignore
- num_train_samples = 60000 # type: ignore
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- Xtr /= 255.0
- Xts /= 255.0
- return Xtr, Xts, {}
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return convert_to_onehot(Ytr, Yts, num_classes=10)
-
-
-class MnistDataset(KnownSplitDataset, Hdf5Dataset):
- file_name = "/data/DATASETS/MNIST/mnist8m_normalized.hdf5" # type: ignore
- dset_name = "MNIST8M" # type: ignore
- num_train_samples = 6750000 # type: ignore
- num_test_samples = 10_000 # type: ignore
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return Xtr, Xts, {}
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return convert_to_onehot(Ytr, Yts, num_classes=10, damping=True)
-
-
-class SmallHiggsDataset(Hdf5Dataset, KnownSplitDataset):
- file_name = "/data/DATASETS/HIGGS_UCI/higgs_for_ho.hdf5" # type: ignore
- dset_name = "HIGGSHO" # type: ignore
- num_train_samples = 10_000 # type: ignore
- num_test_samples = 20_000 # type: ignore
-
- def read_centers(self, dtype):
- with h5py.File(self.file_name, "r") as h5py_file:
- centers = np.array(h5py_file["centers"], dtype=as_np_dtype(dtype))
- return centers
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- centers = self.read_centers(Xtr.dtype)
-
- mtr = np.mean(Xtr, axis=0, dtype=np.float64, keepdims=True).astype(Xtr.dtype)
- vtr = np.var(Xtr, axis=0, dtype=np.float64, ddof=1, keepdims=True).astype(Xtr.dtype)
- Xtr -= mtr
- Xtr /= vtr
- Xts -= mtr
- Xts /= vtr
- centers -= mtr
- centers /= vtr
-
- return Xtr, Xts, {"centers": centers}
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return convert_to_binary_y(Ytr, Yts) # 0, 1 -> -1, +1
-
-
-class IctusDataset(RandomSplitDataset):
- file_name = "/data/DATASETS/ICTUS/run_all.mat"
- dset_name = "ICTUS" # type: ignore
- default_train_frac = 0.8 # type: ignore
-
- def read_data(self, dtype):
- data_dict = scio.loadmat(IctusDataset.file_name)
- X = np.asarray(data_dict["X"], dtype=as_np_dtype(dtype))
- Y = np.asarray(data_dict["Y"], dtype=as_np_dtype(dtype))
- return X, Y
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- mtr = np.mean(Xtr, axis=0, dtype=np.float64, keepdims=True).astype(Xtr.dtype)
- vtr = (1.0 / np.std(Xtr, axis=0, dtype=np.float64, ddof=1, keepdims=True)).astype(Xtr.dtype)
-
- Xtr -= mtr
- Xtr *= vtr
- Xts -= mtr
- Xts *= vtr
-
- return Xtr, Xts, {}
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return convert_to_binary_y(Ytr, Yts) # 0, 1 -> -1, +1
-
-
-class SyntheticDataset(RandomSplitDataset):
- file_name = "/data/DATASETS/Synthetic0.1Noise.mat"
- dset_name = "SYNTH01NOISE" # type: ignore
- default_train_frac = 0.5 # type: ignore
-
- def read_data(self, dtype):
- data_dict = scio.loadmat(SyntheticDataset.file_name)
- X = np.asarray(data_dict["X"], dtype=as_np_dtype(dtype))
- Y = np.asarray(data_dict["Y"], dtype=as_np_dtype(dtype))
- return X, Y
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return Xtr, Xts, {}
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return Ytr.reshape((-1, 1)), Yts.reshape((-1, 1)), {}
-
-
-class ChietDataset(KnownSplitDataset, Hdf5Dataset):
- file_name = "/data/DATASETS/weather/CHIET.hdf5" # type: ignore
- dset_name = "CHIET" # type: ignore
- num_train_samples = 26227 # type: ignore
- num_test_samples = 7832 # type: ignore
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_y(Ytr, Yts)
-
-
-class EnergyDataset(RandomSplitDataset, Hdf5Dataset):
- file_name = "/data/DATASETS/energy.hdf5" # type: ignore
- dset_name = "ENERGY" # type: ignore
- default_train_frac = 0.8 # type: ignore
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_y(Ytr, Yts)
-
-
-class BostonDataset(RandomSplitDataset, Hdf5Dataset):
- file_name = "/data/DATASETS/boston.hdf5" # type: ignore
- dset_name = "BOSTON" # type: ignore
- default_train_frac = 0.8 # type: ignore
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_y(Ytr, Yts)
-
-
-class ProteinDataset(RandomSplitDataset, Hdf5Dataset):
- file_name = "/data/DATASETS/protein.hdf5" # type: ignore
- dset_name = "PROTEIN" # type: ignore
- default_train_frac = 0.8 # type: ignore
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_y(Ytr, Yts)
-
-
-class Kin40kDataset(KnownSplitDataset, Hdf5Dataset):
- file_name = "/data/DATASETS/kin40k.hdf5" # type: ignore
- dset_name = "KIN40K" # type: ignore
- num_train_samples = 10_000 # type: ignore
- num_test_samples = 30_000 # type: ignore
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_y(Ytr, Yts)
-
-
-class CodRnaDataset(KnownSplitDataset):
- folder = "/data/DATASETS/libsvm/binary"
- dset_name = "cod-rna" # type: ignore
- num_train_samples = 59_535 # type: ignore
- num_test_samples = 271_617 # type: ignore
-
- def read_data(self, dtype):
- x_data, y_data = load_from_t(CodRnaDataset.dset_name, CodRnaDataset.folder)
- x_data = x_data.astype(as_np_dtype(dtype))
- y_data = y_data.astype(as_np_dtype(dtype))
- return x_data, y_data
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return Ytr.reshape(-1, 1), Yts.reshape(-1, 1), {} # Is already -1, +1
-
-
-class SvmGuide1Dataset(KnownSplitDataset):
- folder = "/data/DATASETS/libsvm/binary"
- dset_name = "svmguide1" # type: ignore
- num_train_samples = 3089 # type: ignore
- num_test_samples = 4000 # type: ignore
-
- def read_data(self, dtype):
- x_data, y_data = load_from_t(SvmGuide1Dataset.dset_name, SvmGuide1Dataset.folder)
- x_data = x_data.astype(as_np_dtype(dtype))
- y_data = y_data.astype(as_np_dtype(dtype))
- return x_data, y_data
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return convert_to_binary_y(Ytr, Yts) # 0, 1 -> -1, +1
-
-
-class PhishingDataset(RandomSplitDataset):
- folder = "/data/DATASETS/libsvm/binary"
- dset_name = "phishing" # type: ignore
- default_train_frac = 0.7 # type: ignore
-
- def read_data(self, dtype):
- x_data, y_data = load_from_npz(self.dset_name, self.folder, dtype)
- return x_data, y_data
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return Xtr, Xts, {} # No preproc, all values are equal-.-
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return convert_to_binary_y(Ytr, Yts) # 0, 1 -> -1, +1
-
-
-class SpaceGaDataset(RandomSplitDataset):
- folder = "/data/DATASETS/libsvm/regression"
- dset_name = "space_ga" # type: ignore
- default_train_frac = 0.7 # type: ignore
-
- def read_data(self, dtype):
- x_data, y_data = load_from_npz(self.dset_name, self.folder, dtype)
- return x_data, y_data
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_y(Ytr, Yts)
-
-
-class CadataDataset(RandomSplitDataset):
- folder = "/data/DATASETS/libsvm/regression"
- dset_name = "cadata" # type: ignore
- default_train_frac = 0.7 # type: ignore
-
- def read_data(self, dtype):
- x_data, y_data = load_from_npz(self.dset_name, self.folder, dtype)
- return x_data, y_data
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_y(Ytr, Yts)
-
-
-class MgDataset(RandomSplitDataset):
- folder = "/data/DATASETS/libsvm/regression"
- dset_name = "mg" # type: ignore
- default_train_frac = 0.7 # type: ignore
-
- def read_data(self, dtype):
- x_data, y_data = load_from_npz(self.dset_name, self.folder, dtype)
- return x_data, y_data
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_y(Ytr, Yts)
-
-
-class CpuSmallDataset(RandomSplitDataset):
- folder = "/data/DATASETS/libsvm/regression"
- dset_name = "cpusmall" # type: ignore
- default_train_frac = 0.7 # type: ignore
-
- def read_data(self, dtype):
- x_data, y_data = load_from_npz(self.dset_name, self.folder, dtype)
- return x_data, y_data
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_y(Ytr, Yts)
-
-
-class AbaloneDataset(RandomSplitDataset):
- folder = "/data/DATASETS/libsvm/regression"
- dset_name = "abalone" # type: ignore
- default_train_frac = 0.7 # type: ignore
-
- def read_data(self, dtype):
- x_data, y_data = load_from_npz(self.dset_name, self.folder, dtype)
- return x_data, y_data
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_y(Ytr, Yts)
-
-
-class CaspDataset(RandomSplitDataset, Hdf5Dataset):
- file_name = "/data/DATASETS/misc/casp.hdf5" # type: ignore
- dset_name = "casp" # type: ignore
- default_train_frac = 0.7 # type: ignore
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_y(Ytr, Yts)
-
-
-class BlogFeedbackDataset(KnownSplitDataset, Hdf5Dataset):
- file_name = "/data/DATASETS/misc/BlogFeedback.hdf5" # type: ignore
- dset_name = "blog-feedback" # type: ignore
- num_train_samples = 52397 # type: ignore
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_y(Ytr, Yts)
-
-
-class CovTypeDataset(RandomSplitDataset, Hdf5Dataset):
- file_name = "/data/DATASETS/misc/covtype_binary.hdf5" # type: ignore
- dset_name = "covtype" # type: ignore
- default_train_frac = 0.7 # type: ignore
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return convert_to_binary_y(Ytr, Yts) # 1, 2 -> -1, +1
-
-
-class Ijcnn1Dataset(KnownSplitDataset, Hdf5Dataset):
- file_name = "/data/DATASETS/misc/ijcnn1.hdf5" # type: ignore
- dset_name = "ijcnn1" # type: ignore
- num_train_samples = 49990 # type: ignore
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return Xtr, Xts, {} # Data already standardized
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return Ytr.reshape(-1, 1), Yts.reshape(-1, 1), {} # binary-classif : already -1, +1
-
-
-class BuzzDataset(RandomSplitDataset, Hdf5Dataset):
- file_name = "/data/DATASETS/misc/buzz.hdf5" # type: ignore
- dset_name = "buzz" # type: ignore
- default_train_frac = 0.7 # type: ignore
- dset_shape = (583250, 77)
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- # Weird preprocessing from AGW
- Ytr = np.log(Ytr + 1.0)
- Yts = np.log(Yts + 1.0)
- return standardize_y(Ytr, Yts)
-
-
-class Road3DDataset(RandomSplitDataset, Hdf5Dataset):
- file_name = "/data/DATASETS/misc/3droad.hdf5" # type: ignore
- dset_name = "3DRoad" # type: ignore
- default_train_frac = 0.7 # type: ignore
- dset_shape = (434874, 3)
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_y(Ytr, Yts)
-
-
-class HouseEelectricDataset(RandomSplitDataset, Hdf5Dataset):
- file_name = "/data/DATASETS/misc/houseelectric.hdf5" # type: ignore
- dset_name = "HouseElectric" # type: ignore
- default_train_frac = 0.7 # type: ignore
- dset_shape = (2049280, 11)
-
- def preprocess_x(self, Xtr, Xts) -> tuple[np.ndarray, np.ndarray, dict]:
- return standardize_x(Xtr, Xts)
-
- def preprocess_y(self, Ytr: np.ndarray, Yts: np.ndarray) -> tuple[np.ndarray, np.ndarray, dict]:
- # Weird preprocessing from AGW
- Ytr = np.log(Ytr)
- Yts = np.log(Yts)
- return standardize_y(Ytr, Yts)
-
-
-""" Public API """
-
-__LOADERS = {
- Dataset.TIMIT: TimitDataset(),
- Dataset.HIGGS: HiggsDataset(),
- Dataset.MILLIONSONGS: MillionSongsDataset(),
- Dataset.TAXI: NycTaxiDataset(),
- Dataset.YELP: YelpDataset(),
- Dataset.FLIGHTS: FlightsDataset(),
- Dataset.SUSY: SusyDataset(),
- Dataset.MNIST: MnistDataset(),
- Dataset.FLIGHTS_CLS: FlightsClsDataset(),
- Dataset.SVHN: SVHNDataset(),
- Dataset.MNIST_SMALL: MnistSmallDataset(),
- Dataset.CIFAR10: CIFAR10Dataset(),
- Dataset.CIFAR10RGB: CIFAR10RGBDataset(),
- Dataset.HOHIGGS: SmallHiggsDataset(),
- Dataset.ICTUS: IctusDataset(),
- Dataset.SYNTH01NOISE: SyntheticDataset(),
- Dataset.CHIET: ChietDataset(),
- Dataset.ENERGY: EnergyDataset(),
- Dataset.BOSTON: BostonDataset(),
- Dataset.PROTEIN: ProteinDataset(),
- Dataset.KIN40K: Kin40kDataset(),
- Dataset.CODRNA: CodRnaDataset(),
- Dataset.SVMGUIDE1: SvmGuide1Dataset(),
- Dataset.PHISHING: PhishingDataset(),
- Dataset.SPACEGA: SpaceGaDataset(),
- Dataset.CADATA: CadataDataset(),
- Dataset.MG: MgDataset(),
- Dataset.CPUSMALL: CpuSmallDataset(),
- Dataset.ABALONE: AbaloneDataset(),
- Dataset.CASP: CaspDataset(),
- Dataset.BLOGFEEDBACK: BlogFeedbackDataset(),
- Dataset.COVTYPE: CovTypeDataset(),
- Dataset.IJCNN1: Ijcnn1Dataset(),
- Dataset.FASHION_MNIST: FashionMnistDataset(),
- Dataset.BUZZ: BuzzDataset(),
- Dataset.ROAD3D: Road3DDataset(),
- Dataset.HOUSEELECTRIC: HouseEelectricDataset(),
-}
-
-
-def get_load_fn(dset: Dataset):
- try:
- return __LOADERS[dset].load_data
- except KeyError:
- raise KeyError(dset, f"No loader function found for dataset {dset}.") from None
-
-
-def get_cv_fn(dset: Dataset):
- try:
- return __LOADERS[dset].load_data_cv
- except KeyError:
- raise KeyError(dset, f"No CV-loader function found for dataset {dset}.") from None
diff --git a/falkon/benchmarks/common/error_metrics.py b/falkon/benchmarks/common/error_metrics.py
deleted file mode 100644
index 0f808814..00000000
--- a/falkon/benchmarks/common/error_metrics.py
+++ /dev/null
@@ -1,297 +0,0 @@
-from typing import Any, Callable, Dict, Generator, List, Tuple, Union
-
-import numpy as np
-
-from .benchmark_utils import Dataset
-
-
-def _ensure_numpy(*arrays) -> Generator[np.ndarray, None, None]:
- for arr in arrays:
- if not isinstance(arr, np.ndarray):
- yield arr.cpu().numpy()
- else:
- yield arr
-
-
-def _ensure_numpy_or_float(*vals) -> Generator[Union[float, np.ndarray], None, None]:
- for val in vals:
- if (
- not isinstance(val, np.ndarray)
- and not isinstance(val, np.float64)
- and not isinstance(val, np.float32)
- and not isinstance(val, float)
- ):
- yield val.cpu().numpy()
- else:
- yield val
-
-
-def mse(y_true, y_pred, **kwargs):
- y_true, y_pred = _ensure_numpy(y_true, y_pred)
-
- y_true = y_true.reshape((-1,))
- y_pred = y_pred.reshape((-1,))
-
- test_mse = ((y_pred - y_true) ** 2).mean()
- return test_mse, "MSE"
-
-
-def rmse(y_true, y_pred, **kwargs):
- pred_mse = mse(y_true, y_pred, **kwargs)[0]
- pred_rmse = np.sqrt(pred_mse)
- return pred_rmse, "RMSE"
-
-
-def rmse_with_std(y_true, y_pred, **kwargs):
- Y_std = kwargs["Y_std"]
-
- y_true, y_pred = _ensure_numpy(y_true, y_pred)
- (Y_std,) = _ensure_numpy_or_float(Y_std)
-
- y_true = y_true.reshape((-1,))
- y_pred = y_pred.reshape((-1,))
-
- test_mse = np.sqrt(((y_pred * Y_std - y_true * Y_std) ** 2).mean())
- return test_mse, "RMSE"
-
-
-def nrmse(y_true, y_pred, **kwargs):
- Y_mean = kwargs["Y_mean"]
- (Y_mean,) = _ensure_numpy_or_float(Y_mean)
- Y_std = kwargs.get("Y_std", 1.0)
- (Y_std,) = _ensure_numpy_or_float(Y_std)
-
- y_true = y_true * Y_std + Y_mean
- y_pred = y_pred * Y_std + Y_mean
-
- pred_rmse = rmse(y_true, y_pred, **kwargs)[0]
- pred_nrmse = np.abs(pred_rmse / Y_mean)
- return pred_nrmse, "NRMSE"
-
-
-def ms_calc_mse(y_true, y_pred, **kwargs):
- Y_std = kwargs["Y_std"]
-
- y_true, y_pred = _ensure_numpy(y_true, y_pred)
- (Y_std,) = _ensure_numpy_or_float(Y_std)
-
- y_true = y_true.reshape((-1,))
- y_pred = y_pred.reshape((-1,))
-
- test_mse = ((y_pred * Y_std - y_true * Y_std) ** 2).mean()
- return test_mse, "MSE"
-
-
-def ms_calc_relerr(y_true, y_pred, **kwargs):
- Y_std = kwargs.get("Y_std", 1.0)
- Y_mean = kwargs["Y_mean"]
-
- y_true, y_pred = _ensure_numpy(y_true, y_pred)
- Y_std, Y_mean = _ensure_numpy_or_float(Y_std, Y_mean)
-
- y_true = y_true.reshape((-1,))
- y_pred = y_pred.reshape((-1,))
-
- Uypred = y_pred * Y_std + Y_mean
- Uytrue = y_true * Y_std + Y_mean
- rel_err = np.sqrt(np.mean(((Uytrue - Uypred) / Uytrue) ** 2))
- return rel_err, "relative error"
-
-
-def ms_calc_mse_tf(y_true, y_pred, **kwargs):
- Y_std = kwargs["Y_std"]
-
- import tensorflow as tf
-
- return tf.math.reduce_mean(
- tf.math.square(
- tf.math.subtract(
- tf.math.multiply(tf.reshape(y_true, (-1,)), Y_std), tf.math.multiply(tf.reshape(y_pred, (-1,)), Y_std)
- )
- )
- )
-
-
-def rmse_with_std_tf(y_true, y_pred, **kwargs):
- Y_std = kwargs["Y_std"]
-
- import tensorflow as tf
-
- return tf.math.sqrt(
- tf.math.reduce_mean(
- tf.math.square(
- tf.math.subtract(
- tf.math.multiply(tf.reshape(y_true, (-1,)), Y_std),
- tf.math.multiply(tf.reshape(y_pred, (-1,)), Y_std),
- )
- )
- )
- )
-
-
-def rmse_tf(y_true, y_pred, **kwargs):
- import tensorflow as tf
-
- return tf.math.sqrt(
- tf.math.reduce_mean(tf.math.square(tf.math.subtract(tf.reshape(y_true, (-1,)), tf.reshape(y_pred, (-1,)))))
- )
-
-
-def mse_tf(y_true, y_pred, **kwargs):
- import tensorflow as tf
-
- return tf.math.reduce_mean(tf.math.square(tf.math.subtract(tf.reshape(y_true, (-1,)), tf.reshape(y_pred, (-1,)))))
-
-
-def higgs_calc_auc(y_true, y_pred, **kwargs):
- from sklearn import metrics
-
- y_true, y_pred = _ensure_numpy(y_true, y_pred)
- y_true = y_true.reshape((-1, 1))
- y_pred = y_pred.reshape((-1, 1))
-
- if np.min(y_true) == 0:
- y_true = y_true * 2 - 1
- y_pred = y_pred * 2 - 1
-
- fpr, tpr, thresholds = metrics.roc_curve(y_true, y_pred, pos_label=1)
- auc = metrics.auc(fpr, tpr)
- return (1.0 - auc), "1-AUC"
-
-
-def binary_cerr(y_true, y_pred, **kwargs):
- y_true, y_pred = _ensure_numpy(y_true, y_pred)
-
- if np.min(y_true) == 0:
- y_true = y_true * 2 - 1
- y_pred = y_pred * 2 - 1
-
- if y_pred.ndim > 1 and y_pred.shape[1] > 1:
- y_pred = np.argmax(y_pred, axis=1) * 2 - 1
- if y_true.ndim > 1 and y_true.shape[1] > 1:
- y_true = np.argmax(y_true, axis=1) * 2 - 1
-
- c_err = np.mean(np.sign(y_pred.ravel()) != np.sign(y_true.ravel()))
- return c_err, "c-error"
-
-
-def mnist_calc_cerr(y_true, y_pred, **kwargs):
- y_true, y_pred = _ensure_numpy(y_true, y_pred)
-
- if y_true.ndim > 1 and y_true.shape[1] > 2:
- y_true = np.argmax(y_true, axis=1)
- if y_pred.ndim > 1 and y_pred.shape[1] > 2:
- y_pred = np.argmax(y_pred, axis=1)
-
- return np.mean(y_true.ravel() != y_pred.ravel()), "c-error"
-
-
-def mnist_calc_cerr_tf(y_true, y_pred, **kwargs):
- import tensorflow as tf
-
- y_true = tf.math.argmax(y_true, axis=1, output_type=tf.dtypes.int32)
- y_pred = tf.math.argmax(y_pred, axis=1, output_type=tf.dtypes.int32)
-
- return tf.reduce_mean(tf.cast(tf.math.equal(y_true, y_pred), tf.dtypes.float64))
-
-
-def binary_cerr_tf(y_true, y_pred, **kwargs):
- import tensorflow as tf
-
- return tf.reduce_mean(
- tf.cast(
- tf.math.not_equal(tf.math.sign(tf.reshape(y_true, [-1])), tf.math.sign(tf.reshape(y_pred, [-1]))),
- tf.dtypes.float64,
- )
- )
-
-
-def timit_calc_error(y_true, y_pred, **kwargs):
- y_true, y_pred = _ensure_numpy(y_true, y_pred)
-
- if y_true.ndim > 1 and y_true.shape[1] > 2:
- y_true = np.argmax(np.sum(y_true.reshape((-1, 48, 3)), axis=2), axis=1)
- if y_pred.ndim > 1 and y_pred.shape[1] > 2:
- y_pred = np.argmax(np.sum(y_pred.reshape((-1, 48, 3)), axis=2), axis=1)
-
- return np.mean(y_true.ravel() != y_pred.ravel()), "c-error"
-
-
-def timit_calc_error_tf(y_true, y_pred, **kwargs):
- import tensorflow as tf
-
- y_true = tf.math.argmax(
- tf.math.reduce_sum(tf.reshape(y_true, (-1, 48, 3)), axis=2), axis=1, output_type=tf.dtypes.int32
- )
-
- y_pred = tf.math.argmax(
- tf.math.reduce_sum(tf.reshape(y_pred, (-1, 48, 3)), axis=2), axis=1, output_type=tf.dtypes.int32
- )
-
- return tf.reduce_mean(tf.cast(tf.math.not_equal(y_true, y_pred), tf.dtypes.float64))
-
-
-ARRAY_TYPE = Any
-ERROR_FN_TYPE = Callable[[Any, Any, Dict[str, Any]], Tuple[float, str]]
-
-ERROR_METRICS: Dict[Dataset, List[ERROR_FN_TYPE]] = {
- Dataset.TIMIT: [timit_calc_error],
- Dataset.MILLIONSONGS: [ms_calc_relerr, ms_calc_mse],
- Dataset.HIGGS: [higgs_calc_auc, binary_cerr],
- Dataset.HOHIGGS: [binary_cerr, higgs_calc_auc],
- Dataset.TAXI: [rmse_with_std],
- Dataset.YELP: [rmse],
- Dataset.FLIGHTS: [mse],
- Dataset.SUSY: [higgs_calc_auc, binary_cerr],
- Dataset.FLIGHTS_CLS: [binary_cerr, higgs_calc_auc],
- Dataset.MNIST: [mnist_calc_cerr],
- Dataset.MNIST_SMALL: [mnist_calc_cerr],
- Dataset.SVHN: [mnist_calc_cerr],
- Dataset.CIFAR10: [mnist_calc_cerr],
- Dataset.CIFAR10RGB: [mnist_calc_cerr],
- Dataset.ICTUS: [binary_cerr],
- Dataset.SYNTH01NOISE: [rmse],
- Dataset.CHIET: [nrmse],
- Dataset.ENERGY: [nrmse],
- Dataset.BOSTON: [nrmse],
- Dataset.PROTEIN: [nrmse],
- Dataset.KIN40K: [nrmse],
- Dataset.CODRNA: [binary_cerr],
- Dataset.SVMGUIDE1: [binary_cerr],
- Dataset.PHISHING: [binary_cerr],
- Dataset.SPACEGA: [nrmse],
- Dataset.CADATA: [nrmse],
- Dataset.MG: [nrmse],
- Dataset.CPUSMALL: [nrmse],
- Dataset.ABALONE: [nrmse],
- Dataset.CASP: [nrmse],
- Dataset.BLOGFEEDBACK: [rmse],
- Dataset.COVTYPE: [binary_cerr],
- Dataset.IJCNN1: [binary_cerr],
- Dataset.FASHION_MNIST: [mnist_calc_cerr],
- Dataset.BUZZ: [nrmse],
- Dataset.ROAD3D: [nrmse],
- Dataset.HOUSEELECTRIC: [nrmse],
-}
-TF_ERROR_METRICS: Dict[Dataset, ERROR_FN_TYPE] = {
- Dataset.TIMIT: timit_calc_error_tf,
- Dataset.MILLIONSONGS: ms_calc_mse_tf,
- Dataset.FLIGHTS: mse_tf,
- Dataset.MNIST: mnist_calc_cerr_tf,
- Dataset.SUSY: binary_cerr_tf,
- Dataset.FLIGHTS_CLS: binary_cerr_tf,
-}
-
-
-def get_err_fns(dset: Dataset) -> List[ERROR_FN_TYPE]:
- try:
- return ERROR_METRICS[dset]
- except KeyError:
- raise KeyError(dset, f"No error metrics found for dataset {dset}.") from None
-
-
-def get_tf_err_fn(dset: Dataset) -> ERROR_FN_TYPE:
- try:
- return TF_ERROR_METRICS[dset]
- except KeyError:
- raise KeyError(dset, f"No tensorflow error metric found for dataset {dset}.") from None
diff --git a/falkon/benchmarks/common/summary.py b/falkon/benchmarks/common/summary.py
deleted file mode 100644
index 0c7dd049..00000000
--- a/falkon/benchmarks/common/summary.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import os
-
-from torch.utils.tensorboard import SummaryWriter
-
-__all__ = ("get_writer",)
-
-LOG_DIR = "./logs/tensorboard"
-_writer = None
-
-
-def get_writer(name=None):
- global _writer
- if _writer is not None:
- return _writer
-
- log_dir = LOG_DIR
- if name is not None:
- log_dir = os.path.join(log_dir, name)
-
- _writer = SummaryWriter(log_dir=log_dir, max_queue=5, flush_secs=30)
- return _writer
diff --git a/falkon/benchmarks/falkon_benchmarks/benchmark_flights.sh b/falkon/benchmarks/falkon_benchmarks/benchmark_flights.sh
deleted file mode 100755
index 8aeec6f5..00000000
--- a/falkon/benchmarks/falkon_benchmarks/benchmark_flights.sh
+++ /dev/null
@@ -1,118 +0,0 @@
-#!/bin/bash
-if [ ! -d logs ]; then
- mkdir logs
-fi
-
-export CUDA_VISIBLE_DEVICES="1"
-
-# Prepare conda
-CONDA_BASE=$(conda info --base)
-source $CONDA_BASE/etc/profile.d/conda.sh
-
-DSET="flights"
-TRAIN_DATAPOINTS=5930000
-PY_LAUNCHER="benchmark_runner.py"
-
-# Falkon 32
-if [ false = true ]; then
- ALGO="falkon"
- M=75000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 12 --kernel gaussian 2>&1 | tee -a $OUTFILE
- exit 1;
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 13 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 14 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 15 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 16 --kernel gaussian 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# GPytorch
-if [ true = false ]; then
- ALGO="gpytorch-reg"
- M=2000
- VAR="natgrad"
- BATCH_SIZE=16000
- LR=0.005
- NATGRAD_LR=0.005
- EPOCHS=30
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M --batch-size $BATCH_SIZE \
- --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 0.9 -e $EPOCHS --seed 12 \
- --learn-hyperparams 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M --batch-size $BATCH_SIZE \
- --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 0.9 -e $EPOCHS --seed 13 \
- --learn-hyperparams 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M --batch-size $BATCH_SIZE \
- --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 0.9 -e $EPOCHS --seed 14 \
- --learn-hyperparams 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M --batch-size $BATCH_SIZE \
- --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 0.9 -e $EPOCHS --seed 15 \
- --learn-hyperparams 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M --batch-size $BATCH_SIZE \
- --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 0.9 -e $EPOCHS --seed 16 \
- --learn-hyperparams 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# GPFlow
-if [ true = true ]; then
- ALGO="gpflow-reg"
- M=2000
- VAR="diag"
- BATCH_SIZE=16000
- LR=0.005
- EPOCHS=$(( $TRAIN_DATAPOINTS / $BATCH_SIZE * 25 ))
- ERROR_EVERY=$(( $TRAIN_DATAPOINTS / $BATCH_SIZE ))
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- conda activate gpflow
- echo "Running ${ALGO} on ${DSET} data for ${EPOCHS} iterations, log will be saved in ${OUTFILE}"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 1 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.0000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 1 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.0000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 1 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.0000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 1 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.0000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 1 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.0000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
- echo "${ALGO} on ${DSET} data complete..."
-fi
-
-# EigenPro
-if [ true = false ]; then
- ALGO="eigenpro"
- OUTFILE="logs/${DSET}_${ALGO}.txt"
- conda activate epro2
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 1 -e 10 --n-subsample 12000 \
- --data-subsample 1000000 --seed 12 --eta-divisor 10 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 1 -e 10 --n-subsample 12000 \
- --data-subsample 1000000 --seed 13 --eta-divisor 10 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 1 -e 10 --n-subsample 12000 \
- --data-subsample 1000000 --seed 14 --eta-divisor 10 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 1 -e 10 --n-subsample 12000 \
- --data-subsample 1000000 --seed 15 --eta-divisor 10 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 1 -e 10 --n-subsample 12000 \
- --data-subsample 1000000 --seed 16 --eta-divisor 10 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
diff --git a/falkon/benchmarks/falkon_benchmarks/benchmark_flights_cls.sh b/falkon/benchmarks/falkon_benchmarks/benchmark_flights_cls.sh
deleted file mode 100755
index ff9f0f83..00000000
--- a/falkon/benchmarks/falkon_benchmarks/benchmark_flights_cls.sh
+++ /dev/null
@@ -1,139 +0,0 @@
-#!/bin/bash
-if [ ! -d logs ]; then
- mkdir logs
-fi
-
-export CUDA_VISIBLE_DEVICES="1"
-
-# Prepare conda
-CONDA_BASE=$(conda info --base)
-source $CONDA_BASE/etc/profile.d/conda.sh
-
-DSET="flights-cls"
-PY_LAUNCHER="benchmark_runner.py"
-
-if [ true = false ]; then
- conda activate torch
- ALGO="falkon-cls"
- M=100000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 0.9 -e 0 \
- --penalty-list 1e-3 1e-6 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 \
- --iter-list 3 3 3 8 8 8 8 8 8 \
- -M $M -t $TYPE --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 0.9 -e 0 \
- --penalty-list 1e-3 1e-6 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 \
- --iter-list 3 3 3 8 8 8 8 8 8 \
- -M $M -t $TYPE --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 0.9 -e 0 \
- --penalty-list 1e-3 1e-6 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 \
- --iter-list 3 3 3 8 8 8 8 8 8 \
- -M $M -t $TYPE --seed 14 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# Falkon 32
-if [ true = false ]; then
- ALGO="falkon"
- M=100000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 10 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 12 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 10 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 13 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 10 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 14 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 10 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 15 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 10 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 16 --kernel gaussian 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-
-# GPytorch
-if [ true = false ]; then
- ALGO="gpytorch-cls"
- M=2000
- VAR="tril_natgrad"
- BATCH_SIZE=16000
- EPOCHS=20
- LR=0.002
- NATGRAD_LR=0.002
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M --batch-size $BATCH_SIZE \
- --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 0.9 -e $EPOCHS --seed 12 \
- --learn-hyperparams 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M --batch-size $BATCH_SIZE \
- --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 0.9 -e $EPOCHS --seed 13 \
- --learn-hyperparams 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M --batch-size $BATCH_SIZE \
- --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 0.9 -e $EPOCHS --seed 14 \
- --learn-hyperparams 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M --batch-size $BATCH_SIZE \
- --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 0.9 -e $EPOCHS --seed 15 \
- --learn-hyperparams 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M --batch-size $BATCH_SIZE \
- --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 0.9 -e $EPOCHS --seed 16 \
- --learn-hyperparams 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# GPFlow
-if [ true = true ]; then
- ALGO="gpflow-cls"
- M=2000
- VAR="diag"
- BATCH_SIZE=16000
- EPOCHS=7400
- ERROR_EVERY=370
- LR=0.005
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- conda activate gpflow
- echo "Running ${ALGO} on ${DSET} data, log will be saved in ${OUTFILE}"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 1 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.0000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 1 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.0000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 1 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.0000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 1 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.0000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 1 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.0000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
- echo "${ALGO} on ${DSET} data complete..."
-fi
-
-# EigenPro
-if [ false = true ]; then
- ALGO="eigenpro"
- OUTFILE="logs/${DSET}_${ALGO}.txt"
- ETA_DIVISOR=12
- conda activate epro2
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 0.9 -e 12 --n-subsample 12000 \
- --data-subsample 1000000 --seed 12 --eta-divisor $ETA_DIVISOR 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 0.9 -e 12 --n-subsample 12000 \
- --data-subsample 1000000 --seed 13 --eta-divisor $ETA_DIVISOR 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 0.9 -e 12 --n-subsample 12000 \
- --data-subsample 1000000 --seed 14 --eta-divisor $ETA_DIVISOR 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 0.9 -e 12 --n-subsample 12000 \
- --data-subsample 1000000 --seed 15 --eta-divisor $ETA_DIVISOR 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 0.9 -e 12 --n-subsample 12000 \
- --data-subsample 1000000 --seed 16 --eta-divisor $ETA_DIVISOR 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
diff --git a/falkon/benchmarks/falkon_benchmarks/benchmark_flk_1gpu.sh b/falkon/benchmarks/falkon_benchmarks/benchmark_flk_1gpu.sh
deleted file mode 100755
index eeebc9da..00000000
--- a/falkon/benchmarks/falkon_benchmarks/benchmark_flk_1gpu.sh
+++ /dev/null
@@ -1,258 +0,0 @@
-#!/bin/bash
-if [ ! -d logs ]; then
- mkdir logs
-fi
-
-export CUDA_VISIBLE_DEVICES="0"
-PY_LAUNCHER="benchmark_runner.py"
-
-# Prepare conda
-CONDA_BASE=$(conda info --base)
-source $CONDA_BASE/etc/profile.d/conda.sh
-
-# FLIGHTS-CLS
-DSET="flights-cls"
-if [ false = true ]; then
- conda activate torch
- ALGO="falkon-cls"
- M=100000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 0.9 -e 0 \
- --penalty-list 1e-3 1e-6 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 \
- --iter-list 3 3 3 8 8 8 8 8 8 \
- -M $M -t $TYPE --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 0.9 -e 0 \
- --penalty-list 1e-3 1e-6 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 \
- --iter-list 3 3 3 8 8 8 8 8 8 \
- -M $M -t $TYPE --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 0.9 -e 0 \
- --penalty-list 1e-3 1e-6 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 \
- --iter-list 3 3 3 8 8 8 8 8 8 \
- -M $M -t $TYPE --seed 14 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-if [ true = false ]; then
- ALGO="falkon"
- M=100000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- conda activate torch
- #PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 0.9 \
- # --penalty 1e-8 --seed 12 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 13 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 14 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 15 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 16 --kernel gaussian 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# FLIGHTS
-DSET="flights"
-if [ false = true ]; then
- ALGO="falkon"
- M=100000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 12 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 13 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 14 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 15 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 0.9 \
- --penalty 1e-8 --seed 16 --kernel gaussian 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# HIGGS
-DSET="higgs"
-if [ true = true ]; then
- conda activate torch
- ALGO="falkon-cls"
- M=100000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 5 -e 0 \
- --penalty-list 1e-3 1e-6 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 \
- --iter-list 3 3 3 8 8 8 8 8 8 --seed 12 \
- -M $M -t $TYPE 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 5 -e 0 \
- --penalty-list 1e-3 1e-6 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 \
- --iter-list 3 3 3 8 8 8 8 8 8 --seed 13 \
- -M $M -t $TYPE 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 5 -e 0 \
- --penalty-list 1e-3 1e-6 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 \
- --iter-list 3 3 3 8 8 8 8 8 8 --seed 14 \
- -M $M -t $TYPE 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-if [ false = true ]; then
- conda activate torch
- ALGO="falkon"
- M=120000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 12 --sigma 3.8 --penalty 1e-7 \
- -M $M -t $TYPE --kernel gaussian --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 12 --sigma 3.8 --penalty 1e-7 \
- -M $M -t $TYPE --kernel gaussian --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 12 --sigma 3.8 --penalty 1e-7 \
- -M $M -t $TYPE --kernel gaussian --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 12 --sigma 3.8 --penalty 1e-7 \
- -M $M -t $TYPE --kernel gaussian --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 12 --sigma 3.8 --penalty 1e-7 \
- -M $M -t $TYPE --kernel gaussian --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# MILLIONSONGS
-DSET="millionsongs"
-if [ false = true ]; then
- ALGO="falkon"
- M=50000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 7 \
- --penalty 2e-6 --seed 12 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 7 \
- --penalty 2e-6 --seed 13 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 7 \
- --penalty 2e-6 --seed 14 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 7 \
- --penalty 2e-6 --seed 15 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 7 \
- --penalty 2e-6 --seed 16 --kernel gaussian 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# SUSY
-DSET="susy"
-if [ false = true ]; then
- conda activate torch
- ALGO="falkon-cls"
- M=20000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 3 -e 0 \
- --penalty-list 1e-4 1e-7 1e-8 1e-8 1e-8 1e-8 1e-8 1e-8 \
- --iter-list 5 5 5 8 8 8 8 8 --seed 12 \
- -M $M -t $TYPE 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 3 -e 0 \
- --penalty-list 1e-4 1e-6 1e-8 1e-8 1e-8 1e-8 1e-8 1e-8 \
- --iter-list 5 5 5 8 8 8 8 8 --seed 13 \
- -M $M -t $TYPE 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 3 -e 0 \
- --penalty-list 1e-4 1e-6 1e-8 1e-8 1e-8 1e-8 1e-8 1e-8 \
- --iter-list 5 5 5 8 8 8 8 8 --seed 14 \
- -M $M -t $TYPE 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 3 -e 0 \
- --penalty-list 1e-4 1e-6 1e-8 1e-8 1e-8 1e-8 1e-8 1e-8 \
- --iter-list 5 5 5 8 8 8 8 8 --seed 15 \
- -M $M -t $TYPE 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 3 -e 0 \
- --penalty-list 1e-4 1e-6 1e-8 1e-8 1e-8 1e-8 1e-8 1e-8 \
- --iter-list 5 5 5 8 8 8 8 8 --seed 16 \
- -M $M -t $TYPE 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-if [ false = true ]; then
- conda activate torch
- ALGO="falkon"
- M=30000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 10 --sigma 3.0 --penalty 1e-6 \
- -M $M -t $TYPE --kernel gaussian --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 10 --sigma 3.0 --penalty 1e-6 \
- -M $M -t $TYPE --kernel gaussian --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 10 --sigma 3.0 --penalty 1e-6 \
- -M $M -t $TYPE --kernel gaussian --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 10 --sigma 3.0 --penalty 1e-6 \
- -M $M -t $TYPE --kernel gaussian --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 10 --sigma 3.0 --penalty 1e-6 \
- -M $M -t $TYPE --kernel gaussian --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# TAXI
-DSET="taxi"
-if [ false = true ]; then
- ALGO="falkon"
- M=100000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 10 -d $DSET -t $TYPE \
- --sigma 0.9 --penalty 2e-7 --kernel gaussian --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 7 -d $DSET -t $TYPE \
- --sigma 0.9 --penalty 2e-7 --kernel gaussian --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 7 -d $DSET -t $TYPE \
- --sigma 0.9 --penalty 2e-7 --kernel gaussian --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 7 -d $DSET -t $TYPE \
- --sigma 0.9 --penalty 2e-7 --kernel gaussian --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 7 -d $DSET -t $TYPE \
- --sigma 0.9 --penalty 2e-7 --kernel gaussian --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# TIMIT
-DSET="timit"
-if [ false = true ]; then
- ALGO="falkon"
- M=100000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M -t $TYPE -e 10 \
- --sigma 14.5 --penalty 5e-9 --kernel gaussian --seed 12 \
- 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M -t $TYPE -e 10 \
- --sigma 14.5 --penalty 5e-9 --kernel gaussian --seed 13 \
- 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M -t $TYPE -e 10 \
- --sigma 14.5 --penalty 5e-9 --kernel gaussian --seed 14 \
- 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M -t $TYPE -e 10 \
- --sigma 14.5 --penalty 5e-9 --kernel gaussian --seed 15 \
- 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M -t $TYPE -e 10 \
- --sigma 14.5 --penalty 5e-9 --kernel gaussian --seed 16 \
- 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# YELP
-DSET="yelp"
-if [ false = true ]; then
- ALGO="falkon"
- M=50000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 15 -d $DSET \
- -t $TYPE --sigma 20.0 --kernel gaussian \
- --penalty 1e-6 --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 15 -d $DSET \
- -t $TYPE --sigma 20.0 --kernel gaussian \
- --penalty 1e-6 --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 15 -d $DSET \
- -t $TYPE --sigma 20.0 --kernel gaussian \
- --penalty 1e-6 --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 15 -d $DSET \
- -t $TYPE --sigma 20.0 --kernel gaussian \
- --penalty 1e-6 --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 15 -d $DSET \
- -t $TYPE --sigma 20.0 --kernel gaussian \
- --penalty 1e-6 --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
diff --git a/falkon/benchmarks/falkon_benchmarks/benchmark_higgs.sh b/falkon/benchmarks/falkon_benchmarks/benchmark_higgs.sh
deleted file mode 100755
index ce81ac6f..00000000
--- a/falkon/benchmarks/falkon_benchmarks/benchmark_higgs.sh
+++ /dev/null
@@ -1,145 +0,0 @@
-#!/bin/bash
-if [ ! -d logs ]; then
- mkdir logs
-fi
-
-# Prepare GPU
-export CUDA_VISIBLE_DEVICES="1"
-
-# Prepare conda
-CONDA_BASE=$(conda info --base)
-source $CONDA_BASE/etc/profile.d/conda.sh
-
-# Common variables
-DSET="higgs"
-PY_LAUNCHER="benchmark_runner.py"
-
-
-# Falkon Logistic
-if [ true = false ]; then
- conda activate torch
- ALGO="falkon-cls"
- M=100000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 5 -e 0 \
- --penalty-list 1e-3 1e-6 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 \
- --iter-list 3 3 3 8 8 8 8 8 8 --seed 13 \
- -M $M -t $TYPE 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 5 -e 0 \
- --penalty-list 1e-3 1e-6 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 1e-9 \
- --iter-list 3 3 3 8 8 8 8 8 8 --seed 14 \
- -M $M -t $TYPE 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-if [ true = false ]; then
- conda activate torch
- ALGO="falkon"
- M=120000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 20 --sigma 3.8 --penalty 1e-7 \
- -M $M -t $TYPE --kernel gaussian --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 20 --sigma 3.8 --penalty 1e-7 \
- -M $M -t $TYPE --kernel gaussian --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 20 --sigma 3.8 --penalty 1e-7 \
- -M $M -t $TYPE --kernel gaussian --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 20 --sigma 3.8 --penalty 1e-7 \
- -M $M -t $TYPE --kernel gaussian --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 20 --sigma 3.8 --penalty 1e-7 \
- -M $M -t $TYPE --kernel gaussian --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# GPyTorch - SGPR
-if [ true = false ]; then
- ALGO="gpytorch-sgpr"
- M=50
- LR=0.001
- EPOCHS=15
- OUTFILE="logs/${DSET}_${ALGO}_${M}_learned_centers.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --lr $LR --learn-hyperparams -e $EPOCHS \
- --seed 12 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-
-# GPytorch
-if [ true = false ]; then
- ALGO="gpytorch-cls"
- M=1000
- VAR="tril_natgrad"
- LR=0.02
- NATGRAD_LR=0.02
- BATCH_SIZE=16000
- EPOCHS=15
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --lr $LR --natgrad-lr $NATGRAD_LR --batch-size $BATCH_SIZE --learn-hyperparams \
- --var-dist $VAR --sigma 5 -e $EPOCHS --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --lr $LR --natgrad-lr $NATGRAD_LR --batch-size $BATCH_SIZE --learn-hyperparams \
- --var-dist $VAR --sigma 5 -e $EPOCHS --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --lr $LR --natgrad-lr $NATGRAD_LR --batch-size $BATCH_SIZE --learn-hyperparams \
- --var-dist $VAR --sigma 5 -e $EPOCHS --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --lr $LR --natgrad-lr $NATGRAD_LR --batch-size $BATCH_SIZE --learn-hyperparams \
- --var-dist $VAR --sigma 5 -e $EPOCHS --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --lr $LR --natgrad-lr $NATGRAD_LR --batch-size $BATCH_SIZE --learn-hyperparams \
- --var-dist $VAR --sigma 5 -e $EPOCHS --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# GPFlow
-if [ true = true ]; then
- ALGO="gpflow-cls"
- M=2000
- VAR="diag"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- BATCH_SIZE=16000
- EPOCHS=33000 # Around 60 epochs
- ERROR_EVERY=550
- LR=0.02
- conda activate gpflow
- echo "Running ${ALGO} on ${DSET} data, log will be saved in ${OUTFILE}"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 5 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 5 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 5 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 5 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 5 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
- echo "${ALGO} on ${DSET} data complete..."
-fi
-
-# EigenPro
-if [ true = false ]; then
- # NOTE: EigenPro might take forever on this dataset
- ALGO="eigenpro"
- OUTFILE="logs/${DSET}_${ALGO}.txt"
- conda activate epro2
- echo "Running ${ALGO} on ${DSET} data, log will be saved in ${OUTFILE}"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 5 -e 10 --seed 12 2>&1 | tee -a $OUTFILE
- conda deactivate
- echo "${ALGO} on ${DSET} data complete..."
-fi
diff --git a/falkon/benchmarks/falkon_benchmarks/benchmark_millionsongs.sh b/falkon/benchmarks/falkon_benchmarks/benchmark_millionsongs.sh
deleted file mode 100755
index e005ec75..00000000
--- a/falkon/benchmarks/falkon_benchmarks/benchmark_millionsongs.sh
+++ /dev/null
@@ -1,129 +0,0 @@
-#!/bin/bash
-if [ ! -d logs ]; then
- mkdir logs
-fi
-
-export CUDA_VISIBLE_DEVICES="1"
-
-# Prepare conda
-CONDA_BASE=$(conda info --base)
-source $CONDA_BASE/etc/profile.d/conda.sh
-
-DSET="millionsongs"
-PY_LAUNCHER="benchmark_runner.py"
-
-# Falkon 64 / 32
-if [ true = false ]; then
- ALGO="falkon"
- M=50000
- TYPE="float64"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 6 \
- --penalty 1e-6 --seed 12 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-if [ true = true ]; then
- ALGO="falkon"
- M=50000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 7 \
- --penalty 2e-6 --seed 12 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 7 \
- --penalty 2e-6 --seed 13 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 7 \
- --penalty 2e-6 --seed 14 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 7 \
- --penalty 2e-6 --seed 15 --kernel gaussian 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 7 \
- --penalty 2e-6 --seed 16 --kernel gaussian 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# GPytorch
-if [ true = true ]; then
- ALGO="gpytorch-reg"
- M=3000
- VAR="natgrad"
- BATCH_SIZE=16000
- LR=0.002
- NATGRAD_LR=0.002
- EPOCHS=20
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M --batch-size $BATCH_SIZE \
- --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 7 -e $EPOCHS --seed 12 \
- --learn-hyperparams 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M --batch-size $BATCH_SIZE \
- --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 7 -e $EPOCHS --seed 13 \
- --learn-hyperparams 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M --batch-size $BATCH_SIZE \
- --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 7 -e $EPOCHS --seed 14 \
- --learn-hyperparams 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M --batch-size $BATCH_SIZE \
- --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 7 -e $EPOCHS --seed 15 \
- --learn-hyperparams 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M --batch-size $BATCH_SIZE \
- --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 7 -e $EPOCHS --seed 16 \
- --learn-hyperparams 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# GPFlow
-if [ true = false ]; then
- ALGO="gpflow-reg"
- M=3000
- VAR="diag"
- BATCH_SIZE=16000
- EPOCHS=3500
- ERROR_EVERY=100
- LR=0.004
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- conda activate gpflow
- echo "Running ${ALGO} on ${DSET} data, log will be saved in ${OUTFILE}"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 5 -t $TYPE --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 12 2>&1 | tee -a $OUTFILE
- exit 1
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 5 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 5 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 5 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 5 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
- echo "${ALGO} on ${DSET} data complete..."
-fi
-
-# EigenPro
-if [ false = true ]; then
- ALGO="eigenpro"
- OUTFILE="logs/${DSET}_${ALGO}.txt"
- conda activate epro2
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 6 -e 10 --n-subsample 12000 \
- --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 6 -e 10 --n-subsample 12000 \
- --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 6 -e 10 --n-subsample 12000 \
- --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 6 -e 10 --n-subsample 12000 \
- --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 6 -e 10 --n-subsample 12000 \
- --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
diff --git a/falkon/benchmarks/falkon_benchmarks/benchmark_mnist.sh b/falkon/benchmarks/falkon_benchmarks/benchmark_mnist.sh
deleted file mode 100755
index 3a7df1ce..00000000
--- a/falkon/benchmarks/falkon_benchmarks/benchmark_mnist.sh
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/bin/bash
-if [ ! -d logs ]; then
- mkdir logs
-fi
-
-# Prepare GPU
-export CUDA_VISIBLE_DEVICES="0,1"
-
-# Prepare conda
-CONDA_BASE=$(conda info --base)
-source $CONDA_BASE/etc/profile.d/conda.sh
-
-# Common variables
-DSET="mnist"
-PY_LAUNCHER="benchmark_runner.py"
-
-# Falkon (32)
-if [ true = true ]; then
- ALGO="falkon"
- M=100000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 20 --sigma 4.4 --penalty 1e-8 \
- -M $M -t $TYPE --kernel gaussian --seed 12 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# GPytorch
-if [ true = false ]; then
- ALGO="gpytorch-cls"
- M=1000
- VAR="diag"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --lr 0.001 --batch-size 4096 --learn-hyperparams \
- --var-dist $VAR --sigma 5 -e 15 --seed 12 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# GPFlow
-if [ true = false ]; then
- ALGO="gpflow-cls"
- M=500
- VAR="full"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- conda activate gpflow
- echo "Running ${ALGO} on ${DSET} data, log will be saved in ${OUTFILE}"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 5 --batch-size 4096 --learn-hyperparams \
- --lr 0.005 --natgrad-lr 0.0001 --epochs 10000 \
- --seed 12 2>&1 | tee -a $OUTFILE
- conda deactivate
- echo "${ALGO} on ${DSET} data complete..."
-fi
-
-# EigenPro
-if [ true = true ]; then
- ALGO="eigenpro"
- OUTFILE="logs/${DSET}_${ALGO}.txt"
- conda activate epro2
- echo "Running ${ALGO} on ${DSET} data, log will be saved in ${OUTFILE}"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 5 -e 5 --seed 12 --data-subsample 1000000 2>&1 | tee -a $OUTFILE
- conda deactivate
- echo "${ALGO} on ${DSET} data complete..."
-fi
diff --git a/falkon/benchmarks/falkon_benchmarks/benchmark_runner.py b/falkon/benchmarks/falkon_benchmarks/benchmark_runner.py
deleted file mode 100644
index 0a6bd062..00000000
--- a/falkon/benchmarks/falkon_benchmarks/benchmark_runner.py
+++ /dev/null
@@ -1,834 +0,0 @@
-import argparse
-import functools
-import sys
-import time
-from typing import List, Optional
-
-import numpy as np
-
-from falkon.benchmarks.common.benchmark_utils import Algorithm, Dataset, DataType, VariationalDistribution
-from falkon.benchmarks.common.datasets import get_cv_fn, get_load_fn
-from falkon.benchmarks.common.error_metrics import get_err_fns, get_tf_err_fn
-
-RANDOM_SEED = 123
-EPRO_DIRECTORY = "../../EigenPro2"
-
-
-def test_model(model, model_name, Xts, Yts, Xtr, Ytr, err_fns):
- test_preds = model.predict(Xts)
- if Xtr is not None:
- train_preds = model.predict(Xtr)
- test_errs, train_errs = [], []
- for err_fn in err_fns:
- test_err, test_err_name = err_fn(Yts, test_preds)
- test_errs.append(test_err)
- print(f"Test {model_name} {test_err_name}: {test_err:9.6f}", flush=True)
- if Xtr is not None and Ytr is not None:
- train_err, train_err_name = err_fn(Ytr, train_preds)
- print(f"Train {model_name} {train_err_name}: {train_err:9.6f}", flush=True)
- train_errs.append(train_err)
- return test_errs, train_errs
-
-
-def run_epro(
- dset: Dataset,
- algorithm: Algorithm,
- dtype: Optional[DataType],
- num_iter: int,
- kernel_sigma: float,
- n_subsample: Optional[int],
- data_subsample: Optional[int],
- q: Optional[int],
- kfold: int,
- eta_divisor: int,
- seed: int,
-):
- sys.path.append(EPRO_DIRECTORY)
- import kernels
- import tensorflow as tf
- from eigenpro import EigenPro
-
- tf.set_random_seed(seed)
- np.random.seed(seed)
-
- if dtype is None:
- dtype = DataType.float32
- if dtype.to_numpy_dtype() != np.float32:
- raise RuntimeError("EigenPro can only run on single-precision floats.")
-
- # Error metrics
- err_fns = get_err_fns(dset)
- tf_err_fn = get_tf_err_fn(dset)
-
- # Create kernel
- kernel = functools.partial(kernels.GaussianKernel, sigma=kernel_sigma)
-
- # Additional fixed params
- mem_gb = 11
- print(
- f"Starting EigenPro solver with {n_subsample} subsamples, "
- f"{q}-top eigensystem, {eta_divisor} eta-divisor. Random seed: {seed}"
- )
- if kfold == 1:
- # Load data
- load_fn = get_load_fn(dset)
- Xtr, Ytr, Xts, Yts, kwargs = load_fn(dtype=dtype.to_numpy_dtype(), as_torch=False)
- if data_subsample is not None:
- Xtr = Xtr[:data_subsample]
- Ytr = Ytr[:data_subsample]
- print(f"SUBSAMPLED INPUT DATA TO {Xtr.shape[0]} TRAINING SAMPLES", flush=True)
- err_fns = [functools.partial(fn, **kwargs) for fn in err_fns]
- tf_err_fn = functools.partial(tf_err_fn, **kwargs)
- tf_err_fn.__name__ = "tf_error"
- model = EigenPro(
- kernel,
- Xtr,
- n_label=Ytr.shape[1],
- mem_gb=mem_gb,
- n_subsample=n_subsample,
- q=q,
- bs=None,
- metric=tf_err_fn,
- seed=seed,
- eta_divisor=eta_divisor,
- )
- print(f"Starting to train model {model} on data {dset}", flush=True)
- t_s = time.time()
- model.fit(Xtr, Ytr, x_val=Xts, y_val=Yts, epochs=np.arange(num_iter - 1) + 1)
- print(f"Training of algorithm {algorithm} on {dset} done in {time.time() - t_s:.2f}s", flush=True)
- test_model(model, f"{algorithm} on {dset}", Xts, Yts, Xtr, Ytr, err_fns)
- else:
- print(f"Will train EigenPro model on data {dset} with {kfold}-fold CV", flush=True)
- load_fn = get_cv_fn(dset)
- test_errs, train_errs = [], []
-
- for it, (Xtr, Ytr, Xts, Yts, kwargs) in enumerate(
- load_fn(k=kfold, dtype=dtype.to_numpy_dtype(), as_torch=False)
- ):
- err_fns = [functools.partial(fn, **kwargs) for fn in err_fns]
- tf_err_fn = functools.partial(tf_err_fn, **kwargs)
- tf_err_fn.__name__ = "tf_error"
- model = EigenPro(
- kernel,
- Xtr,
- n_label=Ytr.shape[1],
- mem_gb=mem_gb,
- n_subsample=n_subsample,
- q=q,
- bs=None,
- metric=tf_err_fn,
- seed=seed,
- )
- print(f"Starting EPRO fit (fold {it})")
- model.fit(Xtr, Ytr, x_val=Xts, y_val=Yts, epochs=np.arange(num_iter - 1) + 1)
- c_test_errs, c_train_errs = test_model(model, f"{algorithm} on {dset}", Xts, Yts, Xtr, Ytr, err_fns)
- train_errs.append(c_train_errs)
- test_errs.append(c_test_errs)
-
- print(f"Full errors: Test {test_errs} - Train {train_errs}")
- print()
- print(f"{kfold}-Fold Error Report")
- for err_fn_i in range(len(err_fns)):
- print(
- f"Final test errors: "
- f"{np.mean([e[err_fn_i] for e in test_errs]):.4f} +- "
- f"{np.std([e[err_fn_i] for e in test_errs]):4f}"
- )
- print(
- f"Final train errors: "
- f"{np.mean([e[err_fn_i] for e in train_errs]):.4f} +- "
- f"{np.std([e[err_fn_i] for e in train_errs]):.4f}"
- )
- print()
-
-
-def run_gpytorch_sgpr(
- dset: Dataset,
- algorithm: Algorithm,
- dtype: Optional[DataType],
- lr: float,
- num_iter: int,
- num_centers: int,
- learn_ind_pts: bool,
- seed: int,
-):
- import torch
-
- from falkon.benchmarks.models.gpytorch_sgpr import GpytorchSGPR
-
- torch.manual_seed(seed)
- np.random.seed(seed)
-
- # Data types
- if dtype is None:
- dtype = DataType.float32
- if dtype.to_numpy_dtype() != np.float32:
- raise RuntimeError(f"{algorithm} can only run on single-precision floats.")
- # Error metrics
- err_fns = get_err_fns(dset)
-
- # Load data
- load_fn = get_load_fn(dset)
- Xtr, Ytr, Xts, Yts, kwargs = load_fn(dtype=dtype.to_numpy_dtype(), as_torch=True)
- if Ytr.shape[1] != 1:
- raise NotImplementedError("GPyTorch SGPR only implemented for single-output problems.")
- err_fns = [functools.partial(fn, **kwargs) for fn in err_fns]
-
- # Extract inducing points at random
- inducing_idx = np.random.choice(Xtr.shape[0], num_centers, replace=False)
- inducing_points = Xtr[inducing_idx].reshape(num_centers, -1)
-
- # Initialize model class
- model = GpytorchSGPR(
- inducing_points, err_fns[0], num_epochs=num_iter, use_cuda=True, lr=lr, learn_ind_pts=learn_ind_pts
- )
-
- # Initialize training
- print(f"Starting to train model {model} on data {dset}", flush=True)
- t_s = time.time()
- model.do_train(Xtr, Ytr, Xts, Yts)
- print(f"Training of {algorithm} on {dset} complete in {time.time() - t_s:.2f}s", flush=True)
- # if isinstance(model, TwoClassVGP):
- # # Need Ys in range [0,1] for correct error calculation
- # Yts = (Yts + 1) / 2
- # Ytr = (Ytr + 1) / 2
- test_model(model, f"{algorithm} on {dset}", Xts, Yts, Xtr, Ytr, err_fns)
-
-
-def run_gpytorch(
- dset: Dataset,
- algorithm: Algorithm,
- dtype: Optional[DataType],
- batch_size: int,
- lr: float,
- natgrad_lr: float,
- num_iter: int,
- num_centers: int,
- kernel_sigma: float,
- var_dist: str,
- learn_ind_pts: bool,
- kfold: int,
- seed: int,
- ind_pt_file: Optional[str] = None,
-):
- import gpytorch
- import torch
-
- from falkon.benchmarks.models.gpytorch_variational_models import MultiClassVGP, RegressionVGP, TwoClassVGP
-
- torch.manual_seed(seed)
- np.random.seed(seed)
-
- # Data types
- if dtype is None:
- dtype = DataType.float32
- if dtype.to_numpy_dtype() != np.float32:
- raise RuntimeError(f"{algorithm} can only run on single-precision floats.")
- # Error metrics
- err_fns = get_err_fns(dset)
-
- def get_model(Xtr, num_outputs, err_fn):
- num_samples = Xtr.shape[0]
- # Inducing points
- inducing_idx = np.random.choice(num_samples, num_centers, replace=False)
- inducing_points = Xtr[inducing_idx].reshape(num_centers, -1)
- print(f"Took {inducing_points.shape[0]} random inducing points")
- # Kernel
- if num_outputs == 1:
- # Kernel has 1 length-scale!
- kernel = gpytorch.kernels.ScaleKernel(gpytorch.kernels.RBFKernel(ard_num_dims=None))
- kernel.base_kernel.lengthscale = kernel_sigma
- # kernel = gpytorch.kernels.keops.RBFKernel(ard_num_dims=None)
- # kernel.lengthscale = kernel_sigma
- else:
- kernel = gpytorch.kernels.ScaleKernel(
- gpytorch.kernels.RBFKernel(ard_num_dims=None, batch_shape=torch.Size([num_outputs]))
- )
- if algorithm == Algorithm.GPYTORCH_CLS:
- if num_outputs == 1:
- # 2 classes
- model = TwoClassVGP(
- inducing_points,
- kernel,
- var_dist=var_dist,
- err_fn=err_fn,
- mb_size=batch_size,
- num_data=num_samples,
- num_epochs=num_iter,
- use_cuda=True,
- lr=lr,
- natgrad_lr=natgrad_lr,
- learn_ind_pts=learn_ind_pts,
- )
- else:
- # multiclass
- model = MultiClassVGP(
- inducing_points,
- kernel,
- num_classes=num_outputs,
- var_dist=var_dist,
- err_fn=err_fn,
- mb_size=batch_size,
- num_data=num_samples,
- num_epochs=num_iter,
- use_cuda=True,
- natgrad_lr=natgrad_lr,
- lr=lr,
- learn_ind_pts=learn_ind_pts,
- )
- else:
- if num_outputs != 1:
- raise NotImplementedError("Multi-output regression not yet implemented.")
- model = RegressionVGP(
- inducing_points,
- kernel,
- var_dist=var_dist,
- err_fn=err_fn,
- mb_size=batch_size,
- num_data=num_samples,
- num_epochs=num_iter,
- use_cuda=True,
- natgrad_lr=natgrad_lr,
- lr=lr,
- learn_ind_pts=learn_ind_pts,
- )
- return model
-
- # Load data
- load_fn = get_load_fn(dset)
- Xtr, Ytr, Xts, Yts, kwargs = load_fn(dtype=dtype.to_numpy_dtype(), as_torch=True)
- err_fns = [functools.partial(fn, **kwargs) for fn in err_fns]
- model = get_model(Xtr, Ytr.shape[1], err_fns[0])
- print(f"Starting to train model {model} on data {dset}", flush=True)
- t_s = time.time()
- with gpytorch.settings.fast_computations(False, False, False):
- model.do_train(Xtr, Ytr, Xts, Yts)
- print(f"Training of {algorithm} on {dset} complete in {time.time() - t_s:.2f}s", flush=True)
- if isinstance(model, TwoClassVGP):
- # Need Ys in range [0,1] for correct error calculation
- Yts = (Yts + 1) / 2
- Ytr = (Ytr + 1) / 2
- with gpytorch.settings.fast_computations(False, False, False):
- test_model(model, f"{algorithm} on {dset}", Xts, Yts, Xtr, Ytr, err_fns)
-
-
-def run_falkon(
- dset: Dataset,
- algorithm: Algorithm,
- dtype: Optional[DataType],
- num_iter: int,
- num_centers: int,
- kernel_sigma: float,
- penalty: float,
- kernel: str,
- kfold: int,
- seed: int,
-):
- import torch
-
- from falkon import kernels
- from falkon.models import falkon
- from falkon.utils import TicToc
-
- torch.manual_seed(seed)
- np.random.seed(seed)
-
- # Data types
- if dtype is None:
- dtype = DataType.float64
- # Arguments
- if kernel.lower() == "gaussian":
- k = kernels.GaussianKernel(kernel_sigma)
- elif kernel.lower() == "laplacian":
- k = kernels.LaplacianKernel(kernel_sigma)
- elif kernel.lower() == "linear":
- k = kernels.LinearKernel(beta=1.0, gamma=kernel_sigma)
- else:
- raise ValueError(f"Kernel {kernel} not understood for algorithm {algorithm}")
-
- opt = falkon.FalkonOptions(
- compute_arch_speed=False, no_single_kernel=True, pc_epsilon_32=1e-6, pc_epsilon_64=1e-13, debug=True
- )
- flk = falkon.Falkon(
- kernel=k, penalty=penalty, M=num_centers, maxiter=num_iter, seed=seed, error_fn=None, error_every=1, options=opt
- )
-
- # Error metrics
- err_fns = get_err_fns(dset)
- if kfold == 1:
- # Load data
- load_fn = get_load_fn(dset)
- Xtr, Ytr, Xts, Yts, kwargs = load_fn(dtype=dtype.to_numpy_dtype(), as_torch=True)
- Xtr = Xtr.pin_memory()
- Ytr = Ytr.pin_memory()
- err_fns = [functools.partial(fn, **kwargs) for fn in err_fns]
- with TicToc("FALKON ALGORITHM"):
- flk.error_fn = err_fns[0]
- print(f"Starting to train model {flk} on data {dset}", flush=True)
- flk.fit(Xtr, Ytr, Xts, Yts)
- test_model(flk, f"{algorithm} on {dset}", Xts, Yts, Xtr, Ytr, err_fns)
- else:
- print(f"Will train model {flk} on data {dset} with {kfold}-fold CV", flush=True)
- load_fn = get_cv_fn(dset)
- test_errs, train_errs = [], []
-
- for it, (Xtr, Ytr, Xts, Yts, kwargs) in enumerate(
- load_fn(k=kfold, dtype=dtype.to_numpy_dtype(), as_torch=True)
- ):
- err_fns = [functools.partial(fn, **kwargs) for fn in err_fns]
- with TicToc(f"FALKON ALGORITHM (fold {it})"):
- flk.error_every = err_fns[0]
- flk.fit(Xtr, Ytr, Xts, Yts)
- c_test_errs, c_train_errs = test_model(flk, f"{algorithm} on {dset}", Xts, Yts, Xtr, Ytr, err_fns)
- train_errs.append(c_train_errs)
- test_errs.append(c_test_errs)
-
- print(f"Full errors: Test {test_errs} - Train {train_errs}")
- print()
- print(f"{kfold}-Fold Error Report")
- for err_fn_i in range(len(err_fns)):
- print(
- f"Final test errors: "
- f"{np.mean([e[err_fn_i] for e in test_errs]):.4f} +- "
- f"{np.std([e[err_fn_i] for e in test_errs]):4f}"
- )
- print(
- f"Final train errors: "
- f"{np.mean([e[err_fn_i] for e in train_errs]):.4f} +- "
- f"{np.std([e[err_fn_i] for e in train_errs]):.4f}"
- )
- print()
-
-
-def run_logistic_falkon(
- dset: Dataset,
- algorithm: Algorithm,
- dtype: Optional[DataType],
- iter_list: List[int],
- penalty_list: List[float],
- num_centers: int,
- kernel_sigma: float,
- kernel: str,
- seed: int,
-):
- import torch
-
- import falkon
- from falkon import kernels
- from falkon.gsc_losses import LogisticLoss
- from falkon.models import logistic_falkon
- from falkon.utils import TicToc
-
- torch.manual_seed(seed)
- np.random.seed(seed)
-
- # Data types
- if dtype is None:
- dtype = DataType.float64
- # Arguments
- if kernel.lower() == "gaussian":
- k = kernels.GaussianKernel(kernel_sigma)
- elif kernel.lower() == "laplacian":
- k = kernels.LaplacianKernel(kernel_sigma)
- elif kernel.lower() == "linear":
- k = kernels.LinearKernel(beta=1.0, gamma=kernel_sigma)
- else:
- raise ValueError(f"Kernel {kernel} not understood for algorithm {algorithm}")
- opt = falkon.FalkonOptions(
- compute_arch_speed=False, no_single_kernel=True, pc_epsilon_32=1e-6, pc_epsilon_64=1e-13, debug=True
- )
- loss = LogisticLoss(kernel=k)
- flk = logistic_falkon.LogisticFalkon(
- kernel=k,
- loss=loss,
- penalty_list=penalty_list,
- iter_list=iter_list,
- M=num_centers,
- seed=seed,
- error_fn=None,
- error_every=1,
- options=opt,
- )
-
- # Error metrics
- err_fns = get_err_fns(dset)
- # Load data
- load_fn = get_load_fn(dset)
- Xtr, Ytr, Xts, Yts, kwargs = load_fn(dtype=dtype.to_numpy_dtype(), as_torch=True)
- Xtr = Xtr.pin_memory()
- Ytr = Ytr.pin_memory()
- err_fns = [functools.partial(fn, **kwargs) for fn in err_fns]
- with TicToc("LOGISTIC FALKON ALGORITHM"):
- flk.error_fn = err_fns[0]
- print(f"Starting to train model {flk} on data {dset}", flush=True)
- flk.fit(Xtr, Ytr, Xts, Yts)
- test_model(flk, f"{algorithm} on {dset}", Xts, Yts, Xtr, Ytr, err_fns)
-
-
-def run_sgpr_gpflow(
- dset: Dataset,
- algorithm: Algorithm,
- dtype: Optional[DataType],
- lr: float,
- num_iter: int,
- num_centers: int,
- kernel_sigma: float,
- learn_ind_pts: bool,
- kernel_variance: float,
- seed: int,
-):
- import gpflow
- import tensorflow as tf
-
- from falkon.benchmarks.models.gpflow_model import TrainableSGPR
-
- tf.random.set_seed(seed)
- np.random.seed(seed)
-
- # Data types
- if dtype is None:
- dtype = DataType.float32
- if dtype == DataType.float32:
- gpflow.config.set_default_float(np.float32)
-
- err_fns = get_err_fns(dset)
-
- # Kernel
- sigma_initial = np.array(kernel_sigma, dtype=dtype.to_numpy_dtype())
- kernel = gpflow.kernels.SquaredExponential(lengthscales=sigma_initial, variance=kernel_variance)
-
- # Data loading
- load_fn = get_load_fn(dset)
- Xtr, Ytr, Xts, Yts, kwargs = load_fn(dtype=dtype.to_numpy_dtype(), as_torch=False, as_tf=True)
- err_fns = [functools.partial(fn, **kwargs) for fn in err_fns]
-
- # Inducing points
- inducing_idx = np.random.choice(Xtr.shape[0], num_centers, replace=False)
- inducing_points = Xtr[inducing_idx].reshape(num_centers, -1)
- print(f"Took {inducing_points.shape[0]} random inducing points")
- if Ytr.shape[1] != 1:
- raise NotImplementedError("SGPR GPFLOW only implemented for 1 output")
-
- # Define model, train and test
- model = TrainableSGPR(
- kernel=kernel,
- inducing_points=inducing_points,
- num_iter=num_iter,
- err_fn=err_fns[0],
- train_hyperparams=learn_ind_pts,
- lr=lr,
- )
- t_s = time.time()
- print(f"Starting to train model {model} on data {dset}", flush=True)
- model.fit(Xtr, Ytr, Xts, Yts)
- print(f"Training of {algorithm} on {dset} complete in {time.time() - t_s:.2f}s", flush=True)
- test_model(model, f"{algorithm} on {dset}", Xts, Yts, Xtr, Ytr, err_fns)
-
-
-def run_gpflow(
- dset: Dataset,
- algorithm: Algorithm,
- dtype: Optional[DataType],
- batch_size: int,
- lr: float,
- natgrad_lr: float,
- var_dist: str,
- num_iter: int,
- num_centers: int,
- kernel_sigma: float,
- learn_ind_pts: bool,
- error_every: int,
- kernel_variance: float,
- kfold: int,
- seed: int,
- ind_pt_file: Optional[str] = None,
-):
- import gpflow
- import tensorflow as tf
-
- from falkon.benchmarks.models.gpflow_model import TrainableSVGP
-
- tf.random.set_seed(seed)
- np.random.seed(seed)
-
- # Data types
- if dtype is None:
- dtype = DataType.float32
- if dtype == DataType.float32:
- gpflow.config.set_default_float(np.float32)
-
- err_fns = get_err_fns(dset)
-
- def get_model(Xtr, num_outputs, err_fn, kernel):
- # Inducing points
- inducing_idx = np.random.choice(Xtr.shape[0], num_centers, replace=False)
- inducing_points = Xtr[inducing_idx].reshape(num_centers, -1)
- print(f"Took {inducing_points.shape[0]} random inducing points")
-
- num_classes = 0
- if algorithm == Algorithm.GPFLOW_CLS:
- if num_outputs == 1:
- num_classes = 2
- else:
- num_classes = num_outputs
- model = TrainableSVGP(
- kernel=kernel,
- inducing_points=inducing_points,
- batch_size=batch_size,
- num_iter=num_iter,
- err_fn=err_fn,
- classif=num_classes,
- lr=lr,
- var_dist=var_dist,
- error_every=error_every,
- train_hyperparams=learn_ind_pts,
- optimize_centers=True,
- natgrad_lr=natgrad_lr,
- )
- return model
-
- load_fn = get_load_fn(dset)
- Xtr, Ytr, Xts, Yts, kwargs = load_fn(dtype=dtype.to_numpy_dtype(), as_torch=False, as_tf=True)
- err_fns = [functools.partial(fn, **kwargs) for fn in err_fns]
- # Kernel
- sigma_initial = np.array(kernel_sigma, dtype=dtype.to_numpy_dtype())
- kernel = gpflow.kernels.SquaredExponential(lengthscales=sigma_initial, variance=kernel_variance)
- model = get_model(Xtr, Ytr.shape[1], err_fns[0], kernel)
- t_s = time.time()
- print(f"Starting to train model {model} on data {dset}", flush=True)
- model.fit(Xtr, Ytr, Xts, Yts)
- print(f"Training of {algorithm} on {dset} complete in {time.time() - t_s:.2f}s", flush=True)
- if model.num_classes == 2:
- Yts = (Yts + 1) / 2
- Ytr = (Ytr + 1) / 2
- test_model(model, f"{algorithm} on {dset}", Xts, Yts, Xtr, Ytr, err_fns)
-
-
-if __name__ == "__main__":
- import datetime
-
- print("-------------------------------------------")
- print(print(datetime.datetime.now()))
- p = argparse.ArgumentParser(description="FALKON Benchmark Runner")
-
- p.add_argument(
- "-a",
- "--algorithm",
- type=Algorithm,
- choices=list(Algorithm),
- required=True,
- help="The algorithm which should be used for predictions.",
- )
- p.add_argument("-d", "--dataset", type=Dataset, choices=list(Dataset), required=True, help="Dataset")
- p.add_argument(
- "-t",
- "--dtype",
- type=DataType.argparse,
- choices=list(DataType),
- required=False,
- default=None,
- help="Floating point precision to work with. Lower precision will be "
- "faster but less accurate. Certain algorithms require a specific precision. "
- "If this argument is not specified we will use the highest precision "
- "supported by the chosen algorithm.",
- )
- p.add_argument("-e", "--epochs", type=int, required=True, help="Number of epochs to run the algorithm for.")
- p.add_argument("--subsample", type=int, required=False, default=0, help="Data subsampling")
- p.add_argument("-k", "--kfold", type=int, default=1, help="Number of folds for k-fold CV.")
- p.add_argument("--seed", type=int, default=RANDOM_SEED, help="Random number generator seed")
- # Algorithm-specific arguments
- p.add_argument(
- "-M",
- "--num-centers",
- type=int,
- default=0,
- help="Number of Nystroem centers. Used for algorithms " "falkon, gpytorch and gpflow.",
- )
-
- p.add_argument("--natgrad-lr", type=float, default=0.0001, help="Natural gradient learning rate (GPFlow)")
-
- p.add_argument(
- "--var-dist",
- type=VariationalDistribution,
- default=None,
- required=False,
- help="Form of the variational distribution used in GPytorch",
- )
- p.add_argument("--learn-hyperparams", action="store_true", help="Whether gpytorch should learn hyperparameters")
- p.add_argument(
- "--inducing-point-file", type=str, default=None, required=False, help="file with saved inducing points"
- )
-
- p.add_argument(
- "--penalty",
- type=float,
- default=0.0,
- required=False,
- help="Lambda penalty for use in KRR. Needed for the Falkon algorithm.",
- )
- p.add_argument(
- "--sigma", type=float, default=-1.0, required=False, help="Inverse length-scale for the Gaussian kernel."
- )
- p.add_argument(
- "--batch-size",
- type=int,
- default=4096,
- required=False,
- help="Mini-batch size to be used for stochastic methods (GPytorch)",
- )
- p.add_argument(
- "--lr",
- type=float,
- default=0.001,
- required=False,
- help="Learning rate, used for only certain algorithms (GPytorch)",
- )
- p.add_argument(
- "--n-subsample",
- type=int,
- default=None,
- required=False,
- help="Number of samples to be used for the EigenPro SVD preconditioner",
- )
- p.add_argument(
- "--data-subsample",
- type=int,
- default=None,
- required=False,
- help="Subsample the input data to this number of samples (EigenPro)",
- )
- p.add_argument(
- "--epro-q", type=int, default=None, required=False, help="Top-q eigenvalues to take for eigenpro preconditioner"
- )
- p.add_argument(
- "--kernel", type=str, default="gaussian", required=False, help="Type of kernel to use. Used for Falkon"
- )
- p.add_argument(
- "--error-every", type=int, default=1000, required=False, help="How often to display validation error (GPFlow)"
- )
- p.add_argument(
- "--kernel-variance",
- type=float,
- default=1.0,
- required=False,
- help="Default kernel variance for GPFlow RBF kernel",
- )
- p.add_argument(
- "--eta-divisor", type=float, default=1.0, required=False, help="Learning-rate regulator for EigenPro"
- )
- p.add_argument(
- "--iter-list", type=int, nargs="*", default=[], required=False, help="List of CG iterations for logistic falkon"
- )
- p.add_argument(
- "--penalty-list",
- type=float,
- nargs="*",
- default=[],
- required=False,
- help="List of penalty values for logistic falkon",
- )
-
- args = p.parse_args()
- print(f"STARTING WITH SEED {args.seed}")
-
- if args.algorithm == Algorithm.FALKON:
- run_falkon(
- dset=args.dataset,
- algorithm=args.algorithm,
- dtype=args.dtype,
- num_iter=args.epochs,
- num_centers=args.num_centers,
- kernel_sigma=args.sigma,
- penalty=args.penalty,
- kernel=args.kernel,
- kfold=args.kfold,
- seed=args.seed,
- )
- elif args.algorithm == Algorithm.LOGISTIC_FALKON:
- run_logistic_falkon(
- dset=args.dataset,
- algorithm=args.algorithm,
- dtype=args.dtype,
- iter_list=args.iter_list,
- penalty_list=args.penalty_list,
- num_centers=args.num_centers,
- kernel_sigma=args.sigma,
- kernel=args.kernel,
- seed=args.seed,
- )
- elif args.algorithm == Algorithm.EIGENPRO:
- run_epro(
- dset=args.dataset,
- algorithm=args.algorithm,
- dtype=args.dtype,
- num_iter=args.epochs,
- kernel_sigma=args.sigma,
- n_subsample=args.n_subsample,
- q=args.epro_q,
- kfold=args.kfold,
- seed=args.seed,
- data_subsample=args.data_subsample,
- eta_divisor=args.eta_divisor,
- )
- elif args.algorithm in {Algorithm.GPYTORCH_CLS, Algorithm.GPYTORCH_REG}:
- run_gpytorch(
- dset=args.dataset,
- algorithm=args.algorithm,
- dtype=args.dtype,
- num_iter=args.epochs,
- num_centers=args.num_centers,
- kernel_sigma=args.sigma,
- var_dist=str(args.var_dist),
- batch_size=args.batch_size,
- lr=args.lr,
- learn_ind_pts=args.learn_hyperparams,
- ind_pt_file=args.inducing_point_file,
- kfold=args.kfold,
- seed=args.seed,
- natgrad_lr=args.natgrad_lr,
- )
- elif args.algorithm in {Algorithm.GPFLOW_CLS, Algorithm.GPFLOW_REG}:
- run_gpflow(
- dset=args.dataset,
- algorithm=args.algorithm,
- dtype=args.dtype,
- num_iter=args.epochs,
- num_centers=args.num_centers,
- kernel_sigma=args.sigma,
- var_dist=str(args.var_dist),
- batch_size=args.batch_size,
- lr=args.lr,
- natgrad_lr=args.natgrad_lr,
- learn_ind_pts=args.learn_hyperparams,
- ind_pt_file=args.inducing_point_file,
- error_every=args.error_every,
- kernel_variance=args.kernel_variance,
- kfold=args.kfold,
- seed=args.seed,
- )
- elif args.algorithm == Algorithm.GPYTORCH_SGPR:
- run_gpytorch_sgpr(
- dset=args.dataset,
- algorithm=args.algorithm,
- dtype=args.dtype,
- lr=args.lr,
- num_iter=args.epochs,
- num_centers=args.num_centers,
- learn_ind_pts=args.learn_hyperparams,
- seed=args.seed,
- )
- elif args.algorithm == Algorithm.GPFLOW_SGPR:
- run_sgpr_gpflow(
- dset=args.dataset,
- algorithm=args.algorithm,
- dtype=args.dtype,
- lr=args.lr,
- num_iter=args.epochs,
- num_centers=args.num_centers,
- kernel_sigma=args.sigma,
- learn_ind_pts=args.learn_hyperparams,
- kernel_variance=args.kernel_variance,
- seed=args.seed,
- )
- else:
- raise NotImplementedError(f"No benchmark implemented for algorithm {args.algorithm}.")
diff --git a/falkon/benchmarks/falkon_benchmarks/benchmark_susy.sh b/falkon/benchmarks/falkon_benchmarks/benchmark_susy.sh
deleted file mode 100755
index df4820e2..00000000
--- a/falkon/benchmarks/falkon_benchmarks/benchmark_susy.sh
+++ /dev/null
@@ -1,152 +0,0 @@
-#!/bin/bash
-if [ ! -d logs ]; then
- mkdir logs
-fi
-
-# Prepare GPU
-export CUDA_VISIBLE_DEVICES="1"
-
-# Prepare conda
-CONDA_BASE=$(conda info --base)
-source $CONDA_BASE/etc/profile.d/conda.sh
-
-# Common variables
-DSET="susy"
-TRAIN_DATAPOINTS=5000000
-PY_LAUNCHER="benchmark_runner.py"
-
-
-# Falkon Logistic
-if [ true = false ]; then
- conda activate torch
- ALGO="falkon-cls"
- M=20000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 3 -e 0 \
- --penalty-list 1e-4 1e-7 1e-8 1e-8 1e-8 1e-8 1e-8 1e-8 \
- --iter-list 5 5 5 8 8 8 8 8 --seed 12 \
- -M $M -t $TYPE 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 3 -e 0 \
- --penalty-list 1e-4 1e-6 1e-8 1e-8 1e-8 1e-8 1e-8 1e-8 \
- --iter-list 5 5 5 8 8 8 8 8 --seed 13 \
- -M $M -t $TYPE 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 3 -e 0 \
- --penalty-list 1e-4 1e-6 1e-8 1e-8 1e-8 1e-8 1e-8 1e-8 \
- --iter-list 5 5 5 8 8 8 8 8 --seed 14 \
- -M $M -t $TYPE 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 3 -e 0 \
- --penalty-list 1e-4 1e-6 1e-8 1e-8 1e-8 1e-8 1e-8 1e-8 \
- --iter-list 5 5 5 8 8 8 8 8 --seed 15 \
- -M $M -t $TYPE 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 3 -e 0 \
- --penalty-list 1e-4 1e-6 1e-8 1e-8 1e-8 1e-8 1e-8 1e-8 \
- --iter-list 5 5 5 8 8 8 8 8 --seed 16 \
- -M $M -t $TYPE 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-if [ true = true ]; then
- conda activate torch
- ALGO="falkon"
- M=30000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 20 --sigma 3.5 --penalty 5e-7 \
- -M $M -t $TYPE --kernel gaussian --seed 12 2>&1 | tee -a $OUTFILE
- exit 1;
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 20 --sigma 3.0 --penalty 1e-6 \
- -M $M -t $TYPE --kernel gaussian --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 20 --sigma 3.0 --penalty 1e-6 \
- -M $M -t $TYPE --kernel gaussian --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 20 --sigma 3.0 --penalty 1e-6 \
- -M $M -t $TYPE --kernel gaussian --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 20 --sigma 3.0 --penalty 1e-6 \
- -M $M -t $TYPE --kernel gaussian --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# GPytorch
-if [ false = true ]; then
- ALGO="gpytorch-cls"
- M=1000
- VAR="tril_natgrad"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- BATCH_SIZE=16000
- LR=0.002
- NATGRAD_LR=0.002
- EPOCHS=13
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --lr $LR --natgrad-lr $NATGRAD_LR --batch-size $BATCH_SIZE --learn-hyperparams \
- --var-dist $VAR --sigma 5 -e $EPOCHS --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --lr $LR --natgrad-lr $NATGRAD_LR --batch-size $BATCH_SIZE --learn-hyperparams \
- --var-dist $VAR --sigma 5 -e $EPOCHS --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --lr $LR --natgrad-lr $NATGRAD_LR --batch-size $BATCH_SIZE --learn-hyperparams \
- --var-dist $VAR --sigma 5 -e $EPOCHS --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --lr $LR --natgrad-lr $NATGRAD_LR --batch-size $BATCH_SIZE --learn-hyperparams \
- --var-dist $VAR --sigma 5 -e $EPOCHS --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --lr $LR --natgrad-lr $NATGRAD_LR --batch-size $BATCH_SIZE --learn-hyperparams \
- --var-dist $VAR --sigma 5 -e $EPOCHS --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# GPFlow
-if [ false = true ]; then
- ALGO="gpflow-cls"
- M=2000
- VAR="diag"
- BATCH_SIZE=16000
- LR=0.003
- ERROR_EVERY=$(( $TRAIN_DATAPOINTS / $BATCH_SIZE ))
- EPOCHS=$(( $TRAIN_DATAPOINTS / $BATCH_SIZE * 20 ))
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- conda activate gpflow
- echo "Running ${ALGO} on ${DSET} data for ${EPOCHS} epochs, log will be saved in ${OUTFILE}"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 4 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 4 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 4 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 4 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 4 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
- echo "${ALGO} on ${DSET} data complete..."
-fi
-
-# EigenPro
-if [ true = false ]; then
- ALGO="eigenpro"
- OUTFILE="logs/${DSET}_${ALGO}.txt"
- conda activate epro2
- echo "Running ${ALGO} on ${DSET} data, log will be saved in ${OUTFILE}"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 4 -e 2 \
- --data-subsample 600000 --seed 12 --epro-q 850 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 4 -e 2 \
- --data-subsample 600000 --seed 13 --epro-q 850 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 4 -e 2 \
- --data-subsample 600000 --seed 14 --epro-q 850 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 4 -e 2 \
- --data-subsample 600000 --seed 15 --epro-q 850 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET --sigma 4 -e 2 \
- --data-subsample 600000 --seed 16 --epro-q 850 2>&1 | tee -a $OUTFILE
- conda deactivate
- echo "${ALGO} on ${DSET} data complete..."
-fi
diff --git a/falkon/benchmarks/falkon_benchmarks/benchmark_taxi.sh b/falkon/benchmarks/falkon_benchmarks/benchmark_taxi.sh
deleted file mode 100755
index 24689f9a..00000000
--- a/falkon/benchmarks/falkon_benchmarks/benchmark_taxi.sh
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/bin/bash
-if [ ! -d logs ]; then
- mkdir logs
-fi
-
-# Prepare conda
-CONDA_BASE=$(conda info --base)
-source $CONDA_BASE/etc/profile.d/conda.sh
-
-DSET="taxi"
-PY_LAUNCHER="benchmark_runner.py"
-TRAINING_POINTS=1000000000
-
-export CUDA_VISIBLE_DEVICES="0,1"
-
-# Falkon 64
-if [ true = false ]; then
- ALGO="falkon"
- M=80000
- TYPE="float64"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET -t $TYPE --sigma 1 --penalty 1e-7 --kernel laplacian 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# Falkon 32
-if [ false = true ]; then
- ALGO="falkon"
- M=100000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 10 -d $DSET -t $TYPE \
- --sigma 1.0 --penalty 2e-7 --kernel gaussian --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 10 -d $DSET -t $TYPE \
- --sigma 1.0 --penalty 2e-7 --kernel gaussian --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 10 -d $DSET -t $TYPE \
- --sigma 1.0 --penalty 2e-7 --kernel gaussian --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 10 -d $DSET -t $TYPE \
- --sigma 1.0 --penalty 2e-7 --kernel gaussian --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 10 -d $DSET -t $TYPE \
- --sigma 1.0 --penalty 2e-7 --kernel gaussian --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# GPytorch
-if [ true = true ]; then
- ALGO="gpytorch-reg"
- M=1000
- VAR="natgrad"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- BATCH_SIZE=32000
- LR=0.002
- NATGRAD_LR=0.002
- EPOCHS=5
- conda activate torch
- #PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- # --batch-size $BATCH_SIZE --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 1 \
- # --epochs $EPOCHS --learn-hyperparams --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --batch-size $BATCH_SIZE --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 1 \
- --epochs $EPOCHS --learn-hyperparams --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --batch-size $BATCH_SIZE --var-dist $VAR --lr $LR --natgrad-lr $NATGRAD_LR --sigma 1 \
- --epochs $EPOCHS --learn-hyperparams --seed 14 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# GPFlow
-if [ false = true ]; then
- ALGO="gpflow-reg"
- M=1000
- VAR=diag
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- BATCH_SIZE=32000
- EPOCHS=$(( $TRAINING_POINTS / $BATCH_SIZE * 10 ))
- ERROR_EVERY=30000 # This is one epoch
- conda activate gpflow
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 1 --batch-size $BATCH_SIZE \
- --lr 0.003 --natgrad-lr 0.0000 --epochs $EPOCHS --error-every $ERROR_EVERY \
- --learn-hyperparams --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
diff --git a/falkon/benchmarks/falkon_benchmarks/benchmark_timit.sh b/falkon/benchmarks/falkon_benchmarks/benchmark_timit.sh
deleted file mode 100755
index 64d9423b..00000000
--- a/falkon/benchmarks/falkon_benchmarks/benchmark_timit.sh
+++ /dev/null
@@ -1,117 +0,0 @@
-#!/bin/bash
-if [ ! -d logs ]; then
- mkdir logs
-fi
-
-export CUDA_VISIBLE_DEVICES=0
-
-# Prepare conda
-CONDA_BASE=$(conda info --base)
-source $CONDA_BASE/etc/profile.d/conda.sh
-
-DSET="timit"
-TRAIN_DATAPOINTS=1200000
-PY_LAUNCHER="benchmark_runner.py"
-
-##### TIMIT Dataset
-# Falkon 64
-if [ false = true ]; then
- ALGO="falkon"
- M=100000
- TYPE="float64"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M -t $TYPE -e 20 \
- --sigma 15 --penalty 1e-9 --kernel gaussian \
- 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-if [ false = true ]; then
- # Falkon 32
- ALGO="falkon"
- M=100000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M -t $TYPE -e 20 \
- --sigma 14.5 --penalty 5e-9 --kernel gaussian --seed 12 \
- 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M -t $TYPE -e 20 \
- --sigma 14.5 --penalty 5e-9 --kernel gaussian --seed 13 \
- 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M -t $TYPE -e 20 \
- --sigma 14.5 --penalty 5e-9 --kernel gaussian --seed 14 \
- 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M -t $TYPE -e 20 \
- --sigma 14.5 --penalty 5e-9 --kernel gaussian --seed 15 \
- 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M -t $TYPE -e 20 \
- --sigma 14.5 --penalty 5e-9 --kernel gaussian --seed 16 \
- 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# GPytorch -- This fails, programmer error most likely
-if [ false = true ]; then
- ALGO="gpytorch-cls"
- M=100
- VAR="diag"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M --var-dist $VAR \
- --sigma 15 --lr 0.01 --batch-size 2048 -e 30 --seed 12 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# GPFlow
-if [ true = true ]; then
- ALGO="gpflow-cls"
- M=2000
- VAR="diag"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- BATCH_SIZE=2048
- EPOCHS=$(( $TRAIN_DATAPOINTS / $BATCH_SIZE * 13 ))
- ERROR_EVERY=500
- LR=0.01
- conda activate gpflow
- echo "Running ${ALGO} on ${DSET} data for ${EPOCHS} iterations, log will be saved in ${OUTFILE}"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 15 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.0000 --epochs $EPOCHS \
- --error-every $ERROR_EVERY --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 15 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.0000 --epochs $EPOCHS \
- --error-every $ERROR_EVERY --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 15 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.0000 --epochs $EPOCHS \
- --error-every $ERROR_EVERY --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 15 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.0000 --epochs $EPOCHS \
- --error-every $ERROR_EVERY --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 15 --batch-size $BATCH_SIZE --learn-hyperparams \
- --lr $LR --natgrad-lr 0.0000 --epochs $EPOCHS \
- --error-every $ERROR_EVERY --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
- echo "${ALGO} on ${DSET} data complete..."
-fi
-
-
-# EigenPro
-if [ true = false ]; then
- ALGO="eigenpro"
- OUTFILE="logs/${DSET}_${ALGO}.txt"
- conda activate epro2
- echo "Running ${ALGO} on ${DSET} data, log will be saved in ${OUTFILE}"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 5 --sigma 14.5 --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 5 --sigma 14.5 --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 5 --sigma 14.5 --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 5 --sigma 14.5 --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -e 5 --sigma 14.5 --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
- echo "${ALGO} on ${DSET} data complete..."
-fi
diff --git a/falkon/benchmarks/falkon_benchmarks/benchmark_yelp.sh b/falkon/benchmarks/falkon_benchmarks/benchmark_yelp.sh
deleted file mode 100755
index 1959280c..00000000
--- a/falkon/benchmarks/falkon_benchmarks/benchmark_yelp.sh
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/bin/bash
-if [ ! -d logs ]; then
- mkdir logs
-fi
-
-# Prepare conda
-CONDA_BASE=$(conda info --base)
-source $CONDA_BASE/etc/profile.d/conda.sh
-
-DSET="yelp"
-PY_LAUNCHER="benchmark_runner.py"
-export CUDA_VISIBLE_DEVICES="0,1"
-
-# Falkon 64
-if [ true = false ]; then
- ALGO="falkon"
- M=50000
- TYPE="float64"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET \
- -t $TYPE --sigma 31.4 --kernel linear \
- --penalty 1e-7 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# Falkon 32
-if [ true = true ]; then
- ALGO="falkon"
- M=50000
- TYPE="float32"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${TYPE}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET \
- -t $TYPE --sigma 20.0 --kernel gaussian \
- --penalty 1e-6 --seed 12 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET \
- -t $TYPE --sigma 20.0 --kernel gaussian \
- --penalty 1e-6 --seed 13 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET \
- -t $TYPE --sigma 20.0 --kernel gaussian \
- --penalty 1e-6 --seed 14 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET \
- -t $TYPE --sigma 20.0 --kernel gaussian \
- --penalty 1e-6 --seed 15 2>&1 | tee -a $OUTFILE
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -M $M -e 20 -d $DSET \
- -t $TYPE --sigma 20.0 --kernel gaussian \
- --penalty 1e-6 --seed 16 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# GPytorch
-if [ true = false ]; then
- ALGO="gpytorch-reg"
- M=1000
- VAR="diag"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- conda activate torch
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M --batch-size 4096 \
- --var-dist $VAR --lr 0.01 --sigma 6 -e 100 2>&1 | tee -a $OUTFILE
- conda deactivate
-fi
-
-# GPFlow
-if [ true = false ]; then
- ALGO="gpflow-reg"
- M=100
- VAR="diag"
- OUTFILE="logs/${DSET}_${ALGO}_${M}_${VAR}.txt"
- conda activate gpflow
- echo "Running ${ALGO} on ${DSET} data, log will be saved in ${OUTFILE}"
- PYTHONPATH='..' python $PY_LAUNCHER -a $ALGO -d $DSET -M $M \
- --var-dist ${VAR} --sigma 5 --batch-size 1024 --learn-hyperparams \
- --lr 0.005 --natgrad-lr 0.000 --epochs 2000 --error-every 10 \
- --seed 12 2>&1 | tee -a $OUTFILE
- conda deactivate
- echo "${ALGO} on ${DSET} data complete..."
-fi
-
diff --git a/falkon/benchmarks/models/__init__.py b/falkon/benchmarks/models/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/falkon/benchmarks/models/gpflow_model.py b/falkon/benchmarks/models/gpflow_model.py
deleted file mode 100644
index 0fa921ef..00000000
--- a/falkon/benchmarks/models/gpflow_model.py
+++ /dev/null
@@ -1,454 +0,0 @@
-import time
-from functools import partial
-
-import gpflow
-import numpy as np
-import pandas as pd
-import tensorflow as tf
-from gpflow import set_trainable
-from gpflow.models import SVGP
-from gpflow.optimizers import NaturalGradient
-
-
-@tf.function(autograph=False)
-def elbo_opt_step(optimizer, model, batch):
- with tf.GradientTape(watch_accessed_variables=False) as tape:
- tape.watch(model.trainable_variables)
- objective = -model.elbo(batch)
- grads = tape.gradient(objective, model.trainable_variables)
- optimizer.apply_gradients(zip(grads, model.trainable_variables))
- return objective
-
-
-def data_generator(X, Y, batch_size):
- bstart = 0
- while bstart < X.shape[0]:
- bend = min(X.shape[0], bstart + batch_size)
- yield tf.convert_to_tensor(X[bstart:bend]), tf.convert_to_tensor(Y[bstart:bend])
- bstart = bend
-
-
-class TrainableGPR:
- def __init__(self, kernel, num_iter, err_fn, lr):
- self.kernel = kernel
- self.num_iter = num_iter
- self.err_fn = err_fn
- self.lr = lr
- self.model = None
-
- def fit(self, X, Y, Xval, Yval):
- self.model = gpflow.models.GPR((X, Y), kernel=self.kernel, noise_variance=0.1)
- # Create the optimizers
- adam_opt = tf.optimizers.Adam(self.lr)
-
- gpflow.utilities.print_summary(self.model)
- print("", flush=True)
-
- @tf.function
- def step_fn():
- adam_opt.minimize(self.model.training_loss, var_list=self.model.trainable_variables)
- return True
-
- @tf.function
- def pred_fn():
- return self.model.predict_y(Xval)[0]
-
- t_elapsed = 0
- for step in range(self.num_iter):
- t_s = time.time()
- outcome = step_fn()
- outcome = int(outcome) + 1
- t_elapsed += time.time() - t_s
- if (step + 1) % 1 == 0:
- val_err, err_name = self.err_fn(Yval, pred_fn())
- print(
- f"Epoch {step + 1} - {t_elapsed:7.2f}s elapsed - " f"validation {err_name} {val_err:7.5f}",
- flush=True,
- )
- print(f"\tLengthscale: {self.kernel.lengthscales}")
-
- print("Final model is ")
- gpflow.utilities.print_summary(self.model)
- print("", flush=True)
- return self
-
- def predict(self, X, pred_fn=None):
- return self.model.predict_y(X)[0]
-
- def __str__(self):
- return f"TrainableGPR"
-
-
-class TrainableSGPR:
- def __init__(
- self,
- kernel,
- inducing_points,
- num_iter,
- err_fn,
- train_hyperparams: bool = True,
- lr: float = 0.001,
- ):
- self.train_hyperparams = train_hyperparams
- self.lr = lr
- self.kernel = kernel
- self.Z = inducing_points.copy()
- self.num_iter = num_iter
- self.err_fn = err_fn
- self.model = None
- self.optimizer = "adam"
-
- def fit(self, X, Y, Xval, Yval):
- # Only Gaussian likelihood allowed
- self.model = gpflow.models.SGPR((X, Y), kernel=self.kernel, inducing_variable=self.Z, noise_variance=0.001)
- # self.model.likelihood.variance = gpflow.Parameter(1, transform=tfp.bijectors.Identity())
-
- # Setup training parameters
- if not self.train_hyperparams:
- set_trainable(self.model.inducing_variable.Z, False)
-
- gpflow.utilities.print_summary(self.model)
- print("", flush=True)
-
- @tf.function
- def grad_fn():
- grads = tf.gradients(self.model.training_loss(), self.model.trainable_variables)
- return grads
-
- if self.optimizer == "scipy":
- opt = gpflow.optimizers.Scipy()
-
- def scipy_callback(step, variables, value):
- print(f"Step {step} - Variables: {value}")
-
- opt.minimize(
- self.model.training_loss,
- self.model.trainable_variables,
- method="L-BFGS-B",
- options=dict(maxiter=self.num_iter, ftol=1e-32, maxcor=3, gtol=1e-16, disp=False),
- step_callback=scipy_callback,
- compile=True,
- )
- else:
- if self.optimizer == "adam":
- opt = tf.optimizers.Adam(self.lr)
- elif self.optimizer == "sgd":
- opt = tf.optimizers.SGD(self.lr)
- else:
- raise ValueError(f"Optimizer {self.optimizer} unknown")
-
- @tf.function
- def step_fn():
- opt.minimize(self.model.training_loss, var_list=self.model.trainable_variables)
-
- t_elapsed = 0
- for step in range(self.num_iter):
- t_s = time.time()
- step_fn()
- t_elapsed += time.time() - t_s
- val_err, err_name = self.err_fn(Yval, self.predict(Xval))
- gpflow.utilities.print_summary(self.model)
- print(
- f"Epoch {step + 1} - {t_elapsed:7.2f}s elapsed - " f"validation {err_name} {val_err:7.5f}",
- flush=True,
- )
- print(self.model.inducing_variable.Z.numpy())
-
- print("Final model is ")
- gpflow.utilities.print_summary(self.model)
- print("", flush=True)
- return self
-
- def gradient_map(self, X, Y, Xval, Yval, variance_list, lengthscale_list):
- self.model = gpflow.models.SGPR((X, Y), kernel=self.kernel, inducing_variable=self.Z, noise_variance=0.1)
- # Setup parameters for which to compute gradient. We want only 2 params!
- set_trainable(self.model.inducing_variable.Z, False)
- set_trainable(self.model.kernel.variance, False)
- set_trainable(self.model.kernel.lengthscales, True)
- set_trainable(self.model.likelihood.variance, True)
-
- @tf.function
- def grad_fn():
- grads = tf.gradients(self.model.training_loss(), self.model.trainable_variables)
- return grads
-
- df = pd.DataFrame(columns=["sigma", "sigma_g", "variance", "variance_g", "elbo"])
- for lscale in lengthscale_list:
- self.model.kernel.lengthscales.assign([lscale])
- for var in variance_list:
- self.model.likelihood.variance.assign(var)
- # self.model.kernel.variance.assign([var])
- grads = [g.numpy() for g in grad_fn()]
- train_preds = self.model.predict_y(X)[0]
- test_preds = self.model.predict_y(Xval)[0]
- new_row = {
- "sigma": lscale,
- "sigma_g": grads[0][0],
- "variance": var,
- "variance_g": grads[1],
- "elbo": self.model.elbo().numpy(),
- }
- print(f"ELBO: {new_row['elbo']:10.3f} - TRAINING LOSS: {self.model.training_loss():10.3f}")
- tr_err, tr_err_name = self.err_fn(Y, train_preds)
- ts_err, ts_err_name = self.err_fn(Yval, test_preds)
- new_row[f"train_{tr_err_name}"] = tr_err
- new_row[f"test_{ts_err_name}"] = ts_err
- df = df.append(new_row, ignore_index=True)
- print(new_row)
- return df
-
- def predict(self, X):
- return self.model.predict_y(X)[0]
-
- @property
- def inducing_points(self):
- return self.model.inducing_variable.Z.numpy()
-
- def __str__(self):
- return (
- f"TrainableSGPR"
- )
-
-
-class TrainableSVGP:
- def __init__(
- self,
- kernel,
- inducing_points,
- batch_size,
- num_iter,
- err_fn,
- var_dist,
- classif=None,
- error_every=100,
- train_hyperparams: bool = True,
- optimize_centers: bool = True,
- lr: float = 0.001,
- natgrad_lr: float = 0.01,
- ):
- self.train_hyperparams = train_hyperparams
- self.optimize_centers = optimize_centers
- self.lr = lr
- self.natgrad_lr = natgrad_lr
- self.kernel = kernel
- self.Z = inducing_points.copy()
- self.batch_size = batch_size
- self.num_iter = num_iter
- self.err_fn = err_fn
- self.error_every = error_every
- self.do_classif = classif is not None and classif > 0
- self.num_classes = 1
- if self.do_classif:
- self.num_classes = int(classif)
- self.model = None
- self.whiten = True
- self.var_dist = var_dist
-
- def fit(self, X, Y, Xval, Yval):
- N = X.shape[0]
-
- if self.var_dist == "diag":
- q_diag = True
- elif self.var_dist == "full":
- q_diag = False
- else:
- raise NotImplementedError(f"GPFlow cannot implement {self.var_dist} variational distribution")
-
- if self.natgrad_lr > 0 and q_diag:
- raise ValueError("The variational distribution must be 'full' with natural gradients")
-
- if self.do_classif:
- if self.num_classes == 2:
- likelihood = gpflow.likelihoods.Bernoulli()
- num_latent = 1
- else:
- # Softmax better than Robustmax (apparently per the gpflow slack)
- # likelihood = gpflow.likelihoods.MultiClass(self.num_classes, invlink=invlink) # Multiclass likelihood
- likelihood = gpflow.likelihoods.Softmax(self.num_classes)
- num_latent = self.num_classes
- # Y must be 1D for the multiclass model to actually work.
- Y = np.argmax(Y, 1).reshape((-1, 1)).astype(int)
- else:
- num_latent = 1
- likelihood = gpflow.likelihoods.Gaussian(variance=0.1)
-
- self.model = SVGP(
- kernel=self.kernel,
- likelihood=likelihood,
- inducing_variable=self.Z,
- num_data=N,
- num_latent_gps=num_latent,
- whiten=self.whiten,
- q_diag=q_diag,
- )
- # Setup training
- set_trainable(self.model.inducing_variable.Z, self.optimize_centers)
- if not self.train_hyperparams:
- set_trainable(self.model.inducing_variable.Z, False)
- set_trainable(self.model.likelihood.variance, False)
- set_trainable(self.kernel.lengthscales, False)
- set_trainable(self.kernel.variance, False)
- if self.natgrad_lr > 0:
- set_trainable(self.model.q_mu, False)
- set_trainable(self.model.q_sqrt, False)
- variational_params = [(self.model.q_mu, self.model.q_sqrt)]
- # Create the optimizers
- adam_opt = tf.optimizers.Adam(self.lr)
- if self.natgrad_lr > 0:
- natgrad_opt = NaturalGradient(gamma=self.natgrad_lr)
-
- # Print
- gpflow.utilities.print_summary(self.model)
- print("", flush=True)
-
- # Giacomo: If shuffle buffer is too large it will run OOM
- if self.num_classes == 2:
- Y = (Y + 1) / 2
- Yval = (Yval + 1) / 2
- generator = partial(data_generator, X, Y)
- if X.dtype == np.float32:
- tf_dt = tf.float32
- else:
- tf_dt = tf.float64
- train_dataset = (
- tf.data.Dataset.from_generator(generator, args=(self.batch_size,), output_types=(tf_dt, tf_dt))
- .prefetch(self.batch_size * 10)
- .repeat()
- .shuffle(min(N // self.batch_size, 1_000_000 // self.batch_size))
- .batch(1)
- )
- train_iter = iter(train_dataset)
-
- loss = self.model.training_loss_closure(train_iter)
- t_elapsed = 0
-
- @tf.function
- def step_fn():
- adam_opt.minimize(loss, var_list=self.model.trainable_variables)
- if self.natgrad_lr > 0:
- natgrad_opt.minimize(loss, var_list=variational_params)
- return True
-
- for step in range(self.num_iter):
- t_s = time.time()
- outcome = step_fn()
- outcome = int(outcome) + 1
- t_elapsed += time.time() - t_s
- if step % 500 == 0:
- print(f"Step {step} -- Elapsed {t_elapsed:.2f}s", flush=True)
- gpflow.utilities.print_summary(self.model)
- print(self.model.inducing_variable.Z.numpy())
- if (step + 1) % self.error_every == 0:
- preds = self.predict(Xval)
- val_err, err_name = self.err_fn(Yval, preds)
- print(
- f"Step {step + 1} - {t_elapsed:7.2f}s Elapsed - " f"Validation {err_name} {val_err:7.5f}",
- flush=True,
- )
-
- preds = self.predict(Xval)
- val_err, err_name = self.err_fn(Yval, preds)
- print(
- f"Finished optimization - {t_elapsed:7.2f}s Elapsed - " f"Validation {err_name} {val_err:7.5f}", flush=True
- )
- print("Final model is ")
- gpflow.utilities.print_summary(self.model)
- print("", flush=True)
- return self
-
- def predict(self, X):
- preds = []
- dset = tf.data.Dataset.from_tensor_slices((X,)).batch(self.batch_size)
- for X_batch in iter(dset):
- batch_preds = self.model.predict_y(X_batch[0])[0].numpy()
- if self.do_classif:
- batch_preds = batch_preds.reshape((X_batch[0].shape[0], -1))
- preds.append(batch_preds)
- preds = np.concatenate(preds, axis=0)
- return preds
-
- def gradient_map(self, X, Y, Xval, Yval, variance_list, lengthscale_list):
- N = X.shape[0]
- likelihood = gpflow.likelihoods.Gaussian(variance=0.1)
- self.model = SVGP(
- kernel=self.kernel,
- likelihood=likelihood,
- inducing_variable=self.Z,
- num_data=N,
- num_latent_gps=1,
- whiten=self.whiten,
- q_diag=False,
- ) # var-dist must be full covar when using natgrad
- # Setup training parameters. We want only 2 params.
- set_trainable(self.model.inducing_variable.Z, False)
- set_trainable(self.kernel.variance, False)
- set_trainable(self.kernel.lengthscales, True)
- set_trainable(self.model.likelihood.variance, True)
- # Variational parameters will be optimized with natgrad.
- set_trainable(self.model.q_mu, False)
- set_trainable(self.model.q_sqrt, False)
-
- # Set-up for natgrad optimization
- variational_params = [(self.model.q_mu, self.model.q_sqrt)]
- natgrad_opt = NaturalGradient(gamma=1.0)
- generator = partial(data_generator, X, Y)
- if X.dtype == np.float32:
- tf_dt = tf.float32
- else:
- tf_dt = tf.float64
- print(tf_dt)
- train_dataset = (
- tf.data.Dataset.from_generator(generator, args=(self.batch_size,), output_types=(tf_dt, tf_dt))
- .prefetch(self.batch_size * 10)
- .repeat()
- .shuffle(min(N // self.batch_size, 1_000_000 // self.batch_size))
- .batch(1)
- )
- train_iter = iter(train_dataset)
- loss = self.model.training_loss_closure(train_iter)
-
- @tf.function
- def grad_fn():
- grads = tf.gradients(self.model.training_loss((X, Y)), self.model.trainable_variables)
- return grads
-
- df = pd.DataFrame(columns=["sigma", "sigma_g", "variance", "variance_g", "elbo"])
- for lscale in lengthscale_list:
- self.model.kernel.lengthscales.assign([lscale])
- for var in variance_list:
- self.model.likelihood.variance.assign(var)
-
- # Optimize variational parameters (a single iteration is enough with lr=1)
- natgrad_opt.minimize(loss, var_list=variational_params)
-
- # Get gradients and save output in df row.
- grads = [g.numpy() for g in grad_fn()]
- train_preds = self.model.predict_y(X)[0]
- test_preds = self.model.predict_y(Xval)[0]
- new_row = {
- "sigma": lscale,
- "sigma_g": grads[0][0],
- "variance": var,
- "variance_g": grads[1],
- "elbo": self.model.elbo((X, Y)).numpy(),
- }
- tr_err, tr_err_name = self.err_fn(Y, train_preds)
- ts_err, ts_err_name = self.err_fn(Yval, test_preds)
- new_row[f"train_{tr_err_name}"] = tr_err
- new_row[f"test_{ts_err_name}"] = ts_err
- df = df.append(new_row, ignore_index=True)
- print(new_row)
- return df
-
- @property
- def inducing_points(self):
- return self.model.inducing_variable.Z.numpy()
-
- def __str__(self):
- return (
- f"TrainableSVGP"
- )
diff --git a/falkon/benchmarks/models/gpytorch_sgpr.py b/falkon/benchmarks/models/gpytorch_sgpr.py
deleted file mode 100644
index ff61afaf..00000000
--- a/falkon/benchmarks/models/gpytorch_sgpr.py
+++ /dev/null
@@ -1,127 +0,0 @@
-import time
-
-import gpytorch
-import torch
-
-
-class SGPRBaseModel(gpytorch.models.ExactGP):
- def __init__(self, train_x, train_y, likelihood, inducing_points):
- super().__init__(train_x, train_y, likelihood)
- self.likelihood = likelihood
- self.mean_module = gpytorch.means.ConstantMean()
-
- base_kernel = gpytorch.kernels.ScaleKernel(gpytorch.kernels.RBFKernel(ard_num_dims=None))
- self.covar_module = gpytorch.kernels.InducingPointKernel(
- base_kernel, inducing_points=inducing_points, likelihood=likelihood
- )
-
- def forward(self, x):
- mean = self.mean_module(x)
- covar = self.covar_module(x)
- return gpytorch.distributions.MultivariateNormal(mean, covar)
-
- def cuda(self, *args, **kwargs):
- super().cuda(*args, **kwargs)
- self.likelihood = self.likelihood.cuda(*args, **kwargs)
- return self
-
- def parameters(self, recurse: bool = True):
- return list(super().parameters(recurse)) + list(self.likelihood.parameters(recurse))
-
- def train(self, arg=True):
- super().train(arg)
- self.likelihood.train(arg)
-
- def eval(self):
- self.train(False)
-
- @property
- def inducing_points(self):
- for name, param in self.named_parameters():
- if "inducing_points" in name:
- return param
- return None
-
-
-class GpytorchSGPR:
- def __init__(
- self, inducing_points, err_fn, num_epochs: int, use_cuda: bool, lr: float = 0.001, learn_ind_pts: bool = False
- ):
- self.likelihood = gpytorch.likelihoods.GaussianLikelihood()
- self.use_cuda = use_cuda
- self.inducing_points = inducing_points
- self.learn_ind_pts = learn_ind_pts
-
- self.lr = lr
- self.num_epochs = num_epochs
- self.err_fn = err_fn
-
- if use_cuda:
- self.inducing_points = self.inducing_points.contiguous().cuda()
- self.likelihood = self.likelihood.cuda()
-
- def do_train(self, Xtr, Ytr, Xts, Yts):
- Ytr = Ytr.reshape(-1)
- Yts = Yts.reshape(-1)
-
- self.model = SGPRBaseModel(Xtr, Ytr, self.likelihood, self.inducing_points)
- if self.use_cuda:
- self.model = self.model.cuda()
-
- # Loss function for the model
- mll = gpytorch.mlls.ExactMarginalLogLikelihood(self.likelihood, self.model)
-
- # Parameters of the model which will be trained
- params = self.model.parameters()
- if not self.learn_ind_pts:
- exclude = {self.model.inducing_points}
- print("Excluding inducing points from the model:", exclude)
- params = list(set(self.model.parameters()) - exclude)
-
- # Define optimizer
- optimizer = torch.optim.Adam(params, lr=self.lr)
-
- # Start training
- t_elapsed = 0
- if self.use_cuda:
- Xtr = Xtr.cuda()
- Ytr = Ytr.cuda()
- for epoch in range(self.num_epochs):
- # Train
- t_start = time.time()
- self.model.train()
- optimizer.zero_grad()
- output = self.model(Xtr)
- loss = -mll(output, Ytr)
- loss.backward()
- optimizer.step()
- t_elapsed += time.time() - t_start
- # Evaluate
- torch.cuda.empty_cache()
- err, err_name = self.err_fn(Yts, self.predict(Xts))
- print(
- f"Epoch {epoch + 1} - Elapsed {t_elapsed:.2f}s - "
- f"Train loss: {loss.item():.3f} - Test {err_name}: {err:.3f}",
- flush=True,
- )
- torch.cuda.empty_cache()
- print(f"Training took {t_elapsed:.2f}s")
-
- def predict(self, X):
- self.model.eval()
- if self.use_cuda:
- X = X.cuda()
- preds = self.model.likelihood(self.model(X)).mean.cpu().detach()
- return preds
-
- def __str__(self):
- num_ind_pt = self.model.inducing_points.shape[0]
- ker = self.model.covar_module
- lengthscale = [p for name, p in dict(ker.named_parameters(recurse=True)).items() if "raw_lengthscale" in name]
- num_ker_params = lengthscale[0].shape
- return (
- f"RegressionVGP"
- )
diff --git a/falkon/benchmarks/models/gpytorch_variational_models.py b/falkon/benchmarks/models/gpytorch_variational_models.py
deleted file mode 100644
index 62d20e6d..00000000
--- a/falkon/benchmarks/models/gpytorch_variational_models.py
+++ /dev/null
@@ -1,491 +0,0 @@
-import time
-
-import gpytorch
-import numpy as np
-import torch
-from gpytorch.models import ApproximateGP
-from gpytorch.variational import UnwhitenedVariationalStrategy, VariationalStrategy
-
-__all__ = ("get_rbf_kernel", "RegressionVGP", "TwoClassVGP", "MultiClassVGP")
-
-
-def _choose_var_dist(dist_str, num_points, batch_shape=1):
- if batch_shape == 1:
- batch_shape = torch.Size([])
- else:
- batch_shape = torch.Size([batch_shape])
- if dist_str == "diag":
- return gpytorch.variational.MeanFieldVariationalDistribution(num_points, batch_shape=batch_shape)
- elif dist_str == "full":
- return gpytorch.variational.CholeskyVariationalDistribution(num_points, batch_shape=batch_shape)
- elif dist_str == "delta":
- return gpytorch.variational.DeltaVariationalDistribution(num_points, batch_shape=batch_shape)
- elif dist_str == "natgrad":
- return gpytorch.variational.NaturalVariationalDistribution(num_points, batch_shape=batch_shape)
- elif dist_str == "tril_natgrad":
- return gpytorch.variational.TrilNaturalVariationalDistribution(num_points, batch_shape=batch_shape)
- else:
- raise KeyError(dist_str)
-
-
-def _choose_var_strat(model, var_strat, var_dist, ind_pt, learn_ind=True, num_classes=None):
- if var_strat == "multi_task":
- try:
- num_classes = int(num_classes)
- except TypeError as e:
- raise RuntimeError("Multi-task variational strategy must specify integer num_classes") from e
-
- return gpytorch.variational.MultitaskVariationalStrategy(
- VariationalStrategy(model, ind_pt, var_dist, learn_inducing_locations=learn_ind),
- num_tasks=num_classes,
- task_dim=0,
- )
- else:
- return UnwhitenedVariationalStrategy(model, ind_pt, var_dist, learn_inducing_locations=learn_ind)
-
-
-def get_rbf_kernel(ard=None, batch_shape=1):
- if batch_shape == 1:
- return gpytorch.kernels.ScaleKernel(gpytorch.kernels.RBFKernel(ard_num_dims=ard))
- else:
- return gpytorch.kernels.ScaleKernel(
- gpytorch.kernels.RBFKernel(ard_num_dims=ard, batch_shape=torch.Size([batch_shape])),
- batch_shape=torch.Size([batch_shape]),
- )
-
-
-class BaseModel(ApproximateGP):
- def __init__(self, strategy, likelihood):
- super().__init__(strategy)
- self.strategy = strategy
- self.likelihood = likelihood
-
- def cuda(self, **kwargs):
- super().cuda(**kwargs)
- self.likelihood = self.likelihood.cuda()
- return self
-
- def parameters(self, **kwargs):
- return list(super().parameters(**kwargs))
-
- def train(self, arg=True):
- super().train(arg)
- self.likelihood.train(arg)
-
- def eval(self):
- self.train(False)
-
- @property
- def inducing_points(self):
- for name, param in self.named_parameters():
- if "inducing_points" in name:
- return param
- return None
-
-
-class GenericApproxGP(BaseModel):
- def __init__(
- self,
- inducing_points,
- mean_module,
- covar_module,
- var_strat: str,
- learn_ind_pts: bool,
- var_distrib: str,
- likelihood,
- cuda: bool,
- num_classes: int = 1,
- ):
- distribution = _choose_var_dist(var_distrib, inducing_points.size(-2), batch_shape=num_classes)
- if cuda:
- distribution = distribution.cuda()
- strategy = _choose_var_strat(
- self, var_strat, distribution, inducing_points, learn_ind=learn_ind_pts, num_classes=num_classes
- )
- if cuda:
- strategy = strategy.cuda()
-
- super().__init__(strategy, likelihood)
-
- self.mean_module = mean_module
- self.covar_module = covar_module
-
- if not strategy.variational_params_initialized.item():
- try:
- strategy._variational_distribution.initialize_variational_distribution(strategy.prior_distribution)
- except AttributeError:
- strategy.base_variational_strategy._variational_distribution.initialize_variational_distribution(
- strategy.base_variational_strategy.prior_distribution
- )
- strategy.variational_params_initialized.fill_(1)
-
- def forward(self, x):
- mean_x = self.mean_module(x)
- covar_x = self.covar_module(x)
- return gpytorch.distributions.MultivariateNormal(mean_x, covar_x)
-
-
-class MultiTaskApproxGP(BaseModel):
- def __init__(
- self,
- inducing_points,
- mean_module,
- covar_module,
- var_strat: str,
- learn_ind_pts: bool,
- var_distrib: str,
- likelihood,
- batch_shape,
- ):
- distribution = _choose_var_dist(var_distrib, inducing_points.size(-2), batch_shape).cuda()
- strategy = _choose_var_strat(
- self, var_strat, distribution, inducing_points, learn_ind=learn_ind_pts, num_classes=batch_shape
- ).cuda()
-
- super().__init__(strategy, likelihood)
-
- self.mean_module = mean_module
- self.covar_module = covar_module
-
- def forward(self, x):
- mean_x = self.mean_module(x)
- covar_x = self.covar_module(x)
- return gpytorch.distributions.MultivariateNormal(mean_x, covar_x)
-
-
-class GPTrainer:
- def __init__(
- self, model, err_fn, mb_size, use_cuda, mll, num_epochs, params, natgrad_lr, num_data, likelihood, lr=0.001
- ):
- self.model = model
- self.mll = mll
- self.num_epochs = num_epochs
- self.lr = lr
- self.natgrad_lr = natgrad_lr
-
- self.err_fn = err_fn
- self.mb_size = mb_size
-
- self.use_cuda = use_cuda
- if self.use_cuda:
- self.model = self.model.cuda()
- self.params = params
- # Count params
- num_params = [np.prod(p.data.shape) for p in params]
- print(f"Training with {sum(num_params)} parameters")
- # Initialize optimizer with the parameters
- if self.natgrad_lr > 0:
- self.ng_optimizer = gpytorch.optim.NGD(
- self.model.variational_parameters(), num_data=num_data, lr=self.natgrad_lr
- )
- params = set(list(self.model.hyperparameters()) + list(likelihood.parameters()))
- self.optimizer = torch.optim.Adam(list(params), lr=lr)
- else:
- self.ng_optimizer = None
- self.optimizer = torch.optim.Adam(self.params, lr=lr)
- print(f"Optimizer: {self.optimizer}")
- print(f"nat-grad optimizer: {self.ng_optimizer}")
-
- self.error_every = 100
-
- def do_train(self, Xtr, Ytr, Xval, Yval):
- # Define dataset iterators
- train_dataset = torch.utils.data.TensorDataset(Xtr, Ytr)
- # Pinning memory of DataLoader results in slower training.
- if self.mb_size == 1:
- train_loader = torch.utils.data.DataLoader(train_dataset, shuffle=True, num_workers=0)
- else:
- train_loader = torch.utils.data.DataLoader(
- train_dataset, batch_size=self.mb_size, shuffle=True, num_workers=0
- )
-
- # Start training
- t_elapsed = 0
- for epoch in range(self.num_epochs):
- t_start = time.time()
- self.model.train()
- for j, (x_batch, y_batch) in enumerate(train_loader):
- if self.use_cuda:
- x_batch = x_batch.cuda()
- y_batch = y_batch.cuda()
- if self.ng_optimizer is not None:
- self.ng_optimizer.zero_grad()
- self.optimizer.zero_grad()
- output = self.model(x_batch)
- loss = -self.mll(output, y_batch)
- loss.backward()
- if self.ng_optimizer is not None:
- self.ng_optimizer.step()
- self.optimizer.step()
- if j % self.error_every == 0:
- t_elapsed += time.time() - t_start
- err, err_name = self.err_fn(y_batch.cpu(), self.model.likelihood(output).mean.detach().cpu())
- print(
- f"Epoch {epoch + 1}, iter {j}/{len(train_loader)} - Elapsed {t_elapsed:.2fs} - "
- f"Loss: {loss.item():.3f} - {err_name}: {err:.7f}",
- flush=True,
- )
- t_start = time.time()
- t_elapsed += time.time() - t_start # t_start will be reset at the start of the loop
-
- test_pred = self.predict(Xval)
- err, err_name = self.err_fn(Yval, test_pred)
- print(f"Epoch {epoch + 1} - elapsed {t_elapsed:.2f}s - validation {err_name}: {err:.5f}")
- print(f"Training took {t_elapsed:.2f}s")
-
- def predict(self, X):
- test_dataset = torch.utils.data.TensorDataset(X)
- if self.mb_size > 1:
- test_loader = torch.utils.data.DataLoader(
- test_dataset, batch_size=self.mb_size, shuffle=False, num_workers=8
- )
- else:
- test_loader = torch.utils.data.DataLoader(test_dataset, shuffle=False, num_workers=8)
-
- self.model.eval()
- test_pred_means = []
- for x_batch in test_loader:
- x_batch = x_batch[0]
- if self.use_cuda:
- x_batch = x_batch.cuda()
- preds = self.model.likelihood(self.model(x_batch))
- test_pred_means.append(preds.mean.cpu().detach())
-
- del x_batch
- test_pred_means = torch.cat(test_pred_means)
- return test_pred_means
-
-
-class RegressionVGP(GPTrainer):
- def __init__(
- self,
- inducing_points,
- kernel,
- var_dist: str,
- err_fn,
- mb_size: int,
- num_data: int,
- num_epochs: int,
- use_cuda: bool,
- natgrad_lr: float,
- lr: float = 0.001,
- learn_ind_pts: bool = False,
- ):
- self.var_dist = var_dist
- mean_module = gpytorch.means.ConstantMean()
- likelihood = gpytorch.likelihoods.GaussianLikelihood()
-
- if use_cuda:
- inducing_points = inducing_points.contiguous().cuda()
- mean_module = mean_module.cuda()
- kernel = kernel.cuda()
-
- model = GenericApproxGP(
- inducing_points,
- mean_module=mean_module,
- covar_module=kernel,
- var_strat="var_strat",
- var_distrib=var_dist,
- likelihood=likelihood,
- learn_ind_pts=learn_ind_pts,
- cuda=use_cuda,
- )
- loss_fn = gpytorch.mlls.VariationalELBO(likelihood, model, num_data=num_data)
- params = model.parameters()
- print("Model parameters:")
- for k, v in model.named_parameters():
- print(f"\t{k} : {v.shape}")
- print(f"Initialized sigma to {kernel.base_kernel.lengthscale}")
- print(f"Initialized lambda to {likelihood.noise_covar.noise}")
- if not learn_ind_pts:
- exclude = set(mean_module.parameters()) | set(kernel.parameters())
- print("Excluding parameters from mean and covariance models:", exclude)
- params = list(set(model.parameters()) - exclude)
- super().__init__(
- model,
- err_fn,
- mb_size,
- use_cuda,
- mll=loss_fn,
- num_epochs=num_epochs,
- lr=lr,
- params=params,
- natgrad_lr=natgrad_lr,
- num_data=num_data,
- likelihood=likelihood,
- )
-
- def do_train(self, Xtr, Ytr, Xts, Yts):
- super().do_train(Xtr, Ytr.reshape(-1), Xts, Yts.reshape(-1))
-
- def __str__(self):
- num_ind_pt = self.model.inducing_points.shape[0]
- ker = self.model.covar_module
- lengthscale = [p for name, p in dict(ker.named_parameters()).items() if "raw_lengthscale" in name]
- num_ker_params = lengthscale[0].shape # dict(ker.named_parameters())['raw_lengthscale'].shape
- var_dist_params = self.model.variational_strategy._variational_distribution._parameters
- var_dist_num_params = sum([np.prod(p.shape) for p in var_dist_params.values()])
- return (
- f"RegressionVGP"
- )
-
-
-class TwoClassVGP(GPTrainer):
- def __init__(
- self,
- inducing_points,
- kernel,
- var_dist: str,
- err_fn,
- mb_size: int,
- num_data: int,
- num_epochs: int,
- use_cuda: bool,
- natgrad_lr: float,
- lr: float = 0.001,
- learn_ind_pts: bool = True,
- ):
- self.var_dist = var_dist
- if use_cuda:
- inducing_points = inducing_points.contiguous().cuda()
-
- mean_module = gpytorch.means.ConstantMean()
- # Only difference from regression is use of Bernoulli likelihood
- likelihood = gpytorch.likelihoods.BernoulliLikelihood()
-
- if use_cuda:
- inducing_points = inducing_points.contiguous().cuda()
- mean_module = mean_module.cuda()
- kernel = kernel.cuda()
-
- model = GenericApproxGP(
- inducing_points,
- mean_module=mean_module,
- covar_module=kernel,
- var_strat="var_strat",
- var_distrib=var_dist,
- likelihood=likelihood,
- learn_ind_pts=learn_ind_pts,
- cuda=use_cuda,
- )
- loss_fn = gpytorch.mlls.VariationalELBO(likelihood, model, num_data)
- params = model.parameters()
- if not learn_ind_pts:
- exclude = set(mean_module.parameters()) | set(kernel.parameters())
- print("Excluding parameters from mean and covariance models:", exclude)
- params = list(set(model.parameters()) - exclude)
- super().__init__(
- model,
- err_fn,
- mb_size,
- use_cuda,
- mll=loss_fn,
- num_epochs=num_epochs,
- lr=lr,
- params=params,
- natgrad_lr=natgrad_lr,
- num_data=num_data,
- likelihood=likelihood,
- )
-
- def do_train(self, Xtr, Ytr, Xts, Yts):
- Ytr = (Ytr + 1) / 2
- Yts = (Yts + 1) / 2
- super().do_train(Xtr, Ytr.reshape(-1), Xts, Yts.reshape(-1))
-
- def __str__(self):
- num_ind_pt = self.model.inducing_points.shape[0]
- ker = self.model.covar_module
- num_ker_params = 0
- for pn, pv in ker.named_parameters():
- if "raw_lengthscale" in pn:
- num_ker_params = pv.shape
- continue
- var_dist_params = self.model.variational_strategy._variational_distribution._parameters
- var_dist_num_params = sum([np.prod(p.shape) for p in var_dist_params.values()])
- return (
- f"TwoClassVGP"
- )
-
-
-class MultiClassVGP(GPTrainer):
- def __init__(
- self,
- inducing_points,
- kernel,
- num_classes: int,
- var_dist: str,
- err_fn,
- mb_size: int,
- num_data: int,
- num_epochs: int,
- use_cuda: bool,
- natgrad_lr: float,
- lr: float = 0.001,
- learn_ind_pts: bool = True,
- ):
- # if mb_size != 1:
- # raise ValueError("MultiTask VGP must be run with batch size of 1.")
- self.num_classes = num_classes
- self.var_dist = var_dist
- if use_cuda:
- inducing_points = inducing_points.contiguous().cuda()
-
- mean_module = gpytorch.means.ConstantMean(batch_shape=torch.Size([num_classes]))
- likelihood = gpytorch.likelihoods.SoftmaxLikelihood( # num_tasks=num_classes)
- num_classes=num_classes, mixing_weights=False
- )
- model = MultiTaskApproxGP(
- inducing_points,
- mean_module=mean_module,
- covar_module=kernel,
- var_strat="multi_task",
- var_distrib=var_dist,
- likelihood=likelihood,
- batch_shape=num_classes,
- learn_ind_pts=learn_ind_pts,
- )
- loss_fn = gpytorch.mlls.VariationalELBO(likelihood, model, num_data)
- params = model.parameters()
- if not learn_ind_pts:
- exclude = set(mean_module.parameters()) + set(kernel.parameters())
- params = list(set(model.parameters()) - exclude)
- super().__init__(
- model,
- err_fn,
- mb_size,
- use_cuda,
- mll=loss_fn,
- num_epochs=num_epochs,
- lr=lr,
- params=params,
- natgrad_lr=natgrad_lr,
- num_data=num_data,
- likelihood=likelihood,
- )
-
- def do_train(self, Xtr, Ytr, Xts, Yts):
- super().do_train(Xtr, Ytr, Xts, Yts)
-
- def __str__(self):
- num_ind_pt = self.model.inducing_points.shape[0]
- ker = self.model.covar_module
- num_ker_params = ker._parameters["raw_lengthscale"].shape
- var_dist_params = (
- self.model.variational_strategy.base_variational_strategy._variational_distribution._parameters
- )
- var_dist_num_params = sum([sum(p.shape) for p in var_dist_params.values()])
- return (
- f"MultiClassVGP"
- )
diff --git a/falkon/benchmarks/run_hgrad_benchmark.sh b/falkon/benchmarks/run_hgrad_benchmark.sh
deleted file mode 100644
index e872a65a..00000000
--- a/falkon/benchmarks/run_hgrad_benchmark.sh
+++ /dev/null
@@ -1,71 +0,0 @@
-
-function run_exp () {
- local SIG_INIT=$1
- local PEN_INIT=$2
- local LR=$3
- local M=$4
- local DATASET=$5
- local ENAME=$6
- local VAL_PCT=$7
- local MODEL=$8
- PYTHONPATH=.. python hgrad_benchmarks/simple_hopt.py \
- --seed 12319 \
- --cg-tol 1e-3 \
- --val-pct $VAL_PCT \
- --sigma-type single \
- --sigma-init $SIG_INIT \
- --penalty-init $PEN_INIT \
- --lr $LR \
- --epochs $NUM_EPOCHS \
- --op \
- --os \
- --num-centers $M \
- --dataset $DATASET \
- --model $MODEL \
- --cuda \
- --name "${DATASET}_hopt_${MODEL}_${ENAME}"
-}
-
-function run_exp_all_models () {
- run_exp "$1" "$2" "$3" "$4" "$5" "$6" "$7" "loocv"
- run_exp "$1" "$2" "$3" "$4" "$5" "$6" "$7" "sgpr"
- run_exp "$1" "$2" "$3" "$4" "$5" "$6" "$7" "gcv"
-# run_exp "$1" "$2" "$3" "$4" "$5" "$6" "$7" "hgrad-ift"
- run_exp "$1" "$2" "$3" "$4" "$5" "$6" "$7" "hgrad-closed"
- run_exp "$1" "$2" "$3" "$4" "$5" "$6" "$7" "creg-nopenfit"
- run_exp "$1" "$2" "$3" "$4" "$5" "$6" "$7" "creg-penfit"
-}
-
-
-M=20
-DATASET=boston
-LR=0.02
-NUM_EPOCHS=200
-VAL_PCT=0.2
-
-PEN_INIT=1e-4
-SIG_INIT=15
-ENAME="test_hopt_rmsprop_m${M}_lr${LR}_pinit${PEN_INIT}sinit${SIG_INIT}_meanrem_val${VAL_PCT}"
-run_exp_all_models "$SIG_INIT" "$PEN_INIT" "$LR" "$M" "$DATASET" "$ENAME" "$VAL_PCT"
-
-PEN_INIT=1
-SIG_INIT=15
-ENAME="test_hopt_rmsprop_m${M}_lr${LR}_pinit${PEN_INIT}sinit${SIG_INIT}_meanrem_val${VAL_PCT}"
-run_exp_all_models "$SIG_INIT" "$PEN_INIT" "$LR" "$M" "$DATASET" "$ENAME" "$VAL_PCT"
-
-
-# VAL_PCT=0.2
-# ENAME="test_hopt_m${M}_lr${LR}_pinit${PEN_INIT}sinit${SIG_INIT}_meanrem_val${VAL_PCT}"
-# run_exp_all_models "$SIG_INIT" "$PEN_INIT" "$LR" "$M" "$DATASET" "$ENAME" "$VAL_PCT"
-#
-# VAL_PCT=0.4
-# ENAME="test_hopt_m${M}_lr${LR}_pinit${PEN_INIT}sinit${SIG_INIT}_meanrem_val${VAL_PCT}"
-# run_exp_all_models "$SIG_INIT" "$PEN_INIT" "$LR" "$M" "$DATASET" "$ENAME" "$VAL_PCT"
-#
-# VAL_PCT=0.6
-# ENAME="test_hopt_m${M}_lr${LR}_pinit${PEN_INIT}sinit${SIG_INIT}_meanrem_val${VAL_PCT}"
-# run_exp_all_models "$SIG_INIT" "$PEN_INIT" "$LR" "$M" "$DATASET" "$ENAME" "$VAL_PCT"
-#
-# VAL_PCT=0.8
-# ENAME="test_hopt_m${M}_lr${LR}_pinit${PEN_INIT}sinit${SIG_INIT}_meanrem_val${VAL_PCT}"
-# run_exp_all_models "$SIG_INIT" "$PEN_INIT" "$LR" "$M" "$DATASET" "$ENAME" "$VAL_PCT"
diff --git a/falkon/benchmarks/timing_benchmarks/lauum_timings.py b/falkon/benchmarks/timing_benchmarks/lauum_timings.py
deleted file mode 100644
index d451308b..00000000
--- a/falkon/benchmarks/timing_benchmarks/lauum_timings.py
+++ /dev/null
@@ -1,101 +0,0 @@
-import functools
-import json
-import time
-from typing import Any, Dict, List
-
-import numpy as np
-import torch
-from scipy.linalg.lapack import dlauum, slauum
-
-import falkon
-from falkon.ooc_ops.ooc_lauum import gpu_lauum
-from falkon.utils import devices
-
-
-def gen_random(a, b, dtype, F=False, seed=0):
- rng = np.random.default_rng(seed)
- out = rng.random(size=(a, b), dtype=dtype)
- if F:
- return out.T
- return out
-
-
-def run_lauum_exp(exp_name, fn, exp_sizes, num_reps, is_torch, dtype):
- timings = []
- for num_pts in exp_sizes:
- A = gen_random(num_pts, num_pts, dtype=dtype, F=True, seed=123)
-
- rep_times = []
- for j in range(num_reps):
- if is_torch:
- Ac = torch.from_numpy(A.copy(order="C"))
- else:
- Ac = A.copy(order="F")
- t_s = time.time()
- fn(Ac)
- t_e = time.time()
- rep_times.append(t_e - t_s)
- print(f"Exp {exp_name} - N {num_pts} - Rep {j} - {rep_times[-1]:.2f}s", flush=True)
- del Ac
- if is_torch:
- torch.cuda.empty_cache()
- timings.append(min(rep_times))
- return timings
-
-
-if __name__ == "__main__":
- init_opt = falkon.FalkonOptions(compute_arch_speed=False)
- gpu_info = [v for k, v in devices.get_device_info(init_opt).items() if k >= 0]
- num_gpu = len(gpu_info)
-
- experiments: List[Dict[str, Any]] = [
- {
- "exp_name": "OOC 32",
- "exp_sizes": [10_000, 20_000, 30_000, 40_000, 50_000, 75_000, 100_000, 120_000, 140_000],
- "dtype": np.float32,
- "num_reps": 5,
- "is_torch": True,
- "fn": functools.partial(
- gpu_lauum,
- upper=False,
- overwrite=True,
- write_opposite=True,
- opt=falkon.FalkonOptions(compute_arch_speed=False),
- ),
- },
- {
- "exp_name": "OOC 64",
- "exp_sizes": [10_000, 20_000, 30_000, 40_000, 50_000],
- "dtype": np.float64,
- "num_reps": 5,
- "is_torch": True,
- "fn": functools.partial(
- gpu_lauum,
- upper=False,
- overwrite=True,
- write_opposite=True,
- opt=falkon.FalkonOptions(compute_arch_speed=False),
- ),
- },
- {
- "exp_name": "CPU 32",
- "exp_sizes": [10_000, 20_000, 30_000, 40_000, 50_000, 75_000, 100_000],
- "dtype": np.float32,
- "num_reps": 3,
- "is_torch": False,
- "fn": functools.partial(slauum, lower=1, overwrite_c=True),
- },
- {
- "exp_name": "CPU 64",
- "exp_sizes": [10_000, 20_000, 30_000, 40_000, 50_000, 75_000, 100_000],
- "dtype": np.float64,
- "num_reps": 3,
- "is_torch": False,
- "fn": functools.partial(dlauum, lower=1, overwrite_c=True),
- },
- ]
- for exp in experiments:
- exp_times = run_lauum_exp(**exp)
- exp["timings"] = exp_times
- with open(f"logs/lauum_timings_{num_gpu}GPU.json", "w") as fh:
- json.dump(experiments, fh)
diff --git a/falkon/benchmarks/timing_benchmarks/lauum_timings.sh b/falkon/benchmarks/timing_benchmarks/lauum_timings.sh
deleted file mode 100755
index c56c266a..00000000
--- a/falkon/benchmarks/timing_benchmarks/lauum_timings.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-if [ ! -d logs ]; then
- mkdir logs
-fi
-
-echo "Running with 1 GPU"
-export CUDA_VISIBLE_DEVICES="0"
-python lauum_timings.py 2>&1 | tee -a "logs/lauum_timings_1GPU.txt"
-
-echo "Running with 2 GPUs"
-export CUDA_VISIBLE_DEVICES="0,1"
-python lauum_timings.py 2>&1 | tee -a "logs/lauum_timings_2GPU.txt"
-exit 1;
-
-
-echo "Running with 3 GPUs"
-export CUDA_VISIBLE_DEVICES="0,1,2"
-python lauum_timings.py 2>&1 | tee -a "logs/lauum_timings_3GPU.txt"
-
-echo "Running with 4 GPUs"
-export CUDA_VISIBLE_DEVICES="0,1,2,3"
-python lauum_timings.py 2>&1 | tee -a "logs/lauum_timings_4GPU.txt"
-
-echo "Running with 5 GPUs"
-export CUDA_VISIBLE_DEVICES="0,1,2,3,4"
-python lauum_timings.py 2>&1 | tee -a "logs/lauum_timings_5GPU.txt"
diff --git a/falkon/benchmarks/timing_benchmarks/mm_timings.py b/falkon/benchmarks/timing_benchmarks/mm_timings.py
deleted file mode 100644
index b22661a6..00000000
--- a/falkon/benchmarks/timing_benchmarks/mm_timings.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import time
-
-import kernels
-import numpy as np
-import torch
-
-import falkon
-
-
-def gen_random(a, b, dtype, F=False, seed=0):
- rng = np.random.default_rng(seed)
- out = rng.random(size=(a, b), dtype=dtype)
- if F:
- return out.T
- return out
-
-
-def run_mm_exp(exp_name, kernel, N, D, pin_memory, num_reps):
- timings = []
- A = torch.randn(N, D, dtype=torch.float32)
- if pin_memory:
- A = A.pin_memory()
-
- for _ in range(num_reps):
- t_s = time.time()
- kernel(A)
- torch.cuda.synchronize()
- t_e = time.time()
- timings.append(t_e - t_s)
- print(f"{exp_name} - {N=} {D=} {pin_memory=} - {t_e - t_s:.2f}s", flush=True)
- print(f"\t min={np.min(timings):.2f}s")
- return np.min(timings)
-
-
-if __name__ == "__main__":
- N = 50_000
- D = 256
- for no_single_kernel in [True, False]:
- init_opt = falkon.FalkonOptions(compute_arch_speed=False, no_single_kernel=no_single_kernel)
- kernel = kernels.GaussianKernel(sigma=5.0, opt=init_opt)
- exp_name = f"exp-{no_single_kernel=}"
- run_mm_exp(exp_name=exp_name, kernel=kernel, N=N, D=D, pin_memory=True, num_reps=5)
- print()
diff --git a/falkon/benchmarks/timing_benchmarks/mmv_timings.py b/falkon/benchmarks/timing_benchmarks/mmv_timings.py
deleted file mode 100644
index 3a67c5b5..00000000
--- a/falkon/benchmarks/timing_benchmarks/mmv_timings.py
+++ /dev/null
@@ -1,144 +0,0 @@
-import argparse
-import json
-import timeit
-from typing import Any, Dict, List
-
-import torch
-
-from falkon.kernels import GaussianKernel
-
-
-def gen_data(N, M, D, T, cuda=False, dtype=torch.float64):
- X1 = torch.randn(N, D, requires_grad=False, dtype=dtype)
- X2 = torch.randn(M, D, requires_grad=False, dtype=dtype)
- v = torch.randn(M, T, requires_grad=False, dtype=dtype)
- if not cuda:
- return X1, X2, v
- return X1.cuda(), X2.cuda(), v.cuda()
-
-
-def run_mmv_exp(exp_name, fn, changing_var, data_sizes, kernel, dtype, num_reps):
- timings = []
- fn = fn + "; torch.cuda.synchronize();"
-
- for i in range(len(data_sizes[changing_var])):
- N = data_sizes["N"][i] if changing_var == "N" else data_sizes["N"]
- D = data_sizes["D"][i] if changing_var == "D" else data_sizes["D"]
- M = data_sizes["M"][i] if changing_var == "M" else data_sizes["M"]
- T = data_sizes["T"][i] if changing_var == "T" else data_sizes["T"]
-
- X1, X2, v = gen_data(N, M, D, T, dtype=dtype)
-
- _vars = locals()
- _vars.update(globals())
- exp_times = timeit.repeat(fn, globals=_vars, number=1, repeat=num_reps)
- timings.append(min(exp_times))
- print(f"Exp {exp_name} - {N=}, {D=}, {M=}, {T=} - {timings[-1]:.2fs}", flush=True)
- torch.cuda.empty_cache()
- return timings
-
-
-if __name__ == "__main__":
- aparse = argparse.ArgumentParser(description="MMV experiment runner")
- aparse.add_argument("--num-gpus", type=int, required=True)
- args = aparse.parse_args()
- num_gpus = args.num_gpus
-
- kernel = GaussianKernel(3.0)
- Ns = [
- 1000,
- 5000,
- 20000,
- 50000,
- 100000,
- 200000,
- 400000,
- 600_000,
- 1_000_000,
- 2_000_000,
- 10_000_000,
- 50_000_000,
- 100_000_000,
- ]
- KeopsDs = [10, 50, 100, 250, 500, 750, 1000, 1250, 1500, 1750, 2000]
- OurDs = [10, 50, 100, 250, 500, 750, 1000, 1250, 1500, 2000, 2500, 3000, 4000, 5000, 7000, 10000]
- defaultM = 20_000
- defaultN = 20_000
- defaultT = 10
- defaultD = 10
-
- experiments: List[Dict[str, Any]] = [
- {
- "exp_name": "varying N - KeOps 32",
- "changing_var": "N",
- "data_sizes": {
- "N": Ns,
- "M": defaultM,
- "D": defaultD,
- "T": defaultT,
- },
- "kernel": kernel,
- "dtype": torch.float32,
- "num_reps": 10,
- "fn": (
- "kernel._keops_mmv_impl(X1, X2, v, kernel, out=None, "
- 'opt=FalkonOptions(keops_active="force", compute_arch_speed=False));'
- ),
- },
- {
- "exp_name": "varying N - Our 32",
- "changing_var": "N",
- "data_sizes": {
- "N": Ns,
- "M": defaultM,
- "D": defaultD,
- "T": defaultT,
- },
- "kernel": kernel,
- "dtype": torch.float32,
- "num_reps": 10,
- "fn": (
- "kernel.mmv(X1, X2, v, out=None, " 'opt=FalkonOptions(keops_active="no", compute_arch_speed=False));'
- ),
- },
- {
- "exp_name": "varying D - KeOps 32",
- "changing_var": "D",
- "data_sizes": {
- "N": defaultN,
- "M": defaultM,
- "D": KeopsDs,
- "T": defaultT,
- },
- "kernel": kernel,
- "dtype": torch.float32,
- "num_reps": 10,
- "fn": (
- "kernel.keops_mmv_impl(X1, X2, v, kernel, out=None, "
- 'opt=FalkonOptions(keops_active="force", compute_arch_speed=False));'
- ),
- },
- {
- "exp_name": "varying D - Our 32",
- "changing_var": "D",
- "data_sizes": {
- "N": defaultN,
- "M": defaultM,
- "D": OurDs,
- "T": defaultT,
- },
- "kernel": kernel,
- "dtype": torch.float32,
- "num_reps": 10,
- "fn": 'kernel.mmv(X1, X2, v, out=None, opt=FalkonOptions(keops_active="no", compute_arch_speed=False));',
- },
- ]
-
- for exp in experiments:
- exp_times = run_mmv_exp(**exp)
- exp["timings"] = exp_times
- # Remove the stuff we can't serialize
- exp["kernel"] = None
- exp["dtype"] = None
- with open(f"logs/mmv_timings_{num_gpus}GPU.json", "w") as fh:
- json.dump(experiments, fh)
diff --git a/falkon/benchmarks/timing_benchmarks/mmv_timings.sh b/falkon/benchmarks/timing_benchmarks/mmv_timings.sh
deleted file mode 100755
index 44f025b7..00000000
--- a/falkon/benchmarks/timing_benchmarks/mmv_timings.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-if [ ! -d logs ]; then
- mkdir logs
-fi
-
-echo "Running with 1 GPU"
-export CUDA_VISIBLE_DEVICES="0"
-python mmv_timings.py --num-gpus 1 2>&1 | tee -a "logs/mmv_timings_1GPU.txt"
-exit 1;
-
-echo "Running with 2 GPUs"
-export CUDA_VISIBLE_DEVICES="0,1"
-python mmv_timings.py --num-gpus 2 2>&1 | tee -a "logs/mmv_timings_2GPU.txt"
-
diff --git a/falkon/benchmarks/timing_benchmarks/potrf_timings.py b/falkon/benchmarks/timing_benchmarks/potrf_timings.py
deleted file mode 100644
index f054f048..00000000
--- a/falkon/benchmarks/timing_benchmarks/potrf_timings.py
+++ /dev/null
@@ -1,113 +0,0 @@
-import functools
-import json
-import time
-from typing import Any, Dict, List
-
-import numpy as np
-import torch
-from scipy.linalg.lapack import dpotrf, spotrf
-
-import falkon
-from falkon.ooc_ops.ooc_potrf import gpu_cholesky
-from falkon.utils import devices
-
-
-def gen_random(a, b, dtype, F=False, seed=0):
- rng = np.random.default_rng(seed)
- out = rng.random(size=(a, b), dtype=dtype)
- if F:
- return out.T
- return out
-
-
-def gen_random_pd(t, dtype, F=False, seed=0):
- A = gen_random(t, t, dtype, F, seed)
- A = A + A.T
- A.flat[:: t + 1] += t
- return A
-
-
-def run_potrf_exp(exp_name, fn, exp_sizes, num_reps, is_torch, dtype):
- timings = []
- for num_pts in exp_sizes:
- A = gen_random_pd(num_pts, dtype, F=True, seed=192)
-
- rep_times = []
- for j in range(num_reps):
- if is_torch:
- Ac = torch.from_numpy(A.copy(order="F"))
- else:
- Ac = A.copy(order="F")
- t_s = time.time()
- fn(Ac)
- t_e = time.time()
- rep_times.append(t_e - t_s)
- print(f"Exp {exp_name} - N {num_pts} - Rep {j} - {rep_times[-1]:.2f}s", flush=True)
- del Ac
- if is_torch:
- torch.cuda.empty_cache()
- timings.append(min(rep_times))
- return timings
-
-
-if __name__ == "__main__":
- init_opt = falkon.FalkonOptions()
- torch.cuda.init()
- gpu_info = [v for k, v in devices.get_device_info(init_opt).items() if k >= 0]
- num_gpu = len(gpu_info)
-
- defaultN32 = [10_000, 20_000, 30_000, 40_000, 50_000, 65_000, 80_000, 100_000, 120_000, 140_000]
- defaultN64 = [10_000, 20_000, 30_000, 40_000, 50_000, 65_000, 80_000]
- falkon.FalkonOptions(chol_force_ooc=True, chol_par_blk_multiplier=2, compute_arch_speed=False)
-
- experiments: List[Dict[str, Any]] = [
- {
- "exp_name": "Parallel 32",
- "exp_sizes": defaultN32,
- "dtype": np.float32,
- "num_reps": 3,
- "is_torch": True,
- "fn": functools.partial(
- gpu_cholesky,
- upper=False,
- clean=False,
- overwrite=True,
- opt=falkon.FalkonOptions(chol_force_ooc=True, chol_par_blk_multiplier=2),
- ),
- },
- {
- "exp_name": "Parallel 64",
- "exp_sizes": defaultN64,
- "dtype": np.float64,
- "num_reps": 3,
- "is_torch": True,
- "fn": functools.partial(
- gpu_cholesky,
- upper=False,
- clean=False,
- overwrite=True,
- opt=falkon.FalkonOptions(chol_force_ooc=True, chol_par_blk_multiplier=2, compute_arch_speed=False),
- ),
- },
- {
- "exp_name": "CPU 32",
- "exp_sizes": defaultN32,
- "dtype": np.float32,
- "num_reps": 3,
- "is_torch": False,
- "fn": functools.partial(spotrf, lower=True, clean=False, overwrite_a=True),
- },
- {
- "exp_name": "CPU 64",
- "exp_sizes": defaultN64,
- "dtype": np.float64,
- "num_reps": 2,
- "is_torch": False,
- "fn": functools.partial(dpotrf, lower=True, clean=False, overwrite_a=True),
- },
- ]
- for exp in experiments:
- exp_times = run_potrf_exp(**exp)
- exp["timings"] = exp_times
- with open(f"logs/potrf_timings_{num_gpu}GPU.json", "w") as fh:
- json.dump(experiments, fh)
diff --git a/falkon/benchmarks/timing_benchmarks/potrf_timings.sh b/falkon/benchmarks/timing_benchmarks/potrf_timings.sh
deleted file mode 100755
index 1f21cb27..00000000
--- a/falkon/benchmarks/timing_benchmarks/potrf_timings.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-if [ ! -d logs ]; then
- mkdir logs
-fi
-
-
-echo "Running with 1 GPU"
-export CUDA_VISIBLE_DEVICES="0"
-python potrf_timings.py 2>&1 | tee -a "logs/potrf_timings_1GPU.txt"
-
-echo "Running with 2 GPUs"
-export CUDA_VISIBLE_DEVICES="0,1"
-python potrf_timings.py 2>&1 | tee -a "logs/potrf_timings_2GPU.txt"
-
-exit 0;
-
-echo "Running with 3 GPUs"
-export CUDA_VISIBLE_DEVICES="0,1,2"
-python potrf_timings.py 2>&1 | tee -a "logs/potrf_timings_3GPU.txt"
-
-echo "Running with 4 GPUs"
-export CUDA_VISIBLE_DEVICES="0,1,2,3"
-python potrf_timings.py 2>&1 | tee -a "logs/potrf_timings_4GPU.txt"
-
-echo "Running with 5 GPUs"
-export CUDA_VISIBLE_DEVICES="0,1,2,3,4"
-python potrf_timings.py 2>&1 | tee -a "logs/potrf_timings_5GPU.txt"
diff --git a/falkon/benchmarks/timing_benchmarks/time_improvements.py b/falkon/benchmarks/timing_benchmarks/time_improvements.py
deleted file mode 100644
index 1979f109..00000000
--- a/falkon/benchmarks/timing_benchmarks/time_improvements.py
+++ /dev/null
@@ -1,99 +0,0 @@
-"""
-Time Falkon with different parts switched on/off on a sample dataset (MS) which is quite fast:
-
-Baseline (equivalent to FALKON MATLAB):
-1. float64 + CPU Preconditioner + single GPU (no keops)
-2. float32 + CPU Preconditioner + single GPU (no keops)
-3. float32 + GPU Preconditioner + single GPU (no keops)
-4. float32 + GPU Preconditioner + 2 GPU (no keops)
-5. float32 + GPU Preconditioner + 2 GPU (keops)
-"""
-
-import argparse
-import dataclasses
-import functools
-import time
-
-import numpy as np
-import torch
-
-import falkon
-from falkon import kernels
-from falkon.benchmarks.common.benchmark_utils import Dataset, DataType
-from falkon.benchmarks.common.datasets import get_load_fn
-from falkon.benchmarks.common.error_metrics import get_err_fns
-
-RANDOM_SEED = 95
-
-torch.manual_seed(RANDOM_SEED)
-np.random.seed(RANDOM_SEED)
-
-
-def run(exp_num, dset, show_intermediate_errors: bool = False):
- opt = falkon.FalkonOptions(
- debug=True,
- pc_epsilon_32=1e-6,
- pc_epsilon_64=1e-13,
- compute_arch_speed=False,
- num_fmm_streams=2,
- no_single_kernel=False,
- )
- params = {
- "seed": 12,
- "kernel": kernels.GaussianKernel(3.8),
- "penalty": 1e-7,
- "M": 100_000,
- "maxiter": 10,
- }
- if exp_num == 1:
- opt = dataclasses.replace(opt, cpu_preconditioner=True, keops_active="no")
- dtype = DataType.float64
- elif exp_num == 2:
- opt = dataclasses.replace(opt, cpu_preconditioner=True, keops_active="no")
- dtype = DataType.float32
- elif exp_num == 3:
- opt = dataclasses.replace(opt, cpu_preconditioner=False, keops_active="no")
- dtype = DataType.float32
- elif exp_num == 4:
- opt = dataclasses.replace(opt, cpu_preconditioner=False, keops_active="no")
- dtype = DataType.float32
- elif exp_num == 5:
- opt = dataclasses.replace(opt, cpu_preconditioner=False, keops_active="force")
- dtype = DataType.float32
- else:
- raise ValueError(f"exp num {exp_num} not valid")
- data = load_data(dset, data_type=dtype)
- torch.cuda.init()
- print(f"\n\n --- Running Experiment {exp_num} -- {opt}")
- data = list(data)
- data[0] = data[0].pin_memory()
- data[1] = data[1].pin_memory()
- data[2] = data[2].pin_memory()
- data[3] = data[3].pin_memory()
- t_s = time.time()
- flk = run_single(dset, data[0], data[1], data[2], data[3], data[4], show_intermediate_errors, opt, params)
- t_e = time.time()
- print(f"Timing for Experiment {exp_num}: {t_e - t_s:.3f}s -- fit times {flk.fit_times_}")
-
-
-def load_data(dset, data_type):
- load_fn = get_load_fn(dset)
- return load_fn(dtype=data_type.to_numpy_dtype(), as_torch=True)
-
-
-def run_single(dset, Xtr, Ytr, Xts, Yts, kwargs, intermediate_errors, opt, params):
- err_fns = get_err_fns(dset)
- err_fns = [functools.partial(fn, **kwargs) for fn in err_fns]
- error_every = 1 if intermediate_errors else None
-
- flk = falkon.Falkon(error_fn=err_fns[0], error_every=error_every, options=opt, **params)
- flk.fit(Xtr, Ytr, Xts, Yts)
- return flk
-
-
-if __name__ == "__main__":
- p = argparse.ArgumentParser(description="FALKON Benchmark Runner")
- p.add_argument("-i", "--exp-num", type=int, required=True, help="The experiment type, 1 to 5.")
- p.add_argument("-d", "--dataset", type=Dataset, choices=list(Dataset), required=True, help="Dataset")
- args = p.parse_args()
- run(args.exp_num, args.dataset, show_intermediate_errors=True)
diff --git a/falkon/benchmarks/timing_benchmarks/time_improvements.sh b/falkon/benchmarks/timing_benchmarks/time_improvements.sh
deleted file mode 100755
index e7b15ba2..00000000
--- a/falkon/benchmarks/timing_benchmarks/time_improvements.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/bash
-
-# Prepare log file
-if [ ! -d logs ]; then
- mkdir logs
-fi
-
-# Prepare conda
-CONDA_BASE=$(conda info --base)
-source $CONDA_BASE/etc/profile.d/conda.sh
-
-# Common variables
-DSET="higgs"
-PY_LAUNCHER="time_improvements.py"
-LOG_FILE="logs/time_improvements_${DSET}.log"
-
-conda activate torch
-
-export CUDA_VISIBLE_DEVICES="0"
-PYTHONPATH='..' python $PY_LAUNCHER --exp-num 1 --dataset $DSET 2>&1 | tee -a $LOG_FILE
-export CUDA_VISIBLE_DEVICES="0"
-PYTHONPATH='..' python $PY_LAUNCHER --exp-num 2 --dataset $DSET 2>&1 | tee -a $LOG_FILE
-export CUDA_VISIBLE_DEVICES="0"
-PYTHONPATH='..' python $PY_LAUNCHER --exp-num 3 --dataset $DSET 2>&1 | tee -a $LOG_FILE
-export CUDA_VISIBLE_DEVICES="0,1"
-PYTHONPATH='..' python $PY_LAUNCHER --exp-num 4 --dataset $DSET 2>&1 | tee -a $LOG_FILE
-export CUDA_VISIBLE_DEVICES="0,1"
-PYTHONPATH='..' python $PY_LAUNCHER --exp-num 5 --dataset $DSET 2>&1 | tee -a $LOG_FILE
-
-conda deactivate
\ No newline at end of file
diff --git a/falkon/c_ext/__init__.py b/falkon/c_ext/__init__.py
deleted file mode 100644
index 25e55254..00000000
--- a/falkon/c_ext/__init__.py
+++ /dev/null
@@ -1,60 +0,0 @@
-"""
-Taken from nerfacc (https://github.com/KAIR-BAIR/nerfacc) (MIT Licence)
-
-Copyright (c) 2022 Ruilong Li, UC Berkeley.
-Copyright (c) 2023 Giacomo Meanti
-"""
-
-from typing import Callable
-
-import torch
-
-
-def _make_lazy_cuda_func(name: str) -> Callable:
- def call_cuda(*args, **kwargs):
- from ._backend import _assert_has_ext
-
- _assert_has_ext()
- return getattr(torch.ops.falkon, name)(*args, **kwargs)
-
- return call_cuda
-
-
-# Custom la functions
-parallel_potrf = _make_lazy_cuda_func("parallel_potrf")
-lauum_cuda = _make_lazy_cuda_func("lauum")
-
-# Triangular helpers
-copy_triang = _make_lazy_cuda_func("copy_triang")
-mul_triang = _make_lazy_cuda_func("mul_triang")
-copy_transpose = _make_lazy_cuda_func("copy_transpose")
-vec_mul_triang = _make_lazy_cuda_func("vec_mul_triang")
-
-# Sparse matrices
-spspmm = _make_lazy_cuda_func("spspmm")
-csr2dense = _make_lazy_cuda_func("csr2dense")
-sparse_row_norm_sq = _make_lazy_cuda_func("sparse_square_norm")
-sparse_row_norm = _make_lazy_cuda_func("sparse_norm")
-sparse_bdot = _make_lazy_cuda_func("sparse_bdot")
-
-# Square norm with autograd
-square_norm = _make_lazy_cuda_func("square_norm")
-
-# Wrappers
-cublas_2d_copy_to_dev_async = _make_lazy_cuda_func("cublas_2d_copy_to_dev_async")
-cublas_2d_copy_to_dev = _make_lazy_cuda_func("cublas_2d_copy_to_dev")
-cublas_2d_copy_to_host_async = _make_lazy_cuda_func("cublas_2d_copy_to_host_async")
-cublas_2d_copy_to_host = _make_lazy_cuda_func("cublas_2d_copy_to_host")
-cuda_2d_copy_async = _make_lazy_cuda_func("cuda_2d_copy_async")
-cuda_2d_copy = _make_lazy_cuda_func("cuda_2d_copy")
-cuda_1d_copy_async = _make_lazy_cuda_func("cuda_1d_copy_async")
-cuda_1d_copy = _make_lazy_cuda_func("cuda_1d_copy")
-mem_get_info = _make_lazy_cuda_func("mem_get_info")
-cusolver_potrf_buffer_size = _make_lazy_cuda_func("cusolver_potrf_buffer_size")
-cusolver_potrf = _make_lazy_cuda_func("cusolver_potrf")
-potrf = _make_lazy_cuda_func("potrf")
-cublas_trsm = _make_lazy_cuda_func("cublas_trsm")
-cublas_trmm = _make_lazy_cuda_func("cublas_trmm")
-cublas_gemm = _make_lazy_cuda_func("cublas_gemm")
-cublas_syrk = _make_lazy_cuda_func("cublas_syrk")
-cuda_version = _make_lazy_cuda_func("_cuda_version")
diff --git a/falkon/c_ext/_backend.py b/falkon/c_ext/_backend.py
deleted file mode 100644
index 902d3f24..00000000
--- a/falkon/c_ext/_backend.py
+++ /dev/null
@@ -1,198 +0,0 @@
-"""
-Taken from nerfacc (https://github.com/KAIR-BAIR/nerfacc) (MIT Licence)
-
-Copyright (c) 2022 Ruilong Li, UC Berkeley.
-Copyright (c) 2023 Giacomo Meanti
-"""
-
-import glob
-import importlib.machinery
-import json
-import os
-import os.path as osp
-import shutil
-import warnings
-from subprocess import DEVNULL, call
-from typing import Optional
-
-import torch.cuda
-from torch.utils.cpp_extension import _get_build_directory, load
-
-
-def _get_extension_path(lib_name):
- lib_dir = os.path.dirname(__file__)
- loader_details = (importlib.machinery.ExtensionFileLoader, importlib.machinery.EXTENSION_SUFFIXES)
-
- extfinder = importlib.machinery.FileFinder(lib_dir, loader_details)
- ext_specs = extfinder.find_spec(lib_name)
- if ext_specs is None:
- raise ImportError
-
- return ext_specs.origin
-
-
-def cuda_toolkit_available():
- """Check if the nvcc is avaiable on the machine."""
- try:
- call(["nvcc"], stdout=DEVNULL, stderr=DEVNULL)
- return True
- except FileNotFoundError:
- return False
-
-
-def cuda_toolkit_version():
- """Get the cuda toolkit version."""
- cuda_home = os.path.join(os.path.dirname(shutil.which("nvcc")), "..")
- if os.path.exists(os.path.join(cuda_home, "version.txt")):
- with open(os.path.join(cuda_home, "version.txt")) as f:
- cuda_version = f.read().strip().split()[-1]
- elif os.path.exists(os.path.join(cuda_home, "version.json")):
- with open(os.path.join(cuda_home, "version.json")) as f:
- cuda_version = json.load(f)["cuda"]["version"]
- else:
- raise RuntimeError("Cannot find the cuda version.")
- return cuda_version
-
-
-def torch_version():
- import torch
-
- version = torch.__version__
- split_version = version.split(".")
- # With torch 1.10.0 the version 'number' include CUDA version (e.g. '1.10.0+cu102').
- # Here we remove the CUDA version.
- for i in range(len(split_version)):
- if "+" in split_version[i]:
- split_version[i] = split_version[i].split("+")[0]
- return [int(v) for v in split_version]
-
-
-def lib_from_oserror(exc: OSError) -> Optional[str]:
- e_str = str(exc)
- so_idx = e_str.index(".so") # TODO: Platform specific code
- if so_idx <= 0:
- return None
- lib_wext = e_str[: so_idx + 3]
- return lib_wext
-
-
-_HAS_EXT = False
-
-try:
- if not _HAS_EXT:
- # try to import the compiled module (via setup.py)
- lib_path = _get_extension_path("_C")
- try:
- torch.ops.load_library(lib_path)
- except OSError as e:
- # Hack: usually ld can't find torch_cuda_linalg.so which is in TORCH_LIB_PATH
- # if we load it first, then load_library will work.
- # TODO: This will only work on linux.
- if (missing_lib := lib_from_oserror(e)).startswith("libtorch_cuda_linalg"):
- import ctypes
-
- from torch.utils.cpp_extension import TORCH_LIB_PATH
-
- ctypes.CDLL(os.path.join(TORCH_LIB_PATH, missing_lib))
- torch.ops.load_library(lib_path)
- else:
- raise
-
- _HAS_EXT = True
-
- # Check torch version vs. compilation version
- # Copyright (c) 2020 Matthias Fey
- # https://github.com/rusty1s/pytorch_scatter/blob/master/torch_scatter/__init__.py
- flk_cuda_version = torch.ops.falkon._cuda_version()
- if torch.cuda.is_available() and flk_cuda_version != -1:
- if flk_cuda_version < 10000:
- f_major, f_minor = int(str(flk_cuda_version)[0]), int(str(flk_cuda_version)[2])
- else:
- f_major, f_minor = int(str(flk_cuda_version)[0:2]), int(str(flk_cuda_version)[3])
- t_major, t_minor = (int(x) for x in torch.version.cuda.split("."))
-
- if t_major != f_major:
- raise RuntimeError(
- f"PyTorch and Falkon were compiled with different CUDA versions. "
- f"PyTorch has CUDA version {t_major}.{t_minor} and Falkon has CUDA version "
- f"{f_major}.{f_minor}. Please reinstall Falkon such that its version matches "
- f"your PyTorch install."
- )
-except ImportError:
- # if failed, try with JIT compilation
- ext_dir = os.path.dirname(os.path.abspath(__file__))
- pt_version = torch_version()
- sources = (
- glob.glob(osp.join(ext_dir, "ops", "cpu", "*.cpp"))
- + glob.glob(osp.join(ext_dir, "ops", "autograd", "*.cpp"))
- + glob.glob(osp.join(ext_dir, "ops", "*.cpp"))
- + glob.glob(osp.join(ext_dir, "*.cpp"))
- )
- extra_cflags = [
- "-O3",
- f"-DTORCH_VERSION_MAJOR={pt_version[0]}",
- f"-DTORCH_VERSION_MINOR={pt_version[1]}",
- f"-DTORCH_VERSION_PATCH={pt_version[2]}",
- ]
- extra_ldflags = []
- extra_include_paths = []
- extra_cuda_cflags = []
- if cuda_toolkit_available():
- sources.extend(glob.glob(osp.join(ext_dir, "ops", "cuda", "*.cu")))
- extra_cflags += ["-DWITH_CUDA=1"]
- extra_cuda_cflags += ["--expt-relaxed-constexpr", "--extended-lambda"]
- from torch.utils.cpp_extension import CUDA_HOME, TORCH_LIB_PATH
-
- extra_ldflags += [
- "-L",
- os.path.join(CUDA_HOME, "lib"),
- "-L",
- TORCH_LIB_PATH,
- "-Wl,-rpath",
- TORCH_LIB_PATH,
- "-l",
- "cusparse",
- "-l",
- "cublas",
- "-l",
- "cusolver",
- ]
- if torch.__version__ >= (1, 12):
- extra_ldflags.extend(["-l", "torch_cuda_linalg"])
- else:
- warnings.warn("No CUDA toolkit found. Falkon will only run on the CPU.")
-
- name = "falkon.c_ext._C"
- build_dir = _get_build_directory(name, verbose=False)
- sources = sorted(sources)
- if len(os.listdir(build_dir)) == 0:
- # If the build exists, we assume the extension has been built and we can load it.
- # Otherwise we must build from scratch.
- # Remove the build directory just to be safe: pytorch jit might stuck if the build
- # directory exists.
- shutil.rmtree(build_dir)
- print("Building C extension. This might take a couple of minutes.")
- load(
- name=name,
- sources=sources,
- extra_cflags=extra_cflags,
- extra_ldflags=extra_ldflags,
- extra_cuda_cflags=extra_cuda_cflags,
- extra_include_paths=extra_include_paths,
- is_python_module=False,
- is_standalone=False,
- )
- _HAS_EXT = True
-
-
-def _assert_has_ext():
- if not _HAS_EXT:
- raise RuntimeError(
- "Couldn't load custom C++ ops. This can happen if your PyTorch and "
- "falkon versions are incompatible, or if you had errors while compiling "
- "falkon from source. For further information on the compatible versions, check "
- "https://github.com/falkonml/falkon#installation for the compatibility matrix. "
- "Please check your PyTorch version with torch.__version__ and your falkon "
- "version with falkon.__version__ and verify if they are compatible, and if not "
- "please reinstall falkon so that it matches your PyTorch install."
- )
diff --git a/falkon/c_ext/falkon.cpp b/falkon/c_ext/falkon.cpp
deleted file mode 100644
index c7c37838..00000000
--- a/falkon/c_ext/falkon.cpp
+++ /dev/null
@@ -1,21 +0,0 @@
-#include "falkon.h"
-
-#include
-
-#ifdef WITH_CUDA
-#include
-#endif
-
-namespace falkon {
- int64_t cuda_version() {
- #ifdef WITH_CUDA
- return CUDA_VERSION;
- #else
- return -1;
- #endif
- }
-
- TORCH_LIBRARY_FRAGMENT(falkon, m) {
- m.def("_cuda_version", &cuda_version);
- }
-} // namespace falkon
diff --git a/falkon/c_ext/falkon.h b/falkon/c_ext/falkon.h
deleted file mode 100644
index dae3a38c..00000000
--- a/falkon/c_ext/falkon.h
+++ /dev/null
@@ -1,13 +0,0 @@
-#pragma once
-
-#include
-
-namespace falkon {
- int64_t cuda_version();
-
-} // namespace falkon
-
-
-/*
- * schema help: https://github.com/pytorch/pytorch/blob/main/aten/src/ATen/native/README.md
- */
diff --git a/falkon/c_ext/ops/autograd/ag_square_norm.cpp b/falkon/c_ext/ops/autograd/ag_square_norm.cpp
deleted file mode 100644
index b7039a01..00000000
--- a/falkon/c_ext/ops/autograd/ag_square_norm.cpp
+++ /dev/null
@@ -1,69 +0,0 @@
-#include "../square_norm.h"
-
-#include
-#include
-
-namespace falkon {
-namespace ops {
-namespace {
-
-class SquareNormFunction
- : public torch::autograd::Function {
- public:
- static torch::autograd::variable_list forward(
- torch::autograd::AutogradContext *ctx,
- const torch::autograd::Variable& input,
- int64_t dim,
- bool keepdim) {
- at::AutoDispatchBelowADInplaceOrView g;
- auto output = square_norm(input, dim, keepdim);
-
- ctx->save_for_backward({input});
- ctx->saved_data["dim"] = dim;
- ctx->saved_data["keepdim"] = keepdim;
-
- return {
- output,
- };
- }
- static torch::autograd::variable_list backward(
- torch::autograd::AutogradContext* ctx,
- const torch::autograd::variable_list& grad_output) {
- auto input = ctx->get_saved_variables()[0];
-
- auto dim = ctx->saved_data["dim"].toInt();
- auto keepdim = ctx->saved_data["keepdim"].toBool();
-
- auto grad_out = grad_output[0];
-
- if (!keepdim) {
- grad_out = grad_out.unsqueeze(dim);
- }
- auto grad_input = input * 2;
- grad_input.mul_(grad_out);
-
- return {
- grad_input,
- torch::autograd::Variable(),
- torch::autograd::Variable()
- };
- }
-};
-
-at::Tensor square_norm_autograd(
- const at::Tensor& input,
- int64_t dim,
- bool keepdim) {
- return SquareNormFunction::apply(input, dim, keepdim)[0];
-}
-
-} // namespace
-
-TORCH_LIBRARY_IMPL(falkon, Autograd, m) {
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::square_norm"),
- TORCH_FN(square_norm_autograd));
-}
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/copy_transpose.cpp b/falkon/c_ext/ops/copy_transpose.cpp
deleted file mode 100644
index d09f5329..00000000
--- a/falkon/c_ext/ops/copy_transpose.cpp
+++ /dev/null
@@ -1,30 +0,0 @@
-#include "copy_transpose.h"
-
-#include
-#include
-#include
-
-namespace falkon {
-namespace ops {
-
-at::Tensor copy_transpose(
- const at::Tensor &self,
- at::Tensor &out) {
- static auto op = c10::Dispatcher::singleton()
- .findSchemaOrThrow("falkon::copy_transpose", "")
- .typed();
- at::AutoDispatchBelowAutograd guard;
- at::tracer::impl::NoTracerDispatchMode tracer_guard;
- return op.call(
- self,
- out
- );
-}
-
-TORCH_LIBRARY_FRAGMENT(falkon, m) {
- m.def(TORCH_SELECTIVE_SCHEMA(
- "falkon::copy_transpose(Tensor self, Tensor(a!) out) -> Tensor(a!)"));
-}
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/copy_transpose.h b/falkon/c_ext/ops/copy_transpose.h
deleted file mode 100644
index 904c69ac..00000000
--- a/falkon/c_ext/ops/copy_transpose.h
+++ /dev/null
@@ -1,13 +0,0 @@
-#pragma once
-
-#include
-
-namespace falkon {
-namespace ops {
-
-at::Tensor copy_transpose(
- const at::Tensor &self,
- at::Tensor &out);
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/copy_triang.cpp b/falkon/c_ext/ops/copy_triang.cpp
deleted file mode 100644
index 473ba946..00000000
--- a/falkon/c_ext/ops/copy_triang.cpp
+++ /dev/null
@@ -1,30 +0,0 @@
-#include "copy_triang.h"
-
-#include
-#include
-#include
-
-namespace falkon {
-namespace ops {
-
-at::Tensor copy_triang(
- at::Tensor &self,
- const bool upper) {
- static auto op = c10::Dispatcher::singleton()
- .findSchemaOrThrow("falkon::copy_triang", "")
- .typed();
- at::AutoDispatchBelowAutograd guard;
- at::tracer::impl::NoTracerDispatchMode tracer_guard;
- return op.call(
- self,
- upper
- );
-}
-
-TORCH_LIBRARY_FRAGMENT(falkon, m) {
- m.def(TORCH_SELECTIVE_SCHEMA(
- "falkon::copy_triang(Tensor(a!) self, bool upper) -> Tensor(a!)"));
-}
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/copy_triang.h b/falkon/c_ext/ops/copy_triang.h
deleted file mode 100644
index 09dea48f..00000000
--- a/falkon/c_ext/ops/copy_triang.h
+++ /dev/null
@@ -1,13 +0,0 @@
-#pragma once
-
-#include
-
-namespace falkon {
-namespace ops {
-
-at::Tensor copy_triang(
- at::Tensor &self,
- const bool upper);
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/cpu/cpu_copy_triang.cpp b/falkon/c_ext/ops/cpu/cpu_copy_triang.cpp
deleted file mode 100644
index 133fa060..00000000
--- a/falkon/c_ext/ops/cpu/cpu_copy_triang.cpp
+++ /dev/null
@@ -1,86 +0,0 @@
-#include
-#include
-#include
-
-#include "../helpers.h"
-
-namespace falkon {
-namespace ops {
-namespace {
-
-template
-void copy_triang_impl(scalar_t *mat, const int n, const int stride1, const int stride2, const bool upper) {
- // assume input is f-contiguous (contiguous columns, stride1 == 1)
- if (upper) {
- at::parallel_for(0, n, 0, [&](int64_t start, int64_t end) {
- for (int64_t i : c10::irange(start, end)) {
- for (int64_t j = 0; j < i; j++) {
- // mat[i, j] = mat[j, i]
- mat[i * stride1 + j * stride2] = mat[j * stride1 + i * stride2];
- }
- }
- });
- } else {
- at::parallel_for(0, n, 0, [&](int64_t start, int64_t end) {
- for (int64_t i : c10::irange(start, end)) {
- for (int64_t j = i + 1; j < n; j++) {
- // mat[i, j] = mat[j, i]
- mat[i * stride1 + j * stride2] = mat[j * stride1 + i * stride2];
- }
- }
- });
- }
-}
-
-at::Tensor copy_triang_kernel(
- at::Tensor &mat,
- const bool upper) {
- AT_ASSERTM(mat.dim() == 2, "Input matrix must be 2D");
- const int64_t n = mat.size(0);
- const int64_t m = mat.size(1);
- TORCH_CHECK(
- (n == m),
- "Input matrix must be square. Found shape: (",
- n,
- ", ",
- m,
- ")");
- int64_t row_stride = mat.stride(0);
- int64_t col_stride = mat.stride(1);
- TORCH_CHECK(
- (row_stride == 1 || col_stride == 1),
- "Input must be contiguous in one dimension. Found strides: (",
- row_stride,
- ", ",
- col_stride,
- ")");
-
- bool bupper = upper;
- if (!is_fortran_contig(mat)) {
- bupper = !upper;
- int64_t tmp = row_stride;
- row_stride = col_stride;
- col_stride = tmp;
- }
- AT_DISPATCH_FLOATING_TYPES(mat.scalar_type(), "copy_triang", [&] {
- copy_triang_impl(
- mat.data_ptr(),
- n,
- row_stride,
- col_stride,
- bupper
- );
- });
- return mat;
-}
-
-} // namespace
-
-TORCH_LIBRARY_IMPL(falkon, CPU, m) {
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::copy_triang"),
- TORCH_FN(copy_triang_kernel));
-}
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/cpu/cpu_mul_triang.cpp b/falkon/c_ext/ops/cpu/cpu_mul_triang.cpp
deleted file mode 100644
index 27c93bfb..00000000
--- a/falkon/c_ext/ops/cpu/cpu_mul_triang.cpp
+++ /dev/null
@@ -1,109 +0,0 @@
-#include
-#include
-#include
-
-namespace falkon {
-namespace ops {
-namespace {
-
-// TODO: Parallelize
-template
-void mul_upper_diag(
- scalar_t *data,
- const int64_t size,
- const scalar_t mul,
- const int64_t row_stride,
- const int64_t col_stride,
- const bool preserve_diag) {
- const int diagonal_offset = preserve_diag ? 1 : 0;
- for (int64_t i = 0; i < size; i++) {
- for (int64_t j = i + diagonal_offset; j < size; j++) {
- data[i * row_stride + j * col_stride] *= mul;
- }
- }
-}
-
-template
-void mul_lower_diag(
- scalar_t *data,
- const int64_t size,
- const scalar_t mul,
- const int64_t row_stride,
- const int64_t col_stride,
- const bool preserve_diag) {
- const int diagonal_offset = preserve_diag ? -1 : 0;
- for (int64_t i = 0; i < size; i++) {
- for (int64_t j = 0; j <= (i + diagonal_offset); j++) {
- data[i * row_stride + j * col_stride] *= mul;
- }
- }
-}
-
-
-at::Tensor mul_triang_kernel(
- at::Tensor &mat,
- const double multiplier,
- const bool upper,
- const bool preserve_diag) {
- AT_ASSERTM(mat.dim() == 2, "mat must be 2D");
- const int64_t n = mat.size(0);
- const int64_t m = mat.size(1);
- TORCH_CHECK(
- (n == m),
- "Input matrix must be square. Found shape: (",
- n,
- ", ",
- m,
- ")");
- int64_t row_stride = mat.stride(0);
- int64_t col_stride = mat.stride(1);
- TORCH_CHECK(
- (row_stride == 1 || col_stride == 1),
- "Input must be contiguous in one dimension. Found strides: (",
- row_stride,
- ", ",
- col_stride,
- ")");
-
- bool bupper = upper;
- if (row_stride == 1) {
- bupper = !upper;
- int64_t tmp_stride = row_stride;
- row_stride = col_stride;
- col_stride = tmp_stride;
- }
-
- AT_DISPATCH_FLOATING_TYPES(mat.scalar_type(), "mul_triang", [&] {
- const scalar_t mul = (scalar_t)multiplier;
- if (bupper) {
- mul_upper_diag(
- mat.data_ptr(),
- n,
- mul,
- row_stride,
- col_stride,
- preserve_diag);
- } else {
- mul_lower_diag(
- mat.data_ptr(),
- n,
- mul,
- row_stride,
- col_stride,
- preserve_diag);
- }
- });
- return mat;
-}
-
-
-} // namespace
-
-TORCH_LIBRARY_IMPL(falkon, CPU, m) {
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::mul_triang"),
- TORCH_FN(mul_triang_kernel));
-}
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/cpu/cpu_potrf.cpp b/falkon/c_ext/ops/cpu/cpu_potrf.cpp
deleted file mode 100644
index fcee360a..00000000
--- a/falkon/c_ext/ops/cpu/cpu_potrf.cpp
+++ /dev/null
@@ -1,84 +0,0 @@
-#include
-#include
-#include
-
-#include "../helpers.h"
-#include "../mul_triang.h"
-
-namespace falkon {
-namespace ops {
-namespace {
-
-at::Tensor potrf_kernel(
- at::Tensor &mat,
- bool upper,
- bool clean,
- bool overwrite) {
- AT_ASSERTM(mat.dim() == 2, "Input matrix must be 2D");
- const int64_t n = mat.size(0);
- const int64_t m = mat.size(1);
- TORCH_CHECK(
- (n == m),
- "Input matrix must be square. Found shape: (",
- n,
- ", ",
- m,
- ")");
- int64_t row_stride = mat.stride(0);
- int64_t col_stride = mat.stride(1);
- TORCH_CHECK(
- (row_stride == 1 || col_stride == 1),
- "Input must be contiguous in one dimension. Found strides: (",
- row_stride,
- ", ",
- col_stride,
- ")");
-
- char uplo;
- if (is_fortran_contig(mat)) {
- uplo = upper ? 'U' : 'L';
- } else {
- uplo = upper ? 'L' : 'U';
- }
-
- // Copy array if necessary
- if (!overwrite) {
- mat = mat.clone();
- }
-
- int info = 0;
-
- AT_DISPATCH_FLOATING_TYPES(mat.scalar_type(), "copy_triang", [&] {
- at::native::lapackCholesky(
- uplo,
- n,
- mat.data_ptr(),
- row_stride == 1 ? col_stride : row_stride,
- &info);
- TORCH_CHECK(
- (info == 0),
- "LAPACK potrf failed with status ",
- info,
- ". Params: uplo ",
- uplo,
- ", rows ",
- n);
- // Clean non-factorized part of the matrix
- });
- if (clean) {
- falkon::ops::mul_triang(mat, 0.0, !upper, true);
- }
- return mat;
-}
-
-
-} // namespace
-
-TORCH_LIBRARY_IMPL(falkon, CPU, m) {
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::potrf"),
- TORCH_FN(potrf_kernel));
-}
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/cpu/cpu_sparse_vector_ops.cpp b/falkon/c_ext/ops/cpu/cpu_sparse_vector_ops.cpp
deleted file mode 100644
index 4788e2b6..00000000
--- a/falkon/c_ext/ops/cpu/cpu_sparse_vector_ops.cpp
+++ /dev/null
@@ -1,235 +0,0 @@
-#include "../helpers.h"
-
-#include
-#include
-#include
-#include
-#include
-
-namespace falkon {
-namespace ops {
-namespace {
-
-#define ASSERT_IS_CPU(x) AT_ASSERTM(x.device().is_cpu(), #x " must be CPU tensor")
-
-
-template
-static inline void sparse_bdot_impl(
- scalar_t* data1,
- int64_t* indices1,
- int64_t* indptr1,
- scalar_t* data2,
- int64_t* indices2,
- int64_t* indptr2,
- scalar_t* out_data,
- int64_t N) {
- // row start and row end for both input matrices
- int64_t rs1, re1, rs2, re2;
- // column indices (in the `indices1` and `indices2` arrays)
- int64_t colidx1, colidx2;
- int64_t i;
-
- for (i = 0; i < N; i++) {
- rs1 = indptr1[i];
- re1 = indptr1[i + 1];
- rs2 = indptr2[i];
- re2 = indptr2[i + 1];
-
- while (rs1 < re1 && rs2 < re2) {
- colidx1 = indices1[rs1];
- colidx2 = indices2[rs2];
- if (colidx1 < colidx2) {
- rs1++;
- } else if (colidx1 > colidx2) {
- rs2++;
- } else {
- out_data[i] += data1[rs1] * data2[rs2];
- rs1++;
- rs2++;
- }
- }
- }
-}
-
-
-at::Tensor sparse_bdot_kernel(
- const at::Tensor &indexptr1,
- const at::Tensor &indices1,
- const at::Tensor &data1,
- const at::Tensor &indexptr2,
- const at::Tensor &indices2,
- const at::Tensor &data2,
- at::Tensor &out) {
- ASSERT_IS_CPU(indexptr1);
- ASSERT_IS_CPU(indices1);
- ASSERT_IS_CPU(data1);
- ASSERT_IS_CPU(indexptr2);
- ASSERT_IS_CPU(indices2);
- ASSERT_IS_CPU(data2);
- ASSERT_IS_CPU(out);
- AT_ASSERTM(indexptr1.dim() == 1, "indexptr must be 1D");
- AT_ASSERTM(indexptr1.stride(0) == 1, "indexptr must be memory-contiguous");
- AT_ASSERTM(indices1.dim() == 1, "indices must be 1D");
- AT_ASSERTM(indices1.stride(0) == 1, "indices must be memory-contiguous");
- AT_ASSERTM(data1.dim() == 1, "data must be 1D");
- AT_ASSERTM(data1.stride(0) == 1, "data must be memory-contiguous");
- AT_ASSERTM(indexptr2.dim() == 1, "indexptr must be 1D");
- AT_ASSERTM(indexptr2.stride(0) == 1, "indexptr must be memory-contiguous");
- AT_ASSERTM(indices2.dim() == 1, "indices must be 1D");
- AT_ASSERTM(indices2.stride(0) == 1, "indices must be memory-contiguous");
- AT_ASSERTM(data2.dim() == 1, "data must be 1D");
- AT_ASSERTM(data2.stride(0) == 1, "data must be memory-contiguous");
- AT_ASSERTM(indexptr1.size(0) == indexptr2.size(0), "the two sparse matrices must have the same number of rows.");
- AT_ASSERTM(data1.scalar_type() == data2.scalar_type(), "the two sparse matrices must be of the same type.");
- AT_ASSERTM(out.scalar_type() == data1.scalar_type(), "Matrices A, B and out must be of the same type.");
-
- int64_t N = indexptr1.size(0) - 1;
- AT_ASSERTM(N == out.size(0), "Input shape mismatch");
- if (out.dim() >= 2) {
- AT_ASSERTM(out.size(1) == 1, "Output array must be 1D");
- }
- out.fill_(0.0);
-
- auto scalar_type = data1.scalar_type();
-
- AT_DISPATCH_ALL_TYPES(scalar_type, "sparse_bdot_impl", [&] {
- sparse_bdot_impl(
- data1.data_ptr(),
- indices1.data_ptr(),
- indexptr1.data_ptr(),
- data2.data_ptr(),
- indices2.data_ptr(),
- indexptr2.data_ptr(),
- out.data_ptr(),
- N);
- });
- return out;
-}
-
-
-template
-static inline void sparse_square_norm_impl(
- scalar_t* data,
- int64_t* indptr,
- scalar_t* out_data,
- int64_t N) {
- int64_t i;
- int64_t i_start, i_end;
- scalar_t val_ij;
- // Assume squaring is desired, but need to fix this to accept multiple operations eventually
- for (i = 0; i < N; i++) {
- i_start = indptr[i];
- i_end = indptr[i+1];
- val_ij = 0.0;
- while (i_start < i_end) {
- val_ij += data[i_start]*data[i_start];
- i_start++;
- }
- out_data[i] = val_ij;
- }
-}
-
-at::Tensor sparse_square_norm_kernel(
- const at::Tensor &indexptr,
- const at::Tensor &data,
- at::Tensor &out) {
- ASSERT_IS_CPU(indexptr);
- AT_ASSERTM(indexptr.dim() == 1, "indexptr must be 1D");
- AT_ASSERTM(indexptr.stride(0) == 1, "indexptr must be memory-contiguous");
- int64_t N = indexptr.size(0) - 1;
-
- ASSERT_IS_CPU(data);
- AT_ASSERTM(data.dim() == 1, "data must be 1D");
- AT_ASSERTM(data.stride(0) == 1, "data must be memory-contiguous");
-
- ASSERT_IS_CPU(out);
- AT_ASSERTM(N == out.size(0), "Input shape mismatch");
- if (out.dim() >= 2) {
- AT_ASSERTM(out.size(1) == 1, "Output array must be 1D");
- }
- AT_ASSERTM(out.scalar_type() == data.scalar_type(), "Matrices A, B and out must be of the same type.");
-
- auto scalar_type = data.scalar_type();
- auto indexptr_data = indexptr.data_ptr();
-
- AT_DISPATCH_ALL_TYPES(scalar_type, "sparse_square_norm_impl", [&] {
- sparse_square_norm_impl(
- data.data_ptr(),
- indexptr_data,
- out.data_ptr(),
- N);
- });
- return out;
-}
-
-template
-static inline void sparse_norm_impl(
- scalar_t* data,
- int64_t* indptr,
- scalar_t* out_data,
- int64_t N) {
- int64_t i;
- int64_t i_start, i_end;
- scalar_t val_ij;
- for (i = 0; i < N; i++) {
- i_start = indptr[i];
- i_end = indptr[i+1];
- val_ij = 0.0;
- while (i_start < i_end) {
- val_ij += data[i_start]*data[i_start];
- i_start++;
- }
- out_data[i] = sqrt(val_ij);
- }
-}
-
-at::Tensor sparse_norm_kernel(
- const at::Tensor &indexptr,
- const at::Tensor &data,
- at::Tensor &out) {
- ASSERT_IS_CPU(indexptr);
- AT_ASSERTM(indexptr.dim() == 1, "indexptr must be 1D");
- AT_ASSERTM(indexptr.stride(0) == 1, "indexptr must be memory-contiguous");
- int64_t N = indexptr.size(0) - 1;
-
- ASSERT_IS_CPU(data);
- AT_ASSERTM(data.dim() == 1, "data must be 1D");
- AT_ASSERTM(data.stride(0) == 1, "data must be memory-contiguous");
-
- ASSERT_IS_CPU(out);
- AT_ASSERTM(N == out.size(0), "Input shape mismatch");
- if (out.dim() >= 2) {
- AT_ASSERTM(out.size(1) == 1, "Output array must be 1D");
- }
- AT_ASSERTM(out.scalar_type() == data.scalar_type(), "Matrices A, B and out must be of the same type.");
-
- auto scalar_type = data.scalar_type();
- auto indexptr_data = indexptr.data_ptr();
-
- AT_DISPATCH_ALL_TYPES(scalar_type, "sparse_norm_impl", [&] {
- sparse_norm_impl(
- data.data_ptr(),
- indexptr_data,
- out.data_ptr(),
- N);
- });
- return out;
-}
-
-
-} // namespace
-
-TORCH_LIBRARY_IMPL(falkon, CPU, m) {
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::sparse_bdot"),
- TORCH_FN(sparse_bdot_kernel));
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::sparse_square_norm"),
- TORCH_FN(sparse_square_norm_kernel));
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::sparse_norm"),
- TORCH_FN(sparse_norm_kernel));
-}
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/cpu/cpu_square_norm.cpp b/falkon/c_ext/ops/cpu/cpu_square_norm.cpp
deleted file mode 100644
index afee3d14..00000000
--- a/falkon/c_ext/ops/cpu/cpu_square_norm.cpp
+++ /dev/null
@@ -1,64 +0,0 @@
-#include "../helpers.h"
-
-#include
-#include
-#include
-#include
-#include
-
-namespace falkon {
-namespace ops {
-namespace {
-
-template
-struct NormTwoSquareOpsCPU {
- inline C10_DEVICE acc_t reduce(acc_t acc, acc_t data, int64_t /*idx*/) const {
- return acc + data * data;
- }
-
- inline C10_DEVICE acc_t combine(acc_t a, acc_t b) const {
- return a + b;
- }
-
- inline C10_DEVICE acc_t project(acc_t a) const {
- return a;
- }
-
- static C10_DEVICE acc_t translate_idx(acc_t acc, int64_t /*base_idx*/) {
- return acc;
- }
-};
-
-template
-void square_vector_norm_impl(at::TensorIterator iter) {
- if (iter.numel() == 0) {
- iter.output().fill_(0);
- return;
- }
- at::native::binary_kernel_reduce(iter, NormTwoSquareOpsCPU(), (scalar_t)0.0);
-}
-
-at::Tensor square_norm_kernel(const at::Tensor &input, int64_t dim, bool keepdim) {
- at::IntArrayRef dimArr = at::IntArrayRef(dim);
- at::ScalarType in_dtype = input.scalar_type();
-
- // Create the output tensor
- auto result_shape = shape_from_dim(input, dim, keepdim);
- at::Tensor result = at::empty(result_shape, input.options());
- at::TensorIterator iter = at::native::make_reduction("vector_sqnorm", result, input, dimArr, keepdim, in_dtype);
- AT_DISPATCH_FLOATING_TYPES(iter.input_dtype(), "square_vector_norm_impl", [&] {
- square_vector_norm_impl(iter);
- });
- return result;
-}
-
-} // namespace
-
-TORCH_LIBRARY_IMPL(falkon, CPU, m) {
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::square_norm"),
- TORCH_FN(square_norm_kernel));
-}
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/cpu/cpu_vec_mul_triang.cpp b/falkon/c_ext/ops/cpu/cpu_vec_mul_triang.cpp
deleted file mode 100644
index de126c25..00000000
--- a/falkon/c_ext/ops/cpu/cpu_vec_mul_triang.cpp
+++ /dev/null
@@ -1,135 +0,0 @@
-#include
-#include
-#include
-
-namespace falkon {
-namespace ops {
-namespace {
-
-/* https://github.com/pytorch/pytorch/blob/4081e924a8d701d5201db3e4b4ed2da60b072d30/aten/src/ATen/native/TriangularOps.cpp */
-template
-void vec_mul_triang_impl(
- scalar_t* mat,
- const scalar_t* multiplier_vector,
- const int64_t n,
- const int64_t row_stride,
- const int64_t col_stride,
- const bool side,
- const bool upper) {
- /*
- * Multiply triangular matrix by a column or row vector (depending on side, following broadcasting rules)
- * if side == true: multiplier is a row vector
- * if side == false: multiplier is a column vector
- */
- if (col_stride == 1) {
- // C-contiguous (rows are stored as contiguous blocks, stride is (?, 1))
- at::parallel_for(0, n, 0, [&](int64_t start, int64_t end) { // rows
- scalar_t mul;
- for (int64_t i : c10::irange(start, end)) {
- if (!side) {
- mul = multiplier_vector[i];
- }
- if (upper) {
- for (int64_t j = i; j < n; j++) { // cols
- if (side) {
- mul = multiplier_vector[j];
- }
- mat[i * row_stride + j] = mat[i * row_stride + j] * mul;
- }
- } else {
- for (int64_t j = 0; j <= i; j++) { // cols
- if (side) {
- mul = multiplier_vector[j];
- }
- mat[i * row_stride + j] = mat[i * row_stride + j] * mul;
- }
- }
- }
- });
- } else {
- // F-contiguous (columns are stored as contiguous blocks, stride is (1, ?))
- at::parallel_for(0, n, 0, [&](int64_t start, int64_t end) { // columns
- scalar_t mul;
- for (int64_t i : c10::irange(start, end)) {
- if (side) {
- mul = multiplier_vector[i];
- }
- if (upper) {
- for (int64_t j = 0; j <= i; j++) { // rows
- if (!side) {
- mul = multiplier_vector[j];
- }
- mat[j + i * col_stride] = mat[j + i * col_stride] * mul;
- }
- } else {
- for (int64_t j = i; j < n; j++) { // rows
- if (!side) {
- mul = multiplier_vector[j];
- }
- mat[j + i * col_stride] = mat[j + i * col_stride] * mul;
- }
- }
- }
- });
- }
-}
-
-at::Tensor vec_mul_triang_kernel(
- at::Tensor &mat,
- const at::Tensor &multiplier_vec,
- const bool upper,
- const bool side) {
- AT_ASSERTM(mat.dim() == 2, "mat must be 2D");
- const int64_t n = mat.size(0);
- const int64_t m = mat.size(1);
- const int64_t k = multiplier_vec.size(0);
- TORCH_CHECK(
- (n == m),
- "Input matrix must be square. Found shape: (",
- n,
- ", ",
- m,
- ")");
- TORCH_CHECK(
- (n == k),
- "Multiplier vector must have the same first dimension as the input matrix. Expected shape: (",
- n,
- ", ) found shape: (",
- m,
- ", )");
-
- at::Tensor multiplier_vec_c = multiplier_vec.contiguous();
- const int64_t row_stride = mat.stride(0);
- const int64_t col_stride = mat.stride(1);
- TORCH_CHECK(
- (row_stride == 1 || col_stride == 1),
- "Input must be contiguous in one dimension. Found strides: (",
- row_stride,
- ", ",
- col_stride,
- ")");
-
- AT_DISPATCH_FLOATING_TYPES(mat.scalar_type(), "vec_mul_triang", [&] {
- vec_mul_triang_impl(
- mat.data_ptr(),
- multiplier_vec_c.data_ptr(),
- n,
- row_stride,
- col_stride,
- side,
- upper
- );
- });
- return mat;
-}
-
-} // namespace
-
-TORCH_LIBRARY_IMPL(falkon, CPU, m) {
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::vec_mul_triang"),
- TORCH_FN(vec_mul_triang_kernel));
-}
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/csr2dense.cpp b/falkon/c_ext/ops/csr2dense.cpp
deleted file mode 100644
index ee510692..00000000
--- a/falkon/c_ext/ops/csr2dense.cpp
+++ /dev/null
@@ -1,34 +0,0 @@
-#include "csr2dense.h"
-
-#include
-#include
-#include
-
-namespace falkon {
-namespace ops {
-
-at::Tensor csr2dense(
- const at::Tensor &rowptr,
- const at::Tensor &col,
- const at::Tensor &val,
- at::Tensor &out) {
- static auto op = c10::Dispatcher::singleton()
- .findSchemaOrThrow("falkon::csr2dense", "")
- .typed();
- at::AutoDispatchBelowAutograd guard;
- at::tracer::impl::NoTracerDispatchMode tracer_guard;
- return op.call(
- rowptr,
- col,
- val,
- out
- );
-}
-
-TORCH_LIBRARY_FRAGMENT(falkon, m) {
- m.def(TORCH_SELECTIVE_SCHEMA(
- "falkon::csr2dense(Tensor rowptr, Tensor col, Tensor val, Tensor(a!) out) -> Tensor(a!)"));
-}
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/csr2dense.h b/falkon/c_ext/ops/csr2dense.h
deleted file mode 100644
index f001a521..00000000
--- a/falkon/c_ext/ops/csr2dense.h
+++ /dev/null
@@ -1,15 +0,0 @@
-#pragma once
-
-#include
-
-namespace falkon {
-namespace ops {
-
-at::Tensor csr2dense(
- const at::Tensor &rowptr,
- const at::Tensor &col,
- const at::Tensor &val,
- at::Tensor &out);
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/cublas_bindings.cpp b/falkon/c_ext/ops/cublas_bindings.cpp
deleted file mode 100644
index 4590b9e2..00000000
--- a/falkon/c_ext/ops/cublas_bindings.cpp
+++ /dev/null
@@ -1,259 +0,0 @@
-#include "cublas_bindings.h"
-
-#include
-#include
-#include
-
-namespace falkon {
-namespace ops {
-
-void cublas_2d_copy_to_dev_async (
- const int64_t rows,
- const int64_t cols,
- const int64_t elemSize,
- const at::Tensor& host_tensor,
- const int64_t lda,
- at::Tensor& dev_tensor,
- const int64_t ldb) {
- static auto op = c10::Dispatcher::singleton()
- .findSchemaOrThrow("falkon::cublas_2d_copy_to_dev_async", "")
- .typed();
- at::AutoDispatchBelowAutograd guard;
- at::tracer::impl::NoTracerDispatchMode tracer_guard;
- return op.call(
- rows,
- cols,
- elemSize,
- host_tensor,
- lda,
- dev_tensor,
- ldb
- );
-}
-void cublas_2d_copy_to_dev (
- const int64_t rows,
- const int64_t cols,
- const int64_t elemSize,
- const at::Tensor& host_tensor,
- const int64_t lda,
- at::Tensor& dev_tensor,
- const int64_t ldb) {
- static auto op = c10::Dispatcher::singleton()
- .findSchemaOrThrow("falkon::cublas_2d_copy_to_dev", "")
- .typed();
- at::AutoDispatchBelowAutograd guard;
- at::tracer::impl::NoTracerDispatchMode tracer_guard;
- return op.call(
- rows,
- cols,
- elemSize,
- host_tensor,
- lda,
- dev_tensor,
- ldb
- );
-}
-void cublas_2d_copy_to_host_async(
- const int64_t rows,
- const int64_t cols,
- const int64_t elemSize,
- const at::Tensor& dev_tensor,
- const int64_t lda,
- at::Tensor& host_tensor,
- const int64_t ldb) {
- static auto op = c10::Dispatcher::singleton()
- .findSchemaOrThrow("falkon::cublas_2d_copy_to_host_async", "")
- .typed();
- at::AutoDispatchBelowAutograd guard;
- at::tracer::impl::NoTracerDispatchMode tracer_guard;
- return op.call(
- rows,
- cols,
- elemSize,
- dev_tensor,
- lda,
- host_tensor,
- ldb
- );
-}
-void cublas_2d_copy_to_host(
- const int64_t rows,
- const int64_t cols,
- const int64_t elemSize,
- const at::Tensor& dev_tensor,
- const int64_t lda,
- at::Tensor& host_tensor,
- const int64_t ldb) {
- static auto op = c10::Dispatcher::singleton()
- .findSchemaOrThrow("falkon::cublas_2d_copy_to_host", "")
- .typed();
- at::AutoDispatchBelowAutograd guard;
- at::tracer::impl::NoTracerDispatchMode tracer_guard;
- return op.call(
- rows,
- cols,
- elemSize,
- dev_tensor,
- lda,
- host_tensor,
- ldb
- );
-}
-
-void cublas_trsm(
- const at::Tensor& A,
- at::Tensor& B,
- const at::Scalar& alpha,
- bool left,
- bool upper,
- bool transpose,
- bool unitriangular,
- int64_t m,
- int64_t n,
- int64_t lda,
- int64_t ldb) {
- static auto op = c10::Dispatcher::singleton()
- .findSchemaOrThrow("falkon::cublas_trsm", "")
- .typed();
- at::AutoDispatchBelowAutograd guard;
- at::tracer::impl::NoTracerDispatchMode tracer_guard;
- return op.call(
- A,
- B,
- alpha,
- left,
- upper,
- transpose,
- unitriangular,
- m,
- n,
- lda,
- ldb
- );
-}
-
-void cublas_trmm(
- const at::Tensor& A,
- const at::Tensor& B,
- at::Tensor& C,
- bool left,
- bool upper,
- bool transpose,
- bool unitriangular,
- const at::Scalar& alpha,
- int64_t m,
- int64_t n,
- int64_t lda,
- int64_t ldb,
- int64_t ldc) {
- static auto op = c10::Dispatcher::singleton()
- .findSchemaOrThrow("falkon::cublas_trmm", "")
- .typed();
- at::AutoDispatchBelowAutograd guard;
- at::tracer::impl::NoTracerDispatchMode tracer_guard;
- return op.call(
- A,
- B,
- C,
- left,
- upper,
- transpose,
- unitriangular,
- alpha,
- m,
- n,
- lda,
- ldb,
- ldc
- );
-}
-
-
-void cublas_gemm(
- const at::Tensor& A,
- int64_t lda,
- bool transa,
- const at::Tensor& B,
- int64_t ldb,
- bool transb,
- at::Tensor& C,
- int64_t ldc,
- int64_t m,
- int64_t n,
- int64_t k,
- const at::Scalar& alpha,
- const at::Scalar& beta) {
- static auto op = c10::Dispatcher::singleton()
- .findSchemaOrThrow("falkon::cublas_gemm", "")
- .typed();
- at::AutoDispatchBelowAutograd guard;
- at::tracer::impl::NoTracerDispatchMode tracer_guard;
- return op.call(
- A,
- lda,
- transa,
- B,
- ldb,
- transb,
- C,
- ldc,
- m,
- n,
- k,
- alpha,
- beta
- );
-}
-
-
-void cublas_syrk(
- const at::Tensor& A,
- int64_t lda,
- at::Tensor& C,
- int64_t ldc,
- const at::Scalar& alpha,
- const at::Scalar& beta,
- bool upper,
- bool transpose,
- int64_t n,
- int64_t k) {
- static auto op = c10::Dispatcher::singleton()
- .findSchemaOrThrow("falkon::cublas_syrk", "")
- .typed();
- at::AutoDispatchBelowAutograd guard;
- at::tracer::impl::NoTracerDispatchMode tracer_guard;
- return op.call(
- A,
- lda,
- C,
- ldc,
- alpha,
- beta,
- upper,
- transpose,
- n,
- k
- );
-}
-
-TORCH_LIBRARY_FRAGMENT(falkon, m) {
- m.def(TORCH_SELECTIVE_SCHEMA(
- "falkon::cublas_2d_copy_to_dev_async(int rows, int cols, int elemSize, Tensor host_tensor, int lda, Tensor (a!) dev_tensor, int ldb) -> ()"));
- m.def(TORCH_SELECTIVE_SCHEMA(
- "falkon::cublas_2d_copy_to_dev(int rows, int cols, int elemSize, Tensor host_tensor, int lda, Tensor (a!) dev_tensor, int ldb) -> ()"));
- m.def(TORCH_SELECTIVE_SCHEMA(
- "falkon::cublas_2d_copy_to_host_async(int rows, int cols, int elemSize, Tensor dev_tensor, int lda, Tensor (a!) host_tensor, int ldb) -> ()"));
- m.def(TORCH_SELECTIVE_SCHEMA(
- "falkon::cublas_2d_copy_to_host(int rows, int cols, int elemSize, Tensor dev_tensor, int lda, Tensor (a!) host_tensor, int ldb) -> ()"));
- m.def(TORCH_SELECTIVE_SCHEMA(
- "falkon::cublas_trsm(Tensor A, Tensor (a!) B, Scalar alpha, bool left, bool upper, bool transpose, bool unitriangular, int m, int n, int lda, int ldb) -> ()"));
- m.def(TORCH_SELECTIVE_SCHEMA(
- "falkon::cublas_trmm(Tensor A, Tensor B, Tensor (a!) C, bool left, bool upper, bool transpose, bool unitriangular, Scalar alpha, int m, int n, int lda, int ldb, int ldc) -> ()"));
- m.def(TORCH_SELECTIVE_SCHEMA(
- "falkon::cublas_gemm(Tensor A, int lda, bool transa, Tensor B, int ldb, bool transb, Tensor (a!) C, int ldc, int m, int n, int k, Scalar alpha, Scalar beta) -> ()"));
- m.def(TORCH_SELECTIVE_SCHEMA(
- "falkon::cublas_syrk(Tensor A, int lda, Tensor (a!) C, int ldc, Scalar alpha, Scalar beta, bool upper, bool transpose, int n, int k) -> ()"));
-}
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/cublas_bindings.h b/falkon/c_ext/ops/cublas_bindings.h
deleted file mode 100644
index f5b97853..00000000
--- a/falkon/c_ext/ops/cublas_bindings.h
+++ /dev/null
@@ -1,91 +0,0 @@
-#pragma once
-
-#include
-
-namespace falkon {
-namespace ops {
-void cublas_2d_copy_to_dev_async (
- const int64_t rows,
- const int64_t cols,
- const int64_t elemSize,
- const at::Tensor& host_tensor,
- const int64_t lda,
- at::Tensor& dev_tensor,
- const int64_t ldb);
-void cublas_2d_copy_to_dev (
- const int64_t rows,
- const int64_t cols,
- const int64_t elemSize,
- const at::Tensor& host_tensor,
- const int64_t lda,
- at::Tensor& dev_tensor,
- const int64_t ldb);
-void cublas_2d_copy_to_host_async(
- const int64_t rows,
- const int64_t cols,
- const int64_t elemSize,
- const at::Tensor& dev_tensor,
- const int64_t lda,
- at::Tensor& host_tensor,
- const int64_t ldb);
-void cublas_2d_copy_to_host(
- const int64_t rows,
- const int64_t cols,
- const int64_t elemSize,
- const at::Tensor& dev_tensor,
- const int64_t lda,
- at::Tensor& host_tensor,
- const int64_t ldb);
-void cublas_trsm(
- const at::Tensor& A,
- at::Tensor& B,
- const at::Scalar& alpha,
- bool left,
- bool upper,
- bool transpose,
- bool unitriangular,
- int64_t m,
- int64_t n,
- int64_t lda,
- int64_t ldb);
-void cublas_trmm(
- const at::Tensor& A,
- const at::Tensor& B,
- at::Tensor& C,
- bool left,
- bool upper,
- bool transpose,
- bool unitriangular,
- const at::Scalar& alpha,
- int64_t m,
- int64_t n,
- int64_t lda,
- int64_t ldb,
- int64_t ldc);
-void cublas_gemm(
- const at::Tensor& A,
- int64_t lda,
- bool transa,
- const at::Tensor& B,
- int64_t ldb,
- bool transb,
- at::Tensor& C,
- int64_t ldc,
- int64_t m,
- int64_t n,
- int64_t k,
- const at::Scalar& alpha,
- const at::Scalar& beta);
-void cublas_syrk(
- const at::Tensor& A,
- int64_t lda,
- at::Tensor& C,
- int64_t ldc,
- const at::Scalar& alpha,
- const at::Scalar& beta,
- bool upper,
- bool transpose,
- int64_t n,
- int64_t k);
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/cuda/cublas_bindings.cu b/falkon/c_ext/ops/cuda/cublas_bindings.cu
deleted file mode 100644
index 8284592d..00000000
--- a/falkon/c_ext/ops/cuda/cublas_bindings.cu
+++ /dev/null
@@ -1,464 +0,0 @@
-#include "cublas_bindings.h"
-#include "../helpers.h"
-#include "cuda_helpers.cuh"
-
-#include
-#include
-#include
-#include
-#include
-#include
-
-namespace falkon {
-namespace ops {
-
-
-/*
- * TRSM
- */
-template
-void trsm(
- cublasHandle_t cublas_handle,
- cublasSideMode_t side,
- cublasFillMode_t uplo,
- cublasOperation_t trans,
- cublasDiagType_t diag,
- int m,
- int n,
- const scalar_t *alpha,
- const scalar_t *A,
- int lda,
- scalar_t *B,
- int ldb) {
- throw std::invalid_argument("scalar_t");
-}
-template<>
-void trsm(
- cublasHandle_t cublas_handle,
- cublasSideMode_t side,
- cublasFillMode_t uplo,
- cublasOperation_t trans,
- cublasDiagType_t diag,
- int m,
- int n,
- const double *alpha,
- const double *A,
- int lda,
- double *B,
- int ldb) {
- FLK_CUDABLAS_CHECK(cublasDtrsm(cublas_handle, side, uplo, trans, diag, m, n, alpha, A, lda, B, ldb));
-}
-template<>
-void trsm(
- cublasHandle_t cublas_handle,
- cublasSideMode_t side,
- cublasFillMode_t uplo,
- cublasOperation_t trans,
- cublasDiagType_t diag,
- int m,
- int n,
- const float *alpha,
- const float *A,
- int lda,
- float *B,
- int ldb) {
- FLK_CUDABLAS_CHECK(cublasStrsm(cublas_handle, side, uplo, trans, diag, m, n, alpha, A, lda, B, ldb));
-}
-
-
-/*
- * TRMM
- */
-template
-void trmm(
- cublasHandle_t cublas_handle,
- cublasSideMode_t side,
- cublasFillMode_t uplo,
- cublasOperation_t trans,
- cublasDiagType_t diag,
- int m,
- int n,
- const scalar_t *alpha,
- const scalar_t *A,
- int lda,
- const scalar_t *B,
- int ldb,
- scalar_t *C,
- int ldc) {
- throw std::invalid_argument("scalar_t");
-}
-template<>
-void trmm(
- cublasHandle_t cublas_handle,
- cublasSideMode_t side,
- cublasFillMode_t uplo,
- cublasOperation_t trans,
- cublasDiagType_t diag,
- int m,
- int n,
- const double *alpha,
- const double *A,
- int lda,
- const double *B,
- int ldb,
- double *C,
- int ldc) {
- FLK_CUDABLAS_CHECK(cublasDtrmm(cublas_handle, side, uplo, trans, diag, m, n, alpha, A, lda, B, ldb, C, ldc));
-}
-template<>
-void trmm(
- cublasHandle_t cublas_handle,
- cublasSideMode_t side,
- cublasFillMode_t uplo,
- cublasOperation_t trans,
- cublasDiagType_t diag,
- int m,
- int n,
- const float *alpha,
- const float *A,
- int lda,
- const float *B,
- int ldb,
- float *C,
- int ldc) {
- FLK_CUDABLAS_CHECK(cublasStrmm(cublas_handle, side, uplo, trans, diag, m, n, alpha, A, lda, B, ldb, C, ldc));
-}
-
-
-/*
- * GEMM
- */
-template
-void gemm(
- cublasHandle_t cublas_handle,
- cublasOperation_t transa,
- cublasOperation_t transb,
- int m,
- int n,
- int k,
- const scalar_t *alpha,
- const scalar_t *A,
- int lda,
- const scalar_t *B,
- int ldb,
- const scalar_t *beta,
- scalar_t *C,
- int ldc) {
- throw std::invalid_argument("scalar_t");
-}
-template<>
-void gemm(
- cublasHandle_t cublas_handle,
- cublasOperation_t transa,
- cublasOperation_t transb,
- int m,
- int n,
- int k,
- const double *alpha,
- const double *A,
- int lda,
- const double *B,
- int ldb,
- const double *beta,
- double *C,
- int ldc) {
- FLK_CUDABLAS_CHECK(cublasDgemm(cublas_handle, transa, transb, m, n, k, alpha, A, lda, B, ldb, beta, C, ldc));
-}
-template<>
-void gemm(
- cublasHandle_t cublas_handle,
- cublasOperation_t transa,
- cublasOperation_t transb,
- int m,
- int n,
- int k,
- const float *alpha,
- const float *A,
- int lda,
- const float *B,
- int ldb,
- const float *beta,
- float *C,
- int ldc) {
- FLK_CUDABLAS_CHECK(cublasSgemm(cublas_handle, transa, transb, m, n, k, alpha, A, lda, B, ldb, beta, C, ldc));
-}
-
-
-/*
- * SYRK
- */
-template
-void syrk(
- cublasHandle_t cublas_handle,
- cublasFillMode_t uplo,
- cublasOperation_t trans,
- int n,
- int k,
- const scalar_t *alpha,
- const scalar_t *A,
- int lda,
- const scalar_t *beta,
- scalar_t *C,
- int ldc) {
- throw std::invalid_argument("scalar_t");
-}
-template<>
-void syrk(
- cublasHandle_t cublas_handle,
- cublasFillMode_t uplo,
- cublasOperation_t trans,
- int n,
- int k,
- const double *alpha,
- const double *A,
- int lda,
- const double *beta,
- double *C,
- int ldc) {
- FLK_CUDABLAS_CHECK(cublasDsyrk(cublas_handle, uplo, trans, n, k, alpha, A, lda, beta, C, ldc));
-}
-template<>
-void syrk(
- cublasHandle_t cublas_handle,
- cublasFillMode_t uplo,
- cublasOperation_t trans,
- int n,
- int k,
- const float *alpha,
- const float *A,
- int lda,
- const float *beta,
- float *C,
- int ldc) {
- FLK_CUDABLAS_CHECK(cublasSsyrk(cublas_handle, uplo, trans, n, k, alpha, A, lda, beta, C, ldc));
-}
-
-
-namespace {
-
-/*
- * Copies
- */
-void cublas_2d_copy_to_dev_async (
- const int64_t rows,
- const int64_t cols,
- const int64_t elemSize,
- const at::Tensor& host_tensor,
- const int64_t lda,
- at::Tensor& dev_tensor,
- const int64_t ldb) {
- at::cuda::CUDAStream torch_stream = at::cuda::getCurrentCUDAStream(at::cuda::current_device());
- FLK_CUDABLAS_CHECK(cublasSetMatrixAsync(
- rows, cols, elemSize,
- host_tensor.data_ptr(),
- lda,
- dev_tensor.data_ptr(),
- ldb,
- torch_stream.stream()
- ));
-}
-
-void cublas_2d_copy_to_dev (
- const int64_t rows,
- const int64_t cols,
- const int64_t elemSize,
- const at::Tensor& host_tensor,
- const int64_t lda,
- at::Tensor& dev_tensor,
- const int64_t ldb) {
- FLK_CUDABLAS_CHECK(cublasSetMatrix(
- rows, cols, elemSize,
- host_tensor.data_ptr(),
- lda,
- dev_tensor.data_ptr(),
- ldb
- ));
-}
-
-void cublas_2d_copy_to_host_async(
- const int64_t rows,
- const int64_t cols,
- const int64_t elemSize,
- const at::Tensor& dev_tensor,
- const int64_t lda,
- at::Tensor& host_tensor,
- const int64_t ldb) {
- at::cuda::CUDAStream torch_stream = at::cuda::getCurrentCUDAStream(at::cuda::current_device());
- FLK_CUDABLAS_CHECK(cublasGetMatrixAsync(
- rows, cols, elemSize,
- dev_tensor.data_ptr(),
- lda,
- host_tensor.data_ptr(),
- ldb,
- torch_stream.stream()
- ));
-}
-
-void cublas_2d_copy_to_host(
- const int64_t rows,
- const int64_t cols,
- const int64_t elemSize,
- const at::Tensor& dev_tensor,
- const int64_t lda,
- at::Tensor& host_tensor,
- const int64_t ldb) {
- FLK_CUDABLAS_CHECK(cublasGetMatrix(
- rows, cols, elemSize,
- dev_tensor.data_ptr(),
- lda,
- host_tensor.data_ptr(),
- ldb
- ));
-}
-
-/*
- * Torch wrappers for linalg functions
- */
-
-void cublas_trsm(
- const at::Tensor& A,
- at::Tensor& B,
- const at::Scalar& alpha,
- bool left,
- bool upper,
- bool transpose,
- bool unitriangular,
- int64_t m,
- int64_t n,
- int64_t lda,
- int64_t ldb) {
- cublasFillMode_t uplo = upper ? CUBLAS_FILL_MODE_UPPER : CUBLAS_FILL_MODE_LOWER;
- cublasDiagType_t diag = unitriangular ? CUBLAS_DIAG_UNIT : CUBLAS_DIAG_NON_UNIT;
- cublasOperation_t trans = transpose ? CUBLAS_OP_T : CUBLAS_OP_N;
- cublasSideMode_t side = left ? CUBLAS_SIDE_LEFT : CUBLAS_SIDE_RIGHT;
-
- AT_DISPATCH_FLOATING_TYPES(A.scalar_type(), "cublas_trsm", [&]{
- auto handle = at::cuda::getCurrentCUDABlasHandle();
- auto A_data = A.data_ptr();
- auto B_data = B.data_ptr();
- scalar_t cast_alpha = alpha.to();
- trsm(handle, side, uplo, trans, diag, m, n, &cast_alpha, A_data, lda, B_data, ldb);
- });
-}
-
-void cublas_trmm(
- const at::Tensor& A,
- const at::Tensor& B,
- at::Tensor& C,
- bool left,
- bool upper,
- bool transpose,
- bool unitriangular,
- const at::Scalar& alpha,
- int64_t m,
- int64_t n,
- int64_t lda,
- int64_t ldb,
- int64_t ldc) {
- cublasFillMode_t uplo = upper ? CUBLAS_FILL_MODE_UPPER : CUBLAS_FILL_MODE_LOWER;
- cublasDiagType_t diag = unitriangular ? CUBLAS_DIAG_UNIT : CUBLAS_DIAG_NON_UNIT;
- cublasOperation_t trans = transpose ? CUBLAS_OP_T : CUBLAS_OP_N;
- cublasSideMode_t side = left ? CUBLAS_SIDE_LEFT : CUBLAS_SIDE_RIGHT;
-
- AT_DISPATCH_FLOATING_TYPES(A.scalar_type(), "cublas_trmm", [&]{
- auto handle = at::cuda::getCurrentCUDABlasHandle();
- auto A_data = A.data_ptr();
- auto B_data = B.data_ptr();
- auto C_data = C.data_ptr();
- scalar_t cast_alpha = alpha.to();
- trmm(handle, side, uplo, trans, diag, m, n, &cast_alpha, A_data, lda, B_data, ldb, C_data, ldc);
- });
-}
-
-void cublas_gemm(
- const at::Tensor& A,
- int64_t lda,
- bool transa,
- const at::Tensor& B,
- int64_t ldb,
- bool transb,
- at::Tensor& C,
- int64_t ldc,
- int64_t m,
- int64_t n,
- int64_t k,
- const at::Scalar& alpha,
- const at::Scalar& beta) {
- cublasOperation_t transa_op = transa ? CUBLAS_OP_T : CUBLAS_OP_N;
- cublasOperation_t transb_op = transb ? CUBLAS_OP_T : CUBLAS_OP_N;
-
- AT_DISPATCH_FLOATING_TYPES(A.scalar_type(), "cublas_gemm", [&]{
- auto handle = at::cuda::getCurrentCUDABlasHandle();
- auto A_data = A.data_ptr();
- auto B_data = B.data_ptr();
- auto C_data = C.data_ptr();
- scalar_t cast_alpha = alpha.to();
- scalar_t cast_beta = beta.to();
-
- gemm(handle, transa_op, transb_op, m, n, k, &cast_alpha, A_data, lda, B_data, ldb, &cast_beta, C_data, ldc);
- });
-}
-
-void cublas_syrk(
- const at::Tensor& A,
- int64_t lda,
- at::Tensor& C,
- int64_t ldc,
- const at::Scalar& alpha,
- const at::Scalar& beta,
- bool upper,
- bool transpose,
- int64_t n,
- int64_t k) {
- cublasFillMode_t uplo = upper ? CUBLAS_FILL_MODE_UPPER : CUBLAS_FILL_MODE_LOWER;
- cublasOperation_t op = transpose ? CUBLAS_OP_T : CUBLAS_OP_N;
-
- AT_DISPATCH_FLOATING_TYPES(A.scalar_type(), "cublas_syrk", [&]{
- auto handle = at::cuda::getCurrentCUDABlasHandle();
- auto A_data = A.data_ptr();
- auto C_data = C.data_ptr();
- scalar_t cast_alpha = alpha.to();
- scalar_t cast_beta = beta.to();
- syrk(handle, uplo, op, n, k, &cast_alpha, A_data, lda, &cast_beta, C_data, ldc);
- });
-}
-
-} // namespace
-
-TORCH_LIBRARY_IMPL(falkon, CUDA, m) {
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::cublas_2d_copy_to_dev_async"),
- TORCH_FN(cublas_2d_copy_to_dev_async));
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::cublas_2d_copy_to_dev"),
- TORCH_FN(cublas_2d_copy_to_dev));
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::cublas_2d_copy_to_host_async"),
- TORCH_FN(cublas_2d_copy_to_host_async));
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::cublas_2d_copy_to_host"),
- TORCH_FN(cublas_2d_copy_to_host));
-
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::cublas_trsm"),
- TORCH_FN(cublas_trsm)
- );
-
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::cublas_trmm"),
- TORCH_FN(cublas_trmm)
- );
-
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::cublas_gemm"),
- TORCH_FN(cublas_gemm)
- );
-
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::cublas_syrk"),
- TORCH_FN(cublas_syrk)
- );
-}
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/cuda/cublas_bindings.h b/falkon/c_ext/ops/cuda/cublas_bindings.h
deleted file mode 100644
index a074b2a7..00000000
--- a/falkon/c_ext/ops/cuda/cublas_bindings.h
+++ /dev/null
@@ -1,72 +0,0 @@
-#pragma once
-
-#include
-
-namespace falkon {
-namespace ops {
-
-template
-void trsm(
- cublasHandle_t cublas_handle,
- cublasSideMode_t side,
- cublasFillMode_t uplo,
- cublasOperation_t trans,
- cublasDiagType_t diag,
- int m,
- int n,
- const scalar_t *alpha,
- const scalar_t *A,
- int lda,
- scalar_t *B,
- int ldb);
-
-template
-void trmm(
- cublasHandle_t cublas_handle,
- cublasSideMode_t side,
- cublasFillMode_t uplo,
- cublasOperation_t trans,
- cublasDiagType_t diag,
- int m,
- int n,
- const scalar_t *alpha,
- const scalar_t *A,
- int lda,
- const scalar_t *B,
- int ldb,
- scalar_t *C,
- int ldc);
-
-template
-void gemm(
- cublasHandle_t cublas_handle,
- cublasOperation_t transa,
- cublasOperation_t transb,
- int m,
- int n,
- int k,
- const scalar_t *alpha,
- const scalar_t *A,
- int lda,
- const scalar_t *B,
- int ldb,
- const scalar_t *beta,
- scalar_t *C,
- int ldc);
-
-template
-void syrk(
- cublasHandle_t cublas_handle,
- cublasFillMode_t uplo,
- cublasOperation_t trans,
- int n,
- int k,
- const scalar_t *alpha,
- const scalar_t *A,
- int lda,
- const scalar_t *beta,
- scalar_t *C,
- int ldc);
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/cuda/cuda_bindings.cu b/falkon/c_ext/ops/cuda/cuda_bindings.cu
deleted file mode 100644
index 41cd7436..00000000
--- a/falkon/c_ext/ops/cuda/cuda_bindings.cu
+++ /dev/null
@@ -1,116 +0,0 @@
-#include
-#include
-#include
-#include
-#include
-#include
-
-#include "../helpers.h"
-
-namespace falkon {
-namespace ops {
-
-namespace {
-
-std::tuple mem_get_info(int64_t device_id) {
- const at::cuda::CUDAGuard device_guard(device_id);
- size_t free;
- size_t total;
- C10_CUDA_CHECK(cudaMemGetInfo(&free, &total));
- return std::tuple{
- (int64_t)free,
- (int64_t)total
- };
-}
-
-void cuda_2d_copy_async(
- at::Tensor& dest_tensor,
- const int64_t dest_pitch,
- const at::Tensor& src_tensor,
- const int64_t src_pitch,
- const int64_t width,
- const int64_t height) {
- at::cuda::CUDAStream torch_stream = at::cuda::getCurrentCUDAStream(at::cuda::current_device());
- C10_CUDA_CHECK(cudaMemcpy2DAsync(
- dest_tensor.data_ptr(),
- dest_pitch,
- src_tensor.data_ptr(),
- src_pitch,
- width,
- height,
- cudaMemcpyDefault,
- torch_stream.stream()
- ));
-}
-
-void cuda_2d_copy(
- at::Tensor& dest_tensor,
- const int64_t dest_pitch,
- const at::Tensor& src_tensor,
- const int64_t src_pitch,
- const int64_t width,
- const int64_t height) {
- C10_CUDA_CHECK(cudaMemcpy2D(
- dest_tensor.data_ptr(),
- dest_pitch,
- src_tensor.data_ptr(),
- src_pitch,
- width,
- height,
- cudaMemcpyDefault
- ));
-}
-
-void cuda_1d_copy_async(
- at::Tensor& dest_tensor,
- const at::Tensor &src_tensor,
- const int64_t count) {
- at::cuda::CUDAStream torch_stream = at::cuda::getCurrentCUDAStream(at::cuda::current_device());
- C10_CUDA_CHECK(cudaMemcpyAsync(
- dest_tensor.data_ptr(),
- src_tensor.data_ptr(),
- count,
- cudaMemcpyDefault,
- torch_stream.stream()
- ));
-}
-
-void cuda_1d_copy(
- at::Tensor& dest_tensor,
- const at::Tensor &src_tensor,
- const int64_t count) {
- C10_CUDA_CHECK(cudaMemcpy(
- dest_tensor.data_ptr(),
- src_tensor.data_ptr(),
- count,
- cudaMemcpyDefault
- ));
-}
-
-} // namespace
-
-// registered as catch-all function since it has no tensor inputs, and dispatcher doesn't know what to do
-TORCH_LIBRARY_FRAGMENT(falkon, m) {
- m.def("falkon::mem_get_info", &mem_get_info);
-}
-
-TORCH_LIBRARY_IMPL(falkon, CUDA, m) {
-// m.impl(
-// TORCH_SELECTIVE_NAME("falkon::mem_get_info"),
-// TORCH_FN(mem_get_info));
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::cuda_2d_copy_async"),
- TORCH_FN(cuda_2d_copy_async));
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::cuda_2d_copy"),
- TORCH_FN(cuda_2d_copy));
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::cuda_1d_copy_async"),
- TORCH_FN(cuda_1d_copy_async));
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::cuda_1d_copy"),
- TORCH_FN(cuda_1d_copy));
-}
-
-} // ops
-} // falkon
diff --git a/falkon/c_ext/ops/cuda/cuda_copy_transpose.cu b/falkon/c_ext/ops/cuda/cuda_copy_transpose.cu
deleted file mode 100644
index 4b7aec68..00000000
--- a/falkon/c_ext/ops/cuda/cuda_copy_transpose.cu
+++ /dev/null
@@ -1,131 +0,0 @@
-#include
-#include
-#include
-#include
-#include
-
-#include "../helpers.h"
-
-
-namespace falkon {
-namespace ops {
-
-namespace {
-
-#define NB 32
-#define BLOCK_ROWS 8
-
-template
-__global__
-void matrix_transpose_f(scalar_t* __restrict__ out, const scalar_t* __restrict__ in, const unsigned dim0, const unsigned dim1)
-{
- // https://developer.nvidia.com/blog/efficient-matrix-transpose-cuda-cc/
- // https://arrayfire.com/cuda-optimization-tips-for-matrix-transpose-in-real-world-applications/
- __shared__ scalar_t shrdMem[NB][NB+1];
-
- const unsigned lx = threadIdx.x;
- const unsigned ly = threadIdx.y;
-
- unsigned gx = lx + NB * blockIdx.x;
- unsigned gy = ly + NB * blockIdx.y;
-
- #pragma unroll
- for (unsigned repeat = 0; repeat < NB; repeat += blockDim.y) {
- unsigned gy_ = gy + repeat;
- if (gx < dim0 && gy_ < dim1) {
- shrdMem[ly + repeat][lx] = in[gy_ * dim0 + gx];
- }
- }
- __syncthreads();
-
- gx = lx + NB * blockIdx.y;
- gy = ly + NB * blockIdx.x;
-
- #pragma unroll
- for (unsigned repeat = 0; repeat < NB; repeat += blockDim.y) {
- unsigned gy_ = gy + repeat;
- if (gx < dim1 && gy_ < dim0) {
- out[gy_ * dim1 + gx] = shrdMem[lx][ly + repeat];
- }
- }
-}
-
-
-template
-__global__
-void matrix_transpose_c(scalar_t* __restrict__ out, const scalar_t* __restrict__ in, const unsigned dim0, const unsigned dim1)
-{
- // https://developer.nvidia.com/blog/efficient-matrix-transpose-cuda-cc/
- // https://arrayfire.com/cuda-optimization-tips-for-matrix-transpose-in-real-world-applications/
- __shared__ scalar_t shrdMem[NB][NB+1];
-
- const unsigned lx = threadIdx.x;
- const unsigned ly = threadIdx.y;
-
- unsigned gx = lx + NB * blockIdx.x;
- unsigned gy = ly + NB * blockIdx.y;
-
- #pragma unroll
- for (unsigned repeat = 0; repeat < NB; repeat += blockDim.x) {
- unsigned gx_ = gx + repeat;
- if (gx_ < dim0 && gy < dim1) {
- shrdMem[lx + repeat][ly] = in[gx_ * dim1 + gy];
- }
- }
- __syncthreads();
-
- gx = lx + NB * blockIdx.y;
- gy = ly + NB * blockIdx.x;
-
- #pragma unroll
- for (unsigned repeat = 0; repeat < NB; repeat += blockDim.x) {
- unsigned gx_ = gx + repeat;
- if (gx_ < dim1 && gy < dim0) {
- out[gx_ * dim0 + gy] = shrdMem[ly][lx + repeat];
- }
- }
-}
-
-
-at::Tensor copy_transpose_kernel(
- const at::Tensor &input,
- at::Tensor &output) {
- CHECK_CUDA(input);
- CHECK_CUDA(output);
- TORCH_CHECK(input.size(0) == output.size(1) && input.size(1) == output.size(0),
- "Input and output matrices shapes must be consistent.");
- // TODO: Check strides are consistent
-
- const int64_t nx = input.size(0), ny = input.size(1);
- const bool fortran_contig = is_fortran_contig(input);
-
- const dim3 dimGrid(ceildiv(nx, NB), ceildiv(ny, NB), 1);
-
- AT_DISPATCH_FLOATING_TYPES(input.scalar_type(), "dispatch_copy_transpose", [&] {
- at::DeviceGuard g(input.device());
- at::cuda::CUDAStream stream = at::cuda::getCurrentCUDAStream();
- if (fortran_contig) {
- const dim3 dimBlock(NB, BLOCK_ROWS, 1);
- matrix_transpose_f<<>>(
- output.data_ptr(), input.data_ptr(), nx, ny);
- } else {
- const dim3 dimBlock(BLOCK_ROWS, NB, 1);
- matrix_transpose_c<<>>(
- output.data_ptr(), input.data_ptr(), nx, ny);
- }
- });
- return output;
-}
-
-} // namespace
-
-TORCH_LIBRARY_IMPL(falkon, CUDA, m) {
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::copy_transpose"),
- TORCH_FN(copy_transpose_kernel));
-}
-} // namespace ops
-} // namespace falkon
-
-
-
diff --git a/falkon/c_ext/ops/cuda/cuda_copy_triang.cu b/falkon/c_ext/ops/cuda/cuda_copy_triang.cu
deleted file mode 100644
index 25b1ce87..00000000
--- a/falkon/c_ext/ops/cuda/cuda_copy_triang.cu
+++ /dev/null
@@ -1,92 +0,0 @@
-#include
-#include
-#include
-#include
-#include
-
-#include "../helpers.h"
-
-
-namespace falkon {
-namespace ops {
-namespace {
-
-#define NB 64
-
-/*
- Matrix is size * size (no support for different size than stride).
- Columns are contiguous.
- The size * size grid is subdivided into NB * size blocks (of rows).
- Each block has NB threads, so each thread copies one row into one
- column (transpose).
- Not a particularly efficient implementation!
-*/
-template
-__global__ void copy_simple_kernel_lower(
- scalar_t* __restrict__ data,
- const size_t size) {
- const int i = blockIdx.x * blockDim.x + threadIdx.x;
- if (i < size) {
- int col_pos = i * size;
- for (int row_pos = i; row_pos < i + i * size; row_pos += size) {
- data[col_pos] = data[row_pos];
- col_pos++;
- }
- }
-}
-
-// Same as the _lower version, but we copy dataT to data instead!
-template
-__global__ void copy_simple_kernel_upper(
- scalar_t* __restrict__ data,
- const size_t size) {
- const int i = blockIdx.x * blockDim.x + threadIdx.x;
- if (i < size) {
- int col_pos = i * size;
- for (int row_pos = i; row_pos < i + i * size; row_pos += size) {
- data[row_pos] = data[col_pos];
- col_pos++;
- }
- }
-}
-
-at::Tensor copy_triang_kernel(
- at::Tensor &A,
- const bool upper) {
- CHECK_CUDA(A);
- TORCH_CHECK(A.size(0) == A.size(1), "A must be a square 2D matrix.");
-
- // Transpose matrix, and flip upper if matrix is C-contiguous.
- const bool fContig = is_fortran_contig(A);
- if (!fContig)
- A = at::transpose(A, 0, 1);
- const bool bupper = fContig ? upper : !upper;
- const int64_t nx = A.size(0);
- const dim3 dimGrid(ceildiv(nx, NB));
- const dim3 dimBlock(NB);
-
- AT_DISPATCH_FLOATING_TYPES(A.scalar_type(), "dispatch_copy_triang", [&] {
- at::cuda::CUDAStream stream = at::cuda::getCurrentCUDAStream();
- at::DeviceGuard g(A.device());
- if (bupper) {
- copy_simple_kernel_upper<<>>(A.data_ptr(), nx);
- } else {
- copy_simple_kernel_lower<<>>(A.data_ptr(), nx);
- }
- });
-
- if (!fContig)
- A = at::transpose(A, 0, 1);
- return A;
-}
-
-} // namespace
-
-TORCH_LIBRARY_IMPL(falkon, CUDA, m) {
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::copy_triang"),
- TORCH_FN(copy_triang_kernel));
-}
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/cuda/cuda_csr2dense.cu b/falkon/c_ext/ops/cuda/cuda_csr2dense.cu
deleted file mode 100644
index e58cfc33..00000000
--- a/falkon/c_ext/ops/cuda/cuda_csr2dense.cu
+++ /dev/null
@@ -1,195 +0,0 @@
-#include
-#include
-#include
-#include
-#include
-#include
-
-#include "../helpers.h"
-
-namespace falkon {
-namespace ops {
-namespace {
-
-#if defined(__CUDACC__) && (CUSPARSE_VERSION >= 11301)// || (!defined(_MSC_VER) && CUSPARSE_VERSION >= 10301))
-#define IS_GENERIC_AVAILABLE() 1
-#else
-#define IS_GENERIC_AVAILABLE() 0
-#endif
-
-#if IS_GENERIC_AVAILABLE()
-#include
-#endif
-
-// Helpers for dispatching int32 and int64 (indices). Only available in newer pytorch versions (TODO: Which versions? When can we delete this?)
-#ifndef AT_PRIVATE_CASE_TYPE_USING_HINT
-#define AT_PRIVATE_CASE_TYPE_USING_HINT(NAME, enum_type, type, HINT, ...) \
- case enum_type: { \
- using HINT = type; \
- return __VA_ARGS__(); \
- }
-#endif
-#ifndef AT_DISPATCH_INDEX_TYPES
-#define AT_DISPATCH_INDEX_TYPES(TYPE, NAME, ...) \
- [&] { \
- at::ScalarType _it = ::detail::scalar_type(TYPE); \
- switch (_it) { \
- AT_PRIVATE_CASE_TYPE_USING_HINT(NAME, at::ScalarType::Int, int32_t, index_t, __VA_ARGS__) \
- AT_PRIVATE_CASE_TYPE_USING_HINT(NAME, at::ScalarType::Long, int64_t, index_t, __VA_ARGS__)\
- default: \
- AT_ERROR(#NAME, " not implemented for '", toString(_it), "'"); \
- } \
- }()
-#endif
-
-
-#if IS_GENERIC_AVAILABLE()
-
-template
-void run_csr2dense(
- const at::Tensor &rowptr,
- const at::Tensor &col,
- const at::Tensor &val,
- at::Tensor &out) {
- auto handle = at::cuda::getCurrentCUDASparseHandle();
- constexpr auto cuda_value_type = std::is_same::value
- ? CUDA_R_32F : CUDA_R_64F;
- constexpr auto cusparse_index_type = std::is_same::value
- ? CUSPARSE_INDEX_32I : CUSPARSE_INDEX_64I;
- const auto dense_order = is_fortran_contig(out)
- ? CUSPARSE_ORDER_COL : CUSPARSE_ORDER_ROW;
-
- // Create sparse and dense matrix descriptors
- cusparseSpMatDescr_t csr_mat;
- TORCH_CUDASPARSE_CHECK(cusparseCreateCsr(
- /*output=*/&csr_mat,
- /*rows=*/out.size(0),
- /*cols=*/out.size(1),
- /*nnz=*/val.numel(),
- /*csrRowOffsets=*/const_cast(rowptr.data_ptr()),
- /*csrColInd=*/const_cast(col.data_ptr()),
- /*csrValues=*/const_cast(val.data_ptr()),
- /*csrRowOffsetsType=*/cusparse_index_type,
- /*csrColIndType=*/cusparse_index_type,
- /*idxBase=*/CUSPARSE_INDEX_BASE_ZERO,
- /*valueType=*/cuda_value_type
- ));
- cusparseDnMatDescr_t dn_mat;
- TORCH_CUDASPARSE_CHECK(cusparseCreateDnMat(
- /*output=*/&dn_mat,
- /*rows=*/out.size(0),
- /*cols=*/out.size(1),
- /*ld=*/out.stride(1),
- /*values=*/out.data_ptr(),
- /*valueType=*/cuda_value_type,
- /*order=*/dense_order
- ));
- // Check needed buffer size, and allocate it
- size_t buf_size;
- TORCH_CUDASPARSE_CHECK(cusparseSparseToDense_bufferSize(
- handle, csr_mat, dn_mat, CUSPARSE_SPARSETODENSE_ALG_DEFAULT, &buf_size));
- auto& allocator = *c10::cuda::CUDACachingAllocator::get();
- auto conv_buf = allocator.allocate(buf_size);
- // Run sparse->dense
- TORCH_CUDASPARSE_CHECK(cusparseSparseToDense(
- handle, csr_mat, dn_mat, CUSPARSE_SPARSETODENSE_ALG_DEFAULT, conv_buf.get()));
- // Cleanup
- TORCH_CUDASPARSE_CHECK(cusparseDestroySpMat(csr_mat));
- TORCH_CUDASPARSE_CHECK(cusparseDestroyDnMat(dn_mat));
-}
-
-#else // Non-generic implementation (using legacy cuSPARSE API)
-template
-cusparseStatus_t cusparseXcsr2dense(cusparseHandle_t handle,
- int m,
- int n,
- const cusparseMatDescr_t descrA,
- const value_t* csrValA,
- const int* csrRowPtrA,
- const int* csrColIndA,
- value_t* A,
- int lda) { }
-template<>
-cusparseStatus_t cusparseXcsr2dense(
- cusparseHandle_t handle, int m, int n, const cusparseMatDescr_t descrA, const float* csrValA, const int* csrRowPtrA, const int* csrColIndA, float* A, int lda) {
- return cusparseScsr2dense(handle, m, n, descrA, csrValA, csrRowPtrA, csrColIndA, A, lda);
-}
-template<>
-cusparseStatus_t cusparseXcsr2dense(
- cusparseHandle_t handle, int m, int n, const cusparseMatDescr_t descrA, const double* csrValA, const int* csrRowPtrA, const int* csrColIndA, double* A, int lda) {
- return cusparseDcsr2dense(handle, m, n, descrA, csrValA, csrRowPtrA, csrColIndA, A, lda);
-}
-
-template
-void run_csr2dense(
- const at::Tensor &rowptr,
- const at::Tensor &col,
- const at::Tensor &val,
- at::Tensor &out) {
- TORCH_CHECK(out.stride(0) == 1, "Output matrix is not F-contiguous");
-
- auto handle = at::cuda::getCurrentCUDASparseHandle();
-
- // Convert indices to int TODO: This may cause problems if it doesn't fit in int!
- auto rowptr_int = rowptr.toType(torch::kInt);
- auto col_int = col.toType(torch::kInt);
-
- // Creates default matrix descriptor (0-based and GENERAL matrix)
- cusparseMatDescr_t descr;
- TORCH_CUDASPARSE_CHECK(cusparseCreateMatDescr(&descr));
- TORCH_CUDASPARSE_CHECK(cusparseXcsr2dense(
- handle, /* cuSparse handle */
- (int)out.size(0), /* Number of rows */
- (int)out.size(1), /* Number of columns */
- descr, /* Descriptor for the dense matrix */
- val.data_ptr(), /* Non-zero elements of sparse matrix */
- rowptr_int.data_ptr(), /* CSR row indices */
- col_int.data_ptr(), /* CSR column indices */
- out.data_ptr(), /* Output data */
- (int)out.stride(1) /* Leading dimension of dense matrix */
- ));
- TORCH_CUDASPARSE_CHECK(cusparseDestroyMatDescr(descr));
-}
-#endif // end IS_GENERIC_AVAILABLE()
-
-
-at::Tensor csr2dense_kernel(
- const at::Tensor &rowptr,
- const at::Tensor &col,
- const at::Tensor &val,
- at::Tensor &out) {
- CHECK_CUDA(rowptr);
- CHECK_CUDA(col);
- CHECK_CUDA(val);
- CHECK_CUDA(out);
-
- const int M = out.size(0);
-
- TORCH_CHECK(
- rowptr.numel() - 1 == M, "Expected output with ", rowptr.numel() - 1, " rows but found ", M);
- TORCH_CHECK(val.dtype() == out.dtype(), "Expected csr and output matrix with the same dtypes but found ",
- val.dtype(), " and ", out.dtype());
- TORCH_CHECK(rowptr.device() == col.device() && col.device() == val.device(),
- "Expected all arrays of CSR matrix to be on the same device.");
- TORCH_CHECK(out.device() == val.device(),
- "Expected CSR and dense matrices to be on the same device.");
-
- at::DeviceGuard g(rowptr.device());
- AT_DISPATCH_FLOATING_TYPES(val.scalar_type(), "csr2dense_cuda_value", [&] {
- AT_DISPATCH_INDEX_TYPES(col.scalar_type(), "csr2dense_cuda_index", [&] {
- run_csr2dense(rowptr, col, val, out);
- });
- });
- return out;
-}
-
-} // namespace
-
-TORCH_LIBRARY_IMPL(falkon, CUDA, m) {
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::csr2dense"),
- TORCH_FN(csr2dense_kernel));
-}
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/cuda/cuda_helpers.cuh b/falkon/c_ext/ops/cuda/cuda_helpers.cuh
deleted file mode 100644
index 4e34d40c..00000000
--- a/falkon/c_ext/ops/cuda/cuda_helpers.cuh
+++ /dev/null
@@ -1,114 +0,0 @@
-#pragma once
-
-#include
-#include
-#include
-
-namespace falkon {
-namespace ops {
-
-#define FLK_CUSOLVER_CHECK(EXPR) \
- do { \
- cusolverStatus_t __err = EXPR; \
- TORCH_CHECK(__err == CUSOLVER_STATUS_SUCCESS, \
- "CUDA error: ", \
- cusolverGetErrorString(__err), \
- " when calling `" #EXPR "`"); \
- } while (0)
-
-
-static const char* cusolverGetErrorString(cusolverStatus_t error) {
- if (error == CUSOLVER_STATUS_SUCCESS) {
- return "CUBLAS_STATUS_SUCCESS";
- }
- if (error == CUSOLVER_STATUS_NOT_INITIALIZED) {
- return "CUSOLVER_STATUS_NOT_INITIALIZED";
- }
- if (error == CUSOLVER_STATUS_ALLOC_FAILED) {
- return "CUSOLVER_STATUS_ALLOC_FAILED";
- }
- if (error == CUSOLVER_STATUS_INVALID_VALUE) {
- return "CUSOLVER_STATUS_INVALID_VALUE";
- }
- if (error == CUSOLVER_STATUS_ARCH_MISMATCH) {
- return "CUSOLVER_STATUS_ARCH_MISMATCH";
- }
- if (error == CUSOLVER_STATUS_EXECUTION_FAILED) {
- return "CUSOLVER_STATUS_EXECUTION_FAILED";
- }
- if (error == CUSOLVER_STATUS_INTERNAL_ERROR) {
- return "CUSOLVER_STATUS_INTERNAL_ERROR";
- }
- if (error == CUSOLVER_STATUS_MATRIX_TYPE_NOT_SUPPORTED) {
- return "CUSOLVER_STATUS_MATRIX_TYPE_NOT_SUPPORTED";
- }
- return "";
-}
-
-
-#define FLK_CUDABLAS_CHECK(EXPR) \
- do { \
- cublasStatus_t __err = EXPR; \
- TORCH_CHECK(__err == CUBLAS_STATUS_SUCCESS, \
- "CuBLAS error: ", \
- cublasGetErrorString(__err), \
- " when calling `" #EXPR "`"); \
- } while (0)
-
-
-static const char* cublasGetErrorString(cublasStatus_t error) {
- if (error == CUBLAS_STATUS_SUCCESS) {
- return "CUBLAS_STATUS_SUCCESS";
- }
- if (error == CUBLAS_STATUS_NOT_INITIALIZED) {
- return "CUBLAS_STATUS_NOT_INITIALIZED";
- }
- if (error == CUBLAS_STATUS_ALLOC_FAILED) {
- return "CUBLAS_STATUS_ALLOC_FAILED";
- }
- if (error == CUBLAS_STATUS_INVALID_VALUE) {
- return "CUBLAS_STATUS_INVALID_VALUE";
- }
- if (error == CUBLAS_STATUS_ARCH_MISMATCH) {
- return "CUBLAS_STATUS_ARCH_MISMATCH";
- }
- if (error == CUBLAS_STATUS_MAPPING_ERROR) {
- return "CUBLAS_STATUS_MAPPING_ERROR";
- }
- if (error == CUBLAS_STATUS_EXECUTION_FAILED) {
- return "CUBLAS_STATUS_EXECUTION_FAILED";
- }
- if (error == CUBLAS_STATUS_INTERNAL_ERROR) {
- return "CUBLAS_STATUS_INTERNAL_ERROR";
- }
- if (error == CUBLAS_STATUS_NOT_SUPPORTED) {
- return "CUBLAS_STATUS_NOT_SUPPORTED";
- }
-#ifdef CUBLAS_STATUS_LICENSE_ERROR
- if (error == CUBLAS_STATUS_LICENSE_ERROR) {
- return "CUBLAS_STATUS_LICENSE_ERROR";
- }
-#endif
- return "";
-}
-
-
-inline __device__ int2 tri_index_lower(const int linear_index) {
- const int row = (int)((-1 + sqrt((double)(8*linear_index + 1))) / 2.0);
- return make_int2(
- linear_index - row * (row + 1) / 2,
- row
- );
-}
-
-
-inline __device__ int2 tri_index_upper(const int linear_index) {
- const int row = (int)((-1 + sqrt((double)(8*linear_index + 1))) / 2.0);
- return make_int2(
- row,
- linear_index - row * (row + 1) / 2
- );
-}
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/cuda/cuda_lauum.cu b/falkon/c_ext/ops/cuda/cuda_lauum.cu
deleted file mode 100644
index 791e13dd..00000000
--- a/falkon/c_ext/ops/cuda/cuda_lauum.cu
+++ /dev/null
@@ -1,192 +0,0 @@
-#include
-#include
-#include
-#include
-
-#include "../helpers.h"
-#include "cuda_helpers.cuh"
-
-namespace falkon {
-namespace ops {
-namespace {
-
-#define BLOCK_SIZE 32
-//#define DEBUG
-
-template
-__global__
-void upper_cuda_lauum_ker(
- const scalar_t* __restrict__ in,
- scalar_t* __restrict__ out,
- const int size,
- const int in_stride,
- const int out_stride,
- const int grid_size) {
- const int2 tile_pos = tri_index_upper(blockIdx.x);
- const int tx = threadIdx.x;
- const int ty = threadIdx.y;
-
- // tx and ty are inverted (i.e. tx goes on along the rows,
- // ty along the columns). This allows coalesced store in the
- // write-back phase
- const int A_row = tile_pos.y * BLOCK_SIZE + tx;
- const int B_row = tile_pos.x * BLOCK_SIZE + tx;
-
- // Initialize shared mem
- __shared__ scalar_t A_tile[BLOCK_SIZE*BLOCK_SIZE];
- // The first dimension of the B_tile needs to be increased to prevent bank
- // conflicts in B_tile load.
- __shared__ scalar_t B_tile[(BLOCK_SIZE + 1) * BLOCK_SIZE];
-
- // Initialize thread-local output (register)
- scalar_t accumulator = 0;
-
- for (int tile_i = tile_pos.x; tile_i < grid_size; tile_i++) {
- const int i = tile_i * BLOCK_SIZE + ty;
- const int i_pos = i * in_stride;
-
- // Copy item input[row, i] and input[col, i].T to shared memory
- A_tile[ty * BLOCK_SIZE + tx] = 0;
- B_tile[tx * (BLOCK_SIZE + 1) + ty] = 0;
- if (i < size && A_row <= i) {
- A_tile[ty * BLOCK_SIZE + tx] = in[A_row + i_pos];
- }
- if (i < size && B_row <= i) {
- B_tile[tx * (BLOCK_SIZE + 1) + ty] = in[B_row + i_pos];
- }
- __syncthreads();
-
- #ifdef DEBUG
- printf("(tr=%d, tc=%d, ti=%d, i=%d) - A[%d, %d] = %f\n", tile_pos.y, tile_pos.x, tile_i, i, tx, ty, A_tile[tx][ty]);
- __syncthreads();
- printf("(tr=%d, tc=%d, ti=%d, i=%d) - B[%d, %d] = %f\n", tile_pos.y, tile_pos.x, tile_i, i, tx, ty, B_tile[tx][ty]);
- __syncthreads();
- #endif
-
- // Compute
- for (int k = 0; k < BLOCK_SIZE; k++) {
- // Both accesses to A, B are done to prevent bank conflicts.
- // In practice we need to avoid stuff like A[tx][k] where tx is on the first dimension.
- accumulator = accumulator + A_tile[k * BLOCK_SIZE + tx] * B_tile[ty * (BLOCK_SIZE + 1) + k];
- }
- __syncthreads();
- }
- // Write-back
- const int col = tile_pos.x * BLOCK_SIZE + ty;
- const int row = tile_pos.y * BLOCK_SIZE + tx;
- if (row <= col && col < size && row < size) {
- out[row + col * out_stride] = accumulator;
- }
-}
-
-
-template
-__global__
-void lower_cuda_lauum_ker(
- const scalar_t* __restrict__ in,
- scalar_t *out,
- const int size,
- const int in_stride,
- const int out_stride,
- const int grid_size) {
- // Determine the triangular tile of the output (0 indexed)
- const int2 tile_pos = tri_index_lower(blockIdx.x);
- const int tx = threadIdx.x;
- const int ty = threadIdx.y;
-
- // A_col is the global column of the current thread for the A tile (transposed)
- const int A_col = tile_pos.y * BLOCK_SIZE + tx;
- // B_col is the global column of the current thread for the B tile (not transposed)
- const int B_col = tile_pos.x * BLOCK_SIZE + tx;
-
- // Initialize shared mem
- __shared__ scalar_t A_tile[BLOCK_SIZE][BLOCK_SIZE+1];
- __shared__ scalar_t B_tile[BLOCK_SIZE][BLOCK_SIZE];
-
- // Initialize thread-local output (register)
- scalar_t accumulator = 0;
-
- for (int tile_i = tile_pos.y; tile_i < grid_size; tile_i++) {
- // i is the row position of this thread within tile-rows
- int i = tile_i * BLOCK_SIZE + ty;
-
- // Copy item input[i, row].T and input[i, col] to shared memory
- A_tile[ty][tx] = 0;
- B_tile[ty][tx] = 0;
- if (i < size && A_col <= i) {
- A_tile[ty][tx] = in[i + in_stride * A_col];
- }
- if (i < size && B_col <= i) {
- B_tile[ty][tx] = in[i + in_stride * B_col];
- }
- __syncthreads();
-
- #ifdef DEBUG
- printf("(tr=%d, tc=%d, ti=%d, i=%d) - A[%d, %d] = %f\n", tile_pos.y, tile_pos.x, tile_i, i, ty, tx, A_tile[ty][tx]);
- __syncthreads();
- printf("(tr=%d, tc=%d, ti=%d, i=%d) - B[%d, %d] = %f\n", tile_pos.y, tile_pos.x, tile_i, i, ty, tx, B_tile[ty][tx]);
- __syncthreads();
- #endif
-
- // Compute
- for (int k = 0; k < BLOCK_SIZE; k++) {
- accumulator = accumulator + A_tile[k][ty] * B_tile[k][tx];
- }
- __syncthreads();
- }
- // Write-back
- const int col = tile_pos.x * BLOCK_SIZE + tx;
- const int row = tile_pos.y * BLOCK_SIZE + ty;
- if (row >= col && col < size && row < size) {
- out[row + col * out_stride] = accumulator;
- }
-}
-
-
-at::Tensor lauum_kernel(
- const int64_t n,
- const at::Tensor &A,
- const int64_t lda,
- at::Tensor &B,
- const int64_t ldb,
- const bool lower) {
- // TODO: Consistency checks
- CHECK_CUDA(A);
- CHECK_CUDA(B);
- const auto scalar_type = A.scalar_type();
- const auto size = n;
- const auto in_stride = lda;
- const auto out_stride = ldb;
-
- // Setup CUDA grid dimensions:
- // grid is 1D, so that we can only consider triangularly-appropriate tiles
- // blocks are 2D, with a fixed block size
- const int64_t grid_height = ceildiv(size, BLOCK_SIZE);
- const dim3 dimGrid(grid_height * (grid_height + 1) / 2, 1);
- const dim3 dimBlock(BLOCK_SIZE, BLOCK_SIZE);
-
- AT_DISPATCH_FLOATING_TYPES(scalar_type, "dispatch_lauum_cuda", [&] {
- at::DeviceGuard g(A.device());
- at::cuda::CUDAStream stream = at::cuda::getCurrentCUDAStream();
- if (lower) {
- lower_cuda_lauum_ker<<>>(
- A.data_ptr(), B.data_ptr(), (int)size, (int)in_stride, (int)out_stride, (int)grid_height);
- }
- else {
- upper_cuda_lauum_ker<<>>(
- A.data_ptr(), B.data_ptr(), (int)size, (int)in_stride, (int)out_stride, (int)grid_height);
- }
- });
- return B;
-}
-
-} // namespace
-
-TORCH_LIBRARY_IMPL(falkon, CUDA, m) {
- m.impl(
- TORCH_SELECTIVE_NAME("falkon::lauum"),
- TORCH_FN(lauum_kernel));
-}
-
-} // namespace ops
-} // namespace falkon
diff --git a/falkon/c_ext/ops/cuda/cuda_mul_triang.cu b/falkon/c_ext/ops/cuda/cuda_mul_triang.cu
deleted file mode 100644
index 17aaea2f..00000000
--- a/falkon/c_ext/ops/cuda/cuda_mul_triang.cu
+++ /dev/null
@@ -1,111 +0,0 @@
-#include
-#include
-#include
-#include