diff --git a/openfl-tutorials/experimental/workflow/LLM/phi-4-peft-quantization.ipynb b/openfl-tutorials/experimental/workflow/LLM/phi-4-peft-quantization.ipynb new file mode 100644 index 0000000000..a8c3dba3ca --- /dev/null +++ b/openfl-tutorials/experimental/workflow/LLM/phi-4-peft-quantization.ipynb @@ -0,0 +1,12060 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a59f475d-d843-46bc-b75e-10984b687ed3", + "metadata": {}, + "source": [ + "# LLM Federated Finetuning with PEFT and Quantization" + ] + }, + { + "cell_type": "markdown", + "id": "20c74cb9-51a2-42e2-893f-d280e227e8bf", + "metadata": {}, + "source": [ + "## Overview\n", + "\n", + "This tutorial demonstrates how to efficiently fine-tune Microsoft's Phi-4 model (7B parameter variant) in a federated learning workflow using OpenFL framework with advanced techniques for memory optimization and performance enhancement. The approach combines:\n", + "\n", + "### Memory Optimization Techniques\n", + "- **Parameter-Efficient Fine-Tuning (PEFT)**: Using Low-Rank Adaptation (LoRA) to fine-tune only a small subset of model parameters\n", + "- **Quantization**: Comparing 4-bit (NF4) and 8-bit quantization approaches with QLoRA to reduce memory footprint\n", + "- **Gradient Checkpointing**: Trading computation for memory by recomputing activations during backpropagation\n", + "\n", + "### Training Enhancements\n", + "- **Partial Round Updates**: Breaking each global round into partial updates for more frequent knowledge sharing\n", + "- **Fixed Training Steps**: Using a fixed number of training steps (100) equivalent to one epoch\n", + "- **Optimizer State Preservation**: Maintaining optimizer momentum across federation rounds\n", + "- **Memory Usage Tracking**: Detailed monitoring of GPU/CPU memory consumption across training phases\n", + "\n", + "### Federated Learning Architecture\n", + "- **Server-Client Model**: Central aggregator and multiple collaborators (simulated locally)\n", + "- **Federated 
Averaging**: Weighted parameter averaging between collaborator models\n", + "- **Metrics Visualization**: Tracking and comparing training loss, validation loss, and memory usage\n", + "\n", + "The tutorial implements a complete workflow that addresses common challenges in federated fine-tuning of large language models, including memory constraints, training efficiency, and performance metrics tracking across heterogeneous clients." + ] + }, + { + "cell_type": "markdown", + "id": "7241cc9a", + "metadata": {}, + "source": [ + "Before running the notebook make sure to install NVIDIA drivers using the below command\n", + "```\n", + "sudo apt update \n", + "sudo apt install -y nvidia-driver-550 \n", + "sudo reboot\n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "d07c32d3-1a8d-4162-af45-bc3a10e0ae3f", + "metadata": {}, + "source": [ + "## Installation" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "05b2ad75-8c7b-499c-902e-dbd5b24361bc", + "metadata": {}, + "outputs": [], + "source": [ + "!pip install torch transformers peft datasets trl==0.12.2 bitsandbytes accelerate -q" + ] + }, + { + "cell_type": "markdown", + "id": "440a9c39-ec42-45a5-80f6-9a9e0bc90d2f", + "metadata": {}, + "source": [ + "## Import Libraries" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "a3e6c3f4-dec3-4d3a-97cb-5b35bec06046", + "metadata": {}, + "outputs": [], + "source": [ + "!pip install seaborn matplotlib pandas -q" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "be4690ae-0671-4d3a-8f21-620ab865a03e", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/azureuser/env_name/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. 
# Memory optimization setup: let the CUDA caching allocator grow in
# expandable segments, and ask transformers to prefer flash-attention kernels.
os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True"
os.environ["TRANSFORMERS_ATTN_IMPLEMENTATION"] = "flash_attention_2"

def clear_gpu():
    """Release unreferenced GPU memory back to the CUDA driver.

    Runs the Python garbage collector first so tensors kept alive only by
    collectable reference cycles are dropped before the CUDA caching
    allocator is asked to empty its cache; collecting after emptying (as
    before) can leave those blocks cached until the next call. Safe to
    call on CPU-only hosts.
    """
    gc.collect()  # free Python-side references before emptying the CUDA cache
    if torch.cuda.is_available():
        torch.cuda.empty_cache()

clear_gpu()
"metadata": {}, + "source": [ + "## Acquiring and preprocessing dataset\n", + "We can download the dataset directly from the [LLM-Adapters repository](https://github.com/AGI-Edgerunners/LLM-Adapters)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "a50ae4a4-628d-4f45-a9fc-c5c437df229e", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Dataset already exists locally.\n" + ] + } + ], + "source": [ + "# Import libraries needed for downloading and verifying the dataset\n", + "import hashlib\n", + "import requests\n", + "\n", + "def file_checksum(file_path, algorithm=\"sha256\"):\n", + " \"\"\"\n", + " Calculate the checksum of a file using the specified hashing algorithm.\n", + " \n", + " Args:\n", + " file_path (str): The path to the file for which the checksum is to be calculated.\n", + " algorithm (str): The hashing algorithm to use (default is 'sha256').\n", + " \n", + " Returns:\n", + " str: The calculated checksum of the file.\n", + " \"\"\"\n", + " hash_func = hashlib.new(algorithm)\n", + " with open(file_path, \"rb\") as f:\n", + " for chunk in iter(lambda: f.read(4096), b\"\"):\n", + " hash_func.update(chunk)\n", + " return hash_func.hexdigest()\n", + "\n", + "\n", + "# Download the dataset if it doesn't exist locally\n", + "if not os.path.exists(\"math_10k.json\"):\n", + " print(\"Downloading math_10k.json dataset...\")\n", + " r = requests.get(\n", + " \"https://raw.githubusercontent.com/AGI-Edgerunners/LLM-Adapters/main/ft-training_set/math_10k.json\",\n", + " )\n", + " with open(\n", + " \"math_10k.json\",\n", + " \"wb\",\n", + " ) as f:\n", + " f.write(r.content)\n", + " print(\"Download complete.\")\n", + "\n", + " # Verify the integrity of the downloaded file\n", + " actual_checksum = file_checksum(\"math_10k.json\")\n", + " expected_checksum = \"0342d0d860ad8592b579329337c90e42eefd3d9f2898043140cbd120630418b8\"\n", + " if actual_checksum != expected_checksum:\n", + " raise 
ValueError(\n", + " \"Checksum verification failed. The file may have been altered.\"\n", + " )\n", + " print(\"Checksum verification successful.\")\n", + "else:\n", + " print(\"Dataset already exists locally.\")\n", + "\n", + "# Set the dataset path to be used later\n", + "dataset_name = \"math_10k.json\"" + ] + }, + { + "cell_type": "markdown", + "id": "08576aa0-f628-4ae6-8fc3-dd167d164784", + "metadata": {}, + "source": [ + "## Configuration" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "eada9809-468a-47c6-9b03-55aa887c9487", + "metadata": {}, + "outputs": [], + "source": [ + "# Model and dataset\n", + "model_name = \"microsoft/phi-4\"\n", + "dataset_name = \"math_10k.json\"\n", + "\n", + "# 4-bit QLoRA configuration\n", + "bnb_config_4bit = BitsAndBytesConfig(\n", + " load_in_4bit=True,\n", + " bnb_4bit_quant_type=\"nf4\",\n", + " bnb_4bit_compute_dtype=torch.bfloat16,\n", + " bnb_4bit_use_double_quant=True, # Enable double quantization for further memory saving\n", + ")\n", + "\n", + "# 8-bit QLoRA configuration with more aggressive memory savings\n", + "bnb_config_8bit = BitsAndBytesConfig(\n", + " load_in_8bit=True,\n", + " llm_int8_enable_fp32_cpu_offload=True,\n", + " llm_int8_skip_modules=['lm_head'],\n", + " llm_int8_threshold=6.0,\n", + " llm_int8_has_fp16_weight=False,\n", + ")\n", + "\n", + "# Active quantization config (will be set to either 4-bit or 8-bit)\n", + "bnb_config = bnb_config_4bit # Default to 4-bit\n", + "\n", + "# LoRA configuration - reduce parameters to save memory\n", + "peft_config = LoraConfig(\n", + " r=4, # Reduced from 8 to save memory\n", + " lora_alpha=16,\n", + " lora_dropout=0.01,\n", + " bias=\"none\",\n", + " task_type=\"CAUSAL_LM\",\n", + " target_modules=\"all-linear\",\n", + ")\n", + "\n", + "# Training configuration with memory optimizations\n", + "training_args = TrainingArguments(\n", + " output_dir=\"./results\",\n", + " # Reduced steps for testing\n", + " max_steps=50, # Reduced from 100 to 
50 to save memory\n", + " per_device_train_batch_size=1,\n", + " per_device_eval_batch_size=1,\n", + " gradient_accumulation_steps=8, # Increased from 4 to 8 to reduce memory pressure\n", + " optim=\"adamw_torch_fused\",\n", + " # More frequent saving and logging\n", + " save_steps=25,\n", + " logging_steps=5,\n", + " learning_rate=3e-4,\n", + " weight_decay=0.001,\n", + " fp16=False,\n", + " bf16=True,\n", + " max_grad_norm=0.5,\n", + " warmup_ratio=0.02,\n", + " lr_scheduler_type=\"cosine\",\n", + " gradient_checkpointing=True,\n", + " report_to=\"none\",\n", + " # Enable memory optimization options\n", + " deepspeed=None, # Not using DeepSpeed but enabling other memory optimizations\n", + " optim_target_modules=[\"c_attn\", \"c_proj\"], # Optimize specific modules\n", + " # Add auto memory optimization flag\n", + " auto_find_batch_size=True # Automatically find the largest batch size that fits in memory\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "ffe93234-2a1a-4809-a431-efe2f35ce496", + "metadata": {}, + "source": [ + "## Load and Prepare Model" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "8ab371f1-64c3-4225-82e7-fb3c5b05578c", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|██████████| 6/6 [00:04<00:00, 1.31it/s]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "trainable params: 13,926,400 || all params: 14,673,433,600 || trainable%: 0.0949\n" + ] + } + ], + "source": [ + "# Load tokenizer\n", + "tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)\n", + "tokenizer.pad_token = tokenizer.eos_token\n", + "tokenizer.padding_side = \"right\"\n", + "\n", + "# Load model with quantization\n", + "model = AutoModelForCausalLM.from_pretrained(\n", + " model_name,\n", + " quantization_config=bnb_config,\n", + " device_map=\"auto\",\n", + " trust_remote_code=True\n", + ")\n", + "\n", + "# Prepare 
def format_prompt(example):
    """Render one dataset record as an Alpaca-style instruction prompt.

    The '### Input:' section is emitted only when the record's 'input'
    field is truthy (non-empty); otherwise the prompt goes straight from
    the instruction to the response.
    """
    sections = [
        "Below is an instruction that describes a task. Write a response that appropriately completes the request.",
        f"### Instruction:\n{example['instruction']}",
    ]
    if example["input"]:
        sections.append(f"### Input:\n{example['input']}")
    sections.append(f"### Response:\n{example['output']}")
    return "\n\n".join(sections)
"/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': dataset_text_field, max_seq_length. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "Map: 100%|██████████| 8927/8927 [00:02<00:00, 3310.91 examples/s]\n", + "Map: 100%|██████████| 992/992 [00:00<00:00, 3267.10 examples/s]\n", + "max_steps is given, it will override any value given in num_train_epochs\n" + ] + } + ], + "source": [ + "# Create SFTTrainer\n", + "trainer = SFTTrainer(\n", + " model=model,\n", + " train_dataset=train_dataset,\n", + " eval_dataset=eval_dataset,\n", + " peft_config=peft_config,\n", + " dataset_text_field=\"text\",\n", + " max_seq_length=512, # Reduced from 1024 to save memory\n", + " tokenizer=tokenizer,\n", + " args=training_args\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "810eb75e", + "metadata": {}, + "source": [ + "## Federated Averaging Function" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "58298e8e-ab9e-4377-966e-143823441697", + "metadata": {}, + "outputs": [], + "source": [ + "def FedAvg(peft_params, model, weights=None):\n", + " \"\"\"\n", + " Perform Federated Averaging (FedAvg) on the model parameters.\n", + "\n", + " Parameters:\n", + " peft_params (list): A list of 
state dictionaries containing the model parameters from different clients.\n", + " model (torch.nn.Module): The model to which the averaged parameters will be applied.\n", + " weights (list, optional): A list of weights for averaging the parameters. If None, equal weights are used.\n", + "\n", + " Returns:\n", + " torch.nn.Module: The model with the averaged parameters applied.\n", + " \"\"\"\n", + " state_dicts = peft_params\n", + " state_dict = get_peft_model_state_dict(model)\n", + " for key in peft_params[0]:\n", + " dtype = state_dicts[0][key].dtype\n", + " state_dict[key] = torch.from_numpy(\n", + " np.average(\n", + " [state[key].cpu().to(torch.float).numpy() for state in state_dicts], axis=0, weights=weights\n", + " )\n", + " ).to(dtype)\n", + " set_peft_model_state_dict(model, state_dict)\n", + " return model" + ] + }, + { + "cell_type": "markdown", + "id": "e120a656-f4a5-47a5-a3d4-62c5f3672bba", + "metadata": {}, + "source": [ + "## Federated Learning Workflow" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "e5e108c6-5150-4931-9c01-6b64a913fa04", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Aggregator step \"start\" registered\n", + "Collaborator step \"aggregated_model_validation\" registered\n", + "Collaborator step \"train\" registered\n", + "Collaborator step \"local_model_validation\" registered\n", + "Aggregator step \"join\" registered\n", + "Aggregator step \"end\" registered\n" + ] + } + ], + "source": [ + "# Import the required PrinterCallback for proper initialization/removal\n", + "from transformers.trainer_callback import PrinterCallback\n", + "import transformers\n", + "import gc\n", + "import psutil\n", + "import os\n", + "import math\n", + "import time\n", + "\n", + "class FederatedFlow(FLSpec):\n", + " def __init__(self, model=None, optimizer=None, rounds=3, quant_type=\"4bit\", **kwargs):\n", + " \"\"\"\n", + " Initialize the class with the given model, 
optimizer, and training parameters.\n", + "\n", + " Parameters:\n", + " model (torch.nn.Module, optional): The model to be used. If None, a ValueError is raised.\n", + " optimizer (torch.optim.Optimizer, optional): The optimizer to be used.\n", + " rounds (int, optional): The number of rounds for training or processing (default is 3).\n", + " quant_type (str, optional): Quantization type, either \"4bit\" or \"8bit\".\n", + " **kwargs: Additional keyword arguments to be passed to the superclass initializer.\n", + "\n", + " Raises:\n", + " ValueError: If no model is provided.\n", + " \"\"\"\n", + " super().__init__(**kwargs)\n", + " if model is not None:\n", + " self.model = model\n", + " self.peft_params = get_peft_model_state_dict(self.model)\n", + " self.optimizer = optimizer\n", + " else:\n", + " raise ValueError(\"No model inputted\")\n", + "\n", + " self.rounds = rounds\n", + " self.quant_type = quant_type\n", + " # Initialize histories for tracking metrics over rounds\n", + " self.average_loss_history = []\n", + " self.agg_model_loss_history = []\n", + " self.local_model_loss_history = []\n", + " # Dictionary to store optimizer states for each collaborator\n", + " self.optimizer_states = {}\n", + " \n", + "\n", + " @aggregator\n", + " def start(self):\n", + " \"\"\"\n", + " Initialize the model and set up the collaborators for federated learning.\n", + "\n", + " This method performs the initial setup for the model, including setting the\n", + " collaborators, initializing private variables, and starting the first round\n", + " of the federated learning process.\n", + " \"\"\"\n", + " print(f\"Performing initialization for model with {self.quant_type} quantization\")\n", + " print(f\"Using {self.rounds} main rounds with partial round updates\")\n", + " self.collaborators = self.runtime.collaborators\n", + " self.current_round = 0\n", + " self.current_sub_round = 0\n", + " # Initialize dictionary to collect memory stats\n", + " # Check if collaborators are 
objects with name attribute or strings\n", + " if hasattr(self.collaborators[0], 'name'):\n", + " collab_names = [c.name for c in self.collaborators]\n", + " else:\n", + " # If collaborators are already strings, use them directly\n", + " collab_names = self.collaborators\n", + " self.all_memory_stats = {collab: {} for collab in collab_names}\n", + " # Initialize optimizer states dictionary for each collaborator\n", + " self.optimizer_states = {collab: None for collab in collab_names}\n", + " self.next(\n", + " self.aggregated_model_validation,\n", + " foreach=\"collaborators\",\n", + " )\n", + "\n", + " \n", + " @collaborator\n", + " def aggregated_model_validation(self):\n", + " \"\"\"\n", + " Perform aggregated model validation for a collaborator.\n", + "\n", + " This method loads the model, applies the PEFT configuration, and evaluates\n", + " the model using the provided training and evaluation datasets. The validation\n", + " score is then stored and the next step in the process is triggered.\n", + " \"\"\"\n", + " print(f\"[Round {self.current_round}, Update {self.current_sub_round}] Performing aggregated model validation for collaborator {self.input} with {self.quant_type}\")\n", + " # Initialize memory tracker for this collaborator\n", + " self.memory_tracker = MemoryTracker(self.input, self.quant_type)\n", + " self.memory_tracker.reset_peak()\n", + " \n", + " # Choose quantization config based on quant_type\n", + " if self.quant_type == \"4bit\":\n", + " quant_config = bnb_config_4bit\n", + " else: # 8bit\n", + " quant_config = bnb_config_8bit\n", + " \n", + " # Define device_map variable\n", + " device_map = {\"\": torch.cuda.current_device()} if torch.cuda.is_available() else \"cpu\"\n", + " try:\n", + " self.model = AutoModelForCausalLM.from_pretrained(\n", + " model_name,\n", + " quantization_config=quant_config,\n", + " device_map=device_map,\n", + " trust_remote_code=True\n", + " )\n", + " self.memory_tracker.log(\"model_load\")\n", + " except 
ValueError:\n", + " # Fallback to CPU if GPU memory is insufficient\n", + " print(f\"Falling back to CPU mode for {self.input}\")\n", + " self.model = AutoModelForCausalLM.from_pretrained(\n", + " model_name,\n", + " device_map=\"cpu\",\n", + " trust_remote_code=True\n", + " )\n", + " self.memory_tracker.log(\"model_load\")\n", + " \n", + " self.model = prepare_model_for_kbit_training(self.model)\n", + " self.model = get_peft_model(self.model, peft_config)\n", + " set_peft_model_state_dict(self.model, self.peft_params)\n", + " \n", + " # Use fixed number of steps (max_steps) for each round\n", + " steps_per_round = training_args.max_steps # Use the hardcoded 100 steps\n", + " \n", + " # Create a custom TrainingArguments for this round\n", + " self.round_args = TrainingArguments(\n", + " output_dir=training_args.output_dir,\n", + " max_steps=steps_per_round, # Use fixed steps per round\n", + " per_device_train_batch_size=training_args.per_device_train_batch_size,\n", + " gradient_accumulation_steps=training_args.gradient_accumulation_steps,\n", + " optim=training_args.optim,\n", + " save_steps=steps_per_round // 2 or 1, # More frequent saving\n", + " logging_steps=5,\n", + " learning_rate=training_args.learning_rate,\n", + " weight_decay=training_args.weight_decay,\n", + " fp16=training_args.fp16,\n", + " bf16=training_args.bf16,\n", + " max_grad_norm=training_args.max_grad_norm,\n", + " warmup_ratio=training_args.warmup_ratio,\n", + " lr_scheduler_type=training_args.lr_scheduler_type,\n", + " gradient_checkpointing=training_args.gradient_checkpointing,\n", + " report_to=training_args.report_to\n", + " )\n", + " \n", + " print(f\"[{self.input}] Training with {steps_per_round} steps\")\n", + " \n", + " trainer = SFTTrainer(\n", + " model=self.model,\n", + " args=self.round_args, # Use round specific args\n", + " peft_config=peft_config,\n", + " train_dataset=self.train_dataset,\n", + " eval_dataset=self.eval_dataset,\n", + " max_seq_length=1024,\n", + " 
dataset_text_field=\"text\",\n", + " tokenizer=tokenizer,\n", + " packing=True,\n", + " data_collator=transformers.DataCollatorForSeq2Seq(\n", + " tokenizer, pad_to_multiple_of=8, return_tensors=\"pt\", padding=True\n", + " ),\n", + " )\n", + "\n", + " trainer.remove_callback(PrinterCallback)\n", + " out = trainer.evaluate()\n", + " self.agg_validation_score = out[\"eval_loss\"]\n", + " print(f\"{self.input} evaluation loss: {self.agg_validation_score}\")\n", + " self.memory_tracker.log_loss(eval_loss=self.agg_validation_score) # Log eval loss\n", + " self.memory_tracker.update_peak()\n", + " self.next(self.train)\n", + "\n", + " @collaborator\n", + " def train(self):\n", + " \"\"\"\n", + " Train the model for a collaborator with partial epoch updates.\n", + "\n", + " This method trains the model using the provided training dataset,\n", + " but processes it in smaller chunks (partial epochs) to allow more\n", + " frequent parameter sharing between collaborators.\n", + " \"\"\"\n", + " self.memory_tracker.log(\"before_training\")\n", + " \n", + " # Reduce steps for 8-bit quantization\n", + " if self.quant_type == \"8bit\":\n", + " max_steps = training_args.max_steps // 2 # Half the steps for 8-bit\n", + " else:\n", + " max_steps = training_args.max_steps\n", + " \n", + " # Define partial training args\n", + " self.sub_round_args = TrainingArguments(\n", + " output_dir=training_args.output_dir,\n", + " max_steps=max_steps,\n", + " per_device_train_batch_size=training_args.per_device_train_batch_size,\n", + " gradient_accumulation_steps=training_args.gradient_accumulation_steps,\n", + " optim=training_args.optim,\n", + " save_steps=max_steps // 4,\n", + " logging_steps=2,\n", + " learning_rate=training_args.learning_rate,\n", + " weight_decay=training_args.weight_decay,\n", + " fp16=training_args.fp16,\n", + " bf16=training_args.bf16,\n", + " max_grad_norm=training_args.max_grad_norm,\n", + " warmup_ratio=training_args.warmup_ratio,\n", + " 
lr_scheduler_type=training_args.lr_scheduler_type,\n", + " gradient_checkpointing=training_args.gradient_checkpointing,\n", + " report_to=training_args.report_to,\n", + " auto_find_batch_size=True # Add auto batch size finding\n", + " )\n", + " \n", + " # Create trainer instance with our custom training args\n", + " trainer = SFTTrainer(\n", + " model=self.model,\n", + " args=self.sub_round_args,\n", + " peft_config=peft_config,\n", + " train_dataset=self.train_dataset,\n", + " eval_dataset=self.eval_dataset,\n", + " max_seq_length=512, # Reduced sequence length\n", + " dataset_text_field=\"text\",\n", + " tokenizer=tokenizer,\n", + " )\n", + " \n", + " # Make sure optimizer is initialized before training\n", + " if not hasattr(trainer, 'optimizer') or trainer.optimizer is None:\n", + " trainer.create_optimizer_and_scheduler(num_training_steps=self.sub_round_args.max_steps)\n", + " if trainer.optimizer is None:\n", + " print(f\"[{self.input}] Warning: Failed to create optimizer. Creating standard optimizer.\")\n", + " # Create a simple optimizer if trainer.create_optimizer_and_scheduler failed\n", + " from torch.optim import AdamW\n", + " trainer.optimizer = AdamW(\n", + " trainer.model.parameters(),\n", + " lr=self.sub_round_args.learning_rate,\n", + " weight_decay=self.sub_round_args.weight_decay\n", + " )\n", + " # Create a simple scheduler\n", + " from transformers import get_scheduler\n", + " trainer.lr_scheduler = get_scheduler(\n", + " name=self.sub_round_args.lr_scheduler_type,\n", + " optimizer=trainer.optimizer,\n", + " num_warmup_steps=int(self.sub_round_args.max_steps * self.sub_round_args.warmup_ratio),\n", + " num_training_steps=self.sub_round_args.max_steps,\n", + " )\n", + " \n", + " # Restore optimizer state if available from previous rounds\n", + " if self.optimizer_states.get(self.input) is not None:\n", + " print(f\"[{self.input}] Restoring optimizer state\")\n", + " try:\n", + " # Load the optimizer state\n", + " 
trainer.optimizer.load_state_dict(self.optimizer_states[self.input])\n", + " except Exception as e:\n", + " print(f\"Failed to restore optimizer state: {e}\")\n", + " \n", + " # For 8-bit quantization with limited GPU memory, use simplified training\n", + " if self.quant_type == \"8bit\":\n", + " # Simplify training for 8-bit\n", + " try:\n", + " # Use trainer.train() for simpler training flow\n", + " trainer.train(resume_from_checkpoint=False)\n", + " # Get the last loss\n", + " self.loss = trainer.state.log_history[-1].get('loss', float('inf'))\n", + " except Exception as e:\n", + " print(f\"Training failed with error: {str(e)}\")\n", + " self.loss = float('inf') # Set to infinity to indicate failure\n", + " else:\n", + " # Regular training with manual control for 4-bit\n", + " print(f\"[{self.input}] Starting partial epoch training with {max_steps} steps\")\n", + " trainer.model.train()\n", + " total_loss = 0\n", + " step_count = 0\n", + " \n", + " # Set up dataloader for manual batching\n", + " dataloader = trainer.get_train_dataloader()\n", + " \n", + " # Process batches manually for more control\n", + " for step, inputs in enumerate(dataloader):\n", + " # Move inputs to the appropriate device\n", + " inputs = {k: v.to(trainer.args.device) for k, v in inputs.items()}\n", + " \n", + " # Forward pass\n", + " outputs = trainer.model(**inputs)\n", + " \n", + " # Handle different output formats\n", + " if isinstance(outputs, dict):\n", + " if \"loss\" in outputs:\n", + " loss = outputs[\"loss\"] / trainer.args.gradient_accumulation_steps\n", + " else:\n", + " # Calculate loss manually if not provided in outputs\n", + " # Get logits from outputs\n", + " if \"logits\" in outputs:\n", + " logits = outputs[\"logits\"]\n", + " # Get labels from inputs\n", + " labels = inputs.get(\"labels\")\n", + " if labels is not None:\n", + " # Calculate loss using cross-entropy\n", + " import torch.nn.functional as F\n", + " # Shift logits and labels for causal LM\n", + " 
shift_logits = logits[..., :-1, :].contiguous()\n", + " shift_labels = labels[..., 1:].contiguous()\n", + " loss = F.cross_entropy(\n", + " shift_logits.view(-1, shift_logits.size(-1)),\n", + " shift_labels.view(-1),\n", + " ignore_index=-100\n", + " ) / trainer.args.gradient_accumulation_steps\n", + " else:\n", + " print(f\"Warning: No labels in inputs, using dummy loss\")\n", + " loss = (outputs[\"logits\"].sum() * 0.0) / trainer.args.gradient_accumulation_steps\n", + " else:\n", + " print(f\"Warning: No logits or loss in outputs, using dummy loss\")\n", + " # Use any tensor from outputs for a dummy loss\n", + " dummy_tensor = next(iter(outputs.values()))\n", + " loss = (dummy_tensor.sum() * 0.0) / trainer.args.gradient_accumulation_steps\n", + " else:\n", + " loss = outputs.loss / trainer.args.gradient_accumulation_steps\n", + " \n", + " total_loss += loss.detach().float()\n", + " \n", + " # Backward pass\n", + " loss.backward()\n", + " \n", + " # Update weights on gradient accumulation steps or at the end\n", + " if ((step + 1) % trainer.args.gradient_accumulation_steps == 0) or (step == len(dataloader) - 1):\n", + " # Double check optimizer exists before using it\n", + " if trainer.optimizer is None:\n", + " print(f\"[{self.input}] Warning: Optimizer is None at step {step}. 
Creating optimizer.\")\n", + " # Create a simple optimizer\n", + " from torch.optim import AdamW\n", + " trainer.optimizer = AdamW(\n", + " trainer.model.parameters(), \n", + " lr=self.sub_round_args.learning_rate,\n", + " weight_decay=self.sub_round_args.weight_decay\n", + " )\n", + " # Create a simple scheduler\n", + " from transformers import get_scheduler\n", + " trainer.lr_scheduler = get_scheduler(\n", + " name=self.sub_round_args.lr_scheduler_type,\n", + " optimizer=trainer.optimizer,\n", + " num_warmup_steps=int(self.sub_round_args.max_steps * self.sub_round_args.warmup_ratio),\n", + " num_training_steps=self.sub_round_args.max_steps,\n", + " )\n", + " \n", + " trainer.optimizer.step()\n", + " trainer.lr_scheduler.step()\n", + " trainer.optimizer.zero_grad()\n", + " step_count += 1\n", + " \n", + " # Log progress\n", + " if step_count > 0 and step_count % 10 == 0:\n", + " print(f\"[{self.input}] Completed {step_count} steps, current loss: {total_loss/step_count:.4f}\")\n", + " \n", + " # Stop after max_steps\n", + " if step_count >= max_steps:\n", + " break\n", + " \n", + " # Calculate final training loss\n", + " self.loss = total_loss / step_count if step_count > 0 else 0\n", + " \n", + " print(f\"[{self.input}] Training completed, average loss: {self.loss:.4f}\")\n", + " \n", + " # Log memory and training metrics\n", + " self.memory_tracker.log(\"after_training\")\n", + " self.memory_tracker.log_loss(training_loss=self.loss)\n", + " self.memory_tracker.update_peak()\n", + " \n", + " # Save optimizer state for next round\n", + " if hasattr(trainer, 'optimizer') and trainer.optimizer is not None:\n", + " self.optimizer_states[self.input] = trainer.optimizer.state_dict()\n", + " # Create directory for saving optimizer state if needed\n", + " os.makedirs(f\"./optimizer_state/{self.input}\", exist_ok=True)\n", + " # Save optimizer state to disk as backup\n", + " torch.save(\n", + " trainer.optimizer.state_dict(), \n", + " 
f\"./optimizer_state/{self.input}/optimizer_round_{self.current_round}_update_{self.current_sub_round}.pt\"\n", + " )\n", + " \n", + " # Save model checkpoint\n", + " trainer.save_model(f\"./local_models/{self.input}/round_{self.current_round}_update_{self.current_sub_round}\")\n", + " self.training_completed = True\n", + " self.next(self.local_model_validation)\n", + "\n", + " @collaborator\n", + " def local_model_validation(self):\n", + " \"\"\"\n", + " Perform local model validation for a collaborator.\n", + "\n", + " This method evaluates the model using the provided training and evaluation datasets.\n", + " The validation score is stored, the PEFT parameters are updated, and the next step\n", + " in the process is triggered.\n", + " \"\"\"\n", + " trainer = SFTTrainer(\n", + " model=self.model,\n", + " args=self.sub_round_args, # Use sub-round specific args\n", + " peft_config=peft_config,\n", + " train_dataset=self.train_dataset,\n", + " eval_dataset=self.eval_dataset,\n", + " max_seq_length=1024,\n", + " dataset_text_field=\"text\",\n", + " tokenizer=tokenizer,\n", + " packing=True,\n", + " data_collator=transformers.DataCollatorForSeq2Seq(\n", + " tokenizer, pad_to_multiple_of=8, return_tensors=\"pt\", padding=True\n", + " ),\n", + " )\n", + " out = trainer.evaluate()\n", + " self.local_validation_score = out[\"eval_loss\"]\n", + " print(f\"[{self.input}] Local evaluation loss: {self.local_validation_score}\")\n", + " self.memory_tracker.log_loss(eval_loss=self.local_validation_score) # Log eval loss\n", + " self.peft_params = get_peft_model_state_dict(self.model)\n", + " print(f\"Doing local model validation for collaborator {self.input}\")\n", + " \n", + " # Display memory report for this collaborator\n", + " self.memory_tracker.report()\n", + " self.memory_stats = self.memory_tracker.get_stats()\n", + " self.next(self.join, exclude=[\"training_completed\", \"model\", \"memory_tracker\"])\n", + "\n", + " @aggregator\n", + " def join(self, inputs):\n", + " 
\"\"\"\n", + " Aggregate the results from all collaborators and update the model.\n", + "\n", + " This method calculates the average loss, aggregated model accuracy, and local model\n", + " accuracy from all collaborators. The model parameters are updated using Federated\n", + " Averaging (FedAvg), and the next round of the process is triggered if applicable.\n", + " \"\"\"\n", + " self.average_loss = sum(input.loss for input in inputs) / len(inputs)\n", + " self.aggregated_model_accuracy = sum(\n", + " input.agg_validation_score for input in inputs\n", + " ) / len(inputs)\n", + " self.local_model_accuracy = sum(\n", + " input.local_validation_score for input in inputs\n", + " ) / len(inputs)\n", + " print(\n", + " f\"[Round {self.current_round}, Update {self.current_sub_round}] Average aggregated model validation loss = {self.aggregated_model_accuracy}\"\n", + " )\n", + " print(f\"[Round {self.current_round}, Update {self.current_sub_round}] Average training loss = {self.average_loss}\")\n", + " print(f\"[Round {self.current_round}, Update {self.current_sub_round}] Average local model validation loss = {self.local_model_accuracy}\")\n", + "\n", + " # Store metrics in history for plotting trends\n", + " self.average_loss_history.append(self.average_loss)\n", + " self.agg_model_loss_history.append(self.aggregated_model_accuracy)\n", + " self.local_model_loss_history.append(self.local_model_accuracy)\n", + " \n", + " # Collect memory stats from all collaborators for this round\n", + " for input_data in inputs:\n", + " round_key = f\"round_{self.current_round}_update_{self.current_sub_round}\"\n", + " self.all_memory_stats[input_data.input][round_key] = input_data.memory_stats\n", + " # Update optimizer states from collaborators\n", + " if hasattr(input_data, 'optimizer_states') and input_data.optimizer_states.get(input_data.input) is not None:\n", + " self.optimizer_states[input_data.input] = input_data.optimizer_states[input_data.input]\n", + "\n", + " # Save 
aggregated optimizer states for debug/analysis\n", + " os.makedirs(\"./optimizer_state/aggregator\", exist_ok=True)\n", + " torch.save(\n", + " self.optimizer_states, \n", + " f\"./optimizer_state/aggregator/optimizers_round_{self.current_round}_update_{self.current_sub_round}.pt\"\n", + " )\n", + " \n", + " self.model = FedAvg([input.peft_params for input in inputs], self.model)\n", + " self.peft_params = get_peft_model_state_dict(self.model)\n", + "\n", + " # Save aggregated model after each sub-round update\n", + " save_dir = f\"./aggregated/model_round_{self.current_round}_update_{self.current_sub_round}\"\n", + " os.makedirs(save_dir, exist_ok=True)\n", + " self.model.save_pretrained(save_dir)\n", + " tokenizer.save_pretrained(f\"./aggregated/tokenizer_round_{self.current_round}_update_{self.current_sub_round}\")\n", + " \n", + " # Update round and sub-round counters\n", + " self.current_sub_round += 1\n", + " # Each update is treated as a partial update within a full round\n", + " # Update main round counter and reset sub-round when appropriate\n", + " if self.current_sub_round >= 2: # Default to 2 partial updates per round\n", + " self.current_sub_round = 0\n", + " self.current_round += 1\n", + " \n", + " if self.current_round < self.rounds:\n", + " self.next(\n", + " self.aggregated_model_validation,\n", + " foreach=\"collaborators\",\n", + " exclude=[\"model\"],\n", + " )\n", + " else:\n", + " self.next(self.end)\n", + "\n", + " @aggregator\n", + " def end(self):\n", + " \"\"\"\n", + " End the federated learning process.\n", + "\n", + " This method marks the end of the federated learning process and performs any\n", + " necessary cleanup or finalization steps.\n", + " \"\"\"\n", + " print(f\"This is the end of the flow for {self.quant_type} quantization\")\n", + " print(\"\\n===== Final Metrics =====\\n\")\n", + " print(f\"Average Training Loss: {self.average_loss_history[-1]:.4f}\")\n", + " print(f\"Final Aggregated Model Loss: 
{self.agg_model_loss_history[-1]:.4f}\")\n", + " print(f\"Final Local Model Loss: {self.local_model_loss_history[-1]:.4f}\")\n", + " \n", + " print(\"\\n===== Metric History =====\\n\")\n", + " print(\"Training Loss History:\")\n", + " for i, loss in enumerate(self.average_loss_history):\n", + " print(f\" Update {i}: {loss:.4f}\")\n", + " \n", + " print(\"\\nAggregated Model Loss History:\")\n", + " for i, loss in enumerate(self.agg_model_loss_history):\n", + " print(f\" Update {i}: {loss:.4f}\")\n", + " \n", + " print(\"\\nLocal Model Loss History:\")\n", + " for i, loss in enumerate(self.local_model_loss_history):\n", + " print(f\" Update {i}: {loss:.4f}\")\n", + " \n", + " print(\"\\n===== Memory Usage Summary Across All Rounds =====\\n\")\n", + " \n", + " # Print aggregated memory statistics\n", + " for collab, rounds_data in self.all_memory_stats.items():\n", + " print(f\"\\n==== {collab} Memory Usage Across Rounds/Updates ({self.quant_type}) ====\\n\")\n", + " for round_name, stats in rounds_data.items():\n", + " print(f\" {round_name}:\")\n", + " for metric, value in stats.items():\n", + " if value is not None:\n", + " if metric in ['training_loss', 'eval_loss', 'quant_type']:\n", + " if metric != 'quant_type':\n", + " print(f\" {metric}: {value:.4f}\")\n", + " else:\n", + " print(f\" {metric}: {value:.2f} MB\")\n", + " else:\n", + " print(f\" {metric}: Not recorded\")\n", + " print(\"-\" * 50)" + ] + }, + { + "cell_type": "markdown", + "id": "7bc8fe27", + "metadata": {}, + "source": [ + "## Run Federated Learning with 4-bit Quantization" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "38894111-41d9-4dd4-b1c8-eb7ec3cdd3e1", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "=============== Running with 4-bit Quantization ===============\n", + "\n", + "\n", + "Calling start\n", + "\u001b[94mPerforming initialization for model with 4bit 
quantization\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mUsing 5 main rounds with partial round updates\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 0, Update 0] Performing aggregated model validation for collaborator Portland with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.25it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " 
warnings.warn(\n", + "Generating train split: 912 examples [00:01, 622.96 examples/s]\u001b[0m\u001b[94mm\u001b[94m\n", + "Generating train split: 103 examples [00:00, 627.49 examples/s]\u001b[0m\u001b[94mm\u001b[94m\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n", + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:12]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.5811071991920471\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "Map: 100%|##########| 4464/4464 [00:01<00:00, 3487.12 examples/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "Map: 100%|##########| 496/496 [00:00<00:00, 3341.29 examples/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n", + "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`...\n", + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. 
use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.5313\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.5340\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.5390\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.5433\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.5464\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.5491\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.5525\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.5587\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.4287\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.4316\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.4328\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.4357\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.4377\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.4393\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.4421\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.4445\u001b[0m\u001b[94m\n", + 
"\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.3906\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.3917\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.3931\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.3945\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.3967\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.3976\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.3993\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.4006\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.3724\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.3728\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.3738\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.3752\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.3763\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.3773\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.3783\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.3791\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 50 steps, current loss: 0.3572\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Training completed, average loss: 0.3572\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: 
Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "Generating train split: 912 examples [00:01, 618.15 examples/s]\u001b[0m\u001b[94mm\u001b[94m\n", + "Generating train split: 103 examples [00:00, 632.61 examples/s]\u001b[0m\u001b[94mm\u001b[94m\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n", + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.3916991055011749\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 32696.12 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57374.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 54745.21 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 30342.61 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 41548.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 43451.02 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 32388.53 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57268.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 54745.21 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 32696.12 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57374.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 54745.21 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.3572\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.3917\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 0, Update 0] Performing aggregated model validation for collaborator Seattle with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.25it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "Generating train split: 912 examples [00:01, 631.44 examples/s]\u001b[0m\u001b[94mm\u001b[94m\n", + "Generating train split: 101 
examples [00:00, 625.21 examples/s]\u001b[0m\u001b[94mm\u001b[94m\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n", + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:12]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.5805659294128418\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "Map: 100%|##########| 4463/4463 [00:01<00:00, 3422.90 examples/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "Map: 100%|##########| 496/496 [00:00<00:00, 3404.21 examples/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n", + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. 
use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.5439\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.5469\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.5501\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.5523\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.5550\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.5580\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.5638\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.5660\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.4307\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.4329\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.4347\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.4358\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.4375\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.4432\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.4451\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.4487\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 
30 steps, current loss: 0.3982\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.3988\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.3996\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.4012\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.4025\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.4033\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.4041\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.4052\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.3717\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.3723\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.3728\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.3741\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.3749\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.3755\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.3769\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.3778\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 50 steps, current loss: 0.3601\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Training completed, average loss: 0.3601\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, 
dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "Generating train split: 912 examples [00:01, 639.46 examples/s]\u001b[0m\u001b[94mm\u001b[94m\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n", + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.3990606665611267\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 21814.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 47222.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 54883.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 19625.16 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 30754.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 54883.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 21639.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 47114.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 54883.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 21814.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 47222.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 54883.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.3601\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.3991\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling join\n", + "\u001b[94m[Round 0, Update 0] Average aggregated model validation loss = 0.5808365643024445\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 0, Update 0] Average training loss = 0.35865288972854614\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 0, Update 0] Average local model validation loss = 0.3953798860311508\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 0, Update 1] Performing aggregated model validation for collaborator Portland with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.25it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "Generating train split: 912 examples [00:01, 650.05 examples/s]\u001b[0m\u001b[94mm\u001b[94m\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n", + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.38848692178726196\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "Map: 100%|##########| 496/496 [00:00<00:00, 3328.62 examples/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n", + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. 
Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.2647\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.2668\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.2703\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.2734\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.2761\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.2782\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.2813\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.2857\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.2647\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.2676\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.2687\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.2708\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.2726\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.2740\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.2758\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.2780\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 
0.2695\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2704\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2717\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2729\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2748\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2756\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2772\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2783\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2720\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2724\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2732\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2745\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2752\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2762\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2770\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2778\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 50 steps, current loss: 0.2645\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Training completed, average loss: 0.2645\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, 
dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "Generating train split: 103 examples [00:00, 645.41 examples/s]\u001b[0m\u001b[94mm\u001b[94m\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n", + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.3903903663158417\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33547.22 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57726.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55734.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31364.12 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 31586.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 54883.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33378.03 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57670.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55734.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33547.22 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57726.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55734.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.2645\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.3904\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 0, Update 1] Performing aggregated model validation for collaborator Seattle with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.24it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.3931271433830261\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "Map: 100%|##########| 496/496 [00:00<00:00, 3406.01 examples/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n", + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. 
Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.2763\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.2785\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.2815\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.2831\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.2853\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.2878\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.2919\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.2938\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.2690\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.2709\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.2725\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.2735\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.2749\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.2798\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.2815\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.2844\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 
0.2730\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.2736\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.2743\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.2757\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.2770\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.2778\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.2785\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.2795\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2696\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2701\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2707\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2717\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2725\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2730\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2740\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2748\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 50 steps, current loss: 0.2659\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Training completed, average loss: 0.2659\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, 
packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "Generating train split: 101 examples [00:00, 653.25 examples/s]\u001b[0m\u001b[94mm\u001b[94m\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n", + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.3981446623802185\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33447.23 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57866.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55734.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31364.12 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 31506.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55734.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33378.03 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57810.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55734.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33447.23 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57866.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55734.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.2659\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.3981\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling join\n", + "\u001b[94m[Round 0, Update 1] Average aggregated model validation loss = 0.39080703258514404\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 0, Update 1] Average training loss = 0.2652348279953003\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 0, Update 1] Average local model validation loss = 0.3942675143480301\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 1, Update 0] Performing aggregated model validation for collaborator Portland with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.23it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " 
warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.38319262862205505\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Portland] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. 
use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Completed 10 steps, current loss: 0.2195\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.2213\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.2243\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.2270\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.2291\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.2310\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.2341\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.2371\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.2230\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.2253\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.2263\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.2280\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.2298\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.2311\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.2324\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.2346\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2331\u001b[0m\u001b[94m\n", + 
"\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2338\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2351\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2361\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2378\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2385\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2398\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2408\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2375\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2379\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2386\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2398\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2404\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2413\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2421\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2428\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 50 steps, current loss: 0.2315\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Training completed, average loss: 0.2315\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.392806738615036\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33706.76 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58286.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.26 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31523.66 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 31766.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55734.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33537.57 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58230.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.26 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33706.76 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58286.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.26 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.2315\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.3928\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 1, Update 0] Performing aggregated model validation for collaborator Seattle with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.24it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.3884606957435608\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Seattle] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. 
use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Completed 10 steps, current loss: 0.2279\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.2298\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.2329\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.2345\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.2363\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.2382\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.2412\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.2428\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.2262\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.2280\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.2295\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.2305\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.2317\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.2359\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.2375\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.2394\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.2341\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 
steps, current loss: 0.2347\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.2354\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.2367\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.2378\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.2386\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.2392\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.2402\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2352\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2356\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2361\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2369\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2376\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2381\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2390\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2397\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 50 steps, current loss: 0.2306\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Training completed, average loss: 0.2306\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.4016312062740326\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33606.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58166.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.26 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31523.66 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 31784.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.26 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33537.57 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58110.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.26 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33606.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58166.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.26 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.2306\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.4016\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling join\n", + "\u001b[94m[Round 1, Update 0] Average aggregated model validation loss = 0.3858266621828079\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 1, Update 0] Average training loss = 0.23103156685829163\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 1, Update 0] Average local model validation loss = 0.3972189724445343\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 1, Update 1] Performing aggregated model validation for collaborator Portland with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.24it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " 
warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.38549479842185974\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Portland] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. 
use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Completed 10 steps, current loss: 0.1714\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1729\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1755\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1777\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1793\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1809\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1836\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1854\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.1804\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.1824\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.1834\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.1847\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.1863\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.1876\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.1888\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.1907\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1972\u001b[0m\u001b[94m\n", + 
"\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1979\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1991\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2000\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2014\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2021\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2032\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.2043\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2048\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2052\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2057\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2068\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2073\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2081\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2088\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.2095\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 50 steps, current loss: 0.1995\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Training completed, average loss: 0.1995\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.401896208524704\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33706.76 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58226.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.27 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31523.66 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 31748.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.26 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33537.58 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58170.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.27 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33706.76 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58226.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.27 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.1995\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.4019\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 1, Update 1] Performing aggregated model validation for collaborator Seattle with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.24it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:12]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.3921719491481781\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Seattle] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. 
use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1773\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1793\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1823\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1835\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1851\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1866\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1891\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1905\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1832\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1849\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1863\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1870\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1881\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1913\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1927\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1940\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1960\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 
steps, current loss: 0.1966\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1973\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1984\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1993\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.2001\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.2007\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.2017\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1996\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2001\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2005\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2011\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2018\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2023\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2031\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.2037\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 50 steps, current loss: 0.1968\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Training completed, average loss: 0.1968\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.413899302482605\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33606.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57886.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.27 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31523.66 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 31770.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.27 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33537.58 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57830.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.27 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33606.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57886.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.27 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.1968\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.4139\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling join\n", + "\u001b[94m[Round 1, Update 1] Average aggregated model validation loss = 0.3888333737850189\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 1, Update 1] Average training loss = 0.19815459847450256\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 1, Update 1] Average local model validation loss = 0.4078977555036545\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 2, Update 0] Performing aggregated model validation for collaborator Portland with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.25it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " 
warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.39401885867118835\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Portland] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. 
use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Completed 10 steps, current loss: 0.1237\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1246\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1261\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1270\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1279\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1287\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1301\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1316\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.1355\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.1373\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.1381\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.1395\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.1409\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.1420\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.1430\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.1444\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1612\u001b[0m\u001b[94m\n", + 
"\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1619\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1628\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1637\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1649\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1656\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1666\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1675\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.1733\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.1737\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.1742\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.1751\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.1756\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.1762\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.1768\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.1775\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 50 steps, current loss: 0.1710\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Training completed, average loss: 0.1710\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.41375958919525146\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33706.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58186.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31523.67 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 31838.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.27 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33537.59 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58130.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33706.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58186.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.1710\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.4138\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 2, Update 0] Performing aggregated model validation for collaborator Seattle with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.25it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:12]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.40143540501594543\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Seattle] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. 
use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1366\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1386\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1411\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1418\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1435\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1452\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1468\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1478\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1464\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1478\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1488\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1494\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1505\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1524\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1537\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1551\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1653\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 
steps, current loss: 0.1658\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1664\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1673\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1681\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1689\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1695\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1704\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1743\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1748\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1751\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1757\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1764\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1768\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1775\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1780\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 50 steps, current loss: 0.1720\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Training completed, average loss: 0.1720\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.42323189973831177\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33606.78 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58206.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31523.67 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 31798.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33537.59 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58150.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33606.78 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58206.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.1720\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.4232\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling join\n", + "\u001b[94m[Round 2, Update 0] Average aggregated model validation loss = 0.3977271318435669\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 2, Update 0] Average training loss = 0.17152628302574158\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 2, Update 0] Average local model validation loss = 0.4184957444667816\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 2, Update 1] Performing aggregated model validation for collaborator Portland with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.25it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " 
warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.40234553813934326\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Portland] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. 
use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Completed 10 steps, current loss: 0.0984\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0989\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0996\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1004\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1011\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1018\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1027\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.1035\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 50 steps, current loss: 0.1473\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Training completed, average loss: 0.1473\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.4498145878314972\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33706.78 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57986.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31523.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 31762.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33537.59 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57930.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33706.78 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57986.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.1473\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.4498\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 2, Update 1] Performing aggregated model validation for collaborator Seattle with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.24it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:12]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.41048040986061096\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Seattle] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. 
use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1032\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1041\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1049\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1058\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1065\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1075\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1085\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.1092\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1116\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1130\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1140\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1148\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1160\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1172\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1186\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.1195\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1353\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 
steps, current loss: 0.1358\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1363\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1370\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1377\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1384\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1390\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1397\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1460\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1463\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1467\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1472\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1478\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1482\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1489\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1493\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 50 steps, current loss: 0.1441\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Training completed, average loss: 0.1441\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.44478899240493774\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33606.79 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58266.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31523.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 31822.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33537.59 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58210.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33606.79 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58266.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.1441\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.4448\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling join\n", + "\u001b[94m[Round 2, Update 1] Average aggregated model validation loss = 0.4064129739999771\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 2, Update 1] Average training loss = 0.14573591947555542\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 2, Update 1] Average local model validation loss = 0.44730179011821747\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 3, Update 0] Performing aggregated model validation for collaborator Portland with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.25it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " 
warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.4368877112865448\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Portland] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. 
use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Completed 10 steps, current loss: 0.0855\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0863\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0871\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0882\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0889\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0895\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0902\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0911\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0842\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0857\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0862\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0868\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0874\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0882\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0887\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0894\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1078\u001b[0m\u001b[94m\n", + 
"\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1084\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1092\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1099\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1108\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1113\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1121\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.1129\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.1229\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.1232\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.1235\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.1241\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.1245\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.1249\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.1254\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.1259\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 50 steps, current loss: 0.1210\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Training completed, average loss: 0.1210\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.4723173975944519\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33706.78 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58146.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.29 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31523.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 31782.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33537.60 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58090.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.29 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33706.78 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58146.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.29 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.1210\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.4723\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 3, Update 0] Performing aggregated model validation for collaborator Seattle with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.24it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:12]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.44539061188697815\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Seattle] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. 
use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0859\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0867\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0880\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0891\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0899\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0907\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0927\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0936\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0927\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0941\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0948\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0954\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0963\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0970\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0981\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0987\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1174\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 
steps, current loss: 0.1178\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1183\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1188\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1197\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1203\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1210\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.1218\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1301\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1304\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1308\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1312\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1317\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1321\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1326\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1330\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 50 steps, current loss: 0.1277\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Training completed, average loss: 0.1277\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.4785761833190918\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33606.79 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58106.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.29 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31523.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 31914.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.29 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33537.60 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58050.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.29 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33606.79 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58106.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.29 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.1277\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.4786\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling join\n", + "\u001b[94m[Round 3, Update 0] Average aggregated model validation loss = 0.4411391615867615\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 3, Update 0] Average training loss = 0.12434957921504974\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 3, Update 0] Average local model validation loss = 0.47544679045677185\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 3, Update 1] Performing aggregated model validation for collaborator Portland with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.25it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " 
warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.47256991267204285\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Portland] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. 
use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Completed 10 steps, current loss: 0.0721\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0727\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0735\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0744\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0752\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0759\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0768\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0777\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0733\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0750\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0756\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0761\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0766\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0771\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0780\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0786\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 50 steps, current loss: 0.1022\u001b[0m\u001b[94m\n", + 
"\u001b[0m\u001b[94m[Portland] Training completed, average loss: 0.1022\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.5270882248878479\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33706.79 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58346.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31523.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 31804.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.29 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33537.61 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58290.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33706.79 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58346.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.1022\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.5271\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 3, Update 1] Performing aggregated model validation for collaborator Seattle with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.24it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:12]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.48190006613731384\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Seattle] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. 
use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0685\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0691\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0700\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0707\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0716\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0723\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0734\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0744\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0731\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0741\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0750\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0754\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0761\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0766\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0772\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0777\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0940\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 
steps, current loss: 0.0945\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0949\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0954\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0960\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0966\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0972\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0978\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1058\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1061\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1064\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1068\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1073\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1076\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1082\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.1085\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 50 steps, current loss: 0.1042\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Training completed, average loss: 0.1042\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.528550386428833\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33606.80 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58384.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31523.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 31838.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33537.61 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58328.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33606.80 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58384.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.1042\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.5286\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling join\n", + "\u001b[94m[Round 3, Update 1] Average aggregated model validation loss = 0.47723498940467834\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 3, Update 1] Average training loss = 0.10317578911781311\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 3, Update 1] Average local model validation loss = 0.5278193056583405\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 4, Update 0] Performing aggregated model validation for collaborator Portland with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.25it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " 
warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Completed 30 steps, current loss: 0.0727\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.0730\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.0734\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.0739\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.0744\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.0748\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.0754\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] 
Completed 30 steps, current loss: 0.0759\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.0824\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.0826\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.0828\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.0831\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.0832\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.0835\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.0838\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.0841\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 50 steps, current loss: 0.0805\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Training completed, average loss: 0.0805\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.5606555938720703\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33706.80 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58806.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31523.70 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 31916.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33537.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58748.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33706.80 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58806.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.0805\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.5607\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 4, Update 0] Performing aggregated model validation for collaborator Seattle with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.25it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:12]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.5261728763580322\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Seattle] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. 
use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0576\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0584\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0589\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0594\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0602\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0610\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0617\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0622\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0613\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0622\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0627\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0631\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0637\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0642\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0649\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0653\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0763\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 
steps, current loss: 0.0766\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0770\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0773\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0778\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0783\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0788\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0791\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.0871\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.0873\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.0875\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.0878\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.0882\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.0885\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.0891\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.0893\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 50 steps, current loss: 0.0857\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Training completed, average loss: 0.0857\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.5728362202644348\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33606.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58444.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31523.70 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 32006.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33537.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58388.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33606.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58444.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.0857\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.5728\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling join\n", + "\u001b[94m[Round 4, Update 0] Average aggregated model validation loss = 0.5190570056438446\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 4, Update 0] Average training loss = 0.08311298489570618\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 4, Update 0] Average local model validation loss = 0.5667459070682526\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 4, Update 1] Performing aggregated model validation for collaborator Portland with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.26it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " 
warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.5445127487182617\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Portland] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. 
use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Completed 10 steps, current loss: 0.0533\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0536\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0543\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0551\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0557\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0563\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0570\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 10 steps, current loss: 0.0577\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0569\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0578\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0583\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0587\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0590\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0595\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0600\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 20 steps, current loss: 0.0604\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.0669\u001b[0m\u001b[94m\n", + 
"\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.0672\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.0675\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.0677\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.0682\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.0684\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.0688\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 30 steps, current loss: 0.0694\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.0748\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.0750\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.0752\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.0755\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.0757\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.0759\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.0763\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 40 steps, current loss: 0.0766\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Completed 50 steps, current loss: 0.0734\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Portland] Training completed, average loss: 0.0734\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.5720435380935669\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33706.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57946.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31523.71 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 31786.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33537.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57888.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33706.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 57946.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.0734\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.5720\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 4, Update 1] Performing aggregated model validation for collaborator Seattle with 4bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.24it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:12]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.562188982963562\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Seattle] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Starting partial epoch training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:838: UserWarning: torch.utils.checkpoint: the use_reentrant parameter should be passed explicitly. In version 2.5 we will raise an exception if use_reentrant is not passed. 
use_reentrant=False is recommended, but if you need to preserve the current default behavior, you can pass use_reentrant=True. Refer to docs for more details on the differences between the two variants.\n", + " return fn(*args, **kwargs)\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0550\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0556\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0563\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0569\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0575\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0582\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0598\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 10 steps, current loss: 0.0602\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0583\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0590\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0594\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0597\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0605\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0610\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0614\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 20 steps, current loss: 0.0619\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0678\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 
steps, current loss: 0.0680\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0683\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0686\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0689\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0692\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0696\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 30 steps, current loss: 0.0699\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.0732\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.0734\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.0736\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.0738\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.0741\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.0743\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.0746\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 40 steps, current loss: 0.0748\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Completed 50 steps, current loss: 0.0720\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Seattle] Training completed, average loss: 0.0720\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:13]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.6029046773910522\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (4bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33606.82 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58264.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 31523.71 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 31886.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33537.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58208.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 33606.82 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 58264.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: 0.0720\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.6029\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling join\n", + "\u001b[94m[Round 4, Update 1] Average aggregated model validation loss = 0.5533508658409119\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 4, Update 1] Average training loss = 0.07266537845134735\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 4, Update 1] Average local model validation loss = 0.5874741077423096\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling end\n", + "\u001b[94mThis is the end of the flow for 4bit quantization\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "===== Final Metrics =====\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mAverage Training Loss: 0.0727\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mFinal Aggregated Model Loss: 0.5534\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mFinal Local Model Loss: 0.5875\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "===== Metric History =====\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTraining Loss History:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 0: 0.3587\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 1: 0.2652\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 2: 0.2310\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 3: 0.1982\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 4: 0.1715\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 5: 0.1457\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 6: 0.1243\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 7: 0.1032\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 8: 0.0831\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 9: 0.0727\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Aggregated Model Loss History:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 0: 0.5808\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 1: 0.3908\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 2: 0.3858\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 3: 0.3888\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 4: 
0.3977\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 5: 0.4064\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 6: 0.4411\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 7: 0.4772\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 8: 0.5191\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 9: 0.5534\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Local Model Loss History:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 0: 0.3954\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 1: 0.3943\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 2: 0.3972\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 3: 0.4079\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 4: 0.4185\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 5: 0.4473\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 6: 0.4754\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 7: 0.5278\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 8: 0.5667\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 9: 0.5875\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "===== Memory Usage Summary Across All Rounds =====\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Portland Memory Usage Across Rounds/Updates (4bit) ====\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_0_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 32696.12 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 57374.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 54745.21 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.3572\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.3917\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 30342.61 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 41548.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 43451.02 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m 
before_training_allocated: 32388.53 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 57268.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 54745.21 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 32696.12 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 57374.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 54745.21 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_0_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33547.22 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 57726.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55734.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.2645\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.3904\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31364.12 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 31586.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 54883.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 33378.03 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 57670.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55734.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33547.22 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 57726.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 55734.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_1_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33706.76 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 58286.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55894.26 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 
0.2315\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.3928\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31523.66 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 31766.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55734.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 33537.57 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 58230.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55894.26 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33706.76 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 58286.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 55894.26 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_1_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33706.76 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 58226.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55894.27 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.1995\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.4019\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31523.66 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 31748.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55894.26 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 33537.58 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 58170.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55894.27 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33706.76 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 58226.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 
55894.27 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_2_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33706.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 58186.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.1710\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.4138\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31523.67 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 31838.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55894.27 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 33537.59 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 58130.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33706.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 58186.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_2_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33706.78 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 57986.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.1473\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.4498\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31523.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 31762.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 33537.59 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m 
before_training_reserved: 57930.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33706.78 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 57986.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_3_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33706.78 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 58146.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55894.29 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.1210\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.4723\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31523.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 31782.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 33537.60 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 58090.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55894.29 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33706.78 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 58146.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 55894.29 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_3_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33706.79 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 58346.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.1022\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 
0.5271\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31523.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 31804.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55894.29 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 33537.61 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 58290.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33706.79 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 58346.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_4_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33706.80 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 58806.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.0805\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.5607\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31523.70 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 31916.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 33537.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 58748.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33706.80 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 58806.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m 
round_4_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33706.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 57946.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.0734\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.5720\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31523.71 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 31786.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 33537.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 57888.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33706.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 57946.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Seattle Memory Usage Across Rounds/Updates (4bit) ====\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_0_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 21814.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 47222.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 54883.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.3601\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.3991\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 19625.16 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 30754.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 
54883.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 21639.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 47114.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 54883.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 21814.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 47222.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 54883.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_0_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33447.23 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 57866.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55734.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.2659\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.3981\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31364.12 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 31506.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55734.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 33378.03 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 57810.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55734.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33447.23 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 57866.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 55734.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_1_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33606.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 58166.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55894.26 
MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.2306\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.4016\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31523.66 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 31784.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55894.26 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 33537.57 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 58110.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55894.26 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33606.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 58166.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 55894.26 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_1_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33606.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 57886.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55894.27 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.1968\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.4139\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31523.66 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 31770.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55894.27 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 33537.58 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 57830.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55894.27 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33606.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 57886.00 
MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 55894.27 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_2_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33606.78 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 58206.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.1720\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.4232\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31523.67 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 31798.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 33537.59 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 58150.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33606.78 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 58206.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_2_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33606.79 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 58266.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.1441\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.4448\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31523.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 31822.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m 
before_training_allocated: 33537.59 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 58210.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33606.79 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 58266.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 55894.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_3_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33606.79 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 58106.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55894.29 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.1277\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.4786\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31523.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 31914.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55894.29 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 33537.60 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 58050.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55894.29 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33606.79 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 58106.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 55894.29 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_3_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33606.80 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 58384.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 
0.1042\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.5286\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31523.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 31838.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 33537.61 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 58328.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33606.80 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 58384.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 55894.30 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_4_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33606.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 58444.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.0857\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.5728\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31523.70 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 32006.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 33537.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 58388.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33606.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 58444.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 
55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_4_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 33606.82 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 58264.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: 0.0720\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.6029\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 31523.71 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 31886.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 33537.62 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 58208.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 33606.82 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 58264.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + } + ], + "source": [ + "# Setup participants\n", + "aggregator = Aggregator()\n", + "collaborators = [\n", + " Collaborator(name=\"Portland\"),\n", + " Collaborator(name=\"Seattle\")\n", + "]\n", + "\n", + "# Assign data shards\n", + "for idx, colab in enumerate(collaborators):\n", + " colab.private_attributes = {\n", + " \"train_dataset\": train_dataset.shard(len(collaborators), idx),\n", + " \"eval_dataset\": eval_dataset.shard(len(collaborators), idx)\n", + " }\n", + "\n", + "# Run with 4-bit quantization\n", + "print(\"\\n=============== Running with 4-bit Quantization ===============\\n\")\n", + "bnb_config = bnb_config_4bit # Set active config to 4-bit\n", 
+ "runtime = LocalRuntime(aggregator=aggregator, collaborators=collaborators)\n", + "flflow_4bit = FederatedFlow(model, rounds=5, quant_type=\"4bit\") # Reduce to 1 round\n", + "flflow_4bit.runtime = runtime\n", + "flflow_4bit.run()" + ] + }, + { + "cell_type": "markdown", + "id": "87c4865a", + "metadata": {}, + "source": [ + "## Run Federated Learning with 8-bit Quantization" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "93c60404", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Cleared CUDA cache between runs\n", + "\n", + "=============== Running with 8-bit Quantization ===============\n", + "\n", + "Loading model with 8-bit quantization on CPU first...\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|██████████| 6/6 [00:02<00:00, 2.05it/s]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling start\n", + "\u001b[94mPerforming initialization for model with 8bit quantization\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mUsing 5 main rounds with partial round updates\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 0, Update 0] Performing aggregated model validation for collaborator Portland with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.27it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.561655580997467\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 
during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:29, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.728600
40.559300
60.475600
80.319500
100.310800
120.300300
140.244800
160.375300
180.305800
200.279400
220.359400
240.257100

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:12]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.3953164219856262\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39210.98 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 59636.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 56253.51 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 36814.91 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38196.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39210.98 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 59636.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 56253.51 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39008.34 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 42136.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 56253.51 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.3953\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 0, Update 0] Performing aggregated model validation for collaborator Seattle with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.26it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.5638197064399719\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 
during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:30, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.845500
40.502000
60.451700
80.422500
100.285700
120.259400
140.324700
160.300600
180.309400
200.332400
220.378600
240.347400

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You 
passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.40347251296043396\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39317.46 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 59596.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 56364.86 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 36974.44 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38276.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 56364.86 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39317.46 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 59596.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 56364.86 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39149.93 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 42176.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 56364.86 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.4035\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", 
+ "Calling join\n", + "\u001b[94m[Round 0, Update 0] Average aggregated model validation loss = 0.5627376437187195\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 0, Update 0] Average training loss = inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 0, Update 0] Average local model validation loss = 0.3993944674730301\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 0, Update 1] Performing aggregated model validation for collaborator Portland with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.25it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + 
"\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.395594984292984\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Portland] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + 
"\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [22/25 01:14 < 00:11, 0.27 it/s, Epoch 0.04/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.287200
40.248800
60.295900
80.238400
100.245100
120.248000
140.212000
160.306900
180.248200
200.230800

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.39317232370376587\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40105.33 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60902.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57210.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 37772.41 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38798.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57164.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40105.33 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60902.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57210.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39841.12 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 42898.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57210.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.3932\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", 
+ "Calling join\n", + "\u001b[94m[Round 0, Update 1] Average aggregated model validation loss = 0.3971341550350189\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 0, Update 1] Average training loss = inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 0, Update 1] Average local model validation loss = 0.39462728798389435\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 1, Update 0] Performing aggregated model validation for collaborator Portland with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.24it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + 
"\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.3897101879119873\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Portland] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + 
"\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:29, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.237800
40.217600
60.244500
80.208000
100.210300
120.213500
140.190500
160.264600
180.209000
200.189600
220.251900
240.158800

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.40585511922836304\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40210.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 61118.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57272.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 37825.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38822.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57214.60 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40210.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 61118.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57272.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39916.32 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 42894.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57272.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.4059\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 1, Update 0] Performing aggregated model validation for collaborator Seattle with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.24it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.3935319185256958\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Seattle] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + 
"\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:29, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.268500
40.193500
60.218900
80.268700
100.190400
120.189700
140.214800
160.211600
180.231400
200.240300
220.246100
240.177200

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.40515169501304626\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40215.71 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60930.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57319.97 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 37878.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38916.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57272.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40215.71 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60930.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57319.97 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39949.17 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 42986.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57319.97 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.4052\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", 
+ "Calling join\n", + "\u001b[94m[Round 1, Update 0] Average aggregated model validation loss = 0.39162105321884155\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 1, Update 0] Average training loss = inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 1, Update 0] Average local model validation loss = 0.40550340712070465\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 1, Update 1] Performing aggregated model validation for collaborator Portland with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.24it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + 
"\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.3948669135570526\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Portland] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + 
"\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:29, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.184200
40.176300
60.187500
80.161600
100.176600
120.178600
140.169600
160.213200
180.169800
200.152300
220.184800
240.121900

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.4210392236709595\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40210.16 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60316.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 37825.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38794.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40210.16 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60316.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39911.82 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 42890.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.4210\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 1, Update 1] Performing aggregated model validation for collaborator Seattle with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.24it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.39981263875961304\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Seattle] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + 
"\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:30, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.211300
40.154100
60.163600
80.212400
100.163400
120.166300
140.176000
160.181400
180.182900
200.197100
220.201300
240.147700

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.4217308461666107\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40218.59 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 61308.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 37878.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38902.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40218.59 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 61308.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39946.36 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 42924.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.4217\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", 
+ "Calling join\n", + "\u001b[94m[Round 1, Update 1] Average aggregated model validation loss = 0.3973397761583328\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 1, Update 1] Average training loss = inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 1, Update 1] Average local model validation loss = 0.4213850349187851\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 2, Update 0] Performing aggregated model validation for collaborator Portland with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.25it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + 
"\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.40616321563720703\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Portland] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + 
"\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:29, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.128500
40.116900
60.109700
80.119700
100.154000
120.148100
140.156600
160.162500
180.128900
200.127000
220.123400
240.083500

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.5076923370361328\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40203.53 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 61136.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 37825.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38848.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40203.53 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 61136.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39909.20 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 42830.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.5077\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 2, Update 0] Performing aggregated model validation for collaborator Seattle with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.25it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.41194236278533936\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Seattle] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + 
"\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:29, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.150600
40.110600
60.114500
80.168300
100.139400
120.133200
140.144500
160.158400
180.144300
200.149400
220.145400
240.108200

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.4701308310031891\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40210.40 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 61008.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 37878.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38938.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40210.40 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 61008.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39941.88 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 43064.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.4701\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", 
+ "Calling join\n", + "\u001b[94m[Round 2, Update 0] Average aggregated model validation loss = 0.4090527892112732\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 2, Update 0] Average training loss = inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 2, Update 0] Average local model validation loss = 0.48891158401966095\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 2, Update 1] Performing aggregated model validation for collaborator Portland with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.24it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + 
"\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:12]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.4745386242866516\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Portland] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + 
"\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:29, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.077900
40.073000
60.103700
80.090100
100.109100
120.110200
140.128100
160.140200
180.123900
200.105000
220.098300
240.070000

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.5099506974220276\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40192.70 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 61154.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 37825.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38856.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40192.70 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 61154.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39908.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 42890.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.5100\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 2, Update 1] Performing aggregated model validation for collaborator Seattle with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.24it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.48276180028915405\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Seattle] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + 
"\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:29, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.086700
40.066700
60.083300
80.140100
100.135800
120.124500
140.128400
160.135900
180.132500
200.125100
220.109400
240.079100

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.5275096893310547\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40199.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60928.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 37878.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38960.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40199.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60928.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39946.38 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 42922.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.5275\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", 
+ "Calling join\n", + "\u001b[94m[Round 2, Update 1] Average aggregated model validation loss = 0.47865021228790283\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 2, Update 1] Average training loss = inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 2, Update 1] Average local model validation loss = 0.5187301933765411\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 3, Update 0] Performing aggregated model validation for collaborator Portland with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.25it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + 
"\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.5035024285316467\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Portland] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + 
"\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:29, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.049000
40.054900
60.076800
80.087800
100.106500
120.091100
140.111600
160.129900
180.105400
200.090700
220.099800
240.083200

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:12]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.5614020824432373\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40187.96 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60596.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 37825.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38822.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40187.96 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60596.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39911.37 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 42870.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.5614\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 3, Update 0] Performing aggregated model validation for collaborator Seattle with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.23it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.5139071941375732\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Seattle] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + 
"\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:30, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.055200
40.049700
60.085100
80.116400
100.106000
120.111700
140.102500
160.114700
180.106000
200.107500
220.085900
240.080100

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.5329421162605286\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40194.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 61068.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 37878.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38914.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40194.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 61068.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39942.97 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 42944.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.5329\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", 
+ "Calling join\n", + "\u001b[94m[Round 3, Update 0] Average aggregated model validation loss = 0.50870481133461\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 3, Update 0] Average training loss = inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 3, Update 0] Average local model validation loss = 0.5471720993518829\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 3, Update 1] Performing aggregated model validation for collaborator Portland with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.24it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + 
"\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.5240783095359802\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Portland] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + 
"\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:29, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.041300
40.044000
60.060200
80.074900
100.080700
120.078300
140.096000
160.111400
180.093000
200.079100
220.084700
240.066400

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:12]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.5784252882003784\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40185.57 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60476.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 37825.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38820.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40185.57 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60476.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39910.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 42950.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.5784\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 3, Update 1] Performing aggregated model validation for collaborator Seattle with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.25it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.5348654389381409\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Seattle] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + 
"\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:29, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.047700
40.042700
60.064300
80.092000
100.089600
120.105100
140.101900
160.093900
180.090200
200.093600
220.081300
240.066200

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.5685837864875793\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40193.50 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60808.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 37878.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38936.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40193.50 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60808.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39946.86 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 42922.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.5686\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", 
+ "Calling join\n", + "\u001b[94m[Round 3, Update 1] Average aggregated model validation loss = 0.5294718742370605\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 3, Update 1] Average training loss = inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 3, Update 1] Average local model validation loss = 0.5735045373439789\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 4, Update 0] Performing aggregated model validation for collaborator Portland with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.23it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + 
"\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.5471141934394836\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Portland] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + 
"\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:29, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.033400
40.040500
60.064200
80.065800
100.077300
120.071600
140.080400
160.088800
180.082500
200.062700
220.066100
240.056600

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n", + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:12]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.5927156209945679\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40181.67 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60874.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 37825.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38838.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40181.67 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60874.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39912.48 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 42850.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.5927\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 4, Update 0] Performing aggregated model validation for collaborator Seattle with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.24it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.5571405291557312\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Seattle] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + 
"\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:29, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.035800
40.037800
60.059100
80.089500
100.071900
120.084300
140.088000
160.095700
180.074300
200.082400
220.071100
240.058900

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.5642604827880859\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40189.73 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 61506.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 37878.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38928.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40189.73 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 61506.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39944.91 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 43022.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.5643\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", 
+ "Calling join\n", + "\u001b[94m[Round 4, Update 0] Average aggregated model validation loss = 0.5521273612976074\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 4, Update 0] Average training loss = inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 4, Update 0] Average local model validation loss = 0.5784880518913269\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 4, Update 1] Performing aggregated model validation for collaborator Portland with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.25it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + 
"\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mPortland evaluation loss: 0.5503019094467163\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Portland] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + 
"\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:29, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.033700
40.033800
60.052200
80.071900
100.063400
120.057900
140.065800
160.068000
180.085900
200.057100
220.056000
240.051500

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:12]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Portland] Local evaluation loss: 0.5799501538276672\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Portland\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Portland (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40182.25 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60476.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 37825.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38820.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40182.25 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60476.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39911.18 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 42930.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.5800\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + 
"\n", + "Calling aggregated_model_validation\n", + "\u001b[94m[Round 4, Update 1] Performing aggregated model validation for collaborator Seattle with 8bit\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Loading checkpoint shards: 100%|##########| 6/6 [00:04<00:00, 1.23it/s]\u001b[0m\u001b[94m\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training with 50 steps\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field. Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94mSeattle evaluation loss: 0.5621325373649597\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling train\n", + "\u001b[94m[Seattle] Restoring optimizer state\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + 
"\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [25/25 01:29, Epoch 0/1]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss
20.038600
40.032600
60.056700
80.066900
100.062500
120.060700
140.078300
160.075200
180.063500
200.071600
220.064000
240.045200

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", 
+ " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Training completed, average loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/huggingface_hub/utils/_deprecation.py:100: FutureWarning: Deprecated argument(s) used in '__init__': max_seq_length, dataset_text_field, packing. 
Will not be supported from version '0.13.0'.\n", + "\n", + "Deprecated positional argument(s) used in SFTTrainer, please use the SFTConfig to set these arguments instead.\n", + " warnings.warn(message, FutureWarning)\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:212: UserWarning: You passed a `packing` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:300: UserWarning: You passed a `max_seq_length` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:328: UserWarning: You passed a `dataset_text_field` argument to the SFTTrainer, the value you passed will override the one in the `SFTConfig`.\n", + " warnings.warn(\n", + "\u001b[0mmax_steps is given, it will override any value given in num_train_epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Calling local_model_validation\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/trl/trainer/sft_trainer.py:428: UserWarning: You passed `packing=True` to the SFTTrainer/SFTConfig, and you are training your model with `max_steps` strategy. 
The dataset will be iterated until the `max_steps` are reached.\n", + " warnings.warn(\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.float32 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m\u001b[94m/home/azureuser/env_name/lib/python3.10/site-packages/bitsandbytes/autograd/_functions.py:315: UserWarning: MatMul8bitLt: inputs will be cast from torch.bfloat16 to float16 during quantization\n", + " warnings.warn(f\"MatMul8bitLt: inputs will be cast from {A.dtype} to float16 during quantization\")\n", + "\u001b[0m" + ] + }, + { + "data": { + "text/html": [ + "\n", + "

\n", + " \n", + " \n", + " [13/13 00:11]\n", + "
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[94m[Seattle] Local evaluation loss: 0.5817354321479797\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mDoing local model validation for collaborator Seattle\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Memory Usage Report for Seattle (8bit) ====\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mPeak Memory Usage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40191.22 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60688.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Memory Usage by Stage:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 37878.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 38938.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 40191.22 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 60688.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Allocated: 39944.87 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Reserved: 43002.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Max Allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Performance Metrics:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Evaluation Loss: 0.5817\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0mShould transfer from local_model_validation to join\n", + "\n", 
+ "Calling join\n", + "\u001b[94m[Round 4, Update 1] Average aggregated model validation loss = 0.556217223405838\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 4, Update 1] Average training loss = inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m[Round 4, Update 1] Average local model validation loss = 0.5808427929878235\u001b[0m\u001b[94m\n", + "\u001b[0m\n", + "Calling end\n", + "\u001b[94mThis is the end of the flow for 8bit quantization\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "===== Final Metrics =====\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mAverage Training Loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mFinal Aggregated Model Loss: 0.5562\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mFinal Local Model Loss: 0.5808\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "===== Metric History =====\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94mTraining Loss History:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 0: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 1: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 2: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 3: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 4: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 5: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 6: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 7: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 8: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 9: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Aggregated Model Loss History:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 0: 0.5627\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 1: 0.3971\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 2: 0.3916\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 3: 0.3973\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 4: 0.4091\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 5: 
0.4787\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 6: 0.5087\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 7: 0.5295\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 8: 0.5521\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 9: 0.5562\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "Local Model Loss History:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 0: 0.3994\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 1: 0.3946\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 2: 0.4055\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 3: 0.4214\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 4: 0.4889\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 5: 0.5187\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 6: 0.5472\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 7: 0.5735\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 8: 0.5785\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m Update 9: 0.5808\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "===== Memory Usage Summary Across All Rounds =====\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Portland Memory Usage Across Rounds/Updates (8bit) ====\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_0_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 39210.98 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 59636.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 56253.51 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.3953\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 36814.91 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38196.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 55894.31 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 39210.98 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m 
before_training_reserved: 59636.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 56253.51 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39008.34 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 42136.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 56253.51 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_0_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 40100.06 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 60872.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57164.02 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.3961\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 37719.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38654.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 56518.59 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40100.06 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 60872.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 57164.02 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39808.58 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 42766.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57164.02 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_1_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 40210.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 61118.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57272.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.4059\u001b[0m\u001b[94m\n", + 
"\u001b[0m\u001b[94m model_load_allocated: 37825.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38822.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 57214.60 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40210.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 61118.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 57272.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39916.32 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 42894.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57272.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_1_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 40210.16 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 60316.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.4210\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 37825.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38794.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40210.16 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 60316.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39911.82 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 42890.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_2_update_0:\u001b[0m\u001b[94m\n", + 
"\u001b[0m\u001b[94m peak_allocated: 40203.53 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 61136.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.5077\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 37825.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38848.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40203.53 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 61136.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39909.20 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 42830.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_2_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 40192.70 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 61154.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.5100\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 37825.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38856.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40192.70 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 61154.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 
57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39908.28 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 42890.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_3_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 40187.96 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 60596.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.5614\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 37825.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38822.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40187.96 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 60596.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39911.37 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 42870.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_3_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 40185.57 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 60476.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.5784\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 37825.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m 
model_load_reserved: 38820.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40185.57 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 60476.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39910.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 42950.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_4_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 40181.67 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 60874.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.5927\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 37825.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38838.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40181.67 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 60874.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39912.48 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 42850.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_4_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 40182.25 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m 
peak_reserved: 60476.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.5800\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 37825.69 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38820.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40182.25 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 60476.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39911.18 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 42930.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m\n", + "==== Seattle Memory Usage Across Rounds/Updates (8bit) ====\n", + "\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_0_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 39317.46 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 59596.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 56364.86 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.4035\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 36974.44 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38276.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 56364.86 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 39317.46 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m 
before_training_reserved: 59596.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 56364.86 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39149.93 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 42176.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 56364.86 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_0_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 40105.33 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 60902.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57210.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.3932\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 37772.41 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38798.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 57164.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40105.33 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 60902.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 57210.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39841.12 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 42898.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57210.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_1_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 40215.71 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 60930.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57319.97 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.4052\u001b[0m\u001b[94m\n", + 
"\u001b[0m\u001b[94m model_load_allocated: 37878.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38916.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 57272.77 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40215.71 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 60930.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 57319.97 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39949.17 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 42986.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57319.97 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_1_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 40218.59 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 61308.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.4217\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 37878.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38902.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40218.59 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 61308.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39946.36 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 42924.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_2_update_0:\u001b[0m\u001b[94m\n", + 
"\u001b[0m\u001b[94m peak_allocated: 40210.40 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 61008.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.4701\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 37878.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38938.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40210.40 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 61008.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39941.88 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 43064.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_2_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 40199.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 60928.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.5275\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 37878.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38960.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40199.68 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 60928.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 
57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39946.38 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 42922.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_3_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 40194.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 61068.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.5329\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 37878.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38914.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40194.72 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 61068.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39942.97 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 42944.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_3_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 40193.50 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 60808.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.5686\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 37878.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m 
model_load_reserved: 38936.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40193.50 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 60808.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39946.86 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 42922.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_4_update_0:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 40189.73 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_reserved: 61506.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.5643\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 37878.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38928.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40189.73 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 61506.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39944.91 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 43022.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m round_4_update_1:\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_allocated: 40191.22 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m 
peak_reserved: 60688.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m peak_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m training_loss: inf\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m eval_loss: 0.5817\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_allocated: 37878.81 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_reserved: 38938.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m model_load_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_allocated: 40191.22 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_reserved: 60688.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m before_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_allocated: 39944.87 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_reserved: 43002.00 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m after_training_max_allocated: 57321.08 MB\u001b[0m\u001b[94m\n", + "\u001b[0m\u001b[94m--------------------------------------------------\u001b[0m\u001b[94m\n", + "\u001b[0m" + ] + } + ], + "source": [ + "# Clean up CUDA cache between runs\n", + "if torch.cuda.is_available():\n", + " torch.cuda.empty_cache()\n", + " print(\"Cleared CUDA cache between runs\")\n", + "\n", + "import gc\n", + "import time\n", + "\n", + "# Force garbage collection\n", + "gc.collect()\n", + "time.sleep(5) # Give system time to free memory\n", + "\n", + "# Run with 8-bit quantization\n", + "print(\"\\n=============== Running with 8-bit Quantization ===============\\n\")\n", + "bnb_config = bnb_config_8bit # Set active config to 8-bit\n", + "\n", + "# Force model to be loaded on CPU first for 8-bit quantization\n", + "print(\"Loading model with 8-bit quantization on CPU first...\")\n", + "model_8bit = AutoModelForCausalLM.from_pretrained(\n", + " model_name,\n", + " device_map=\"cpu\", # Start on CPU to avoid OOM\n", + " 
quantization_config=bnb_config_8bit,\n", + " trust_remote_code=True,\n", + " torch_dtype=torch.float32, # Use float32 for CPU\n", + " low_cpu_mem_usage=True\n", + ")\n", + "model_8bit = prepare_model_for_kbit_training(model_8bit)\n", + "model_8bit = get_peft_model(model_8bit, peft_config)\n", + "\n", + "# Use only one round and one collaborator for 8-bit to save memory\n", + "runtime = LocalRuntime(aggregator=aggregator, collaborators=collaborators)\n", + "flflow_8bit = FederatedFlow(model_8bit, rounds=5, quant_type=\"8bit\")\n", + "flflow_8bit.runtime = runtime\n", + "flflow_8bit.run()" + ] + }, + { + "cell_type": "markdown", + "id": "ebe541a4", + "metadata": {}, + "source": [ + "## Visualize Memory and Performance Metrics" + ] + }, + { + "cell_type": "markdown", + "id": "4718aa9f", + "metadata": {}, + "source": [ + "Now that we've run our federated training with both 4-bit and 8-bit quantization, let's visualize the memory usage and performance metrics to understand the tradeoffs between these approaches." + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "d84d3daa-7520-4b3f-a1d6-ae7cebec58e7", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Converting any CUDA tensors to CPU for visualization...\n", + "Conversion complete. 
Ready for visualization.\n" + ] + } + ], + "source": [ + "# Pre-process tensors to fix CUDA to CPU conversion issues\n", + "def tensor_to_float(val):\n", + " if val is None:\n", + " return None\n", + " if isinstance(val, torch.Tensor):\n", + " return val.detach().cpu().float().numpy().item()\n", + " return val\n", + "\n", + "# Convert all tensors in both flow objects\n", + "print(\"Converting any CUDA tensors to CPU for visualization...\")\n", + "\n", + "# Convert the history lists\n", + "flflow_4bit.average_loss_history = [tensor_to_float(x) for x in flflow_4bit.average_loss_history]\n", + "flflow_4bit.agg_model_loss_history = [tensor_to_float(x) for x in flflow_4bit.agg_model_loss_history]\n", + "flflow_4bit.local_model_loss_history = [tensor_to_float(x) for x in flflow_4bit.local_model_loss_history]\n", + "\n", + "flflow_8bit.average_loss_history = [tensor_to_float(x) for x in flflow_8bit.average_loss_history]\n", + "flflow_8bit.agg_model_loss_history = [tensor_to_float(x) for x in flflow_8bit.agg_model_loss_history]\n", + "flflow_8bit.local_model_loss_history = [tensor_to_float(x) for x in flflow_8bit.local_model_loss_history]\n", + "\n", + "# Convert current values\n", + "flflow_4bit.average_loss = tensor_to_float(flflow_4bit.average_loss)\n", + "flflow_4bit.aggregated_model_accuracy = tensor_to_float(flflow_4bit.aggregated_model_accuracy)\n", + "flflow_4bit.local_model_accuracy = tensor_to_float(flflow_4bit.local_model_accuracy)\n", + "\n", + "flflow_8bit.average_loss = tensor_to_float(flflow_8bit.average_loss)\n", + "flflow_8bit.aggregated_model_accuracy = tensor_to_float(flflow_8bit.aggregated_model_accuracy)\n", + "flflow_8bit.local_model_accuracy = tensor_to_float(flflow_8bit.local_model_accuracy)\n", + "\n", + "# Convert tensors in memory stats\n", + "for collab, rounds_data in flflow_4bit.all_memory_stats.items():\n", + " for round_name, stats in rounds_data.items():\n", + " if \"training_loss\" in stats and isinstance(stats[\"training_loss\"], 
torch.Tensor):\n", + " stats[\"training_loss\"] = tensor_to_float(stats[\"training_loss\"])\n", + " if \"eval_loss\" in stats and isinstance(stats[\"eval_loss\"], torch.Tensor):\n", + " stats[\"eval_loss\"] = tensor_to_float(stats[\"eval_loss\"])\n", + "\n", + "for collab, rounds_data in flflow_8bit.all_memory_stats.items():\n", + " for round_name, stats in rounds_data.items():\n", + " if \"training_loss\" in stats and isinstance(stats[\"training_loss\"], torch.Tensor):\n", + " stats[\"training_loss\"] = tensor_to_float(stats[\"training_loss\"])\n", + " if \"eval_loss\" in stats and isinstance(stats[\"eval_loss\"], torch.Tensor):\n", + " stats[\"eval_loss\"] = tensor_to_float(stats[\"eval_loss\"])\n", + "\n", + "print(\"Conversion complete. Ready for visualization.\")" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "e6c8db6d", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAABjUAAAScCAYAAADDDw0GAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQABAABJREFUeJzs3XdYFMf/B/D30Y/eBRQEQUXsJfbee+9JFGJL7C22rwVLNNZoNLbE2GJiiy3GrpiIvRs7GhE7CNI7N78/+N2G4w44OBRP36/n4TG3Ozsz+2GP7O3nZkYmhBAgIiIiIiIiIiIiIiJ6zxkUdQeIiIiIiIiIiIiIiIi0waQGERERERERERERERHpBSY1iIiIiIiIiIiIiIhILzCpQUREREREREREREREeoFJDSIiIiIiIiIiIiIi0gtMahARERERERERERERkV5gUoOIiIiIiIiIiIiIiPQCkxpERERERERERERERKQXmNQgIiIiIiIiIiIiIiK9wKQGERERUQ4mTJgAmUwGmUyGOXPmFKiOxo0bQyaT4eTJk/k6LjAwEDKZDIGBgQVq932Wnp6OlStXon79+rCzs4OxsTEcHR3RrFkzbNy4EQqFIt91btiwATKZDP7+/vk6LjQ0FDKZDJ6envluU1tpaWn48ccf0aZNG7i6usLExAQODg6oVasWZs6cidevX7+1tt8nnp6ekMlkCA0NLequaKRv77nIyEjMmzcPjRs3houLC0xMTGBtbY0KFSpg0KBBOHHiRFF3Ue/4+/tDJpNhw4YNRd0VIiIiIsoFkxpEREREGpw5cwaLFy+GTCYr6q6oOHnyJGQyGRo3blzUXSmQlJQUNGvWDMOGDcPFixdRrVo1dOvWDWXLlkVQUBD8/f3RrVs3CCGKuquF8hD+zp07KF++PAYPHoyjR4/C29sb3bt3R82aNXHnzh0EBgbC29sbu3btKryOF4H3/WGwvr9vstu8eTM8PT0xZcoUnDt3DmXKlEG3bt3QtGlTpKen46effkKzZs3Qs2fPou4qEREREVGhMyrqDhARERG9bxITE+Hv7w9XV1d88skn2L
Nnzzvvw/Dhw9G7d284Ojq+87bfppUrV+Lvv/9GyZIl8ffff8PDw0Pad+nSJTRt2hR79uzBtm3b0Lt377fen+LFi+POnTswNjYu9LofPXqE+vXrIyoqCs2bN8e6detUzjcpKQnTp0/HokWL0KNHD/z+++/o3LlzoffjfXH8+HGkpaWhePHiRd0VjfTlPbd69Wp89dVXkMlkmDhxIqZMmQJra2uVMrdv30ZgYCBCQkKKqJf6ad68eZg0aRJcXV2LuitERERElAuO1CAiIiLKZvLkyQgJCcHatWthY2NTJH1wdHSEr6/ve/+ANb+UU+IMGzZM5QE/ANSoUUNKZJw9e/ad9MfY2Bi+vr7w9vYu9Lo///xzREVFoXbt2ti/f7/a+crlcixcuBDjx4+HQqFAQEAAoqKiCr0f7wtvb2/4+vq+lQRSYdCH99zdu3cxcuRIAMDixYvx7bffqiU0AMDPzw/bt2/HsmXL3nUX9Zqrqyt8fX2L7O8+EREREWmHSQ0iIiKiLE6ePInly5ejX79+aNu2baHW/ddff6Fly5awt7eHubk5atasic2bN2ssq2l+/8aNG6NJkyZSXcr1PrRdE6JPnz6QyWT49ttvcyyzf/9+yGQyVK1aVWX7jh070Lx5czg4OMDY2BgODg7w8/PDoEGDcOPGjbxP/v+ZmZlpVU6XB8uRkZFS0sTU1BQlS5bEmDFj8ObNG7WymtbUUK7P8fjxYwCAl5eXSqy1WR/lr7/+wunTpwEAK1asgKmpaY5lZ8+eDWdnZ0RHR+OHH35Q2ZfXmiw5rQMRFxeHH3/8EV27dkXp0qVhYWEBCwsLVKxYEf/73/8QHR2tsb6sU24FBQWhZcuWsLOzg1wuR7Vq1bBp0yaV8sr4bdy4EQAQEBCgEqus/dI0nZey/3n9ZHXhwgVMmDABNWvWlNaSKFasGDp06IBjx46pnZO275u81tQ4fPgw2rdvD2dnZ5iYmMDNzQ29evXCpUuXNJbP+ru7du0aunbtCkdHR5iamsLPzw+LFy/O9zRr8+fPR1paGipXrozRo0fnWb5hw4Zq254+fYoRI0agdOnSMDMzg42NDerVq4c1a9YgIyNDrXzW9WpiYmIwduxYeHp6wszMDKVLl8b8+fOldXCePXuGIUOGwN3dHaampihbtiyWL1+eZ3zy87cxIiIC33//Pdq2bQsvLy/I5XJYW1ujRo0amD9/PpKTkzUel/VaWr9+PerUqQMbGxuVazKnadQUCgXWrl2LevXqwdbWFsbGxnB2dkblypUxYsQIjVPURUVFYcqUKShfvjzMzc1hZWWF6tWrY8GCBUhKSlIrn3WKtLS0NMyfPx/ly5eHXC6Hg4MDunbtijt37mg8NyIiIqKPDaefIiIiIvp/8fHx+OKLL1CsWDEsXbq0UOvevXs3VqxYAV9fX7Rq1QrPnz9HcHAw+vXrh2vXrmHx4sV51tG6dWuYmZnh8OHDKFasGFq3bi3t0yYJEBAQgK1bt2Ljxo2YNGmSxjLr168HAHzxxRfStlmzZmHGjBkwMjJC3bp1Ubx4ccTExCAsLAzr1q1D+fLlUalSpTzbB4A2bdpg586d+OGHH9CrVy+V0QuXL1/G1q1bIZfL8fnnn2tVX3Zv3rxBrVq1EBkZqfLQdOnSpTh48CBOnToFJyenXOvw8fFB//79sXPnTiQkJKBbt26wtLSU9ru4uOTZD+WUZeXLl0f16tVzLWtmZoaePXtixYoV2Lt3L6ZNm5b3iebh+vXrGDx4MJycnFC2bFlUr14db968weXLlzF37lxs374d586dg4ODg8bjf/75Z8yZMwfVqlVD69atERoainPnzqF///6IioqSHqhbWlqif//+CA4OxsOHD1GvXj34+PhI9VSpUiXXflapUgX9+/fXuO/evXs4d+4cDAxUv4c1ZcoUBAUFSbG1sLDAw4cPsX//fuzfvx9Lly7FqFGjpPK6vm8AYNq0aZgzZw5kMhnq1q0LDw8P3LlzB9u3b8
fvv/+OtWvXqrxnsjp8+DCWLFkCb29vtGjRAi9evEBwcDDGjx+PJ0+eaP23RgiBP/74AwDQr1+/Aq33c/HiRbRu3RpRUVHw8PBA586dERMTg5MnT+LMmTPYvXs39u3bBxMTE7Vjo6OjUadOHURGRqJBgwaIi4vDqVOnMGnSJDx9+hSjR49G/fr1YWxsjLp16yIiIgJ///03Ro4cicTEREycOFFjn/L7t/Hw4cMYNWoUihcvDh8fH9SuXRsRERE4f/48Jk2ahL179yIoKCjHROKIESOwcuVK1K1bF+3atcO///6bZywHDhyI9evXw8zMDPXr14eTkxOioqLw77//YsWKFWjWrJlKguzff/9F06ZN8fjxYzg5OaFt27ZIS0tDUFAQJk6ciG3btuHYsWOws7NTaystLQ1t27bFmTNn0LBhQ5QrVw4XLlzA7t27ERQUhKtXr2qVxCYiIiL6oAkiIiIiEkIIMWTIEAFA7N69W9rWv39/AUDMnj27QHU2atRIABAAxNy5c1X2nTx5UsjlcgFAHDp0SGXfjBkzBAAxY8YMle1BQUECgGjUqFG++5KRkSE8PDwEAHH27Fm1/REREcLY2FiYmJiI169fCyGESE5OFnK5XFhaWoq7d++qHRMaGiru3LmTrz7069dPABAmJiaiadOmonfv3qJevXpCJpOJSpUqiTNnzuT73NavXy/FuXbt2iIyMlLa9+bNG1G3bl0BQPTu3VvluEePHgkAomTJkmp1lixZUgAQjx49ynd/GjRoIACIgIAArcpv3LhRABCGhoYiPT1d2q68foKCgjQel9N18uTJE3Hs2DGRkZGhsj0hIUGK/9ChQ9XqU56zsbGx+OOPP1T2KWNsY2MjEhMTVfYp3yfr16/P8RzzE8/Hjx8LV1dXAUCsWLFCZd+BAwfE8+fP1Y45c+aMsLa2FsbGxuLp06cq+7R53+QUy4MHDwoAwszMTBw5ckRl308//STF6+bNmyr7sr73V69erbLv+PHjQiaTCUNDQ/HkyZMc+5TVw4cPpfr+/vtvrY7JKjk5WfodfPnllyI1NVWlbk9PTwFATJkyReW4rO+tDh06iISEBGnf5cuXhZGRkTAwMBB+fn7iyy+/FGlpadL+PXv2CADC2tpa5TghCv638fbt2xr/fkVFRYmWLVsKAGLBggVq+5VtWVtbazxeCM3X8ePHjwUAUaJECfHixQu1Y27fvi0eP36ssq1WrVoCgOjYsaOIj4+XtoeHh4tq1aoJAKJv374qxyivUQCiatWqKm0lJSWJVq1aCQBi8ODBGvtORERE9DHh9FNEREREAI4cOYI1a9agd+/eb2Wx5qpVq2Ly5Mkq2xo1aoShQ4cCgFYjNXRlYGAgfSteOSIjqy1btiAtLQ0dO3aUvsEfGxuLpKQklCpVCmXLllU7pmTJkvD19c1XHzZs2IBFixZBCIETJ05g69atOH36NORyOZo3b67z+harVq2Cvb299NrW1harV6+GTCbD9u3b8fTpU53q10ZERAQAoFixYlqVV5bLyMgolHU1SpQogWbNmqmNcjA3N8eqVatgZGSEHTt25Hj8iBEj0L59e5Vt/v7+8PX1RUxMTI5TLhWG6OhotGnTBi9evMDXX3+NYcOGqexv06aNxoWc69Spg2HDhiEtLQ179+4ttP4sWrQIADB06FC0aNFCZd+AAQPQvn17pKWl5bh+RdeuXTFkyBCVbU2bNkWrVq2QkZGBoKAgrfqhvKYAwNnZOT+nACBzCrnHjx/Dzc0NS5cuVVnbpFSpUtJ5Ll++XOMUTpaWlvjpp59gbm4ubatWrRratm0LhUKB+Ph4fPfddzAy+m8ygE6dOqFixYqIjY3N8ZrJ79/GcuXKoXbt2mr12NnZSVNd5XZtjx8/XuPxOXn16hWAzHPVNEqrXLlyKiPOgoODcf78eZibm2Pt2rWwsLCQ9jk5OWHt2rUAgK1bt2r8WySTybB+/XqVtszMzDBz5kwA0DjFGhEREdHHhk
kNIiIi+ujFxMRgwIABcHJyynH+d13169dP43ZlkiE4OFjjfPaFTTln/LZt29Tmddc09ZSTkxM8PT1x48YNjBs3Drdv39ap/djYWLRv3x5ff/01hg8fjvv37yMhIQH//PMPOnfujCVLlqBmzZp48uRJgeqvXLmyximPKlasiKpVq0KhUODvv//W6RzeBpFlbYXCvA7OnDmD+fPnY9iwYQgICIC/vz+GDh0KExMTREREaFxnBAA6dOigcXu5cuUAZK6d8Dakpqaic+fOuH37Nnr37o358+drLBcZGYlNmzZhwoQJGDRoEPz9/eHv74+//voLQObUVYUhPT1dWhvF399fY5kBAwYAQI7JiaKKZXbKdVl69+6tcWqmrl27ws7ODnFxcbh8+bLa/urVq2tMppQuXRoA0KRJE41r5ij3P3/+XGO/CvK3MSMjA8ePH8fs2bMxdOhQ6dr+5ptvAOT+++/evXuO+zTx9fWFlZUVDhw4gG+++QaPHj3Ktbwyzq1bt9aY1KxevToqV64MhUIhXa9ZeXh4oHLlymrb3/X1QkRERPQ+45oaRERE9NEbPXo0nj59im3btuVrgepvv/0Wd+/eVdu+aNEitXq8vLw01qHcnpSUhMjIyAJ9Azs/SpUqhUaNGuHkyZPYvXs3+vbtCwC4evUqrl+/Djc3N7Rs2VLlmE2bNqF79+5YsmQJlixZAnt7e9SqVQstWrTA559/nq+YjRs3DgcOHMDQoUOxZMkSaXuFChWwZcsWREZG4vDhw5g6daq0+HRwcDB++ukntbo6d+6sNqompzgr9125cuWdjNRQxkT5Le+8hIeHAwAMDQ1VRpkUVHh4OLp164bg4OBcy8XGxmqc1z/rN8+zsra2BoAcF2PWhRBCSkw0atRIWqA6ux9//BFjxoxBQkJCjnXFxsYWSp8iIyOlc83p2lKOLMrpYXNhxTLrWjDh4eEaR07lRtm/nM5DJpPBy8sLb9680XguOZ2Hcr2ZnPZbWVkByPk88/u3MSQkBF26dMGtW7c0Hgfk/vvP73oUVlZWWL9+PQICAjB16lRMnToVrq6uqF27Nlq3bo2+ffuqrLmTV5yBzGvm+vXr+Yqz8npJSUnJV/+JiIiIPkQcqUFEREQfvd27d8PIyAgrV65E48aNVX4OHToEAFi3bh0aN26M3r17S8cdOnQIGzduVPuJj48vUD+yflv/bVKOxNiwYYO0TTlKo1+/fjA0NFQp36BBA4SGhmLHjh0YPnw4PD09cfjwYYwdOxalSpXC8ePHtWo3IyMDmzdvBgD06dNHYxllkiXrFCsPHjzQGOdr165p1W527yLOysXBz58/r1X5CxcuAAD8/Pw0LtKcE4VCoXH7wIEDERwcjDp16uDIkSN49eoVUlNTIYSAEEKavimnWGSftupdmDRpEn777Tf4+flhz549GkcTXL58GUOGDEFKSgrmz5+P27dvIz4+HgqFAkIIrFmzBsC7ey9po7Bi6enpKSW8Ll68WCh15kde5/E2r5msv8/u3bvj1q1baN++Pf7++2+8fv1aura1eeAvl8vz3X63bt3w5MkTbNq0CYMGDYKdnR12796NIUOGwMfHB//880++68xJUbz3iIiIiPQN75iIiIiIkDnNzF9//aX2o/ymfWhoKP766y+cO3dOOubkyZPSQ+KsP5q+CZzTlCWhoaEAMudMV65j8bZ169YN1tbWOH78OJ48eYLU1FT8+uuvAICAgACNx8jlcnTv3h3Lly/H5cuX8fLlSwwePBhxcXEq01XlJjw8XHroqPzWcXY2NjYAoLKuhL+/v8Y4BwYGqh2f29QwyliXKFFCq/7qolOnTgCA27dva5zKJ6vk5GRs374dANClSxeVfcoER1xcnMZjHz9+rLYtISEBBw4cgIGBAQ4cOIAWLVrA2dlZWkMhISEBL1++zN8JvWUrV67EggUL4ObmhoMHD8LW1lZjuR07dkAIgREjRmDChAkoV64cLCwspBEdISEhhdovBw
cHKbny77//aiyj3F68ePFCbTs7AwMDaSqrTZs25ft4Zf9yOg/gv/fP2z4XTW1mp+lv4927d3Hjxg04Oztj9+7daNCgARwcHKRru7B//1nZ2Njg888/x9q1a3Hr1i2EhYWhU6dOePXqFYYPHy6V0ybO7+qaISIiIvpQMalBREREH73o6GiND82FENK87rNnz4YQQnrQll+//PKLxu3Kh5P169dXWWA3J8qH3Onp6QXqB5C5WHSvXr2gUCiwadMm/PHHH4iMjES9evVQpkwZrepwcnLCggULAABhYWE5rs2QVdYHxDmNYFAmjXKbuiU3N27cwI0bN9S237p1C1euXIGBgQEaNmyoVV26xLpJkybSYsTDhg3L9Rvk06ZNQ0REBOzs7FQejgL/PfS8c+eO2nGJiYka13GIiYlBRkYGrK2tNSYHfvnll0IfyaBLrPbt24eRI0fCysoKf/75Z47T7wD/JbtKliypti85ORm///57ofbPyMgI9evXB6A6simrn3/+GUDm7/xtmzhxIoyNjXH9+nUsXbo0z/KnTp2S/rtx48YAgG3btmmcCmr37t148+YNrKyspJFG70J+/jYqf/9ubm4a/17mVNfb4O7uLi3enXXUmDLOhw4d0jj93NWrV3Ht2rV8/S0iIiIiIlVMahARERG9A5cvX5aSAErBwcH44YcfAABjxozRqh7lKIOQkBCkpaUVuD9Zp6BSPpTVNErj8ePH+OmnnzTOUf/HH38AAOzs7HIceZGViYkJOnbsCCDzQX725MPx48elB7XKaajySwiBr776SiXJEhMTg6+++gpCCHTr1g3u7u5a1aWMdW5z9+fml19+ga2tLc6fP4/27durLX6elJSECRMmYNGiRTAyMsKOHTtU1k0AgObNmwMAfvjhB5X59xMSEjB48GCNC6oXK1YMdnZ2iI6Olqb7Ujp37hwmT55coPPJTUFjdeHCBfTp0wcGBgbYuXOnxkXes1Iulrxx40aV0SvJyckYOnRojt/61+V9M27cOADAqlWr1KZa27BhA/bt2wdjY2OMGjUqX/UWRLly5aS1aMaOHYspU6ZoHMVz//599OnTByNHjpS29ejRAx4eHnj+/DnGjh2rkuB59OiRdJ4jRozQuOD325Kfv41lypSBoaEh/vnnH2lBbqU//vgD3333XaH37+rVq9i2bRuSkpLU9in/BmZNstWvXx+1atVCUlIShgwZgsTERGnf69evMWTIEACZC7Zr+7eIiIiIiFRxoXAiIiKid2DkyJGYPHkyNm3ahEqVKuH58+c4deoUFAoFRo0ahbZt22pVj4eHB2rUqIFLly6hYsWKqFGjBszMzODo6Ihvv/1W6/7Url0b5cqVw507d/DgwQNYWFigV69eauXevHmDQYMGYejQoahSpYo0giIkJARXr16FTCbDwoUL1dbhyMl3332Hy5cv499//0W1atVQu3ZtFC9eHP/++y8uXboEAGjatCnGjx+v9blk1bFjR9y8eROlSpVCkyZNIJPJcPLkSURFRaF06dJYsWKF1nV169YNQUFB+Oyzz9CyZUtpQe2vv/5aq0Wavb29ERwcjE6dOuHYsWMoVaoUateuDXd3d0RHR+P06dOIjY2Fi4sLVq1ahWbNmqnV0bNnTyxduhSXLl1C+fLlUb9+fSgUCly6dAkmJib44osvpKSUkqGhIaZPn44xY8agX79++OGHH1CqVCmEhYXhzJkz+Oyzz/D3339rnLqqoDp37oyZM2fi+++/x82bN+Hu7g4DAwN07NhRSmRpMnnyZCQmJsLLywu//vqrNA1adspREgEBAVi2bBmuXr0KLy8vNGjQAIaGhjh16hSSkpIwatQoLFu2TO14Xd43bdq0wdSpUzFnzhy0aNEC9erVg4eHB+7evYsrV67A0NAQq1evRvny5bUPmA6GDx8OCwsLjBgxAvPmzcN3332HmjVronjx4khOTsbdu3elkT1Z1wAyNTXFzp070bp1a6xatQoHDhxA7d
q1ERcXhxMnTiA5ORmtWrXCjBkz3sl5KOXnb6OjoyOGDx+OZcuWoVmzZmjQoAHc3Nxw7949XLlyRfo9FabHjx+jd+/ekMvlqFatGtzd3ZGeno5//vkH9+7dg4mJiVpS5tdff0XTpk2xd+9eeHl5oWHDhkhLS0NQUBBiY2NRrVq1fP0tIiIiIqJsBBERERHlqH///gKAmD17doGOb9SokQAggoKCxPHjx0WzZs2EjY2NkMvlokaNGmLDhg0aj5sxY4YAIGbMmKG27/Hjx6Jv377C1dVVGBkZCQCiZMmS+e7bggULBAABQPTv319jmdjYWLF06VLRpUsXUbp0aWFpaSksLCxEmTJlRL9+/cSlS5fy3W5sbKyYM2eO+OSTT4S1tbUwNDQU9vb2olGjRmLNmjUiPT0933WuX79eOo/w8HAxZMgQUaJECWFiYiLc3d3FyJEjRWRkpNpxjx49yjF+GRkZYt68eaJ8+fLCzMxMilVQUFC++paSkiJWr14tWrRoIYoVKyb9zgAIV1dXjf3K6s2bN2L48OGiRIkSwtjYWBQvXlwMHjxYvHr1KtfrZM+ePaJu3brC1tZWWFpaiho1aoiVK1cKhUIhSpYsKQCIR48eqRyT03Yl5fth/fr1avt2794t6tWrJ6ysrIRMJlPrl6a6le+PvH6yioiIEEOHDhXe3t7C1NRUuLm5ic8++0yEhISoXAfZ5fW+yS2WQghx8OBB0bZtW+Hg4CCMjIyEi4uL6NGjhzh//rzG8lnf+5rk1V5eIiIixJw5c0SDBg2Ek5OTMDIyEpaWlqJChQpi8ODB4q+//tJ4XFhYmBg2bJgoVaqUMDExEVZWVqJOnTpi1apVIi0tTa18bjHV5jxyumYK+rdRoVCIdevWierVqwtLS0thY2Mj6tevL7Zu3SqEEBqvmdy259XXFy9eiG+//Va0bdtWeHl5CXNzc2FtbS38/PzEsGHDxN27dzXWFRkZKSZPnizKlSsnzMzMhLm5uahatar49ttvRWJiolr5oKAgAUA0atQox/5pcw5EREREHwOZEIU8oS4REREREeXqyZMnqF+/PsLCwhAQEIB169ZJi10TfQwaN26Mv/76C0FBQdI6FERERERE2uCaGkRERERE75i7uzuOHz8OFxcXrF+/Xm2BcCIiIiIiItKMa2oQERERERUBHx8fHD9+HNu3bwcA3Lt3T6u1OoiIiIiIiD5mTGoQERERERURPz8/BAYGFnU3iIiIiIiI9AbX1CAiIiIiIiIiIiIiIr3ANTWIiIiIiIiIiIiIiEgvMKlBRERERERERERERER6gUkNIiIiIiIiIiIiIiLSC0xqEBERERERERERERGRXmBSg4iIiIiIiIiIiIiI9AKTGkREREREREREREREpBeY1CAiIiIiIiIiIiIiIr3ApAYREREREREREREREekFJjWIiIiIiIiIiIiIiEgvMKlBRERERERERERERER6gUkNIiIiIiIiIiIiIiLSC0xqEBERERERERERERGRXmBSg4iIiIiIiIiIiIiI9AKTGkREREREREREREREpBeY1CAiIiIiIiIiIiIiIr3ApAYREREREREREREREekFJjWIiIiIiIiIiIiIiEgvMKlBRERERERERERERER6gUkNIiIiIiIiIiIiIiLSC0xqEBERERERERERERGRXmBSg4iIiIiIiIiIiIiI9AKTGkREREREREREREREpBeY1CAiIiIiIiIiIiIiIr3ApAYREREREREREREREekFJjWIiIiIiIiIiIiIiEgvMKlBRERERERERERERER6gUkNIiIiIiIiIiIiIiLSC0xqEBERERERERERERGRXmBSg4iIiIiIiIiIiIiI9AKTGkRE75i/vz8sLS2LuhukxwIDAyGTyfD69eui7sp7zdPTE/7+/kXdDSIiIqL3lr+/Pzw9PQt0rPKelHLm6emJ9u3bF3U33msnT56ETCbDyZMni7orRKRHmNQgoo/Whg0bIJPJpB
8zMzOUKVMGw4cPx6tXr4q6exopPzgYGBjgyZMnavtjY2Mhl8shk8kwfPjwIuhh0csraWRpackH3Vpq3LixyntELpejUqVKWLp0KRQKRVF3j4iIiOiDlfUeLLefj/VBML8o9p/Q0FCVa8LAwAD29vZo06YNzp49W9TdIyJ6K4yKugNEREVt1qxZ8PLyQnJyMoKDg7Fq1SocOHAAN2/ehLm5eVF3TyNTU1P89ttvmDBhgsr2Xbt2FVGP6ENVokQJzJs3DwDw+vVr/PrrrxgzZgwiIiLwzTffFHHviIiIiD5MmzdvVnm9adMmHD16VG17uXLldGrnxx9/LPCXVaZOnYpJkybp1D4Vnj59+qBt27bIyMjA/fv3sXLlSjRp0gQXL15ExYoVi7p7RESFikkNIvrotWnTBjVq1AAADBw4EA4ODliyZAn27t2LPn36FHHvNGvbtq3GpMavv/6Kdu3a4ffffy+inukmISEBFhYWRd0NysLGxgafffaZ9PrLL7+Er68vli9fjlmzZsHQ0LAIe0dERET0Ycp6/wUA586dw9GjR9W2Z5eYmJivL2YZGxsXqH8AYGRkBCMjPlZ6X1SrVk3l+mjQoAHatGmDVatWYeXKlUXYMyKiwsfpp4iIsmnatCkA4NGjR9K2X375BdWrV4dcLoe9vT169+6tNv3TqVOn0KNHD3h4eMDU1BTu7u4YM2YMkpKS8mzz2rVrcHJyQuPGjREfH59n+b59++LatWu4e/eutO3ly5c4ceIE+vbtq/GYlJQUzJgxAz4+PlL/JkyYgJSUFJVyyqmrduzYAT8/P8jlctSpUwf//PMPAGDNmjXw8fGBmZkZGjdujNDQULW2duzYIcXL0dERn332GZ49e6ZSRjlk/OHDh2jbti2srKzw6aefYsaMGTA2NkZERIRavYMHD4atrS2Sk5PzjJG20tLSMHPmTJQuXRpmZmZwcHBA/fr1cfToUanMjRs34O/vj1KlSsHMzAwuLi744osvEBkZqVbfyZMnUaNGDZiZmcHb2xtr1qzJcb5hba6r3Lx+/Ro9e/aEtbU1HBwcMGrUKJXYNGrUCJUrV9Z4bNmyZdGqVSut21IyMzPDJ598gri4OISHh0vb09PTMXv2bHh7e8PU1BSenp6YMmWKxusrMDBQrd7s618op4c7ffo0xo4dCycnJ1hYWKBLly5q14YQAnPmzEGJEiVgbm6OJk2a4NatW/k+NyIiIiJ90rhxY1SoUAGXL19Gw4YNYW5ujilTpgAA9u7di3bt2sHNzQ2mpqbw9vbG7NmzkZGRoVJH9jU1lFMZLVq0CGvXrpXu7T755BNcvHhR5VhN97jKzxJ79uxBhQoVYGpqivLly+PQoUNq/c/PfXNBafO55OXLlwgICECJEiVgamoKV1dXdOrUSeVzzqVLl9CqVSs4OjpCLpfDy8sLX3zxhdb9OHLkCKpUqQIzMzP4+fmpjLD/999/IZPJ8N1336kdd+bMGchkMvz222/5PvcGDRoAAB4+fKiy/d9//0WPHj1gb28Pc3Nz1K5dG3/++adKGeW9ePbPeprWv1Beh7dv30aTJk1gbm6O4sWLY8GCBWp9evr0KTp37gwLCws4OztjzJgxap8XiIi0wZQ6EVE2yps+BwcHAMA333yDadOmoWfPnhg4cCAiIiKwfPlyNGzYEFevXoWtrS2AzBvmxMREfPXVV3BwcMCFCxewfPlyPH36FDt27MixvYsXL6JVq1aoUaMG9u7dC7lcnmcfGzZsiBIlSuDXX3/FrFmzAADbtm2DpaUl2rVrp1ZeoVCgY8eOCA4OxuDBg1GuXDn8888/+O6773D//n3s2bNHpfypU6ewb98+DBs2DAAwb948tG/fHhMmTMDKlSsxdOhQvHnzBgsWLMAXX3yBEydOSMdu2LABAQEB+OSTTzBv3jy8evUKy5Ytw+nTp1XiBWQ+CG/VqhXq16+PRYsWwdzcHHXq1MGsWbOwbds2lXVBUlNTsXPnTnTr1g
1mZmZ5xkhbgYGBmDdvHgYOHIiaNWsiNjYWly5dwpUrV9CiRQsAwNGjR/Hvv/8iICAALi4uuHXrFtauXYtbt27h3Llz0gevq1evonXr1nB1dcXMmTORkZGBWbNmwcnJSa1dba+r3PTs2ROenp6YN28ezp07h++//x5v3rzBpk2bAACff/45Bg0ahJs3b6JChQrScRcvXsT9+/cxderUAsVM+WE3ax8HDhyIjRs3onv37hg3bhzOnz+PefPm4c6dO9i9e3eB2gGAESNGwM7ODjNmzEBoaCiWLl2K4cOHY9u2bVKZ6dOnY86cOWjbti3atm2LK1euoGXLlkhNTS1wu0RERET6IDIyEm3atEHv3r3x2WefoVixYgAy78ktLS0xduxYWFpa4sSJE5g+fTpiY2OxcOHCPOv99ddfERcXhyFDhkAmk2HBggXo2rUr/v333zxHdwQHB2PXrl0YOnQorKys8P3336Nbt24ICwuTPmPl5765oLT9XNKtWzfcunULI0aMgKenJ8LDw3H06FGEhYVJr1u2bAknJydMmjQJtra2CA0N1Xrq35CQEPTq1Qtffvkl+vfvj/Xr16NHjx44dOgQWrRogVKlSqFevXrYsmULxowZo3Lsli1bYGVlhU6dOuX7/JUJCTs7O2nbq1evULduXSQmJmLkyJFwcHDAxo0b0bFjR+zcuRNdunTJdzsA8ObNG7Ru3Rpdu3ZFz549sXPnTkycOBEVK1ZEmzZtAABJSUlo1qwZwsLCMHLkSLi5uWHz5s0qnyWJiLQmiIg+UuvXrxcAxLFjx0RERIR48uSJ2Lp1q3BwcBByuVw8ffpUhIaGCkNDQ/HNN9+oHPvPP/8IIyMjle2JiYlqbcybN0/IZDLx+PFjaVv//v2FhYWFEEKI4OBgYW1tLdq1ayeSk5Pz7POMGTMEABERESHGjx8vfHx8pH2ffPKJCAgIEEIIAUAMGzZM2rd582ZhYGAgTp06pVLf6tWrBQBx+vRpaRsAYWpqKh49eiRtW7NmjQAgXFxcRGxsrLR98uTJAoBUNjU1VTg7O4sKFSqIpKQkqdz+/fsFADF9+nSVOAAQkyZNUjvPOnXqiFq1aqls27VrlwAggoKCco1R1vhqYmFhIfr37y+9rly5smjXrl2udWr63f72228CgPj777+lbR06dBDm5ubi2bNn0raQkBBhZGQksv4vNz/XlSbK66Bjx44q24cOHSoAiOvXrwshhIiOjhZmZmZi4sSJKuVGjhwpLCwsRHx8fK7tNGrUSPj6+oqIiAgREREh7t69K77++msBQCVm165dEwDEwIEDVY4fP368ACBOnDghbQMgZsyYodZWyZIlVX4vyvdn8+bNhUKhkLaPGTNGGBoaiujoaCGEEOHh4cLExES0a9dOpdyUKVMEAJU6iYiIiPTVsGHDRPZHOI0aNRIAxOrVq9XKa7p/HTJkiDA3N1f53NG/f39RsmRJ6fWjR48EAOHg4CCioqKk7Xv37hUAxB9//CFtU96TZgVAmJiYiAcPHkjbrl+/LgCI5cuXS9u0vW/OSV73/Np+Lnnz5o0AIBYuXJhjXbt37xYAxMWLF/PsV3YlS5YUAMTvv/8ubYuJiRGurq6iatWq0jbl5607d+6onIOjo2Oe97PK39nMmTNFRESEePnypTh16pT45JNPBACxY8cOqezo0aMFAJXPhXFxccLLy0t4enqKjIwMIcR/9+JZPxMKIURQUJDaZzLldbhp0yZpW0pKinBxcRHdunWTti1dulQAENu3b5e2JSQkCB8fH60+5xERZcXpp4joo9e8eXM4OTnB3d0dvXv3hqWlJXbv3o3ixYtj165dUCgU6NmzJ16/fi39uLi4oHTp0ggKCpLqyTrCIiEhAa9fv0bdunUhhMDVq1fV2g0KCkKrVq3QrFkz7Nq1C6ampvnqd9++ffHgwQNcvHhR+jenqad27NiBcuXKwdfXV+U8lFNtZT0PAGjWrJnKMPRatWoByPwWk5WVld
r2f//9F0DmsOzw8HAMHTpUZTRFu3bt4OvrqzasGQC++uortW39+vXD+fPnVYZKb9myBe7u7mjUqFGucckvW1tb3Lp1CyEhITmWyfq7TU5OxuvXr1G7dm0AwJUrVwAAGRkZOHbsGDp37gw3NzepvI+Pj/TtJKX8XFe5UY6kURoxYgQA4MCBAwAy18Po1KkTfvvtNwghpH5u27ZNGvadl7t378LJyQlOTk7w9fXFwoUL0bFjR2zYsEEqo2xv7NixKseOGzcOADT+3rU1ePBglSkIGjRogIyMDDx+/BgAcOzYMaSmpmLEiBEq5UaPHl3gNomIiIj0hampKQICAtS2Z71/jYuLw+vXr9GgQQMkJiaqTGGbk169eql8w185lZHyvj83zZs3h7e3t/S6UqVKsLa2lo7Nz31zQWn7uUQul8PExAQnT57EmzdvNNalHNGxf/9+pKWl5bsvbm5uKiMgrK2t0a9fP1y9ehUvX74EkDkC28zMDFu2bJHKHT58GK9fv85zHRWlGTNmwMnJCS4uLmjQoAHu3LmDxYsXo3v37lKZAwcOoGbNmqhfv760zdLSEoMHD0ZoaChu376d7/NT1pG1nyYmJqhZs6bK9XLgwAG4urqq9Mfc3ByDBw8uUJtE9HFjUoOIPno//PADjh49iqCgINy+fRv//vuvtNZASEgIhBAoXbq09GBX+XPnzh2VNQXCwsLg7+8Pe3t7WFpawsnJSXoAHxMTo9JmcnIy2rVrh6pVq2L79u0wMTHJd7+rVq0KX19f/Prrr9iyZQtcXFykJEV2ISEhuHXrlto5lClTBgBUzgMAPDw8VF7b2NgAANzd3TVuV34AUD5oLlu2rFoffH19pf1KRkZGKFGihFrZXr16wdTUVLqpj4mJwf79+/Hpp58Wyhy7WeuYNWsWoqOjUaZMGVSsWBFff/01bty4oVI+KioKo0aNQrFixSCXy+Hk5AQvLy+pb0BmDJOSkuDj46PWXvZt+bmuclO6dGmV197e3jAwMFCZ+7Zfv34ICwvDqVOnAGQmAV69eoXPP/9cqzY8PT1x9OhRHD58GCtXrkTx4sURERGh8uHw8ePHMDAwUDtPFxcX2Nraqv3e8yP7taj8cJ39msseCycnJ5UP4kREREQfouLFi2v8LHHr1i106dIFNjY2sLa2hpOTk/TQOftnE03yugfLz7HK45XH5ue+uaC0/VxiamqK+fPn4+DBgyhWrBgaNmyIBQsWSMkGIHOdum7dumHmzJlwdHREp06dsH79eq3XgvDx8VH7DKP8HKa8b7e1tUWHDh3w66+/SmW2bNmC4sWL5/gZL7vBgwfj6NGj+OOPP6S1HbOvofL48WONMSlXrpy0vyBKlCihdo5Zf+fKujXFQlN/iIjywjU1iOijV7NmTdSoUUPjPoVCAZlMhoMHD8LQ0FBtv6WlJYDMbxu1aNECUVFRmDhxInx9fWFhYYFnz57B398fCoVC5ThTU1O0bdsWe/fuxaFDh9C+ffsC9b1v375YtWoVrKys0KtXLxgYaM5VKxQKVKxYEUuWLNG4P3uyQtO55rZdOQogv0xNTTX22c7ODu3bt8eWLVswffp07Ny5EykpKVp9S8nMzAwpKSkQQqjdMAshkJycrPJAvmHDhnj48CH27t2LI0eO4KeffsJ3332H1atXY+DAgQAyvzl15swZfP3116hSpQosLS2hUCjQunVrtd+tNrS9rvJLU8KnVatWKFasGH755Rc0bNgQv/zyC1xcXNC8eXOt6rSwsFApW69ePVSrVg1TpkzB999/n2f72sr+gUupsK85IiIiog+JpvX4oqOj0ahRI1hbW2PWrFnw9vaGmZkZrly5gokTJ2p1/6rLPZi+3b+NHj0aHTp0wJ49e3D48GFMmzYN8+bNw4kTJ1C1alXIZDLs3LkT586dwx9//IHDhw/jiy++wOLFi3Hu3LkC37tn169fP+zYsQNnzpxBxYoVsW/fPgwdOjTHz3jZlS5dWrpvb9++PQ
wNDTFp0iQ0adIkx8+7Ocnpvp737ET0vmBSg4goF97e3hBCwMvLS/o2jSb//PMP7t+/j40bN6Jfv37S9qNHj2osL5PJsGXLFnTq1Ak9evTAwYMH0bhx43z3r2/fvpg+fTpevHiBzZs353oe169fR7NmzQplpENOSpYsCQC4d++e2jeK7t27J+3XRr9+/dCpUydcvHgRW7ZsQdWqVVG+fHmt+pCeno6HDx+qfdPrwYMHyMjIUOuHvb09AgICEBAQgPj4eDRs2BCBgYEYOHAg3rx5g+PHj2PmzJmYPn26dEz26aqcnZ1hZmaGBw8eqPUp+zZtr6u8hISESCNGlO0oFAqVqcMMDQ3Rt29fbNiwAfPnz8eePXswaNCgHD945KVSpUr47LPPsGbNGowfPx4eHh4oWbIkFAoFQkJCpG95AZkLEUZHR6vE287ODtHR0Sp1pqam4sWLFwXqj7LukJAQlCpVStoeERGh1TcJiYiIiD40J0+eRGRkJHbt2oWGDRtK2x89elSEvfpPfu6bCyq/n0u8vb0xbtw4jBs3DiEhIahSpQoWL16MX375RSpTu3Zt1K5dG9988w1+/fVXfPrpp9i6dav0RaicPHjwQO0LV/fv3wcAlfv21q1bw8nJCVu2bEGtWrWQmJio9ehqTf73v//hxx9/xNSpU3Ho0CEAmXG5d++eWlnllGTKuChH5mS/b9dlBHbJkiVx8+ZNtVho6g8RUV44/RQRUS66du0KQ0NDzJw5U+1bJkIIREZGAvjvmylZywghsGzZshzrNjExwa5du/DJJ5+gQ4cOuHDhQr775+3tjaVLl2LevHmoWbNmjuV69uyJZ8+e4ccff1Tbl5SUhISEhHy3rUmNGjXg7OyM1atXqwzHPnjwIO7cuYN27dppXVebNm3g6OiI+fPn46+//tJ6LlnlPLwrVqxQ2/fDDz+olAEg/Q6VLC0t4ePjI/Vf0+8WAJYuXary2tDQEM2bN8eePXvw/PlzafuDBw9w8OBBlbLaXld5UZ6P0vLly9XODwA+//xzvHnzBkOGDEF8fLzWsczJhAkTkJaWJo38adu2LQD1mCj3Z/29e3t74++//1Ypt3bt2hy/9ZWX5s2bw9jYGMuXL1eJZfa+EBEREX0sNN2/pqamYuXKlUXVJRX5uW8uKG0/lyQmJiI5OVnlWG9vb1hZWUnHvXnzRu2evUqVKgCg1RRUz58/x+7du6XXsbGx2LRpE6pUqQIXFxdpu5GREfr06YPt27djw4YNqFixIipVqpS/E8/C1tYWQ4YMweHDh3Ht2jUAmfftFy5cwNmzZ6VyCQkJWLt2LTw9PeHn5wcA0pooWe/bMzIysHbt2gL3p23btnj+/Dl27twpbUtMTNSpTiL6eHGkBhFRLry9vTFnzhxMnjwZoaGh6Ny5M6ysrPDo0SPs3r0bgwcPxvjx4+Hr6wtvb2+MHz8ez549g7W1NX7//fc8vykul8uxf/9+NG3aFG3atMFff/2FChUq5KuPo0aNyrPM559/ju3bt+PLL79EUFAQ6tWrh4yMDNy9exfbt2/H4cOH8z0kWRNjY2PMnz8fAQEBaNSoEfr06YNXr15h2bJl8PT0xJgxY/JVV+/evbFixQoYGhqiT58+Wh1XpUoVDBw4EMuWLUNISAhatGgBIHPUzIEDBzBw4EBUrlxZKu/n54fGjRujevXqsLe3x6VLl7Bz504MHz4cQOZCfsq5ddPS0lC8eHEcOXJE4zfdAgMDceTIEdSrVw9fffUVMjIysGLFClSoUEH6IAFof13l5dGjR+jYsSNat26Ns2fP4pdffkHfvn1Vzg/IXH+lQoUK0oLx1apV0yqWOfHz80Pbtm3x008/Ydq0aahcuTL69++PtWvXStMdXLhwARs3bkTnzp3RpEkT6diBAwfiyy+/RLdu3dCiRQtcv34dhw8fhqOjY4H64uTkhPHjx2PevHlo37492rZti6tXr+LgwYMFrpOIiIhIn9WtWxd2dnbo378/Ro
4cCZlMhs2bN79XUwFpe9+cm7S0NMyZM0dtu729PYYOHarV55L79++jWbNm6NmzJ/z8/GBkZITdu3fj1atX6N27NwBg48aNWLlyJbp06QJvb2/ExcXhxx9/hLW1tfTlntyUKVMGAwYMwMWLF1GsWDH8/PPPePXqFdavX69Wtl+/fvj+++8RFBSE+fPnaxWH3IwaNQpLly7Ft99+i61bt2LSpEn47bff0KZNG4wcORL29vbYuHEjHj16hN9//12a6qp8+fKoXbs2Jk+ejKioKNjb22Pr1q1IT08vcF8GDRqEFStWoF+/frh8+TJcXV2xefNmmJub63yeRPQREkREH6n169cLAOLixYt5lv39999F/fr1hYWFhbCwsBC+vr5i2LBh4t69e1KZ27dvi+bNmwtLS0vh6OgoBg0aJK5fvy4AiPXr10vl+vfvLywsLFTqf/36tfDz8xMuLi4iJCQkx37MmDFDABARERG59heAGDZsmMq21NRUMX/+fFG+fHlhamoq7OzsRPXq1cXMmTNFTExMrsc+evRIABALFy5U2R4UFCQAiB07dqhs37Ztm6hataowNTUV9vb24tNPPxVPnz5VKaMpDtlduHBBABAtW7bMtVx2GRkZYtmyZaJy5crCzMxMmJmZicqVK4vvv/9eZGRkqJSdM2eOqFmzprC1tRVyuVz4+vqKb775RqSmpkplnj59Krp06SJsbW2FjY2N6NGjh3j+/LkAIGbMmKFS3/Hjx0XVqlWFiYmJ8Pb2Fj/99JMYN26cMDMzU+unNteVJsrr4Pbt26J79+7CyspK2NnZieHDh4ukpCSNxyxYsEAAEHPnztUyikI0atRIlC9fXuO+kydPqpx/WlqamDlzpvDy8hLGxsbC3d1dTJ48WSQnJ6scl5GRISZOnCgcHR2Fubm5aNWqlXjw4IEoWbKk6N+/v1Qup/en8poLCgpSqXPmzJnC1dVVyOVy0bhxY3Hz5k21OomIiIj01bBhw0T2Rzi53audPn1a1K5dW8jlcuHm5iYmTJggDh8+rHYf1b9/f1GyZEnpdU73/UIItXtf5T1p9jLZP0sIITTel+Xnvjm7/v37CwAaf7y9vaVyeX0uef36tRg2bJjw9fUVFhYWwsbGRtSqVUts375dKnPlyhXRp08f4eHhIUxNTYWzs7No3769uHTpUp79LFmypGjXrp04fPiwqFSpkjA1NRW+vr5qn5+yKl++vDAwMFD7/JST3H5nQgjh7+8vDA0NxYMHD4QQQjx8+FB0795d2NraCjMzM1GzZk2xf/9+teMePnwomjdvLkxNTUWxYsXElClTxNGjR9WuoZyuw+zXlhBCPH78WHTs2FGYm5sLR0dHMWrUKHHo0CG1OomI8iIT4j1K1RMREWVx/fp1VKlSBZs2bdJpPtmi1rlzZ9y6dUttHY53admyZRgzZgxCQ0Ph4eFRZP0gIiIiIsrJ+3DfXNSqVq0Ke3t7HD9+vKi7QkT03uKaGkRE9N768ccfYWlpia5duxZ1V7SWlJSk8jokJAQHDhwo0ELwhUUIgXXr1qFRo0ZMaBARERHRe+F9vG8uapcuXcK1a9fQr1+/ou4KEdF7jWtqEBHRe+ePP/7A7du3sXbtWgwfPhwWFhZF3SWtlSpVCv7+/ihVqhQeP36MVatWwcTEBBMmTHjnfUlISMC+ffsQFBSEf/75B3v37n3nfSAiIiIi0uR9um8uajdv3sTly5exePFiuLq6olevXkXdJSKi9xqTGkRE9N4ZMWIEXr16hbZt22LmzJlF3Z18ad26NX777Te8fPkSpqamqFOnDubOnYvSpUu/875ERESgb9++sLW1xZQpU9CxY8d33gciIiIiIk3ep/vmorZz507MmjULZcuWxW+//QYzM7Oi7hIR0XuNa2oQEREREREREREREZFe4JoaRERERERERERERESkFzj91DukUCjw/PlzWFlZQSaTFXV3iIiIiIjeGSEE4uLi4ObmBgMDfrcqN/zcQE
REREQfI20/MzCp8Q49f/4c7u7uRd0NIiIiIqIi8+TJE5QoUaKou/Fe4+cGIiIiIvqY5fWZgUmNd8jKygpA5i/F2tr6nbevUCgQEREBJycnfjuugBhD3TB+umH8dMP46Y4x1A3jpxvGTzfvQ/xiY2Ph7u4u3RNTzvi5Qb8xfrph/HTHGOqG8dMN46cbxk93jKFuijp+2n5mYFLjHVIOHbe2ti6yDyfJycmwtrbmm7qAGEPdMH66Yfx0w/jpjjHUDeOnG8ZPN+9T/DidUt74uUG/MX66Yfx0xxjqhvHTDeOnG8ZPd4yhbt6X+OX1mYG/WSIiIiIiIiIiIiIi0gtMahARERERERERERERkV5gUoOIiIiIiIiIiIiIiPQC19QgIiIiIiIiIiIiIgBARkYG0tLSirobBaJQKJCWlobk5GSuqVEAbzt+xsbGMDQ01LkeJjWIiIiIiIiIiIiIPnJCCLx8+RLR0dFF3ZUCE0JAoVAgLi4uz8WmSd27iJ+trS1cXFx0qp9JDSIiIiIiIiIiIqKPnDKh4ezsDHNzc71MCgghkJ6eDiMjI73sf1F7m/ETQiAxMRHh4eEAAFdX1wLXxaQGERERERERERER0UcsIyNDSmg4ODgUdXcKjEkN3bzt+MnlcgBAeHg4nJ2dCzwVFScWIyIiIiIiIiIiIvqIKdfQMDc3L+Ke0IdOeY3psm4LkxpERERERERERERExNEN9NYVxjXGpAYREREREREREREREekFJjWIiIiIiIiIiIiIiEgvMKlBRERERERERERERDrLyABOngR++y3z34yMou5R4QkNDYVMJsO1a9feeluenp5YunRprmVSU1Ph4+ODM2fO6NRWYGAgqlSpkmsZf39/dO7cOdcyt2/fRokSJZCQkKBTf7TBpAYRERERERERERER6WTXLsDTE2jSBOjbN/NfT8/M7W+Lv78/ZDIZZDIZTExMULp0acyZMwfp6ek615vXQ/yitnr1anh5eaFu3bpq+1JSUlClSpVCS8IsW7YMGzZskF43btwYo0ePVinj5+eH2rVrY8mSJTq3lxcmNYjonWm+pDlcp7mi+ZLmRd0V+gjx+qOixmuQihKvPyIiIiJ6m3btArp3B54+Vd3+7Fnm9reZ2GjdujVevHiBkJAQjB07FrNnz8bChQsLVFdGRgYUCkUh97DwCSGwYsUKDBgwQOP+CRMmwM3NrdDas7Gxga2tbZ7lAgICsGrVKp2TSnlhUoOI3olmi5sh6F4QACDoXhCaLW5WxD2ijwmvPypqvAapKPH6I22ExYThyosr0s+NiBsqr8Niwoq6i0RERPSOCQEkJOT9ExsLjByZWV5THQAwalRmOW3q01RPbkxNTeHi4oKSJUviq6++QrNmzfDHH38AAN68eYN+/frBzs4O5ubmaNOmDUJCQqRjN2zYAFtbW+zbtw9+fn4wNTXFF198gY0bN2Lv3r3SKJCTJ0+qtZuRkYEBAwbAy8sLcrkcZcuWxbJly1TKKEd8LFq0CK6urnBwcMCwYcOQlpYmlQkPD0eHDh0gl8vh5eWFLVu25HnOly9fxsOHD9GuXTu1fQcPHsSRI0ewaNEibUMIAFizZg08PDxgY2ODXr16ISYmRu08lP/9119/YdmyZVJ8QkNDAQAtWrRAVFQU/vrrr3y1nV9Gb7V2IiJkPkw5cfeEyrYTd0+g2eJmOD7ueBH1ij4WvP6oqPEapKLE64+0ERYThrIryiI5PTnHMmZGZrg3/B48bDzeYc+IiIioKCUmApaWutcjROYIDhsb7crHxwMWFgVvz8zMDFFRUQAyH8CHhIRg3759sLa2xsSJE9G2bVvcvn0bxsbGAIDExETMnz8fP/30ExwcHODq6oqkpCTExsZi/fr1AAB7e3s8f/5cpR2FQoESJUpgx44dcHBwwJkzZzB48GC4urqiZ8+eUrmgoCC4uroiKCgIDx48QK9evVClShUMGjRI6uPz588RFBQEY2NjjBw5EuHh4bme46
lTp1CmTBlYWVmpbH/16hUGDRqEPXv2wNzcXOuYPXjwANu3b8e+ffvw5s0bDBkyBEOHDtWYYFm2bBnu37+PChUqYNasWQAAJycnAICJiQmqVKmCU6dOoVmzt/dlKo7UIKK3StPDFCXlQxWit4XXHxU1XoNUlHj9kbZeJ77ONaEBAMnpyXid+Pod9YiIiIgo/4QQOHbsGI4ePYomTZpIyYyffvoJDRo0QOXKlbFlyxY8e/YMe/bskY5LS0vDypUrUbduXZQtWxbW1taQy+XSCBAXFxeYmJiotWdsbIyZM2eiRo0a8PLywqeffoqAgABs375dpZydnR1WrFgBX19ftG/fHu3atcPx45lfMLp//z4OHjyIH3/8EbVr10b16tWxbt06JCUl5Xqujx8/VpteSggBf39/fPnll6hRo0a+YpecnIxNmzahSpUqaNCgAb7//nts3boVL1++VCtrY2MDExMTmJubS/ExNDSU9ru5ueHx48f5aj+/OFKDiN6a3B6mKJ24ewJ159bHlv473lGv9JdCocDryEgkpGXAwIA56bx8urEHzj46nWsZXn/5w2swf3gNFi5ef/mj7fXHERtERERElBNz88xRE3n5+2+gbdu8yx04ADRsqF27+bF//35YWloiLS0NCoUCvXv3RmBgIE6cOAEjIyPUqlVLKuvg4ICyZcvizp070jYTExNUqlQpf43+vx9++AE///wzwsLCkJSUhNTUVFSpUkWlTPny5VUe+ru6uuKff/4BANy5cwdGRkaoXr26tN/X1zfP9SuSkpJgZmamsm358uWIi4vD5MmTczzOMsvQm88++wyrV68GAHh4eKB48eIQ/z/3V506daBQKHDv3j24uLjk2pfs5HI5EhMT83VMfjGpQUSFKiElAU+inqDPj31w7ck1rY45++g0SgUW3uJFRPnB64+KGq9BKkpMbBARERFRTmQy7aaBatkSKFEic1FwTethyGSZ+1u2BLI82y80TZo0wapVq2BiYgJXV1cAgJGR9o+95XI5ZDJZvtvdunUrxo8fj8WLF6NOnTqwsrLCwoULcf78eZVyymmulGQymc6LkTs6OkqJEaUTJ07g7NmzMDU1Vdleo0YNfPrpp9i4cSOuXbsmbbe2ttapDzmJioqCt7f3W6lbiUkNItJaekY6XsS8wJOoJwiLCkNYVJjqf795gsj4yKLuJhEREeVDXqMqiYiIiIhyY2gILFsGdO+emcDImthQ5gqWLn07CQ0AsLCwgI+PD4DMKZjS09MBAOXKlUN6ejrOnz+PunXrAgAiIyNx7949+Pn55VqniYkJMjIyci1z+vRp1K1bF0OHDpW2PXz4MF999/X1RXp6Oi5fvoxPPvkEAHDv3j1ER0fnelzVqlWxatUqCCGkhMz333+POXPmSGWeP3+OVq1aYdu2bdJoFWWcsgsLC8Pz58+lpNC5c+dgYGCAsmXLaiyfW3xu3ryJ7t2759p/XTGpQUQAMv/oRydGa05W/P9/P4t+hgxF7n/QAcDKzAoZigwkpmo/1KxxmcY4OPKwLqfwwVMoFAiPCIezkzOnXslDm+9b4eT9k1qX5/WnHV6D2uM1WPh4/Wkvv9dfU9+mb68zRERERPRR6NoV2LkTGDUqc1FwpRIlMhMaXbu++z6VLl0anTp1wqBBg7BmzRpYWVlh0qRJKF68ODp16pTrsZ6enjh8+DDu3bsHBwcH2GhY5bx06dLYtGkTDh8+DC8vL2zevBkXL16El5eX1n0sW7YsWrdujSFDhmDVqlUwMjLC6NGjIZfLcz2uSZMmiI+Px61bt1ChQgUAmVNIZaWcasrb2xslSpTItT4zMzP0798fCxcuxJs3bzBq1Cj07Nkzx6mnPD09cf78eYSGhsLS0hL29vYwMDBAaGgonj17hubNm2sbggJhUoPoI5Gcloynb57mOMoiLCoMCSkJedZjZGiEErYl4OHgAXc7d3jYe8DD3gPu9v/9t4155h96bdbUADIfpnDai7wpFA
qYmZjAzNSED/TyEPR1EK+/t4DXoPZ4DRY+Xn/a4/VH+ZXHlxDzXY6IiIg+Tl27Ap06AadOAS9eAK6uQIMGb2+EhjbWr1+PUaNGoX379khNTUXDhg1x4MABtSmhshs0aBBOnjyJGjVqID4+HkFBQfD09FQpM2TIEFy9ehW9evWCTCZDnz59MHToUBw8eDDffRw4cCAaNWqEYsWKYc6cOZg2bVquxzg4OKBLly7YsmUL5s2bl6/2NPHx8UHXrl3Rrl07REVFoX379li5cmWO5cePH4/+/fvDz88PSUlJePToETw9PfHbb7+hZcuWKFmypM59yo1MCE0zndHbEBsbCxsbG8TExLy1Octyo1AoEB4eDmdnfsOxoN7XGCoUCoTHhf+XoIjMnAoq63+/in2lVV1OVk7/JSuyJS48HDxQzLoYDA20+7+REAI2fcojzupOjmWs4soh5rdbBZq78GPzvl5/7yuFQgHjTr5QuITkWMbgZWmk7b3LeGqJ12D+8BosXLz+8ud9vP6K+l5Yn7zrWK394wqGXKmeZ7k11S5jcIdqb70/+o5/r3TD+OmOMdQN46cbxk83RRm/5ORkPHr0CF5eXmoLUOsT5fRTRkZGH/yzphs3bqBFixZ4+PChygLgutAlfqmpqShdujR+/fVX1KtXL8dyuV1r2t4Hc6QGkR6IS45DWOR/61ao/Pf/j7hIy0jLsx65ify/BIX9/ycsHP77b3d7d8hNch/elh+pqRkwPzkccdWWAMU1zCn4zBsWV4cjNTUDpqb8c0SFKz4+FTgwGGi1OsfrD4cHIz4+FdbW+nvDRu8vXoNUlHj9UX6kvHEE0swA4+ScC6WZZZYjIiIiovdCpUqVMH/+fDx69AgVK1Ys6u4gLCwMU6ZMyTWhUVj4FJGoiKWlp+FZ9DO1BbezJi6iE6PzrMdAZgA3Wze1qaCy/re9hf07zVKbmhrh4sXPEBHRDZ9u6Yi7sRekfZ8Ur4fVX/0OZ2dzJjTorbC2NsOZM33x4EFbzL32BW5Hn5f2+dnWwpTGP6PMTDs+zKO3htcgFSVef5QfFT08gJH3APPXgPdhoPkU4FkNYP+a/wolOqLiHo+cKyEiIiKid87f37+ouyDx8fHJcSHywsYniURvkRACkfGRmhff/v/ExfOY59BmFjg7c7sckxXu9u5ws3GDsVHu8wEWBXd3a7i7W2PQyR8w7k5vwO0h8Nwbs7tvRrVqxYq6e/SBq1XLDbVqueHTT8+h6aKmCLoXhCZlm+DE+LznmScqDLwGqSjx+iNtNWgAlLDywLNnHhAO9zM3ploBLzKnmpLJMhf5bNCgCDtJRERERPT/mNQg0kFiSiKevHmS4+LbT948QVJqUp71mBiZSGtXZE1WKF+727vDyszqHZyR7jIyFHj5MgFPnsQhLCwWYWGZ/27YcBOI+xIAYGgow7TXwWjZ0vODn9+Q3h/Hxh6T5iYlKgq8Bqko8fqj3BgaAsuWAd27q+9T3qotXVq0i3wSERERESkxqUGUgwxFBl7GvPxv8e3/X3Q75EUIwhPC8eTNE7yOf61VXS42Lv+tYZF9AW4HDzhZOunFAlpCCERHp0gJC9V/M//72bN4pKcrcq0nI0Pg4sVXOHIkFK1aeb2j3hMRERFRTrp2BXbuBPotABKybC9RIjOh0bVrUfWMiIiIiEgVkxr0URJCICYpRm1kRVjkf4tvP4t+hvSM9DzrsjS1lJITmhIXJexKwNTY9B2cle6Sk9Px9GmcyiiL7AmM+Pi8FyQ3NJShRAkruLtn/gQFPcGrVwnIOsuWoaEM06ZxtAYRERHR+6JrVyAoAljxEpDLBfYfV6BRIwOO0CAiIiKi9wqTGvRBSklLwdM3T9UW3M6auIhLjsuzHkMDQ5SwKyElK9zt3WFnbIdyHuXg6egJD3sP2Mht9OKhvEIhVKaFyv5vWFgcwsMTtarL0VEODw9reHhkJi08PKylfz08rODiYgFDw8yRJ4cPP8Jvv91Vq4OjNY
iIiIjeP8rbWhMTgcaNAT0YTExEREREHxkmNUjvKBQKRMRHqCYrsiUuXsa81KouR0tHjWtYKP/bxcYFhgb/fTVNoVBI81G/b9NFxcSkqCUpsiYunj6NQ1pa7tNCAYC5uZFakiJr4qJECSuYm2u3ILkQAtOmBcPAAFBoaNrAABytQURERERERERERFpjUoPeO/HJ8eoLbmdbfDs1PTXPeuQm8v+mgsqWrHC3d4e7nTvMTc3fwRnpLiUlHU+fxuPJE9VkRdb1LOLi8o6JoaEMbm6WuY6ysLMzK7QEQ2pqBsLC4jQmNIDMRMeTJ3FITc2AqSn/HBEREdG798MPP2DhwoV4+fIlKleujOXLl6NmzZoay+7atQtz587FgwcPkJaWhtKlS2PcuHH4/PPPpTL+/v7YuHGjynGtWrXCoUOH3up5EBERERF9LPgU8SMy5885CNwXiMCOgZjeYXqR9CE9Ix3Po5+rLL6dPXnxJvFNnvXIZDK42bipLridLXHhYOmgF9/+VygEwsMTNYyy+C+B8fJlQt4VAXBwkOeYrHB3t4KrqyWMjN7dCBNTUyNcvPgZIiKSAGSOdImKioK9vb000sXZ2ZwJDSIiIioS27Ztw9ixY7F69WrUqlULS5cuRatWrXDv3j04Ozurlbe3t8f//vc/+Pr6wsTEBPv370dAQACcnZ3RqlUrqVzr1q2xfv166bWpqX6sr0ZERERUUGExYXid+DrH/Y7mjvCw8XiHPSo6np6eGD16NEaPHq1TPevWrcO2bdtw5MgRneqRyWTYvXs3OnfurHF/aGgovLy8cPXqVVSuXDnHenr37o1PPvkE48aN06k/hYFPEj8Ss/fPxox9MwAAM/bNgEwmw7T20wq1DSEEohKickxWhEWF4Xn0cyhE3lMg2Zrbqi24LU0R5eABNxs3GBtpNwVSUYuNTVEZUZF9lMXTp/FITc3Isx653CjHKaGU/2o7LdS75O5uDXd3awDK6btk7+X0XURERPTxWbJkCQYNGoSAgAAAwOrVq/Hnn3/i559/xqRJk9TKN27cWOX1qFGjsHHjRgQHB6skNUxNTeHi4qJ1P1JSUpCSkiK9jo2NBZB576TIacjrWyKy3Ku/67Y/FAqFAkIIxq+AGD/dMYa6Yfx0w/jppijjp2xb+ZMfYTFh8P3BF8npyTmWMTMyw91hdws9sREREYHp06fjwIEDePXqFezs7FCxYkXMmDED9erVK7R2vLy8MGrUKJVExYYNGzBmzBi8eaP+Be2CxDGr5ORkTJs2Ddu3b9dYz9atW9G3b1906tQJu3fvzrO+3PpTokQJPH/+HI6OjgCAkydPomXLloiKioKtra1U7n//+x8aNWqEAQMGwMbGpmAnlqUvmu51tb32mdT4CMzePxvT96qOzFC+zk9iIyk1CU+inqgtuJ01cZGYmvdC08aGxqpTQdm5w8PBQ2Uxbmu5df5Osoikpmbg2bPsyQrVURYxMSl51mNgoJwWKudRFg4Ocr0YeUJERESkD1JTU3H58mVMnjxZ2mZgYIDmzZvj7NmzeR4vhMCJEydw7949zJ8/X2XfyZMn4ezsDDs7OzRt2hRz5syBg4NDjnXNmzcPM2fOVNseERGB5OScHw68DYlJmSNshRAIDw/nF1EKQKFQICYmBkIIxq8AGD/dMYa6Yfx0w/jppijjl5aWBoVCgfT0dKSnp+fr2Fdxr3JNaABAcnoyXsW9gpuFmy7dVNOtWzekpqZi3bp18PLywqtXr3D8+HG8evUq3+eRF2V8sr4GoLGd7GXza9u2bbCyskKtWrXU6gkNDcXXX3+N+vXra91ORkZGruWUCQ3ldQBA7Vrw9fVFqVKlsGnTJnz11VcFOS2pXoVCgcjISBgbq35BOy4uTqs6mNT4wGlKaChlTWwoFAq8jH2ptuB21sRFRFyEVm0Wsy6mcQ0L5X87W+nHt/QVCoGIiESVURZhYbEICYlAeHiqNC2UNklXe3uzXEdZuLm922mhiIiIiD
52r1+/RkZGBooVK6ayvVixYrh7926Ox8XExKB48eJISUmBoaEhVq5ciRYtWkj7W7duja5du8LLywsPHz7ElClT0KZNG5w9exaGhoYa65w8eTLGjh0rvY6NjYW7uzucnJxgbf1uv+xjLpcDMZnTFHB0bcEoFArIZDI4OTkxfgXA+OmOMdQN46cbxk83RRm/5ORkxMXFwcjICEZGmY+MhRBITMv7C8ypirzXeVWWS1Hk/eVfc2Nzrb7YGx0djeDgYAQFBaFRo0YAAG9vb3zyyScqD8ujo6Mxfvx47Nu3DykpKahRowaWLFkiTbX08OFDjBs3DufOnUNCQgLKlSuHuXPnonnz5gCAJk2a4PHjxxg/fjzGjx8PADhx4gQGDhwIADAxMQEATJ8+HYGBgQAyvyyjjGNe7Wuyc+dOdOjQQapDKSMjA/7+/ggMDERwcDCio6PVymgSHh6Ojh074uTJk3B1dcX8+fPRvXt3AJlJklKlSuHKlSuwtbVF69atAUCajrV///7S1KodOnTAjh07MGLEiDzbzImRkREMDAzg4OAAMzMzlX3ZX+dYR4Fbp/debgkNpel7p2PJkSWIT41HekbeWT0LU4v/poHKlqzwsPdAcbviMDPW7uIranFxqbkuvP30aRxSUvKeFsrU1DDXhbfd3a1gYWHyDs6IiIiIiN42KysrXLt2DfHx8Th+/DjGjh2LUqVKSVNT9e7dWypbsWJFVKpUCd7e3jh58iSaNWumsU5TU1ON624YGBi88wcaMtl/7RVF+x8KmUzG+OmA8dMdY6gbxk83jJ9uiip+BgYGkMlk0g8AJKYlwupbq0Jro8GGBlqVi58cDwsTizzLWVlZwdLSEnv37kWdOnVgamoKIYTUf+W/PXv2hFwux8GDB2FjY4M1a9agefPmuH//Puzt7ZGQkIC2bdvim2++gampKTZt2oSOHTvi3r178PDwwK5du1C5cmUMHjwYgwYNApC51trSpUsxffp03Lt3DwBgaWmp0ra27WsSHByMzz//XC25M3v2bDg7O2PgwIEIDg5WOc/cTJ8+Hd9++y2WLVuGzZs3o0+fPqhQoQLKlSun0md3d3ds27YNvXr1wr1792BtbQ25/L/ZY2rVqoW5c+ciNTW1wOvGKWOj6TrX9rpnUuMDpU1CQyk6KRoAYGhgiOK2xTUmK5Svbc1t9WIKpLS0DDx7Fp9tLYtYabTFkydxiI7OOzMskwFubpZZkhWWsLWVwc/PDSVLWsPDwxqOjpwWioiIiEjfODo6wtDQEK9evVLZ/urVq1zXwzAwMICPjw8AoEqVKrhz5w7mzZuntt6GUqlSpeDo6IgHDx7kmNQgIiIiovwzMjLChg0bMGjQIKxevRrVqlVDw4YN0b17d1SrVg1AZnLgwoULCA8Plx7CL1q0CHv27MHOnTsxePBgVK5cWWXUxOzZs7F7927s27cPw4cPh729PQwNDWFlZaVyn2hjYwOZTJbrvaM27WcXHR2NmJgYuLm5qdW1bt06XLt2Ld+x6tGjhzSyZPbs2Th69CiWL1+OlStXqpQzNDSUEi3Ozs4qa2oAgJubG1JTU/Hy5UuULFky3/0oLExqfKBm7J2Rr/IyyJC8MhlGhu//JSFE5rRQWaeEUo60UG578SJeq2mh7OyU00JpHmXh5mYJY+P/pgnIXOg6nEPxiYiIiPSciYkJqlevjuPHj6Nz584AMu/1jh8/juHDh2tdj0KhUFnkO7unT58iMjISrq6uunaZiIiI6J0yNzZH/OT4PMtde3kN9dfXz7NccEAwqrhU0apdbXXr1g3t2rXDqVOncO7cORw8eBALFy7Ejz/+iICAAFy/fh3x8fFq65slJSXh4cOHAID4+HgEBgbizz//xIsXL5Ceno6kpCSEhYVp3Y+caNN+dkn/v8ZZ1qmY4uLi8Pnnn+PHH3+U1r/Ibu7cuZg7d670+vbt2/DwyFyYvU6dOipl69SpU6DkiFwuBwAkJuY9Ldnb9P4/waYCmd
lpptYjNZTl35eERnx8arYkheqaFk+fxiM5Oe+pskxNDeHubpVtKqj/poRyd7eGlRWnhSIiIiL6WI0dOxb9+/dHjRo1ULNmTSxduhQJCQkICAgAAPTr1w/FixfHvHnzAGQu6F2jRg14e3sjJSUFBw4cwObNm7Fq1SoAmR+IZ86ciW7dusHFxQUPHz7EhAkT4OPjg1atWhXZeRIREREVhEwm02oaKLmxXKv65MZyrerLLzMzM7Ro0QItWrTA1KlTMWDAAAQGBiIgIADx8fFwdXXFyZMn1Y5TjkIYP348jh49ikWLFsHHxwdyuRzdu3dHaqp2a4XkRpv2s3NwcIBMJsObN2+kbQ8fPkRoaCg6dOggbVMu6G1kZIR79+7hyy+/RM+ePaX92Ud6FIaoqCgAgJOTU6HXnR/vx1NsKnTT2k8DAK0SG7M6zZLKv21paRl4/jw+xymhwsLi8OZNcp71yGSAq6tlrqMsnJy0W1SIiIiIiD5OvXr1QkREBKZPn46XL1+iSpUqOHTokLR4eFhYmMro3ISEBAwdOhRPnz6FXC6Hr68vfvnlF/Tq1QtA5nD9GzduYOPGjYiOjoabmxtatmyJ2bNnF3jOYSIiIiLKn3LlymHfvn0AgGrVquHly5cwMjKCp6enxvKnT5+Gv78/unTpAiAzEREaGqpSxsTEBBkZGXluy06b9rMzMTGBn58fbt++jZYtWwIAfH198c8//6iUmzp1KuLi4rBs2TK4u7vDxMQkxzU6zp07h379+qm8rlq1ao7tA9B4bjdv3kSJEiVyHC3yrjCp8QHTJrFRmAkNIQQiI5NynBLqyZM4PH8eD4Ui73mhbGxMVZIV/yUsMrcVL24FExPDPOshIiIiIsrN8OHDc5xuKvs36ubMmYM5c+bkWJdcLsfhw4cLs3tERERE7z1Hc0eYGZkhOT3nLyqbGZnB0bxwH4RHRkaiR48e+OKLL1CpUiVYWVnh4sWLWLx4MTp27AgAaN68OerUqYPOnTtjwYIFKFOmDJ4/f44///wTXbp0QY0aNVC6dGns2rULHTp0gEwmw7Rp06RREEqenp74+++/0bt3b5iamsLR0RGenp6Ij4/H8ePHUblyZZibm8PcXHXqLG3a16RVq1YIDg7G6NGjM+NnZoYKFSqolFGO9Mi+XZMdO3agRo0aqF+/PrZs2YILFy5g3bp1Gst6eHhAJpNh//79aNu2LeRyOSwtLQEAp06dkhItRYlJjQ9cbomN/CY0EhIyp4XKaZTFkydxSErKe1ooE5Os00KpTgmlTF5YW/ObbERERERERERERO87DxsP3Bt+D68TX+dYxtHcER42HoXarqWlJWrVqoXvvvsODx8+RFpaGtzd3fHFF19g6tSpADKn0Dpw4AD+97//ISAgABEREXBxcUHDhg2l0blLlizBF198gbp168LR0RETJ05EbGysSluzZs3CkCFDpGlIhRCoW7cuvvzyS/Tq1QuRkZGYMWMGAgMDVY7Tpn1NBgwYgBo1aiAmJgY2NjY6x2rmzJnYunUrhg4dCldXV/z222/w8/PTWLZ48eIIDAzEpEmTEBAQgH79+mHDhg1ITk7Gnj17cOjQIZ37oyuZENosp0yFITY2FjY2NoiJiYG1tfU7bXvWH7MwY99/i4fP7DgT0zv8l+hIT1fgxYv4XEdZREYmadWWi4tFLqMsrOHsbA4DA/2cFooLheuG8dMN46cbxk93jKFuGD/dMH66eR/iV5T3wvqmKGM1cu1WLH/RBzZRjRD13Qm+3wrgfXi/6TPGT3eMoW4YP90wfropyvglJyfj0aNH8PLyUlmgWt8IIZCeng4jIyO9n5a+R48eqFatGiZPnvzO2swtfqtWrcLu3btx5MgRndrI7VrT9j6YIzU+ErVMPgfilwCWMcCDqri5uQr6/LpfSlw8e6bdtFBWViYoWdI62xoW/yUwihe3hKkpLysiIiIiIiIiIiKiglq4cCH++OOPou6GxNjYGMuXLy/qbgBgUu
OjIITAtGnBgKtNZlLj38rYcfK+WjljYwOUKKE6JVT2URY2NpwWioiIiIiIiIiIiOht8vT0xIgRI4q6G5KBAwcWdRckTGp8BI4cCcXFi6+AjqrbBw2qhJYtS0oJjGLFLPR2WigiIiIiIiIiIiIi+vAxqfGBU47SMDSUISPLdkNDGa5de4U1a1ro/fxyRERERERERERERPRx4Io9HzjlKI2MDNX1MjIyBC5efIUjR0KLpmNERERERERERERERPnEpMYHTDlKwyCH37KBATBtWjCEyHuBcCIiIiIiIiIiIiKiosakxgcsNTUDYWFxUCg071cogCdP4pCamqG5ABERERERERERERHRe4RranzATE2NcPHiZ4iISAIA1F+wCkkAAgIqYHjHzwAAzs7mMDXlZUBERERERERERERE7z8+zf7Aubtbw93dGgBgYJi5ILiLqwWqVStWlN0iIiIiIiIiIiIiIso3Tj9FRERERERERERERJSL0NBQyGQyXLt27a235enpiaVLl+ZaJjU1FT4+Pjhz5oxObW3YsAG2tra5lgkMDESVKlVyLfP69Ws4Ozvj6dOnOvVHG0xqEBEREREREREREVGhOHbsMfz8fsaxY4/felv+/v6QyWSQyWQwMTFB6dKlMWfOHKSnp+tcb+fOnQunk2/J6tWr4eXlhbp160rb7t+/j06dOsHR0RHW1taoX78+goKCdG5r/PjxOH78uPRaU3wcHR3Rr18/zJgxQ+f28sKkBhERERERERERERHpTAiBKVP+xp07UZgy5W8IId56m61bt8aLFy8QEhKCsWPHYvbs2Vi4cGGB6srIyIBCoSjkHhY+IQRWrFiBAQMGqGxv37490tPTceLECVy+fBmVK1dG+/bt8fLlS53as7S0hIODQ57lAgICsGXLFkRFRenUXl6Y1CAiIiIiIiIiIiIiFUIIJCSk5utn374HuHjxFQDg4sVX2LfvQb7ryG8ixNTUFC4uLihZsiS++uorNGvWDH/88QcA4M2bN+jXrx/s7Oxgbm6ONm3aICQkRDpWOfXSvn374OfnB1NTU3zxxRfYuHEj9u7dK40COXnypFq7GRkZGDBgALy8vCCXy1G2bFksW7ZMpYxyRMOiRYvg6uoKBwcHDBs2DGlpaVKZ8PBwdOjQAXK5HF5eXtiyZUue53z58mU8fPgQ7dq1k7a9fv0aISEhmDRpEipVqoTSpUvj22+/RWJiIm7evJlnnXv27EGZMmVgZWWF1q1b48mTJ9K+rNNPBQYG5hif8uXLw83NDbt3786zPV1woXAiIiIiIiIiIiIiUpGYmAZLy+91qqNz5735PiY+fiQsLEwK3KaZmZk0UsDf3x8hISHYt28frK2tMXHiRLRt2xa3b9+GsbExACAxMRHz58/HTz/9BAcHB7i6uiIpKQmxsbFYv349AMDe3h7Pnz9XaUehUKBEiRLYsWMHHBwccObMGQwePBiurq7o2bOnVC4oKAiurq4ICgrCgwcP0KtXL1SpUgWDBg2S+vj8+XMEBQXB2NgYI0eORHh4eK7neOrUKSkBoeTg4ICyZcti06ZNqFatGkxNTbFmzRo4OzujevXqudaXmJiIb775Bhs3boShoSFGjhyJ3r174/Tp02plx48fjzt37qjFR6lmzZo4deqU2iiSwsSkBhERERERERERERHpNSEEjh07hqNHj2L48OFSMuP06dPSuhNbtmyBu7s79uzZgx49egAA0tLSsHLlSlSuXFmqSy6XIyUlBS4uLjm2Z2xsjJkzZ0qvvby8cPbsWWzfvl0lqWFnZ4cVK1bA0NAQvr6+aNeuHY4fP45Bgwbh/v37OHjwIC5cuIBPPvkEALBu3TqUK1cu13N9/Pgx3NzcVLbJZDIcO3YMnTt3hpWVFQwMDODs7IxDhw7Bzs4u1/rS0tKwYsUK1KxZE+np6diwYQP8/Pxw4cIF1KxZU6WspaVlrvFxc3PD1atXc21PV0xqEBEREREREREREZEKc3NjxM
eP1KqsEAKNGm3D9esRyMj4b/ooQ0MZKld2wl9/9YJMJtO63fzYv38/LC0tkZaWBoVCgd69eyMwMBAnTpyAkZERatWqJZVVjma4c+eOtM3ExASVKlXKV5tKP/zwA37++WeEhYUhKSkJqamp0jRNSuXLl4ehoaH02tXVFf/88w8A4M6dOzAyMlIZSeHr6wtbW9tc201KSoKZmZnKNiEEhg0bBmdnZ5w6dQpyuRw//fQTOnTogIsXL8LV1RXly5fH48eZC7g3aNAABw8eBAAYGRlJSZWsfbhz545aUiMvcrkciYmJ+Tomv5jUICIiIiIiIiIiIiIVMplM62mgDh9+hCtX1KdMysgQuHIlHKdPP0erVl6F3UUAQJMmTbBq1SqYmJjA1dUVQOZDem3J5XKtEy5Zbd26FePHj8fixYtRp04dWFlZYeHChTh//rxKOeU0V0oymUznxcgdHR2lxIjSiRMnsH//frx58wbW1tYAgJUrV+Lo0aPYuHEjJk2ahAMHDkjrecjlcp36kJOoqCg4OTm9lbqVuFA4ERERERERERERERWIEALTpgXDIIcnzQYGwLRpwfleAFxbFhYW8PHxgYeHh0oyo1y5ckhPT1dJMkRGRuLevXvw8/PLtU4TExNkZGTkWkY5rdXQoUNRtWpV+Pj44OHDh/nqu6+vL9LT03H58mVp27179xAdHZ3rcVWrVsXdu3dVYqocHWGQ7RdhYGAgJVFKliwJHx8f+Pj4oHjx4lKZ9PR0XLp0Sa0POU2DlVt8bt68iapVq+baf10xqUFEREREREREREREBZKamoGwsDjkNPhAoQCePIlDamruSYLCVrp0aXTq1AmDBg1CcHAwrl+/js8++wzFixdHp06dcj3W09MTN27cwL179/D69WtpdEP2+i9duoTDhw/j/v37mDZtGi5evJivPpYtWxatW7fGkCFDcP78eVy+fBkDBw7McxRFkyZNEB8fj1u3bknb6tSpAzs7O/Tv3x/Xr1/H/fv38fXXX+PRo0do165drvUZGxtjxIgROH/+PK5cuYKAgADUrl07x6mncopPYmIiLl++jJYtW+YrDvnFpAYRERERERERERERFYipqREuXvwMly9/nuPPxYufw9T03a+EsH79elSvXh3t27dHnTp1IITAgQMH1KaEym7QoEEoW7YsatSoAScnJ5w+fVqtzJAhQ9C1a1f06tULtWrVQmRkJIYOHVqgPrq5uaFRo0bo2rUrBg8eDGdn51yPcXBwQJcuXbBlyxZpm6OjIw4dOoT4+Hg0bdoUNWrUQHBwMPbu3auyCLom5ubmmDhxIj799FM0atQIlpaW2LZtW47lc4rP3r174eHhgQYNGuQjAvknE29r3A+piY2NhY2NDWJiYqR5zd4ly099kGD+EJNrrMLcIV++8/Y/BAqFAuHh4XB2dlYbykV5Y/x0w/jphvHTHWOoG8ZPN4yfbt6H+BX1vbA+KcpYjVy7Fctf9IFNVCNEfXeC77cCeB/eb/qM8dMdY6gbxk83jJ9uijJ+ycnJePToEby8vNQWoNYnQgikp6fDyMioQOtk6JMbN26gRYsWePjwISwtLQulTl3jV7t2bYwcORJ9+/bNsUxu15q298H860JEREREREREREREpEcqVaqE+fPn49GjR0XdFQDA69ev0bVrV/Tp0+ett/Xux/wQEREREREREREREZFO/P39i7oLEkdHR0yYMOGdtMWRGkREREREREREREREpBeY1CAiIiIiIiIiIiIiIr3ApMb/8/T0xNKlS4u6G0RERERERERERERElAO9S2oEBgZCJpOp/Pj6+kr7Q0ND1fYrf3bs2FHgdv39/VXqcnBwQOvWrXHjxo3COC0iIiIiIiIiIiIiIsqD3iU1AKB8+fJ48eKF9BMcHCztc3d3V9n34sULzJw5E5aWlmjTpo1O7bZu3Vqq8/jx4zAyMkL79u11PR0iIiIiIiIiIiIiItKCXiY1jIyM4OLiIv04OjpK+wwNDV
X2ubi4YPfu3ejZsycsLS21buOnn36Cra0tjh8/Lm0zNTWV6qxSpQomTZqEJ0+eICIiolDPj4iIiIiIiIiIiEgfzd4/GwaDDDB7/+yi7gp9oIyKugMFERISAjc3N5iZmaFOnTqYN28ePDw8NJa9fPkyrl27hh9++EHr+hcsWIAFCxbgyJEjqFmzpsYy8fHx+OWXX+Dj4wMHBweNZVJSUpCSkiK9jo2NBQAoFAooFAqt+1NoxP//I0TRtP8BUCgUjJ8OGD/dMH66Yfx0xxjqhvHTDeOnm/chfvzdEREREX34Zu+fjel7pwOA9O+09tOKskv0AdK7pEatWrWwYcMGlC1bVppaqkGDBrh58yasrKzUyq9btw7lypVD3bp1tap/4sSJ2Lx5M/766y+UL19eZd/+/ful0R4JCQlwdXXF/v37YWCgecDLvHnzMHPmTLXtERERSE5O1qo/hUmIzKxGYkICwsPD33n7HwKFQoGYmBgIIXL8vVPOGD/dMH66Yfx0xxjqhvHTDeOnm/chfnFxcUXSLhERERG9G1kTGkofY2LD09MTo0ePxujRo3WqZ926ddi2bRuOHDny1vsjk8mwe/dudOrUKccykyZNQkJCApYvX65TfwqD3iU1sq6LUalSJdSqVQslS5bE9u3bMWDAAJWySUlJ+PXXXzFtmnZvmsWLFyMhIQGXLl1CqVKl1PY3adIEq1atAgC8efMGK1euRJs2bXDhwgWULFlSrfzkyZMxduxY6XVsbCzc3d3h5OQEa2trrfpUmGQyGQDA3MICzs7O77z9D4FCoYBMJoOTkxMfqBQA46cbxk83jJ/uGEPdMH66Yfx08z7Ez8zMrEjaJSIiIqK3T1NCQ+ltJjYiIiIwffp0/Pnnn3j16hXs7OxQqVIlTJ8+HfXr1y+0djQlBjZs2IDRo0cjOjq60NpRSk5OxrRp07Bjxw6V7UuXLsWqVasQFhYGR0dHdO/eHfPmzdP5XvvFixews7MDAISGhqJMmTK4evUqqlSpIpUZP348SpUqhTFjxmh8dv4u6V1SIztbW1uUKVMGDx48UNu3c+dOJCYmol+/flrV1aBBA/z555/Yvn07Jk2apLbfwsICPj4+0uuffvoJNjY2+PHHHzFnzhy18qampjA1NVXbbmBgUDQfJmX//49MxocBOlDGjzEsGMZPN4yfbhg/3TGGumH8dMP46aao48ffGxEREZF+EUIgMTUxz3LfHvwWc/5Ufzaa1fS905GanopJbdSfuWZnbmIufTk7L926dUNqaio2btyIUqVK4eXLlzh69CgiIyO1Ov59tXPnTlhbW6NevXrStl9//RWTJk3Czz//jLp16+L+/fvw9/eHTCbDkiVLdGrPxcUFwH8z/Wji6OiIVq1aYdWqVVi4cKFO7elK7z9ZxMfH4+HDh3B1dVXbt27dOnTs2BFOTk5a1VWzZk0cPHgQc+fOxaJFi/Isr/xgmJSUlO9+ExEREREREREREb2vElMTYTncMs+fvBIaSnP+nKNVfdokUgAgOjoap06dwvz589GkSROULFkSNWvWxMSJE9GxY0eVcgMHDpRmz2natCmuX78u7X/48CE6deqEYsWKwdLSEp988gmOHTsm7W/cuDEeP36MMWPGQCaTQSaT4eTJkwgICEBMTIy0LTAwMMd+5ta+Jlu3bkWHDh1Utp05cwb16tVD37594enpiZYtW6JPnz64cOFCnrGKi4tDnz59YGFhgeLFi6utPy2TybBnzx4AQJkyZQAAVatWhUwmQ+PGjaVyHTp0wNatW/Ns723Tu6TG+PHj8ddffyE0NBRnzpxBly5dYGhoiD59+qiUe/DgAf7++28MHDgwX/XXrVsXBw4cwMyZM7F06VKVfSkpKXj58iVevnyJO3fuYMSIEYiPj1e7wIiIiIiIiIiIiIjo7bG0tISlpSX27NmDlJSUHMv16NED4eHhOHjwIC5fvoxq1aqhWbNmiIqKApD5pfm2bdvi+PHjuHr1Klq3bo
0OHTogLCwMALBr1y6UKFECs2bNwosXL/DixQvUrVsXS5cuhbW1tbRt/PjxBWpfk+DgYNSoUUNlW926dXH58mUpifHvv//iwIEDaNu2bZ6xWrhwISpXroyrV69i0qRJGDVqFI4ePaqx7JkzZwAAx44dw4sXL7Br1y5pX82aNfH06VOEhobm2ebbpHfTTz19+hR9+vRBZGQknJycUL9+fZw7d05tNMbPP/+MEiVKoGXLlvluo379+vjzzz/Rtm1bGBoaYsSIEQCAQ4cOSSNCrKys4Ovrix07dqhkq4iIiIiIiIiIiIj0nbmJOeJXxOdaRpupp7Ka2m5qnlNQmZuYa1WXkZERNmzYgEGDBmH16tWoVq0aGjZsiO7du6NatWoAMpMDFy5cQHh4uLRMwKJFi7Bnzx7s3LkTgwcPRuXKlVG5cmWp3tmzZ2P37t3Yt28fhg8fDnt7exgaGsLKykqapgkAbGxsIJPJVLZlp0372UVHRyMmJgZubm4q2/v27YvXr1+jfv36EEIgPT0dX375JaZMmZJnrOrVqyctt1CmTBmcPn0a3333HVq0aKFW1tHREQDg4OCgdm7KPj1+/Bienp55tvu26F1SQ9vhLXPnzsXcuXO1rjd7dqlhw4aIj//vTbthwwZs2LBB6/qIiIiIiIiIiIiI9JVMJoOFqUWuZWZ3ng0TI5McFwnPalanWYW+WHi3bt3Qrl07nDp1CufOncPBgwexcOFC/PjjjwgICMD169cRHx8PBwcHleOSkpLw8OFDAJkjNQIDA/Hnn3/ixYsXSE9PR1JSkjRSQxfatJ+dcqmD7It/nzx5EnPnzsXKlStRq1YtPHjwAKNGjcLs2bMxbdo0bNmyBUOGDJHKHzx4EA0aNAAA1KlTR6WuOnXqqM1SpA25XA4ASEzUboqwt0XvkhpERERERERERERE9H5QJipyS2y8jYSGkpmZGVq0aIEWLVpg6tSpGDBgAAIDAxEQEID4+Hi4urri5MmTasfZ2toCyFzu4OjRo1i0aBF8fHwgl8vRvXt3pKam6tw3bdrPzsHBATKZDG/evFHZPm3aNHz++efScgsVK1ZEQkICBg8ejP/973/o2LEjatWqJZUvXry4zv3PTjlllrZrWL8tTGoQERERERERERERUYHllth4mwkNTcqVK4d9+/YBAKpVq4aXL1/CyMgox+mSTp8+DX9/f3Tp0gVAZiIi+6w+JiYmyMjIyHNbdtq0n52JiQn8/Pxw+/ZtlaUVEhMTYWCgukS2oaEhAEAIASsrK1hZWWms89y5c2qvy5Url2P7ADSe282bN2FsbIzy5ctrdS5vi94tFE5ERERERERERERE75dp7adhVqdZKtveZkIjMjISTZs2xS+//IIbN27g0aNH2LFjBxYvXoyOHTsCAJo3b446deqgc+fOOHLkCEJDQ3HmzBn873//w6VLlwAApUuXxq5du3Dt2jVcv34dffv2hUKhUGnL09MTf//9N549e4bXr19L2+Lj43H8+HG8fv1a45RM2rSvSatWrRAcHKyyrUOHDli1ahW2bt2KR48e4ejRo5g2bRo6dOggJTdycvr0aSxYsAD379/HDz/8gB07dmDUqFEayzo7O0Mul+PQoUN49eoVYmJipH2nTp1CgwYNpGmoigqTGkRERERERERERESkM2ViQwbZWx+hYWlpiVq1auG7775Dw4YNUaFCBUyfPh1ffPEFVqxYASBzXZADBw6gYcOGCAgIQJkyZdC7d288fvwYxYoVAwAsWbIEdnZ2qFu3Ljp06IBWrVpJC40rzZo1C6GhofD29pamXqpbty6+/PJL9OrVC05OTliwYIFaH7VpX5MBAwbgwIEDKgmFqVOnYty4cZg6dSr8/PwwYMAAtGrVCmvWrMkzVuPGjcOlS5dQtWpVzJkzB0uWLEGrVq00ljUyMsKyZcuwZs0auLm5oVOnTtK+rVu3YtCgQXm297bJhBCiqDvxsYiNjYWNjQ1iYmJgbW39ztu3/N
QHCeYPMbnGKswd8uU7b/9DoFAoEB4eDmdnZ7XhXpQ3xk83jJ9uGD/dMYa6Yfx0w/jp5n2IX1HfC+uToozVyLVbsfxFH9hENULUdyf4fiuA9+H9ps8YP90xhrph/HTD+OmmKOOXnJyMR48ewcvLS22Ban0ihEB6ejqMjIwgk8mKujs66dGjB6pVq4bJkye/szZzi9/Bgwcxbtw43LhxA0ZGBV/VIrdrTdv7YP51ISIiIiIiIiIiIiJ6jyxcuBCWlpZF3Q1JQkIC1q9fr1NCo7AUfQ+IiIiIiIiIiIiIiEji6emJESNGFHU3JN27dy/qLkg4UoOIiIiIiIiIiIiIiPQCkxpEREREREREREREBC6/TG9bYVxjTGoQERERERERERERfcSMjY0BAImJiUXcE/rQKa8x5TVXEFxTg4iIiIiIiIiIiOgjZmhoCFtbW4SHhwMAzM3NIZPJirhX+SeEQHp6OoyMjPSy/0XtbcZPCIHExESEh4fD1tYWhoaGBa6LSQ0iIiIiIiIiIiKij5yLiwsASIkNfSSEgEKhgIGBAZMaBfAu4mdraytdawXFpAYRERERERERERHRR04mk8HV1RXOzs5IS0sr6u4UiEKhQGRkJBwcHGBgwJUX8uttx8/Y2FinERpKTGoQEREREREREREREYDMqagK48FzUVAoFDA2NoaZmRmTGgWgL/F7f3tGRERERERERERERESUBZMaRERERERERERERESkF5jUICIiIiIiIiIiIiIivcCkBhERERERERERERER6QUmNYiIiIiIiIiIiIiISC8wqUFERERERERERERERHqBSQ0iIiIiIiIiIiIiItILTGoQEREREREREREREZFeYFKDiIiIiIiIiIiIiIj0ApMaRERERERERERERESkF5jUICIiIiIiIiIiIiIivcCkBhERERERERERERER6QUmNYiIiIiI6KP1ww8/wNPTE2ZmZqhVqxYuXLiQY9ldu3ahRo0asLW1hYWFBapUqYLNmzerlBFCYPr06XB1dYVcLkfz5s0REhLytk+DiIiIiOijwaQGERERERF9lLZt24axY8dixowZuHLlCipXroxWrVohPDxcY3l7e3v873//w9mzZ3Hjxg0EBAQgICAAhw8flsosWLAA33//PVavXo3z58/DwsICrVq1QnJy8rs6LSIiIiKiDxqTGkRERERE9FFasmQJBg0ahICAAPj5+WH16tUwNzfHzz//rLF848aN0aVLF5QrVw7e3t4YNWoUKlWqhODgYACZozSWLl2KqVOnolOnTqhUqRI2bdqE58+fY8+ePe/wzIiIiIiIPlxGRd0BIiIiIiKidy01NRWXL1/G5MmTpW0GBgZo3rw5zp49m+fxQgicOHEC9+7dw/z58wEAjx49wsuXL9G8eXOpnI2NDWrVqoWzZ8+id+/eGutKSUlBSkqK9Do2NhYAoFAooFAoCnR+BSXEf+2967Y/FAqFAkIIxq+AGD/dMYa6Yfx0w/jphvHTHWOom6KOn7btMqlBREREREQfndevXyMjIwPFihVT2V6sWDHcvXs3x+NiYmJQvHhxpKSkwNDQECtXrkSLFi0AAC9fvpTqyF6ncp8m8+bNw8yZM9W2R0REvPNpqxKTkgBkJm3Cw8NhYMDB/fmlUCgQExMDIQTjVwCMn+4YQ90wfrph/HTD+OmOMdRNUccvLi5Oq3JMahAREREREWnJysoK165dQ3x8PI4fP46xY8eiVKlSaNy4cYHrnDx5MsaOHSu9jo2Nhbu7O5ycnGBtbV0IvdaeuVwOxAAymQzOzs58GFAACoUCMpkMTk5OjF8BMH66Ywx1w/jphvHTDeOnO8ZQN0UdPzMzM63KMalBREREREQfHUdHRxgaGuLVq1cq21+9egUXF5ccjzMwMICPjw8AoEqVKrhz5w7mzZuHxo0bS8e9evUKrq6uKnVWqVIlxzpNTU1hamqqsa13/WFSJvuvvaJo/0Mhk8kYPx0wfrpjDHXD+O
mG8dMN46c7xlA3RRk/bdvkb5aIiIiIiD46JiYmqF69Oo4fPy5tUygUOH78OOrUqaN1PQqFQloPw8vLCy4uLip1xsbG4vz58/mqk4iIiIiIcsaRGkRERERE9FEaO3Ys+vfvjxo1aqBmzZpYunQpEhISEBAQAADo168fihcvjnnz5gHIXPuiRo0a8Pb2RkpKCg4cOIDNmzdj1apVADK/1TZ69GjMmTMHpUuXhpeXF6ZNmwY3Nzd07ty5qE6TiIiIiOiDwqQGERERERF9lHr16oWIiAhMnz4dL1++RJUqVXDo0CFpoe+wsDCVIfAJCQkYOnQonj59CrlcDl9fX/zyyy/o1auXVGbChAlISEjA4MGDER0djfr16+PQoUNazw9MRERERES5Y1KDiIiIiIg+WsOHD8fw4cM17jt58qTK6zlz5mDOnDm51ieTyTBr1izMmjWrsLpIRERERERZcE0NIiIiIiIiIiIiIiLSC0xqEBERERERERERERGRXmBSg4iIiIiIiIiIiIiI9AKTGkREREREREREREREpBeY1CAiIiIiIiIiIiIiIr3ApAYREREREREREREREekFJjWIiIiIiIiIiIiIiEgvMKlBRERERERERERERER6gUkNIiIiIiIiIiIiIiLSC0xqEBERERERERERERGRXmBSg4iIiIiIiIiIiIiI9AKTGkREREREREREREREpBeY1CAiIiIiIiIiIiIiIr3ApAYREREREREREREREekFJjWIiIiIiIiIiIiIiEgvMKlBRERERERERERERER6gUkNIiIiIiIiIiIiIiLSC0xqEBERERERERERERGRXmBSg4iIiIiIiIiIiIiI9AKTGkREREREREREREREpBeY1CAiIiIiIiIiIiIiIr3ApAYREREREREREREREekFJjWIiIiIiIiIiIiIiEgvMKlBRERERERERERERER6gUkNIiIiIiIiIiIiIiLSC0xqEBERERERERERERGRXmBSg4iIiIiIiIiIiIiI9AKTGkREREREREREREREpBeY1CAiIiIiIiIiIiIiIr3ApAYREREREREREREREekFJjWIiIiIiIiIiIiIiEgvMKlBRERERERERERERER6gUkNIiIiIiIiIiIiIiLSC0xqEBERERERERERERGRXmBSg4iIiIiIiIiIiIiI9AKTGkREREREREREREREpBeY1CAiIiIiIiIiIiIiIr3ApMb/8/f3R+fOnYu6G0RERERERERERERElIMiTWoEBgZCJpOp/Pj6+qqVO3v2LJo2bQoLCwtYW1ujYcOGSEpK0rne/NiwYYNKfZaWlqhevTp27dqlU71ERERERERERERERKQdo6LuQPny5XHs2DHptZGRapfOnj2L1q1bY/LkyVi+fDmMjIxw/fp1GBjkno/Jq96CsLa2xr179wAAcXFxWL9+PXr27Ilbt26hbNmyOtdPREREREREREREREQ5K/KkhpGREVxcXHLcP2bMGIwcORKTJk2StmmTQMir3rxcvHgRbdu2xfjx4zFx4kQAgEwmk+p0cXHBnDlzsGjRIty4cUNjn1JSUpCSkiK9jo2NBQAoFAooFIoC963AxP//I0TRtP8BUCgUjJ8OGD/dMH66Yfx0xxjqhvHTDeOnm/chfvzdvd/CYsLwOvE1wtMeAQAyDONw5cUV6ctkjuaO8LDxKMouEhEREREBeA+SGiEhIXBzc4OZmRnq1KmDefPmwcMj82Y5PDwc58+fx6effoq6devi4cOH8PX1xTfffIP69esXuN68nDhxAl27dsWCBQswePBgjWUyMjKwadMmAEC1atU0lpk3bx5mzpyptj0iIgLJycla9eX/2LvzuKjq/Y/jr2HYZVFZBBTEFRR3UdNALffKTCvLupnmtbpdu5nZr7xlqS22WNmqbWp1WzQz292XXLNcSlBxRUQFBJRVQJj5/TEyOoEKDjiK7+fjcR4wZ77zPd/z4WQw7/meb1Uymy2pRn5eHmlpaZ
f8+DWByWQiKysLs9l8wdlCUpbqZx/Vzz6qn/1UQ/uofvZR/exzOdQvJyfHIceVC0vKSiLinQgKis/8jZLru4VOH3WyPnZ3didhTIKCDRERERFxOIeGGl26dGHOnDlERERw9OhRJk+eTGxsLHFxcXh7e7N//37AskbGtGnTaNeuHZ9++im9evUiLi6OZs2aXVS/5/Ptt98yfPhwPvroI+644w6b57KysvDy8gLg5MmTuLi48MEHH9CkSZNy+5owYQLjxo2zPs7OziY0NJSAgAB8fHwqXKeqYjAYAPCsVYvAwMBLfvyawGQyYTAYCAgI0BsqF0H1s4/qZx/Vz36qoX1UP/uofva5HOrn7u7ukOPKhaXnp9sEGuUpKC4gPT9doYaIiIiIOJxDQ40BAwZYv2/Tpg1dunShYcOGzJs3j1GjRlmnqD/wwAOMHDkSgPbt27N8+XJmzZrF1KlTL6rfc/ntt9/48ccfmT9/PrfcckuZ5729vdmyZQsA+fn5LFu2jAcffBA/Pz8GDhxYpr2bmxtubm5l9js5OTnmj0nD6S8Gg94MsENp/VTDi6P62Uf1s4/qZz/V0D6qn31UP/s4un76uYmIiIiISFVw+O2nzla7dm2aN2/O3r17AQgODgagZcuWNu1atGhBUlLSRfd7Lk2aNMHPz49Zs2Zx44034uLiYvO8k5MTTZs2tT5u06YNS5Ys4eWXXy431BARERERERERERERkapzWX1cKjc3l3379lnDjPDwcEJCQkhISLBpt3v3bho2bHjR/Z6Lv78/K1asYO/evQwdOpRTp05dsG+j0cjJkycrPBYREREREZHLSUlJxdq9u+k9tqdut67VJyIiIiLiCA4NNcaPH8/q1atJTExk/fr1DB48GKPRyLBhwwDLFPnHH3+ct956i/nz57N3714mTpzIrl27bG4j1atXL955550K93s+gYGBrFixgl27djFs2DCKi4utz5nNZlJSUkhJSeHAgQN88MEHLF68mEGDBlVhVURERERERC6drVsr1m7Wto9pM7MNLd9rybMrnyU+Lb56ByYiIiIiUg6H3n4qOTmZYcOGkZGRQUBAADExMWzcuJGAgABrm7Fjx1JQUMCjjz5KZmYmbdu2ZenSpTaLc+/bt4/09PRK9Xs+QUFBrFixgp49e3L33XfzxRdfAJaFvktne7i5udGwYUOmTJnCE088URXlEBERERERueTO+lPqvPxyupPtu5Fd6buY8usUpvw6hZYBLRnacihDo4bSIqBF9Q5URERERAQHhxpfffVVhdo9+eSTPPnkk+d8PjEx8aL6PducOXNsHgcHB9vc9mrEiBGMGDGi0v2KiIiIiIhczvz9gQosWZjxxRuQ2QS/bj/g03Ueh9wWsePYDiatnsSk1ZNoFdiKoS2HcnvU7UT6R1b7uEVERETk6nRZrakhIiIiIiIil1afa/2h2P38jYrd6Rvjj4eTLxkr/8GBF7+neGoaXks/oUH+jTgbXIhLi+OZVc/Q4t0WtJnRhud/fZ7dGbsvzUmIiIiIyFXDoTM1RERERERExLEa1Q1jZlQCD447932oZr7uzwPPhZGfD0uXwsKF8MMPtclYN5zcdcPB/Tiubb6jTsw80n2Wsj1tO9vTtjNx5UTa1mvL0Kih3N7ydpr5Nbt0JyYiIiIiNZJCDRERERERkavcA3eGEeAaxiOPQHLymf2hoTB9OgwZYnns6QmDBlm24mJYu9YScHz7bR2SNo0gddMI8MjE0GIhdWK+5kTdZfyZ+id/pv7JUyueon1Qe2vA0aRuk3JGIiIiIiJyfgo1REREREREhCFDLGHF6tUmEhKyiYjwoUcPJ4zG8ts7O0PPnpbtjTdg2zb49ltYuLAu27fcR+aW+8AjAyIX4tN1HrmBy9maspWtKVuZsHwCHYM7WgOORnUaXcIzFREREZErmUINERERERERAcBotIQULVsWEBjog1MFV2E0GKB9e8s2ZQrs3QvffQcLF/qxbt0osr
eOAs90iPwWj07zKAhaweajm9l8dDNPLHuC6JBo6yLj4bXDq/MURUREROQKp4XCRUREREREpEo1bQqPPQZr1sDRo/Dhh3BDT39c40Zz8v2lmF9NgR9m4prcC4PZiT+O/MH/Lfs/Gr3ZiC4fdeG19a+RlJXk6NMQERERkcuQQg0RERERERGpNvXqwT//CT/9BOnpMHcuDBsUgM+eByj6aBnmaUfhxxkYk67DYHZi0+FNjF86nobTG9L14668seENDmUdcvRpiIiIiMhlQqGGiIiIiIiIXBLe3jB0KHzxBRw7BosWwYP3BBKU/CAls1Zgfu0w/PQuhoM9wGxgY/JGxi0ZR9j0MLp93I3pG6eTnJ184QOJiIiISI2lUENEREREREQuOVdX6NcPZsyAw4dhwwb4v4eCaHbiIcyzV8Frh+Hnt+FgLJgNbEjewKOLHyX0jVBiZsXw1m9vcSTniKNPQ0REREQuMYUaV5Ei50wA1h752cEjEREREREROcPJCa65Bl5+GRISID4eXpgQTLRpDMz+FV4/BL+8CQdjAFh3aB2PLHqEBq83oPvs7ryz6R2O5hx18FmIiIiIyKWgUOMq8dyPz3HK9TgAa478wHM/PufgEYmIiIiIiJRlMEDLlvDf/8Lvv0NSErwztT69vP6D8dM1pwOO6ZDUDTNm1iSt4eFfHqb+6/XpOacn7/3+Him5KY4+DRERERGpJgo1rgLP/fgcz3z3jM2+Z757RsGGiIiIiIhc9kJD4d//hmXLIC0NPn2nAUPqP4LnV+vg9SRY9DocugYzZlYfXM2/f/439V+vz/WfXM+M32eQmpvq6FMQERERkSqkUKOGKy/QKKVgQ0REREREriR168I998A331gWGv/us1BGRD6K38IN8EYiLJ4GyZ0xmU2sTFzJQz8/RMjrIfT6tBfv//E+x/KOOfoURERERMROCjVqsPMFGqUUbIiIiIiIyJXI0xNuvhlmz4aUFFj1XUMe6fwYDZf+BtMPwJJX4XAnTGYTKw6s4MGfHiT4tWD6fNaHDzd/SHp+uqNPQUREREQugkKNGqoigUYpBRsiIiIiInIlc3aGHj1g+nQ4cAC2rAjnmd7jab1xE0zfD0tfhiMdKTGXsGz/Mu7/8X6CpgXR77N+fLzlYzLyMxx9CiIiIiJSQQo1aqhnv3u2WtuLiIiIiIhcjgwGaN8eJk+Gv/6CvX80Ytrg/yNmxx/w1l5YNhWOtqfEXMKS/Uv45w//pN60IPr/bwCzts4i82Smo09BRERERM5DoUYNNXnQ5GptLyIiIiJSE7z77ruEh4fj7u5Oly5d2LRp0znbfvjhh8TGxlKnTh3q1KlD7969y7QfMWIEBoPBZuvfv391n4acR5Mm8NhjsGYNpOxowof3PsmNyVtwmbEHlr0IR9tRYi5m8b5FjPp+FIGv1qP/ZzcwZ9scjp887ujhi4iIiMjfKNSooSbeNJEpg6ZUqO2UQVOYeNPEah6RiIiIiMjlZe7cuYwbN45nn32WLVu20LZtW/r160daWlq57VetWsWwYcNYuXIlGzZsIDQ0lL59+3L48GGbdv379+fo0aPW7csvv7wUpyMVUK8e/POf8OOPkLGnKfP+M4Fh2VupNSsBlj8PKW0sAcf+Xxj53UgCXqlH/09v4tM/P+VEwQlHD19EREREUKhRo1Uk2FCgISIiIiJXq9dff53Ro0czcuRIWrZsycyZM/H09GTWrFnltv/888956KGHaNeuHZGRkXz00UeYTCaWL19u087NzY2goCDrVqdOnUtxOlJJ3t5w++3wxReQuac5i556igf5k4C5O2HFFEhtRQmnWHzgJ+5deC8BL9ej35yb+d9f/yO7MNvRwxcRERG5ajk7egBSvUoDi/IWDVegISIiIiJXq6KiIjZv3syECROs+5ycnOjduzcbNmyoUB/5+fmcOnWKunXr2uxftWoVgYGB1KlTh+uvv57nn38ePz+/c/ZTWFhIYWGh9XF2tuUNc5PJhM
lkqsxpVQmTyYTZbHbIsR3F2Rn69LFsb5uas2nTUyxc+DRzf9hBktd8iJpHceAOlhz8gSUHf8CIKz3q92NEp9sZ2HwgPm4+1r6uxvpVJdXPfqqhfVQ/+6h+9lH97Kca2sfR9avocRVqXAXKCzYUaIiIiIjI1Sw9PZ2SkhLq1atns79evXrs2rWrQn088cQThISE0Lt3b+u+/v37M2TIEBo1asS+ffv473//y4ABA9iwYQNGo7HcfqZOncrkyWXXuDt27BgFBQWVOKuqYTKZyMrKwmw24+R0dU7ub9wYxo2DRx/1Y8+eh/jll3F8u3wfCcaFEDWXkoBdrDj8AysO/4DR7EaXur24u/1N9G3YB09nz6u+fvbQ9Wc/1dA+qp99VD/7qH72Uw3t4+j65eTkVKidQo2rxMSbJvLBB3+SHPgNwyIeUaAhIiIiImKHl156ia+++opVq1bh7u5u3X/nnXdav2/dujVt2rShSZMmrFq1il69epXb14QJExg3bpz1cXZ2NqGhoQQEBODj41Pua6qTyWTCYDAQEBCgNwOwrMMREwMv0IZDh9rw/fcT+XzpDjblzcPcch4l/rtZf/xn1q/4GaPZna7+A7izdV/ubngnPu6X/ud3pdP1Zz/V0D6qn31UP/uofvZTDe3j6Pqd/Xv1+SjUuIrUT7uF5B+uYei3gxw9FBERERERh/L398doNJKammqzPzU1laCgoPO+dtq0abz00kssW7aMNm3anLdt48aN8ff3Z+/evecMNdzc3HBzcyuz38nJyWF/jBsMBoce/3LVsCE8/DA8/HBrjh9vzQ8/TOHTxdtZnT6P4oh5lPjtYW3Gt6xd9S2PrBhH5zo38lCPoQyOuoFarrUcPfwrhq4/+6mG9lH97KP62Uf1s59qaB9H1q+ix9RPVkRERERErjqurq507NjRZpHv0kW/u3btes7XvfLKKzz33HMsWrSI6OjoCx4nOTmZjIwMgoODq2TccvmoUweGDzew7PM2ZC98noW9Erj5yFY8fp8AmU0ocTrJhqz53PP9UHyfD6TztKHM2TSf/FP5jh66iIiIyBVNoYaIiIiIiFyVxo0bx4cffsgnn3zCzp07+de//kVeXh4jR44EYPjw4TYLib/88stMnDiRWbNmER4eTkpKCikpKeTm5gKQm5vL448/zsaNG0lMTGT58uUMGjSIpk2b0q9fP4eco1waHh4waJCB795vx4kFzzO/+0buzNqMz19PQmZjSoz5/J73NSN/uR3v5wLoOPVO3v91ASdPnXT00EVERESuOLr9lIiIiIiIXJXuuOMOjh07xjPPPENKSgrt2rVj0aJF1sXDk5KSbKbAz5gxg6KiIm677Tabfp599lkmTZqE0Wjkr7/+4pNPPuHEiROEhITQt29fnnvuuXJvLyU1k7MzXHttMYMHt8Ng6MDWrS8y47stLNwzj/TAeZjqJLKlaC4PrpzLQ0u9iHIZyD+vGcro6/rj4VKx+0iLiIiIXM0UaoiIiIiIyFVrzJgxjBkzptznVq1aZfM4MTHxvH15eHiwePHiKhqZ1AQGA3ToYODDDh35kI7s3fsS73z7B/N3zONw7XmYaiex3fwlj2z4krGrvYk03My90UN5+Ia+eLoq4BAREREpz0XdfurUqVMcOnSIhIQEMjMzq3pMIiIiIiIiIjVO06YGpj/eieTZr5LyZCITAjbSKOUxyArF7JrDTpfPefLPQXhNDqTZE/cw6YsfyM4rdPSwRURERC4rFQ41cnJymDFjBj169MDHx4fw8HBatGhBQEAADRs2ZPTo0fz+++/VOVYRERERERGRGqFePQMvPtSF/TOmcWJSIs+HbyDi+KMYchpgds1hr+f/mLznZnxfCKTRo/fyxMc/kX68yNHDFhEREXG4CoUar7/+OuHh4cyePZvevXuzcOFCtm3bxu7du9mwYQPPPvssxcXF9O3bl/79+7Nnz57qHreIiIiIiIhIjeDr48RT917Drumvk//CQV5vtY
7WeY/glBcCbtkk1v6UV5JvIuDlejT490j+89YvJB1WwCEiIiJXpwqtqfH777/z66+/EhUVVe7znTt35r777mPmzJnMnj2bNWvW0KxZsyodqNgvK8sybfnPP9O45Rb9fERERERERC437m5OPHprNx69tRvFJa/z8ZL1fLh+HtuK5lPieZTDHnN4+/gc3n67DoEZgxnYZCiPDbmeFs1dHD10ERERkUuiQjM1vvzyy3MGGmdzc3PjwQcf5L777rN7YFK1zGYzhw7lAPC//+3EbDY7eEQiIiIiIiJyPs5GJx4YEMMfz71F0UvJfHrdr1zjNAaXgiDwOE5ag1l8XNiflh8HUefef3LXM0v47fdT6M89ERERqckuaqFwufIsWZJIXl4xAHv3nmDJkkTHDkhEREREREQqzMngxD3dY9kw8W1OvpDMvBtWEev+EK5FgeCZyYnGH/OlsR/XzA/G+677ueWxZSxdXkxxsaNHLiIiIlK1KhVqrFy5ktdee41169YB8P777xMWFkZAQACjR4/m5MmT1TJIsY/ZbGbixLXWx05OMHHiWs3WEBERERERuQIZnYzc3qkHvz7xLvnPHeG7wSu43udB3IoDwDODvMgP+c6nD30XB+N154P0fWAF33xbTH6+o0cuIiIiYr8Khxoffvghffr0YebMmfTq1YupU6fy2GOPceONNzJ06FDmzZvH5MmTq3OscpGWLEnk999TrY9NJvj991TN1hAREREREbnCGZ2M3NzmOpY/OoPcyUf4+Y7l9Pd/ALcSf6iVTmHr91ka0ovb1tfH565/EXPPSmbPKSEjw9EjFxEREbk4FQ413nzzTd544w327NnDwoULeeaZZ3j33XeZMWMG7777Lh999BHz58+vzrHKRSidpWE0Gmz2G40GzdYQERERERGpQZydnBkQeT2//HsmuZOOsuiupdwUPBp3kx94pVHSfibrml7PffH1Cbj337QfvJrpb5aQlOTokYuIiIhUXIVDjf3793PzzTcD0L9/fwwGA507d7Y+36VLFw4dOlT1IxS7lM7SKCmxDS9KSsyarSEiIiIiIlJDOTs5069Zb364/wOynznKorsXc0vYKNzNdcArFXOn99jWriePHmpAwwcfJqLPGqY8ZyIuDi00LiIiIpe1CocaBQUFeHh4WB+7ubnh5uZm87hYK5BdVkpnaTid46estTVERERERERqPhejC/2a9uXbkR+RPTGVX+7+hduajMSD2uCdAl3eYXdMd549EUrrxx8h9Np1jH/cxLp1ltsXi4iIiFxOKhxqGAwGcnJyyM7OJisrC4PBQG5uLtnZ2dZNLi9FRSUkJeWc85dQkwkOHcqhqKjk0g5MREREREREHMLF6EL/pv35+h+zOPF0Kj/f9TN3RozAw+ALPkfgmrc43C+G10xhxDz3KP7t1zP6fhO//AKFhY4evYiIiAg4V7Sh2WymefPmNo/bt29v89hgMJT3UnEQNzdnfv/9Hxw7dhIAk8lEZmYmdevWxen09I3AQE/c3Cp8GYiIiIiIiEgN4Wp0ZUCzAQxoNoDC4pks27+Mz/+cx8JdCznpcxi6Tuc40/koqwEfvXk7nmOHclP7Lgy+xcANN4CPj6PPQERERK5GFX43e+XKldU5DqkmoaE+hIZaftM0mUykpRkIDAy0hhoiIiIiIiIibs5u3Nj8Rm5sfiOFxYUs2beEr7bPY+Gu78j3TYaub5Df9Q3mnQhj3uzbMT45lN4tOjFksIGbb4agIEefgYiIiFwtKhxq9OjRozrHISIiIiIiIiKXATdnNwZGDGRgxEAKigtYsm8Jc+PmsXDnd+TXToJur1HS7TUWn2jI4vlDeWDyUK5p2JEhgw3ccgs0a+boMxAREZGaTB/XFxEREREREZFyuTu7c3PEzXx+6/9IfyKNb+/4lmGthuHhXAtqH4RrX4X7O7GxcxP+b8mTNO+xmahWZp5+Gv74A8xmR5+BiIiI1DQVnqlhNBor1K6kRItOi4iIiI
iIiNQ0Hi4e3BJ5C7dE3sLJUyf5Ze8vzIufx/cJP3CyzgGIeRliXmZHZmN2bBzKCx8Ppb5zOwbfYmDwYIiNBReXc/dfUgKrV0NCgjsREdCjB1TwrQgRERG5ilRqofCGDRty77332iwQLiIiIiIiIiJXFw8XD4a0GMKQFkPIP5XPL3t+Yd6OefyY8CP5dfdD7EsQ+xKHM5ryTvxQ3vl6KLWL2nDzQMstqvr1A0/PM/0tWACPPALJyU5AbQAaNIA334QhQxxxhiIiInK5qnCosWnTJj7++GPefPNNGjVqxH333cfdd99NnTp1qnN8IiIiIiIiInIZ83Tx5NaWt3Jry1vJK8rj5z0/M2/HPH7a/RMn/fZC9xeh+4ucSG/Op/FD+fRft+Oe3Zp+fS0BR2ZJEo9NTLd0Fnym3+QSuHUMzCzy54E7wxxybiIiInL5qXCoER0dTXR0NG+88Qbz589n9uzZPPHEEwwcOJBRo0bRp0+f6hyniIiIiIiIiFzmarnW4vao27k96nZyi3L5afdPzNsxj5/3/EyB/27o8Tz0eJ6C9Ai+ix/Kd8/FwrCb4YGCc/b5YLw7fTMTaFRXwYaIiIhcxELh7u7u/OMf/2D58uXExcWRlpZG//79yczMrI7xiYiIiIiIiMgVyMvVizta3cE3Q78hbXwaXwz5glsib8HN6Ab+CdDjORjeF1zOHWgA4FzA0nXpl2bQIiIictmr8EyNsyUnJzNnzhzmzJlDfn4+jz/+OD4+PlU9NhERERERERGpAbzdvBnWehjDWg8juzCbH3f/yLz4efyY8BMlFF/w9enKNEREROS0Cs/UKCoqYu7cufTt25dmzZqxZcsWpk+fzqFDh3jppZdwdr6ofEREREREREREriI+bj7c1fouFt65kGltl1XoNf7+1TwoERERuWJUOIkIDg7G29ube++9l/fee4/AwEAA8vLybNppxoaIiIiIiIiIVMS10d7w54XbtW9f/WMRERGRK0OFQ43jx49z/PhxnnvuOZ5//vkyz5vNZgwGAyUlJVU6QBERERERERGpmYzGqm0nIiIiNV+FQ42VK1dW5zhERERERERERERERETOq8KhRo8ePapzHCIiIiIiIiIiIiIiIudVoYXC/75uRlW3FxEREREREZGrj7+nP+7O7udt4+7sjr+nVgoXERERiwqFGk2bNuWll17i6NGj52xjNptZunQpAwYM4K233qqyAYqIiIiIiIhIzRTmG0bCmAQ237+Zzfdvxj2nBQD3N3jDui9hTAJhvmEOHqmIiIhcLip0+6lVq1bx3//+l0mTJtG2bVuio6MJCQnB3d2d48ePs2PHDjZs2ICzszMTJkzggQceqO5xi4iIiIiIiEgNEOYbZg0tnEpqARDs3pQOwR0cOSwRERG5TFUo1IiIiOCbb74hKSmJr7/+mjVr1rB+/XpOnjyJv78/7du358MPP2TAgAEYjcbqHrOIiIiIiIiIiIiIiFyFKrxQOEBYWBiPPfYYjz32WHWNR0REREREREREREREpFwVWlNDRERERERERERERETE0RRqiIiIiIiIiIiIiIjIFUGhhoiIiIiIXFEOHTpEcnKy9fGmTZsYO3YsH3zwgQNHJSIiIiIil4JCDRERERERuaLcddddrFy5EoCUlBT69OnDpk2beOqpp5gyZYqDRyciIiIiItVJoYaIiIiIiFxR4uLi6Ny5MwDz5s2jVatWrF+/ns8//5w5c+Y4dnAiIiIiIlKtKh1qhIeHM2XKFJKSkqpjPCIiIiIiIud16tQp3NzcAFi2bBk333wzAJGRkRw9etSRQxMRERERkWpW6VBj7NixLFiwgMaNG9OnTx+++uorCgsLq2NsIiIiIiIiZURFRTFz5kzWrFnD0qVL6d+/PwBHjhzBz8/PwaMTEREREZHqdFGhxrZt29i0aRMtWrTg4YcfJjg4mDFjxrBly5bqGKOIiIiIiIjVyy+/zPvvv0/Pnj0ZNmwYbdu2BeD777+33p
ZKRERERERqpoteU6NDhw689dZbHDlyhGeffZaPPvqITp060a5dO2bNmoXZbK7KcYqIiIiIiADQs2dP0tPTSU9PZ9asWdb9999/PzNnznTgyEREREREpLo5X+wLT506xbfffsvs2bNZunQp11xzDaNGjSI5OZn//ve/LFu2jC+++KIqxyoiIiIiIsLJkycxm83UqVMHgIMHD/Ltt9/SokUL+vXr5+DRiYiIiIhIdap0qLFlyxZmz57Nl19+iZOTE8OHD+eNN94gMjLS2mbw4MF06tSpSgcqIiIiIiICMGjQIIYMGcKDDz7IiRMn6NKlCy4uLqSnp/P666/zr3/9y9FDFBERERGRalLp20916tSJPXv2MGPGDA4fPsy0adNsAg2ARo0aceedd1bZIEVEREREREpt2bKF2NhYAObPn0+9evU4ePAgn376KW+99ZaDRyciIiIiItWpUjM1SkpKmDVrFjfffLN1qnd5atWqxezZs+0enIiIiIiIyN/l5+fj7e0NwJIlSxgyZAhOTk5cc801HDx40MGjExERERGR6lSpmRpGo5EHHniAEydOVNNwREREREREzq9p06YsXLiQQ4cOsXjxYvr27QtAWloaPj4+Dh6diIiIiIhUp0rffqpVq1bs37+/OsbiUD179mTs2LGOHoaIiIiIiFzAM888w/jx4wkPD6dz58507doVsMzaaN++vYNHJyIiIiIi1anSocbzzz/P+PHj+fHHHzl69CjZ2dk2W3WbNGkSBoPBZvv7mh4AGzZs4Prrr6dWrVr4+PjQvXt3Tp48WWXH9fX1JTY2ltWrV9tzOiIiIiIiUkm33XYbSUlJ/PHHHyxevNi6v1evXrzxxhsOHJmIiIiIiFS3Sq2pAXDDDTcAcPPNN2MwGKz7zWYzBoOBkpKSqhvdOURFRbFs2TLrY2dn29PYsGED/fv3Z8KECbz99ts4Ozvz559/4uRU6QznnMfNzMxk2rRp3HTTTSQnJ+Pr62tX3yIiIiIiUnFBQUEEBQWRnJwMQIMGDejcubODRyUiIiIiItWt0qHGypUrq2McleLs7ExQUNA5n3/00Uf5z3/+w5NPPmndFxERUalj/PTTT9x1112899573H333WWOGxQUxJQpU5g9eza7d++mU6dOZfooLCyksLDQ+rh0JovJZMJkMlVqPFXBZDJhNpsdcuyaQjW0j+pnH9XPPqqf/VRD+6h+9lH97HM51K8qj20ymXj++ed57bXXyM3NBcDb25vHHnuMp556yu4PM4mIiIiIyOWr0qFGjx49qmMclbJnzx5CQkJwd3ena9euTJ06lbCwMMCyOOBvv/3G3XffTbdu3di3bx+RkZG88MILxMTEVKj/L774ggcffJAvvviCm266qdw2hYWFzJ49m9q1a58zMJk6dSqTJ08us//YsWMUFBRU8GyrjslkIisrC7PZrD/0LpJqaB/Vzz6qn31UP/uphvZR/eyj+tnncqhfTk5OlfX11FNP8fHHH/PSSy9x7bXXArB27VomTZpEQUEBL7zwQpUdS0RERERELi+VDjUATpw4wccff8zOnTsBy22Z7rvvvktyC6YuXbowZ84cIiIiOHr0KJMnTyY2Npa4uDi8vb2ti5hPmjSJadOm0a5dOz799FN69epFXFwczZo1O2//7777Lk899RQ//PBDmQBn+/bteHl5AZCfn4+3tzdz587Fx8en3L4mTJjAuHHjrI+zs7MJDQ0lICDgnK+pTiaTCYPBQEBAgN4MuEiqoX1UP/uofvZR/eynGtpH9bOP6mefy6F+7u7uVdbXJ598wkcffcTNN99s3demTRvq16/PQw89pFBDRERERKQGq3So8ccff9CvXz88PDys96x9/fXXeeGFF1iyZAkdOnSo8kGebcCAAdbv27RpQ5cuXWjYsCHz5s1j1KhR1mntDzzwACNHjgSgffv2LF++nFmzZjF16tRz9j1//nzS0tJYt25dubeTioiI4PvvvwcsnzSbO3cut9
9+OytXriQ6OrpMezc3N9zc3Mrsd3JyctgfkwaDwaHHrwlUQ/uofvZR/eyj+tlPNbSP6mcf1c8+jq5fVR43MzOTyMjIMvsjIyPJzMyssuOIiIiIiMjlp9J/WTz66KPcfPPNJCYmsmDBAhYsWMCBAwe46aabGDt2bDUM8fxq165N8+bN2bt3LwDBwcEAtGzZ0qZdixYtSEpKOm9f7du3JyAggFmzZmE2m8s87+rqStOmTWnatCnt27fnpZdeon79+kyfPr1qTkZERERERC6obdu2vPPOO2X2v/POO7Rp08YBIxIRERERkUvlomZqfPjhhzg7n3mps7Mz//d//1fubIXqlpuby759+7jnnnsACA8PJyQkhISEBJt2u3fvtpnlUZ4mTZrw2muv0bNnT4xGY7l/KP2d0Wjk5MmTF38CIiIiIiJSKa+88go33ngjy5Yto2vXrgBs2LCBQ4cO8fPPPzt4dCIiIiIiUp0qPVPDx8en3BkPhw4dwtvbu0oGdT7jx49n9erVJCYmsn79egYPHozRaGTYsGGAZVr9448/zltvvcX8+fPZu3cvEydOZNeuXYwaNeqC/Tdv3pyVK1fyzTfflJl5UlxcTEpKCikpKezZs4fnn3+eHTt2MGjQoOo4VRERERERKUePHj3YvXs3gwcP5sSJE5w4cYIhQ4YQHx/PZ5995ujhiYiIiIhINar0TI077riDUaNGMW3aNLp16wbAunXrePzxx63BQnVKTk5m2LBhZGRkEBAQQExMDBs3biQgIMDaZuzYsRQUFPDoo4+SmZlJ27ZtWbp0KU2aNKnQMSIiIlixYoV1xsZrr70GQHx8vPX2Vp6enjRp0oQZM2YwfPjwqj9RERERERE5p5CQkDILgv/55598/PHHfPDBBw4alYiIiIiIVLdKhxrTpk3DYDAwfPhwiouLAXBxceFf//oXL730UpUP8O+++uqrCrV78sknefLJJyvc76pVq2wet2jRgtTUVOvjSZMmMWnSpAr3JyIiIiIiIiIiIiIiVavSoYarqytvvvkmU6dOZd++fYBlLQpPT88qH5yIiIiIiIiIiIiIiEipSocapTw9PWndunVVjkVEREREREREREREROScKh1qFBQU8Pbbb7Ny5UrS0tIwmUw2z2/ZsqXKBiciIiIiIlJqyJAh533+xIkTl2YgIiIiIiLiMJUONUaNGsWSJUu47bbb6Ny5MwaDoTrGJSIiIiIiYsPX1/eCzw8fPvwSjUZERERERByh0qHGjz/+yM8//8y1115bHeMREREREREp1+zZsx09BBERERERcbBKhxr169fH29u7OsYiIjXQhkMb2H9iP8WnYMlSM4cOFREa6krfPgacXaBx7cZ0De3q6GFKDaXrTxxN16A4kq4/ERERERGpiSodarz22ms88cQTzJw5k4YNG1bHmESkhthwaAPdZnWz3RkAFMAXP5zZtf6+9XpTRaqcrj9xNF2D4ki6/kREREREpKZyquwLoqOjKSgooHHjxnh7e1O3bl2bTUSk1P4T+6u0nUhl6PoTR9M1KI6k609ErkQlJZYNIDHxzPciIiIiZ6t0qDFs2DAOHz7Miy++yNtvv80bb7xhs4mIlCo+VbXtRCpD1584mq5BcSRdfxX37rvvEh4ejru7O126dGHTpk3nbPvhhx8SGxtLnTp1qFOnDr179y7T3mw288wzzxAcHIyHhwe9e/dmz5491X0aIle8BQsgPBwKCyyPP/nEifBwy34RERGRs1X69lPr169nw4YNtG3btjrGIyI1yNJlFWs3Zvb7PPfF0uodTA1RUlKC0Wh09DCuCKmF+8H/wu10/VWOrsGKq/g1OJMpXyyp/gHVALr+Ki6tgtff0mVwb4fqH8/lau7cuYwbN46ZM2fSpUsXpk+fTr9+/UhISCAwMLBM+1WrVjFs2DC6deuGu7s7L7/8Mn379iU+Pp769esD8Morr/DWW2/xySef0KhRIyZOnEi/fv3YsWMH7u7ul/oURa
4ICxbAbbeB2Wy7//Bhy/7582HIEMeMTURERC4/lQ41IiMjOXnyZHWMRUSuYAdTT/DzH/Gs2xPP9pQ4DhXEc9z9D/C48Gtz/deQy5rqH6RcXbwr1kzXn1SbCl+Da8llbfWORa4+Fbz+0lKrdxiXu9dff53Ro0czcuRIAGbOnMlPP/3ErFmzePLJJ8u0//zzz20ef/TRR3zzzTcsX76c4cOHYzabmT59Ok8//TSDBg0C4NNPP6VevXosXLiQO++8s9xxFBYWUlhYaH2cnZ0NgMlkwmQyVcm5VobJZMJsNjvk2DWB6lc5JSXwyCOG04GGweY5sxkMBjNjx8LAgWaUa1eMrkH7qH72Uf3so/rZTzW0j6PrV9HjVjrUeOmll3jsscd44YUXaN26NS4uLjbP+/j4VLZLEbmCpGTm8ssfO/h1VxzbU+JJzI/juEs8Jq/DZxp5UKEwo1Rwxp20q9e+ysda85gpLCzCzc2Vv//BJ2VtS93KUb+vLtguOONO2gddxR9TrgyzmcKiQtxc3cCga/BCtqZsqdA1GJIxjPbBugYvyGymsLAQNzddfxWx9egWjvh9ecF2gfUuwWAuU0VFRWzevJkJEyZY9zk5OdG7d282bNhQoT7y8/M5deqUdW3BAwcOkJKSQu/eva1tfH196dKlCxs2bDhnqDF16lQmT55cZv+xY8coKCiozGlVCZPJRFZWFmazGSenSt+x+Kqn+lXO+vWuJOfkQnC6ZYdLnuVrnb0QvAUzcCjbnx9+8KJbtyKHjfNKomvQPqqffVQ/+6h+9lMN7ePo+uXk5FSoXaVDjf79+wPQq1cvm/1msxmDwUCJVvISqREys0/yyx87WbMrnq1H4kjMiyfDGEeJ98EzjdxOb6cZcxtQuziKcM9WtAmKwsk3mY8PPHPBY00dcRP3dri76k+ihjGZTKSlpREYGKj/MVfAJ1s+Z8QPF35DWddfxekarJyKXoMvjrhR12AF6PqrHMv1d+FQo0/vCzapsdLT0ykpKaFePdtkp169euzatatCfTzxxBOEhIRYQ4yUlBRrH3/vs/S58kyYMIFx48ZZH2dnZxMaGkpAQIBDPjRmMpkwGAwEBATov7eLoPpVzqHsJBjTAlz+FuDd8MiZ70+58/Sruxg9NJQ+faBFC+Xb56Nr0D6qn31UP/uofvZTDe3j6PpV9HatlQ41Vq5cWenBiMjlKzuvkCVbEli9I56th+PZlxNHhlM8p7z3geH0TW1dgNpnXuOUF4RvURRhHlG0qdeKa5tH0b9jSxrWq23T9+fbP+fjAxceg7PLhduIVFZFrytdf1JddA2KI+n6q34vvfQSX331FatWrbJ7rQw3NzfLLKS/cXJyctgf4waDwaHHv9KpfhXn4ZcJBy8wI8mlgJ0HMxg3riEAISHQpw/07Qu9e0M5S+Bc9XQN2kf1s4/qZx/Vz36qoX0cWb+KHrPSoUaPHj0qPRgRcbz8glOs+HMvK+Li2HIonr3Z8RwjjiLvPeB0eoaVEZvwwnDSD5+CVoS6RREVGMW1zVrRv0MUzRr4VeiYjWs3rtJ2IpWh608cTdegOJKuvwvz9/fHaDSSmmq7sEhqaipBQUHnfe20adN46aWXWLZsGW3atLHuL31damoqwcHBNn22a9eu6gYvUoO0bw9suXC7hx+GXSthzRo4cgQ++cSyAbRteybkiIkBj0rcCldERESuPJUONQDWrFnD+++/z/79+/n666+pX78+n332GY0aNSImJqaqxygilVB0qoTVf+1nRVw8vx+MY09WPGnmeAq8doHxlKWRAfA960UFvnifjKK+ayta+kdxTZMoBnRoRcuGgTg5Xfy87q6hXVl/33r2n9hP8SlYstTMoUNFhIa60rePAWcXy5spXUO72nXOIuXR9SeOpmtQHEnX34W5urrSsWNHli9fzi233AJYptsvX76cMWPGnPN1r7zyCi+88AKLFy8mOjra5rlGjRoRFBTE8uXLrSFGdnY2v/32G//617
+q61RErmgVXfx7xAjoMAFOnoS1a2HpUsu2bRv8+adlmzYN3NwgNtYScPTpA23agD6oKyIiUrNUOtT45ptvuOeee7j77rvZsmULhYWFAGRlZfHiiy/y888/V/kgRaSs4hITG3cmsWRbHL8fjCchM44UUzwna+20vR/t2bdhLqpFrfwoQpyjaOHXis6NoujbLoqOzerbFV6cT9fQrtY3TO5pp/uhy6Wl608cTdegOJKuvwsbN24c9957L9HR0XTu3Jnp06eTl5fHyJEjARg+fDj169dn6tSpALz88ss888wzfPHFF4SHh1vXyfDy8sLLywuDwcDYsWN5/vnnadasGY0aNWLixImEhIRYgxMRuTgjFo6gf9P+xIbF0u3abvTpY5k9npYGy5adCTkOH7Y8XrbM8rqAAMstqkpDjvr1HXgSIiIiUiUqHWo8//zzzJw5k+HDh/PVV2cWv7z22mt5/vnnq3RwIgImk5nNew6zZFs8G/fHkZAZz5HiOPI8d4Br3pmGZ4cXp9zxyGtJsDGK5nWi6BLeit5to7imRRjORr2RISIiIgJwxx13cOzYMZ555hlSUlJo164dixYtsi70nZSUZBMCzZgxg6KiIm677Tabfp599lkmTZoEwP/93/+Rl5fH/fffz4kTJ4iJiWHRokV2r7shcrXbnrad7WnbeXX9qwC0DGhJTGgMMWExxN4Qy7BhDQEDO3eeCThWrYJjx+DLLy0bWBYZLw04evQALy+HnZKIiIhcpEqHGgkJCXTv3r3Mfl9fX06cOFEVYxK5KplMZuISU1m8NZ6N++LZkR7H4aJ4cjziwT3rTMOzf+kuccE9N5J6hlY0qx1FdFgUvdu0IrZVI1xdKjiPW0REROQqNmbMmHPebmrVqlU2jxMTEy/Yn8FgYMqUKUyZMqUKRicipSb3nMyhrEOsPbSWXem72HFsBzuO7eCDLR8AUN+7PjFhlpCj5x2xjHm4FSXFRjZsOBNy/PEH7Nxp2d58E1xcoFs3S8DRpw907Fjx22GJiIiI41Q61AgKCmLv3r2Eh4fb7F+7di2NG1+9Cw2KVMae5Ax+3hzHhr3xxKfFc6gwjmz3eMweGWca1Tq9AZiMuOY0J4Aomvm2okODKK5rFcX1bZvi6e7iiFMQERERERG5ZG5qfhMdgjsAcCzvGOsPrWdN0hrWJq1l89HNHM45zNz4ucyNnwuAj5sP3UK7ERMaQ9/RsTz1bCdO5niwYoUl4FiyBBITYfVqy/b001CnDvTqdSbkaNTIgScsIiIi51TpUGP06NE88sgjzJo1C4PBwJEjR9iwYQPjx49n4sSJ1TFGkStWUloWv/wRz5rdccSlxpN0Mp4stzhMnqlnGnme3gDMBlxymuBvakVj7yg6NGhFj5ZR9GnfHJ9abo44BRERERERkWrj7+mPu7M7BcUF52zj7uyOv6e/9XFArQAGRQ5iUOQgAPJP5bPp8CbWHFzD2kNrWX9oPdmF2Szau4hFexcB4OLkQnRINDFhMdw4LoYXXr+WE0f9rLM4VqyA48dh/nzLBtCkyZlbVV13HdSuXW1lEBERkUqodKjx5JNPYjKZ6NWrF/n5+XTv3h03NzfGjx/Pww8/XB1jFLnspWTm8ssfO1iTEM9fKfEk5sdxwjmeEq/kM43cT2+nOeeEU7ckika1WtEuJIoeLVvRr0MkdX08Lvn4RUREREREHCHMN4yEMQmk56cDYDKZyMzMpG7dutY1bfw9/QnzDTtnH54unvQM70nP8J4AFJuK2Z663TqTY03SGlJyU9iQvIENyRvKrMsx+NkYXvkglrTdDVm2zMDSpbBhA+zbBzNmWDYnJ+jS5cwsji5dLLevEhERkUuv0qGGwWDgqaee4vHHH2fv3r3k5ubSsmVLvLS6llwFMrNP8u2GHWw9spi/ju7kQF48mcY4ir0TzzRyPb2dZsxtQO3iKMI9omgT3IrYiCgGRLckqK7+mxEREREREQnzDbOGFiaTiTRjGoGBgdZQo7
KcnZxpH9ye9sHt+U+X/2A2mzlw4oAl4Dg9m+Oc63K0jOGO/jG85BdLWlwrViwzsmQJ7N5tCTo2bIApU8Db2zJ7ozTkaN4cDIYqK4mIiIicR6VDjVKurq60bNmyKscictnIPVnE4s0JrN4Rz5bkOPbnxJPuFM8pr33gZLI0cgFqn3mNU349fAtbEeYRRat6UcQ2b0X/ji1pWK92OUcQERERERGRS8FgMNC4TmMa12nM8LbDgTPrcpTO5DjnuhzXdGP47TE0c48h46/OrF7mwbJlkJEB339v2QDCws4EHL16gb//uUYjIiIi9qpwqHHfffdVqN2sWbMuejAil1pBUTErtu1l+fY4tiTHsycrjmPEU+S1B4zFlkZGbMILw8m6+BS0ooFbFK0CW9G1aRT9O0QREarfWkVERERERK4E51qXozTkOOe6HH2iGXlfDMGnYjj+17WsW+rHunWQlAQff2zZDAbo0OFMyHHtteCmJRJFRESqTIVDjTlz5tCwYUPat2+P2WyuzjGJVLmiUyWsiTvA8u3x/H4wjj0n4kk1x1FQKwGci8409D3rRYU+eOe3or5rFC38o+japBV92rYg0NNAUFC9i54KLSIiIiIiIpeXc63LURpyrE1ay9Hco9Z1OeD0uhy3teTuh2Pwy43h+J8xbFwcTnycgc2bYfNmeOkl8PSE7t0tAUffvhAVpVtViYiI2KPCoca//vUvvvzySw4cOMDIkSP5xz/+Qd26datzbCKVVlxiYuPOJJb9Gc+mxHgSjsdxtCSek7V2gEvBmYY+Z72oqBa18lsS4tyKiLpRdGkURb/2rejYrD5OTra/aZpMJtLS0i7NyYiIiIiIiIhDnL0ux8NdHrZZl6M06Dh7XQ74ALyh/sj63BwQg29WDBlbY9j8S2tSjxpZtAgWWSZ8EBwMvXufmckRFOTQUxUREbniVDjUePfdd3n99ddZsGABs2bNYsKECdx4442MGjWKvn37YtDHDOQSMpnMbNl7hMVb49h0IJ6dGfEcKY4jz3MHuOaeaeh91otOueOR14Igpygi6raiU8MoereJoltUQ5yNmnUhIiIiIiIi5avMuhyHc+YCcyEMfP7jw7W1u+FzPIb0rTFsX9SZo0c9+Owz+OwzS9+tW5+ZxREba5nZISIiIudWqYXC3dzcGDZsGMOGDePgwYPMmTOHhx56iOLiYuLj4/Hy8qquccpVymQys+NgGou3xrN+bxw70uM5XBRPjkccuGedaXj2pVfigltuBPUMrWjmG0Wnhq24vlUUPdo0xtXFeMnPQURERERERGqeiq7LsS51EbAIosCltQsta0XjfTyGY5tj2L/6WrZv92P7dnj9dXB1hZgYS8DRpw+0awe687GIiIitSoUaZ3NycsJgMGA2mykpKanKMclVak9yBou2xLN+TzxxaXEcKown2z0es0f6mUa1Tm8AJiOuOc0IIIqmPq3oEBrF9a1acX3bpni6uzjiFEREREREROQqVdF1OXbkbADnDdDlVegCDVxbUisjhmNbYsjcGsOKFeGsWGHgySfB3x969ToTcoSGOvYcRURELgeVCjUKCwutt59au3YtN910E++88w79+/fXoslSYUlpWSzavIO1CXFsT43n4Mk4slzjMdVKOdPI8/QGYDbgktMEP1MUjb2j6FC/FT1aRtG3QwQ+tdwccQoiIiIiIiIi53WhdTnWJq1lZ/pOkot2gPcO6PEB9ABfQwi1MmJJ3xpD+p4Y5s5rzdy5lrsOREScCTh69gRv7/OPQUREpCaqcKjx0EMP8dVXXxEaGsp9993Hl19+ib+/f3WOTa5wacfz+OWPHaxJiGfb0TgO5sdz3DmOEq/kM43cT2+nGXMa4lfSivBaUbQNjqJHi1b06xiJv69uKioiIiIiIiJXrguty7H20Fr+OPIHWaYjZNWdC73mQi9wM/vgnt6N7O0xJByMIWFGZ95+2wNnZ7jmmjMhR3Q0OF/0/ThERESuHBX+393MmTMJCw
ujcePGrF69mtWrV5fbbsGCBVU2OLkynMgtYPHmXazaEce2I/Hsz40j0xhPsfeBM41cT2+nOeXWp86pKMI9W9E6KIruka3o17EFIX76mImIiIiIiIhcHc63LsfapLWsP7SenKJsCgMWwfWLAHAyu+ByLJrCPTGsTYph7dRreeYZP3x94frroXdv6NDBSGCgI89MRESk+lQ41Bg+fDgGg6E6xyLVICkrifT8dEpKYPMWEwcT82gYnkzHDk4YjeDv6U+Yb1iF+so9WcTSLbtZtSOeLclx7MuOJ90pjlNe+8DJZGnkDNQ+8xqn/Hr4FkYR6h5F66BWXNssihuio2hYr3Y5RxARERERERG5ep1vXY61h9ay5uAajuYepTBwAwRugGtfBcApowVZB2L5dl8M366MgRPhNGp0ZhbH9ddDnToOPDEREZEqVOFQY86cOdU4DKkOSVlJRLwTQUFxge0Tx4Gtlm/dnd1JGJNgE2wUFBWzYtteVsbFs/lQPHuy4jhGPIVeu8FYbGnkhE14YThZF5+CVjRwi6JlQBTXNmtF/w5RRITqFmUiIiIiIiIiF6Oi63KY/HaC306I/sDywuwQDiTF8v7WGN7/LgbDsdZ0jjbSp48l5LjmGnB1Pf+xRURELle622INlp6fXjbQ+JuC4gImzZtPckoBu0/Ek2qKo8BrFzgXnWnke9YLCn3wyo+igWsrWvhHcU3jKPp3aEWr8Ho4OWkmj4iIiIiIiEh1qei6HMU+R6DVXMsGmAt8+O1QN35bHcPzn8VQK6sz18V4WEOOyEjQzTlERORKoVCjBispqVi72Uces3zjc9bOIk9q5UcRbIwi0q8VXRpF0adtFJ0iGii8EBEREREREblM/H1djtzCXJbELyE+J551h9ZZ1uUgG5otsmxAXokLPx7pyI8/xsJ7MQQXX0v/7n706WNZkyMgwJFnJCIicn4KNWqwrVsr1s45qzmhhs40rxNF5/BW9G4TRbeohjgbnap3gCIiIiIiIiJSpTxdPOkW0o1bAm/BycmJElMJf6X+VWZdDkI3WrZrX+UoMPtYC2Z/GQMvxdLSO4Ybrw2nbx8DMTHg7u7osxIRETlDoUYNlp5esXaTW3/Jf0d2qN7BiIiIiIiIiMglZ3QynnddjjUH17IrYycEnN46fsgOYEd2CK++H4Pzs7FEB8YwuFtr+vU10qaNblUlIiKOpVCjBvP3B5Iq2E5EREREREREarzy1uVIz09nXdI61iatZeX+tWxL/YMSnyPQah7FreaxEdh43JsnpnWjVkYsXevHMLRbZ27s50FIiGPPR0RErj4KNWqw9u2BLRVsJyIiIiIiIiJXJX9Pf5t1OfJP5bPp8CbWHFzL4p1r+SN1PYVuOdB0MXlNF7MMWHbABZ7piF9eLN0axHB37LXc1MuPWrUcey4iIlLzKdSowYzGqm0nIiIiIiIiIjWfp4snPcN70jO8JxN7YF2XY9X+tXz/51r+SFtDrtGyLkcGG/mBV/lhK7C0BcFFMVwbGss/usdwY7dwnJ11ryoREalaCjVqMH9Pf9yd3SkoLjhnG3dnd/w9df8pERERERERESnf2etyPHrtmXU5Fu1Yy4I/1rL52FpOuOwE/50cZSfzSz5k/kow/BBC/ZIYYsMsIUe/dq0xOumTlSIiYh+FGjVYmG8YCWMSSM9Pp6QEYl67l6LacfQ3TGPKfddhNFqCjzDfMEcPVURERERERESuEKXrcjx0bWMeutayLsexvHTmb1rHN7+vZUv6Wo67/4HZ6wjJzOPLrHl8+QM4LfCmvqkbsQ1juDs2luuadcbDxcPBZyMiIlcahRo1XJhvmDW0cKEWRUD7hk3o1KCDYwcmIiIiIiIiIjVGQC1//nXdIP51nWVdjuyT+Xy2/He+2byGrelrOeG9HpNbDodYzBcpi/niazCYXKhv6EhseAy3d44lNryb7iYhIiIXpFBDRERERERERESqlI+HJ/++qQf/vqkHAJnHS/hk0Xa+3b
yGrZlrya27BrP3UZLZyJcHN/LlwWkABBlb0D08hhtaxdC9YSzhtcMxGLQuh4iInKFQQ0REREREREREqlXdOkYeHdaOR4e1Ax4mMdHMl4sS+XbzGv46sZbCemshYCcpJTuZt28n8/Z9CEAd5xB6hMdwfdMYYhvG0jpQ63KIiFztFGqIiIiIiIiIiMglFR5uYMKDjZhAI0pKhrN1Kyxcks53W9azI3cNpgZrIeQPjnOEhXvnsXDvPAA8jd5cG9aN7uGWBcg719e6HCIiVxuFGiIiIiIiIiIi4jBGI0RHQ3S0P89zM3l5N/Prr/Dz0nx+3PI7ieY1ELYWQteT75bD0gOLWXpgMQDOBheiQzoSE2aZydEtVOtyiIjUdAo1RERERERERETkslGrFgwYAAMGePI2PThypAfLlsGiJSUs3rqdTK+1ELYGGq6h2PsoGw9vZOPhjUzbYFmXo4V/C2LCYqxbo9qNzrsuR1JWEun56QCYTCYyMzOpW1IXJycnAPw9/QnzDav+ExcRkQpRqCEiIiIiIiIiIpetkBAYPhyGDzdiNrdj+/Z2LF06hsVLzKz+M5Giemuh4enZHAE72Zlu2T7cYlmXI8Q7xBJwhFpCjjb12ljX5UjKSiLinQgKigvOeXx3Z3cSxiQo2BARuUwo1BARERERERERkSuCwQBt2li2xx4zUFDQiHXrGrFkyT0sXQpbE9IhdL0l4AhbAyGbOZJzhHnx85gXb1mXw9vVm26h3YgJiyGwVuB5Aw2AguIC0vPTFWqIiFwmFGqIiIiIiIiIiMgVyd0devWybC+/DMeO+bN8+c0sWXIzSxdDcmo+hPxuCTkarsEQtp4ccli8bzGL9y129PBFROQiKNQQEREREREREZEaISAA7rzTspnNkJDgydKlPViypAerFkJuXgnU226dyWFsuoIS9/QL9ltSUv1jFxGRinFy9ABERERERERERESqmsEAkZHw8MPwww+QkQG/rjby9D/b0cUwBqcFcyn5ZFGF+tq6tZoHKyIiFaZQQ0REREREREREajxXV4iNheeeg40bIT0d7rrLUKHXpl94MoeIiFwiCjVEREREREREROSqU6cO9OhRsbb+/tU7FhERqTiFGiIiIiIiIiIiclVq375q24mISPVTqCEiIiIiIiIiIlelet7+uBjcz9vGxeBOPW9N1RARuVwo1BARERERERERkatSmG8Yex9J4NWmm/Fa8YFl5/FweH8z9RZu5tWmm9n7SAJhvmEOHaeIiJzh7OgBiIiIiIiIiIiIOEqYbxjj7w7D6JHDuO1gNHuw5H/t6NHDCaPR0aMTEZG/U6ghIiIiIiIiIiJXPafTAYbRCD17gpPubyIiclnSP88iIiIiIiIiIiIiInJFUKghIiIiIiIiIiIiIiJXBIUaIiIiIiIiIiIiIiJyRVCoISIiIiIiIiIiIiIiVwSFGiIiIiIiIiIiIiIickVQqCEiIiIiIiIiIiIiIlcEhRoiIiIiIiIiIiIiInJFUKghIiIiIiIiIiIiIiJXBIUaIiIiIiIiIiIiIiJyRVCoISIiIiIiIiIiIiIiVwSFGiIiIiIiIiIiIiIickVQqHFaz549GTt2rKOHISIiIiIiIiIiIiIi5+DQUGPSpEkYDAabLTIy0qZNz549y7R58MEHz9vviBEjyrymf//+VTpWX19fYmNjWb16tV39ioiIiIiIiIiIiIhIxTg7egBRUVEsW7bM+tjZueyQRo8ezZQpU6yPPT09L9hv//79mT17tvWxm5ubnSO1HWtmZibTpk3jpptuIjk5GV9fX7v7FxERERERERERERGRc3P47aecnZ0JCgqybv7+/mXaeHp62rTx8fG5YL9ubm42r6lTp06lxvXTTz/h6+vL559/Xu5YW7ZsyZQpU8jNzWX37t2V6ltERERERERERERERCrP4TM19uzZQ0hICO7u7nTt2pWpU6cSFhZm0+bzzz/nf//7H0FBQQwcOJCJEydecLbGqlWrCA
wMpE6dOlx//fU8//zz+Pn5VWhMX3zxBQ8++CBffPEFN910U7ltCgsLmT17NrVr1yYiIuKcbQoLC62Ps7OzATCZTJhMpgqNpUqZT38x45jj1wAmkwmz2az6XSTVzz6qn31UP/uphvZR/eyj+tnncqiffnYiIiIiIlIVHBpqdOnShTlz5hAREcHRo0eZPHkysbGxxMXF4e3tDcBdd91Fw4YNCQkJ4a+//uKJJ54gISGBBQsWnLPf/v37M2TIEBo1asS+ffv473//y4ABA9iwYQNGo/G8Y3r33Xd56qmn+OGHH+jRo4fNc9u3b8fLywuA/Px8vL29mTt37jlnjkydOpXJkyeX2X/s2DEKCgrOO47qYDZbUo38/FzS0tIu+fFrApPJRFZWFmazGScnh090uuKofvZR/eyj+tlPNbSP6mcf1c8+l0P9cnJyHHJcERERERGpWRwaagwYMMD6fZs2bejSpQsNGzZk3rx5jBo1CoD777/f2qZ169YEBwfTq1cv9u3bR5MmTcrt984777R5TZs2bWjSpAmrVq2iV69e5xzP/PnzSUtLY926dXTq1KnM8xEREXz//feA5Y+yuXPncvvtt7Ny5Uqio6PLtJ8wYQLjxo2zPs7OziY0NJSAgIAK3UKrqhkMBgA8Pb0IDAy85MevCUwmEwaDgYCAAL2hchFUP/uofvZR/eynGtpH9bOP6mefy6F+7u7uDjmuiIiIiIjULA6//dTZateuTfPmzdm7d+8523Tp0gWAvXv3njPU+LvGjRvj7+/P3r17zxtqtG/fni1btjBr1iyio6OtIUApV1dXmjZtatN+4cKFTJ8+nf/9739l+nNzcyt3gXInJyfH/DF5+nQMBvRmgB0MBoPjfoY1gOpnH9XPPqqf/VRD+6h+9lH97OPo+unnJiIiIiIiVeGy+ssiNzeXffv2ERwcfM4227ZtAzhvm79LTk4mIyPjgq9p0qQJK1eu5LvvvuPhhx+uUN9Go5GTJ09WeCwiIiIiIiIiIiIiInJxHBpqjB8/ntWrV5OYmMj69esZPHgwRqORYcOGAbBv3z6ee+45Nm/eTGJiIt9//z3Dhw+ne/futGnTxtpPZGQk3377LWAJRh5//HE2btxIYmIiy5cvZ9CgQTRt2pR+/fpdcEzNmzdn5cqVfPPNN4wdO9bmueLiYlJSUkhJSWHPnj08//zz7Nixg0GDBlVdUUREREREREREREREpFwOvf1UcnIyw4YNIyMjg4CAAGJiYti4cSMBAQGA5XZPy5YtY/r06eTl5REaGsqtt97K008/bdNPQkICWVlZgGXmxF9//cUnn3zCiRMnCAkJoW/fvjz33HPl3gqqPBEREaxYsYKePXtiNBp57bXXAIiPj7fO9vD09KRJkybMmDGD4cOHV1VJRERERERERERERETkHBwaanz11VfnfT40NJTVq1dfsB+z2Wz93sPDg8WLF1d6LKtWrbJ53KJFC1JTU62PJ02axKRJkyrdr4iIiIiIXL7effddXn31VVJSUmjbti1vv/02nTt3LrdtfHw8zzzzDJs3b+bgwYO88cYbZWZ3T5o0icmTJ9vsi4iIYNeuXdV1CiIiIiIiV5XLak0NERERERGRS2Xu3LmMGzeOZ599li1bttC2bVv69etHWlpaue3z8/Np3LgxL730EkFBQefsNyoqiqNHj1q3tWvXVtcpiIiIiIhcdRw6U0NERERERMRRXn/9dUaPHs3IkSMBmDlzJj/99BOzZs3iySefLNO+U6dOdOrUCaDc50s5OzufN/T4u8LCQgoLC62Ps7OzATCZTJhMpgr3U1VMJhNms9khx64JVD/7qH72Uw0vntlaM9XvYun6s4/qZz/V0D6Orl9Fj6tQQ0RERERErjpFRUVs3ryZCRMmWPc5OTnRu3dvNmzYYFffe/bsISQkBHd3d7p27crUqVMJCws7Z/upU6eWuWUVwLFjxygoKLBrLBfDZDKRlZWF2WzGyUmT+ytL9bOP6mc/1f
DiZWXlAFBSAgsXnqBr12KMRgcP6gqj688+qp/9VEP7OLp+OTk5FWqnUENERERERK466enplJSUUK9ePZv99erVs2v9iy5dujBnzhwiIiI4evQokydPJjY2lri4OLy9vct9zYQJExg3bpz1cXZ2NqGhoQQEBODj43PRY7lYJpMJg8FAQECA3gy4CKqffVQ/+6mGF2fBAnhjug8MhpISA7ff7k+DBmbeeMPMkCGOHt2VQ9effVQ/+6mG9nF0/dzd3SvUTqGGiIiIiIhIFRkwYID1+zZt2tClSxcaNmzIvHnzGDVqVLmvcXNzw83Nrcx+Jycnh/0xbjAYHHr8K53qZx/Vz36qYeUsWABDh4I5zGCz//BhA0OHGpg/HwUblaDrzz6qn/1UQ/s4sn4VPaZ+siIiIiIictXx9/fHaDSSmppqsz81NbVS62FcSO3atWnevDl79+6tsj5FRKTqlJTAI4+A2Vz2udJ9Y8da2omIyOVBoYaIiIiIiFx1XF1d6dixI8uXL7fuM5lMLF++nK5du1bZcXJzc9m3bx/BwcFV1qeIiFSdNWsgOScJgreA327LTueTlsfBWzAHbeFQdhJff+3YcYqIyBm6/ZSIiIiIiFyVxo0bx7333kt0dDSdO3dm+vTp5OXlMXLkSACGDx9O/fr1mTp1KmBZXHzHjh3W7w8fPsy2bdvw8vKiadOmAIwfP56BAwfSsGFDjhw5wrPPPovRaGTYsGGOOUkRETmv7UlJMCYCXArO7KyTCA90PPP4lDvDHkzgySfD6NkT6xYefkmHKiIipynUEBERERGRq9Idd9zBsWPHeOaZZ0hJSaFdu3YsWrTIunh4UlKSzX19jxw5Qvv27a2Pp02bxrRp0+jRowerVq0CIDk5mWHDhpGRkUFAQAAxMTFs3LiRgICAS3puIiJSMW510m0DjfK4FGColc7Bg2F88gl88olld8OGKOQQEXEAhRoiIiIiInLVGjNmDGPGjCn3udKgolR4eDjm8m66fpavvvqqqoYmIiKXQPv2wJYLt1u1CgoTLV9XrYJNm+DgQWxCjvBw25CjYcNqGbKIyFVPoYaIiIiIiIiIiFyVjMaKtfPygu59oE8fy+O8PFi/3jbkSEyEOXMsGyjkEBGpLgo1REREREREREREzmPEwhF0DOlIhF8Ekf6RRPpH0vP6JvTp4wJAbq5tyPH772VDjkaNbEOOsDAHnIiISA2gUENEREREREREROQ8tqdtZ3vadpt9zk7ONKnTxBpyRAZGcvNDkTw+MQKXkjplQo4DByzb7NmW1yvkEBG5OAo1REREREREREREzuOlXi9RVFLEroxd7ErfRUJ6Anmn8kjISCAhI4HvEr6zaV+vVj0i/COI7BrJ7QMjGe8VSc7+SHZsDOPXVcbzhhzXXWf5Ghp6yU9TROSKoFBDRERERERERETkPPo06UOH4A7Wx2azmcM5h9mVvqvMdjjnMKl5qaTmpfLrwV9t+nH3cqf5vc0Z9J9IPPIiyTkQyYHfI4lf05wDB2rZhByNG9vO5FDIISJioVBDRERERERERESuSv6e/rg7u1NQXHDONu7O7vh7+tvsMxgMNPBpQAOfBvRu3NvmuZzCHBIyEqwzOkpnd+zO2E1BcQF/pf7FX6l/WRq7AtdatgDXUGqdjOTkoUjSdkSy/1gk+7+OZNasYMCgkENE5DSFGiIiIiIiIiIiclUK8w0jYUwC6fnpAJhMJjIzM6lbty5OTk6AJfgI8634ghfebt5Eh0QTHRJts7/EVELiiUTbmR2nA4/0/HSOFR3imPEQhC+F8DOvcyr2xpQayf70SPbviWTWhgh4LJLGtZtyXXc3a8jRoIGdxRARuUIo1BARERERERERkatWmG+YNbQwmUykGdMIDAy0hhpVxehkpEndJjSp24Qbm99o81xGfoZ1dsfZ277j+zA550D93y3bWfabnNh/vDEf/xwJn0YS4BRJl8aR3NAlkoG9/BRyiEiNpVBDRERERERERE
TEgfw8/ejm2Y1uod1s9hcWF7Lv+L5y1+7IKcoBv72WLeJHjgE/Aj8mwkNv+eOeF0lYrQg6hkXSt0MkMZGRhNcOx9lJbweKyJVN/4qJiIiIiIiIiIhchtyc3WgZ0JKWAS1t9pvNZlJyU6wBx19HdrFp/y72nthFtlMS1EqnoNZadrOW3Rnw5VJgKTiZXQlxa0abkEjah0YS6W/ZIvwi8HbzdsxJiohUkkINERERERERERGRK4jBYCDYO5hg72Cua3SdzXN5RXlsSdrNDxt2sW6X5ZZWmU67wC8Bk0sByUXxJCfG83OibZ8h3iGWkMPvTNgR6R9JA58GGAyGS3dyIiIXoFBDRERERERERESkhqjlWovYpu2Jbdreui87G35dY+KHX5NYFbeLPcd3YfbbBf6nN69UjuQc4UjOEVYcWGHbn0stIvwjrIFH6ffN6jbDw8XjUp+eiIhCDRERERERERERkZrMxwduutGJm24MB8LJyurP2rWwapVl27zjOOa6CeCXYA06XEJ2Ueyzl7xTeWw5uoUtR7fY9GnAQHjtcJtZHc3rNscffwLMAQ44SxG5WijUEBERERERERERuYr4+sKNN1o2gKysOqxdew2rVl3DqlWwZSWcMgFOp6DOfvDfhX/kLupG7MJUdxfHTLvIKjrBgRMHOHDiAL/s/cWm/zrudWzW6yj9vnGdxrgYXS75+YpIzaJQQ0RERERERERE5Cr295DjxAlOz+RwYdWqCLZujSA9YRDp1leYadz6GFE9dhHYchfGers4dNKyaHniiUSOFxxnQ/IGNiRvsDmOs5MzTes2LbN2R4R/BLXda1+6ExaRK5pCDREREREREREREbGqXRtuusmywdkhh2XbssXA/u2B7N8eCHQHICIC+vQwE9UuhRbXpnLcuJtd6btIyLAsVr4rfRf5p/Kt3/9dvVr1bG5lVbqF+YbhZHC6RGcuIlcChRoiIiIiIiIiIiJyTuWFHGvWnAk5tm6FhARISDAAwUAwERHt6NkTBvaEaQOhXpCJw9mHraHGrvRd7MqwfD2Sc4TUvFRS81JZfXC1zbHdnd1tbmFVekur5n7NqeVa6xJWQUQuFwo1REREREREREREpMJq14aBAy0bnAk5Vq40s2xZMXFxziQkGEhIgPfft7SJiHCiZ89QrrsulNt79CGoy5n+sguzSUhPsJnVsSt9F3sy91BQXMCfqX/yZ+qfZcYR5htW5lZWkf6RBHkFYTAYqr0OIuIYCjVERERERERERETkopWGHDfeaCYtLQMXl0DWrzewcqVlJse2baUzOc6EHJGR0LOnZevRw4dO9TvRqX4nm36LTcUknki0nd1xess4mUFSVhJJWUks2bfE5nU+bj7lLlTetG5TXI2ul6AiIlKdFGqIiIiIiIiIiIhIlalTx3Ymx/Hjtrer2rYNdu2ybDNnWtrYhhwQFHRmYfGmdZtyU/ObbI6Rnp9OQnpCmVtZ7T++n+zCbDYd3sSmw5tsXmM0GGlcp3GZdTsi/CLw8/Sr5qqISFVRqCEiIiIiIiIiIiLVpk4duPlmywZnQo7SmRx//lk25GjRwjbkqFfPtk9/T3/8w/y5Nuxam/2FxYXszdxrE3aUhh85RTnsydzDnsw9/LD7hzL9lXcrq/Da4RidjNVSFxG5OAo1RERERERERERE5JL5e8iRmWk7k+PPP2HnTss2Y4alzYVCjlJuzm5EBUYRFRhls99sNnM092i5t7I6lH2I9Px01iatZW3SWpvXuRpdae7X3CbwiPCPIMIvAm83b7trkZSVRHp+OgAmk4nMzEzqltTFyckJsIQtYb5hdh9HpCZRqCEiIiIiIiIiIiIOU7cuDBpk2eBMyHH2TI6/hxwtW9qGHIGB5z+GwWAgxDuEEO8Qrm90vc1zuUW57M7Yza7007M6Tt/KKiE9gcKSQuLS4ohLiyvTZ33v+mVuZRXpH0l97/oVWqg8KSuJiHciKCguOGcbd2d3EsYkKNgQOYtCDRERER
EREREREbls/D3kyMgoO5Njxw7L9t57ljaVDTnO5uXqRYfgDnQI7mCzv8RUQlJWku3MjtOBR1peGodzDnM45zDLDyy3eV0tl1pl1uyI9I+kmV8z3J3dre3S89PPG2gAFBQXkJ6frlBD5CwKNUREREREREREROSy5ecHt9xi2aByIcd110H37pULOUoZnYw0qtOIRnUaMaDZAJvnjp88TkJGQplbWe3N3EveqTw2H93M5qObbV5jwECjOo2st7LycPGo/KBERKGGiIiIiIiIiIiIXDnKCzl+/fVMyPHXX2VDjqgo25kcAQH2jaGORx2uaXAN1zS4xmZ/UUkR+4/vtwk6EjIS2HlsJ1mFWew/vp/9x/fz856f7RuAyFVMoYaIiIiIiIiIiIhcsfz8YPBgywblhxzx8Zbt3Xctbao65CjlanS13nbqbGazmbS8NJuw47fDv7EheUPVHFjkKqJQQ0RERERERERERGqMv4cc6em2Icf27WVDjlatzoQc3btXXchRymAwUM+rHvW86tEjvAcAW45uoeMHHav2QCJXAYUaIiIiIiIiIiIiUmP5+8OQIZYNyg854uIs2zvvWNqcHXL06GHpQ0QuDwo1RERERERERERE5Krx95Dj2DHbkKM04Dg75Gjd2nYmh0IOEcdRqCEiIiIiIiIiIiJXrYAAuPVWywblhxzbt1u2t9+2tKmKkMPf0x93Z3cKigvO2cbd2R1/TyUoImdTqCEiIiIiIiIiIiJy2rlCjpUrLSFHfPy5Q47rrrOEHH5+Fz5OmG8YCWMSSM9PB+CRXx5h7aG1PB37NINbWBYE8ff0J8w3rOpPUuQKplBDRERERERERERE5Bz+HnKkpdnO5Cgv5GjTxnYmx7lCjjDfMMJ8wygpgeI8XwAKUhrStkcHjMZqPjGRK5STowcgIiIiIiIiIiIicqUIDITbbrOstxEXB6mp8PXX8NBD0LKlpc1ff8Fbb1nW7fD3h7Zt4ZFHYOFCyMy07W/BAggPh40bDABMm+ZEeLhlv4iUpZkaIiIiIiIiIiIiIhepNOS47TbL49RU25kcO3ZYQo7SoMNgODOTw80NXn0VzGbbPg8ftvQ3f/6ZBc1FxEKhhoiIiIiIiIiIiEgVqVcPbr/dssGZkKN0TY6dO+HPPy3buZjNlvBj7FgYNAjdikrkLLr9lIiIiIiIiIiIiEg1KQ053nvPMmsjJQXmzoWbbz7/68xmOHQI1qy5NOMUuVJopoaIiIiIiIiIiIjIJVKvHgwdCiUl8P33F25/9Gj1j0nkSqJQQ0REROQqUVJSwqlTpxw9jItiMpk4deoUBQUFODlpsnFlXYr6GY1GnJ2dMRgM1dK/iIiISE0THFy17USuFgo1RERERK4Cubm5JCcnY/77CoRXCLPZjMlkIicnR2+aX4RLVT9PT0+Cg4NxdXWttmOIiIiI1BSxsdCggWVR8PJ+TTcYLM/Hxl76sYlczhRqiIiIiNRwJSUlJCcn4+npSUBAwBUZCpjNZoqLizUT4CJVd/3MZjNFRUUcO3aMAwcO0KxZM82oEREREbkAoxHefBNuu80SYJyda5T+yjZ9uhYJF/k7hRoiIiIiNdypU6cwm80EBATg4eHh6OFcFIUa9rkU9fPw8MDFxYWDBw9SVFSEu7t7tRxHREREpCYZMgTmz4dHHoHks/Y3aGAJNIYMcdTIRC5f+viUiIiIyFVCYYBUN83OEBEREam8IUMgMRFq17E87tfPxIEDCjREzkV/dYiIiIiIiIiIiIg4kNEIpcuSBQXpllMi56NQQ0RERERERERERERErghaU0NEREREKqSkBNasgaNHITgYYmP1CTIRERERERG5tDRTQ0REREQuaMECCA+H666Du+6yfA0Pt+yvCRITEzEYDGzbtq3ajxUeHs706dPP26aoqIimTZuyfv16u441adIk2rVrd942I0aM4JZbbjlvmx07dtCgQQPy8vLsGo+IiIiIiIi9FGqIiIiIyHktWAC33Q
bJybb7Dx+27K+uYGPEiBEYDAYMBgNubm60aNGCKVOmUFxcbHe/F3oT39FmzpxJo0aN6NatW5nnCgsLadeuXZWFMG+++SZz5syxPu7Zsydjx461adOyZUuuueYaXn/9dbuPJyIiIiIiYg+FGiIiIiJXGbMZ8vIqtmVnw3/+Y3lNef0APPKIpV1F+iuvn/Pp378/R48eZffu3YwdO5bJkyfz6quvXtR5l5SUYDKZLuq1l5LZbOadd95h1KhR5T7/f//3f4SEhFTZ8Xx9faldu/YF240cOZIZM2bYHSqJiIiIiIjYQ6GGiIiIyFUmPx+8vCq2+fpaZmSci9lsmcHh61ux/vLzKzdWNzc3goKCaNiwIQ888AC9e/fm+++/B+D48eMMHz6cOnXq4OnpyYABA9izZ4/1tXPmzKF27dp8//33tGzZEjc3N+677z4++eQTvvvuO+sskFWrVpU5bklJCaNGjaJRo0Z4eHgQERHBm2++adOmdMbHtGnTCA4Oxs/Pj3//+9+cOnXK2iYtLY2BAwfi4eFBo0aN+Pzzzy94zps3b2bfvn3ceOONZZ775ZdfWLJkCdOmTatoCQF4//33ady4MbVq1WLo0KFkZWWVOY/S71evXs2bb75prU9iYiIAffr0ITMzk9WrV1fq2CIiIiIiIlVJC4WLiIiIyBXDw8ODjIwMwPIG/J49e/j+++/x8fHhiSee4IYbbmDHjh24uLgAkJ+fz8svv8xHH32En58fwcHBnDx5kuzsbGbPng1A3bp1OXLkiM1xTCYTDRo04Ouvv8bPz4/169dz//33ExwczNChQ63tVq5cSXBwMCtXrmTv3r3ccccdtGvXjtGjR1vHeOTIEVauXImLiwv/+c9/SEtLO+85rlmzhubNm+Pt7W2zPzU1ldGjR7Nw4UI8PT0rXLO9e/fy9ddfs2DBAvLz8/nnP//JQw89VG7A8uabb7J7925atWrFlClTAAgICADA1dWVdu3asWbNGnr16lXh44uIiIiIiFQlhRoiIiIiVxlPT8jNrVjbX3+FG264cLuff4bu3St27IthNptZvnw5ixcv5uGHH7aGGevWrbOuO/H5558TGhrKwoULuf322wE4deoU7733Hm3btrX25eHhQWFhIUFBQec8nouLC5MnT7Y+btSoERs2bGDevHk2oUadOnV45513MBqNREZGcuONN7J8+XJGjx7N7t27+eWXX9i0aROdOnUC4OOPP6ZFixbnPdeDBw+Wub2U2WxmxIgRPPjgg0RHR1tnT1REQUEBn3zyCfXq1cPZ2Zm3336bG2+8kddee61MDXx9fXF1dcXT07Pc+oSEhHDw4MEKH1tERERERKSqKdQQERERucoYDFCrVsXa9u0LDRpYbkFV3noYBoPl+b59wWis2nEC/Pjjj3h5eXHq1ClMJhN33XUXkyZNYvny5Tg7O9OlSxdrWz8/PyIiIti5c6d1n6urK23atLmoY7/77rvMmjWLpKQkTp48SVFREe3atbNpExUVhfGsEw8ODmb79u0A7Ny5E2dnZzp27Gh9PjIy8oLrV5w8eRJ3d3ebfW+//TY5OTlMmDDhnK/z8vKyfv+Pf/yDmTNnAhAWFkb9+vWta2F07doVk8lEQkLCeYOd8nh4eJBf2XuIiYiIiIiIVCGFGiIiIiJyTkYjvPkm3HabJcA4O9gwGCxfp0+vnkAD4LrrrmPGjBm4uLgQGBiIu7s7htIDV4CHh0el2pf66quvGD9+PK+99hpdu3bF29ubV199ld9++82mXeltrkoZDAa7FyP39/e3BiOlVqxYwYYNG3Bzc7PZHx0dzd13380nn3zCtm3brPt9fHzsGsO5ZGZm0qRJk2rpW0REREREpCK0ULiIiIiInNeQITB/PtSvb7u/QQPL/iFDqu/YtWrVomnTpoSFheHsfObzOC1atKC4uNgmZMjIyCAhIYGWLVuet09XV1dKSkrO26b0tlYPPfQQ7du3p2nTpuzbt69SY4+MjKS4uJ
jNmzdb9yUkJHDixInzvq59+/bs2rUL81kJ0ltvvcWff/7Jtm3b2LZtGz///DMAc+fO5YUXXgCgadOm1i0wMND62qSkJJs1QzZu3IiTkxMRERHlHv989YmLi6N9+/bnP3EREREREZFqpJkaIiIiInJBQ4bAoEGwZg0cPQrBwRAbW30zNC6kWbNmDBo0iNGjR/P+++/j7e3Nk08+Sf369Rk0aNB5XxseHs7ixYtJSEjAz88PX1/fcvv/9NNPWbx4MY0aNeKzzz7j999/p1GjRhUeY0REBP379+eBBx5gxowZODs7M3bsWDw8PM77uuuuu47c3Fzi4+Np1aoVYLmF1NlKbzXVpEkTGjRocN7+3N3dGTFiBFOnTiU/P5///Oc/DB069Jy3ngoPD+e3334jMTERLy8v6tati5OTE4mJiRw+fJjevXtXtAQiIiIiIiJVTjM1RERERKRCjEbo2ROGDbN8dVSgUWr27Nl07NiRm266ia5du2I2m/n555/L3BLq70aPHk1ERATR0dEEBASwbt26Mm0eeOABhgwZwh133EGXLl3IyMjgoYceuqgxhoSE0KNHD4YMGcL9999vM4uiPH5+fgwePJjPP/+80scrT9OmTRk8eDCDBg2iX79+tGnThvfee++c7cePH4/RaKRly5YEBASQlJQEwJdffknfvn1p2LBhlYxLRERERETkYhjM5vKWfJTqkJ2dja+vL1lZWdV2n+Pz8Xr0GvJq/8aE8G958d5bLvnxawKTyURaWhqBgYE4OSkTrCzVzz6qn31UP/uphvZxZP0KCgo4cOAAjRo1KrMA9ZXCbDZTXFyMs7PzRa2RcaX566+/6NOnD/v27bNZAPxi2Vu/oqIimjVrxhdffMG11157znbnu9Yc/bvwlcTRtdK/9/ZR/eyj+tlPNbSP6mcf1a/ykrKSSM9PB6D3a2M57r2Gm7yeZvJdgwHw9/QnzDfsfF3IWXQN2sfR9avo78G6/ZSIiIiIyGWmTZs2vPzyyxw4cIDWrVs7ejgkJSXx3//+97yBhoiIiIhUTlJWEhHvRFBQXGDZ4W358mPu8/z4wfMAuDu7kzAmQcGGyFkUaoiIiIiIXIZGjBjh6CFYlS5ALiIiIiJVJz0//UygcQ4FxQWk56cr1BA5i+bgiIiIiIiIiIiIiIjIFUGhhoiIiIiIiIiIiIiIXBF0+ykRERERERERERGRy9Qd8++gS/0utA5sTet6rWkd2JoGPg0wGAyOHpqIQyjUEBEREREREREREbnESkoq1m5v5l72Zu612VfbvTatAltZgo7TYUerwFbUdq9d9QMVucwo1DgtPDycsWPHMnbsWEcPRURERERERERERGq4rVsr1u6hxm9QPzyf7Wnb2Z66nYSMBE4UnGBt0lrWJq21aRvqE2qdzVEadkT6R+JqdK2GMxBxjCtuTY1JkyZhMBhstsjISJs2PXv2LNPmwQcftOu4I0aMsOnPz8+P/v3789dff9nVr4iIiIiIOM67775LeHg47u7udOnShU2bNp2zbXx8PLfeeivh4eEYDAamT59ud58iIiJy9UpPr1i7jXO7Uyfuv4wL+5LNo+LI+28efz74J/8b/D+euPYJbmh2A6E+oQAcyj7Ez3t+5uV1L/OPb/9B25ltqfViLVq914ph3wzjxTUv8n3C9xw4fgCT2VSNZydSfa7ImRpRUVEsW7bM+tjZuexpjB49milTplgfe3p62n3c/v37M3v2bABSUlJ4+umnuemmm0hKSrK7bxEREZHLVVJWEun55/6Ly9/TnzDfsEs4IpGqMXfuXMaNG8fMmTPp0qUL06dPp1+/fiQkJBAYGFimfX5+Po0bN+b222/n0UcfrZI+RURE5OrVJNgf9rmDS8G5G51yZ8tafx76yfLQxQXatHGlU6c2dOrUhruioWVPcHaGEwUniEuLY3vqdsusjtMzO7IKs4g/Fk/8sXi+4itr116uXmVuYdU6sDV+nn7Ve+IidroiQw1nZ2eCgo
LO28bT0/OCbc7no48+Yvz48XzzzTf06tULADc3N2ufQUFBPPnkk8TGxnLs2DECAgLK9FFYWEhhYaH1cXZ2NgAmkwmTyQFJqPn0FzOOOX4NYDKZMJvNqt9FUv3so/rZR/Wzn2poH0fWr/TYpVtlJGUlEfluJAXF5/5Dy93ZnV3/3lXtwUbp2Ct7DlWpUaNGPPLII3bfsvTjjz9m3rx5LF682K5+nJycWLBgAbfccku5zycmJtK4cWO2bNlCVFQUUH79hg0bRnR0NI899phd4ym9xsr7ffdy/Lfj9ddfZ/To0YwcORKAmTNn8tNPPzFr1iyefPLJMu07depEp06dAMp9/mL6FBERkavXbX3CCBqbQEr2uT9AVMfNn3/9O4zNm+H33yEzEzZvtmwzZ1raeHhA+/bQqVNtOnWK4froGB7oCE5Olt/PkrOT2Z623RJ4nA46dqbvJLcol43JG9mYvNHmmMFewWVuYdUyoCXuzu7VWQ6RCrsiQ409e/YQEhKCu7s7Xbt2ZerUqYSF2f4R/fnnn/O///2PoKAgBg4cyMSJEys8W+OVV17hlVdeYcmSJXTu3LncNrm5ufzvf/+jadOm+PmVn15OnTqVyZMnl9l/7NgxCgrOk8BWk9I/YPPzc0lLS7vkx68JTCYTWVlZmM1mnJyuuLu3OZzqZx/Vzz6qn/1UQ/s4sn6nTp3CZDJRXFxMcXFxpV6bmpN63kADoKC4gNScVEJqhdgzzDKOHTvG5MmT+eWXX0hNTaV27dq0adOGp59+mm7dulXZcZo1a8bDDz/Mf/7zH+u+Tz/9lMcee4xjx46VaV9ay4tVUFDAM888w5dfflluP3PnzuWee+5h4MCBfPPNNxfsr6Sk5JzjCQ4OJikpCT8/P0pKSli9ejV9+/YlLS2N2rVrW9s98cQT9OrVixEjRuDr63vR51ZcXIzJZCIjIwMXFxeb53Jyci663+pQVFTE5s2bmTBhgnWfk5MTvXv3ZsOGDZe0z8vtw1AKse2j+tlH9bOfamgf1c8+ql/lGAzw9vMNGDrUcusos9lw1nOW9/E+mGdmyBDT6echMdESbmzebOCPPyzhRk6OgfXrYf36M337+Jjp2BGioyE6uj7R0fXpd01/DKcPcarkFHsy99iEHXFpcRw4cYCjuUc5mnuUJfuWWPtzMjjRrG4z68yO0q+N6zTGyXD5/H2oa9A+jq5fRY97xYUaXbp0Yc6cOURERHD06FEmT55MbGwscXFxeHt7A3DXXXfRsGFDQkJC+Ouvv3jiiSdISEhgwYIFF+z/iSee4LPPPmP16tXWT7OV+vHHH/Hy8gIgLy+P4OBgfvzxx3O+MTFhwgTGjRtnfZydnU1oaCgBAQH4+PhcbAkumuH0v1qenl6a+n6RTCYTBoOBgIAAvaF3EVQ/+6h+9lH97Kca2seR9SsoKCAnJwdnZ2ecnZ0xm83kn8qv0GuLTEUVbldoKrxgO08XT+vvJBdy5513UlRUxJw5c2jcuDGHDx9m9erVnDhxotzbj9rDycnJps/Sn1F5x/l728pauHAhPj4+dO/evcxziYmJ1tnAFT2O0Wg8ZztnZ2caNGgAWMKt0nal10Kpdu3a0aRJE7766iv+/e9/X8xpWft1cnLCz88Pd3fbT/L9/bGjpaenU1JSQr169Wz216tXj127dl3SPi+3D0MpxLaP6mcf1c9+qqF9VD/7qH6VFxMDH37oxsSJPhw9arTuDw42MWVKNjExhZz92eRataBnT8sGYDLBvn1Gtm1z4c8/LVtcnAvZ2QZWroSVKwEsv3/XrWuiXbtTtG176vTXQK6rdx3XBVwHp98GzS3KJeF4Ajszd7Ircxc7M3eyM3MnxwuOk5CRQEJGAt/sPPPBGw9nDyLqRNCibgsi60bSom4LWvi1wN/Dv1rrdi66Bu3j6PpV9INQV1yoMWDAAOv3bdq0oUuXLjRs2JB58+YxatQoAO6//35rm9atWxMcHEyvXr3Yt28fTZo0OWffr7
32Gnl5efzxxx80bty4zPPXXXcdM2bMAOD48eO89957DBgwgE2bNtGwYcMy7d3c3HBzcyuz38nJyTH/UZ1+/8BgQP9R28FgMDjuZ1gDqH72Uf3so/rZTzW0j6Pq5+TkhMFgsG75p/Lxfsm7So8ROye2Qu1yJ+RSy7XWBdudOHGCNWvWsGrVKnr06IHZbKZ+/fp069bNJhQ5ceIE48eP57vvvqOwsJDo6GjeeOMN2rZtC8C+ffsYN24cGzduJC8vjxYtWjB16lR69+4NQM+ePTl48CDjxo2zfhhl5cqV3HfffcCZ35meffZZJk2aBGCtY0WOX565c+cycODAMuFOSUkJ//jHP5g8eTJr1qzhxIkTFQqAUlJSuOGGG1i1ahXBwcG88sor3HbbbYAlJGnUqBFbtmzBy8uL66+/HoC6desCcO+99zJnzhwABg4cyNy5cxkzZswFj3kupbUp7zrXvxvndrl9GEohtn1UP/uofvZTDe2j+tlH9bs4I0fC8OHw668lJCRkExHhQ/fuBozGis2gDQqCa6898/jUKTM7dphtZnT89RdkZjqxYoUbK1aceb+yfn3z6dkclq8dOwYyoEFjBnDmPViz2UxKbkqZWR070ndwsvgk245tY9uxbTZjCqwVSOvA1kQFRFlndUQFRFXobwF76Bq0j6PrV9EPQl1xocbf1a5dm+bNm7N3795ztunSpQsAe/fuPW+oERsby08//cS8efPKvd9trVq1aNq0qfXxRx99hK+vLx9++CHPP/+8HWchIiIiImfz8vLCy8uLhQsXcs011+Dq6lpuu9tvvx0PDw9++eUXfH19ef/99+nVqxe7d++mbt265ObmcsMNN/DCCy/g5ubGp59+ysCBA0lISCAsLIwFCxbQtm1b7r//fkaPHg1Y3vCfPn06zzzzDAkJCdbxXMzxy7N27VruueeeMvunTJlCYGAgo0aNYs2aNRWu1cSJE3nppZd48803+eyzz7jzzjvZvn07LVq0sGkXGhrK/Pnzue2220hISMDHxwcPDw/r8507d+aFF16gsLCw3A/m1DT+/v4YjUZSU1Nt9qempl702nwX2+dl92EoFGLbS/Wzj+pnP9XQPqqffVS/i+PkBNddZyIqqpDAQINd9XNzs6yx0b79mX0FBZZg4/ffLdsff8COHXD4sIHDh+G77858mKZxY+jUyXLrqk6doEMHqO9bn/q+9enfrL+1XYmphL2Ze63rdJQuTr4vcx9peWksP7Cc5QeWW9sbMNC4TuMy63U0rdsUZ6eqe5ta16B9HFm/ih7zig81cnNz2bdvX7l/GJbatm0bYLmn8Pl07tyZMWPG0L9/f5ydnRk/fvx525f+gE+ePFnpcYuIiIg4iqeLJ7kTcivUdlvKNmJmx1yw3dqRa2kX1K5Cx64IZ2dn5syZw+jRo5k5cyYdOnQgJiaGu+66yzoLYu3atWzatIm0tDTrG8LTpk1j4cKFzJ8/n/vvv5+2bdvazJp47rnn+Pbbb/n+++8ZM2YMdevWxWg04u3tbfOms6+vLwaD4bxvRFfk+H934sQJsrKyCAkJKdPXxx9/bP29tTJuv/12/vnPf1rPb+nSpbz99tu89957Nu2MRqM1aAkMDLRZUwMgJCSEoqIiUlJSyp2FXNO4urrSsWNHli9fbl1o3WQysXz58ouerVIdfYqIiIhUBXd36NzZspXKzYWtW22Djr17Yf9+yzZ3rqWdwQCRkbZBR7t24O5uJMI/ggj/CG5reZu137yiPHYc21Em7EjLS2Pf8X3sO76PhbsWWtu7Gd1oGdCyTNgR7BVc4VvXytXligs1xo8fz8CBA2nYsCFHjhzh2WefxWg0MmzYMMByi4EvvviCG264AT8/P/766y8effRRunfvTps2bS7Yf7du3fj5558ZMGAAzs7OjB071vpcYWEhKSkpgOX2U++88w65ubkMHDiwWs5VREREpDoYDIYKT/v2cPG4cKPT7ap6Kvmtt97KjTfeyJo1a9
iwYQO//PILr732Gh999BEjRozgzz//JDc3Fz8/P5vXnTx5kn379gGWD8BMmjSJn376iaNHj1JcXMzJkydJSkqye3wVOf7flX4Y5uxp1Tk5Odxzzz18+OGH+PuXf+/hF198kRdffNH6eMeOHYSFhQHQtWtXm7Zdu3a9qHCkdNZGfn7F1lupCcaNG8e9995LdHQ0nTt3Zvr06eTl5TFy5EgAhg8fTv369Zk6dSpgWQh8x44d1u8PHz7Mtm3b8PLyss7ovlCfIiIiIpcLLy+IjbVspY4ftyw+fnbQcegQ7Nxp2T791NLO2RlatbINOlq1AhcXqOVai071O9Gpfieb46XlpbE99cwtrEpvY5V/Kp+tKVvZmrLVpn1dj7o2IUfpAuXeblV7K1258lxxoUZycjLDhg0jIyODgIAAYmJi2LhxIwEBAYDl01HLli2z/vEQGhrKrbfeytNPP13hY8TExPDTTz9xww03YDQaefjhhwFYtGiRdbaHt7c3kZGRfP311/QsXZlHRERERKqUu7s7ffr0oXfv3kyYMIEHH3yQZ599lhEjRpCbm0twcDCrVq0q87rSWQjjx49n6dKlTJs2jaZNm+Lh4cFtt91GUVHFFkA/n4oc/+/8/PwwGAwcP37cum/fvn0kJibafFDGZDIBlhkrCQkJPPjggwwdOtT6/N9nelSFzMxMAOvv1VeDO+64g2PHjvHMM8+QkpJCu3btWLRokXWh76SkJJsp8EeOHKH9WfdxmDZtGtOmTaNHjx7W6+BCfYqIiIhczurUgd69LVup1FRLuFEadPz+Oxw7Btu2WbYPP7S0c3OzzOA4O+iIiADj6fXPA2sF0qtxL3o17mXt22Q2ceD4gTKzOnZn7CbzZCarD65m9cHVNmMMrx1eJuxo7tccF6NLtdZGLh9XXKjx1Vdfnff50NBQVq9efd425UlMTLR53L17d3Jzz9yWYc6cOdZFFEVERESuFv6e/rg7u1NQXHDONu7O7vh7lj/DoKq1bNmS7777DoAOHTqQkpKCs7Mz4eHh5bZft24dI0aMYPDgwYAliPj7732urq6UlJRccN/fVeT4f+fq6krLli3ZsWMHffv2BSAyMpLt27fbtHv66afJycnhzTffJDQ0FFdX13Ou0bFx40aGDx9u8/jsN97/fnyg3HOLi4ujQYMG55wtUlONGTPmnLeG+ntgFR4ejtlstqtPERERkStNvXpw442WDcBstszeKJ3JUfo1Kwt++82ylfLysqzJcXbQ0bix5ZZWAE4GJ5rUbUKTuk24JfIW6+sKigvYeWxnmbDjSM4REk8kkngikR92/2Bt7+LkQqR/JK0CW9HIsxHXNL6GtkFtCfUJ1S2saqArLtQQERERkUsnzDeMhDEJpOenn7ONv6c/Yb5hVXrcjIwMbr/9du677z7atGmDl5cXv/32G6+++iqDBg0CoHfv3nTt2pVbbrmFV155hebNm3PkyBF++uknBg8eTHR0NM2aNWPBggUMHDgQg8HAxIkTrbMgSoWHh/Prr79y55134ubmhr+/P+Hh4eTm5rJ8+XLatm2Lp6cnnp6264FU5Pjl6devH2vXrrXe5tTd3Z1WrVrZtCmd6fH3/eX5+uuviY6OJiYmhs8//5xNmzbx8ccfl9u2YcOGGAwGfvzxR2644QY8PDysi6CvWbPGGrSIiIiIiJyLwQBhYZbt1lst+0wm2LfPNujYssWybsevv1q2UnXqnAk4Sr/Wr38m6ADLB6faB7enfbDth3Uy8jPO3L4q9cwtrHKKcqzBBwCbLF983XxpFdjKZlZH63qtqe1eu/oKJNVOoYaIiIiInFeYb1iVhxYX4uXlRZcuXXjjjTfYt28fp06dokGDBvzzn//kqaeeAixrg/z888889dRTjBw5kmPHjhEUFET37t2tt/p5/fXXue++++jWrRv+/v488cQTZGdn2xxrypQpPPDAAzRp0oTCwkLMZjPdunXjwQcf5I477iAjI4Nnn32WSZMm2b
yuIscvz6hRo4iOjiYrKwtfX1+7azV58mS++uorHnroIYKDg/nyyy9p2bJluW3r16/P5MmTefLJJxk5ciTDhw9nzpw5FBQUsHDhQhYtWmT3eERERETk6uPkBM2aWba77rLsKymxrMNxdtDx55+WdTuWLrVspYKCzgQcpWFHeXdF9fP04//Zu+/4pqr+D+CfjCZNN120pZMChTIKlGEFBGRU1gMKCKIssYAiQx9U4CfLrSiiIIiCLFEQxIIIQtlDBGQvC5S2jAId0EV3c35/5EkgJG3Tpm06Pu/nlZdPTs6999yT23K//d5zTmf/zujs31lXJoRAfFo8zt09h7N3z+Kf6//gSvoVRKdEIy03DYdvHMbhG4f19uPt4G0whVVj18ZQypUV0T1UziTClPHTVC7S09Ph6OiItLQ0ODg4VPrx7d54Ag+cjmK6/2/4aOSASj9+TaBWq5GYmAh3d3e9+ZXJNOw/87D/zMP+M5HcHBQAAQAASURBVB/70DyW7L+cnBzExsYiICBAb4Hq6kQIgYKCAsjl8hoxfHzw4MFo3bo1pk+fXinHK6n/lixZgt9++w07d+406zjFXWuWvheuTizdV/x9bx72n3nYf+ZjH5qH/Wce9p95akP/5eUB587pJzouXNAkQB7n56ef6AgNBUp6JujRPiwQBfg3+V+96avO3T2HG+k3jG4rl8rRyKWRQbLDz8kPUknN/D4eZ+lr0NT7YI7UICIiIiKqZPPmzcPvv/9ecsVKYmVlhYULF1q6GURERERUwykUmuREaOjDsqwszYLjjyY6oqOB+HjN69dfH9Zt1Eg/0dGyJWBrW8SxZAq0qNsCLeq20CtPzUnFhcQLBut1pOak4mLSRVxMuoj1F9br6tsp7B5OYeXeXPP/6zavtHUFyRCTGkRERERElczf3x8TJ060dDN0XnnlFUs3gYiIiIhqKRsb4MknNS+ttDTNmhzHjz9MdsTFAZcva14//aSpJ5UCTZs+THSEhmoWNi+Ok7UTOvh2QAffDroyIQRuZdwyGNVxKfkSMvMy8ffNv/H3zb/19uNh52EwqiPYLRgqK1U59QwVhUkNIiIiIiIiIiIiIqoyHB2Brl01L62kJODEiYeJjuPHgTt3NNNZnTsHrFgBAFIoFHXRooX+QuRNmgDyYv4SLpFI4O3gDW8Hb/Rq2EtXnl+Yjyv3rhgkO2JTY3En8w7uZN5B1LWHi4NIJVI0cG5gkOyoX6c+ZFJZ+XdULcWkBhERERERERERERFVaW5uwDPPaF5at249nLJKM6JD4N49Cf75R1OuZWMDtGqln+ho0EAz0qM4VjIrBLsFI9gtGEMwRFeekZuBC0kXDJIdKdkpuJxyGZdTLuPXSw/nzVLJVWjq3tQg2VHXroRhJWQUkxo13PW060jOSgYAFMoeAADu5Mbg5O2TAABXG1f4OvparH1EREREREREREREZVGvnubVv7/mfWGhwD//JOPaNRecPCnF8eOa0R2ZmcDhw5qXlqOjZrqqRxMdvr6ARFLyce2V9njC+wk84f2ErkwIgTuZdwzW6riYdBHZBdn4J+Ef/JPwj95+3GzcdAkObbKjqVtT2CqKWCiEADCpUaNdT7uOoEVByCnI0RTYa/6z4vZUrPhO8/+t5daIfj2aiQ0iIiIiIiIiIiKq1iQSwM+vEG3bAi+8oClTqzULjz86ouP0ac26HXv2aF5abm4PExza/3p4mHpsCTztPeFp74megT115YXqQly9d9Ug2RFzLwZJWUnYE7sHe2IfNkICCerXqa9LdmgXKW/o0hByKf+cDzCpUaMlZyU/TGgUIacgB8lZyUxqEBERERERERERUY0jlWrW1GjSBBg+XFOWnw9cuPBwEfLjxzXrciQlAdu3a15a3t76iY42bQBnZ9OPL5PKEOQahCDXIAwKHqQrf5D3ABeTLuJc4jmcTzyvS3rcfXAXMfdjEHM/BpH/RurqK2VKNHFrYjCFlZe9FySmDC+pQZjUICIiIi
IiIiIiIqJaw8oKaNlS84qI0JTl5ABnzugnOi5dAm7e1LwiIx9uHxion+ho3Rqwty9dG2wVtmhbry3a1murV570IMlgVMf5xPPIys/C6TuncfrOab36dazrGExh1cy9GRyUDia149HlC9RqNe7duwfnQmdI/7fgSFVcvoBJDSIiIiIiIiIiIiKq1aytgfbtNS+tzEzg5En9REdMzMPX+vWaehKJZiSINtHRti0QEqLZZ2m52brh6YCn8XTA07oytVAj9n6sQbLjcspl3M+5jwPxB3Ag/oDefvwc/QySHUEuQbCSWenqGCxfYKxfquDyBUxqEBEREZHJdu2Kx6RJu/H1193QvbufpZtTbuLi4hAQEIBTp06hZcuWFXosf39/TJkyBVOmTCmyTl5eHoKDg7F69Wo8+eSTZT7WypUrMWXKFNy/f7/IOnPmzEFkZCROnz5dZJ3k5GQEBwfj5MmT8Pb2LnN7iIiIiIiqEzs74KmnNC+te/c0i48/mui4eRO4eFHzWr1aU08uB5o31090NG2qGSVSWlKJFIHOgQh0DsSAxgN05TkFObiUdMkg2ZGQkYD4tHjEp8Vj6+WtuvpWUis0dm2sS3ao5KpquXwBkxpEREREZBIhBGbMOIBLl+5hxowD6NbtpQqdu3XUqFFYtWoVAMDKygq+vr4YPnw4/u///g9yedlvY0eNGoXU1FREPjp+vIr59ttvERAQoJfQuHz5Mt566y0cPnwYeXl5aNGiBd5//3107drVrGNNnToVEydO1L031j+urq4YMWIEZs+ejeXLl5t1PCIiIiKi6szZGejRQ/PSunPnYYJD+9+kJODUKc3r++819aytNVNePZroaNQIkMnK1hZruTVaebZCK89WeuX3su89THLcfTiFVUZehi7xUZ0xqUFERERUywghkJWVX+rtdu2Kx/HjdwEAx4/fxZYtV0s9WsPGxqpUiZBnnnkGK1asQE5ODrZu3YpJkyZBoVBg+vTppTouABQWFlaLBfSEEFi0aBHee+89vfK+ffuiYcOG2LNnD1QqFRYsWIC+ffsiJiYGHh4eZT6enZ0d7OzsSqw3evRohIaGYt68eXAuzcqIREREREQ1nIcH0Lev5gUAQgDXrz9McGiTHenpwN9/a15adnZAaOjD9TnatgUCAjRTWpWVs8oZnf07o7N/Z12ZEALxafF6IzqO3TyGa6nXyn4gC2FSg4iIiKiWycrKh53d12bvZ8CAzaXeJjNzEmxtFSbXVyqV8PDwgBAC48aNw5YtW7BlyxZMnz4d9+/fx+TJk/H7778jNzcXnTt3xtdff42GDRsCeDj10urVqzFt2jRcvnwZL730km70hzbBsXfvXvj7++sdt7CwEGPHjsWePXtw584d+Pr64rXXXsPkyZN1dbQjGjp27IgvvvgCeXl5GDp0KBYsWACr/40pT0xMxJgxY7Br1y54eHjggw8+KPGcT5w4gZiYGPTp00dXlpycjCtXrmD58uVo0aIFAOCTTz7B4sWLcf78+RKTGpGRkXj77bdx48YNdO7cGcuWLYOPjw8A/emn5syZY7R/unTpgqZNm8LLywu//fYbxowZU+J5EBERERHVVhIJ4OeneQ0cqClTq4GrV/UTHSdPatbt2L9f89JydtZfiLxtW8DLy7xEh0Qigb+TP/yd/NEvqB8A4OTtkwj9LtSMM7UMJjVqMFcbV1jLrUtc6MXVxrUSW0VERERUdiqVCikpKQA0SYUrV65gy5YtcHBwwDvvvIPevXvj4sWLuqRCVlYWPv30UyxbtgwuLi7w9PREdnY20tPTsWLFCgCAs7MzEhIS9I6jVqvh7e2NDRs2wMXFBX/99RfGjh0LT09PPP/887p6e/fuhaenJ/bu3YurV69iyJAhaNmyJSIiInRtTEhIwN69e2FlZYVJkyYhMTGx2HM8ePAgGjVqBHt7e12Zi4sLgoKCsHr1arRu3RpKpRJLly6Fu7s7QkOLD0KysrLw0Ucf4YcffoBKpc
KECRMwdOhQHD582KDu1KlTcenSJYP+0WrXrh0OHjzIpAYRERERUSlJpZqppho1AoYN05QVFACXLuknOs6c0azbsXOn5qXl4fFwyiptosO1lv5Zl0mNGszX0RfRr0cjOSsZADBypMD58xJ8/rkaXbtKAWgSH1VpkRciIiKqeDY2VsjMnGRyfSEEOndejzNnklBYKHTlMpkEISFu2L9/iMnTOtnYlGFVvP+1Yffu3dixYwcmTpyoS2YcPnxYt+7E2rVr4ePjg8jISAwePBgAkJ+fj8WLFyMkJES3L5VKhdzc3GJHN1hZWWHu3Lm69wEBAThy5Ah++eUXvaRGnTp1sGjRIshkMjRu3Bh9+vTB7t27ERERgcuXL2P79u04duwY2rZtCwBYvnw5mjRpUuy5xsfHw8vLS69MIpFg165dGDBgAOzt7SGVSuHu7o4///wTderUKXZ/+fn5WLhwIUJDQyGXy7Fq1So0adIEx44dQ7t27fTq2tnZFds/Xl5eOHXqVLHHIyIiIiIi02gXE2/eHBg9WlOWmwucO6ef6LhwQbNux++/a15afn76iY7QUMDR0TLnUpmY1KjhfB19dUkL23QB3JYg0EaN1p5SC7eMiIiILEUikZRqCqgdO2Jx8qTh6ILCQoGTJxNx+HACwsMDyrOJOlu3boWdnR3y8/OhVqsxbNgwzJkzB7t374ZcLkf79u11dbWjGS5duqQrUygUuumaSuubb77BDz/8gOvXryM7Oxt5eXlo2bKlXp2mTZtC9siqfp6enjh3TrPo3qVLlyCXy/VGUjRu3BhOTk7FHjc7OxvW1tZ6ZUIITJgwAe7u7jh48CBUKhWWLVuGfv364fjx4/D09ETTpk0RHx8PAOjUqRO2b98OAJDL5Wjbti3UarVeGy5dumSQ1CiJSqVCVlZWqbYhIiIiIiLTKZWaBEWbNsD48ZqyrCzNguOPLkR++TIQH695bdz4cPtGjfQTHa1aATY2ljmXisKkBhEREREVSQiBmTMPQSrVzAH7OKkUmDnzEHr29K+QRbi7du2KJUuWwMrKCu7u7rC2ti7VcVQqVZnatW7dOkydOhVffPEFwsLCYG9vj3nz5uHo0aN69bTTXGlJJBJd8qCsXF1ddYkRrT179mDr1q24f/8+HBwcAACLFy9GVFQUVq1ahWnTpmHbtm3Iz9csAK9SqcxqQ1Hu3bsHNze3Ctk3EREREREZZ2MDdOigeWmlpQEnTugnOuLjNcmOy5eBtWs19aRSoGlT/URHixaAQqGZxcdKYo18UfTyBVaSqrd8AZMaRERERFSkvLxCXL+eYTShAWgSHTduZCAvrxBKZfnfWtra2qJBgwYQQqCgoEBX3qRJExQUFODo0aO66adSUlIQHR2N4ODgYvepUChQWFhYbB3ttFavvfaariwmJqZUbW/cuDEKCgpw4sQJ3fRT0dHRSE1NLXa7Vq1aYcmSJRBC6BIy2tERUqn+aFupVKpLovj5+RndX0FBAf755x+0bt1arw1FTYNVXP+cP38eXbp0Kbb9RERERERU8Rwdgaef1ry0kpIeJji0/71zRzOd1blzwA8/aOopFEBICNC6tS9stkcjLT8ZcIwDhg4EcuyAVQ9XLXd1dEW9/6tayxcwqUFERERERVIq5Th+/CUkJWUXWcfd3aZCEhrFadiwIfr374+IiAgsXboU9vb2mDZtGurVq4f+/fsXu62/vz927NiB6OhouLi4wNHIpLMNGzbE6tWrsWPHDgQEBGDNmjU4fvw4AgJMn2YrKCgIzzzzDMaNG4clS5ZALpdjypQpJY6i6Nq1KzIzM3HhwgU0a9YMABAWFoY6depg5MiRmDVrFlQqFb7//nvExsaiT58+xe5Pu0D5/PnzoVQqMXHiRDzxxBNFTj1lrH+srKyQlZWFEydO4KOPPjK5D4iIiIiIqPK4uQG9emleACAEcOuWYaLj/v2H63UAvppXnu3/NpIDt1vr9nn7NnDwIFCVnm3iwgpEREREVC
wfHwe0bl23yJe3t71F2rVixQqEhoaib9++CAsLgxAC27ZtM5gS6nEREREICgpCmzZt4ObmhsOHDxvUGTduHJ577jkMGTIE7du3R0pKit6ojdK00cvLC507d8Zzzz2HsWPHwt3dvdhtXFxc8Oyzz2Ktdrw4NFNS/fnnn8jMzMTTTz+NNm3a4NChQ9i8ebPeIujG2NjY4O2338aIESPQsWNH2NnZYf369UXWL6p/Nm/eDF9fX3Tq1KkUPUBERERERJYikQDe3sCAAcCHHwI7dgApKcDVq8C6dUDv3qbt5/btCm1mqUmEEMLSjagt0tPT4ejoiLS0NN1cyJXpiScEjh6V4Lff1BgwgPmsslCr1UhMTIS7u7vB9A9UMvafedh/5mH/mY99aB5L9l9OTg5iY2MREBBgsAB1daGdfkoul1fI2h1VzdmzZ9GjRw/ExMTAzs7O7P2VR/898cQTmDRpEoYNG1ZkneKuNUvfC1cnlu4r/r43D/vPPOw/87EPzcP+Mw/7zzzsP/OxD0tn3z6ga9f/vXGJBiY2BrKdgE/v69Xbu7dyRmqYeh/Mb5aIiIiIqIpp0aIFPv30U8TGxlq6KQCA5ORkPPfcc3jhhRcs3RQiIiIiIionnTppRnIU9dyTRAL4+GjqVSVMahARERERVUGjRo1C8+bNLd0MAJrpr95+++1aMUqGiIiIiKi2kMmAr77635vHbvW1t/4LFmjqVSVMahARERERERERERER1ULPPQds3Ai4uuiXe3tryp97zjLtKo7c0g0gIiIiIiIiIiIiIiLLeO45QFkP6PsnAAmwe7canTtLq9wIDS2O1CAiIiIiIiIiIiIiomqBSQ0iIiIiIiIiIiIiolpq0yZg1Mj/vRFAt25S+PtryqsiJjVqicJCID1d8//Pn9e8JyIiIiIiIiIiIqLaa+m66xj4+kkkSy5oCiQFgOdJ3Cw8iYGvn8TSddct20AjmNSoBTZtAvz9gUuXNEvWz5xZtTNtREREVHW9v/V9SCOkeH/r+5ZuChEREREREZkh9t51jL8QBIwLBYYO1BRaZ2re/+81/kIQYu9VrcQGkxo13KZNwKBBwM2b+uW3bmnKmdggIiIiU72/9X3M2jwLAgKzNs+qlYkNf39/LFiwwOz9LF++HD179qyU9kgkEkRGRhZbZ9q0aZg4caLZ7SEiIiIiouoj6nAyIM8pvpI8R1OvCmFSowYrLAQmTwaEMPxMWzZlCqeiIiIiopJpExqPqujERlJSEl599VX4+vrC2toaPj4+eOaZZ3D48OFyPY6xxMDKlSvh5ORUrsfRysnJwcyZMzF79my98gULFiAoKAgqlQo+Pj544403kJNTQoBhgtu3b6NXr14AgLi4OEgkEpw+fVqvztSpU7Fq1Spcu3bN7OMREREREVH1kGxirsLUepVFbukGUMU5eNBwhMajhABu3NDU69Kl0ppFRERE1YyxhIaWtnxm35nlftyBAwciLy8Pq1atQkBAABISErBv3z6kpKSU+7Eq08aNG+Hg4IAOHTroyn766SdMmzYNP/zwA5588klcvnwZo0aNgkQiwfz58806noeHB4QQKCgoKLKOq6srwsPDsWTJEsybN8+s4xERERERUfXg6grAhJmlXF0rvCmlwpEaNdjt2+Vbj4iIiGoGIQQe5D4w6TUzcmaRCQ2tWZtnYWbkTJP2J4wNITUiNTUVBw8exKeffoquXbvCz88Pbdu2xfTp0/Gf//xHr94rr7wCNzc3ODg44Omnn8aZM2d0n8fExKB///6oW7cu7Ozs0LZtW+zatUv3eZcuXRAfH4833ngDEokEEokE+/btw+jRo5GWlqYrmzNnTpHtLO74xqxbtw79+vXTK/vrr7/QoUMHDBs2DP7+/ujZsydeeOEFHDt2rMS+ysjIwAsvvABbW1vUq1cP33zzjd7nj04/Vb9+fQBAq1atIJFI0OWRJ1v69euHdevWlXg8IiIiIiKqGVq1Kt96lYUjNWowT8/yrU
dEREQ1Q1ZeFuxetyvXfX7wxwf44I8PSqyXuSgTtkrbEuvZ2dnBzs4OkZGReOKJJ6BQKIzWGzx4MFQqFbZv3w5HR0csXboU3bp1w+XLl+Hs7IzMzEz07t0bH374IZRKJVavXo1+/fohOjoavr6+2LRpE0JCQjB27FhEREQAAJydnbFgwQLMmjUL0dHRuvaU5fjGHDp0CMOHD9cre/LJJ/Hjjz/i2LFjaNeuHa5du4Zt27YZ1DNm3rx5mDFjBubOnYsdO3Zg8uTJaNSoEXr06GFQ9+jRo2jfvj127dqFpk2b6vVru3btcPPmTcTFxcHf37/E4xIRERERUfUmk5VvvcrCpEYN1qkT4O2tWRTc2EOREonm806dKr9tRERERMWRy+VYuXIlIiIi8O2336J169bo2LEjhg0bhpCQEACa5MCxY8eQmJgIpVIJAPj8888RGRmJjRs3YuzYsQgJCdHVB4D3338fv/32G7Zs2YLXX38dzs7OkMlksLe3h4eHh66eo6MjJBKJXtnjTDn+41JTU5GWlgYvLy+98mHDhiE5ORkdO3bUTRU1fvx4zJgxo8S+6tChA6ZNmwYAaNSoEQ4fPowvv/zSaFLDzc0NAODi4mJwbto2xcfHM6lBRERERERVFpMaNZhMBnz1FTBokCaB8WhiQyLR/HfBgqqXaSMiIqKKZaOwQeaizBLrfbL9E5NGX2i92+ddTOs1rcRjm2rgwIHo06cPDh48iCNHjmD79u344osvsGzZMowaNQpnzpxBZmYmXFxc9LbLzs5GTEwMACAzMxNz5szBH3/8gdu3b6OgoADZ2dm4ft2EiWNLYMrxH5ednQ0AsLa21ivft28fPvroIyxevBjt27fH1atXMXnyZLz//vuYOXMm1q5di3Hjxunqb9++HZ3+92RKWFiY3r7CwsIMFj43hUqlAgBkZWWVelsiIiIiIqLKwqRGDffcc8DGjcDkyfqLhnt7axIazz1nsaYRERGRhUgkEpOmgHp/wPtQyBUlrqkBAO/1f69CFgu3trZGjx490L17d0yfPh3jx4/H7NmzMWrUKGRmZsLT0xP79u0z2M7JyQkAMHXqVERFReHzzz9HgwYNoFKpMGjQIOTl5ZndNlOO/zgXFxdIJBLcv39fr3zmzJkYPnw4XnnlFQBA8+bN8eDBA4wdOxb/93//h//85z9o3769rn69evXMbv/j7t27B+DhaA4iIiIiIqrZXG1cYS23Rk5BTpF1rOXWcLWpWiuFM6lRCzz3HNC/P7B/vxrR0ekICnJA585SjtAgIiKiEmkTFcUlNioqoWFMcHAwNm/eDABo3bo17ty5A7lcXuR0SYcPH8aoUaPw7LPPAtAkIuLi4vTqKBQKFBYWllj2OFOO/ziFQoHg4GBcvHgRPXv21JVnZWVBKpXq1ZX972ZNCAF7e3vY29sb3efff/9t8L5JkyZFHh+A0XM7f/48rKys0LRpU5POhYiIiIiIqjdfR19Evx6N5KxkAIBarca9e/fg7Oysi09cbVzh6+hryWYaYFKjlpDJgC5dgODgHLi7O+CxmJmIiIioSMUlNioqoZGSkoLBgwfj5ZdfRosWLWBnZ4ejR49i3rx56N+/PwCge/fuCAsLw4ABA/DZZ5+hUaNGSEhIwB9//IFnn30Wbdq0QcOGDbFp0yb069cPEokEM2fOhFqt1juWv78/Dhw4gKFDh0KpVMLV1RX+/v7IzMzE7t27ERISAhsbG9jY6E+dZcrxjQkPD8ehQ4cwZcoUXVm/fv0wf/58tGrVSjf91MyZM9GvXz9dcqMohw8fxmeffYYBAwYgKioKGzZswB9//GG0rru7O1QqFf788094e3vD2toajo6OAICDBw+iU6dOummoiIiIiIio5vN19NUlLdRqNRJliXB3dzd46KoqqbotIyIiIqIqY2bfmXiv/3t6ZRU5QsPOzg7t27fHl19+iaeeegrNmzfHnDlz8Morr2DRokUANNNobdu2DU899RRGjx6NRo
0aYejQoYiPj0fdunUBAPPnz0edOnXw5JNPol+/fggPD0fr1q31z+O99xAXF4fAwEDd1EtPPvkkxo8fjyFDhsDNzQ2fffaZQRtNOb4xY8aMwbZt25CWlqYre/fdd/Hf//4X7777LoKDgzFmzBiEh4dj6dKlJfbVf//7X/zzzz9o1aoVPvjgA8yfPx/h4eFG68rlcnz99ddYunQpvLy8dAkiAFi3bh0iIiJKPB4REREREZElSYR4dPloqkjp6elwdHREWloaHBwcKv34arUaiYlVP9NWlbEPzcP+Mw/7zzzsP/OxD81jyf7LyclBbGwsAgICDBaoLq33t76P2ZtnY27/uZU25RSgmYKpoKAAcrkcEomk0o5bUQYPHozWrVtj+vTplXK8kvpv+/bt+O9//4uzZ89CLi/7YO7irjVL3wtXJ5buK/6+Nw/7zzzsP/OxD83D/jMP+8887D/zsQ/NY+n+M/U+mN8sEREREZlsZt+ZUH+vrtSERk00b9482NnZWboZOg8ePMCKFSvMSmgQERERERFVBkYtRERERESVzN/fHxMnTrR0M3QGDRpk6SYQERERERGZhCM1iIiIiIiIiIiIiIioWmBSg4iIiIiIiIiIiIiIqgUmNYiIiIhqCSGEpZtANRyvMSIiIiIiqmhMahARERHVcDKZDACQl5dn4ZZQTZeVlQUAsLKysnBLiIiIiIiopuJC4UREREQ1nFwuh42NDZKSkmBlZQWptPo91yKEQEFBAeRyOSQSiaWbU+1UdP8JIZCVlYXExEQ4OTnpEmlERERERETljUkNIiIiohpOIpHA09MTsbGxiI+Pt3RzykQIAbVaDalUyqRGGVRW/zk5OcHDw6PC9k9ERERERMSkBhEREVEtoFAo0LBhw2o7BZVarUZKSgpcXFyq5UgTS6uM/rOysuIIDSIiIiIiqnBMahARERHVElKpFNbW1pZuRpmo1WpYWVnB2tqaSY0yYP8REREREVFNwYiGiIiIiIiIiIiIiIiqBSY1iIiIiIiIiIiIiIioWuD0U5VICAEASE9Pt8jx1Wo1MjIyOO2AGdiH5mH/mYf9Zx72n/nYh+Zh/5mH/WeeqtB/2ntg7T0xFY1xQ/XG/jMP+8987EPzsP/Mw/4zD/vPfOxD81i6/0yNGZjUqEQZGRkAAB8fHwu3hIiIiIjIMjIyMuDo6GjpZlRpjBuIiIiIqDYrKWaQCD4qVWnUajUSEhJgb28PiURS6cdPT0+Hj48Pbty4AQcHh0o/fk3APjQP+8887D/zsP/Mxz40D/vPPOw/81SF/hNCICMjA15eXnxqrgSMG6o39p952H/mYx+ah/1nHvafedh/5mMfmsfS/WdqzMCRGpVIKpXC29vb0s2Ag4MDf6jNxD40D/vPPOw/87D/zMc+NA/7zzzsP/NYuv84QsM0jBtqBvafedh/5mMfmof9Zx72n3nYf+ZjH5rHkv1nSszAR6SIiIiIiIiIiIiIiKhaYFKDiIiIiIiIiIiIiIiqBSY1ahGlUonZs2dDqVRauinVFvvQPOw/87D/zMP+Mx/70DzsP/Ow/8zD/qPS4PViHvafedh/5mMfmof9Zx72n3nYf+ZjH5qnuvQfFwonIiIiIiIiIiIiIqJqgSM1iIiIiIiIiIiIiIioWmBSg4iIiIiIiIiIiIiIqgUmNYiIiIiIiIiIiIiIqFpgUoOIiIiIiIiIiIiIiKoFJjWIiIiIiIiIiIiIiKhaYFKjhvnmm2/g7+8Pa2trtG/fHseOHSu2/oYNG9C4cWNYW1ujefPm2LZtWyW1tGoqTf+tXLkSEolE72VtbV2Jra1aDhw4gH79+sHLywsSiQSRkZElbrNv3z60bt0aSqUSDRo0wMqVKyu8nVVZaftw3759BtegRCLBnTt3KqfBVcjHH3+Mtm3bwt7eHu7u7hgwYACio6NL3I6/Ax8qSx/y9+BDS5YsQYsWLeDg4AAHBweEhYVh+/btxW7D6++h0vYfr73iffLJJ5BIJJ
gyZUqx9XgN1m6MG8zDuKHsGDeYhzGDeRg3mIcxg3kYM5iPcUP5qs5xA5MaNcj69evx5ptvYvbs2Th58iRCQkIQHh6OxMREo/X/+usvvPDCCxgzZgxOnTqFAQMGYMCAATh//nwlt7xqKG3/AYCDgwNu376te8XHx1dii6uWBw8eICQkBN98841J9WNjY9GnTx907doVp0+fxpQpU/DKK69gx44dFdzSqqu0fagVHR2tdx26u7tXUAurrv3792PChAn4+++/ERUVhfz8fPTs2RMPHjwochv+DtRXlj4E+HtQy9vbG5988glOnDiBf/75B08//TT69++PCxcuGK3P609fafsP4LVXlOPHj2Pp0qVo0aJFsfV4DdZujBvMw7jBPIwbzMOYwTyMG8zDmME8jBnMx7ih/FT7uEFQjdGuXTsxYcIE3fvCwkLh5eUlPv74Y6P1n3/+edGnTx+9svbt24tx48ZVaDurqtL234oVK4Sjo2Mlta56ASB+++23Yuu8/fbbomnTpnplQ4YMEeHh4RXYsurDlD7cu3evACDu379fKW2qThITEwUAsX///iLr8Hdg8UzpQ/4eLF6dOnXEsmXLjH7G669kxfUfrz3jMjIyRMOGDUVUVJTo3LmzmDx5cpF1eQ3WbowbzMO4ofwwbjAPYwbzMW4wD2MG8zFmMB/jhtKrCXEDR2rUEHl5eThx4gS6d++uK5NKpejevTuOHDlidJsjR47o1QeA8PDwIuvXZGXpPwDIzMyEn58ffHx8SswMkz5ef+WnZcuW8PT0RI8ePXD48GFLN6dKSEtLAwA4OzsXWYfXYPFM6UOAvweNKSwsxLp16/DgwQOEhYUZrcPrr2im9B/Aa8+YCRMmoE+fPgbXljG8Bmsvxg3mYdxQ+Xj9lQ/GDMYxbjAPY4ayY8xgPsYNZVcT4gYmNWqI5ORkFBYWom7dunrldevWLXKuzDt37pSqfk1Wlv4LCgrCDz/8gM2bN+PHH3+EWq3Gk08+iZs3b1ZGk6u9oq6/9PR0ZGdnW6hV1Yunpye+/fZb/Prrr/j111/h4+ODLl264OTJk5ZumkWp1WpMmTIFHTp0QLNmzYqsx9+BRTO1D/l7UN+5c+dgZ2cHpVKJ8ePH47fffkNwcLDRurz+DJWm/3jtGVq3bh1OnjyJjz/+2KT6vAZrL8YN5mHcUPkYN5iHMUPRGDeYhzFD2TBmMB/jBvPUlLhBbtGjE1VjYWFhepngJ598Ek2aNMHSpUvx/vvvW7BlVFsEBQUhKChI9/7JJ59ETEwMvvzyS6xZs8aCLbOsCRMm4Pz58zh06JClm1JtmdqH/D2oLygoCKdPn0ZaWho2btyIkSNHYv/+/UXeYJO+0vQfrz19N27cwOTJkxEVFcWFD4mqIP7OIktizFA0xg3mYcxQNowZzMe4oexqUtzApEYN4erqCplMhrt37+qV3717Fx4eHka38fDwKFX9mqws/fc4KysrtGrVClevXq2IJtY4RV1/Dg4OUKlUFmpV9deuXbtafVP++uuvY+vWrThw4AC8vb2LrcvfgcaVpg8fV9t/DyoUCjRo0AAAEBoaiuPHj+Orr77C0qVLDery+jNUmv57XG2/9k6cOIHExES0bt1aV1ZYWIgDBw5g0aJFyM3NhUwm09uG12DtxbjBPIwbKh/jhvJX22MGgHGDuRgzlB1jBvMxbii7mhQ3cPqpGkKhUCA0NBS7d+/WlanVauzevbvIeeXCwsL06gNAVFRUsfPQ1VRl6b/HFRYW4ty5c/D09KyoZtYovP4qxunTp2vlNSiEwOuvv47ffvsNe/bsQUBAQInb8BrUV5Y+fBx/D+pTq9XIzc01+hmvv5IV13+Pq+3XXrdu3XDu3DmcPn1a92rTpg1efPFFnD592iAwAXgN1maMG8zDuKHy8forf7U1ZgAYN5iLMUP5Y8xgPsYNpqtRcYNFlymncrVu3TqhVCrFypUrxcWLF8XYsWOFk5OTuHPnjhBCiOHDh4tp06
bp6h8+fFjI5XLx+eefi0uXLonZs2cLKysrce7cOUudgkWVtv/mzp0rduzYIWJiYsSJEyfE0KFDhbW1tbhw4YKlTsGiMjIyxKlTp8SpU6cEADF//nxx6tQpER8fL4QQYtq0aWL48OG6+teuXRM2NjbirbfeEpcuXRLffPONkMlk4s8//7TUKVhcafvwyy+/FJGRkeLKlSvi3LlzYvLkyUIqlYpdu3ZZ6hQs5tVXXxWOjo5i37594vbt27pXVlaWrg5/BxavLH3I34MPTZs2Tezfv1/ExsaKs2fPimnTpgmJRCJ27twphOD1V5LS9h+vvZJ17txZTJ48Wfee1yA9inGDeRg3mIdxg3kYM5iHcYN5GDOYhzGD+Rg3lL/qGjcwqVHDLFy4UPj6+gqFQiHatWsn/v77b91nnTt3FiNHjtSr/8svv4hGjRoJhUIhmjZtKv74449KbnHVUpr+mzJliq5u3bp1Re/evcXJkyct0OqqYe/evQKAwUvbZyNHjhSdO3c22KZly5ZCoVCI+vXrixUrVlR6u6uS0vbhp59+KgIDA4W1tbVwdnYWXbp0EXv27LFM4y3MWL8B0Lum+DuweGXpQ/4efOjll18Wfn5+QqFQCDc3N9GtWzfdjbUQvP5KUtr+47VXsseDE16D9DjGDeZh3FB2jBvMw5jBPIwbzMOYwTyMGczHuKH8Vde4QSKEEOU//oOIiIiIiIiIiIiIiKh8cU0NIiIiIiIiIiIiIiKqFpjUICIiIiIiIiIiIiKiaoFJDSIiIiIiIiIiIiIiqhaY1CAiIiIiIiIiIiIiomqBSQ0iIiIiIiIiIiIiIqoWmNQgIiIiIiIiIiIiIqJqgUkNIiIiIiIiIiIiIiKqFpjUICIiIiIiIiIiIiKiaoFJDSIiojLo0qULpkyZYulmEBERERFRFcWYgYioYjCpQUREVdaoUaMgkUggkUhgZWWFgIAAvP3228jJybF004iIiIiIqApgzEBEVPvILd0AIiKi4jzzzDNYsWIF8vPzceLECYwcORISiQSffvqppZtGRERERERVAGMGIqLahSM1iIioSlMqlfDw8ICPjw8GDBiA7t27IyoqCgCQm5uLSZMmwd3dHdbW1ujYsSOOHz+u23blypVwcnLS219kZCQkEonu/Zw5c9CyZUusWbMG/v7+cHR0xNChQ5GRkaGr8+DBA4wYMQJ2dnbw9PTEF198UbEnTUREREREJmPMQERUuzCpQURE1cb58+fx119/QaFQAADefvtt/Prrr1i1ahVOnjyJBg0aIDw8HPfu3SvVfmNiYhAZGYmtW7di69at2L9/Pz755BPd52+99Rb279+PzZs3Y+fOndi3bx9OnjxZrudGRERERETmY8xARFTzMalBRERV2tatW2FnZwdra2s0b94ciYmJeOutt/DgwQMsWbIE8+bNQ69evRAcHIzvv/8eKpUKy5cvL9Ux1Go1Vq5ciWbNmqFTp04YPnw4du/eDQDIzMzE8uXL8fnnn6Nbt25o3rw5Vq1ahYKCgoo4XSIiIiIiKiXGDEREtQvX1CAioiqta9euWLJkCR48eIAvv/wScrkcAwcOxNmzZ5Gfn48OHTro6lpZWaFdu3a4dOlSqY7h7+8Pe3t73XtPT08kJiYC0DyRlZeXh/bt2+s+d3Z2RlBQkJlnRkRERERE5YExAxFR7cKkBhERVWm2trZo0KABAOCHH35ASEgIli9fjrZt25a4rVQqhRBCryw/P9+gnpWVld57iUQCtVptRquJiIiIiKiyMGYgIqpdOP0UERFVG1KpFDNmzMC7776LwMBAKBQKHD58WPd5fn4+jh8/juDgYACAm5sbMjIy8ODBA12d06dPl+qYgYGBsLKywtGjR3Vl9+/fx+XLl807GSIiIiIiKneMGYiIaj4mNYiIqFoZPHgwZDIZlixZgldffRVvvfUW/vzzT1y8eBERERHIysrCmDFjAADt27eHjY0NZsyYgZiYGPz0009YuXJlqY5nZ2
eHMWPG4K233sKePXtw/vx5jBo1ClIp/wklIiIiIqqKGDMQEdVsnH6KiIiqFblcjtdffx2fffYZYmNjoVarMXz4cGRkZKBNmzbYsWMH6tSpA0Azj+2PP/6It956C99//z26deuGOXPmYOzYsaU65rx585CZmYl+/frB3t4e//3vf5GWllYRp0dERERERGZizEBEVLNJxOMTBxIREREREREREREREVVBHAdHRERERERERERERETVApMaRERERERERERERERULTCpQURERERERERERERE1QKTGkREREREREREREREVC0wqUFERERERERERERERNUCkxpERERERERERERERFQtMKlBRERERERERERERETVApMaRERERERERERERERULTCpQURERERERERERERE1QKTGkREREREREREREREVC0wqUFERERERERERERERNUCkxpERERERERERERERFQtMKlBRERERERERERERETVApMaRERERERERERERERULTCpQURERERERERERERE1QKTGkRENYBEIsGcOXMscux9+/ZBIpFg3759Fjl+dSCRSPD6669buhlV2sqVKyGRSBAXF2fpphARERERVVtdunRBly5dLHZ8S8amRFR7MKlBRFROtH+ULer1999/W7qJZlm8eDFWrlxp6Wbo6dKlC5o1a2bpZlQJ2uSS9iWTyeDu7o5Bgwbh0qVLlm4eERERUaV79P780KFDBp8LIeDj4wOJRIK+fftaoIWWN2fOHEgkEiQnJxv9vFmzZhb9A3l1FRcXV2xs+Mknn1i6iWbZtm1blUtclHQtE1HNIrd0A4iIapr33nsPAQEBBuUNGjSwQGvKz+LFi+Hq6opRo0bplT/11FPIzs6GQqGwTMNIz6RJk9C2bVvk5+fj7Nmz+Pbbb7Fv3z6cP38eHh4elm4eERERUaWztrbGTz/9hI4dO+qV79+/Hzdv3oRSqbRQy6ime+GFF9C7d2+D8latWlmgNeVn27Zt+Oabb4wmNrKzsyGX88+NRFSx+FuGiKic9erVC23atLF0MyqNVCqFtbW1pZtB/9OpUycMGjRI9z4oKAivvvoqVq9ejbffftuCLSMiIiKyjN69e2PDhg34+uuv9f7Y+tNPPyE0NLTaPtn94MED2NraWroZVIzWrVvjpZdesnQzKhVjQyKqDJx+ioioEuXn58PZ2RmjR482+Cw9PR3W1taYOnUqACAvLw+zZs1CaGgoHB0dYWtri06dOmHv3r0lHmfUqFHw9/c3KNcOyX3UihUr8PTTT8Pd3R1KpRLBwcFYsmSJXh1/f39cuHAB+/fv1w2Z1g5DL2pNjQ0bNiA0NBQqlQqurq546aWXcOvWLYN22tnZ4datWxgwYADs7Ozg5uaGqVOnorCwsMTzNNXixYvRtGlTKJVKeHl5YcKECUhNTdWrc+XKFQwcOBAeHh6wtraGt7c3hg4dirS0NF2dqKgodOzYEU5OTrCzs0NQUBBmzJhhcjvWrl2LoKAgWFtbIzQ0FAcOHNB9tnfvXkgkEvz2228G2/3000+QSCQ4cuRIqc+9U6dOAICYmBi98lOnTqFXr15wcHCAnZ0dunXrZjBFmrHrBTC+/oW/vz/69u2LQ4cOoV27drC2tkb9+vWxevVqg+0vXLiAp59+GiqVCt7e3vjggw+gVqtLfW5EREREpnjhhReQkpKCqKgoXVleXh42btyIYcOGGd1GrVZjwYIFaNq0KaytrVG3bl2MGzcO9+/f16unvQfat28f2rRpA5VKhebNm+vujTdt2oTmzZvr7v9OnTplcKw9e/agU6dOsLW1hZOTE/r3728wfaj2vuzixYsYNmwY6tSpg44dO2LFihWQSCRG9/vRRx9BJpMZ3IOba+HChWjatClsbGxQp04dtGnTBj/99JPu8/j4eLz22msICgqCSqWCi4sLBg8ebHTttLNnz6Jz585694Xac3q8/vbt23X9ZG9vjz59+uDChQvFtvWff/6BRC
LBqlWrDD7bsWMHJBIJtm7dCgDIyMjAlClT4O/vD6VSCXd3d/To0QMnT54sfSeZqG/fvqhfv77Rz8LCwvQeljMlbjOmqLXrjMVxBw8exODBg+Hr6wulUgkfHx+88cYbyM7O1tUZNWoUvvnmGwDQm1JLy9iaGqbEHtp2Hj58GG+++Sbc3Nxga2uLZ599FklJSSWep6lM+Xkz5VowJX4koorDkRpEROUsLS3N4GkviUQCFxcXWFlZ4dlnn8WmTZuwdOlSvSmbIiMjkZubi6FDhwLQJDmWLVuGF154AREREcjIyMDy5csRHh6OY8eOoWXLluXS3iVLlqBp06b4z3/+A7lcjt9//x2vvfYa1Go1JkyYAABYsGABJk6cCDs7O/zf//0fAKBu3bpF7nPlypUYPXo02rZti48//hh3797FV199hcOHD+PUqVNwcnLS1S0sLER4eDjat2+Pzz//HLt27cIXX3yBwMBAvPrqq2af35w5czB37lx0794dr776KqKjo7FkyRIcP34chw8fhpWVFfLy8hAeHo7c3FxMnDgRHh4euHXrFrZu3YrU1FQ4OjriwoUL6Nu3L1q0aIH33nsPSqUSV69exeHDh01qx/79+7F+/XpMmjQJSqUSixcvxjPPPINjx47p5ir28fHB2rVr8eyzz+ptu3btWgQGBiIsLKzU568NXurUqaMru3DhAjp16gQHBwe8/fbbsLKywtKlS9GlSxfs378f7du3L/VxAODq1asYNGgQxowZg5EjR+KHH37AqFGjEBoaiqZNmwIA7ty5g65du6KgoADTpk2Dra0tvvvuO6hUqjIdk4iIiKgk/v7+CAsLw88//4xevXoB0PyBPC0tDUOHDsXXX39tsM24ceN097STJk1CbGwsFi1ahFOnTunuIbWuXr2KYcOGYdy4cXjppZfw+eefo1+/fvj2228xY8YMvPbaawCAjz/+GM8//zyio6MhlWqeMd21axd69eqF+vXrY86cOcjOzsbChQvRoUMHnDx50uBBpcGDB6Nhw4b46KOPIITAoEGDMGHCBKxdu9ZgSqO1a9eiS5cuqFevXrn15ffff49JkyZh0KBBmDx5MnJycnD27FkcPXpUlyA6fvw4/vrrLwwdOhTe3t6Ii4vDkiVL0KVLF1y8eBE2NjYAgFu3bqFr166QSCSYPn06bG1tsWzZMqPTga1ZswYjR45EeHg4Pv30U2RlZWHJkiXo2LEjTp06ZfSBLgBo06YN6tevj19++QUjR47U+2z9+vWoU6cOwsPDAQDjx4/Hxo0b8frrryM4OBgpKSk4dOgQLl26hNatW5epv7KysoyOBHJycoJcLseQIUMwYsQIHD9+HG3bttV9Hh8fj7///hvz5s3TlZkSt5lrw4YNyMrKwquvvgoXFxccO3YMCxcuxM2bN7FhwwYAmp+NhIQEREVFYc2aNSXus7Sxx8SJE1GnTh3Mnj0bcXFxWLBgAV5//XWsX7/e7PMz9eetpGvBlPiRiCqYICKicrFixQoBwOhLqVTq6u3YsUMAEL///rve9r179xb169fXvS8oKBC5ubl6de7fvy/q1q0rXn75Zb1yAGL27Nm69yNHjhR+fn4GbZw9e7Z4/Fd/VlaWQb3w8HC9tgghRNOmTUXnzp0N6u7du1cAEHv37hVCCJGXlyfc3d1Fs2bNRHZ2tq7e1q1bBQAxa9YsvXYCEO+9957ePlu1aiVCQ0MNjvW4zp07i6ZNmxb5eWJiolAoFKJnz56isLBQV75o0SIBQPzwww9CCCFOnTolAIgNGzYUua8vv/xSABBJSUkltutx2uvgn3/+0ZXFx8cLa2tr8eyzz+rKpk+fLpRKpUhNTdU7B7lcrvf9GqP9Hn744QeRlJQkEhISxJ9//ikaNGggJBKJOHbsmK7ugAEDhEKhEDExMbqyhIQEYW9vL5566ildmbHrRYiH13psbKyuzM/PTwAQBw4c0Gu7UqkU//3vf3VlU6ZMEQDE0aNH9eo5Ojoa7J
OIiIjIHNp7luPHj4tFixYJe3t73b3v4MGDRdeuXYUQmvuYPn366LY7ePCgACDWrl2rt78///zToFx7D/TXX3/pyrT3+yqVSsTHx+vKly5dqnffLIQQLVu2FO7u7iIlJUVXdubMGSGVSsWIESN0Zdr7shdeeMHgPF944QXh5eWld7978uRJAUCsWLGi2D7S7reoe9zHY4D+/fsXe/8thPH44siRIwKAWL16ta5s4sSJQiKRiFOnTunKUlJShLOzs959YUZGhnBychIRERF6+7xz545wdHQ0KH/c9OnThZWVlbh3756uLDc3Vzg5OenFVY6OjmLChAnF7stUsbGxRcaGAMSRI0eEEEKkpaUZ3C8LIcRnn30mJBKJ3vVjatzWuXNnve/M2L27EIZxXFHH+Pjjjw3aMmHCBKNxghCGsampsYe2nd27dxdqtVpX/sYbbwiZTKYXIxlT0rUshOk/byVdC6bEj0RUsTj9FBFROfvmm28QFRWl99q+fbvu86effhqurq56T5rcv38fUVFRGDJkiK5MJpPpRnKo1Wrcu3cPBQUFaNOmTbkOgX70CXntKJPOnTvj2rVrZRo6+88//yAxMRGvvfaa3nyqffr0QePGjfHHH38YbDN+/Hi99506dcK1a9dKfezH7dq1C3l5eZgyZYruaTgAiIiIgIODg64t2idpduzYgaysLKP70o4u2bx5c5mmSgoLC0NoaKjuva+vL/r3748dO3boptoaMWIEcnNzsXHjRl299evXo6CgwOS5eF9++WW4ubnBy8sLzzzzDNLS0rBmzRrdk1+FhYXYuXMnBgwYoDfU3dPTE8OGDcOhQ4eQnp5e6vMDgODgYN10VwDg5uaGoKAgve9y27ZteOKJJ9CuXTu9ei+++GKZjklERERkiueffx7Z2dnYunUrMjIysHXr1iKnntqwYQMcHR3Ro0cPJCcn616hoaGws7MzmA42ODhYb0St9snzp59+Gr6+vgbl2nuj27dv4/Tp0xg1ahScnZ119Vq0aIEePXpg27ZtBm17/L4Z0NxDJiQk6LVr7dq1UKlUGDhwYIl9UxpOTk64efMmjh8/XmSdR+OL/Px8pKSkoEGDBnByctKLY/7880+EhYXpjUB3dnY2uC+MiopCamoqXnjhBb3vQyaToX379iVOzztkyBDk5+dj06ZNurKdO3ciNTVVL/5ycnLC0aNHkZCQUGI/mGrs2LEGsWFUVBSCg4MBAA4ODujVqxd++eUXCCF0261fvx5PPPGE3vVT3nGbMY8e48GDB0hOTsaTTz4JIYTRKc5KUpbYY+zYsXrTWXXq1AmFhYWIj48vwxk9VJqft5KuBVPiRyKqWExqEBGVs3bt2qF79+56r65du+o+l8vlGDhwIDZv3ozc3FwAmrl28/Pz9W6qAWDVqlVo0aIFrK2t4eLiAjc3N/zxxx/lOk/n4cOH0b17d92com5ubrp1IspyHO3NZlBQkMFnjRs3NrgZtba2hpubm15ZnTp1DOYrLoui2qJQKFC/fn3d5wEBAXjzzTexbNkyuLq6Ijw8HN98843e+Q8ZMgQdOnTAK6+8grp162Lo0KH45ZdfTE5wNGzY0KCsUaNGyMrK0s0R27hxY7Rt2xZr167V1Vm7di2eeOIJNGjQwKTjzJo1C1FRUfjtt98wYsQIpKWl6SV0kpKSkJWVZfT7adKkCdRqNW7cuGHSsR73aNCl9fh3GR8fb7QvjLWHiIiIqLy4ubmhe/fu+Omnn7Bp0yYUFhZi0KBBRuteuXIFaWlpcHd3h5ubm94rMzMTiYmJevUfvwfS/sHTx8fHaLn23qi4++YmTZogOTkZDx480CsPCAgwqNujRw94enrq7iHVajV+/vln9O/fH/b29sY7pBQe/QPzO++8Azs7O7Rr1w4NGzbEhAkTDKZjzc7OxqxZs+Dj4wOlUglXV1e4ubkhNTVV7/46Pj7e6D3u42VXrlwBoEkSPf597Ny50+D7eFxISAgaN26s91DZ+v
Xr4erqiqefflpX9tlnn+H8+fPw8fFBu3btMGfOHLMftGrYsKFBbNi9e3c4ODjo6gwZMgQ3btzQrZ8XExODEydOGMSG5R23GXP9+nXdH/216x127ty5zMcoS+zx+M+Tdhpdc+PD0vy8lXQtmBI/ElHFYlKDiMgChg4dioyMDN0Ijl9++QWNGzdGSEiIrs6PP/6IUaNGITAwEMuXL8eff/6JqKgoPP300yX+Id3Y4s4ADBbfjomJQbdu3ZCcnIz58+fjjz/+QFRUFN544w0AqJTFm2UyWYUfwxRffPEFzp49ixkzZiA7OxuTJk1C06ZNcfPmTQCap5YOHDiAXbt2Yfjw4Th79iyGDBmCHj16lOui5iNGjMD+/ftx8+ZNxMTE4O+//zZ5lAYANG/eHN27d8eAAQOwatUq/Oc//0FERESZEhWmXkdaRX2Xjz51RkRERGQpw4YNw/bt2/Htt9+iV69eeuu8PUqtVsPd3d3oE/ZRUVF477339OoXdQ9UEfdGxtYhk8lkGDZsGH799Vfk5ORg7969SEhIMOkeUjuy+tGFoB+VlZWlN/q6SZMmiI6Oxrp169CxY0f8+uuv6NixI2bPnq2rM3HiRHz44Yd4/vnn8csvv2Dnzp2IioqCi4tLmeIL7TZr1qwx+n1s3ry5xH0MGTIEe/fuRXJyMnJzc7FlyxYMHDgQcvnDpWaff/55XLt2DQsXLoSXlxfmzZuHpk2b6o26rwj9+vWDjY0NfvnlFwCa2FAqlWLw4MG6OubEbabe0xcWFqJHjx74448/8M477yAyMhJRUVFYuXJliccoT1UhpjDlWigpfiSiisWkBhGRBTz11FPw9PTE+vXrkZycjD179hg8ibNx40bUr18fmzZtwvDhwxEeHo7u3bsjJyenxP3XqVMHqampBuWPj5L4/fffdTf148aNQ+/evdG9e3ejwVJRN8OP8/PzAwBER0cbfBYdHa37vDIU1Za8vDzExsYatKV58+Z49913ceDAARw8eBC3bt3Ct99+q/tcKpWiW7dumD9/Pi5evIgPP/wQe/bsKXHIO/DwCbNHXb58GTY2NnojVYYOHQqZTIaff/4Za9euhZWVlcG1URqffPIJcnJy8OGHHwLQPKVoY2Nj9Pv5999/IZVKdU8Vap+KevxaMmfot5+fn9G+MNYeIiIiovL07LPPQiqV4u+//y5y6ikACAwMREpKCjp06GD0KftHH0QyR3H3zf/++y9cXV1ha2tr0r5GjBiB9PR0/P7771i7di3c3Nx0C2CXtQ1ZWVm4ceOGwT2zra0thgwZghUrVuD69evo06cPPvzwQ12csnHjRowcORJffPEFBg0ahB49eqBjx44G95R+fn64evWqwXEfLwsMDAQAuLu7G/0+unTpUuJ5DhkyBAUFBfj111+xfft2pKenY+jQoQb1PD098dprryEyMhKxsbFwcXHR3UdXFFtbW/Tt2xcbNmyAWq3G+vXr0alTJ3h5eenqlCZue5yp9/Tnzp3D5cuX8cUXX+Cdd95B//790b17d712aJkaG5Ym9qhopf15M+VaKCl+JKKKw6QGEZEFSKVSDBo0CL///jvWrFmDgoICgz9ca59QefSJlKNHj+qGJRcnMDAQaWlpOHv2rK7s9u3b+O2330o8RlpaGlasWGGwT1tbW6OJkse1adMG7u7u+Pbbb3XTawHA9u3bcenSJfTp06fEfZSX7t27Q6FQ4Ouvv9Y7x+XLlyMtLU3XlvT0dBQUFOht27x5c0ilUt053Lt3z2D/2vl/Hz3Pohw5ckRvDuEbN25g8+bN6Nmzp97TSK6urujVqxd+/PFHrF27Fs888wxcXV1NP+nHBAYGYuDAgVi5ciXu3LkDmUyGnj17YvPmzYiLi9PVu3v3Ln766Sd07NhRNxxeG0AeOHBAV+/BgwdYtWpVmdvTu3dv/P333zh27JiuLCkpSW/KLSIiIqKKYGdnhyVLlmDOnDno169fkfWef/55FBYW4v333zf4rK
CgwKR7YlN4enqiZcuWWLVqld4+z58/j507d6J3794m76tFixZo0aIFli1bhl9//RVDhw7VG4VQlG7dukGhUGDJkiUGT+J/9913KCgoQK9evXRlKSkpenUUCgWCg4MhhEB+fj4ATYzx+FP1CxcuNBgZEB4ejiNHjuD06dO6snv37hncF4aHh8PBwQEfffSR7hiP0k7lWpwmTZqgefPmWL9+PdavXw9PT0889dRTus8LCwsNpg5yd3eHl5eX3r1+cnIy/v3333JfR2HIkCFISEjAsmXLcObMGZNiw6LitscZu6cvLCzEd999V+IxhBD46quvDPap/eN/ST8LpYk9KpqpP2+mXAumxI9EVLFK/heOiIhKZfv27fj3338Nyp988km9xdGGDBmChQsXYvbs2WjevDmaNGmiV79v377YtGkTnn32WfTp0wexsbH49ttvERwcjMzMzGLbMHToULzzzjt49tlnMWnSJGRlZWHJkiVo1KiR3h/We/bsCYVCgX79+mHcuHHIzMzE999/D3d3d9y+fVtvn6GhoViyZAk++OADNGjQAO7u7npz0GpZWVnh008/xejRo9G5c2e88MILuHv3Lr766iv4+/vrhkiXl6SkJHzwwQcG5QEBAXjxxRcxffp0zJ07F8888wz+85//IDo6GosXL0bbtm11Q/L37NmD119/HYMHD0ajRo1QUFCANWvWQCaT6RZXfO+993DgwAH06dMHfn5+SExMxOLFi+Ht7Y2OHTuW2M5mzZohPDwckyZNglKpxOLFiwEAc+fONag7YsQI3RzPxoLp0nrrrbfwyy+/YMGCBfjkk0/wwQcfICoqCh07dsRrr70GuVyOpUuXIjc3F5999pluu549e8LX1xdjxozBW2+9BZlMhh9++AFubm64fv16mdry9ttvY82aNXjmmWcwefJk2Nra4rvvvoOfn59eEo6IiIioIowcObLEOp07d8a4cePw8ccf4/Tp0+jZsyesrKxw5coVbNiwAV999VWR63GU1rx589CrVy+EhYVhzJgxyM7OxsKFC+Ho6Ig5c+aUal8jRozA1KlTAcDk6Uvd3d0xa9YsvPvuu3jqqafwn//8BzY2Nvjrr7/w888/o2fPnnoJoJ49e8LDwwMdOnRA3bp1cenSJSxatAh9+vTRrd/Rt29frFmzBo6OjggODsaRI0ewa9cuuLi46B377bffxo8//ogePXpg4sSJsLW1xbJly+Dr64t79+7pRgM4ODhgyZIlGD58OFq3bo2hQ4fq7kf/+OMPdOjQAYsWLSrxXIcMGYJZs2bB2toaY8aM0Vt3LiMjA97e3hg0aBBCQkJgZ2eHXbt24fjx4/jiiy909RYtWoS5c+di7969Jo0QOXnyJH788UeD8sDAQL3F5Xv37g17e3tMnTpVLwZ5tN9Njdse17RpUzzxxBOYPn067t27B2dnZ6xbt87gj/KNGzdGYGAgpk6dilu3bsHBwQG//vqr0bUsQkNDAQCTJk1CeHg4ZDKZ0ZEvAEyOPcrL/PnzYWNjo1cmlUoxY8YMk37eTLkWTIkfiaiCCSIiKhcrVqwQAIp8rVixQq++Wq0WPj4+AoD44IMPDPanVqvFRx99JPz8/IRSqRStWrUSW7duFSNHjhR+fn56dQGI2bNn65Xt3LlTNGvWTCgUChEUFCR+/PFHMXv2bPH4r/4tW7aIFi1aCGtra+Hv7y8+/fRT8cMPPwgAIjY2Vlfvzp07ok+fPsLe3l4AEJ07dxZCCLF3714BQOzdu1dvv+vXrxetWrUSSqVSODs7ixdffFHcvHlTr87IkSOFra2twbkba6cxnTt3LrK/u3Xrpqu3aNEi0bhxY2FlZSXq1q0rXn31VXH//n3d59euXRMvv/yyCAwMFNbW1sLZ2Vl07dpV7Nq1S1dn9+7don///sLLy0soFArh5eUlXnjhBXH58uUS2wlATJgwQfz444+iYcOGuu/z8T7Tys3NFXXq1BGOjo4iOzu7xP0L8fB72L
Bhg9HPu3TpIhwcHERqaqoQQoiTJ0+K8PBwYWdnJ2xsbETXrl3FX3/9ZbDdiRMnRPv27YVCoRC+vr5i/vz5umv90evDz89P9OnTx2D7zp07664VrbNnz4rOnTsLa2trUa9ePfH++++L5cuXG+yTiIiIyBzae5bjx48XW6+o+5jvvvtOhIaGCpVKJezt7UXz5s3F22+/LRISEkrcVnv/96jY2FgBQMybN0+vfNeuXaJDhw5CpVIJBwcH0a9fP3Hx4kW9Otr746SkpCLP4/bt20Imk4lGjRoVe77G/Pjjj+KJJ54Qtra2QqlUisaNG4u5c+eKnJwcvXpLly4VTz31lHBxcRFKpVIEBgaKt956S6Slpenq3L9/X4wePVq4uroKOzs7ER4eLv7991/h5+cnRo4cqbe/U6dOiU6dOgmlUim8vb3Fxx9/LL7++msBQNy5c0ev7t69e0V4eLhwdHQU1tbWIjAwUIwaNUr8888/Jp3jlStXdLHCoUOH9D7Lzc0Vb731lggJCRH29vbC1tZWhISEiMWLF+vV034PRd3Ha2m/66Jej/eDEEK8+OKLAoDo3r270X2aGrcZu/+OiYkR3bt3F0qlUtStW1fMmDFDREVFGZzLxYsXRffu3YWdnZ1wdXUVERER4syZMwbxbEFBgZg4caJwc3MTEolEL3YzFpuaEnsU9fNaVLz5OO13Y+wlk8l09Ur6eTPlWjAlfiSiiiURgqt3EhERVSUFBQXw8vJCv379sHz5cks3h4iIiIiqgeTkZHh6emLWrFmYOXOmpZtTZlOmTMHSpUuRmZlZ5KLRRERUu3FNDSIioiomMjISSUlJGDFihKWbQkRERETVxMqVK1FYWIjhw4dbuikmy87O1nufkpKCNWvWoGPHjkxoEBFRkbimBhERURVx9OhRnD17Fu+//z5atWqFzp07W7pJRERERFTF7dmzBxcvXsSHH36IAQMGwN/f39JNMllYWBi6dOmCJk2a4O7du1i+fDnS09Or9UgTIiKqeJx+ioiIqIoYNWoUfvzxR7Rs2RIrV65Es2bNLN0kIiIiIqriunTpgr/++gsdOnTAjz/+iHr16lm6SSabMWMGNm7ciJs3b0IikaB169aYPXs2unfvbummERFRFcakBhERERERERERERERVQtcU4OIiIiIiIiIiIiIiKoFrqlRidRqNRISEmBvbw+JRGLp5hARERERVRohBDIyMuDl5QWplM9WFYdxAxERERHVRqbGDExqVKKEhAT4+PhYuhlERERERBZz48YNeHt7W7oZVRrjBiIiIiKqzUqKGZjUqET29vYANF+Kg4NDpR9frVYjKSkJbm5ufDqujNiH5mH/mYf9Zx72n/nYh+Zh/5mH/WeeqtB/6enp8PHx0d0TU9EsHTfUZlXhZ4UqF7/z2offee3C77v24XdevZkaMzCpUYm0Q8cdHBwsltTIycmBg4MDf6jLiH1oHvafedh/5mH/mY99aB72n3nYf+apSv3H6ZRKZum4oTarSj8rVDn4ndc+/M5rF37ftQ+/85qhpJiB3ywREREREREREREREVULTGoQEREREREREREREVG1wKQGERERERERERERERFVC1xTowoqLCxEfn5+ue9XrVYjPz8fOTk5nFOujKp7H1pZWUEmk1m6GURERERkJrVajby8PEs3o8ap7vf75YVxA5VVQkYCsvOzK2z/b094G5vWbQIAyOVyeHh5oFf/XpgybQqc7J3gZe+lq5ubm4v27dvjzJkzOHXqFFq2bFlh7SIiosrFpEYVIoTAnTt3kJqaWmH7V6vVyMjI4AKNZVQT+tDJyQkeHh7Vtv1EREREtV1eXh5iY2OhVqvLvI/cXAkSE+UoKJBALhdwdy+AUinKsZXVU0243y8vjBuotBIyEvDCry8gIzejwo4RHxcP+6b28BvhB1EokHU9C8u/X47N0ZvR+IXG+Hngz7rExttvvw0vLy+cOXOmwtpDRESWwaRGFaJNaLi7u8PGxq
bcbx6FECgoKIBcLueNaRlV5z4UQiArKwuJiYkAAE9PTwu3iIiIiIhKSwiB27dvQyaTwcfHp1SjCW7cAHbulOHIEQmuXZMgO1sCIQCJBFCpBOrXFwgLE+jZsxA+PhV4ElVYdb7fLy+MG6issvOzkZGbAaVcCZVcVSHHuC27DalCCre6bpoCLyDzeCYy/81ERm6GbpTI9u3bsXPnTvz666/Yvn17hbSFiIgsh0mNKqKwsFCX0HBxcamQY/AG3XzVvQ9VKs2NZWJiItzd3TmknIiIiKiaKSgoQFZWFry8vGBjY2PSNnfvAgsXAlFRQGoqoFAAtraAkxMglQJqNZCdDZw5Axw/DqxebYUePYCJE4G6dSv0dKqc6n6/X14YN5A5VHIVbBW2FbJvuUwOIRW6/affSEd6TDqUzkpdnbt37yIiIgKRkZEm/54kIqLqhUmNKkK7hgb/waWKpr3G8vPzGZwQERERVTOFhYUAAIVCYVL9nTuBzz8H4uMBFxegQQNNIuNxdnaAm5smwXH/PrBhA3DsGDB1KtCzZ3meAVUXjBuoqko8lYjto7dDqAXU+WpAAjR4sQEATWJy1KhRGD9+PNq0aYO4uDjLNpaIiCoEkxpVTG1+GogqB68xIiIiourPlHu6X34BPv0UyMsDAgMBU/4uLZVqkh9OTprpqqZP14zueP55s5tM1QzjBqqqXIJd0Pzl5ijMLcS17dcgkUrg3tYdqTmpWP3damRkZGD69OmWbiYREVUg0ydgJSIiIiIiomph505NQgMA/P1NS2g8SibTbCcE8NlnmqmriIiqAplSBlsPWzj4OSBkbAhSY1KRsD8BAHDk4BEcOXIESqUScrkcDRpoRnC0adMGI0eOtGSziYioHDGpQUREREREVIPcvauZciovD/DyMm9fXl5Abi4wb55mv0REVYlEKkGD/g0QuykW6jw1Zn0yC2fOnMHp06dx+vRpbNu2DQCwfv16fPjhhxZuLRERlRcmNWqgwkJg3z7g5581//3ftLs1QlxcHCQSCU6fPl3hx/L398eCBQuKrZOXl4cGDRrgr7/+MutYc+bMQcuWLYutM2rUKAwYMKDYOhcvXoS3tzcePHhgVnuIiIiIqPpauFCzhoaPj/n7kkg0+4mP1+y3OmDMMKDYOowZqKbxbO8JiVSCpH1J8PL2QrNmzXSvRo0aAQACAwPh7e1t4ZYSEVF5YVKjhtm0STNMvGtXYNgwzX/9/TXlFWXUqFGQSCSQSCRQKBRo0KAB3nvvPRQUFJi935JuyC3t22+/RUBAAJ588kmDz3Jzc9GyZctyC6i++uorrFy5Uve+S5cumDJlil6d4OBgPPHEE5g/f77ZxyMiIiKi6ufGDc1UUS4upZ9yqigymWZ/UVGa/ZcFYwbGDEQVRSqTol63ekiMSkTWgyxLN4eIiCoBkxo1yKZNwKBBwM2b+uW3bmnKKzKx8cwzz+D27du4cuUK/vvf/2LOnDmYN29emfZVWFgItVpdzi0sf0IILFq0CGPGjDH6+dtvvw0vc8f7P8LR0RFOTk4l1hs9ejSWLFlidoBIRERERNXP9u2ahb3r1Cnf/dapo9nv9u1l3wdjBkOMGYhKp+X4lmj737YG5X59/dB8XnPY2Nrolfv7+0MIUeIoJyIiql6Y1KjChAAePDDtlZ4OTJqk2cbYfgBg8mRNvZL2ZWwfJVEqlfDw8ICfnx9effVVdO/eHVu2bAEA3L9/HyNGjECdOnVgY2ODXr164cqVK7ptV65cCScnJ2zZsgXBwcFQKpV4+eWXsWrVKmzevFn3RNe+ffsMjltYWIgxY8YgICAAKpUKQUFB+Oqrr/TqaJ/e+vzzz+Hp6QkXFxdMmDAB+fn5ujqJiYno168fVCoVAgICsHbt2hLP+cSJE4iJiUGfPn0MPtu+fTt27tyJzz//3NQuBAAsXboUPj4+sLGxwfPPP4+0tDSD89D+//379+Orr77S9U9cXBwAoEePHrh37x72799fqmMTER
FRzXI97TpO3j6pe51NOqv3/nradUs3kSrA4cOAQgFIyznSk0o1+z1ypOz7qC4xg6+vL1xdXRkzEBEREVVRcks3gIqWlQXY2ZXPvoQAbt2SwNXVqsS6mZmAra15x1OpVEhJSQGguZm+cuUKtmzZAgcHB7zzzjvo3bs3Ll68CCsrTXuysrLw6aefYtmyZXBxcYGnpyeys7ORnp6OFStWAACcnZ2RkJCgdxy1Wg1vb29s2LABLi4u+OuvvzB27Fh4enri+eef19Xbu3cvPD09sXfvXly9ehVDhgxBy5YtERERoWtjQkIC9u7dCysrK0yaNAmJiYnFnuPBgwfRqFEj2Nvb65XfvXsXERERiIyMhI2NTRFbG7p69Sp++eUX/P7770hPT8eYMWPw2muvGQ2WvvrqK1y+fBnNmjXDe++9BwBwc3MDACgUCrRs2RIHDx5Et27dTD4+ERER1RzX064jaFEQcgpyiqxjLbdG9OvR8HX0rcSWUUXKzgZiYsy/ly+KrS1w5QqQkwNYW5u/v6oYM3h4eGDnzp2Ii4vD0KFDGTMQERERVUFMalC5EkJg9+7d2LFjByZOnKgLTA4fPqybQ3bt2rXw8fFBZGQkBg8eDADIz8/H4sWLERISotuXSqVCbm4uPDw8ijyelZUV5s6dq3sfEBCAI0eO4JdfftELUOrUqYNFixZBJpOhcePG6NOnD3bv3o2IiAhcvnwZ27dvx7Fjx9C2rWYY6/Lly9GkSZNizzU+Pt5gqLgQAqNGjcL48ePRpk0b3ZNQpsjJycHq1atRr149AMDChQvRp08ffPHFFwZ94OjoCIVCARsbG6P94+Xlhfj4eJOPTURERDVLclZysQkNAMgpyEFyVjKTGjXI7duaxIaDQ8XsX6XSjPxOSADq1y/7fqp6zCCEQLNmzRgzEBEREVVRnH6qCrOx0YyaMOW1bZtp+/z99wJkZIhi91WKB4V0tm7dCjs7O1hbW6NXr14YMmQI5syZg0uXLkEul6N9+/a6ui4uLggKCsKlS5d0ZQqFAi1atCj9gQF88803CA0NhZubG+zs7PDdd9/h+nX96RSaNm0K2SMrJXp6euqeqtK2MTQ0VPd548aNS5yLNjs7G9aPPaK2cOFCZGRkYPr06UVuZ2dnp3uNHz9eV+7r66sLTgAgLCwMarUa0dHRxbbDGJVKhawsLpBGREREVJvk5wNqdflPPaUllWr2/8iMTKXCmEGDMQMRERGReThSowqTSEwfOt6zJ+DtrVkU3NiaGBIJ4O0t0L27gFKpeV+eunbtiiVLlkChUMDLywtyeekuLZVKBUkZGrVu3TpMnToVX3zxBcLCwmBvb4958+bh6NGjevW0Q9a1JBKJ2QsLurq64ty5c3ple/bswZEjR6BUKvXK27RpgxdffBGrVq3C6dOndeUOFfQY3b179xAYGFgh+yYiIiKiqsnK6mHioSJoEyZWJc9oaxRjBg3GDERERETmYVKjhpDJgK++AgYN0iQsHk1saO/7v/xSU68i2NraokGDBgblTZo0QUFBAY4ePaobSp6SkoLo6GgEBwcXu0+FQoHCwsJi62iHqL/22mu6spiYmFK1vXHjxigoKMCJEyd0Q8mjo6ORmppa7HatWrXCkiVLIITQBVdff/01PvjgA12dhIQEhIeHY/369bonz4z1EwBcv34dCQkJuuHpf//9N6RSKYKCgozWL65/zp8/j0GDBhXbfiIiIiKqWby8NFNEZWeX39p8j8rO1uz/sdmUTMaYgTEDERERUXng9FM1yHPPARs3Ao+MRgagGcGxcaPm88rWsGFD9O/fHxERETh06BDOnDmDl156CfXq1UP//v2L3dbf3x9nz55FdHQ0kpOTkW9knHvDhg3xzz//YMeOHbh8+TJmzpyJ48ePl6qNQUFBeOaZZzBu3DgcPXoUJ06cwCuvvAKVSlXsdl27dkVmZiYuXLigK/P19UWzZs10r0aNGgEAAgMD4e3tXez+rK2tMXLkSJw5cw
YHDx7EpEmT8Pzzzxc5P7C/vz+OHj2KuLg4JCcn654ii4uLw61bt9C9e/fSdAMRERERVXPW1kBgIPDgQcXs/8EDoGHD8lkk/FGMGRgzEBEREZUGkxo1zHPPAXFxwN69wE8/af4bG2uZhIbWihUrEBoair59+yIsLAxCCGzbts1gePfjIiIiEBQUhDZt2sDNzQ2HDx82qDNu3Dg899xzGDJkCNq3b4+UlBS9J7BK00YvLy907twZzz33HMaOHQt3d/dit3FxccGzzz6LtWvXlvp4xjRo0ADPPfccevfujZ49e6JFixZYvHhxkfWnTp0KmUyG4OBguLm56eYE/vnnn9GzZ0/4+fmVS7uIiIiIqPro0AHIyyv/KajUas1+w8LKd79ajBlMw5iBiIiICJAIYWwFhprhm2++wbx583Dnzh2EhIRg4cKFaNeuXZH1U1NT8X//93/YtGkT7t27Bz8/PyxYsAC9e/cu8z4flZ6eDkdHR6SlpRnMi5qTk4PY2FgEBAQYLCRXXoQQKCgogFwuL9NctGTYh2fPnkWPHj0QExMDu4oY419KeXl5aNiwIX766Sd06NDBaJ3KuNaKolarkZiYCHd3d0gragXLGoz9Zx72n/nYh+Zh/5mH/Vc6J2+fROh3oSXWOzH2BFp7tq6EFhV/L2xJVS1mAMyLG27c0ExJK5MBLi4mH7JEKSlAYaFmBLiPT/ntt6qpiJipOsYMgGXjhsrEf1/KT8y9GAzeMBhO1k6wVZi4QGg5eZD3AKk5qdgweAMCnYtfK4bfee3C77v24XdevZkaM9TYb3b9+vV48803MXv2bJw8eRIhISEIDw9HYmKi0fp5eXno0aMH4uLisHHjRkRHR+P7779HvUfmcirtPqnma9GiBT799FPExsZauikANHPszpgxo9jghIiIiGo+VxtXWMuL/yOktdwarjauldSiqqkmxgw+PkCPHg+TEOWhsFCzvx49anZCo6IwZiAiIiIqXzV2pEb79u3Rtm1bLFq0CIAmS+fj44OJEydi2rRpBvW//fZbzJs3D//++2+RQ5xLu8/c3Fzk5ubq3qenp8PHxwf37983+sRVXFxchT8Fk5+fX+IQbipede9D7RNX/v7+FhmpkZSUBDc3N2bLy4D9Zx72n/nYh+Zh/5mH/Vd619OuIzkrGQDw8aGPsenfTYhoFYGxoWMBaBIfvo6+ldae9PR01KlTp0qN1KgKMQNQ/nHD3bvAiBHA7duAv3+J3VAsIYD4eMDTE1i9Gqhb17z9VQfV/X6/vFgybqhM/Pel/Fy7fw1DNg6x6EiN9YPWo36d+sXW5Xdeu/D7rn34nZvn669PomHDOujVK6DIOqNHj8bq1asBAHK5HN7e3hg0aBDmzp1rcM+Qm5uLsLAwnDlzBidOnEDLli2LPb6pMYPc9FOqPvLy8nDixAlMnz5dVyaVStG9e3ccOXLE6DZbtmxBWFgYJkyYgM2bN8PNzQ3Dhg3DO++8A5lMVqZ9fvzxx5g7d65BeVJSEnJycvTK8vPzoVarUVBQgIKCgrKcdomEECj83+NanH6qbGpCHxYUFECtViMlJaXSgzW1Wo20tDQIIfgPSxmw/8zD/jMf+9A87D/zsP9KzxrW8JZpFh22lWj+uOQgcdCVIReVOuI4IyOj0o5liqoSMwDlGzckZCQgW5qNIeMlmD9fiph7gHsZExFCAEmJgNQJGPqqGqlSgdz7KnjZe5Vth9VATbjfLy+WjBsqE/99KT8ZGRloqGwIW7ktrGWVmwjLkefggfIBMu5lIDG/+H/b+J3XLvy+ax9+52UXH5+Go0ev4OJFKwQHK6BSGf/3PycnB127dsWCBQuQn5+Ps2fPYvLkycjOzsa7776rV/fdd9+Fy//mRL13716J8YepMUONTGokJyejsLAQdR97jKhu3br4999/jW5z7do17NmzBy+++CK2bduGq1ev4rXXXkN+fj5mz55dpn1Onz4db775pu699okrNzc3o09cZWRkQC
6XQy6v2K+lJt+QVpbq3IdyuRxSqRQuLi4WGakhkUiYLS8j9p952H/mYx+ah/1nHvafeVQqFQDA1ta2xIWNK0pVe9K7qsQMQPnFDQkZCRixeQQy8jTBYFZfzaiNWAEoFCX3yePy8gCJv2Z0xqJEYNFvgL3CHj8N/KlGJzaA6n2/X14sGTdUJv77Un4yrTJxJfcKnCROsJVW8kiNggdIzU2FvbM93OsU/+8cv/Pahd937cPvvOy+/PJfnDr1AFKpBIcOpeHFF4ON1rO2toa9vT2aNWsGAGjVqhV+//13/PXXX3qxxvbt23H48GFs2LABzZs3h7Ozc4mxiKn3HDUyqVEWarUa7u7u+O677yCTyRAaGopbt25h3rx5mD17dpn2qVQqoVQqDcqlUqnBD5VUKoVEItG9KoIQQrfv2v7UUVnVhD7UXmPGrsPKOr6ljl0TsP/Mw/4zH/vQPOw/87D/yu7R+xdL9V9N+N4qImYAyi9uyCnIQUZeBpRyJVRyFZw8AUclcP06kJMBWFkBcjmA4m5jBVBQAOTnAyprwNcXcHbWfJRdkI2MvAzkFORU23vhktSE+/3yYum4oTLVlvOsaBKJBOKR/1Um7TFN/XeO33ntwu+79uF3Xnrnzydh9+4bcHW1QWZmPtasuYQBAxrB3t7wyZhH7xE0257HkSNH4Ofnpyu7e/cuxo0bh8jISNjZ2QEwfm/7OFO/sxqZ1HB1dYVMJsPdu3f1yu/evQsPDw+j23h6esLKygoymUxX1qRJE9y5cwd5eXll2icREREREVVNNTlmUMlVuvnsbT0A9zpAbCyQnATk5gBSCSCTAzIpNAkOARSqgcICQC0AKzlQzx0ICAAez7XkFuQaHI+IiIiIqrc1ay4iIyMPHh5OsLNTIC4uDZGRVzB8eFOj9bdu3Qo7OzsUFBQgNzcXUqlUt6acEAKjRo3C+PHj0aZNG8TFxZV7e2tkukqhUCA0NBS7d+/WlanVauzevRthYWFGt+nQoQOuXr0KtVqtK7t8+TI8PT2hUCjKtE8iIiIiIqqaalPMoFQCjRsDoW2A+vUBRydAIgHy/zciI79A897RSfN5aBtNfSODR4iIiIiohtGM0rgONzcVJBIJrKyksLaW6xIdxnTt2hWnT5/G0aNHMXLkSIwePRoDBw4EACxcuBAZGRl668yVtxqZ1ACAN998E99//z1WrVqFS5cu4dVXX8WDBw8wevRoAMCIESP0OvbVV1/FvXv3MHnyZFy+fBl//PEHPvroI0yYMMHkfRIRERERUfVR22IGlQrw8wNatgTCngDahAKtW2v+G/aEptzPT1OPiIiIiGoHbfLCyenhEy0eHra4fj0dkZFXjG5ja2uLBg0aICQkBD/88AOOHj2K5cuXAwD27NmDI0eOQKlUQi6Xo0GDBgCANm3aYOTIkeXS5ho5/RQADBkyBElJSZg1axbu3LmDli1b4s8//9Qt2nf9+nW9Obp8fHywY8cOvPHGG2jRogXq1auHyZMn45133jF5n0REREREVH3U5phBKgNsKncdXyIiIiKqYh4fpaH16GiNAQMaGl1bQ0sqlWLGjBl48803MWzYMHz99df44IMPdJ8nJCQgPDwc69evR/v27cul3TV2pAYAvP7664iPj0dubi6OHj2q12n79u3DypUr9eqHhYXh77//Rk5ODmJiYjBjxgy9+XJL2qelXU+7jpO3Txb5up523dJNrDT+/v5YsGCB2ftZvnw5evbsafZ+JBIJIiMji/w8Li4OEokEp0+fLnY/Q4cOxRdffGF2e4iIiIhIo7bFDPQQYwYiIiKq7YyN0tAqabTGowYPHgyZTIZvvvkGvr6+aNasme7VqFEjAEBgYCC8vb3Lpd01OqlRm1xPu46gRUEI/S60yFfjbxpXSGIjKSkJr776Knx9faFUKuHh4YHw8HAcPny4XI9jLOhYuXIlnJycyvU4Wjk5OZg5cyZmz55t9PN169ZBIpFgwIABZh/Lx8cHt2/fRr
NmzQBoAmiJRILU1FS9eu+++y4+/PBDpKWlmX1MIiIiIqLKwphhgNnHYsxARERE5amoURpapqytoSWXy/H666/js88+w4MHDyqqyQ+PV+FHoEqRnJWMnIKcYuvkFOQgJTsF9VG/XI89cOBA5OXlYdWqVahfvz7u3r2L3bt3IyUlpVyPU9k2btwIBwcHdOjQweCzuLg4TJ06FZ06dSqXY8lkMnh4eJRYr1mzZggMDMSPP/6oN3czEREREVFVxpjBfIwZqLrILsiuFcckIqrutMkKDw+nIut4eNgiLi4NkZFXMHx4UwAwGMmsNW3aNEybNs2g3N/fH0KI8miyDkdqVGFCCDzIe2DSKzvftH/As/OzS9xXaS6y1NRUHDx4EJ9++im6du0KPz8/tGvXDtOnT8d//vMfvXqvvPIK3Nzc4ODggKeffhpnzpzRfR4TE4P+/fujbt26sLOzQ9u2bbFr1y7d5126dEF8fDzeeOMNSCQSSCQS7Nu3D6NHj0ZaWpqubM6cOUW2s7jjG7Nu3Tr069fPoLywsBAvvfQS5s6di/r1TU8Q3b59G7169YJKpUL9+vWxceNG3WePDiWPi4tD165dAQB16tSBRCLBqFGjdHX79euHdevWmXxcIiIiIiJLqm4xg5eXFxwdHc2OGV588UXGDFSrqKxUsFfaI7cgF6k5qZX6yi3Ihb3SHiorlaW7gYioWihplIZWaUZrVCaO1KjCsvKzYPexXbnus8uaLiXWyZyeCVuFaasG2tnZwc7ODpGRkXjiiSegVBrOvwZo5lVTqVTYvn07HB0dsXTpUnTr1g2XL1+Gs7MzMjMz0bt3b3z44YdQKpVYvXo1+vXrh+joaPj6+mLTpk0ICQnB2LFjERERAQBwdnbGggULMGvWLERHR+vaU5bjG3Po0CEMHz7coPyDDz6Au7s7xowZg4MHD5rUTwAwc+ZMfPLJJ/jqq6+wZs0aDB06FOfOnUOTJk306vn4+ODXX3/FwIEDER0dDQcHB6hUD2/M2rVrhw8//BC5ublF9jcRERHRo26eVgGLpuKarQzoaunWUG1T3WKG33//Hc7Ozvjuu+/KHDO89957jBmo1vGy98LPA382+aHL8qayUsHL3ssixyYiqm5MGaWhZWy0hqUxqUFmkcvlWLlyJSIiIvDtt9+idevW6Ny5M4YOHYoWLVoA0NzoHzt2DImJibob6s8//xyRkZHYuHEjxo4di5CQEISEhOj2+/777+O3337Dli1b8Prrr8PZ2RkymQz29vZ6Q64dHR0hkUiKHYZtyvEfl5qairS0NHh5eRnsa+XKlTh16lSp+2rw4MF45ZVXdOcXFRWFhQsXYvHixXr1ZDKZLmhyd3c3mP/Xy8sLeXl5uHPnDvz8/ErdDiIiIqpdhBA4ttoFSLbG7mWFEP8VxT6NRVTeqlPMcPfuXchkMsjlcrNihuXLl5e4oLcxjBmoumNSgYio6jN1lIbWo6M1BgxoCHt7RSW0snhMalRhNlY2yJyeaVLd03dOo+OKjiXW2zd8H0LrhRZ7wdpY2ZjcRkAzP26fPn1w8OBB/P3339i+fTs+++wzLFu2DKNGjcKZM2eQmZkJFxcXve2ys7MRExMDAMjMzMScOXPwxx9/4Pbt2ygoKEB2djauXzd/YXNTjv+47GzNkyXW1ta6soyMDIwYMQJLliyBq6ur0e0++ugjfPTRR7r3Fy9ehK+vLwAgLCxMr25YWFiZAh3tE1hZWVml3paIiIhqn50745B0RXNPkxAtw86dcQgPD7Bwq6i2qS4xw+P3+WWJGYYPH47vv/+eMQMRERFVSaUZpaFV1UZrMKlRhUkkEpOngTJ13kiVlQq2CttyfzrP2toaPXr0QI8ePTBz5ky88sormD17NkaNGoXMzEx4enpi3759BttpnyiaOnUqoqKi8Pnnn6NBgwZQqVQYNGgQ8vLMn6vNlOM/zsXFBRKJBPfv39eVxcTEIC4uDs8++6yuTK1WA9A8fRYdHY3x48
fj+eef133++FNb5eHevXsAADc3t3LfNxEREdUcOTkFOH8+CePHRwEQACSQSAVmzjyEnj39OVqDKl11iBn27t2LgoICyOVy3c9IWWKGR9fZYMxAREREVUVpR2loVbXRGkxqUIUIDg5GZGQkAKB169a4c+cO5HI5/P39jdY/fPgwRo0apUsYZGZmIi4uTq+OQqFAYWFhiWWPM+X4j1MoFAgODsbFixfRs2dPAEDjxo1x9uxZvSDn3XffRUZGBr766iv4+PhAoVAUOd/u33//jREjRui9b9WqVZHHB2D03M6fPw9vb+8in/wiIiKi2ufOnQc4cyYRZ84k/e+ViH//vYfCQvG/GpqARaglOH78LkdrUJVQlWOGR5MaRSkqZjh37pxePcYMREREVFUUN0pDQCBRcRJKtROcCgINPq9KozWY1KghXG1cYS23Rk5BTpF1rOXWcFG5FPl5WaSkpGDw4MF4+eWX0aJFC9jb2+Off/7BZ599hv79+wMAunfvjrCwMAwYMACfffYZGjVqhISEBPzxxx949tln0aZNGzRs2BCbNm1Cv379IJFIMHPmTN0TTVr+/v44cOAAhg4dCqVSCVdXV/j7+yMzMxO7d+9GSEgIbGxsYGOjP32WKcc3Jjw8HIcOHcKUKVM0/WdtjWbNmuklNbRPbTVr1qzEvtqwYQPatGmDjh07Yu3atTh27BiWL19utK6fnx8kEgm2bt2K3r17Q6VS6RY0PHjwoC5oIiIiotolP78Q0dH3cOZMEk6ffpjESEw0PsWMTCZ5JLHxsIyjNagyVaeY4dlnn8VHH32EJk2a4Pbt22WOGR7FmIGIiIiqguJGaQgI3LLej2u2v0MuVGic8RKc8xvr1alKozWY1KghfB19Ef16NJKzkous46JygZdt+Q5ttrOzQ/v27fHll18iJiYG+fn58PHxQUREBGbMmAFAM43Wtm3b8H//938YPXo0kpKS4OHhgaeeegp169YFAMyfPx8vv/wynnzySbi6uuKdd95Benq63rHee+89jBs3DoGBgcjNzYUQAk8++STGjx+PIUOGICUlBbNnz8acOXP0tjPl+MaMGTMGbdq0QVpaGhwdHc3uq7lz52LdunV47bXX4OnpiZ9//hnBwcFG69arVw9z587FtGnTMHr0aIwYMQIrV65ETk4OIiMj8eeff5rdHiIiIqra7t3L1ht5ceZMEi5cSEFenuFT2RIJ0LBhHbRs6Y6QEDeEhLjh/v1cDB++zaBuYaHgaA2qVNUpZpgxYwYiIiIYMxAREVGNU9QojUcTGhBS5Eky8K/9j0YTG1VltIZECCFKrkblIT09HY6OjkhLS4ODg4PeZzk5OYiNjUVAQIDeQnPlSQhhMD8sFW/w4MFo3bo1pk+fDsDyfbhkyRL89ttv2LlzZ5n3URnXWlHUajUSExPh7u4OqVRaqceuCdh/5mH/mY99aB72n3lqcv+p1QJXr943SGDcuJFhtL69vQItWrjpkhchIe5o1swFtrYPn5QSQqB9+x9x4sRdPPYgOwBAKgVCQ+vi6NGXKu2eprh7YdJX1rgh5l4MBm8YDCdrJ5PX5iuNB3kPkJqTig2DNyDQ2XBKgpqgLPf7j8cMllYeMQNg2bihMtXkf1/IOH7ntQu/79qH37lx588nYdSoP2FtLUOdOg//XX80oSERMijVThAQyJYlQiHsjSY2btzIgLu7DX79tX+5j9YwNWbgSA2iYsybNw+///67pZuhY2VlhYULF1q6GURERFRGGRl5OHcuSW/6qHPnkpCVVWC0vr+/A0JCNKMvtKMw/P0dIZUW/8fWvLxCXL+eYTShAQBqtSYYycsrhFLJkIDIHIwZiIiIqKozNkrDWEIDACSQQFXojmxZotERG1VhtAYjGKJi+Pv7Y+LEiZZuhs4rr7xi6SYQERGRCYQQuH49HadPJ+kt4B0Tk2q0vrW1HM2bu+qNvmjRwg2OjsoyHV+plOP48ZeQlJQNABg0cTFi/7JH6755+H
7uGACAu7sNExpE5YAxAxEREVVlxtbSKCqhoVVcYqMqrK3BKIaIiIiIyAzZ2fm4cCFFb+qos2eTkJqaa7S+l5edXvIiJMQNDRvWgVxevsPjfXwc4OOjGbKtctQM2bB3EWjduuj1Aaj2yc+SwcrGcJ0WIiIiIqoZHh+lUVJCQ6u4xIalR2swqUFEREREZAIhBO7ceaBLXmhHYURH34dabbhMnZWVFE2auOhNHRUS4gZXVxsLtJ7IUOo1O/y7PgCNh8TCqX6mpZtDREREROXs8VEapiY0tIwlNq4fj8LNm/sAACNHyjBzpjeef/55vPfeewbrcOXm5qJ9+/Y4c+YMTp06hZYtW5bLeTGpQURERET0mPz8Qvz77z3duhfaRIZ2OqfHubqqDEZfNGniAoVCVsktJzKNEMCN/R7IvGWDGwfqwjEgE5W0ZjwRERERVZJHR2mUNqGh9XhiQya1hptbKzRt+iquX09Fv35KLF06ExKJBJ9++qnetm+//Ta8vLxw5syZcj0vJjWIiIiIqFZLScnWmzrqzJkkXLyYgrw8wyl5pFIJGjWqo5e8CAlxg5eXnW5+WiJLyy4wnnx7VPo1R6RE20Fum4uUf+1x57IcDgFpZu+XiIiIiKqGR0dpQIIyJTS0Hk1s5FklwC7fC3Z2LrC3V+DiRRt06fI0oqKi9JIa27dvx86dO/Hrr79i+/bt5XpuTGoQERERUa1QWKjG1aupuuSFdhTGrVvGp91xcFCgRQs3vemjmjZ1hY2NVSW3nMg0KisV7JX2yMjNQG6B8TVdAM0ojZv7A5GfCyjcMpGXZIf4fa7w9ogvcbSGvdIeKitVObeciIiIiMqbdpRGXQ9HkxMaBQUCUqnmYa7HaRMbObiHDPkN3LP6Fx4ejRAdfRH37x9C48aBurp3795FREQEIiMjYWNT/tPvMqlBRERERDVORkYezp5NemT6qEScP5+MrKwCo/Xr13fUG3kREuIGf39Hjr6gasXL3gs/D/wZ2fnFj6g4e/I+/i/lFAIC5bCzt0KmfT4y7/lidsNxaN6qTrHbqqxU8LL3Ks9mExEREVE5047ScHWzRoLqgEkJDSGAjMw8yGQSONgrjNaRQAL13QKoM/Pw143/0/ts7Nj5/9uPwKhRozB+/Hg0b94crVq1AgBER0dzTQ0iIiIiIiEE4uLSHln3QpPAuHbN+DQ6KpUczZu76iUwWrRwg4ODspJbTlQxSko4CCHwZeRuiAIZ3Os4QCKRwMZZIP1eGnb9lor+T4cymUdERERUza1ZcxHpGbmwCTiDWNutJk05lZtbAHWhGupCCfLz1bCykhZRUwKJQgZlN2fI86whOS1HRsp1bNq0Dy+/PBwLFy5ERkYGpk+fjjfffBPu7u64dOlSuZ5fUS0jqpLi4uIgkUhw+vTpCj+Wv78/FixYUGydvLw8NGjQAH/99ZdZx1q5ciWcnJyKrTNnzpwSs5nJyclwd3fHzZs3zWoPERFRVZSdXYDjx+9g2bKzmDhxNzp1+hlOTgtRv/4yPPvsZsyZ8xd+++2KLqFRr54devcOwIwZ7bF+fV/8++/LyMiYhKNHX8J33/XEhAmt0LGjNxMaVKv8888dHD58C+7uKl3yQiKRoG5dFQ4fTsCJE3ct3ELzMWZoWWwdxgxEREQ12/nzSdi1Ox5ocMLkhIYQQHZ2oe7+MDvH+Ah3HakE9i4+gLeAvLcSEpkMu3ZtQUZGHvbs2YMjR45AoVDg66+/xqFDhwAAL774IkaOHFkep8ikRk21a1c8goN/wK5d8RV+rFGjRkEikUAikUChUKBBgwZ47733UFBQwsVvwn4HDBhQPo2sIN9++y0CAgLw5JNP6souX76M/v37w9XVFQ4ODujYsSP27t1r9rGmTp2K3bt3694b6x9XV1eMGDECs2fPNvt4REREliKEQEJCJrZvv4aPPz6KoUN/R9OmK9GgwXo88cRPiIjYiUWLTuHQoVtIT8+DlZUULVu6Y+TIppg/vwt273
4eyckTcPPmePzxx0B8+GEnPP98YwQFOUMm4+0v1V5CCKxadQE5OYWwf2xKAXt7BbKzC7By5XkIIcr92IwZGDMQERFR5Vi9+gISVPtxx+1PkxcFz80tgFotIJFKIJVKkJ+nRn6+2mhddU4+RE4Bkn48iczVN3F/42UIkYfc3Hv45Zdz+Prrr9G+fXsAgFwuh62tLQBg+vTp+PDDD8vlHDn9VA0khMCMGQdw6dI9zJhxAN26vVThQ8ifeeYZrFixArm5udi2bRsmTJgAKysrTJ8+vdT7KiwsrBZD3oUQWLRoEd577z298r59+6Jhw4bYs2cPVCoVFixYgL59+yImJgYeHh5lPp6dnR3s7OxKrDd69GiEhoZi3rx5cHZ2LvPxiIiIKkNeXiEuXUrRTRulnUIqOdn4mgBubqrH1r5wR+PGzlAoZJXccqLqx9goDa3HR2u0aVP2+9aiMGZ4iDEDERERVYRr1+5j07l1uFdvJ6Q5csjz7ZCF4h8iEUJoRmYIoLBA/K9Ms06h0loGCYq455JJAAEg+3/JD6nA+1++i5Ejt+L+/ft45ZVXMGPGDGzbtg2vvfYa1q9fj/fff79czpOPqlVhQgg8eJBX6teWLVdx/Lhm2Pjx43exZcvV/32W/8ir6O3L8mSWUqmEh4cH/Pz88Oqrr6J79+7YsmULAOD+/fsYMWIE6tSpAxsbG/Tq1QtXrlzRbasdRr1lyxYEBwdDqVTi5ZdfxqpVq7B582bdE1379u0zOG5hYSHGjBmDgIAAqFQqBAUF4auvvtKro3066fPPP4enpydcXFwwYcIE5Ofn6+okJiaiX79+UKlUCAgIwNq1a0s85xMnTiAmJgZ9+vTRlSUnJ+PKlSuYNm0aWrRogYYNG+KTTz5BVlYWzp8/X+I+IyMj0bBhQ1hbWyM8PBw3btzQffboUPI5c+YU2T9NmzaFl5cXfvvttxKPR0REVJmSk7Owe3c85s//ByNHbkNIyCrY2X2Fli1XY+TI7Zg//wR2776O5ORsSKUSNGnijBdeaIxPPumEP/54FqdPP4fbt8cjKmowPv+8C4YPb4oWLdyY0CAyQXGjNLQqerRGdYkZfH194erqypiBiIiIqhUhBHbF/4b8oH3w8nBAkK8vAgMdS3w5O1vDSi6FSiUHJIBUKoGNjRxyuRSeHrYG9aUSAFIJGo9ug0bDW6FO07qwclBCqpLiZuwuTP6/SXBxccHixYvh5+eH5s2bAwCuXLmid29lDo7UqMKysvJhZ/e12fsZMGBzqepnZk6Cra3xQMdUKpUKKSkpADQBwpUrV7BlyxY4ODjgnXfeQe/evXHx4kVYWVkBALKysvDpp59i2bJlcHFxgaenJ7Kzs5Geno4VK1YAAJydnZGQkKB3HLVaDW9vb2zYsAEuLi7466+/MHbsWHh6euL555/X1du7dy88PT2xd+9eXL16FUOGDEHLli0RERGha2NCQgL27t0LKysrTJo0CYmJicWe48GDB9GoUSPY29vrylxcXBAUFITVq1ejdevWUCqVWLp0Kdzd3REaGlrs/rKysvDhhx9i9erVUCgUeO211zB06FAcPnzYoO7UqVNx6dIlg/7RateuHQ4ePIgxY8YUe0wiIqKKUFioxpUr93HmTBJOn344+iIhIdNofUdHpd7Ii5AQNzRt6gKVykpXR61WIzExsVo8mV0VJdkfBMbsR7y6G4D/s3RzyAKKG6WhVRmjNR5VFWMGDw8P7Ny5E3FxcRg6dChjBiIiIqoWhBBYc2QNFh/4Gu6u9vBwNO0+Lj9fjVu3MqFUyqBWa/cFyOVSQKJGXp4ajRs76G1zxdEahXmFaNCiLgCgroctDs07BGsnawS8HoB1G9ch9d9UKJWGaxe+8sorWLVqlXknCyY1qJwJIbB7927s2LEDEydO1AUmhw8f1s0hu3btWvj4+CAyMhKDBw8GAOTn52Px4sUICQnR7UulUiE3N7fY4ddWVlaYO3eu7n1AQACOHDmCX375RS
9AqVOnDhYtWgSZTIbGjRujT58+2L17NyIiInD58mVs374dx44dQ9u2bQEAy5cvR5MmTYo91/j4eHh5eemVSSQS7Nq1CwMGDIC9vT2kUinc3d3x559/ok6dOsXuLz8/H4sWLdLNObdq1So0adIEx44dQ7t27fTq2tnZFds/Xl5eOHXqVLHHIyIiKg9pabk4e1Z/6qjz55ORnW18iHNgoJNeAqNlSzf4+jowWVGB3t/6PpIc9gMA4mS78f7W9zGz70wLt4oq06OjNOrVK/7hJXt7Be7ezcbKlecRGlq3Qn42q3rMIIRAs2bNGDMQERFRtaBNaMyPmg8rmZXJCQ0AuHMnE9nZBbC2liM9PRdSiQRCADk5BbC1tUJqag7u389BnTrWetvlpudi+9TtEGoBdYEmG5KfnQ9fe1/k98iHX08/eN3wwu4/diMnJwcAsGLFCnTv3r1czplJjSrMxsYKmZmTTK4vhEDnzutx5kwSCgsfDheXySQICXHDvn1DUFhYCLlcjuJiExsbq6I/LMLWrVthZ2eH/Px8qNVqDBs2DHPmzMHu3bshl8t1N93AwyeTLl26pCtTKBRo0aJFqY8LAN988w1++OEHXL9+HdnZ2cjLy9MNudZq2rQpZLKHU1N4enri3LlzAIBLly5BLpfrPRXVuHFjODk5FXvc7OxsWFvr/0ALITBhwgS4u7vj4MGDUKlUWLZsGfr164fjx4/D09MTTZs2RXy8ZgH3Tp06Yfv27QA0C+doA6RH23Dp0iWDAKUkKpUKWVlZpdqGiIioOGq1QFxcmt7aF6dPJyIuLt1ofRsbOZo31yQvWrbUjL5o3tytyGlvqGK8v/V9zNo8S69M+56JjdrDlFEaWhU5WqO6xAzaxcsZMxAREVFVZ05CQztKQyaTIDe3EGq15u/IAkBBgUBhoUChWuDGjQyDpIbCToEOb3RAYV4hru29BkiA5H+TEX8oHo2eboTYlFgkBydjxbgVyL+TjxEjRmD58uUYOXJkuZw3kxpVmEQiKdU0UDt2xOLkScPhz4WFAidPJuLw4QR06+b9v6RG+T5x1bVrVyxZsgQKhQJeXl6Qy0t3aalUJQdYxqxbtw5Tp07FF198gbCwMNjb22PevHk4evSoXj3tkHUtiUQCtXZMVRm5urrqghytPXv2YOtWzWI4Dg6aoVmLFy9GVFQUVq1ahWnTpmHbtm26+eNUKpVZbSjKvXv34ObmViH7JiKimi8rKx/nzyfrTR119mwSMjLyjNb38bHXmzoqJMQNgYFOkMm4fJslGUtoaDGxUXuUZpSGVkWN1mDMoMGYgYiIiMqDOQkNwHCUhvY2S3u3VdxoDYlUAls3WwBAyLAQHPj0AJz8nRCzOwZ+Hf0Q4BKA2JRYfHPyG0xpPwUAcOjQIfz9998ICwsz99SZ1KgphBCYOfMQpFLA2H23VArMnHkITz89pEKOb2triwYNGhiUN2nSBAUFBTh69KhuKHlKSgqio6MRHBxc7D4VCgUKCwuLraMdov7aa6/pymJiYkrV9saNG6OgoAAnTpzQPfUUHR2N1NTUYrdr1aoVlixZAiGELrjSPukkler/EUcqleoCIj8/P6P7KygowD///KN7wkrbhqKGtBfXP+fPn0eXLl2KbT8REZEQArduZepNHXXmTBKuXLkPtdpwkWCFQoamTV0MEhjOzhXzBzcqu+ISGlpMbNQOpRmloVVRozUYMzBmICIiovJhbkKjqFEaWlKZpMjRGs71nZGf/XDBb4lUggY9G+DibxfRbU43yBSa2XK0iY0FRxfg1/2/YmDngcjNzS2Hs2dSo8bIyyvE9esZRhMagCbRcfNmBvLyCg2eQKpIDRs2RP/+/REREYGlS5fC3t4e06ZNQ7169dC/f/9it/X398eOHTsQHR0NFxcXODo6Gt3/6tWrsWPHDgQEBGDNmjU4fvw4AgICTG5jUFAQnnnmGYwbNw5LliyBXC7HlClTSnwiqmvXrsjMzMSFCxfQrFkzAE
BYWBjq1KmDkSNHYtasWVCpVPj+++8RGxuLPn36FLs/KysrTJw4EV9//TXkcjlef/11PPHEE0UOIzfWP1ZWVsjKysKJEyfw0UcfmdwHRERU8+XlFeLixRSDBEZKSrbR+u7uNrppo7SvoCBnWFnJjNanqsOUhIYWExs1mymjNPIkGVAIe4PyylhbQ4sxA2MGIiIiKp1fT/5a5oQGUPQoDS1TRms8yrOlJy5FXkLcwTg4Bzoj9XoqnOs7o66qLq5duIZRS0bBx8+nXEZpAExq1BhKpRzHj7+EpCTjf5gAADc3FZTKyv/KV6xYgcmTJ6Nv377Iy8vDU089hW3btpWYXImIiMC+ffvQpk0bZGZmYu/evfD399erM27cOJw6dQr/z959h0dRr20c/86W9EIn9C5IF1C6opRgo4lHxYqC7SgKAlIEBGwoB1BB0CPFduwIviqIYEEEpQgoTXqoIQHSe3bn/WNJIBBCQjbZTXJ/risXZHZ25pnZBHb2nuf3u+OOOzAMg7vuuovHH388e8zZgtQ4ZMgQrrvuOqpWrcoLL7zAhAl5X9xXrFiR/v3789FHH/Hyyy8Drvby5cuXM378eG644QYyMjJo1qwZS5cuzTGhYW4CAgJ49tlnGTRoEEePHqVr167Mnz//ouvndn66devG0qVLqV27Nl27di3QORARkdIjOjo5e86LrDkwdu48TWbmhXc/WK0GTZpUyNF50apVFcLCAj1QubjDpKWTCry+Qo3S6VJdGkf9fiUiYAV1kntRIzXne8einFsjN7pm0DWDiIiI5F9qRioOpwM/e+4hQ14u1aWRJa9ujQvXtVD32rrsW7WPyk0qE7k1kt3f7caR7sAaZKX8FeWZO2suvr6+Ba43N4ZpmheOLSBFIj4+ntDQUOLi4rLHTs2SmprKgQMHqFev3gUTybmLaZpkZmYWyZwaZcX55/Cvv/6iZ8+e7Nu3j6CgIE+XB0CHDh0YNmwYgwYNyvXx4vhZuxin00lUVBRVqlS5oN1eLk3nr3B0/gpP57BwiuL8ZWY62b379DmdF64Q4/jxpFzXL1fO94Kho5o1q4Sfn/ff56Kfv/wrSKcGwJS+U4ol1MjrvbDk5I7rBtM0efLJVaxadYgGDUIveP9/1O9X9gUuJdNIxWb60SCp7wXBhmma7N0bR48etXnzze5l4hqiKK6ZSuI1A3j2uqE46f+Xskevedmi17vsKSuvuWmazP15LnN/nkugbyCVg/M/T9bhw/Hs3Rub3aVhmmCx5P6+x+EwsdkMAgPtZGY6adGi8kWDjdxqjDgdQZBvEC/0e4HuV3a/5HPye83g/VewIl6sZcuWTJs2jQMHDtCiRQtPl8PJkycZMGAAd911l6dLERERN4uNTeWvv6JzBBjbtp0iNTXzgnUNAxo0KHfO8FGuP2vVCi4TH0qWdVkBRX6CjeIKNKT45dWlkRVoYEKQozqpltOu7yFHsFHc3Rqlla4ZpCw5lnCMlIyLjyBRGKP/PZrFnywGwGazEVY9jBv73sjTY57G188Xf7s/1YOrA5CWlkb79u3ZunUrmzdvpnXr1kVSk4iIpxiGwWPdHgNg7s9zAfIVbOS3SyNLQbo1znU5gUZBKNQQKaQHHnjA0yVkq1SpEqNHj/Z0GSIikouVKyN44okfmD27J716XXwcd6fT5MCBOLZujTpn+KhoIiLic10/MNBOy5aVcwwd1aJFJYKCch8/X8qG/AQbCjRKr7zm0jg30PBzVgBcf14s2CjOuTVKM10zSFlwLOEYd315FwlpCQV6nsMBThMwXTdmWK1cMLY7QMTBCIKbBVPnvjqYDpPkQ8nM/+98lv6zlBoDahDsG8zHt31M9eDqjB49murVq7N161b3HJyIiBe6nGDjUnNpXLCPM3/md24NKPpAAxRqiIiIiBQ50zQZP34Ne/bEM378Gnr2rIthGCQlpfP33ydzDB3111/RJCZm5Lqd2rWDs7susrow6tcvd9FWYSnbJtwygT17Yv
hgx8wLHru36XAFGqXYxbo0cgs0slws2FC3hojkV0pGCglpCfjafPG3+V90vcwMiImBxGRISoDUdMgaGN0AbDYIDHR9hYZCYIDrgePW4yQdT2Lbs9tc61oNDKtBzO8x1O1fl4S0BFIyUli2bBkrVqzg448/ZtmyZVx11VXq1hCRUqsgwUZBuzSyFKRbozgCDVCoISIiIlLkVqw4yMaNJwDYuPEE1133CSdOJLNnTwy5zW7m62ulWbNKtG59duioli3zP3apCLguKHZ9fBWGIxyzzffZy40/w9m19SrMp03ddV8KXaxLI69AI8vFgg11a4hIQfjb/An0CbxgeWIiHD8OUVGQnu5aZrWAzQrGmWHvTROcqRCbBDHAcSuUC4WwamCzuOa6qdyqMq0eaUXC4QT+fP1PMhIyiPy/SCr2qcjJqJMMHTqUJUuW8PrrrxffQYuIeFB+g42Cdmlkb//Mn5fq1iiuQAMUaoiIiIgUKdM0GTfu1xzLfv31aPbfw8ICcwwd1apVZRo3roDNVnontZPisWLFQTZsOAH0gFrbofIRONoA888ebOAEK1YcJDz84kOhScmUW5dGfgKNLLkFG+rWEJHCcDrg0GE4chgyMl2dGAEBuQ8xBYD97F8zM+D0aTgdA2YspJ5KJS0mjR+f+hFnhhMMKFe/HKe3n6bCrRUY/cRoHn30UaKjo1m9enVxHJ6IiFe4VLCR/y4Nk7MxxlmX6tYozkADFGqIiIiIFKkVKw7y559RFyx/8cUuPPRQC6pWvfBORpHCMk2TCRPWYLGA0wmcrOkKNSLrA2CxwIQJa+jVq67uui9Fsro0EhIyqFDBj5SUTCIDfyPC/2sMp4GvozxOcmkPO4+Pszxp1hj2+C8hPd1JWFJnbDYLCQnp6tYQkQJJSoJ//oG4uLPDShWEze76cmRCXCoYAb4EVQ/iqgebsX/5fjJTMonZE4NvBV+if4rGmmjlwQcf5JprruGtt96ib9++RXNgIiJeKK9gIz9dGqYlE0fwcXDYsSVVBfPsSnl1axR3oAGgWwBFREREisjZD5ZzvmO0Wg2WLNlDlSoBHqpMSrv0dAeHDiW4Ao1cOJ1w+HAC6emO4i1MitTJkyns2HGKoCA7p06lssf5E/sCl+DINDFSQkhPd+T7y0gJwZFpsi9wCXucP3HqVCpBQXZ27DjFyZMpnj5UESkBEhNg29+uQMPfH3x9L39bVpsrFMEwSNh3mjXP/8bRtUeJ3BBJWmwatW6sReI/iWzesJmaNWty/PhxBgwYkP38CRM0j5SIlA1ZwcZj3R4jKS2J6IToXLs0zr9GzQo0nPYUnH7xZAaeACPnzTC5dWt4ItAAdWqIiIiIFJmzw//k5HCYbNig4X+k6Pj62tiw4R6io10fPg/85H0OxEDbWzN45917AahSJQBfX10OlCaVKwfw3ns3kpSUwbc7v2T+xt+oTzmqBF3ucFFViUqMxKz3G/e2u4qbr7yNwEA7lSsrkBWRvCUnwfYdkJxyiaGmCsgwwFqjIn5dWlCzmoOoZduI3R9LxVYVoQZ0ua4Lf6z+g0WLFnHixAnCw8MBeOKJJ9xTgIhICXB+x0Z0dBIpKZaLdmmcG2gYDjtguoINyNGxcX63RkxsCjsP76NKhXLFGmiAOjVKranfTMUy1MLUb6Z6uhQREZEy6dzhf3KTNfyPmdtM4SJuUKtWCG3aVKVNm6r4h7o6MoIqmtnLatYM9nCFUhRq1Qphc9wKPvp7Hv5+duqH1SYoyOeyv+qH1cbfz85Hf89jc9wKatUK8fQhioiXc5qwew8kJ7s30Mhi8bFiKRdIZFoIzf7dHtNhsvOdnfhU8GHfnn389ddftG3blhtvvDH7OTfffDP333+/ewsREfFiWcHGA+2HcuJ0HKZvYq5dGhcEGqYFTCuGw5Zrx0ZWt0amw0ma7RSJsTC179RiDTRAoUapNPWbqUxcOhETk4lLJ5bJYKNu3brMmjWr0NuZP38+vXr1KpZ6DMNgyZ
Ilea4zZswYnnzyyULXIyIiRU/D/4iIJ3z8x8e89v1rGIZB9XLV3bLN6uWqYxgGr33/Gh//8bFbtukNdM0gUjROHIeYGPD3c3+gkcXPDzIyYH+ElbCrw4jeFE1GQgYTX5nI1q1b2bJlC9999132+p9++ikvvvhi0RQjIuKlDMOgfGQ3Ag9di2HPIMWMzfHvcq6BRvaDuQcbBuD46wjxn28l/ZOjRM3fzX09HmT06NGkpqZmP71Pnz7Url0bPz8/qlWrxr333suxY8fcdmwKNUqZrEDjXEUdbERHR/PYY49Ru3ZtfH19CQsLIzw8nN9++82t+8ntTf6iRYsoV66cW/eTJTU1lQkTJjBp0qQcy9944w2aNGmCv78/tWrVYvjw4Tl+aS/X8ePHs+8kOXjwIIZhsGXLlhzrjBw5kvfee4/9+/cXen8iIlK0sob/2bTpXpYs6QeAv7+NDRvuZtOme9m06V42bLhXw/+IiNsURaCRpbDBRlm7Zpg1axaNGzfWNYOUOenpcOw42Kxgsbp32yHhrfGpVSn7e39/iI2Dyn2uwrecL6fXnaZ6zeo0b96c5s2bEx4ezoEDBwBo0KABNWvWdG9BIiJeLi4ujf/9bxfVYnsQdORanNY08EsELhFoZMkl2DAxMWzpUMWPBp2eoEH9V2je/EHefvvtHO+Hrr/+ej777DP++ecfvvzyS/bt28fAgQPddmy6ii5Fcgs0skxcOhHTNBnbe6zb93vbbbeRnp7Oe++9R/369Tlx4gSrVq3i1KlTbt9Xcfriiy8ICQmhc+fO2cv+97//MX78eObPn0/nzp3ZvXs3DzzwAIZhMGPGjELtLyzs0mMdV6pUifDwcObOnctrr71WqP2JiEjRq1UrhFq1QggNdc2MabUatGlTFcvFxqQScaNDcYc4mXwSgJRM19waiWmJ/Hn8TwAqBVSidmhtj9Un7lWUgUaW6uWqcyz2GK9973ofelf7u/L93LJ2zTBmzBgWLFhAp06ddM0gZUpcnKuDItiv6PdlsYDVApFRFqp3r8Hh7w+RnJQMFYp+3yIiJcHixbs5ciSBatUCidhyDb5108lotAaHJQ7TNz73QMPicM2hkbXMtGI4wOkXTwYmFqdrfUtiFZIPNKVhw3JERaVz1VWd+eGHH5g2bRoAw4cPz95knTp1GDNmDP369SMjIwO73V7oY9MVtRczTZOktKR8fU1YMuGigUaWSV9P4vn/e/6S2yrI2N6xsbH8+uuvTJs2jeuvv546depwzTXXMHbsWPr06ZNjvSFDhlC5cmVCQkK44YYb2Lp1a/bj+/bto2/fvlStWpWgoCCuvvpqVq5cmf14t27diIiIYPjw4RiGgWEY/PzzzwwePJi4uLjsZc8///xF68xr/7n55JNPuPXWW3MsW7t2LZ06dWLQoEHUrVuXXr16cdddd7F+/fpLnquEhATuuusuAgMDqVGjBnPmzMnx+Lmt5PXquSaNveqqqzAMg27dumWvd+utt/LJJ59ccn8iIiJSdh2KO0Tj2Y1p+05b2r7TlgMxrjtVN0Vuyl7WeHZjDsUd8nCl4g7FEWhkuZyOjZJ2zVC9enVCQ0MLdc3QuXNnXTNImZOc4uqcsLq5Q+NcIeGtKdfn6uzvfXxdc3eEdqpDi9daEBAYkGP9unXrYpomrVu3LrqiRES8UFxcGh99tBN/fxsnTiSTkW4SfPRafI9cjTMgGqdPUi6BRiamLRXTlgrGOeMom1ZwWDH9Y3Hak7HF18SWGURCQjqpqQ7i4yNYv/4PfHx8cq3l9OnTfPTRR3Tq1MktgQYo1PBqyenJBD0RlK+vF759IV/bfGnZSwQ/GZzntpLTk/NdY1BQEEFBQSxZsoS0tLSLrnf77bcTFRXFsmXL2LRpE23atKF79+6cPn0agMTERG666SZWrVrF5s2b6d27N7feeiuHDrkutBcvXkzNmjWZMmUKx48f5/jx43Tq1IlZs2YREhKSvWzkyJGXtf/crFmzhnbt2u
VY1qlTJ/7888/sC5L9+/fz3XffcdNNN13yXL322mu0atWKzZs3M2bMGJ566il++OGHXNfN2v7KlSs5fvw4ixcvzn7smmuu4ciRIxw8ePCS+xQREZGy6WTySVIz8x7qJjUzNbuTQ0qu4gw0shQ02ChJ1wzR0dH83//9Hxs3bizUNcOmTZt0zSBlzp9/QkY6uOnzqnyxWMA04dTFf01FRMqkrC6NcuV8iYxMwmaz4PRNIDPkCDht4LRgnhtcWDIxbWfepxnOHMGGiQkWJzhtGKYFp2886enbSUwczYYNgzlwYBIpKTHceuuDOWp49tlnCQwMpGLFihw6dIilS5e67fgUakih2Gw2Fi1axHvvvUe5cuXo3Lkz48aN46+//speZ82aNaxfv57PP/+cdu3a0ahRI6ZPn065cuX44osvAGjVqhWPPPIIzZs3p1GjRkydOpUGDRrw9ddfA1ChQgWsVivBwcGEhYURFhaGj48PoaGhGIaRvSwoKOiCGvOz//PFxsYSFxdH9eo5LwwHDRrEpEmT6Nq1K3a7nQYNGtCtWzfGjRt3yXPVuXNnxowZwxVXXMGTTz7JwIEDmTlzZq7rVq5cGYCKFSsSFhZGhQpn+2ezaoqIiLjkPkVERESk9PJEoJGlIMFGSbpm+Oyzz2jbtm2hrxmmTJlCly5ddM0gZUrWNC5GMX/SZLNCYqIr3BARkQu7NNLTndhCEkls9iWOckewxtXAklzRFV5YMnMGGllDT50JNkzDAdYMDNOCLaEa1qRKOP1jsJQrR7kKz+DvP5yKFbsQGtqZiIi6OUYAGjVqFJs3b2bFihVYrVbuu+++Ao0QlBeFGl4swCeAxNmJl/x67ubnCrTd525+Ls/tBfgEXHoj57jttts4duwYX3/9Nb179+bnn3+mTZs2LFq0CICtW7eSmJhIxYoVs+/SCgoK4sCBA+zbtw9w3XU1cuRIrrzySsqVK0dQUBA7d+7MvuuqMPKz//OlpLjGnfbzyzkQ6M8//8y0adOYM2cOf/75J4sXL+bbb79l6lTXROwfffRRjn38+uuv2c/t2LFjjm117NiRnTt3Fvh4/P39AUhOzn9HjYiIiIiULqeTTjPvl3kkpiUWe6CRpXq56iSmJTLvl3mcTsr7NumScs1QqVIlypcvT3BwcKGuGV566SXeeustXTNImbJnDxhG8e/XaoOMTNdcHiIicmGXhiXQFWhkljuMJakCFtOONaUCluQK2UNOAa5AI4tpAcMBdtf7HWtiVSzpQRgOP3DYcQYmktZiAxZ7Fez2O8nIiGDZsk9Yt+5Y9iYqVarEFVdcQc+ePfnkk0/47rvv+P33391yjJoo3IsZhkGgb+Al15vabyo+Np9LzqkBMOmWSUzqMwnDze80/Pz86NmzJz179mTChAkMGTKESZMm8cADD5CYmEi1atX4+eefL3heuXLlABg5ciQ//PAD06dPp2HDhvj7+zNw4EDS09MLXVt+9n++ihUrYhgGMTExOZZPnDiRu+++myFDhmAYBi1atCApKYmHH36Y8ePH06dPH9q3b5+9fo0aNQpd//my2t+z7swSEREROZ/D4d71xPtUCKzAo9c9ymvfv8ax2GMeCTaOxR4jyDeIR697lAqBl56ZtyRcM/z0009kZmZis9myr5kKes0wYcIE7r33XoYMGQKgawYpE0wTDuwHo0nx79tqBWc6uOGfAhGREi+rSyM93cnevbEkOU/jbL0UR8gRLAkVME2razgpwMy0AyYYgPP8LZm4HgBMAzPDhtN5psvCaQWHnbSqf2Ft4SRmczjlgsM5duwz3n23Px07Vr/gs2en07WDvIYiLQiFGqXEhFsmAOQZbEzuM5mxvccWSz1NmzbNnsCuTZs2REZGYrPZqFu3bq7r//bbbzzwwAP0798fcF1UnD/+q4+PD47zrrxzW3a+/Oz/fD4+PjRt2pQdO3bQq1ev7OXJyckX/FJaz8yCZpomwcHBBAcH57rN85PI33//nSuvvP
Ki+wdyPbZt27Zht9tp1qxZvo5FREREyp7fN+Xv7uzNm+HqmkVcjBSZu9rfBeCRYONY7DFM02RU+KjsOgrKm68Zzg01LiavawaLJeegCLpmkNLO4XCFCh5o1MAwXKGKU8NPiYgQH59GaKgvtWoFs3nXPpztl+IsfwRrckUMw5b9D7XTJwEzKNrVkZFpAWvmmXH8DFxBx5m/Z/qAxYkzNBJrQjUMhy8YYJgWLGkhOGpuc/0bvP9m7HYbf/+9lHXrKvHnn5vo0qUL5cuXZ9++fUyYMIEGDRpc0JV6uTT8VCky4ZYJTOk7JdfHpvSdkh18uNOpU6e44YYb+PDDD/nrr784cOAAn3/+Oa+++ip9+/YFoEePHnTs2JF+/fqxYsUKDh48yNq1axk/fjwbN24EoFGjRixevJgtW7awdetWBg0alJ3gZalbty6rV6/m6NGjnDx5MntZYmIiq1at4uTJk7m2V+dn/7kJDw9nzZo1OZbdcsstvPPOO3zyySccOHCAH374gQkTJnDrrbdmX6hczG+//carr77K7t27mTNnDp9//jlPPfVUrutWqVIFf39/li9fzokTJ4iLi8t+7Ndff6Vr167ZLeUiIiIiWZymk0VbFjHur/75Wv+k5gkv8e5qfxejwkdhmibHYo9d+gluUNBAoyRdM/Tv358ffvihUNcMt956K3PnztU1g5QpXjGfhTfUICLiYbVqhfDll3255V+VMTp9jVHpGH6ZVfC1++LjY8XHx4olIBkzJArDYmIxfbGYvhhOuyvPMM50bmDB4vDDig8Wpy/YMzBDI7H5ZUBMMuaJBDKX7sX5WRSZW38mKXAOd9x1LxERXxMYGMDixYvp3r07V1xxBb1792bt2rW89dZb+Pr6uuU41alRyuTWsZEVaLhrIpZzBQUF0b59e2bOnMm+ffvIyMigVq1aDB06NHsiPMMw+O677xg/fjyDBw8mOjqasLAwrr32WqpWrQrAjBkzePDBB+nUqROVKlXi2WefJT4+Pse+pkyZwiOPPEKDBg1IS0vDNE06derEo48+yh133MGpU6eYNGkSzz//fI7n5Wf/uXnooYdo164dcXFxhIaGAvDcc89hmiYTJkzg6NGjVK5cmVtvvZUXX3zxkufqmWeeYePGjUyePJmQkBBmzJhBeHh4ruvabDbeeOMNpkyZwsSJE+natWt2K/4nn3xywTGKiEgJ0PoHEtuu4IVv45h466WHjBQpqF8jfmX498PZdHxTvp9TqVIRFiTFpjg7Ni6nQ6MkXTOMGzeOoUOHFvqawTAMnnvuOV0zSJlhs4El78yuyJhnPn+z6LZdEREAIuMiWZ3+FoG1o6jn3wibxZ79WHz6aY4mRWOYFnwsvtmdqaZpJ92RSrozDcMAP2tgjueZpp00Rwr2wGh864bgTHFS9+YmmE6TmKMnOfrdXraf/I1DRw/hZ/fjxx9/BOCpp55iz549LFu2jCpVqrjtGA2zKD7pllzFx8cTGhpKXFwcISEhOR5LTU3lwIED1KtX74KJ5i7H1G+mMmnpJCb3nZwddJimecH4sJK322+/nTZt2jB2rGvYLk+fw2XLlvHMM8/w119/YbNdXibp7p+1gnA6nURFRVGlSpULWvLl0nT+Ckfnr/B0Di/f8PfHM+vXl7K/L6oOytJMP38XdzD2IKN/GM3nOz4HINgnmH9f/QSv/DIDbGfGrI0BkoCQM18AmX7sH/4P9SrULpY683ovLDld7nXDx398zGvfv4ZhGEUSbLhjyClvdznv98+/ZvA0d1wzgGevG4qT/n9xn4EP7+PbgNsJsJbD5rz0/KDukpkJ6WYSta+I5et7PqdBhQZ5rq/XvGzR6132lPXXPDIukqc/fZrNhzZTt2JdfGw+2Y9FJ0TzT+Q/OJwO/Ox+F7zXMU2TtMw0LIYlx/POfTw5I5nMHzMJsgTR8ZGzQ0n9/s7vJEQnMGz2MKb0nYKf3Y9ly5YxYsQIvvzyS5o1a8bmzZtp3bp1nvXn95
qh7L2yZcSEWybg/K9TH5gU0muvvUZQUJCny8iWlJTEwoULC3VxIiIixWvqN1NzBBrg6qic+s1UD1UkpUVCWgLjV42nyewmfL7jcyyGhYfbPMyeJ/fwco+XmNdsN7y9yfW14zbXkzY+kr1sXrPiCzSkeBTlUFRlIdC4XLpmEHFp3Ngzw1A5HK5OEZ8LP38TESlTChNogKtz1c/ul2ugkfV4gD0Ah+kgISWBxNREAOKPxZNwKAEfXx++3vI1E5dOJOJIBEOHDuWDDz4gICDA7ceqdzkieahbty5PPvmkp8vINnDgQE+XICIiBTD1m6k5hoQ8V9Zy3YAgBeU0nby35T3G/TiOyMRIAK6vez0zw2fSKqxV9nqP3Fmbyj61eeopOJJc2bUwoRq1bG2YNQsGDPBA8VLkimIoKgUaedM1g4hL3brAIYp9bgtHJoSWd00YLiJSVhU20MgvwzCwGlbSDqTx8/ifMUwDM9M1DmDzgc0JCg1i6ealfPHiFwwZOoR27dpx8OBBNxxhTgo1RERERIpAXoFGFgUbUlCrI1bz9PKn2Ry5GYAG5Rvwn17/oU/jPrlenAwYAH37QoP7TCKAVq1NNv0XLjFXsZRw7gw2FGiISH61aQO2tZCe7uqcKA5Op+vP8hWKZ38iIt6ouAKNLIZhYK9lx9bVhh9++OzywcfuQ7XW1QA4su0IkScjSWySSGpGaqH3lxuFGl5GU5xIUdPPmIhI0ctPoJFl4tKJ/Lb3N25rcxsh/iGE+IUQ6h+a/fcQ/xCC/YKxemr2TfEKB2IOMHrlaL7Y8QUAIb4hTLx2Ik9c8wS+Nt88n2u1QmAgYEK5cgo0SotLvadzR7ChQKNs03WDFFS5UAgJgdhIiu3TpvR08PWDCuUhIaN49iki4k1OJpws1kADIP1oOs44JxkfZZBiScEINLA5bYSuCaVul7okHkgk5UgK/7nzP/yH/2Q/r127dtx999289957ha5BoYaXsNtds8knJyfj7+/v4WqkNEtOTgbO/syJiIj7TVo6qUDrf7/9e77f/n2e6wT5Bl009Aj1D83+e16PB/kFKRwpYeLT4nn515eZ8fsM0h3p2fNmTL5+MlUCq3i6PPEA65lUKj09/ZLXDYUJNhRoiK4b5HKEhkLCSVfYcKk5LpxO13wYDseZuThMwACrBaw215/k8fmbeeb5tWqdCewVaohIGXTw1EH2R+/Hz+aH3Xr2/+zLDTQyM51YLAYWS97rG/4GFe6tgOkwSTqWRNqKNHYs2UGta2rR/LbmBFQOIDk4mWpVqjGw/kCeG/scDRo04MUXXyzU8WZRqOElrFYr5cqVIyoqCoCAgAC3pWdZTNMkMzMTm83m9m2XFSX5HJqmSXJyMlFRUZQrVy77glhERNxvct/J+e7UAGhZsyV1K9YlPjWeuJQ44lPiiU+NJz4lnrTMNAAS0xJJTEvkGIWb/DfINyjPUCQ/AUmQbxAWi6VQdUjeHE4Hi7YsYvyP4zmRdAKA7vW6MzN8Ji2qtvBwdeJJNpuNgIAAoqOjsdvtl/xd7N+qP+kZ6cxcNZOjMUepFlrtkvs4Hnccp+lkePfh9G/Vn9TUohk2wBuV5Pf77qLrBikMf3+oWgUiD7uGoDr/nyinE9LTIDXN9fdzw4xspmt+DMPiCkZ8fV3biv9+C6k7j7jWsRgYgX74X1GNatc0xnFeHWlpabRv356tW7eyefNmWrduXWTHLCLiSe3qtmN079G8/N3LHI09So1yNTiZePKyAg3ThITEdKxWCyHBl0imDbAGut4jhASHcGrXKZwRTv756R+a9GqCXyc/6obWZfrt06lgVuC5sc+xe/duqlatWthDBhRqeJWwsDCA7GDD3UzTxOl0YrFYyuwb9MIqDeewXLly2T9rIiJSNLLmyMhPsDGl75Q859RIy0jLDjjODz3OD0DiUuJyXTcuJY4Mh+v2xaxw5Gjs0UIdY7BfcKG6RkL8QxSOXMQvB3/h6e+fZkvkFgAaVWjEf3
r9h1uuuKXEvv8Q9zEMg2rVqnHgwAEiIiLy9ZxrKl7DPS3vYcGGBRxMO0iVoIt3+UQlRmGaJg9e/SDXVLyGAwcOuKv0EqE0vN93F103yOWqUQOSYiAh8czwh7i6KpJTXIGG0wkYYDHOhB65/KqZpusrNQXSUsFmB6cJPnUqE9yrFRmpJhlRcaT9soUDXxvUvq12juePHj2a6tWrs3Xr1iI/XhERTxvYdiAAL3/3Mjsjd3Iq8RROp7PAQ06lpWXidJiYTicZGU7s9vxdqzlOOeAEGAEGB348gLOxk3rV6jH99um0qdOGLVu2ANCqVSu3dYAq1PAiWRcoVapUISPD/X2TTqeTU6dOUbFiRX2AcJlK+jm02+2600pEpJjkJ9i4VKAB4Gv3pbK9MpWDKxeqnrSMtIuGHvGp8cQln3ksj4AkLiWOTEcmAAmpCSSkJhQqHDEMIzscuVQoEuQbhJFuUDOsJuX8y51d1z+EQJ/AEvn/8vn2x+xn1A+jWLxzMQChvqFMvM41b4aP9RJ3Sl1CtLkJgIi0Xwpdp3iej48PjRo1Ij09Pd/PeaLeE1SuXJmZq2YSlxGXa8fG8bjj2H3sDO8+nDva3eHOkkuMkv5+31103SCFYbVCoytg+zZITnZ1WSQngyPT1X1htZLnsFJwplPDACyucCMjHZwZYMGC0+6H04D6Xf05dboS0X9H5wg1li1bxooVK/jyyy9ZtmxZkR6riIi3GNh2IFsPb2XGDzNwOp0E+wUXKNAwTUhJycQwDEzTJCU1E7s992sQnxo+pO5MJXp2NKbTBAdgQMhNIaTWTCU+M54nbniCT9/6lK6zu5KcnEyHDh345ptv3HS0CjW8ktVqLZI3kE6nE7vdjp+fX5l+g14YOociIlIQeQUb+Qk03MnX7ksVexWqhFz+PAymaZKWmZZ76JGcR2BybkfJme8zHZmYpulanhJfqGPLCkfy7BLxCyE0IO8ukiDfII/cmR2fFs+Lq19k1h+zsufNeLTtozzf7XkqBxYuzALXxPXRbATgYPrPTP1marH+7EnRsFgs+Pn5Feg593e5Hx+7D699/xqR8ZE55tg4Fusa2u7Z3s+W6Tk09H5fxD1CQ6FxE9i6BeKTXAFFfsKM3BiGa44NJ645NBISoW5dqGiLZ++eGPwrnZ1f6GTUSYYOHcqSJUsICAhw09GIiHi/FdtXsGL7CioFVSImKYa0zDT87Pl/r5iWlonTaWK1WjBNyMzIu1vDXstOcPdgzAyTlD9TwALOuk6CrEEE+wfzzup3eP6+53nooYeIiIhg8uTJ3HfffXzzzTduueZSqCEiIiJShCbcMoH9R4+zaOPc7GXFHWi4i2EY+Nn98LP7FTocSc1IzdkVcolhtOJS4jgVf4pUR2r243EpcTicDreGI/kdOiuvxwN9A/P1Rt3hdLBg8wKe++k5opJcw4/2rN+TGeEzaF6leaGOJcvUb6ZeEKplfV8Sfwal8HKbPFyTgouI25kQF+eaLsNidQ0/ZZ6ZK+OyNpc198bRKDL/t4y9pslehxMMaP5A8zPrmIx+YjSPPvoo7dq14+DBg246GBER77Zi+womLJlASkYKLWq0IDI+kj0n9pCakZqvYOPcLg1wXRc5nc48uzUMu4GtnCtasPWycer9U1h3WWnWqxlBvkHsj97P8z88z+t3vk7Pnj258sorqVWrFr///jsdO3Ys9DEr1BAREREpYve2HeoKNUzXJOJl/cNkwzDw9/HH38efqiH5myjO6XQSFRVFlSpVsu+eNk2TlPSUi3aI5DXPyPmPZ4UjWQFKYVgMy9nOkYuEIqdTT/P9/u85lnQMDKhRrgYjOo3gxkY3EuoTSlJaEgE+AYW6iym3QCOLgo2y7dxg45/IfwjyDVKgISJuk5KZwokTEHEUbAHgZ4WUFEhPB0zXPBqGQb66NkzzzBwcgGHNxFKzHIHdryAt0Yll12FCyxuEtA4hJTOF6J+isSZaGTt2bFEenoiIVzk30Khdob
ZreoMzw4zmN9g4t0sji8ViXLJbI0tKRgoB1wSQ9lsaQTcGYbVYqV+5Pvuj9/PUJ0/x+p2v4+/0P7OvtEIesYtCDREREZHikunLczc/5+kqSg3DMAjwDSDAN4Cw0MufzDYrHMlvAJLX407TidN0FjgcOXr6KM/sf4ZneCZ7mcWwXHbXyEe/f8QbP76R5z4VbJRtWQHGvF/m8eh1jyrQEJFC87f7E+wbzMn4BCJOpOH0BavdNdS6jx0smZCR4RpC6lw5Ag7zzB/m2cctFte8HJk+6a6Wj8rp2CtCerUwTn22i32r9lGxc0VS96SyeetmfH19c2y/Xbt23H333bz33ntFdegiIh6RW6CRJb/BxvldGlny060BkJyejK/NlyuvvZKN6zZy8NeDVGhQgdhDsVSqW4k9+/Zwz9R7cGxw0KBBA7d0aYBCDREREREp484NR6px4eTJ+WWaJsnpybmGHsfjj7N4+2J+PvAzTocTwzRoUK4BdULqkJqRekFAkhWOxCbHEpsc676DPY+CjbLtrvZ3Ed48nAqBFTxdioiUAtWDq/NB348ZPS6F5PVQp44rkDiXaUJKMpw+DQkJkJwCTsd5IYYVfHwgKAjKl4PQcq7t7EwcTWZ6PC32zwMg6gQkVvw/Er57ie9e/oDkrsnZdwIDHDt2jPDwcD799FPat29f9CdARKQY/bTrp4sGGlnyE2zk1qWR5VLdGlmBRtPqTQn1D6XutXXZt2oflZtUJnJrJLu/240j3cHhwMOENQvjk8WfXBA8Xy6FGiIiIiIibmAYBoG+gQT6BmaHIw6ng3f/fJfJGyYTnRwNwdCrQS9m9JpBsyrNct2OaZokpSXlb+L1XDpIth3dVqC6Jy2dpFCjDFOgISLutHtTdbavgXqVITAj93WCbFC5ClDF1bWRkuL60zwzNJWfnyvUyHZmOzZHMKbTQWBaAwDqhsLuE0+QnjadZR8vY+TIkTn3ExQEQIMGDahZs6abj1RExLN2Ht9JbEosVYKr5DlkbV7BxsW6NLJcrFsjJDzkgkADoGHPhjTs2RCAjk+e7cg4GnMUwzBI83HP0FOgUENEREREpEj8eOBHnl7+NH9H/Q1A44qNmRE+gxsb3pjnhYdhGAT5BRHkF0T1ctULvN+85tLIzeS+kwu8DxERkdwsWQKZmRAYmL/1rVZXR8blMAyoWNFGSsoTvPLKqzz22GME5nfHIiIl3NCuQzkRf4LPNn6GgUFoQOhF171YsJFXl0aW3Lo1cgs0LuZY7DFMTEb2Gsn1Ta4v6GFelEINERERkeJiwsqVEfTqVc/TlUgR2nNqD6N+GMXSf5YCUN6vPM93e57H2j2G3Wov8v1ndV3kJ9iY0neKujRERMQt9uyBP/6AihWLZvutWy+6YFmFCnD69BgmThxzQZBSt25dzHPHtRIRKUXsNnv2fI2fbfwMoEDBho/VN88ujSznd2sUNNBwmk5GhY9iUPtBBTm8S8p76nIRERERKbRzL6jHj1+jC+xSKjY1lpErRtLsrWYs/WcpVsPKk9c8yZ4n9zCs/bBiCTSyTLhlAlP6TslzHQUaIiLiTps2uebJKFeu+PZpsYCvL/z6a/HtU0TEW2QFG/9q9y+iE6OJS47Lc/1qodVoVLURAIkpyTicJhZL3qEGuLo1MjIcJKQkekWgAQo1RERERIrcxo0ncvx9xYqDnitG3C7Tmcm8jfNo9GYj/rPuP2Q4M7ix4Y38/djfvHHjG1QMKKJbVi9hwi0TuLNR7sHGnY0UaIiIiHvt3u368xI3/bpdYCDs2wdJScW7XxERb3A5wUb9ig3IyHCCJQO49A13hgFOIx1npo2m1TwfaIBCDREREZEiZZomc9/anP291WowYYK6NUqLlftXctXbV/HYt49xMvkkV1a6ku8Gfcd3d3/HlZWv9GhtixfDp2MmwMbzgo1NU/h0zAQWL/ZMXSIiUjr99Zdrku/iFhDgCjT27y/+fYuIeIOCBhtmcjC25CpYLAZO41
LBhonDSMdq2LElhOFM881z28URaIBCDREREZEik5iYTp8+XxEREZ+9zOEw2bBB3Rol3e5Tu+nzcR96ftCTbVHbqOBfgTdvfJOtj27lxkY3ero8HA546ikwTWDLmWDDNFx/bnZ1aDz9tGs9ERGRwnI6ITLSM6GGnx+kpUF0dPHvW0TEW+Q32MjIcHLkSCK+jnIEZFYD8go2XIGGxbQT4qgFGX4cPpxw0RqKK9AATRQuIiIiUiRWrYrgoYeWExGRAME5H8vq1ujVq+4lJ2YT7xKTEsPU1VN5c/2bZDozsVls/PvqfzPxuolU8K/g6fKy/forHDlyzoItE1xfZ5gmHD7sWq9bt2IvT0RESpnMTNf/LZ54W2MYrlAlI6P49y0i4k3yM3l4ZGQiqamZBATYMZzlIROSbZE4jQwsph3I+of8bKAR7KiJzRkAPiaxsanExKRSvnzOFLs4Aw1QqCEiIiLiVvHxaYwevZq339560XXO7dYID69XjNXJ5cp0ZvLOpneY+NNETqWcAuDmRjczvdd0mlRq4uHqckpLg08/zd+6x48XbS0iIlI2WK2eCTTAFaZYLGDTJ1wiInkGG1ldGlarkf1vtl+uwQYXBhqAzWaQlm5y+HBCjlCjuAMNUKghIiIi4jY//HCQIUO+59AhV0tu5cr+nDqVgjOXdS0W1K1RQqzYt4Lh3w9nR/QOAJpWbsqMXjMIbxju4cpyOn4c5s6Ft9+GqKj8PadataKtSUREygarFUJC4MSJ4t93RoYr0AgJKf59i4h4o4sFG+d2aZzr3GDDcWYoqvMDjSy+PrYc3RqeCDRAoYaIiIhIocXFpTFq1C/8979/AVCvXihz5/bg/vuX4cwt0cA1TMLhwwmkpzvw9dVbMm/0z8l/eGbFM3y751sAKvpXZMr1U3i47cPYLN7zmq1fD2+8AZ99dnbojRo1IDER4uPPzKtxHsOAmjWha9firVVEREqvFi3gwIHi329SEgQGQoMGxb9vERFvdX6wkZnp5MiRlBxdGuc6N9gwTFuugQbk7NZIMU57JNAAhRoiIiIihbJ8+QGGDl3BkSOu7ownnriKl1/uSlCQDxs23EN0dAqH4w7S73/TCAiw8cuGu7FYLABUqRKgQMMLnU45zZRfpjBnw5zseTOeuPoJJl43kfL+5T1dHuAKL778El5/HX7//ezyzp1dE4T36wf/938wcKArwDg32Mi6iJk1y3VnrYiIiDs0buyZ/SYnQ716UMF7prYSEfEK5wYb76z8gKQMK8G+oRdd389ZHmumDcO0YzP9Lrqer4+N6KRIfBODeH7AuGIPNEChhoiIiMhliY1NZcSIn1m4cBsADRqUY/78cK67rlb2OrVqhVCrVgihUYkAWKwGbdpUzQ41xLtkODJ4e9PbTPp5EqdTTgNwyxW3ML3ndBpX8tAnNeeJjoZ33oG33oJjx1zL7Ha4805XmNG27dl1BwyAL75wLT930vCaNV2BxoABxVq6iIiUci1agI/P2c6J4mCarlDjmmuKZ38iIiWN3Wbnya6jeP/97Zj+a8m02LGbF3ZgZK/vDL7kNjN9YnGmO6l5+hbuuuYud5abbwo1RERERAro22/38fDDP3DsWCKGAcOGteHFF7sQGOjj6dLkMn2/93uGfz+cnSd3AtCscjNmhs+kZ4OeHq7MZetW1xBTH33kmggcoGpVeOwxeOQRCAvL/XkDBkDfvvDLL07++Seexo1DuO46izo0RETE7dq0gSZNYPt2V+dEcUhIcAUoN99cPPsTESmJvl5yAJ8dPanZwuBEwDpwkmewkZdUSwwYTuol9uX0zitZt+4YnTrVcHPFl6ZQQ0RERCSfYmJSGT78J957bzsAjRqVZ8GCcLp0qenhyuRy7Tq5i2dWPMN3e74DoFJAJaZeP5UhbYZ4fN4MhwOWLnWFGb/8cnZ5u3au7ovbbwdf30tvx2qFbt2gadNUqlQJQY1CIiJSFKxWuO02+Osv1zCJdvuln1NYUVHQsS
M0b170+xIRKYni4tL46KOdBPj5UT3ldiwWg+N+ay8r2MgKNBok9aMGXdmbFsOiRdvp2LE6Rm4TdRQhhRoiIiIi+fD113t59NEfOH48CcOA4cPbMnVqFwICiuGKXdzudMppJv88mTkb5uAwHdgsNoZdM4wJ102gnF85j9YWEwPz58Ps2RAR4VqW9UHRU0+5Prwp5msGERGRfOndGz78EPbtg/r1i3ZfMTGucP/ee/X/oojIxXz55W6OHk2gXr1QLFholDQQoMDBRo5AI7UrAFWrBrJ+/XGPdGso1BARERHJw6lTKTz11I989JFrWKLGjSuwYEG4R1pspfAyHBnM2ziPST9PIiY1BoA+jfvwWs/XuKLiFR6tbedOV1fG+++7xgcHqFgRHn4YHn/cNReGiIiINwsJgZEj4emn4fTpopu8OzPTNc/UnXe6uhFFRORCWV0a/v42bDZXu7YFW4GDjdwCDYDgYB9OnEjySLeGQg0RERGRi1iyZA+PPvoDJ04kY7EYPPNMOyZP7oS/v7ozSqJle5YxYsUIdp3cBUDzKs2ZGT6THvV7eKwmpxOWLXOFGStWnF3eooWrK2PQIPD391h5IiIiBda1q6u78IMPwM8PAi5v2PaLcjrh4EFo1Mj1f6W6NEREcndul8a5ChJsXCzQyOKpbg2FGiIiIiLnOXkymWHDfuTjj10ffl95ZQUWLryR9u2rebgyuRw7onfwzIpnWL53OeCaN+OF61/goTYPeWzejIQEWLQI3nwT9uxxLTMM16Tew4a57jrVhzQiIlISuYbphKNHYdUqqFHDNZm3OzgccOAAVK8Or7xSdJ0gIiIlXW5dGufKT7BxqUADPNetoVBDRERE5BxffPEP//73KqKiXN0Zo0dfzaRJnfDz09umkuZU8ime//l55m6ci8N0YLfYear9Uzx37XOE+oVeegNFYN8+V5CxYIEr2AAIDYUhQ+Df/4Z69TxSloiIiFsFBMC0aTB2LKxc6fq/rnLlwgX2yclw5AjUqQOvvqrJwUVE8vJ//7ePw4fjMU3Ysyf2ousZRg98a6WSUHkDlvRMrA5XsJHpEwuGkwpHepMc3YI9XHwb6elONmw4zsaNkVx9dfHcCKircxEREREgKiqJJ55Yxeef7wagWbOKLFzYu9jelIn7ZDgyeGvDWzz/y/PEpsYC0K9JP17r+RoNKzQs9npM03Wn6uuvw7ffur4HaNzY1ZVx330QFFTsZYmIiBSp4GB47TV45x3X5OF797rmhyrosIoOB0RGQmoqXHcdjBlT9JOQi4iUdFdcUZ6HHmqRr3UdZjN+PL2QvxJXEmgNIN2Ziokv15W/m9atw/O1DYvFoGpVN7Xl5YNCDRERESnTTNPk889d3RknT6ZgtRqMGdOeCRM64Ourt0oliWmafLfnO55Z8Qz/nPoHgJZVWzIzfCY31Luh2OtJTnZ9iPPGG7B9+9nlN97oGgO8Z0+wXNgJLiIiUmr4+7v+z+va1dW5sX27K6SoWNE1qbjVmvvzTNP1/+jJk5CeDlWquIa0uvNOsGtqMxGRS7rmmmpcc03+b9AblXkNL3z7Ap9u+JRyPv6MCh/FoPaDirDCwtGVuoiIiJRZJ04k8e9/r+TLL12TGrRoUYmFC3vTtm2YhyuTgtoetZ0RK0awYp9rtu3KAZV58YYXefCqB7FaLvKJSRE5dAjmzIH//hdiYlzLAgNh8GB44glXh4aIiEhZ0qaNa+Lw336DJUtg3TrXZN+m6RqSymZz/el0Qmama7mfHzRsCAMGQO/eUKmSp49CRKT0stvsPHfzc5QPLE+t8rW4re1tni4pTwo1REREpMwxTZNPPtnFk0/+yKlTKdhsFsaNa8/48R3w8SneD8ClcE4mn2TST5N4e9PbOEwHPlYfnm7/NOO6jivWeTNME9ascQ0x9dVXrg9lwDVHxpNPwoMPusYTFxERKat8fOD6611fhw/Drl2uIal274a4ONf/nX5+rqGlGjWCBg2gaVNX4C
EiIkXPbrPzdI+nPV1Gvui/BhERESlTjh9P5LHHVrJ06V4AWrWqzKJFN9K6dRUPVyYFke5IZ876OUz+ZTJxaXEADLhyAK/2eJUGFRoUWx2pqfDJJ64hpjZvPrv8hhtcw23cfPPFh9YQEREpq2rVcn317OnpSkREpCRSqCEiIiJlgmmafPTRToYN+5GYmFTsdgvPPdeBsWPbY7frU+eSwjRNvtn9Dc+seIbdp1yTureq2opZvWfRrW63Yqvj+HGYOxfmzYPoaNcyPz+4915XZ0aL/M3JJyIiIiIiIgVUqqcmnDNnDnXr1sXPz4/27duzfv36i667aNEiDMPI8eXn55djnQceeOCCdXr37l3UhyEiIiKFdOxYIn37LuHee78jJiaVNm2qsnHjvUyc2EmBRgmy6/Quev+vN7d+fCu7T+2mSmAV3r31XTY9vKnYAo316+Huu6F2bZg61RVo1KwJL78MR47AO+8o0ChpdM0gIiIiIlKylNpOjU8//ZQRI0Ywb9482rdvz6xZswgPD+eff/6hSpXch5cICQnhn3/+yf7eMIwL1unduzcLFy7M/t7X19f9xYuIiIhbmKbJ++9v5+mnfyI2Ng273cKkSZ0YPfpqhRklSHRSNBN/msg7f76D03TiY/VhRIcRjO06lhDfkCLff0YGfPGFa4ip338/u7xzZ9cQU/37a7zvkkrXDCIiIiIiJU+pvfyaMWMGQ4cOZfDgwQDMmzePb7/9lgULFjBmzJhcn2MYBmFhYXlu19fX95LriIiIiOcdOZLAww+vYNmyAwC0a1eVhQt707x5ZQ9XJvmV7khn9vrZTPllytl5M5oM4LVer1G/fP0i3390tKvz4q234Ngx1zIfH7jzThg2DNq2LfISpIjpmkFEREREpOQplaFGeno6mzZtYuzYsdnLLBYLPXr0YN26dRd9XmJiInXq1MHpdNKmTRteeuklmjVrlmOdn3/+mSpVqlC+fHluuOEGXnjhBSpWrJjr9tLS0khLS8v+Pj4+HgCn04nT6SzMIV4Wp9OJaZoe2XdpoXNYODp/haPzVzg6f4VXUs6haZosXLidZ575mfj4dHx8rDz/fEeeeaYdNpvFM/8Hm2f36e3nzxuYpsn/7f4/Rq0cxd7TrgndW1dtzYSrJ9CnVR8slqJ9HbduhTfeMPj4Y0hLc92FX7WqyaOPmjzyCFSt6lqvJL2U3vD7620/+95yzQDed91QlnnD74oUL73mZY9e87JFr3fZo9e8ZMvv61YqQ42TJ0/icDiomnXFeUbVqlXZtWtXrs9p3LgxCxYsoGXLlsTFxTF9+nQ6derE9u3bqVmzJuBqIx8wYAD16tVj3759jBs3jhtvvJF169ZhtV44hMXLL7/M5MmTL1geHR1NamqqG460YJxOJ3FxcZimicVSqqdTKTI6h4Wj81c4On+Fo/NXeCXhHB45ksSoUb/z88/HAbjqqorMnNmRxo3Lcfr0SY/Vder0KcD1YX1UVJTXnj9vsOPUDp5f9zy/Hv0VgMr+lRl7zVgGNhxIYkJikZ0/hwOWL/dl/vxA1q3zyV7eqlUGQ4Yk0adPKj5nFkdFuX33Rc4bfn8TEhI8st+L8ZZrBvC+64ayzBt+V6R46TUve/Saly16vcseveYlW36vGUplqHE5OnbsSMeOHbO/79SpE1deeSVvv/02U6dOBeDOO+/MfrxFixa0bNmSBg0a8PPPP9O9e/cLtjl27FhGjBiR/X18fDy1atWicuXKhIQU/fjP53M6nRiGQeXKlfVLfZl0DgtH569wdP4KR+ev8Lz5HJqmybvv/s2oUatJSEjH19fKlCmdePrptthsnq81wXC9MTMMgypVqnjd+fMGUUlRTPp5Eu9ufhen6cTX6svwDsMZ03kMwb7BOJ1Ooq3Rbv/5i4mBBQtgzhyDiAhXV4bVanLbbTBsmEmHDlYMIwQo/vdu7uQNv7/nT6hdEhXFNQN433VDWeYNvytSvPSalz16zcsWvd5lj17zki
2/1wylMtSoVKkSVquVEydO5Fh+4sSJfI9ta7fbueqqq9i7d+9F16lfvz6VKlVi7969uV6g+Pr65jopoMVi8dgvlWEYHt1/aaBzWDg6f4Wj81c4On+F543nMCIijiFDVrByZQQAHTtWZ8GCcJo0ufhQL8XNYpw9X952/jwtLTONN9e/ydTVU4lPcw25c3vT25nWYxr1ytfLsa47f/527nRN/P3++5Cc7FpWsSI88gg89piB66b7CyeALsk8/fvrbT/33nLNAN553VCWefp3RYqfXvOyR6952aLXu+zRa15y5fc1K5WvrI+PD23btmXVqlXZy5xOJ6tWrcpxZ1VeHA4Hf//9N9WqVbvoOkeOHOHUqVN5riMiIiJFw+k0mTdvC82bL2Llygj8/Gz85z/d+PXXO70q0JDcmabJkl1LaPZWM0b9MIr4tHjaVGvD6gdW89ntn10QaLiD0wnffgvh4dC0Kcyb5wo0WrSAd9+Fw4fhxRc5E2hIaadrBhERERGRkqlUdmoAjBgxgvvvv5927dpxzTXXMGvWLJKSkhg8eDAA9913HzVq1ODll18GYMqUKXTo0IGGDRsSGxvLa6+9RkREBEOGDAFcEwJOnjyZ2267jbCwMPbt28fo0aNp2LAh4eHhHjtOERGRsujAgVgeeuh7fvrpMACdO9dgwYJwrriigocrk/zYGrmV4d8P56eDPwEQFhTGy91f5r5W9+XoanGXhARYtAjefBP27HEts1igTx946im47jowSldThuSTrhlEREREREqeUhtq3HHHHURHRzNx4kQiIyNp3bo1y5cvz54I8NChQznaWWJiYhg6dCiRkZGUL1+etm3bsnbtWpo2bQqA1Wrlr7/+4r333iM2Npbq1avTq1cvpk6dmmuruIiIiLif02kyd+4Wnn12NUlJGfj723j55a488cRVWK2lsgG1VDmReIIJP03g3T/fxcTE1+rLyE4jGdNlDEE+QW7f3969MHu2a86MrPnmQkNhyBD497+hnvubQaSE0TWDiIiIiEjJY5imaXq6iLIiPj6e0NBQ4uLiPDZReFRUlCYoLQSdw8LR+Sscnb/C0fkrPE+fw337YnnooeX88ssRAK69tibz54fTsGH5Yq+loPZF7aPh+IYE+QYR90ZcmfsZTMtM4/U/XueF1S+QkO5KF+5odgfTekyjTrk6+dpGfn/+TBNWrYLXX3cNNZX1TrdJExg2DO69F4Lcn594PU///oLn3wuXJDpXnuMNvytSvPSalz16zcsWvd5lj17zki2/74NLbaeGiIiIlA5Op8ns2ZsZO3Y1ycmZBATYmDbtWh5//CosFo0Z5M2y5s0Y+cNI9sfsB6Bd9XbMCp9F59qd3bqv5GT44APX5N87dpxdftNNrjCjZ0/XkFMiIiIiIiJSsinUEBEREa+1Z08MDz64nDVrjgLQrVst5s8Pp379cp4tTC5p8/HNDP9+OL9E/AJAtaBqvNLjFe5peY9b582IiIC33oL//hdiYlzLgoLggQfgySfhiivctisRERERERHxAgo1RERExOs4HE7eeONPxo1bQ2pqJkFBdl599ToeeaSVujO8XGRiJM/9+BwLNi/AxMTP5seoTqMY3Xm02+bNME1Ys8Y1xNRXX4HT6Vpev74ryBg82DV3hoiIiIiIiJQ+CjVERETEq/zzz2kGD17OunXHAOjevTbvvhtO3br6lNqbpWam8vrvr/Piry9mz5txV/O7eKXHK9QOre2efaTCokWuyb83bz67vHt31xBTN98MVqtbdiUiIiIiIiJeSqGGiIiIeAWHw8nMmZuYMOE3UlMzCQ72Yfr06xg6tCWGoe4Mb2WaJot3LmbUD6M4EHsAgKurX82s3rPoVKuTW/Zx7Bi89ZbBvHmVOXXKNXSVn59r0u9hw6B5c7fsRkREREREREoAhRoiIiLicTt3nmLw4OX88cdxAHr1qst//9uL2rVDPFyZ5OXP438y/PvhrI5YDUCN4Bq80uMVBrUY5J
Z5M9avdw0x9dlnkJlpAFZq1TL5978NhgyBihULvQsREREREREpYRRqiIiIiMdkZjr5z382MGnSWtLSHISE+DBjxvU8+GBzdWd4scjESMavGs/CLQsxMfG3+TO682hGdRpFoE9gobadkQFffOEKM/744+zyLl1M7rsvlvvvD8XHRz8bIiIiIiIiZZVCDREREfGI7dtPMnjwcjZsiATgxhvr8c47vahZM9jDlcnFpGamMnPdTF5a8xKJ6YkA3N3ibl7u/jK1QmsVatvR0fD22zB3rmu4KQAfH7jzTtcQU1ddZRIVlYZN715FRERERESKxJ4Tewj1D6VKSBVPl5InXRaKiIhIscrMdPLqq+uZPHkd6ekOQkN9mTXreu6/v5m6M7yUaZp8seMLRq8czcHYgwC0r9GeWb1n0aFmh0Jte8sWeOMN+N//IC3NtSwsDB57DB55BKpWdS1zOgu1GxEREREREcnD2r1rGbN4DBUCK/DGnW9Qu2JtT5d0UQo1REREpNj89Vc0gwcv588/TwBw8831efvtntSooe4Mb7Xp2Cae/v5p1hxaA7jmzZjWYxp3tbjrsufNyMyEr792DTG1evXZ5e3awVNPwb/+5erSEBERERERkaKXFWicTDxJVHwUwz4Z5tXBhkINERERKXIZGQ5eeWU9U6euIyPDSfnyfrz++vXcc09TdWd4qWMJxxj/43je2/Je9rwZz3Z+lpGdRl72vBkxMfDuuzBnDkREuJZZrTBwoCvM6NAB9OMgIiIiIiJSfLICjVOJp6hfqT6Zzkx2Hd/l1cGGQg0REREpUlu2RDF48HK2bIkCoE+fBsyb15Nq1YI8XJnkJiUjhRnrZvDympdJykgC4J6W9/By95epGVLzsra5Ywe8+Sa8/z4kJ7uWVazoGl7qsceg5uVtVkRERERERArh3ECjXqV6GIaB3WqnbqW6Xh1sXN6YASIiIiKXkJ7u4Pnnf+Pqqz9ky5YoKlTw46OPbmbJkn4KNLyQaZp8uu1TmsxpwnM/PUdSRhIdanbg94d+54P+HxQ40HA64dtvoVcvaNYM5s1zBRotW8L8+XD4MLz4ogINERERERERT8gt0MhyfrBx6NSh7MceeOABDMNwBSB2O/Xq1WP06NGkpqZmr9OnTx9q166Nn58f1apV49577+XYsWNuq12dGiIiIuJ2f/55gsGDl/PXX9EA9O/fiLfe6kFY2OUNWyRFa8PRDQz/fji/Hf4NgFohtZjWYxp3Nr+zwMODxcfDokWuzoy9e13LLBbo08c1xNR112mIKREREREREU/KK9DIklfHRu/evVm4cCEZGRls2rSJ+++/H8MwmDZtGgDXX38948aNo1q1ahw9epSRI0cycOBA1q5d65b6FWqIiIiI26SlZfLCC7/z8st/4HCYVKrkz+zZ3fnXvxpr7gwvdCzhGGNXjeX9re8DEGAPYEznMTzT6RkC7AEF2tbevTB7NixYAAkJrmWhoTBkCPz731CvnrurFxERERERkYLKT6CRJbdgA8DX15ewsDAAatWqRY8ePfjhhx+yQ43hw4dnb6NOnTqMGTOGfv36kZGRgd1uL/QxKNQQERERt9i4MZLBg5ezbdtJAG6//Qpmz+5OlSrqzvA2KRkp/Gfdf3h5zcskZ7gmubiv1X28dMNL1Aipke/tmCasWgWvv+4aaso0XcubNIFhw+DeeyFII42JiIiIiIh4hYIEGlnODzbsaTlDiW3btrF27Vrq1KmT6/NPnz7NRx99RKdOndwSaIBCDRERESmktLRMJk9ex6uvrsfhMKlc2Z85c3pw++2NPV2anMc0TT7d/imjfxjN4fjDAHSq1YlZ4bO4usbV+d5OcjJ88AG88YZrEvAsN93kGmKqRw/XkFMiIiIiIiLiHS4n0MhybrCRfCCZYxuPERQURGZmJmlpaVgsFmbPnp3jOc8++yyzZ88mOTmZDh068M0337jtWBRqiIiIyGVbv/44gwcvZ8eOUwDceWcT3njjBi
pXLtjQRVL01h9dz9PLn2bdkXUA1A6tzas9XuVfzf6V7zezERHw1lvw3/9CTIxrWVAQDB4MTzwBV1xRVNWLiIiIiIjI5SpMoJElK9hYl7KOCg0r8MUHXxDqE8rMmTOx2WzcdtttOdYfNWoUDz30EBEREUyePJn77ruPb775xi1DUyvUEBERkQJLTc1k0qTfmD59I06nSZUqAcyd24MBA/Sptrc5Gn+UsavG8sFfHwAQaA9kbJexjOg4An+7/yWfb5qwZo1riKmvvgKn07W8fn148klXoBEaWpRHICIiIiIiIpfLHYFGFrvVTpBvEPGp8cxYP4M37nyDBQsW0KpVK+bPn89DDz2UvW6lSpWoVKkSV1xxBVdeeSW1atXi999/p2PHjoU+JoUaIiIiUiDr1h1jyJAV7Np1GoC7776S11+/gYoVL/0BuRSf5Ixkpq+dzrTfpmXPm/FA6wd48YYXqR5c/ZLPT02FTz5xDTG1efPZ5d27u4aYuukmsFqLqnoREREREREpLHcGGlkshoUAn4Ack4ePGzeOESNGMGjQIPz9L/xswHnm7ri0tLRC7x8UaoiIiEg+paRkMHnyJt5+eyemCWFhgcyb15O+fRt6ujQ5h2mafLztY55d+SxH4o8A0KV2F2aGz6Rd9XaXfP6xYzB3Lrz9NkRHu5b5+7sm/X7ySWjevCirFxEREREREXcoikAji2EYOSYPnzFwBlarlTlz5tC1a1c2bNhAly5dKF++PPv27WPChAk0aNDALV0aAJrCUURERC7pt9+O0qbNh8yb5wo07ruvKdu3P6BAw8v8fuR3Oi3oxN2L7+ZI/BHqhNbhs4GfsfqB1ZcMNP74A+6+G+rUgRdecAUatWrBK6/A4cOukEOBhoiIiHjKAw88gGEYGIaB3W6nXr16jB49mtTU1Ox1+vTpQ+3atfHz86NatWrce++9HDt2zINVi4h4xuUGGjFJMSSlJeVr3XMnDx/xxQjuefAeXn31Vfz8/Fi8eDHdu3encePGPPTQQ7Rs2ZJffvkFX1/fwhxWNnVqiIiIyEUlJ2cwfvwaXn9905nuDH/efjucPn0UZlyOxLREXvj2BSbeOtGt2z0cd5ixq8by0d8fAa55M8Z1HcfwDsPznDcjPR2+/NI1X8Yff5xd3qWLa4ipfv3ApneLIiIiUgDHEo6RkpHi9u0mpCVwbfdrmfbmNDIyM9i+ZTuj/j2KpIwk5sycA8D111/PuHHjqFatGkePHmXkyJEMHDiQtWvXur0eERFvdbmBxon4E+w+sRu7xU7TGk0J8QvJdb3W97TO/vu5wQbVYOPOjdSuWJsff/zRHYdyUbpMFRERkVytXn2YBx/8nn37YgF44IFmjBnTjEaNanq2sBJo9o+zs/8+6etJGIbBhFsmFHq7SelJvLb2NV797VVSMlMwMBjcejAv3PAC1YKrXfR50dGuzou33oLjx13LfHzgrrtg2DBo06bQpYmIiEgZdCzhGHd9eRcJaQlu33bEwQgcKQ4eX/149jJrQyvvL36f8VPGUz24OsOHD89+rE6dOowZM4Z+/fqRkZGB3W53e00iIt6msIGG03SSkpnCjqM7LhpsbPlwC0fWu4Y6NiwGfuX8qNqqKjuu2cGwT4YxqtMoFsxewI8//khkZCTVq1fnnnvuYfz48fj4+LjlOBVqiIiISA5JSemMHfsrb77pmh26Ro0g/vvfXoSH1yUqKsrD1ZU8U7+ZyqxVs3Ism7jU1alxucGG03Tyv7//x5iVYziacBSArrW7Mqv3LNpUu3gisWWLqyvj448ha362sDB47DF45BGoWvWyyhEREREBICUjhYS0BHxtvvjbLt4tejl27t9JalQqmx/djGE1sAfbyUzOxK+6X3ZnSJ8+fdiyZQtRUVGEhobi7+9Pu3btFGiISJlQ2EDDNM3sTv+k9KQ8g43KV1am1d2tMB0mcYfj2PLhFmoaNdll28XId0dSK6UWb7/9Ng0bNmTbtm0MHTqUpKQkpk
+f7pZjVaghIiIi2X7++RAPPfQ9+/fHATBkSAumT+9GaKgvTqfTw9WVPFO/mZodYJzvcoONdYfX8fT3T7P+6HoA6pary/Se0xlw5YBc37RmZsLSpfDGG7B69dnlV1/tGmLq9ttdXRoiIiIi7uJv8yfQJzDPdRwOSEqC5GTX+xXTBKsF/P0hIBB8fYBz3tpYDNe0sFZfK6bDJD02HQBb6NmPtq6//npCQ0P58ssviYqKIigoiMqVK7v9+EREvI07Ag0/u1/28kCfwDyDDYvNgl+Ia33/8v5UalyJ07tP0/HWjhzkIFWbVqVJ2ybUrlib+vXr888//zB37lyFGiIiIuI+iYnpPPvsat56awsAtWoF8+674fTqVdejdZVkeQUaWQoSbByOO8yzK5/l420fAxDkE8T4ruN5usPT+Nn8Llg/JgbefRdmz4ZDh1zLbDYYONA1xFSHDpDP97kiIiIibmE64XQMnDjheq/iyASH82x2YeJ6f2Kzgn8AVAuDypXBfuYGDJ9gHzpP7owjzcH+Zfs5uf0kGaczsrc/fPhwTp48yYQJE4iIiOCpp55i48aNpKenu23IExERb+PuQCPLpYKNLPHH4ok5EIN/ef8cc2wM+2QYb9z5BrUr1iYuLo4KFSoU6jjPpVBDRESkjFu1KoIhQ77n4MF4AB55pBWvvnotISG+Hq7M+6RmpHI66XSeXzHJMWw8uJH9J/fna5uXCjaS0pN49bdXeW3ta9nzZjx41YO8cMMLhAWFXbD+jh2urowPPnDd+QhQsaJreKnHH4caNS7v2EVEREQKIyYG9u+DxERwmmC3u7pFLdac6zmdri6OhASIj4eDEVCzJmC6xm4PDHN1gNS7sR5H1xzFbuQcWqpSpUrZXw0aNGDnzp1s2rSJjh07FtORiogUn53HdxZJoJHlYsFG1PYolo1chuk0cWa60unmA5sDZycP33l8J0998hQTr5vIm2++6bYuDVCoISIiUmbFx6cxevRq3n57KwB16oTw7rvh9OhRx8OVFS3TNElMS7xkOHE66TSnk08TkxST/feU9JQiqWnS0kkXhBpO08lHf33EmFVjOJZwDIDr6lzHzPCZXFXtqpzrOuG771xhxg8/nF3esqVriKm77nIN5SAiIiJS3ByZcPAgHD0GTgf4+V0YZJzLYnF92e2uIanS0mDfPkhLB0dcGssGn/kQLcM1NKojyUFqSmr285999llmz55NcnIyV13les+UljWZmIhIKeNv98ff7o/DdOA0nViNPP6BPSO/gUaW84MNgIqNKtLiXy1wpDvY/9N+DItBtdbVsp/jcDoAsKXYGNh3ILfffjtDhw69zKO8kEINERGRMmjFioMMGfI9hw8nAPD446155ZVrCQ4uOW35DqeD2OTY7O6IS4UT53ZSZDoyL3u/FsNChcAKuX8FVKB8YHlW717Nl39+me9tTu47Ocf3aw+v5enlT7Ph2AYA6pWrx/Re0+nfpH+OO2/i42HRInjzTdi790x9Fujb1xVmXHuthpgSERERz8nIgF074eQpV0jhd+nPznIwDNdznA5IdoDh78NVz3YmwNc1/FSmmcmpv0/x4fwP6Rfejw0bNtC7d2969+7Nhg0beOGFFwgICKBDhw5Fc4AiIh5Wt1JdZt05i6c/eZr9J/dTv1J9rHkkxwUNNLKcG2xYHVasPlYCK7s651oNasXqaas5tO4QtTvWJjUjlUOnD9GyfEv+nPsnnTt35p133in0sZ5LoYaIiEgZEheXxsiRP/Puu38DUK9eKPPnh3P99bU9VlN6ZnquQzhdKqCITY4t1H59bD5UDKx40XAi6+/lA8vnWBbsF4zFYslz28O6D8vXnBoAU/pOye7SiIiNYMyqMXyy7RMAgn2Cee7a53iq/VP42s4OB7Z3ryvIWLjQNTQDQGgoDBkCTzwBdete3jkRERERcReHA3btcgUafn5gvfTNwxdlsYJhAScGB04F0rwZtHq4FT+P+ZmAegG88+Y73N
n3ThYvXsykSZNISkqiWrVq9O3blw8//JDNmzdr+CkRKbWurHZlvoKNyw00smQFG5nJmQRZgrKXGxaDhr0asuOrHVRsVZGjCUdpWc4VaFxz9TUsXLjwktfQBaVQQ0REpIxYvvwAQ4eu4MgR16fgTz55FS+91JWgoMJ3Z5imSXJ6cp7zTFwsnEhKSyrUvoN8gy7eOXFeQFEhsALlA1whhb+Pf77HG70cWUFFXsFGVqCRmJ7ItDXTmL5uOqmZqRgYDGkzhKnXT6VqUFXANfzCypWuIaa+/db1PUCTJq6Jv++9F4KCLrorERERkWJ16BCcPFn4QCOLT/UKONMySE2F3buhdWuDOrfUYff/drNlxxaa1WjGjz/+eF4Nh/jwww81/JSIlHqXCjYKG2hkCfQJ5LR5moSUBOJT47Pn2KjWuho7luzg7x/+plvvbmyat4n6deszffp0oqOjs58fFnbhvJCXQ6GGiIhIKRcbm8qIET+zcOE2ABo0KMf8+eFcd12tC9Z1Op3Ep8bnGkycSjzFkZNHSDVTs4d9OjecSM9Mv+waDcPIDhuy/rxUOFEhsALlAsrhY/PeIbPu73o/xxKOMe/HeRc89ugNj3Jvl3t5b8t7jF01luOJxwHoVrcbM8Nn0jqsNQBJSfDhh64wY8eOs8+/6SbXEFM9e2qIKREREfEuCQlw5AjYbe4JNM7l7w/xCRBxCCpfXZk9n+zJMfxUly5dKF++PPv27WPChAk0aNBAXRoiUiZcLNhwV6CRxWaxkWFm5Jg8PN2ZTnCbYBLXJ3L1TVfzxb4vOLDvADVr1szxXDPr7rzC1uCWrYiIiHi5F759gee/fp7n+zzPxFsvPSRQSZfpyCQmOYbF327huak/cDLhFDRK5rqeFel4fTm+OPoq7/z3bCARkxSTPSm203Re9n7tVvsFwcMFIUUu4USof6jb21E97VDcIRrPbkxqZiqEAPHnPBgC83bP4+3db2PielPXoHwDpveaTt/GfTEMg4gImDMH3n0XYmJcTwsKgsGD4cknoVGjYj8kERERkXw5cgQyMyEw0P3bzppE/OhRCKlgoVK3Shcdfqp3794899xz+Pr6XnrDIiKlwPnBRpBPEHuj97ot0AAICXd1Z2TNsdGwakNOJ53mprtvYsbXM6gYVJHRI0a7ZV8Xo1BDRERKvanfTGXS15MAmPT1JAzDyB4ayNulpKcUeCLs00mnSUhNOLuRTmf/+ks6/PL9pfcb4BOQazjhhx81K9ekQlDuAUWgb2CRDulUkpxMPukKNMAVaoAr2Ag5+72JSaA9kOe7Pc+T1zyJj9WXX3+F11+HJUvAeSZfatDAFWQ88IBr7gwRERERb5WSAomJ4O4cISS8dfbffXxc3awnT0FY7zA+n/85DSo0uGD4KRGRsigr2Bj030FsP7Ydu9WOv4+/2/cT6BNIQloCWw9vpceVPZhxhyvQKA4KNUREpFTLbbLmrO+LK9gwTZOE1IQCzTOR9ZWakVq4naf5EepXnvo1q1EpuOJFuyXO7aYoH1g+1zs4nE4nUVFRVKlSpdR1VRSLc8KMcy25cwldqvfg4w9dYcaWLWcf697dNcTUTTe5f+gGERERkaIQF+eaJNzmnhuCL8pqdc3ZEVg8n5+JiJQoe07sIcORgc1qw2k6MU3T7TcgOpwODAxsVhtxKXEciz2mUENERKSwcgs0slxOsOFwOnLOJZFXSHFeOOFwOi77OKwWa77mmbA5A1k4dz/ff30C0gK4om51Fi64iU6dalz2vqXofbKwAoPehqy50/z9XZN+DxsGzZp5tjYRERGRgjBNVwdFcdyM4eMDKemQUsh7gERESpuvt3zN1G+mYrPYaFO7DTuO7yA5I5kAe4Dbgg2H00FqRioVAirQJKwJR2KPMPzT4cy8YyYtarZwyz7yolBDRERKpbwCjSwTl04k4lQEfVr1ybVbIiYpJsf3scmxharJ1+ZLxaCK+Z5nIusr2C/4km88vvpqD4899gMnTj
ixWKowcmQ7nn++E/7+9kLVLEVv/nwgGmrVgieegCFDoEIFT1clIiIiUnDRJ11zadiLoanXYgGnCelpRb8vEZGSIivQyHBkULN8TQzDoFn1Zmw/tt1twUZWoFE+oDxNqzfFx+ZD/Ur12X9yf7EFGwo1RESk1MlPoJFl/pr5zF8zv0DbD/YLviCQKB9Y/pIBRVGMYXnyZDJPPvkjn3yyC4Arr6zAwoU30r59NbfvSwrGkc/mnNatYfwb0K8f2PTOTEREREqww4dcc4IV57CZaQo1REQAWL5t+QWBBrg+w3BXsJFboAFgsVhyBBuzB82mSbUmbju28+nSWURESp1JSycV+Dm9m/fOEUaUDyyfazhRPqA8dpt3dD988cU/PP74SqKjU7BYDJ599homTuyIn5/+e/cGmzfnb73HHoOBtxZtLSIiIiLFITnFNQQVVsBZ9PszyP+NJCIipd32o9uJS4mjWrlqF4QW7gg2LhZoZLFYLAT5BnEy8ST7T+5XqCEiIlIQk/tOznenBsCUvlOKbdJwd4iKSuKJJ1bx+ee7AWjWrCILF/bm6qvVneFNTp5073oiIiIi3s40PV2BiEjZ9cQNTxCVEMXSrUsBCPELyfF4YYKNSwUaACfiT5CWmcaw7sO4sfmNhT+gPBTDKIciIiLFa8ItE5jSd0q+1i1JgYZpmnz66S6aNVvE55/vxmo1eO65DmzadK8CDS/UoFolyPDLe6UMP9d6IiIiIqWAvx8YBlBM4YYJWIpxqCsREW/ma/dlSt8p9G3VlxPxJ4hPjb9gnaxgw9/uT3JGMmY+0uj8BhqpGakM6z6Mh7o85LYJyS9GnRoiIlIqZQUVeXVslKRA48SJJB5/fCWLF+8BoGXLyixc2Js2bap6uDK5mIE9axP29D9Exp+E4MMwqB+kB8DCX7PXqRZaiYGTa3uuSBERERE3qlETDItrSChbMd1G63vhZ2siImVWVrABuKVjwxsDDVCoISIipVhewUZJCTRM0+STT3bxxBOrOH06FZvNwvjx7Rk3rgM+ProtzZtZrTDnpdoMHFgbMy3UtdC0wvE2ZL3Hmz27eCfSFBERESlKYVXBZgVHOkU+NojTCRYDfH2Ldj8iIiWNu4INbw00QMNPiYhIKZfbUFQlJdA4fjyR/v2XMmjQt5w+nUqrVpXZsOEenn++swKNEmLAAPjiC6h03ghTNWu6lg8Y4Jm6RERERIqCxQIBgcUzeXdGBtjs4HeJ0T5FRMqiwg5F5c2BBijUEBGRMmDCLRNoEtYEgFtb3ur1gYZpmnzwwXaaNVvE0qV7sdstTJ7ciQ0b7qF16yqeLk8KaMAA+Ph/Z79ftcrJgQMKNERERKR0Cg11hRuOzKLdT2YmVKqorlcRkYu53GDD2wMN0PBTIiJSRjQOa8yuyF3c1OImT5eSp2PHEnnkkRV8881+ANq0qcrChb1p2bKyhyuTwjh3Astu3VwX+iIiIiKlUYA/BAZCciwEFNGnThkZrmGuKlaEjKLZhYhIqVDgoajSkwG8OtAAdWqIiIh4BdM0ee+9bTRrtpBvvtmP3W7hxRe78PvvgxRoiIiIiEiJYRiuoTYtVshId//2TSekp0NYmCs8ERGRvBWkYyPAJ8DrAw1Qp4aIiIjHHTmSwMMPr2DZsgMAtGvn6s5o3lxhhoiIiIiUPKGhUL0aHD4CVpt7u1RTUiAoEOrWhSLITERESqX8dmy0rt0aq8WKzXJhbOAtgQYo1BAREfEY0zRZsGAbI0b8RHx8Oj4+ViZP7sTIkVdjs6mZUkRERERKrrp1ITERYmLB3989wUZyMth9oFEj15/pSjVERPItP8GGr8031+d6U6ABCjVEREQ84tCheIYOXcGKFQcBaN++GgsWhNO0aSXPFiYiIiIiUkgpmSlgg7qNIGMvxMeDjx2s9svbnumE1FSw+0LdBmAPhKT0M/sREZF8y0+wcT5vCzRAoYaIiEixMk2Td9/9m2ee+ZmEhHR8fa288EIXhg9vi9
Wq7gwRkUtJSUnBNE0CAgIAiIiI4KuvvqJp06b06tXLw9WJiJRt/nZ/gn2DSUhLIC0zDYCqdYATEBcHOMBud827kV+ZmeBwgl8ohFUF/CA29ezjwb7B+Nv93XkYIiKlWkGCDW8MNEChhoiISLE5eDCOoUNXsHJlBAAdO1ZnwYJwmjSp6OHKRERKjr59+zJgwAAeffRRYmNjad++PXa7nZMnTzJjxgwee+wxT5coIlJmVQ+uzse3fUxKRs4OCtOE1b/Ce4vg6FFXqBFaDgL8wXZe94bTCakpru6OlFQICYGbboI773TNpXE+f7s/1YOrF9kxiYiURvkJNrw10ACFGiIiIkXO6TR5++2tjB79C4mJGfj52XjxxS489VQbdWeIiBTQn3/+ycyZMwH44osvqFq1Kps3b+bLL79k4sSJCjVERDzsYgFDw35wR0/48UdYvBi2b4dTR10hxvn8/aF2Feh3L9x8M9SqVbQ1i4iURXkFG94caIBCDRERkSJ14EAsDz30PT/9dBiALl1qsGBBbxo1Ku/hykRESqbk5GSCg4MBWLFiBQMGDMBisdChQwciIiI8XJ2IiOQlMBBuvRVuuQUiI2HfPtdXfLwr3PDzg9q1oWFD10Tj9sucg0NERPInt2AjJT3FqwMNUKghIiJSJJxOk7lzt/Dss6tJSsrA39/GK69cyxNPXIXF4n1vCERESoqGDRuyZMkS+vfvz/fff8/w4cMBiIqKIiQk70kORUTEOxgGVKvm+urSxdPViIiUbecGG0u2LMHX5uvVgQYo1BAREXG7fftieeih5fzyyxEArr22JvPnh9OwobozREQKa+LEiQwaNIjhw4fTvXt3OnbsCLi6Nq666ioPVyciIiIiUvJkBRsh/iHUrlCbezrc47WBBijUEBERcRun0+TNN/9k7NhfSUnJJDDQzrRp1/LYY63VnSEi4iYDBw6kS5cuHD9+nFatWmUv7969O/379/dgZSIiIiIiJZev3ZfxN4/3dBn5olBDRETEDfbsieHBB5ezZs1RAK6/vhbvvhtO/frlPFuYiEgpFBYWRlhYGADx8fH8+OOPNG7cmCZNmni4MhERERERKWoWTxcgIiJSkjkcTmbM2EjLlu+xZs1RgoLszJ3bg5Ur/6VAQ0SkCPzrX/9i9uzZAKSkpNCuXTv+9a9/0bJlS7788ksPVyciIiIiIkVNoYaIiMhl+uef03Tt+gnPPPMzqamZ9OhRh7//foBHH9VwUyIiRWX16tV07doVgK+++grTNImNjeWNN97ghRde8HB1IiIiIiKl2wMPPIBhGBiGgd1up169eowePZrU1FQADh48yEMPPUS9evXw9/enQYMGTJo0ifT0dLfVoOGnRERECiirO2PChN9IS3MQHOzDf/7TjSFDWnj1RFoiIqVBXFwcFSpUAGD58uXcdtttBAQEcPPNNzNq1CgPVyciIiIiUvr17t2bhQsXkpGRwaZNm7j//vsxDINp06axa9cunE4nb7/9Ng0bNmTbtm0MHTqUpKQkpk+f7pb9K9QQEREpgJ07TzF48HL++OM4AL161eW//+1F7dohHq5MRKRsqFWrFuvWraNChQosX76cTz75BICYmBj8/Pw8XJ2IiIiISMmVkJCOv78Nmy3vAZ58fX2z57irVasWPXr04IcffmDatGn07t2b3r17Z69bv359/vnnH+bOneu2UEPDT4mIiORDZqaTV175g6uuep8//jhOSIgP8+eHs3z5bQo0RESK0dNPP83dd99NzZo1qV69Ot26dQNcw1K1aNHCs8WJiIiIiJRQGRkOHnlkBdOmrS/Q87Zt28batWvx8fG56Drndlu7gzo1RERELmHbtmgGD17Oxo0nALjxxnq8804vatYM9nBlIiJlz+OPP84111zD4cOH6dmzJxaL6z6t+vXra04NEREREZHLtHJlBFu3RrNvXyx33dWE+vXL5bremjVr2LdvX47htw3DYMaMGYBrTo2pU6fy448/EhkZSe
XKlYmKimLmzJluq1WdGiIiIheRkeHgxRd/p23bD9m48QShob4sWtSbb78doEBDRMSD2rVrR//+/QkMDMQ0TQBuvvlmOnfu7OHKRERERERKnowMB4sWbcc0TWJj0/jwwx15rl+xYkXWrl3L8uXLuf7667HZbGzZsgUgx5waq1atIiMjA8Mw2Ldvn9vqVaghIiKSi7/+iqZDh//x3HNrSE93cMst9dm+/QHuv7+5JgMXEfGw999/nxYtWuDv74+/vz8tW7bkgw8+8HRZIiIiIiIl0sqVEWzbdpLq1YOoUMGPb7/dz/79sRdd326307FjR8LDw1m5ciX+/v7Zc91lTSLevHlzHnjgAXr27Mnzzz/P4sWL3VavQg0REZFzZGQ4mDJlLe3afcCff56gfHk/PvjgJr7+uj81aqg7Q0TE02bMmMFjjz3GTTfdxGeffcZnn31G7969efTRR93a0i4iIiIiUhZkdWkApKU58Pe35qtbI8uOHTswDIPjx4+TkpICwNGjR+nWrRtt27Zl4cKFJCQkaE4NERGRorBlSxSDBy9ny5YoAPr2bcjcuT2oVi3Iw5WJiEiWN998k7lz53LfffdlL+vTpw/NmjXj+eefZ/jw4R6sTkRERESkZMnq0qhUyZ/t20/i72+jRo1gvv12P/fc0zTXuTUiIyMJCgoiMzOTtLQ0DMOgfPnyzJkzh7vuuotu3bpRp04dpk+fzvr163n99deZNGmS22pWqCEiImVeerpr7oyXXvqDzEwnFSr4MXt2d+68s4mGmhIR8TLHjx+nU6dOFyzv1KkTx48f90BFIiIiIiIl07ldGjExqaSmZpKe7qBmzWBiYlzdGhMnXvjeu3Llyqxdu5akpCRmzpyJzWajYcOGvPrqq/j7+7N371727t1LzZo1s58zatQoRo4c6Za6NfyUiIiUaX/+eYKrr/6QKVPWkZnpZMCARuzYMZi77rpSgYaIiBdq2LAhn3322QXLP/30Uxo1auSBikRERERESqZzuzSOHk3EarVgmnDkSALly+c+t0aXLl3o1KkTDRs2pFWrVixYsIA//viDypUrExUVxb///W9M0+To0aM0atSIe++9F4fDgWmabqtbnRoiIlImREe5xnXctes0dIO0tEymTv2dV175A4fDpFIlf+bM6c7ttzdWmCEi4sUmT57MHXfcwerVq+ncuTMAv/32G6tWrco17BARERERkQud36WRluYgIMCG02kSF5d2yW6NLBaLhXHjxjFixAgGDRqEv78/R48e5frrr8+eU8NicW9vhTo1RESk1DNNkx07TgGwZMleNmw4Ttu2H/Dii7/jcJjcfvsVbN/+AP/6l4abEhHxdrfddht//PEHlSpVYsmSJSxZsoRKlSqxfv16+vfv7+nyRERERERKhAu7NAwMw8hXt8b5br/9dqxWK3PmzMmeJLx27dpMnz6d6OhoIiMjiYyMdFvt6tQQEZFSb8WKg8TGpkI5iDgYT4cOH+F0QuXK/rz1Vg8GDmzs6RJFRKQA2rZty4cffphjWVRUFC+99BLjxo3zUFUiIiIiIiXDxbo0svj6WgvUrWGz2XjiiSfynFMDcNsQVOrUEBGRUs00TSZMWAPndGA4nXDHHY3ZsWOwAg0RkVLi+PHjTJgwwdNliIiIiIh4vYt1aWTJq1tj0aJFLFmy5IJtjhkzJsecGrl9uYtCDRERKdVWrDjIhg0n4Lz/PAcPbk6lSgEeqkpEREREREREpPjl1qXh62u9YL2sbg3DgNhYV7eGt1CoISIipVZWl4bVmnOeDKvVYMKENW69S0BERERERERExNtdqksjy+XMrVFcFGqIiEipldWl4XDkDC8cDpMNG06wYsVBzxQmIiIiIiIiIlLM8tulkcVbuzU0UbiIiJRKWV0aFotrDo3zWSwwYcIaevWqm+sdCSIi4l1GjBiR5+PR0dHFVImIiIiISMl0bpfGtm0nL9qlkcXVreHgyJEEatQI5ttv93PPPU2pX79c8R
WdC4UaIiJSKqWnOzh0KCHXQANcQcfhwwmkpzvw9dV/hyIi3m7z5s2XXOfaa68thkpEREREREqe3Lo0AgIu/XlIVrdGzZrBxMS4ujUmTuxU1OXmSZ/iiIhIqeTra2PDhntYunQvTz754wWPv/nmDfTr10iBhohICfHTTz95ugQRERERkRKroF0aWbyxW0NzaoiISKlVs2Ywb7554Z29VqvB++9vp0aNIA9UJSIiIiIiIiJSfAo6l8b5vG1uDYUaIiJSas2Zs4Xdu2MuWK6JwkVERERERESkrDi3S+Po0cR8d2lkcXVrwJEjCZQv78e33+5n//7Yoiv4EhRqiIhIqbRvXwzDh1847FSWrInCTdMsxqpERERERERERIpPYbs0snhTt4ZCDRERKXWiopIID/+CzMyLBxbnThQuIiIiIiIiIlIaFbZLI4s3dWso1BARkVIlMTGdW275in374qhZM4jly29j06Z76datFgBjx7Vn06Z72bTpXjZsuFcThYuIiIiIiIhIqeSuLo0s3tKtoU9yRESk1MjIcPCvf/0fGzZEUrGiPytX/ovGjSsAELrOF4DatYNp06aqJ8sUEZFCio2NZf369URFReF0OnM8dt9993moKhERERER75LVpREYaGf37tMYBjidJnD5Q3E7HCYHD8ZRtWog3367n3vuaUr9+uXcVnN+KNQQEZFSwTRNHn54BcuWHcDf38a33w7IDjRERKT0+L//+z/uvvtuEhMTCQkJydE6bxiGQg0RERERkTN+++0ovr5Wjh9PJDU1Ex8fKxkZzks/MQ+GYRAbm0ZQkA++vlbWrTumUENERORyPPfcGhYt2o7VavDZZ7fSvn01T5ckIiJF4JlnnuHBBx/kpZdeIiAgwNPliIiIiIh4rWefbU+fPg154omVhIT4UKVKoFu2e/hwPC1bVuKll66lQYNybtlmQSjUEBGREu+ttzbz0kt/APD227245ZYGHq5IRESKytGjRxk2bJgCDRERERGRSwgO9mHz5iji49OpXz8Uq9U9U2zXqhXCnj2xREYmeWSUDE0ULiIiJdrixbt54olVAEyZ0pmHHmrh4YpERKQohYeHs3HjRk+XISIiIiLi9U6dSuHjj3cSGGh3W6ABEBRkJzPTyaJF287M0VG81KkhIiIl1q+/HmHQoG8xTXjkkVY891wHT5ckIiJF7Oabb2bUqFHs2LGDFi1aYLfbczzep08fD1UmIiIiIuJdvvxyN4cOJRAUZOfIkQS3bts0TTZtOsGaNUe49tpabt32pXhVqHH48GEMw6BmzZoArF+/nv/97380bdqUhx9+2MPViYiIN9m2LZo+fb4iLc1Bv34NmTOne47JYkVEpHQaOnQoAFOmTLngMcMwcDgcxV2SiIiIiIhXMgxo27ZqkW3fYjFwOMp4p8agQYN4+OGHuffee4mMjKRnz540a9aMjz76iMjISCZOnFig7c2ZM4fXXnuNyMhIWrVqxZtvvsk111yT67qLFi1i8ODBOZb5+vqSmpqa/b1pmkyaNIn//ve/xMbG0rlzZ+bOnUujRo0KfrAiInLZDh+Op3fvL4mNTaNz5xr87383u7WNUkREvJfT6XTr9nTNICIiIiKl1dChrRg6tJWny3A7r/oEaNu2bdkXEJ999hnNmzdn7dq1fPTRRyxatKhA2/r0008ZMWIEkyZN4s8//6RVq1aEh4cTFRV10eeEhIRw/Pjx7K+IiIgcj7/66qu88cYbzJs3jz/++IPAwEDCw8NzXMSIiEjRiolJpXfvLzl6NJErr6zA11/3w9/ffuknioiInEfXDCIiIiIiJY9XdWpkZGTg6+sLwMqVK7PHw23SpAnHjx8v0LZmzJjB0KFDs++kmjdvHt9++y0LFixgzJgxuT7HMAzCwsJyfcw0TWbNmsVzzz1H3759AXj//fepWrUqS5Ys4c4777zgOWlpaaSlpWV/Hx8fD7juLnP3HWb54XQ6MU3TI/suLXQOC0fnr3B0/iAlJY
O+fb9ix45TVK8eyHffDaBcOd/8nZMz3ZBl/RwWhn4GL595zjnT+bs8+vkrHG84f+7c9y+//ML06dPZuXMnAE2bNmXUqFF07dq1QNvxhmsG8L7rhrLMG35XpHjpNS979JqXLXq9yx695iVbfl83rwo1mjVrxrx587j55pv54YcfmDp1KgDHjh2jYsWK+d5Oeno6mzZtYuzYsdnLLBYLPXr0YN26dRd9XmJiInXq1MHpdNKmTRteeuklmjVrBsCBAweIjIykR48e2euHhobSvn171q1bl+sFyssvv8zkyZMvWB4dHe2RO7WcTidxcXGYponF4lVNOiWGzmHh6PwVTlk/fw6Hk6FDf+XXX48SEmLnww+74eeXSlRU/v49zfqwKCEhIc87cOXiyvrPYGHExMZk/z0qKkrn7zLo569wvOH8JSS4Z2LCDz/8kMGDBzNgwACGDRsGwG+//Ub37t1ZtGgRgwYNytd2vOWaAbzvuqEs84bfFSlees3LHr3mZYte77JHr3nJlt9rBq8KNaZNm0b//v157bXXuP/++2nVyjXe19dff33RcW1zc/LkSRwOB1Wr5pwEpWrVquzatSvX5zRu3JgFCxbQsmVL4uLimD59Op06dWL79u3UrFmTyMjI7G2cv82sx843duxYRowYkf19fHw8tWrVonLlyoSEhOT7eNzF6XRiGAaVK1fWL/Vl0jksHJ2/winL5880TZ588keWLTuMj4+Vr77qx3XX1SrQNrI6AYODg6lSpUpRlFnqleWfwcIqf+zsG7MqVaro/F0G/fwVjjecPz8/P7ds58UXX+TVV19l+PDh2cuGDRvGjBkzmDp1ar5DDW+5ZgDvu24oy7zhd0WKl17zskevedmi17vs0WtesuX3msGrQo1u3bpx8uRJ4uPjKV++fPbyhx9+mICAgCLdd8eOHenYsWP29506deLKK6/k7bffzu4YKShfX9/sD9HOZbFYPPZLZRiGR/dfGugcFo7OX+GU1fP30ku/M3fuVgwDPvzwJm64oU7BN2Kc+ePMOZTLU1Z/BgvLOOd86fxdPv38FY6nz5+79rt//35uvfXWC5b36dOHcePGuWUfF1MU1wzgndcNZZmnf1ek+Ok1L3v0mpcter3LHr3mJVd+XzOvemVTUlJIS0vLDjQiIiKYNWsW//zzT4Huqq1UqRJWq5UTJ07kWH7ixImLjn97PrvdzlVXXcXevXsBsp9XmG2KiEjBLVz4N+PHrwHg9ddv4PbbG3u4IhER8aRatWqxatWqC5avXLmSWrXy38WnawYRERERkZLJq0KNvn378v777wMQGxtL+/bt+c9//kO/fv2YO3duvrfj4+ND27Ztc1zsOJ1OVq1alePOqrw4HA7+/vtvqlWrBkC9evUICwvLsc34+Hj++OOPfG9TREQK5rvv9jN06AoAnn32Gp58so2HKxIREU975plnGDZsGI899hgffPABH3zwAY8++ihPP/00I0eOzPd2dM0gIiIiIlIyedXwU3/++SczZ84E4IsvvqBq1aps3ryZL7/8kokTJ/LYY4/le1sjRozg/vvvp127dlxzzTXMmjWLpKQkBg8eDMB9991HjRo1ePnllwGYMmUKHTp0oGHDhsTGxvLaa68RERHBkCFDAFfb0tNPP80LL7xAo0aNqFevHhMmTKB69er069fPvSdCRERYv/44t9/+NQ6HyX33NeXll7t6uiQREfECjz32GGFhYfznP//hs88+A+DKK6/k008/pW/fvgXalq4ZRERERERKHq8KNZKTkwkODgZgxYoVDBgwAIvFQocOHYiIiCjQtu644w6io6OZOHEikZGRtG7dmuXLl2dP2nfo0KEcY3TFxMQwdOhQIiMjKV++PG3btmXt2rU1Vl+RAAC0jklEQVQ0bdo0e53Ro0eTlJTEww8/TGxsLF26dGH58uVum/RQRERcdu8+zc03LyY5OZPw8Lq8+244hmF4uiwREfES/fv3p3///oXejq4ZRERERE
RKHsM0TdPTRWRp2bIlQ4YMoX///jRv3pzly5fTsWNHNm3axM0330xkZKSnSyyU+Ph4QkNDiYuLIyQkpNj373Q6iYqKokqVKpoo5zLpHBaOzl/hlJXzFxmZRKdO/+PAgTjatq3Kzz/fQVCQT6G3229OP5ZuWcrcu+fyaLdH3VBp2VNWfgaLwo9b9tF9aUNIC8bxQqzO32XQz1/heMP58/R74ZJE58pzvOF3RYqXXvOyR6952aLXu+zRa16y5fd9sFd1akycOJFBgwYxfPhwbrjhhuxxZ1esWMFVV13l4epERKSoJSSkc9NNX3LgQBwNGpTj228HuCXQEBGRkq1ChQrs3r2bSpUqUb58+Ty7906fPl2MlYmIiIiISHHzqlBj4MCBdOnShePHj9OqVavs5d27d3dLe7mIiHiv9HQHAwYsZfPmKCpX9uf77wdStWqgp8sSEREvMHPmzOxhamfOnKkhCUVEREREyjCvCjUAwsLCCAsL48iRIwDUrFmTa665xsNViYhIUXI6TR58cDkrV0YQGGjnu+9uo0GDcp4uS0REvMT999+f/fcHHnjAc4WIiIiIiIjHedXAYk6nkylTphAaGkqdOnWoU6cO5cqVY+rUqTidTk+XJyIiRWTMmNV89NFObDYLX3zRh3btwjxdkoiIeCmr1UpUVNQFy0+dOoXVavVARSIiIiIiUpy8qlNj/PjxzJ8/n1deeYXOnTsDsGbNGp5//nlSU1N58cUXPVyhiIi426xZm3jttQ0AzJ8fTu/e9TxckYiIeDPTNHNdnpaWho+P5mESERERESntvCrUeO+993j33Xfp06dP9rKWLVtSo0YNHn/8cYUaIiKlzKef7mL48J8AeOWVrtx3XzMPVyQiIt7qjTfeAMAwDN59912CgoKyH3M4HKxevZomTZp4qjwRERERESkmXhVqnD59OtcLkSZNmnD69GkPVCQiIkXlxx8Pcd99ywB48smrGD1a8yeJiMjFzZw5E3B1asybNy/HUFM+Pj7UrVuXefPmeao8EREREREpJl4VarRq1YrZs2dn34WVZfbs2bRs2dJDVYmIiLtt3RpFv35LSE93MHDgFcyceT2GYXi6LBER8WIHDhwA4Prrr2fx4sWUL1/ewxWJiIiIiIgneFWo8eqrr3LzzTezcuVKOnbsCMC6des4fPgw3333nYerExERdzh4MI4bb/yShIR0rruuJh98cBNWq8XTZYmISAnx008/eboEERERERHxIK8KNa677jp2797NnDlz2LVrFwADBgzg4Ycf5oUXXqBr164erlBERArj1KkUevf+kuPHk2jRohJLlvTDz8+r/isSEZES4MiRI3z99dccOnSI9PT0HI/NmDHDQ1WJiIiIiEhx8LpPkqpXr37BhOBbt25l/vz5vPPOOx6qSkRECis5OYNbblnMP/+cplatYJYtu41y5fw8XZaIiJQwq1atok+fPtSvX59du3bRvHlzDh48iGmatGnTxtPliYiIiIhIEdN4HyIiUuQyM53ceec3/P77ccqX92P58tuoUSPY02WJiEgJNHbsWEaOHMnff/+Nn58fX375JYcPH+a6667j9ttv93R5IiIiIiJSxBRqiIhIkTJNk8cfX8n//d8+/Pxs/N//9adp00qeLktEREqonTt3ct999wFgs9lISUkhKCiIKVOmMG3aNA9XJyIiIiIiRU2hhoiIFKnJk9fy3//+hcVi8PHHN9O5cw1PlyQiIiVYYGBg9jwa1apVY9++fdmPnTx50lNliYiIiIhIMfGKOTUGDBiQ5+OxsbHFU4iIiLjVO+9sZfLkdQDMmdOdfv0aebgiEREp6Tp06MCaNWu48soruemmm3jmmWf4+++/Wbx4MR06dPB0eSIiIiIiUsS8ItQIDQ295ONZLeYiIlIyfP31Xh57bCUAEyZ04NFHW3u2IBERKRVmzJhBYmIiAJMnTyYxMZFPP/2URo0aMWPGDA9XJyIiIiIiRc0rQo2FCxd6ugQREX
GjtWuPcscd3+B0mjz0UAsmT+7s6ZJERKSUqF+/fvbfAwMDmTdvngerERERERGR4qY5NURExK127TrFrbd+RWpqJrfcUp9583piGIanyxIRERERERERkVLAKzo1RESkdDh2LJHw8C84fTqV9u2r8cknt2CzKT8XERH3sVgseYblDoejGKsREREREZHiplBDRETcIi4ujRtv/JJDhxK44oryfPNNfwIDfTxdloiIlDJfffVVju8zMjLYvHkz7733HpMnT/ZQVSIiIiIiUlwUaoiISKGlpWXSr98S/vormrCwQJYvv41KlQI8XZaIiJRCffv2vWDZwIEDadasGZ9++ikPPfSQB6oSEZGi9sADD/Dee+8BYLPZqFmzJrfffjtTpkzBz88PgD59+rBlyxaioqIoX748PXr0YNq0aVSvXt2TpYuIiJsp1BARkUJxOk3uu28ZP/98mOBgH5Ytu4169cp5uiwRESljOnTowMMPP+zpMkREyrxjCcdIyUhx+3YT0hK4tvu1THtzGhmZGWzfsp1R/x5FUkYSc2bOAeD6669n3LhxVKtWjaNHjzJy5EgGDhzI2rVr3V6PiIh4jkINERG5bKZpMmLET3z22T/Y7Ra++qovrVtX8XRZIiJSxqSkpPDGG29Qo0YNT5ciIlKmHUs4xl1f3kVCWoLbtx1xMAJHioPHVz+evcza0Mr7i99n/JTxVA+uzvDhw7Mfq1OnDmPGjKFfv35kZGRgt9vdXpOIiHiGQg0REbls06dv4PXX/wTgvfdupHv3Ov/P3n3HR1GubRz/bUnvIQkhpFBCEUVRUCyooCgRLEhRREU4vIgeERQbKorYOHBQqgKeI2BBPRZEKYKIYkMRURQEgtRQEkII6XV35/1jSUJIAoGUTbm+fvJJdnZm9p5dEnf2mue5XVyRiIg0dEFBQaUahRuGQWZmJt7e3rz77rsurExERHILc8nMz8TD6oGX1ata951oScRmthHoGQhA1oEscnbn4BbsVu7IkNTUVBYtWsTll1+uQENEpIFRqCEiImflnXf+4vHHvwPglVe6c8cd57i4IhERaQymTZtWKtQwm82EhobStWtXgoKCXFiZiIgU8bJ64ePuU637tFqsHP7jMN+N/A7DYeAodIAJIm+PLLXeE088wezZs8nJyeHSSy9l2bJl1VqHiIi4nkINERE5Y19+uZd//GMVAGPHdmbs2C4urkhERBqLoUOHuroEERFxkSYdmtDxHx2x59vZ/cVu7NgJvCiw1DqPPfYYw4cPZ9++fUycOJEhQ4awbNmyUoG4iIjUbwo1RETkjGzcmET//p9hszm44472/Pvf3V1dkoiINHB//vlnpdc9//zza7ASERFxJYuHBZ9w5wiQC+69gLXj1uL2oxsMLFknJCSEkJAQ2rZtyznnnENUVBQ///wzl112mYuqFhGR6qZQQ0REKm3XrjR6915MVlYh114bzcKFN2A264onERGpWZ06dcJkMmEYxinXM5lM2O32WqpKRERcyWQ2EXNjDDve20HepLxy13E4HADk5+fXZmkiIlLDFGqIiEilJCdnExf3McnJOXTqFMbixbfg7m5xdVkiItII7Nmzx9UliIhIHRR6cSh/f/A37775Ln179WXDhg1069aNoKAgdu3axTPPPEPr1q01SkNEpIFRqCEiIqeVlVXAjTd+ys6dabRo4c+KFf3w9/dwdVkiItJIxMTEuLoEERGpg8wWMyHdQ3hj1hsMumUQixcvZsKECWRnZ9OsWTPi4uIYP348Hh46dxERaUgUaoiIyCkVFtoZOPBzNmxIokkTL1atGkCzZr6uLktERBq5rVu3kpCQQEFBQanlN998s4sqEhGRmtTpvk7lLg+PC+ejNz+idXBrvv7669otSkREXEKhhoiIVMgwDEaM+JKVK/fi5WVl+fJ+tG0b7OqyROqNhPQEUnJS2JG237nAZOe3xN8wm80AhHiHEB0Q7cIKReqf3bt3c+utt7J58+ZSfTZMJm
ePJ/XUEBERERFp2BRqiIhIhcaP/4G33voLi8XEhx/eRNeuzVxdkki9kZCeQLvZ7cizndC40j2Hi/97cfFNT6sn8aPiFWyInIExY8bQsmVL1qxZQ8uWLfnll184evQojzzyCFOnTnV1eSIiIiIiUsMUaoiISLlmz/6Nl19eD8Abb1zPjTe2dnFFIvVLSk5K6UCjHHm2PFJyUhRqiJyBn376ia+//pqQkBDMZjNms5lu3boxadIkRo8eze+//+7qEkVEpAZsmruJA98dAMBkMeEZ7ElIlxCCe5eMJL/55pvZtGkTycnJBAUF0bNnTyZPnkxERISryhYRkRpgdnUBIiJS93zyyQ5Gj3bOR/vCC1fwj390dHFFIiIiTna7HT8/PwBCQkI4dOgQ4GwmHh8f78rSRETkDBkG5OZARgZkpENWFpxqFsHQC0Lp+XpPrpl+DefedS6H1h4icWli8f09evTgww8/JD4+nk8++YRdu3YxYMCAWjgSERGpTRqpISIipXz33X7uvHM5hgH33XcBTz99qatLEhERKXbeeefxxx9/0LJlS7p27cqUKVNwd3fnjTfeoFWrVq4uT0RETqOwAI4cgdRjkJkJtkJwONsjYTKBxQLe3hAYAKFh4OsDONsmYbaa8Qz0BMCriRfB3wWTuS2zeN8PP/xw8c8xMTGMGzeOvn37UlhYiJubW20dooiI1DCFGiIiUmzLliPcfPMS8vPt9O0by+zZ1xY3XhWRM+MwHK4uQaRBGj9+PNnZ2QA8//zz3HjjjVx55ZU0adKE//3vfy6uTkREKlJQAAkJkHwY8gucOYXF6vxyOz6PiGE4R2pkZEBaGhw4AIGBEF3OTJ0Z+zNI35mONbj8j7ZSU1NZtGgRl19+uQINEZFKyC/M59XVrxIdHM3groPr9OdBCjVERASA/fsziIv7hPT0fK64ojnvvdcHi0WzFIqcqayCLN7a9BZT1k1xdSkiDVKvXr2Kf46NjWX79u2kpqYSFBRUp0+8REQaLQNSUmD3bsjOAavVORKjoj/ZFkvJz4WFkHIU0tLBlAnpfyTzxbAvMBwGjkIHmKD57c1Lbf/EE08we/ZscnJyuPTSS1m2bFkNHpyISMOQX5jPc0ufY/Fvi/F08yTfls+wK4bV2ffX+rRKREQ4diyPuLhPOHgwi3POCebzz/vi5aWrmUTOxN60vTz65aNEvhrJqC9GkZCe4OqSRBqkd999t3ikRpHg4OA6e8IlItKYGQbs3Qtbt0FOrjPM8PCoONA4mZsb+Pg4R3VkZYF7VBMue/4quj3fjcirIml2ZTMCLwostc1jjz3G77//zpdffonFYmHIkCEYhlHdhyYi0mAUBRqf/v4p4QHheFg9mLFmBgt+XFDu38+hQ4diMpkwmUy4ubnRsmVLHn/8cfLy8gDYu3cvw4cPp2XLlnh5edG6dWsmTJhAQUFBtdWsUENEpJHLzS3k5ps/ZevWozRv7svKlQMIDvZydVki9YJhGHy37zv6f9if1jNb88pPr5Cen05scCyPXfaYq8sTaZAefvhhmjZtyuDBg1mxYgX2U3WUFRER1zkeaOzdB2bzqUdnnI67h3OaKhsW9qT44NXMnwvuvYCM3Rkc/fFoqXVDQkJo27Yt1113HR988AErVqzg559/rvrxiIg0QCcGGk39m+Lv6U9T/6anDTbi4uJITExk9+7dTJs2jXnz5jFhwgQAtm/fjsPhYN68efz1119MmzaNuXPn8tRTT1Vb3Qo1REQaMbvdwZ13ruCHHw4SEODBF1/0Jzra39VlidR5+bZ83tr0Fp3f6MzVC69m8bbFOAwHPVv1ZOkdS4kfFc+orqPwtHqecj+eVk9CvENqqWqRhiExMZEPPvgAk8nEbbfdRrNmzXjggQdYt26dq0sTEZETHD7s7KFhtYK7e9X3Z8I5NVVaGuz4GzCZiLkxhkOfHSIvN6/cbRwOZ4+z/Pz8qhcgItLAlBdoFDldsOHh4UF4eDhRUVH07duXnj
17snr1asAZeCxYsIDrr7+eVq1acfPNN/Poo4+yePHiaqtdPTVERBopwzAYPfprPv30b9zdLXz2WV86dgx1dVkidVpSVhJzf53LnF/nkJydDDiDibvPv5vRXUdzXth5xetGB0QTPyqelJwU9u+Hvn3By8vBd9+B2ey8riTEO4TogHI6X4pIhaxWKzfeeCM33ngjOTk5fPrpp7z33nv06NGDyMhIdu3a5eoSRUQavfx82LPX+XN1BBon8vCAlCPO0CT04lD+/uBv3n3zXfr26suGDRvo1q0bQUFB7Nq1i2eeeYbWrVtz2WWXVW8RIiL13KkCjSJN/ZtyOOMwM9bMAKiwx8aWLVtYt24dMTExFT5eeno6wcHB1Va/Qg0RkUbq5ZfX8/rrmzCZYNGi3lx9dZSrSxKps35L/I0Z62fwwZYPKLA75wFt7tecUZeMYsRFI2ji3aTc7aIDookOiCYgB0gEi6+Di5qVhBoiUjXe3t706tWLY8eOsW/fPrZt2+bqkkREBDh4EHJzwNun+vdttTobiO/ZA+ecbyakewhvzHqDQbcMYvHixUyYMIHs7GyaNWtGXFwc48ePx8PDo/oLERGppyoTaBQpL9gAWLZsGb6+vthsNvLz8zGbzcyePbvcfezcuZNZs2YxderUajsGhRoiIo3QggWbGT/+BwBmzLiGAQPaubgikbrH5rDx2fbPmL5+Oj8k/FC8/NLIS3mo60P0O6cfbhY3F1Yo0ngVjdBYtGgRa9asISoqijvuuIOPP/7Y1aWJiDR6BQWQmgpu7mffQ6M8/r06Ff/s6Qk5OZB6FMLjwvnozY9oHdyar7/+uvoeUESkATqTQKPIycEGQI8ePZgzZw7Z2dlMmzYNq9VK//79y2x78OBB4uLiGDhwICNGjKi241CoISLSyKxYsZsRI74EYNy4S3jwwYtcXJFI3XIs9xhv/v4ms36ZRUJ6AgBWs5WBHQYypusYukZ2PeN9FvUxLiyEtWvh6qudc0KLyJkbNGgQy5Ytw9vbm9tuu41nnnlG04qIiNQhGRlgs4FXDQ6OMJnAbILkIxAZUHOPIyLSkJxNoFHkxGDD97AvoT6hxMbGAjB//nwuuOAC3nzzTYYPH168zaFDh+jRoweXX345b7zxRrUei0INEZFG5JdfEhk48HPsdoMhQzrw8stXurokkTojPiWemetnsvCPheQU5gDQxKsJIzuP5J8X/5Pm/s3Par+LF8MDDzh/zs83c+21EBkJM2ZAv37VVb1I42GxWPjwww/p1asXFqWDIiJ1TmYmmM04O3vXIHd3yMtzfomIyKlVJdAoUhRsbEvcRo5fDoZhYDKZMJvNPPXUU4wdO5bBgwfj5eXFwYMH6dGjB507d2bBggXVPgWzJnQWEWkkduxIpU+fxeTk2OjVqwX//W+vchs8iTQmhmGwaucqei/qTfvX2vP6r6+TU5jDeWHn8d+b/sv+h/fz0rUvVSnQGDAAkpJKLz940Ll88eJqOAiRRmbRokX07t1bgYaISB2UkeGcfqo2/kRbrM7RsPkFNf9YIiL1WXUEGkWa+jfFbDaz+8huFvy4AMMwABg4cCAWi4XXXnuNgwcP0r17d6Kjo5k6dSpHjhwhKSmJpJNPjKtAIzVERBqBpKRs4uI+ISUlly5dmvLxxzfj5qYPg6Txyi7I5p0/32Hm+plsS3E2FjZh4qZ2NzGm6xh6tOhR5dDPbocxY+D4e7xSDMM5bcJDD8Ett2gqKpHK6N27N++//z4BAc55Rv71r39x3333ERgYCMDRo0e58sor2bp1qwurFBFp3PbtA4ejdt/b5OfX3mOJiNQ31RloFPFy88JutpdqHm61Whk1ahRTpkzBy8uLnTt3snPnTiIjI0tta5R3gnwWNFJDRKSBy8jIp3fvT9izJ53WrQNZvrwfvr7uri6r1sUnxQOwYvMKF1cirpSQnsATq58galoU9y+/n20p2/Bz92NM1zHseHAHnw36jGtaXlMto5i+/x4OHK
j4fsOA/fud64nI6a1atYr8Ez65evnll0lNTS2+bbPZiI+Pd0VpIiJyXHrG8Ys3avHTJrut9h5LRKQ+qYlAA6DTXZ3odn83PKwezFgzo3jExrhx40hOTuaBBx7AMIxyv6qLRmqIiDRgBQV2+vf/nN9/TyYszJtVqwYQFubj6rJq3QvLXmB70nYAlv65lBeWvcAzNz7j4qqkthiGwbr965i+fjqfbvsUu+Hs2t0qqBWjLxnNsAuH4e9RPW/uwHl14saNMHt25dZPTKy2hxZp0E4+CarOkyIREakedufbLBzmXGojazAsueWOihURaexqKtA40YnNw8E5YqO2pjlXqCEi0kA5HAbDhq3kq6/24ePjxvLl/WjdOtDVZdW6F5a9wLOfPVtqWdFtBRsNW4G9gA//+pDpP09nY+LG4uU9WvTgoUsfok+bPljM1TM3wrFjsHo1LF8OX3wBR45UfttmzaqlBBERERGX8/Xwwmzzw+GRicNS8/NCGRbwNPnh5eZV448lIlJf1EagUcRVwYZCDRGRBuqJJ77lvfe2YbWa+eSTm+nSJdzVJdW68gKNIgo2Gq7k7GTm/TqP1399naQsZyMyD4sHd51/F6O7jub8pudX+TEMAzZvhhUrnF/r1pVcmQjg5wc9e8LatZCWVn5fDZMJIiPhyiurXI5Io2AymcqcINXWlWAiIlI5F7SKoPXv7+MTkIt3LQwQ37cPRv7Diwi/iJp/MBGReqA2A40irgg2FGqIiDRA06b9ytSpvwIwf34vevVq6eKKat+pAo0iCjYalj+S/mDG+hm8t/k98u3OKwOb+TbjgYsf4N7O9xLqE1ql/WdlwZo1JUHGyf0yzjkH+vSB3r3hiivA3R0WL4YBA5wBxonBRtH7u+nT1SRcpLIMw2Do0KF4eHgAkJeXx3333YePj/NTs3x1ihURcbmWLSHIGkF+KvjU8CdONht4ZEOXdjX7OCIi9YUrAo0itR1sKNQQEWlgPvhgO2PHrgXgX/+6krvvPte1BblAZQKNIgo26je7w87SHUuZsX4Ga/euLV5+ccTFPHTpQwzoMAB3i/tZ7dsw4O+/S0KMb7+FgoKS+7284JprnCHGDTc4T+JP1q8ffPwxjBlTOgSJjHQGGv36nVVpIo3SPffcU+r2XXfdVWadIUOG1FY5IiJSDnd36NgRvvkGwsJq9rEyMpyjY9sp1BARAeC1b17jk42f0CywWa0GGkWa+jclKT2JmWtmEh4QTu+OvWvssRRqiIg0IF9/ncCQISsAePDBC3n88UtcXJFrTPhswhmvr1CjfknPS2f+7/OZ9css9qTtAcBisjCgwwDGdB3DpZGXntVVIXl5zvBi+XJnkLFrV+n7W7YsGY3Rvbsz2Didfv3gllvg228dxMdn0K6dP1dfbdYIDZEztGDBAleXICIildCnjzPUKChwhhw1JTUV4uKgefOaewwRkfqkfbP2+Hv5k5GbgZ+HX61P1epwOMjKzyIiMIKWITU7Y4hCDRGRBmLTpmT69l1CYaGDgQPbMm1aj0Y71/hzNz/HhM8rH2xMvGViDVYj1envo38zc/1MFv6xkKyCLACCPIMY2Xkk/7z4n0QFRJ3xPvftKxmNsWYN5OaW3OfmBldd5Qwx+vSBtm1Lpo46ExaLMwTp0CGPsDB/zOYz34eIiIhIfdC9u3NU6uHDEHXmb80qJTsbrFbo27dm9i8iUh/17tibAlsBLy5/kYNpB2ke2LzWPhdyOBzsTtlNRGAEr972Kuc0O6dGH0+hhohIA7B3bzo33PAJmZkFXH11JG+/3RuLpXF+arojaQff7vi20us/f8vzGqVRxxmGwZo9a5j+83RW/L0CA2dzig6hHRjTdQx3nX8X3m7eld5fYSH8+KMzxFi+HLZuLX1/8+bOEKN3b7j2Wue0BiIiIiJSOd7ecM898NJLkJPjvF2dHA44eBC6dXP2MRMRkRJ9L+wLUKvBxsmBxgVRF9To44FCDRGReu/o0Vzi4j4hKS
mbjh1DWLKkL56eje/Pe35hPv/64l+8/MXLFNgK8HTzpFtsN77a9lWF2yjQqNtyCnNY9OciZqyfwV9H/ipe3qdNH8Z0HUPPVj0r/eYsMRFWrnSGGKtXO+dgLmI2w+WXlwQZ559/dqMxRERERMTp9tudU3p++y3ExlKto1QTEyEkBMaNc47WEBGR0moz2HBFoAEKNURE6rWcnEJuvHEx8fGpREX58cUX/QkM9HR1WbVubfxaRr4zkh2HdwDQ69xevH7n67QKbVVh03AFGnXXgYwDvL7hdeZtnEdqbioAPm4+DOs0jAe7PkjbJm1Puw+7HX75pWRaqd9+K31/SIizuXfv3nD99RAcXBNHIiIiItI4Wa3O0GH3btizx9mXrDqCjZQU56jbRx6BNm2qvj8RkYaqNoINVwUaoFBDRKTestkc3H77Un7+OZGgIE9WrRpA8+aNa56clMwUHv3oUd766S0Amvo3ZcagGdzW5bbi/1kXBRcnBhsKNOqmnw/8zPSfp/Px1o+xG3YAWgS24MFLHuQfF/6DQM/AU25/9CisWuUMMVaudN4+UZcuJU2+O3dGjbpFREREalCrVjB5Mjz6KOzaBS1aOPuVnQ3DgKQkyM+HkSNh8OBqLVVEpEGqyWDDlYEGKNQQEamXDMPg/vtXs2zZbjw9rSxdeivnnNPE1WXVGsMwWLhuIY9+9Cip2amYTCZGXjWSSf0mEegdWGb9Z258BsMweO7z53ju5ucUaNQhBfYCPt76MTPWz+CXg78UL7865mrGdB3Dze1uxmIuP30wDNi0qWQ0xs8/O+dYLhIQAL16OUOMuDho2rSGD0ZERERESuncGWbMgPHjIT7eOVo2KOjMpvrMy4P9+53v7R59FIYM0VShIiKVVRPBhqsDDVCoISJSL02cuI7//nczZrOJ99/vwxVXNHd1SbVmW+I27nv3Pr7b8R0A50eez7y75nFp60tPud34PuO59+J7CQsLq40y5TRSclKY9+s8Xv/1dQ5lHgLA3eLO4I6DGdN1DJ3CO5W7XUYGfPWVszfGF18451Q+UceOJb0xLrvs7K8GFBEREZHqcf758M47MHs2LF4Mf//tDDaCgyseOWsYkJkJR444A4wuXZzTWZ13Xu3WLiLSEFRnsFEXAg1QqCEiUu/Mm/cnEyf+BMDrr/ekb9/GMZlsXmEeL694mX998S8K7YV4uXsx8eaJPHTtQ7hZ9cl1fbH58GZmrJ/Bos2LyLPlARDuG879Xe5nZOeRNPUtPZzCMGDbtpLRGN9/DzZbyf3e3tCzZ0mQERVVm0cjIiIiIpUREABPPeXsafbZZ7B6tbPXBjiDjaILUex2Z88MAC8vuOQS6NfP2QPNs/G1DhQRqTbVEWzUlUADFGqIiNQrK1fuZ9Qo5wiFZ565lJEjXfc/kNr01davuH/R/exM3glA7469eW3wa7QIaeHawqRSHIaD5TuWM339dL7e83Xx8s7NOjOm6xhuO/c2PKwexctzcuCbb0qCjL17S++vTZuS3hhXXQUeHoiIiIhIHWcywUUXOb8eeAA2boSdO+Gvv+DYMWeg4e0N7dtDbCx06ADnnquppkREqktVgo26FGiAQg0RkXpj3bpD3H//DzgcBsOHd2TixCtcXVKNS85I5pGPHuHdn98FoFlAM2beMZP+F/WvtuZWUnMy8jNYuGkhM9fPZNexXQCYTWb6ndOPh7o+xOVRlxe/jrt3OwOM5cudgUZ+fsl+PDyge3dniHHDDc5QQ0RERETqr7Aw5/s6ERGpXWcTbNS1QAMUaoiI1Avbth3l5ps/JS/PTp8+rZg797oG/aG+w+Fg/o/zefzjxzmWcwyTycQD3R/gxb4vEuAd4Ory5DR2pe5i1i+zmP/7fDILMgEI9AxkxEUjeODiB4gJjCE/H9asKRmNER9feh/R0SVTSl1zDfj4uOBARERERERERBqYMwk26mKgAQo1RETqvEOHsoiL+5hjx/Lp3DmEDz7og9VqdnVZNeavg3
8x8t2R/LjzRwA6RXVi3t3zuKTlJS6uTE7FMAy+2fsNM9bPYGn8UgwMANqHtGf0JaMZcsEQjiX78MVHzhDjq68gK6tke4sFunUrmVaqQwdNNSAiIiIiIiJSEyoTbNTVQAMUaoiI1GlpaXnExX1MQkImbdsG8dZb3fH2bphNsXMLcnlx+YtMWTUFm92Gt7s3L9zyAqOvHY3Vov9d1VW5hbm8t/k9ZqyfwebkzcXL42LjeLDLQ/gmX8fKT81cMQL++KP0tk2blozGuO46ZwNJEREREREREal5pwo26nKgAQo1RETqrPx8G7fe+hmbN6cQHu7DF1/0w9s7//Qb1kOrtqzin+/9k91HdgNw0wU3MfuO2UQ3iXZxZVKRQ5mHeH3D68zbOI+UnBQAvN28ua3tPbRLG82mpe25cxSkpZVsYzJB164lQcaFF4K54Q46EhEREREREanTygs2DMOo04EGKNQQEamTHA6DIUO+YO3a/fj5ufPFF/1p0SKA5ORkV5dWrZLSkxj74Vje/+V9AJoHNmfWHbPoe2HfBt0zpD77Pfl33vnxHT7a+hE2hw2AcM9oOuY9yNHlw3lrfBCGUbJ+UBDExTlDjLg4CAlxUeEiIiIiIiIiUsaJwcaBYwfIt+XX6UADFGqIiNQ5hmHw8MPf8OGH8bi5mVmypC+dOoXhcDhcXVq1cTgcvPHdG4xbPI703HTMJjMPXvMgL/R9AT9PP1eXJycptBeyeNtiZqyfwU8HfipeHpbXjby1D5H0yy0kOUreUnTqVNIb45JLwKp3GyIiIiIiIiJ1VlGw8dLyl2ji26ROBxqgUENEpM759783MHPmbwC8/XZvrrmmYU3BtPnAZka+O5Kfdjk/HL8o+iLeGPIGnWM6u7gyOdnRnKP857f/8NqG1ziQcQAAk8MNtgzC+GkMyYnO18zPz9kTo2g0RvPmrqxaRERERERERM5U3wv7EuIbQqhfKO3C27m6nFNSqCEiUoe8885fPPHEdwC8+mp3Bg1q7+KKqk92fjbPL32eV796FZvdhq+HLy/2fZEHejygRuB1zF/Jf/HKjzNZtPkdCoxc58KsMPj1foxf74OscM45B3oPdgYZ3bqBu7traxYRERERERGRqunWppurS6gUfYokIlJHfPnlXv7xj1UAPPJIFx5+uIuLK6o+Kzav4IFFD7D36F4Abr3wVmYOmklkcKRrC5NiDsPBf9d+wSvrZrDDtrrkjsRO8PNDeOwcxDVXuXPV0xkMHOigdWt1+BYRERERERGR2qdQQ0SkDti4MYl+/T7DZnMwePA5TJlytatLqhaH0g7x0AcP8dHGjwCICo5i9h2zubnTzS6uTADy8mDl11lMX7uQdfaZFPr/7bzDYYbtfQlPGEP/LlfSZ5KJ7t3Bw8NBcnIuYWHqeyIiIiIiIiIirqFQQ0TExXbtSqN378VkZxfSs2cMCxbEYTabXF1WldgdduZ+O5enPn2KjNwMzCYzD/V8iIk3T8TX09fV5TVq+/bBihXw8Vd7+C5/NraOb4JPuvPOvACiU/6Pu9uN4q6JLWjXDkwn/FNsQL3qRURERERERKSeUqghIuJCycnZxMV9THJyDp06hfHJJzfj7m5xdVlVsilhEyPfHckve34B4OIWFzPv7nlcGH2hiytrnAoL4ccfnUHG8hUGW7O+g0tnwHmfgdmZUgQUtuW2FqOZeOs9NGui0ElERERERERE6i6FGiIiLpKVVUCfPovZuTONFi38+eKL/vj7e7i6rLOWlZfFc0ufY/pX07E77Ph5+vHyrS9zf/f7sZjrd1BT3yQmwsqVziDjyy8hIycPzvsAus2AZpuK17ss7HqevnYMN7SJw2xSjwwRERERERERqfsUaoiIuEBhoZ2BAz/n118PExLixapVAwgP93F1WWdt6R9LGfXeKBJSEwAY0HkAMwbNICIwwsWVNQ52O2zYcHw0xnL47bfjd/gmQZc5mC6Zg+F9BAAvqxdDLhjC6K6j6RDawXVFi4
iIiIiIiIicBYUaIiK1zDAMRoz4kpUr9+LtbWXZsn60bRvs6rLOysFjBxn9wWgW/7YYgJgmMbw2+DX6nN/HxZU1fEePwqpVziBj5Urn7WLNNtKkzwyONf8Ah6kQA4j0j2TUxaMY0XkEwV7189+biIiIiIiIiIhCDRGRWvb00z/w1lt/YbGY+PDDm+jatZmrSzpjdoed1755jfFLxpOZl4nFbGHsdWOZcNMEfDzq74iTuswwYNMmZ4ixYgX8/HPpxt3+gTbO7b+Eo7Ez2JH/A0UZx+VRlzOm6xhubX8rbhY3V5QuIiIiIlJlQ4cO5a233gLAarUSGRnJwIEDef755/H09GTv3r288MILfP311yQlJREREcFdd93F008/jbu7u4urFxGR6qRQQ0SkFs2e/RuTJq0H4I03rqdPn9YurujM/bbvN0a+M5Jf9/0KQNeWXZl39zwuiLrAxZU1PBkZ8NVXJUFGYmLp+zt2hGt6HyP/vP+y4shsfspIgHywmq3cfu7tjOk6houbX+ya4kVEREREqllcXBwLFiygsLCQjRs3cs8992AymZg8eTLbt2/H4XAwb948YmNj2bJlCyNGjCA7O5upU6e6unQREalGCjVERGrJxx/HM3r01wC88MIV/OMfHV1c0ZnJzMvk2c+eZeaamTgMBwFeAfyr37+496p7MZvVZLo6GAZs3+7si7FiBXz/PdhsJfd7e0PPntC7N7S5bDsf75/Jf/54i5xdOQCEeIdwX+f7uP/i+4nwUz8TEREREWlYPDw8CA8PByAqKoqePXuyevVqJk+eTFxcHHFxccXrtmrVivj4eObMmaNQQ0SkgVGoISJSC777bj933bUCw4D77ruAp5++1NUlnZElvy/hwfcf5MCxAwDcfvHtTLttGs0C69/UWXVNTg58803JaIy9e0vf36aNM8To0weu6Obgu4NfMmP9DFZ+urJ4nfObns+YrmMY3HEwnlbP2j0AEREREZEzVFAAGzY4L+jZvt05Itluh4AAaN/e+R740kuhSZOK97FlyxbWrVtHTExMheukp6cTHKx+ciIiDY1CDRGRGrZ58xFuvnkJ+fl2+vaNZfbsazGZTK4uq1L2p+7nwfcf5LNNnwHQMqQlr9/5OnHnxZ1mSzmV3btLQoxvvoG8vJL73N2he3dniHHDDc4TuuyCbN7+420efHMG8UfjATBh4uZ2NzOm6xi6t+heb/5NiYiIiEjjlZ0Nn3zi/Nq9u2RUsrs7mEzO299+6/y5SROIi4M77oBWrZzrLVu2DF9fX2w2G/n5+ZjNZmbPnl3uY+3cuZNZs2ZplIaISAOkUENEpAbt35/BDTd8Qnp6Pldc0Zz33uuDxVL3p2qy2W3M+noWz3z2DNn52VgtVh67/jHG9xmPt4e3q8urdwoKnFNJFU0rFR9f+v6oKGeI0bs3XHMN+Bzvtb4vbR+Pr36N//z2H9Ly0gDwc/dj+IXDGXXJKFoH17+eLCIiIiLSOG3cCFOmwJ9/gpsbNG0KXl7lr2u3Q0oKvP02fPEF3HcfOBzQo0cP5syZQ3Z2NtOmTcNqtdK/f/8y2x88eJC4uDgGDhzIiBEjavjIRESktinUEBGpIampucTFfcLBg1mcc04wn3/eFy8vN1eXdVob9mxg5Lsj+T3hdwAub3058+6ex3nNz3NxZfXLwYMlozG++gqyskrus1igW7eSaaU6dHBejQZgGAY/JPzIjPUzWLxtMQ7DAUDroNaM6TqGoZ2G4ufh54IjEhERERE5O0uWwKRJkJ4O0dHg4XHq9S0WZ+gRGuqcmmrSJOf75ZYtfYiNjQVg/vz5XHDBBbz55psMHz68eNtDhw7Ro0cPLr/8ct54440aPCoREXEVhRoiIjUgN7eQW25ZwtatR2ne3JeVKwcQHFzBZUh1REZuBuOXjGf2N7MxDINA70Cm9J/C8G7D1Qi8Emw2+PnnkiDjjz9K39+0qTPE6N3b2ew7MLD0/fm2fD7860Omr5/Ob4m/FS
+/tuW1PHTpQ/Ru0xuzSa+DiIiIiNQvy5bBiy86R1/ExpZczFMZZjM0bw6ZmfD775CfD4WFzpEeZrOZp556irFjxzJ48GC8vLw4ePAgPXr0oHPnzixYsEDnMSIiDZRCDRGRama3Oxg8eDk//HCQgAAPVq7sT3S0v6vLqpBhGCz+bTGjPxjNobRDANzZ9U5eue0Vmvo3dXF1dduRI7BypXNaqVWrIC2t5D6TCbp2LQkyLrzQeVJ2suTsZOb+OpfXN7zO4ezDAHhaPbmr412M7jqajk071s7BiIiIiIhUs927nVNOFRQ4R2icbRs4Pz/nVFWHD8MHH8DddzuXDxw4kMcee4zXXnuNO+64g+7duxMTE8PUqVM5cuRI8fbh4eHVcDQiIlJXKNQQEalGhmHw4INrWLJkJ+7uFj77rC/nnRfq6rIqtO/oPka9N4plfy4DoHVoa+bcNYfrOlzn4srqJofDORdw0WiMDRvAMEruDwpyNjPs3Rt69XIOl6/I74m/M2P9DN7f8j4F9gIAIvwieODiB7i3872EeIfU8NGIiIiIiNQcmw3+9S9ITobWrc8+0CjiHJ0B8+bBZZc5R31YrVZGjRrFlClT8PLyYufOnezcuZPIyMhS2xonvmkXEZF6T6GGiEg1evnl9cyZ8wcmEyxa1Jurr45ydUnlKrQVMmPNDCZ8PoGcghzcLG48EfcET/V+Ci/3uj1NVm1LS4Mvv3SGGF984TwpO1GnTiW9MS65BKyn+D+r3WHn8/jPmb5+Ot/t+654edfmXRnTdQwDOgzAzVL3+66IiIiIiJzOTz85v5o1K3/E8tlIT/+MH3800a6dlejoSAYOHMjzzz/PuHHj2Lt3L7/99htff/01SUlJREREcNddd/H0009Xz4OLiEidoVBDRKSazJ+/mfHjfwBg5sxrGDCgnYsrKt/Pu35m5Lsj+fPAnwBc2eZK5t09j3OanePiymqO3Q7ffgvx8Z60awdXX+1sPlgew4AtW0pGY/z4o3P7In5+cN11ziAjLs45x+/ppOWlMf/3+cz6ZRZ70/YCYDVbGdBhAGO6juHSyEurfpAiIiIiInXI5587+1/4+lbfPkND42jefAEWSyEPPriR0aPvwWQyMXnyZLZv347D4WDevHnExsayZcsWRowYQXZ2NlOnTq2+IkRExOUadMek1157jRYtWuDp6UnXrl355ZdfKrXdBx98gMlkom/fvqWWDx06FJPJVOorLi6uBioXkfpm+fJd3HvvlwCMG3cJo0Zd5OKKykrPSeeBRQ9w+eTL+fPAnwT7BPPmPW+y9tG1DTrQWLwYWrSAa681889/BnLttWZatHAuL5KV5TzpGjnSOdfv+efDuHHw3XfOQOOcc+CRR2DNGkhJgU8+geHDTx9o7Di6gwdXPEjkq5E88uUj7E3bSxOvJjzV7Sn2jtnL+/3fV6AhIuJiOmcQEal+aWnwww/O6Vmrk9nsQbNm4eTkRBEY2JeePXuyevVqAOLi4liwYAHXX389rVq14uabb+bRRx9l8Ylv/EVEpEFosCM1/ve//zF27Fjmzp1L165dmT59Or169SI+Pp6wsLAKt9u7dy+PPvooV155Zbn3F/1PsoiHh0e11y4i9cv69YncdttS7HaDIUM68PLL5f/9cBXDMPjo148Y878xJKUnATDksiFMHTiVUL+62++jOixeDAMGlO57AXDwoHP50KFw4IBzFEdBQcn9np5wzTUlTb5btqz8YxqGwVe7v2L6+ums+HtF8fJzQ8/loUsf4s6Od+Llpim+RETqAp0ziIjUjL//hmO2Q4Q0yyXbvXr2abNkYrdnk+e9i3wf+OK7faxbt46YmJgKt0lPTyc4OLh6ChARkTqjwYYar776KiNGjGDYsGEAzJ07l+XLlzN//nzGjRtX7jZ2u50777yTiRMn8v3335OWllZmHQ8PD8LDwytVQ35+Pvn5+cW3MzIyAHA4HDgcjjM8oqpzOBwYhuGSx24o9BxWTUN8/n
bsOEafPp+Qk2OjV68WvPHGdRiGUSON6M7m+duTsocH3nuAVX+tAqBNWBtev/N1rml/TfE+Gyq7HcaMMR0PNEp3JSx6eU74vImWLQ1694YbbjDo3h28TsgdKvM05RTm8O7md5m1fhZbU7bifFQTvdv0ZkzXMVzT4hpMx7sj1tfnvSH+DtcmPX9Vo+evaurC81cXX7u6cM4Ade+8oTGrC78rUrv0mteMjTsSOdD1Tg57Z1bbPrO376VwayrffN4WHAbzfjUwm83MnDmz3Ndv586dzJo1iylTppS6X69546LXu/HRa16/VfZ1a5ChRkFBARs3buTJJ58sXmY2m+nZsyc//fRThds9//zzhIWFMXz4cL7//vty11m7di1hYWEEBQVxzTXX8OKLL9KkSZNy1500aRITJ04ss/zIkSPk5eWd4VFVncPhID09HcNw/o9fzpyew6ppaM9fcnIuN964kqNH87jggmBee60rx44drbHHO5Pnr9BeyLwf5/HK2lfIK8zD3eLOg1c9yKgrR+Hp5knyyd2uG6B169w5cOD0V2Xdc082w4fnEBtr53jmQGam86syDmYdZOFfC1m0bRHH8o8B4OPmw6B2gxh+3nBaBjiHeRw5cuSsjqMuaWi/w7VNz1/V6Pmrmrrw/GVW9g9rLakr5wxQ984bGrO68LsitUuvec2w25I5PywcD7coTI7qGaqxzT2D/FaetL35cvLzcklc/xeXterElVdeWeb8JjExkX79+nHjjTdyyy23lLpfr3njote78dFrXr9V9pyhQYYaKSkp2O12mjZtWmp506ZN2b59e7nb/PDDD7z55pts2rSpwv3GxcXRr18/WrZsya5du3jqqae44YYb+Omnn7CU03H2ySefZOzYscW3MzIyiIqKIjQ0FH9//7M7uCpwOByYTCZCQ0P1S32W9BxWTUN6/jIy8hk69Ev278+mdesAVq68jbAw7xp9zMo+f+t2reP+d+9ny6EtAHRv253X73ydduF1s3F5TcnNrdx6113nxRVXnNl0UIZh8PPBn5mxfgaLty3Gbjg7ibcMbMmDlzzI0AuGEuAZcKYl13kN6XfYFfT8VY2ev6qpC8+fp6enSx63InXlnAHq3nlDY1YXflekduk1rxk5hVlsSv0bH0sgVsOnWvaZlp+HgZ195JNjycXSM4itS7eydOlShg8fXrzeoUOHuP322+nWrRsLFiwo87rqNW9c9Ho3PnrN67fKnjM0yFDjTGVmZnL33Xfzn//8h5CQkArXGzRoUPHPHTt25Pzzz6d169asXbuWa6+9tsz6Hh4e5c6fazabXfZLZTKZXPr4DYGew6ppCM9fQYGdAQOW8vvvyYSFebNq1UDCw31r5bFP9fwdyz7Gk4ufZN538wBo4tuEVwa+wpDLhhRPe9SYnK6Jd8l6Zir7z7HAXsDHWz9m+s/T2XBoQ/HyHi16MKbrGG5seyMWc/kfWDUUDeF32JX0/FWNnr+qcfXzV99ft5o6Z4C6ed7QmLn6d0Vqn17z6ucfYMJhGMVf1cJwTiPrMAxsdgN3d7j/4ft59tlnueuuu/Dy8uLgwYNcc801dO7cmYULF1YYJus1b1z0ejc+es3rr8q+Zg0y1AgJCcFisXD48OFSyw8fPlzu3La7du1i79693HTTTcXLiubvslqtxMfH07p16zLbtWrVipCQEHbu3FnhCYqINCwOh8GwYStZsyYBHx83VqzoR+vWgS6tyTAMPvjlAx7630MkZzqHVQ+7YhhT+k8hxK/iD10auiuvhMhIZ1Pw8s6jTCbn/RX0eC3lSPYR5m2cx+sbXicxKxEAD4sHd3a8k9FdR3NB+AXVXL2IiNQ0nTOIiNScmBgwrzvem64Grq8yDPDyhBtuuYGpE6fy2muvcccdd9C9e3diYmKYOnVqqelfz6TPkYiI1H0NMtRwd3enc+fOrFmzhr59+wLOE441a9YwatSoMuu3b9+ezZs3l1o2fvx4MjMzmTFjBlFRUeU+zo
EDBzh69CjNmjWr9mMQkbrpiSe+5b33tmG1mlm8+BY6d3btm+Ndybu4f9H9rN66GoB24e2Ye9dcurfr7tK66gKLBWbMgAEDnAHGicFG0cCV6dOd61Xkz8N/MuPnGSzavIh8u7OBa7hvOA9c/AAjO48k1Ce05g5ARERqlM4ZRERqTnSU8322LQ/c3ap33w6H8/28h4czVB41ahRTpkzBy8uLnTt3snPnTiIjI0ttY1TXaBEREakTGmSoATB27FjuueceunTpwiWXXML06dPJzs5m2LBhAAwZMoTmzZszadIkPD09Oe+880ptHxgYCFC8PCsri4kTJ9K/f3/Cw8PZtWsXjz/+OLGxsfTq1atWj01EXGPatF+ZOvVXAObP78X117dwWS0FtgKmrprKC8tfIK8wDw+rB0/3eZrHez2Oh1vZ6Ssaq3794OOPYcwYOHCgZHlkpDPQ6Nev7DZ2h53lfy9n+s/T+WbvN8XLu0R04aGuDzHw3IG4W6qn2aGIiLiWzhlERGqGhwcEBEBqNlBNoYZ/r04A5OWCpzf4HJ8BeNy4cYwbNw6ABx54oHoeTERE6rQGG2rcfvvtHDlyhGeffZakpCQ6derEypUrixsBJiQknNG8ahaLhT///JO33nqLtLQ0IiIiuP7663nhhRfKnf9WRBqWDz7YztixawGYPPkq7r77XJfV8v3f3/PPRf9ka+JWAK4951rm3DmHNk3buKymuiohPYEWl6Ww+CcYMcLgjz9MDB7s4KGHzFgskJAeQnRANAAZ+RnM/30+s36Zxe5juwGwmCz079CfMV3HcFnkZY2yN4mISEOmcwYRkZrj7w/pyVBQAO7VdE2QwwF2BzQLBbPemouINFoNNtQAGDVqVLlDxwHWrl17ym0XLlxY6raXlxerVq2qpspEpD75+usEhgxZAcDo0Rfx2GMXu6SO1OxUHlnyCO9tfA+AUL9QXr3tVe7seqc+bC9HQnoC7Wa3I8+W51xwqfPrPeC9N52LPK2erLprFZ9s/YQFmxaQWZAJQJBnEPd2vpcHLn6AqIDypxMREZGGQecMIiI1w9MTmoZB0n6wWqE6+vXm5YKvr3O/mbaq709EROqnBh1qiIhU1aZNyfTtu4TCQgcDB7Zl2rQetR4gGIbBovWLGPvhWI5kOpvd/d+V/8fk/pMJ9gmu1Vrqk5SclJJAowJ5tjyuXnh18e1zQs5hTNcx3HX+Xfi4+9R0iSIiIiIiDVrz5pCdBpmZ4O1d0tvubOTlgcUKsa2d31GoISLSaCnUEBGpwN696dxwwydkZhbQvXsUb7/dG3Mtj3HekbSDf773T9ZsWwNA29C2/Gfof7iq7VW1WkdD17tNb8Z0HcN1ra7TqBcRERERkWpiscA57WHLFsjJAS+vsxuxkXf8WqVWrSAoGLILqrdOERGpXxRqiIiUIyUlh7i4T0hKyqZjxxCWLOmLp2ft/cnML8xnyqopvLT8JfJt+Xi6eTK+93ju7nQ3kRGRtVZHY/DJbZ/Q75xyOoaLiIiIiEiV+fjCeefBtm2QmQVubpXvseGwQ24euLs5A41mzWq2VhERqR8UaoiInCQnp5CbbvqU+PhUoqP9+OKL/gQE1F5zz2/jv2XkuyOJT4oH4PoO1/P6na/TMqQlycnJtVZHY9EisIWrSxARERERadB8fKHThbB3LyQmQnY2WC3OgMNsKb2uwwF2u7PBuMkEQUHOKad8/VxSuoiI1EEKNURETmCzObj99qX8/HMiQUGerFw5gObNa+fdc0pmCo99/BgL1y0EoKl/U6bdNo1BlwzCZDLhcDhqpQ4REREREZHqZrVCbCyEN4XDh+FwsjO4sDugaAJYA2eQYbVAaAiEh0Nwk+ppMi4iIg2HQg0RkeMMw+D++1ezbNluPD2tLFt2K+ec06RWHvftn97mkY8e4WjWUQBGXjWSSf0mEeQTVOOPLyIiIiIiUlt8/ZxfLVo6R2zkZENBIWA4e3B4e4O3D3jU3mB5ERGpZxRqiI
gc99xz6/jvfzdjNpv44IMbufzy5jX+mNsTt3P/ovtZG78WgPOan8e8u+ZxeezlNf7YIiIiIiIirmKxgL+/80tERORMKNQQEQHmzfuD55//CYDXX+/JLbfE1ujj5RXmMWnFJP618l8U2Arwcvdiwo0TGHvdWNysbjX62I1FiHcInlZP8mx5Fa7jafUkxDukFqsSEREREREREZGqUKghIo3ekiV/889/fgXAs89exsiRF9To43297Wvue/c+/k7+G4AbzruB1wa/RsvQljX6uI1NdEA08aPiSclJAeD6Vx7hqN9a4ryf5KW7BgDO4CM6INqVZYqIiIiIiIiIyBlQqCEijdqPPx7kjjuW43AY/N//deS552pu2qcjmUd45MNHeOfndwAIDwhn5qCZDOg8AJPJdJqt5WxEB0QXhxZu9kAAgi3RXNTsIhdWJSIiIiIiIiIiZ0uhhog0Wtu2HeWmmz4lL8/GjTe2Ys6c62okXHA4HCz4cQGPf/I4qdmpmEwm7r/6fl6+9WUCvAOq/fFEREREREREREQaKoUaItIoHTyYSa9eH3PsWB6XXtqM//3vJqxWc7U/ztZDW7nv3fv4/u/vATg/8nzeuPsNurbqWu2PJSIiIiIiIiIi0tAp1BCRRictLY8bbviE/fszads2iKVLb8Xbu3qbc+cW5PLS8peYsmoKhfZCvN29mXjzRMZcO0aNwEVERERERERERM6SQg0RaVTy8mz07buEzZtTCA/3YdWqAYSEeFfrY6zeupr7372fXUd2AXDj+Tcye/BsYprEVOvjiIiIiIiIiIiINDYKNUSk0XA4DIYMWcG33x7Az8+dL77oT4sW1dfT4nDGYcb+byzv/fIeABGBEcy6Yxa3XnirGoGLiIiIiEijk2vLrVf7FRGR+kGhhog0CoZh8PDD3/DRRztwczOzZElfOnUKq5Z9OxwO/vvDf3nikydIy0nDZDIxqscoXuz7Iv5e/tXyGCIiIiIiIvWFl5sXfh5+ZOZnkm/Lr5HH8PPww8vNq0b2LSIidZtCDRFpFP797w3MnPkbAG+/3Ztrromulv1uObiFke+MZN2udQBcGH0h8+6ax8UtL66W/YuIiIiIiNQ3EX4RvN//fXILa25EhZebFxF+ETW2fxERqbsUaohIg/fOO3/xxBPfAfDqq90ZNKh9lfeZk5/DC8tfYOqXU7HZbfh4+PDCLS/w4DUPYrXoT6uIiIiIiDRuChxERKSm6JM3EWnQVq3awz/+sQqARx7pwsMPd6nyPlduWck/F/2TPSl7AOjbqS8z75hJVHBUlfctIiIiIiIiIiIiFVOoISIN1q+/JtG//+fYbA4GDz6HKVOurtL+EtMSefjDh/nfhv8BEBkUyaw7ZtH3wr7VUK2IiIiIiIiIiIicjkINEWmQdu1Ko0+fxWRnF9KzZwwLFsRhNpvOal8Oh4N5381j3OJxZORmYDaZGXPtGCbeMhE/T79qrlxEREREREREREQqolBDRBqc5ORsevX6mOTkHDp1CuOTT27G3d1yVvv688CfjHxnJD/v/hmALjFdmHf3PC6Kuag6SxYREREREREREZFKUKghIg1KVlYBffosZteuNFq2DOCLL/rj7+9xxvvJzs9m4tKJvLr6VewOO36efrzU9yX+2eOfWMxnF5CIiIiIiIiIiIhI1SjUEJEGo7DQzoABn/Prr4cJCfFi5cr+hIf7nPF+lv+5nAfee4B9R/cB0P+i/swYNIPmQc2ru2QRERERERERERE5Awo1RKRBMAyD//u/VaxatRdvbyvLlvWjbdvgM9rHobRDjPlgDB9v/BiA6OBoZg+ezU0X3FQTJYuIiIiIiIiIiMgZUqghIg3C00//wNtvb8ViMfHRRzfTtWuzSm9rd9iZs3YOT336FJl5mVjMFh7u+TATbpqAr6dvDVYtIiIiIiIiIiIiZ0KhhojUe7Nn/8akSesB+M9/rqd371aV3vb3hN8Z+c5INuzdAEDXll2Zd/c8Loi6oE
ZqFRERERERERERkbOnUENE6rWPP45n9OivAXjxxW4MG9axUttl5WXx7GfPMmPNDByGA38vfybdOomRV49UI3AREREREREREZE6SqGGiNRb3367nzvvXIFhwD//2Ymnnupaqe0+3/Q5o94fxf7U/QDc1uU2pt0+jYjAiJosV0RERERERERERKpIoYaI1EubNx/hlluWUFBg59Zb2zBz5jWYTKZTbrM/dT+j3x/Nkk1LAGjRpAWv3/k6N3S8oRYqFhERERERERERkapSqCEi9c7+/RnccMMnpKfn061bcxYt6o3FYq5wfZvdxuxvZvPMkmfIys/CarHyyHWP8OyNz+Lt4V2LlYuIiIiIiIiIiEhVKNQQkXolNTWXXr0+5uDBLDp0aMLnn9+Kl5dbhetv3LeRe9++l98SfgPgstaXMe+ueXSMrFzvDREREREREREREak7Kr60WUSkjsnNLeTmm5ewbVsqzZv7snJlf4KCPMtdNyM3gzEfjOGSly7ht4TfCPAKYO5dc/nh8R8UaIiIiIiIiNQzQ4cOxWQyYTKZcHNzo2XLljz++OPk5eUBsHfvXoYPH07Lli3x8vKidevWTJgwgYKCAhdXLiIi1U0jNUSkXrDbHQwevJwffzxIQIAHK1f2JyrKv8x6hmGw5PclPPj+gxxMOwjAHZfcwau3vUp4QHhtly0iIiIiIiLVJC4ujgULFlBYWMjGjRu55557MJlMTJ48me3bt+NwOJg3bx6xsbFs2bKFESNGkJ2dzdSpU11duoiIVCOFGiJS5xmGwahRa1iyZCceHhY+/7wv550XWma9hKMJjHp/FEv/WApAq9BWzLlzDtefe31tlywiIiIiIiLVzMPDg/Bw58VqUVFR9OzZk9WrVzN58mTi4uKIi4srXrdVq1bEx8czZ84chRoiIg2MQg0RqfNeeuln5s79A5MJFi3qw1VXRZW632a3MWPNDCZ8PoHs/GysFiuP93qc8X3G4+Xu5aKqRUREREREGq9DmYfILcyttv1l5meSXZDNrtRdAOz7ex/r1q0jJiamwm3S09MJDg6uthpERMQ5HeBbb70FgNVqJTIykoEDB/L888/j6emcJv6ll15i+fLlbNq0CXd3d9LS0qq1BoUaIlKnzZ+/mWee+RGAmTOvoX//tqXu/2XPL4x8ZySb9m8CoFtsN+beNZdzm59b26WKiIiIiIgIzkDjjk/uIDM/s9r2uW/vPlJ/SaVtRFsMu4FhMzCbzcyePbvc9Xfu3MmsWbM0SkNEpJIMw+DzPz4n3D+crq26VrjeDz/8UGqb7OxsZsyYgd1u55VXXgFgzZo1HDp0iMLCQnJzqy/gLqJQQ0TqrOXLd3HvvV8C8OSTXRk16qLi+9Jz0nl6ydO8vvZ1DMMgyDuIfw/4N8OuGIbZbHZVySIiIiIiIo1ebmEumfmZeFg98LJWz+j5REsiQecE0XZIW3Jycji46iDXxl5L//79y6x78OBB4uLiGDhwICNGjKiWxxcRacgMw2D+D/OZ+fVMfD18mdRvEle1varC9cPCwvjjjz+KexwNHDiQd999tzjUuPLKK7nppptYtWoVq1atqvZ6FWqISJ20fn0iAwcuxW43uOeec3nppW6A84/sJ799wuj3R5OYngjAXZfexSsDXyHMP8yVJYuIiIiIiMgJvKxe+Lj7VMu+rBYrhqdBaFQo2QXZWIZY+GPWH7z55psMHz68eL1Dhw7Ro0cPLr/8ct54441qeWwRkYasONBYMxN3qztpuWk8ufjJUwYbZrO5uMdReno6FouFrKys4vsnTpwIwF9//VUjNetyZhGpc3bsSKVPn8Xk5tqIi2vBf/5zPSaTib0pe7lx1o0MnDuQxPREYsNiWf3wat4Z/o4CDRERERERkUbEZDZx/8P3M378+OKpTQ4ePEj37t3p3LkzCxYs0Ch+EZHTODHQ8HDzIDwgnJZNWhYHG9/t+K7c7Q4fPoyvry+enp507NiR/Px8mjdvXmt166+7iN
QpiYlZ9Or1MUeP5tKlS1M++uhmMDmYsnIKHSZ0YMXmFbhZ3Hj2xmfZ/Nxmenbo6eqSRURERERExAVuuOUGLBYLr732WnGgER0dzdSpUzly5AhJSUkkJSW5ukwRkTrp5ECjqX9TAEwm02mDDcMwcDgcOByO4mWTJk2qtdo1/ZSI1BkZGfn07r2YvXsziI0NZPnyfmw+vJGR74xk88HNAFzV9irm3jWXc5qd4+JqRURERERExJWsViujRo1iypQpeHl5sXPnTnbu3ElkZGSp9QzDcFGFIiJ1U0WBhsNhYDabioONPUf3lDsVVWhoKOvWrSM7O5tXX32VpUuXkpaWVmv1a6SGiNQJBQV2+vX7jE2bkgkL8+bDJdcx4ctHuWLyFWw+uJlgn2AWDF3A2kfXKtAQERERERFpZDrd14mLH7m4zPJx48aRnJzMAw88gGEY5X6JiEiJigINwzDYvCWF3bvTgVOP2LBYLMTGxnLBBRewYMECfHx8eOSRR4qnA6xpCjVExOUcDoOhQ79gzZoEfHytjJlp5ob5XZn77VwMw+Cey+5h+wvbGXrFUEwmk6vLlXokIT2B3xJ/47fE3yi0pAGQai9ZlpCe4NoCRURERERERERqSUWBBsDRo3mkHcsjMTGLvDwbULmpqMxmM5MmTSIzM5Pp06cDkJCQwKZNmzh69CgAmzZtYtOmTaWaiVeFpp8SEZd7/PFvef/97VgCj9H+gR94+ivnH8i2Tdsy96659Gjfw8UVSn2UkJ5Au9ntyLPlORf4Ob+tzJnEyjec8zx6Wj2JHxVPdEC0i6oUEREREREREal5pwo0DMNg//5MHIZzNpWDB7No3ToQoMxUVLmFZUdjDBo0iAceeICXXnqJ0aNH88gjj/Dxxx8X33/hhRcCsGLFCm644YYqH4tGaohIrXlx+YtEPBPBi8tfLF726qu/8sq0n+GCrzHf9gobD3+Hu9WdiTdP5M8JfyrQkLOWkpNSEmhUIM+WR0pOSi1VJCIiIiIiIiJS+04VaIBzlEZGRj6eHhasVjNJSdnFozWg9IiNwpBCYs+NLbW91WrlySefxNvbGwAfH59y6/Dy8qqW49FIDRGpFS8se4EJn08AYMLnEzCZTMRmDuCRKQug7ycQfJhCB/Ro14M5d82hXXg7F1csIiIiIiIiIiJSv50u0CgapWEYYLGYsFgsZGcXlhqtASXBBjeCw8vBdzu+K9U8fNy4cYwbNw6AhQsXsnDhwho7JoUaIlLjXlj2As9+9mypZc9+9iykvAE3HQAgxDeEV297lbsuvUt9M+SMZRdkk5CewL70fexL28e+9H1sStrk6rJERERERERERFzmdIEGlIzS8PCwFC8rGq3RvLkvnp4lEcLJU1FN6jepVLBRWxRqiEiNKi/QKBbiDDT+ccU/mDJgCk18m9RiZVJfGIbB0dyjxWFF0fcTQ4yjuUddXaaIiIiIiIiISJ1RmUDj5FEaRTw8yh+tAXUj2FCoISI15pSBxglahLRQoNGI2Rw2DmUeKh1WFAUYx2/nFOacdj/+Hv7EBMQQHRBNTEAMVrOVmb/MrIUjEBERERERERGpOyoTaED5ozSKVDRaA1wfbCjUEJEaUdlAAyhe75kbn6nJksRFcgpzioOK4tEVJ4y4OJhxELthP+1+wn3DiwOLmIAYYgJLAoyYwBgCPQNLrf9b4m8KNURERERERESk0fn090+Z+fWpA42KRmkUOdVoDSgbbMy9ay4dIztW96GUS6GGiNSICZ9NOOP1FWrUP4ZhkJqbWu4Ii6IQ40jOkdPux2q2EuUfRUxgTHFoER0QXXw7KiAKT6tnLRyRiIiIiIjURV8//DU5h08YwW2Gfm/049s13xIYGMjevXvp378/f/31F/n5+ZjNZsaPH8/TTz+Nu7u76woXEXGBIO8gPKwe5NvyMQyj3P61pxqlUeRUozUAHIYDh8OBj4cPPh4+1XoMp6JQQ0RqxMRbJlZ6pEbR+lL32B12DmUeKtOE+8
QQI7sw+7T78XX3LR5RceIUUUW3w33DsZgr/p/o2QjxDsHT6kmeLa/CdTytnoR4h1Tr44qIiIiISM1w83Pj0qcuJTs7mwMbDvDnqj/p1asX69evZ/v27djtdoYOHYrNZuODDz5g7ty5ZGdnM3XqVFeXLiJSq3q078GEmyYwcelE9h/bT1RQVKlg43SjNIqcarSG3WFnT8oeooKjePW2V2kV2qqmDqcMhRoiUiOKRl1UJth4/pbnNUrDRXILc0lITygTWhTdPpBxAJvDdtr9hPmElUwJ5V8ywqLoe6BnYLlXBdSk6IBo4kfFk5KTAsDYsQbffmviyScdDBhgBpzBR3RAdK3WJSIiIiIiZ8dsNhMQE4C1wIoRacAfsHnzZgDi4uKIi4sDYOHChXz88cc8+uijzJkzR6GGiDRKfc7vA1BusFGZURpFyhutcXKgcV7z82ruQMqrqVYfTUQalWdufIa//z7GO1unVbjO3R0eVqBRQwzDIC0vrfQIi7R9JGSUTBOVnJ182v1YzVYi/SPLHWERExhDlH8UXm5etXBEZy46ILo4tAjMNSDRRLSbg4uamV1cmYiIiIiIGA7IzYPCAjAAiwW8vcBSiU+r0n5PIyU5BS+vis9F0tPTCQ4Orr6CRUTqmfKCDaBSozSKnDxaw9WBBijUEJEaZBgG29+/EJPteozOX5a537Txerb/cSHGQ+XP7Sen5jAcJGYmljvCouh2VkHWaffj4+ZTuun2SdNERfhFVPvUUCIiIiIi0jg57JCaCoeTISMDCgud4QaAyXQ82PCB0FAICwV3j5Jt89PzWTZ4Wan93X///eU/jsPBrFmzNEpDRBq9k4MNL3tIpUdpFCkardGsmTeHshJcGmiAQg0RqUEFBXYSEjIxDl8HPmnQ/peSO3+9HmPTdewPz6SgwI6Hh/4cnSzPlsf+9P0lIUXaPuIPx5Ocn1w8NVSho/C0+wn1Di3VdLt4mqjjIUawV7BCJRERERERqRF5tjzsDjvgDDMOHIC83JKRGRZ3MB0fSG0YYHdAWjakZsLu/dC0KTRr5ryoy+pjpeOojhRkFvD3h3/TPKA5//73v8s85rFjx8jOzmbYsGGMGDGiFo9WRKRuKgo2nvv8OeIT9uAw/Co1SqOIh4eFrOx8Nu+Lp1Obti4NNEChhojUIA8PKxs23MWRI7l89F0Y//rrFzBg5MWPce/9jwAQFubdaAONtLy00iMsTmjCvS9tH4ezD592HxaTheb+zUtPCVU0TdTx4MLbzbsWjkZERERERKS0fHs+2w9vx2a3kZcPNhvgCSZvqMxHafkGZOXAnr1gt+WDGfYF76N92/ac0+Ectj2yjaFDh7Jw4cLibQ4dOsTkyZOxWCy88cYbNXRkIiL1T5/z+/DHH8m88NfzWHzSMIwQTJX6awwGDhw+Ryk41oTHr37BpYEGKNQQkRoWFeVPVJQ/6/f6w1/glhvG6yP+hdncsHsaOAwHSVlJzh4WFTThzsjPOO1+vKxepaaCamJpwjkR59AyqCUxgTFE+EVgNetPuYiIiIiI1D0Oh4NCu438XDO2Qgtms3OKKYzKbW8BMIG9EAyHCZNhYHPYcDgcmK1mbr/7dt6Z/w6vvvoqwcHBHDx4kB49ehATE0NeXl6DP+8UETkTdruD7aubEpJwM5mxy8kzH8XT3uS0wYaBgxxrMj62UHy39+f3NSZ6XlRLRVdAn4SJiJyFfFs++zP2lzvCIiE9gf0Z+ymwF5x2P028mpQ7wqJo5EUTrybFU0M5HA6Sk5MJCwvTm3MREREREanz8mx55OU5sNnMWMwGZzvrrcUMDpzTU9nsDvJseZhMJp6Y+ASL5i9iyJAhzJs3jyuuuIKmTZty5ZVX8tdff/HVV18BcOmll+Lr61t9ByYiUg99++0Bfv89mTahl5OV7csOn/+RZzl1sFEUaHjZQ+iQNZRctxA+/XQnd9xxDpGRfrV8BCUUaoiIlCM9L73MCIsTp4lKykrCOM3lRWaTme
Z+zUuNtDhxmqiogCh83fXGWkREREREGhYvNy/8PPzYnZiCzWHHZAEwV3aARlkmwGQABvkFdvLtBYT6hBDkF8R1113HihUr+N///se+ffvYt28fv/zi7Od43XXXAfDNN9/QvXv3Kh+XiEh9Zbc7WLhwC3a7Ax8fN3zyOwOcMtgoFWhkDsXfFo1vqMHOnWm8//42HnvsElccCqBQQ0QaIYfhIDk7uSSsOGlaqH1p+0jPTz/tfjytniVNt/1Lj7CIDoimuV9z3CxutXBEIiIiIiIidUeEXwSzu7/PkIe3k9zyQbzM/lgMr6rtdBgUOnLJz8zgznazGHZTeyL8Ili5cmXxKmPHjq1i5SIiDdN33zlHaRgGHDiQeXxpW4IDbiKp2WdkmpNxKwguDjYMHBR4pOCWHUyT/QPJyAsiA+d2NpuDTz/dyeDB59C8uWtGayjUEJEalZCeQEpOCgnpCQA4DIPfEn8rnj4pxDuE6IDoan3MAnsBBzIOlAotigOL9H3sT99Pvj3/tPsJ9goud4RF0RRRod6hxVNDSd1mt0NKivPnHTucty0W19YkIiIiItKQ/f5dBOn7c/Fs44HF7oXV4VP1nZqh0J7PhtUxPHVHRNX3JyLSSDgcBh06BONwnHxPDxLxY6v7+zjcM/AyQgCDbHMSAY6mXFD4DwIiY8rsz9PTQm6urTZKL5dCDRGpMQnpCbSb3Y48Wx4khIMZ7A4bF//34uJ1PK2exI+KP6NgIzM/s8IRFvvS95GYmXjaqaFMmGju37wksDhhhEVRcOHn4bq5AaX6LF4MY8bAgQPOAGraNDMffQQzZkC/fi4uTkRERESkAbLbne/D3Wpg4LrF6rxQ6Y8/oFOn6t+/iEhDdO21MVx7bdlwwulmlv/ZlYlLJ5JXmEehvZCOwR149bZXOa/5ebVaZ2Up1BCRGpOSk+IMNE4hz5ZHSk5KcahhGIZzaqhyRlgU3T6Wd+y0j+1h8SjddPukJtyR/pGaGqoRWLwYBgxwNhQ80cGDzuUff6xgQ0RERESkuiUkwIEDEBgCSdW8b4sZ8gvgzz8VaoiIVJc+5/cBYOLSiYQHhNfpQAMUaohIHfD8t8+TXZhNQnoCCekJpw1CAAI9A8tOCXXC7TCfME0N1cjZ7c4RGicHGuBcZjLBQw/BLbdoKioRERERkeq0cydkZ0NYVM09Rnx8ze1bRKQx6nN+H0J8Q2ji24TYsFhXl3NKCjVExOU+i/+s1G0TJpr5NTtlE25/D38XVSv1xfffO68Oq4hhwP79zvW6d6+1skREREREGrykJOdFRDV18ZC7O+zeXTP7FhFpzLq26urqEipFoYaIuNy9F93LZVGXFYcWkf6RuFvcXV2W1BN2O+zb57xSq+hr+3bYtKly2ycm1mh5IiIiIiKNTmFhze7fVAuPISIidZdCDRFxuZFdRnJRs4tcXYbUcWlppUOLop937oT8/LPfb7Nm1VaiiIiIiIgAHh7lTwNbXRwGeHrW3P5FRBqToUOH8tZbbwFgtVqJjIxk4MCBPP/883ge/2P70ksvsXz5cjZt2oS7uztpaWkurFihhoiI1CE2m3MY+YmjLoq+kpMr3s7dHdq0gfbtoV0751dsLAwc6ByJUd4JlckEkZFw5ZU1dzwiIiIiIo1RZCSYzWC31cz+Cwqgbdua2beISGMUFxfHggULKCwsZOPGjdxzzz2YTCYmT54MQEFBAQMHDuSyyy5j1qxZxX1sXRWCKNQQEZFal5JSfnCxa9eph5FHRJSEFid+xcSUP1/vrFkwYIAzwDgx2CjqIT99upqEi4iIiIhUt9hY8PGBnNyaewyFGiIi1cfDw4OmTZuy+eBmet/Ym549e7J69eriUGPixIkALFy4EDizEOTNN9+s9noVaohIjQnxDsHT6kmeLa/CdTytnoR4h9RiVVJbCgqcIcXJwcX27ZCaWvF2Xl7OE5STg4u2bcH/DPvD9+sHH38MY8aUbhoeGekMNPr1O6tDExERERGRU4
iIcL5/X7+j+vdtt4OPN3TuXP37FhFpzBauW8jctXPp6NORdevWERMTU+G6Hh4ehIeHAxAVFXXaEKS6KdQQkRoTHRBN/Kh4UnJSmPa/93n3r6lgMrHh/zZgNpsBZ/ARHRDt4krlbBmGc1qok0OL+HjYs8d5wlGRqKjSoUXR1FFFQ9WrS79+cMst8O23DuLjM2jXzp+rrzZrhIaIiIiISA0xmaB/f/jp5ervrWGzQadOzulnRUSkeixdtpSlK5ZiOAx+sf+CyWxi+ozpldp2y5Ytpw1BqptCDRGpUdEB0UQHRNPU9+viZRc1u6g41JD6IS/P2ZD7xAbdRV/p6RVv5+tbMurixH4Xbdo4h6PXFosFuneHDh3yCAvzr9bQREREREREyurZE1osgkOF4FZNFxTZCsFsgT5x1bM/cb3KNChOTU3lwQcfZOnSpZjNZvr378+MGTPw9fV1Zeki9cIHH2ynbdsgLrqoabn3G4bBzuSdeMd40/KWlgS5BxH/VTx59jw2WTdxq+1W3K3uZbZbtmwZvr6+2Gw28vPzMZvNzJ49u6YPp5hCDRERAZxXUB06VH6vi717K77CymSCFi3K73UREVHSv0JERERERBoPPz8Y/n/w0ypnGGGtYrDhcDj77wU0gS5dqqdGqRtONzf/nXfeSWJiIqtXr6awsJBhw4Zx77338t5777m4cpG6bceOVKZN+5UWLQJYtKgPVmvpKzwNw2DhuoVsPbQVq4eVmJbOkRZd7u7CN//6hgXzFwAw8eaJZYKNHj16MGfOHLKzs5k2bRpWq5X+/fvXzoGhUENEpNHJyYEdO8oPL7KyKt4uIKD84CI21tkHQ0RERERE5ESXXAxBv0BaIhQ6wM3t7PZjOCA3F3z9ITREF041NKeam3/btm2sXLmSDRs20OV4mjVr1ix69+7N1KlTiYiIcGXpInXau+9u5ejRXLKzC/nqq33ExbUsvq8o0Jj+1XTMZjNWa0lMYDKbaNerHX8t/otPzvsEKBts+Pj4EBsbC8D8+fO54IILePPNNxk+fHitHJtCDRGRBsjhcDbGLq/Xxf79FW9nNkOrVmWDi/btISxMJw8iIiIiIlJ5JtPx8whHLseSnSMt3D3ObB92GxQUgGPdNo78lcQRoP1D7cudpuill15i+fLlbNq0CXd3d9LS0qr9mKRmnTw3/08//URgYGBxoAHQs2dPzGYz69ev59Zbb3VVqSJ12o4dqaxcuZewMG/S0/NZsGALPXvGYLWaSwUaHlYPvNy8KLQVltq+WadmbFuyDdsWG5+6fUr6kXSGdR5GQkIChmGQnp7Opk2biI2NxdfXl6eeeoqxY8cyePBgvGrhyleFGiIi9VhmZsmoixP7XezY4bySqSLBweU36W7dGtzLTpUoIiIiIiJyxrzcvPD39IPwTCzu+aSkQLYNrFZn37tTcRjOaaswgV8IpB1OLb7PwCAnJ4cZM2Zgt9t55ZVXAFizZg2JiYkUFhaSe6oTIqlTTjU3f1JSEmFhYaXWt1qtBAcHk5SU5IpyReqFd9/dSnp6Pm3aBOLt7cZff6Xw1Vf76NWrRalAo6l/UxJJLLO92WKmxVUt2P3Nbi6+7GIWvb6IOX/OKb5/7dq1XHjhhXzzzTd0796dgQMH8thjj/Haa6/x6KOPkpCQQGpqKgkJCdjtdjZt2gRQHIJUlUINEZE6zm6HhATYuhU2bvTm0CFTcZBx6FDF21mtzpDixAbdRV8hIbVXv4iIiIiINE4RfhG83/99cgudAcPOXfDOO/DHBsjKBjcreHmXXFjlsENOLuTlOaeqiomG/gPg2mug57vXkE02X/30FU08mrBx40YGDhzIu+++WxxqXHnlldx0002sWrWKVatWueqwBdh9ZDfzf5zP8CuG0zK05SnXdfXc/CINTdEojSZNPDGZTHh5WTEMg/kLNnPA+2tmfTOzONAA6HRXp3L3E3tdLLHXxRavczjjMLdeeCsHlxwkIyODJUuWFK9rtVoZNW
oUU6ZM4f777+fZZ5/lrbfeKr7/wgsvBCgOQapKoYaI1C6fVF5c/iLP3vSsqyupc9LSyu9z8fffkJ8PYAb8y2wXFlZ+r4uWLc9+zloREREREZHqEOFX0vOgdTBc38V5jrN6NWza5DznyTvmvN/dAq0i4fzz4aqr4PLLS85p3CxuuFnd6BTbCYD09HQsFgtZJzQGnDhxIgB//fVXbRyaVGD3kd2M/mA0Ww5u4c/9fzJj0IxTBhunmps/PDyc5OTkUuvbbDZSU1OL+3CISGknjtIoEt7Mhx+Tl/DrZz8REuxXHGhUlr+n8/OoT3//lFv73sp/b/5vmXXGjRvHuHHjAFi4cCELFy4862M4HYUaIlIrfjq42vmDCSZ8PgGTycQzNz7j2qJcwGaDPXvKBhfbt8NJ79NKcXeHNm0MYmLyOf98D9q3NxWHF0FBtVe/iIiIiIhIVZhM0Lat8wuc/TLS0519Ab29wc+v4m2TkpJKTVME0KZNm1qoWiqrKND4+/DftA1rS/zheMZ8MOa0wUYRs9lcam7+yy67jLS0NDZu3Ejnzp0B+Prrr3E4HHTt2rWmD0ek3jl5lAY4p+w7Gvw9R62r8U71okNM2Gn2Ur4Tgw0o2zy8NinUEJEa98KyF1h34MtSy579zDlSo6EGG0ePlm3QHR8Pu3Y5m+NVpFmz8pt0x8SAyWSQnJxGWFgYZrM6douIiIiISP3lcDjPj3buhMRE5+2AgJIpdL29y25jMpkwDAPDMIpvT5o0qZYrl4qcGGi0Cm2F1WylVUirMw42Tp6bPy4ujhEjRjB37lwKCwsZNWoUgwYNIiIi4rT7EmlsTh6lYWBwwOsbdnsvw93hTmGGN0eO5BIWVs4f2UqoK8GGQg0RqVEvLHuhOMA4WX0PNgoKYPfussFFfLwz1KiIp6fzqqQTG3S3a+dc5l92dqliDkf1H4OIiIhIfTd06NDiOZutViuRkZEMHDiQ559/Hk9PTwBSU1N58MEHWbp0KWazmf79+zNjxoxqaVQpIqd2KPNQcU8NgIJC+OZr+GIl7NkNuXlQdMmWw3BONxUS4uyjcUNvCAt13ldoLyQoOIjPv/ocX5Mvr776KkuXLiUtLa3Wj0nKKi/QALBazjzYOHlu/kWLFjFq1Ciuvfba4r/hM2fOrI3DEqlXTh6lcWKgYTaseBiB5GBj//5MQkO9ikdynKm6EGwo1GhEXlz+Is99/hzP3fyc+hlItbLbHRw6lMXevRns2ZPO3r3p7N2bwdr0+ewJ/uiU29b1YMMw4MiRsqFFfLwz0LDbK942MrL8Jt1RUWA2194xiIiIiDR0cXFxLFiwgMLCQjZu3Mg999yDyWRi8uTJANx5550kJiayevVqCgsLGTZsGPfeey/vvfeeiysXadgOZR7ijk/uIDM/E3D2CkxKgpwcIAgsXcFy0rmRYcB+G2zcC9P+4+whGBAASVlJOAodPLXpKd7v/z4LFiygRYsWxdMUeXl51frxiVNFgUaRUwUbFc25f+Lc/D4+Pvp7LVIJJ47SKBNoOAIB8PS0kJlZUKXRGuD6YEOhRiPxwrIXmPD5BKBx9zOQs+NwGCQmZh0PLDKKQ4uiACMhIROb7aRhBBetgou+qtT+60KwkZfnHPZcXqPuU1344+NTfpPutm2d94mIiIhIzfPw8ChuGBsVFUXPnj1ZvXo1kydPZtu2baxcuZINGzbQpUsXAGbNmkXv3r2ZOnWqpi8ROUF1j3zKLcwlMz8TD6sHtlwvDu12BhteHmCyAAZQzoVibmbABPm5kLwPrM2dvRYMDDLzM8ktzMVsNvPyyy9zzz33MH36dJ588kkSEhJITU3l6PGh85s2bQIgNjZWI7NqyOkCjSJnM2JDRCrv5FEaBzzXlgk0gOLpzKs6WgPKBhsv9n0Ri9ly9gdxBhRqNALlTf9TFz5ElrrD4TBISsouDiv27k0vFWAkJGRSUFDekAQDvLIgJBlLcAq+UWlYmxylwDuJTBLPqIYJn02o8X
+PhuGcq/XkBt3x8bBvX8XTO5lMzp4W5fW6iIhw3i8iIiIidcOWLVtYt24dMTExAPz0008EBgYWBxoAPXv2xGw2s379em699VZXlSpSJ9XEyCeT3Yu9f/tQmAt+3jjnm6rE9LpWdyjIh8QEMOxmTKbSGw0aNIgHHniAl156idGjRzN27Fg++eST4vsvvPBCAFasWMENN9xwxs+FnFplA40iCjZEas7JvTSOucdjN+XhYW9aZt3qGq0B4OvuS6Ijkd8TficrL4sA74Aq7a+yFGo0cA25n4FUnmEYHD6cU2aERVFosW9fBvn5p5hHyWzD0iSVkNhMfJqnYQ46Qp5HEqn2BHLszmHEdiC9CjVOvGViFbYuLScH/v67bHCxYwdkZla8nb9/2dCiXTuIjQWNZBYRERGpu5YtW4avry82m438/HzMZjOzZ88GICkpibCwsFLrW61WgoODSUpKckW5InVadY98MgxISIDcXPApCjTOgLsH5OWCERqMj0deqfusVitPPvkkr776KkCFozE0NVX1O9NAo4iCDZHqd/IoDYC2mYOw+eeR5vY3XrZQzLgVr19dozUcDge7U3YT0ySGfw/4d60FGqBQo0E7VaBRRMFGw2AYBkeO5JQ7ysL5PYO8PNsp92G2QPNW0KR1Ft7NjmEEHCHH/SAphQkkZiVgNxwcPnGDAuc3k8lEy5CWtGvajvbh7WkXXvL9je/eKJ727FQm3jzxjP8NOhxw4ED500UlJJziOM3QsmXZJt3t2kHTphp1ISIiIlIf9ejRgzlz5pCdnc20adOwWq3079/f1WWJ1HvVMfIpO9s5pa+HB2ccaBTx9AT7lZ0IbJ4NpJW678TeCwsXLqywR4NUn7MNNIoo2BCpXieP0gDwMALokDGUrf4Lyw02qjpaoyjQiAyK5NXbXqVjZMfqOJRKU6jRQFUm0CiiYKPuMwyDo0dzyx1lURRg5OaeJrQwm4iM9CW6hQ/BLbLwCj+G3e8wWZZDHM7fy95jO9mfc4z9RRtkld7ez9OvTGjRPrw9sWGxeLp5lvuYT1z/FJMnryfnnBUV1uW9vQ9PzH6qwvuzssoPLnbsON5crgJBQeU36W7d+vibaRERERFpMHx8fIiNjQVg/vz5XHDBBbz55psMHz6c8PBwkpOTS61vs9lITU0tvhpdREpU98in9HTnRWlV6h9rAqsVjh4F35Aq7EeqzDAMnv70af469Bftw9ufcaBRpCjY2HxwM+OXjOfd/3u3SnP7izQ0le1xdM89I1mxYhkmk5nc3Es599x/YLU6R6edKtioymgNVwcaoFCjwZrw2emvjj95fYUarmMYBseO5VXYiHvv3gyyswtPuQ+TCZo396NFC39atgwgLNqBW8hRCn0Ok27aT2LOXnYcjuenlN3YHXZIxflVah8mYoJjaN+sfZmRF+EB4Wf8BuNw3iEKuv4KmX5glDPvk8mPgks2cCj7EByKLhNcbN8Ohw5VvH+r1RlSlNeoOyREoy5EREREGiOz2cxTTz3F2LFjGTx4MJdddhlpaWls3LiRzp07A/D111/jcDjo2rWri6sVqXuqc+RTfr5zpIa1Gj59cneHrHznNFbiOiaTiVs63cL2pO0kZyQTEVh2yrHKSs5MJsg7iFs63aJAQ6Qclelx9PvvO2nefCyRkV788cdr/PnnXC666OHifZwq2Dib0Rp1IdAAhRoN1sRbJlZ6pEbR+lKzjh3Lq7AR9969GWRmFpx2HxERvsWhRYsWAURGe+ERkk6+VxKpjgR2HllPfFI8y5O2k3o4ldLzRZXw9fAtM+KiXXg72oS1wcu9+uYaPZyZgs0/GfyBjONfRfwB/0xsZNK2Uwq2/dEV7ic0tPwm3S1bgptbhZuJiIiISD1wKPMQuYVn/imll5sXEX7lf5g2cOBAHnvsMV577TUeffRR4uLiGDFiBHPnzqWwsJBRo0YxaNCgcuf/F2nsqnPk0/4DzlEabpaq11X0mXd+ftX3JRWrzNXh18
dez6wfZ/Hztz/zh+kPIjpFcG7/c7F6VP5jxkNphzAMg8fjHue2i2+rkWMRqe8q0+OoQ4eJBAS0pkkTL8477//45ZeX6NDhHjw9g0v2U0GwcaajNepKoAEKNRqsolEXlQk2nr/leY3SqAbp6fkVNuLeuzeD9PTTv/MKD/c5Hlj406JFyffAcDvZ1oPsSd1JfNIfbE/azseH49n12y5s9oqnnYppEuMccXHSyIuIwIhquQrCMAxybblkF2STVZBFduHx78dvf75+S8nK/se/Zxz/2b/kLpvNedVNbGzZJt3t2jmnkhIRERGRhudQ5iHu+OQOMvPLGdV7Gn4efrzf//1y77NarYwaNYopU6Zw//33s2jRIkaNGsW1116L2Wymf//+zJw5s6rlizR4VR35lHbM2SjcbAaM6qnJduqZl6UaVObq8OzD2Ux4bQLvrHuH/Uv2Y//AzkX3XFSp/Z8YaAy6ZFBNHopIg1FejyNPT19stuYEBzsDx5CQ8zGZTBw79jfNmpX+m1xRsFHZ0Rp1KdCABh5qvPbaa/z73/8mKSmJCy64gFmzZnHJJZecdrsPPviAO+64g1tuuYUlS5YULzcMgwkTJvCf//yHtLQ0rrjiCubMmUObNm1q8CjOXmWCDQUalZeZWcDu3cfYtGk/aWkH2Lcvs1SAkZZ2+tCiaVPvUmHFiaMuIpp7kZi1n/jD8WxP2kZ8UjzfJW0nfls8KVkpFe7Tx8OHdk3blRl50SasDd4ezj9GDsNBdkF2cejw5+E/yw0hKrx9ivWMM3lnelKYUWTsIzDlIbBUw9U7IiIiImeisZ8zuFpuYS6Z+Zl4WD3wslZ+xHCuzbldbmFuhU2BT2we7OPjw3vvvVcdJYvUaXVt5FNNzCikWYpqXmWuDt+wYQNdunQh9rxYnil4ht2LdtOhbwc8A8rvuVlEgYZI5Z2qx9GWLbsBX/z93SksdBRvY7X6kpOTSkGBvcz+TPjS5ugQtge+RYbHTjxtIZhNbhiGccrRGnUt0IAGHGr873//Y+zYscydO5euXbsyffp0evXqRXx8fJmmVifau3cvjz76KFdeeWWZ+6ZMmcLMmTN56623aNmyJc888wy9evVi69atxUPw6pp7rryHQ5mHmPv13DL33XfNfdxz5T0uqKpuysoqKDO64sQpolJT8067j9BQr1KhxYmjLmJi/PH2duNY9jG2J20nPime7UnbWbUznu0/bGfXkV0U2ivum9HErwnhgeE08WtCkF8QPt4+eHt5gwVybDlkFWTx3bHvWHF4Bdm/lg4hcm01P+mot5s3Pm4++Lr74uPu/J6WZmd71obTbtuurQINERERqX06Z6g7vKxe+Lj7nNE2+TbNQSNyoro48ikoyBlCOOyA+YzLKpelwX6SVTeVd3V4YGAgXbp0AWBw18HYbXaGvDeE3X/tpsPlHSrclwINkTNzqh5He/ZkYBiQl2fn0KHs4m0cDoO0tHwsluwK9mohMGkAuTEfkOW7G7f8JpjNbuTl2cjMLMTf373U2nUx0IAGHGq8+uqrjBgxgmHDhgEwd+5cli9fzvz584uv1jmZ3W7nzjvvZOLEiXz//fekpaUV32cYBtOnT2f8+PHccsstALz99ts0bdqUJUuWMGhQ3ftjnJCeQLvZ7ciz5ZX0NCjiD3N3zGXh7oXEj4onOqDifgYNRXZ2Afv2ZVTYiDsl5fQf/Ddp4knz5t60ah1E82gvmka6ERJhISgc/MNsONzzyCrIIiMviYTUX/jpaAIf7z7I4d8Ok5KRQlpWGvkFpzj5MuH8rSz6civ5+aj5KEeNo2V7U5wBE6ZSocPJIUTx7YqWV3Db280bs6nsO9QNB37jkjc7n7auCy88u+MRERERqQqdM4hIQ1IXRz5FRoHZcnzK4SqGGobDGZB4elRtP3J6p7o6PCkpqUzwf/cVdzMqYBQFmQUcSjtUbvNwBRoiZ+5UPY6uu+481qx5m4UL4wB47rkxLFv2IQCHD7+HybSWa6
+9ifvuewwPD+eFNenpx/j3v5/m++9Xw04TYe2bwGU5NG8Sg4fVo94EGtBAQ42CggI2btzIk08+WbzMbDbTs2dPfvrppwq3e/755wkLC2P48OF8//33pe7bs2cPSUlJ9OzZs3hZQEAAXbt25aeffir3BCU/P5/8EzpYZWQ4P4l2OBw4HI4y61e35KxkZ6ABzlAjG7ADXhRPAZRnyyM5K5lIv8gar6em5eYWHp8SqiSw2L33GHv2pJGQkMXRI6e/ksvD145PaAGeIbm4hWRiCcqAwFTs/kfI90si15zGXwVZ/GkcH8KVCWwBNgE2oPD499PN8Wmh3OACC2ACq9labsjg7e6Nr1v5IYSPu89pQwpPq2e19NIow3BOcXUyk6ly/85Nptr5najvHA4HhmHouTpLev6qTs9h1ej5qxo9f1VTF56/uvba1ZVzBnD9eYMrGYaB6YT/Kqto/er+d10XflekdjWk17zo98nb6n1GI59MmCiwFdTI8+BmNfD3NZGRY8LsVrVz0dRVv1MQf4C/gfbj2hMZGcmAAQOYOHFi8Ui4l19+mRUrVrBp0ybc3d1JTU0ts5+G9JrXBMMw6N69O6+//jrZ2dlMnz4dq9XKrbfeWvzcQdn/r3tYPbiuw3X8YfxBYlpiqWDjUNohMODxXo9zW5fbavW51+vd+DSU19wwjDLHMW7cOB599FEGDRrE1VdfQWZmOg7Hfjp37kxwsCddunTh119/5aef1nHgwAGGDRtGeLgP//rXvwDo3XsoSUlJrF79JYWFhQwdNhRjvUFazyRi/GNKvRdzOBzsTdlLVFAUUwdO5dyIc2vlOa3sYzTIUCMlJQW73U7Tpk1LLW/atCnbt28vd5sffviBN998k02bNpV7f1JSUvE+Tt5n0X0nmzRpEhMnTiyz/MiRI+TlnX4qo6rafuCkY7XgDDW8y64XaXFdqGF32J3NpguzybHllP5emEO2LZvcwlzSs7NJSswn+VAhRxMNjiVBRrKVrCNu5KZ4YcuoxJUoHrkQeAyCUiEw1flz4LHin/M98ygTfRg4n7cMSgKLovDiFL9nZrMZf29/gnyDCPUPJTwgnOaBzYkOiibIKwgfNx+83bydX1bn1E1F390t7hXv+EwVgFFgkJmdSSZnPgy5Ksp7A1nResmW5Bqupv5zOBykp6djGAZmczWN3W5E9PxVnZ7DqtHzVzV6/qqmLjx/mZm1+z7kdOrKOQO4/rzBlTIzM2nj0QYfqw+elspPz5VnzSPbI5vM1EySC6vvfWRd+F2R2tWQXvO69vtUVNMlzdqQig8WPLGe5adQhgGbPXZgtG/OZfd15r4L7yNpVxJjxowhNzeX8ePHA3Ds2DF69erF+eefz/vvv09yctnjaUiveU3Iy8vDarXi7++Pv78/kyZN4tprr2X69OkMHjwYb29vDh8+XOq5tdlspKam0q19N3q17sXi3xdjtpsJ9g0mNSuVWL9Y+l3Uj6tirir3NalJer0bn7r+mhuGwbp1B2nbNpjQ0Iobc+fl5ZGfn1/qd+bqq6/GZDIVTwfYo0cP/vGPfzBlyhQOHDjA1q1bufXWW4mOjiY6Oppu3brxxRdfMHbsWHbs2MGqVav44osvaNmyJQAvvvAid911FyMHj+RQwSGaejfFzeKGw+HgcMZhLm9+OcO7DSfMLazWfncre87QIEONM5WZmcndd9/Nf/7zH0JCQqptv08++SRjx44tvp2RkUFUVBShoaH4+5fTLbm6Ha7kep6ccs7gIoX2wtINowvLNo7OKcyp8P4TG1WfuJ/ifg82C6QHQlowpAWVfD92/HtWwOmPxT3veGBx7ITQIhXPkFx8mxbi5289aWRDML7u0fi4++BmciM3N5fsnGzSMtM4mnmUw+mHSUpLwmaveOhFRGAE7Zu2p214W9qHt6dtU+f3qKCoOvnHsza19WiLp8WTPHvFJ+OeFk/aRrYlLOD0/wYbO4fDgclkIjQ0tNH/2zobev6qTs9h1ej5qx
o9f1VTF56/+t5PoqbOGaAOnDechWHDhvH2228Dzvn2y7tiOjU1ldGjR7Ns2TLMZjP9+vVj+vTp+Pr6Fu8nyy2Lv/P/JtAUiI+58leWZ9uySctPwy/Yj7Cg6nsfWRd+V6R2NaTXvK79PhXVtJ+/SXUEcvSwD97eZ9foOycHCgqz8fbI54DnAdqc24Ze3XqxdOlS1q1bV/yZxr///W8AFi5cyAcffFDuZx0N6TU/Uw6Hgdl86hfA09OTvLy8Us/d+PHjefTRRxk5ciTXX389Y8eOZf9+59XhAF9++SUOh4Prr7+eiIgI7B52XvnyFbKPZOPj4cNjvR5jwMUDavTYKtKYX+/Gqq6/5r//nsyUKdvp3j2SSZOuqnC98n4XAR588EGmTZvGo48+yocffsiDDz7IbbfdRn5+PuHh4SxcuBBfX1+2bNnCb7/9RkxMDGFhYSxbtozAwECuv/764n0NGDCAIUOG0LVJV9bkreG7Pd8RFRzFobRDNA9qznN9nuO85ufV2HNR0XFXRoMMNUJCQrBYLBw+XPpT/cOHDxMeHl5m/V27drF3715uuumm4mVFQ12sVivx8fHF2x0+fJhmzZqV2menTp3KrcPDwwMPj7KTPZrN5lr5pTKd5n9URd7+822+2ftNuSHFibcL7AVVK8huhvSg44FFKKS1Kwks0oIgyx+MUz8vVk8HgeEOgptBaISV8Eh3mkd7ExXjS8sWAYSH+uPn4VdqGqYT+z3YHXYSjiY4G3Ufjmd74nbiDzobdielV3z1nIfVg7ZN29IuvB2RvpFc1Poizml2Dm2btsXfq26eaNYFLYJaEP9gPCk5KWzaBMOHQ/PmNpYsKfkdCPEOaRQ9XaqLyWSqtb8hDZGev6rTc1g1ev6qRs9f1bj6+atrr1tdOWcA1583nA2TyURcXBwLFiygsLCQjRs3cs8992A2m5k8eTIAd999N4mJiaxevZrCwkKGDRvGfffdV2oufpPJhHHCf5VVtH7Rv+vqPra6/NxL9Wsor3ld/X0yMGgeZZCbaZCVzRkHG3l5gAl8/YBCimvdunUrP/30EzExMWXqLrpd0fE0lNf8TGRnF3D//V8xaFB7evduVeF6JpOpzL+F22+/nSeeeII5c+bw6KOPEhcXx8iRI5k7dy6FhYWMHj2aQYMGERnpnAVk8KWDwQTzvpvH/Vff7/IeGo3x9W7s6uprbhgGb7/9F0lJ2axZs5+tW49y3nmh5a771ltvlbv8ySefLJ4+1c/Pj/fffx+AoUOH8u677xIREVGmH47ZbCY5OZmwsLBSz4m7uzvBwcHkZuby6l2v8vD/HubHnT8SGxbLK7e94pIeGpV9zRpkqOHu7k7nzp1Zs2YNffv2BZwnHGvWrGHUqFFl1m/fvj2bN28utWz8+PFkZmYyY8YMoqKicHNzIzw8nDVr1hSfkGRkZLB+/Xruv//+mj6kGrV69+ozWr+o38PJDaO9zX6YMwNxHAuk4Kgv+SneZCV7kJls4ViiibQjBuW0XSjF29tKixYBtGjhT4sWAbRsWfJzixb+NGniVameEJl5mcQnOcOKE7/vOLyDfFvFvTWaBTSjXXg72oe3L/netB3RTaKxmC04HI5y/whIxaIDookOiCZ3N5AIHl42LmpW9/7HIiIiIo2LzhmqzsPDozjIiYqKomfPnqxevZrJkyezbds2Vq5cyYYNG+jSpQsAs2bNonfv3kydOpWIiLJNZEWk4XJ3hw4d4K+tkJ0NHh6cdioqwwG5eWCxQOvWkLwVDv56lNQxqXQY04GC/IJSDazl1JYt28369YlkZBRwzTXReHpW/iNBq9XKqFGjiqe8WbRoEaNGjeLaa6/FbDbTv39/Zs6cWWqbwV0H0+vcXjTxbVLdhyJSb/3+ezLff3+Q6Gh/kpNzeOedrUyefPUpt0nPSefHXT9yfYfrsVpO/Xvbo0cP5syZQ3Z2NtOmTcNqtdK/f/9K1RbqF8q026cxd+1cbr3o1lofoXGmGmSoAT
B27FjuueceunTpwiWXXML06dPJzs5m2LBhAAwZMoTmzZszadIkPD09Oe+80i9UYGAgQKnlDz30EC+++CJt2rShZcuWPPPMM0RERBSfBNV5eZ5gzoOkZtAysXjxze1u5tLml5bfYPqE255mb9KOGBzan8uePenFDbmLvh84kIndXtHVIM7lnp7W4yGF//HAonRoERrqXelG1g6Hg4TUhJLQomjkxeF4ZxOqCrhb3Z2jLpqWDi/aNm1LgHclprgSERERkQZB5wzVZ8uWLaxbt46YmBgAfvrpJwIDA4sDDYCePXtiNptZv349t956q6tKFZFTGDp0aPHVwUVTyw0cOJDnn3++1NRyDz74IEuXLi3+QHvGjBmlppYrj48vnH8+7NoFKSmQn+ccsWFA8QWQJtPxZQbO0Rm+zkAjOBiSgcBzAml2ezMmXz2Zj+d/fEYf2DVm2dkFvPPOX5jNsGPHMb74Yg+33tqm3HUXLlxY7vJx48Yxbtw4AHx8fEqNuquIAg2REoZh8NZbW8jJKSQiwgfDMFizJoEtW45UOFojPSedRz9+lPW71/Pr3l95qvdTpww2fHx8iI2NBWD+/PlccMEFvPnmmwwfPpzw8PAyfTGK+uEUXaQS6hfKMzc9U01HXLMabKhx++23c+TIEZ599lmSkpLo1KkTK1euLG7al5CQcMZXij/++ONkZ2dz7733kpaWRrdu3Vi5cmX9mB/YANIDICgP/rgIWiynqKH9befexp0d78Rud3DoUNbxwCKDjXvT2bs3hb17d7FnTzr7958qtHDy8LAQE1N+YNGyZQBhYZUPLYpk5WURfzi+zMiLHYd3kFdYca+Gpv5NS4+4OP49pkkMFrPljGoQERERkYZH5wxVs2zZMnx9fctMcQDOpuknzwFttVoJDg4+ZdN0EXG98qaWM5lMxVPL3XnnnWWmlrv33ntP+SF3cS9NM0REQ54djh6FwkLKTJRlAsxm8PODZtHg4QvZBTh7XbqBR5gH55x3TpkP7KRiy5btZteudKKj/Tl0KJu33vqLG25oeUajNUSkaopGaRR9NhoY6MHff6dVOFqjKND4bsd3BHgF8P4vzmmmThdsFDGbzTz11FOMHTuWwYMHc9lll5GWlsbGjRuL++F8/fXXOBwOunbtWr0HWwsa9F+vUaNGlTt0HGDt2rWn3La8ZNpkMvH888/z/PPPV0N1NS/QM7Dkxq62UBQApIbC6t7gkQ9pQUxZmc6zyf8hISETm+3U80O5uZnLDS2Kpolq2tTntE2nyuNwONh/bD/xSfFlRl4cTDtYcT0WN9qEtSkOLIrCi3bh7Qj0DqxwOxERERER0DlDVVRligMRqbuqc2o5Lzcv/Dz8yMzPJN+WT2YmHD4MBQVg9QV3s3NUhuN4slEUaBgG5NogPgGCMyE0FArsBTgMB34efni5eZX5wM7Ly6s2n6Z6o2iUhpubGXd3C82a+Zx2tIaIVK+TR2kAx5uZe5U7WuPEQCMqKAovdy88rB5nHGwMHDiQxx57jNdee624H86IESOK++GMGjWKQYMG1ctpQRt0qNHYNfM93pzQAL7uBV0+K7lzXY/iH/8kp/hnq9UZWpw8wqLo52bNfM8qtCiSlZfFjsM7ygQXO5J3kFuQW+F2oX6hZfpctG/WnhZNWlTql1hERERERKpXVac4EJG6r6pTy0X4RfB+//fJLcxl9Vfw2nsQngfNIpy9Mk7FMCDtGKRnwMU9oUX042RlZvBW/7eI8HN+AHfyB3YJCQmkpqaSkJCA3W5n06ZNAMTGxp52eqyGqmSUhh/gnGHDYjFptIZILTp5lEaR8kZrlBdoAMUXb59JsHE2/XDqC/3lasBCvEPwtHqStz0aDkWDd7rzjlZ/QMK50Coea4sD/Pu2J+ncoRUtWvgTEeGLxVK1Bs6GYXDg2IEy00VtT9rOgWMHKtzOarESGxpbZsqoduHtCPYJrlJNIiIiIiJSc6o6xUHx1DSVdKbri0jlVffUchF+EW
zaBAtfBUsetI0Ekw2wnb4WXx8IcMD3n4G/vx/NmtmLA42ixz7xA7tnn322uCcIwIUXXgjAN998Q/fu3c/oeWgITh6lUUSjNURqT3mjNIqcPFojqpV7uYFGkVMFG9XZD6c+UKjRgEUHRLP9ge307r6SrZ0Wg+/xUCP2d0gLpYPRjxWfjCcmMOas9p+Tn+McdXFCg+6iXhfZ+dkVbhfiG1Kmz0W7pu1oGdISN6vbWdUiIiIiIiJnzzAgPh42bICsLHB3hxYtoFs38PCo3D7OZoqDk6emORNFU9CISPWq7qnl8vJg0iRIT3c2/T7DNpv4+UFuLhjGQp57ruz9J35gt3Dhwgo/2GuMTh6lUUSjNURqT0WjNIoUjdb471u/kN1xSYWBRvH6ZzFioyFqfEfcyGxf72CraTF0+bL0HV2+ZOuvsH399cT0qnh7wzA4lHao1IiLou8JqQkVbme1WGkd2to5TdRJIy+a+DappqOT+sRud37PyjKxdi1cffXphxuLiIiISM377jt47z1noJGdXTKfvdnsDDZuvRXuuAN8fE69n7OZ4uDEqWnOlJebV6krtkXEqaojn6p7ark1a2DzZoiKOvNAo0hoKPz9NyxaBJ06nd0+GpuKRmkU0WgNkZp3qlEaRUwmE0FNDd7d+W98OUBseIsKA40iCjYUajRohmHwj1mPlg00inT5kn/MepQD139MXmFe6V4XSfHEH3Y27c7Kz6rwMYJ9gsuEFu3D29MqpJVGXUixxYvhvvucPycnW7j2WoiMhBkzoF8/19YmIiIi0lgZBrz9NsycCTk5EBICzZqVfOiYnw8HD8LUqfDzzzBlCgQfnxW2Oqc4UDAhUj1qYuRTVaeWA/j0U+f3yo76Ko/J5Pwb9d13sH+/MyCRU6tolMb/t3fncVFX+//AX8Muu4rsiAqIGwKakpZJSoBXTb511WuKmEtlavHLtbKL1k0t7Wq3vNZ1ASt3r0sXt1yAFLnhvmeIa4aSaMom4HB+f3wuIwPDMOMwDDPzevaYRw9nzud8zuecw2fm83l/zjlVOFqDSP/qG6UBABWyYvzmux4PZD/Dscin3oBGFXMPbJjPkZqhOds/xG9eW9Sm+c1rC5q/3RwPHj6AEEJlGksLS2nURY1FuoM9guHm5KaPopMJ2bIF+POfpYvm6m7elN7fvJmBDSIiIiJD2LYNWLwYsLYG/vdQthJbW+nGYWkpcPAg8P77wJIlut2YJCL9qTnySQgg9zKQni5NL/ewFHB0BMLCpZHznh6Pt1U38ulJppar8uABcO4c4Oqq+/G5ugKXLz8e9UF1q2+URhWO1iDSH01GaVTIinHe6RvctbkA+4du+KOgEoWF5XBystFoH+Yc2DCPozRDH6V+hA93zNEo7f1Saa2N5vbNFcEKpVEXrdrBxkqzPyYyX5WV0tN8Dx8+fpWUAG++WTugAUjvyWRAYiIwZAinoiIiIiJqTMXFwLJl0m+4OmaMUWjWTBpl++OPwIEDwIABjVNGItJeVWDi+nVg3rzH6+TY2krXXBUVwKl04PsUIDYWmDpVWrNCnSeZWq7K5cvSdWGrVrofm4WF9P/cXN3zMnX1jdKowtEaRPpT3yiNSshxwekb3LU5D7tHLWFpZYPisgr8+mshOnbUfOr+6oGNZjbNMD1mekMdQpPGs5WJStqepFV6GWQoWFJQ51AoavoqK5UDCnW9agYeGupVXq59mYWQhg4fPAhERjZ4lRARERFRHdLSgF9/1fxpZ3t76bfb1q3SjVBeNhA1Xbm5wFtvAZcuAR4eytPKAdK14927wLp1wLVr0ggsFxfps4acWg4ACgulQIpNAz0nKYQUpKG6aTpKowpHaxDpx4YNP6OgoBSlpY/w+++11zoSqMT9gAqUu8hRWf4IFpUWqKwUKCgoRXFxBRwctJvWX/a//8wFgxomau6Qufjr9r9qlZ4BDd3I5U8WDGioIENFhaFr4DGZTHqiTyaTngKsT16e/stERERERI/t2i
X9X5sbjW5uwPHjwJUrQLt2+ikXEemmuBiYOVMKaAQEqB4Rb2Eh/T07OgKHDwMffQQsXKifYKWlpZRvZWXDjc7nKH/1UlMvIyfnD7i7N0NxsWY3CuTySo7WIGpgzz/fGj4+6kdLPZS3x7a8JbhUfAzNrZ1gbWELmUwGGxsLjffzR8kfKCgqwCsRryAxKlHHUhsPnqlM1AeDPsCVK0Dy6foDG692/RAfDPqgEUqlX48eNUxwQH2QQYaiopZ49EhW67NHjwxdA49ZWEhBBTu7J3/Z2j75tlZW0g/X9HTg+efrL6+Xl96rhIiIiIiquXFD+t2mDXt76enu339nUIOoqdq3Dzh/HmjTpv6b/3Z2gLv74zU3OnRo+PJ4e0vXpqWl0vo9uqia1ljF0h1UzaFDN+HgYI3i4kcANLtR4eBgjbt3S3Hx4l2Ehrrrt4BEZiI2ti1iY9vWm+6N0jBM2zQNGb9kwLO5n8YLhQPKAY1ZA2aZzXoaAIMaJu0Ftw+QfBTAU2oCG0c/xAv9dA9oCNFwQYUnHckgl+t8GBqQAaj/l5iVlW4BBV2DC1ZN5C+7Tx9p/uWbN1WvqyGTSZ/36dP4ZSMiIiIydxyoTWRahAC2bJH+tjUdheXiAuTnAzt26Ceo4ecHtGgBFBQAzs665VVWJgVGgjhDkloff/ws8vNLtN7O2toCrVvr2EhEpDXnZs5YNHSRIrDhp2Fgw5wDGgCDGibN3R3Ayf8FLFQFNo5+CJz8AAcOADk5ugcaKisb9fDUsrZuuKBC9eCCjU0lHj78A56errC3t6gzXVMJKhiapSXw+efAn/8s/bCuHtiouohesoTDh4mIiIgam5eX9ovtlpZKv3WbN9dPmYhINzdvSqM0Wmq+vixkMsDBAdi7V1o0vKFZWgIDBgBffSVdD+oSTM3PB/z9gfDwhiufKXJ2toWzs62hi0FEWtA2sGHuAQ2AQQ3zoCqw8b+ABgCsWNHwu7SxafiggqajF2xt9XeTvLISyM8vh7u7NMUT1e+ll4DNm4G335YWo6zi6ysFNF56yWBFIyIiIjJbMTHAjz9K67JpOiXMnTtAjx58SpqoqapalLtq0W9N2dhIi2/L5fq5lh40CFi/Xpq6zv0JZzYqKwPKy4GXX264RceJiJoSTQMbDGhIzPOozUR+frV/VAU2uicBx+Y+/jekqX86dWq44IKtLW/4k7KXXgKGDAEyMipx8eIDBAc7o29fC47QICIiIjKQqCjgiy+A27elh03q8/Ch9JT1//0fp60iaqqsraVrcW1nUahaxFtf1/FBQcCoUcDSpYCTk7TGhrblu34dCAsDhg/XSxGJiJqE+gIbDGg8Zr5HbgZqLb588gOlYEaVDz8EIiMbpUhkxiwtpX7WqdNDuLs7M/BFREREZEDOzsC4ccAnn0gjMNzc6k5bXi7dUHz6aSkYQkRNk4eH9Lf94IE0pZSmiouBkBD9BizHjQNOngQOHZLW2dA0sFFZCVy9Cnh6Au+/D9jb66+MRERNQV2BDQY0lPG2ogmrWqS5rh8mMpn0Y4KLNBMRERERmZ+RI4Hx44GSEuDyZWn6meoePQLy8qQbit27AwsWaHejlIgal5OTNNVTYaHyeobqlJdLaePi9Fo02NsDCxdK9x9u3gRu3ap/RElhIXDpkhTQ+OQTKfBCRGQOqgIbfdv3xY17N3D7wW0GNGpgUMOEVS3SDNQObHCRZiIiIiIi8yaTAW+9Bfztb9LNwnv3gJwc6XXpkhTMcHQEXntNmjam1khwImpyBg6URmsoTUddByGkAIOfH9Cvn/7L1qIF8OWXwP/7f4CVlXSeuXFDOveUlkrT3D14IAVTc3KAP/6Q1v9JSQEiIvRfPiKipqR6YKPsURkDGjWwFkwcF2kmIiIiIqK6yGTA4MHSjdCjR4EjR6QRGzY2gL8/0L+/9osOE5HhdOwIjB0rBSJ//x1o1Up1OiGkaeXs7YEZMxpvFJadnRQojY0Fdu4Edu+WRm3cuSOVyd
paOucMGCCNOunRg2t2EpH5qgpsHM49jKiOUQxoVMOaMANcpJmIiIiIiNSxsAB69pReRGTcXn8dkMuB5GRpxEPz5oCrq/R3/ugRUFAgTe3k5gbMni0FLxtb69bAG29IZS0oeDwdlYuL9BAm71cQEUmcmzkjtkusoYvR5DCoYSa4SDMRERERERGR6bOwACZNkoKU338P7NsnjcqorJTuDbRqBbzyCvDii0BgoGHLKpNJwRU3N8OWg4iIjAuDGkREREREREREJkQmezz6atIk4PJlac0KBwegUydp3Q0iIiJjxaAGEREREREREZGJ8vKSXkRERKaCkxAREREREREREREREZFRYFCDiIiIiIiIiIiIiIiMAoMaRERERERERERERERkFBjUICIiIiIiIiIiIiIio8CgBhERERERERERERERGQUGNYiIiIiIiIiIiIiIyCgwqEFEREREREREREREREaBQQ0iIiIiIiIiIiIiIjIKDGoQEREREREREREREZFRYFCDiIiIiIiIiIiIiIiMAoMaRERERERERERERERkFBjUICIiIiIiIiIiIiIio2Bl6AKYEyEEAODBgwcG2X9lZSUKCwthZ2cHCwvGs54E61A3rD/dsP50w/rTHetQN6w/3bD+dNMU6q/qN3DVb2Kqm6GvG8xZU/hbocbFNjc/bHPzwvY2P2xz46bpNQODGo2osLAQAODn52fgkhARERERGUZhYSFcXFwMXYwmjdcNRERERGTO6rtmkAk+KtVoKisr8dtvv8HJyQkymazR9//gwQP4+fnhxo0bcHZ2bvT9mwLWoW5Yf7ph/emG9ac71qFuWH+6Yf3ppinUnxAChYWF8Pb25lNz9TD0dYM5awp/K9S42Obmh21uXtje5odtbtw0vWbgSI1GZGFhAV9fX0MXA87Ozvyj1hHrUDesP92w/nTD+tMd61A3rD/dsP50Y+j64wgNzTSV6wZzZui/FWp8bHPzwzY3L2xv88M2N16aXDPwESkiIiIiIiIiIiIiIjIKDGoQEREREREREREREZFRYFDDjNja2iIpKQm2traGLorRYh3qhvWnG9afblh/umMd6ob1pxvWn25Yf0Sa4d+K+WGbmx+2uXlhe5sftrl54ELhRERERERERERERERkFDhSg4iIiIiIiIiIiIiIjAKDGkREREREREREREREZBQY1CAiIiIiIiIiIiIiIqPAoAYRERERERGRCm3atMGSJUsMXQxqIiIjI5GYmGjoYlADY7tSlTFjxiAuLs7QxaAGxu9y08SgholZunQp2rRpAzs7O0RERCA7O1tt+k2bNqFDhw6ws7NDSEgIdu7c2UglbZq0qb+UlBTIZDKll52dXSOWtmn58ccfMXjwYHh7e0Mmk2Hbtm31bpOeno5u3brB1tYWgYGBSElJ0Xs5mzJt6zA9Pb1WH5TJZLh161bjFLgJmT9/Pnr06AEnJye4u7sjLi4OFy9erHc7ngMfe5I65HnwsWXLlqFr165wdnaGs7MzevXqhV27dqndhv3vMW3rj31PvQULFkAmk9V7g4Z9kEzJnDlzap0XOnTooJQmMjKyVpo33nhDp/2OGTNGKb+WLVsiNjYWp0+f1ilfekxfbVuz7WQyGWJjYxu0rC4uLujTpw8yMjJ0ypckmvQFAMjKykK/fv3g4OAAZ2dnPPfccygtLW2w/bJdG5a+2lXTfLVR8zeoo6Mjunfvji1btuiUL0nqa7OrV6+qvAcik8mwadOmJ94vv8uNE4MaJmTDhg145513kJSUhOPHjyM0NBQxMTHIz89Xmf7w4cMYMWIExo0bhxMnTiAuLg5xcXE4e/ZsI5e8adC2/gDA2dkZeXl5ite1a9cascRNS3FxMUJDQ7F06VKN0l+5cgUDBw7E888/j5MnTyIxMRHjx4/Hnj179FzSpkvbOqxy8eJFpX7o7u6upxI2XRkZGZg0aRL++9//Yu/evaioqEB0dDSKi4vr3IbnQGVPUocAz4NVfH19sW
DBAhw7dgxHjx5Fv379MGTIEJw7d05levY/ZdrWH8C+V5cjR47g66+/RteuXdWmYx8kU9S5c2el88KhQ4dqpZkwYYJSmk8//VTn/cbGxiry279/P6ysrDBo0CCd86XH9NW21dsuLy8P69ata9CyZmVlISgoCIMGDcL9+/d1zpvq7wtZWVmIjY1FdHQ0srOzceTIEUyePBkWFrrd/mK76pe+2lWTc4e2qv8GPXHiBGJiYjBs2DCNHqqj+qlrMz8/P6XP8vLyMHfuXDg6OmLAgAE67Zff5UZIkMno2bOnmDRpkuLfcrlceHt7i/nz56tMP2zYMDFw4ECl9yIiIsTrr7+u13I2VdrWX3JysnBxcWmk0hkXAGLr1q1q08yYMUN07txZ6b3hw4eLmJgYPZbMeGhSh2lpaQKAuHfvXqOUyZjk5+cLACIjI6PONDwHqqdJHfI8qF7z5s3FihUrVH7G/lc/dfXHvqdaYWGhCAoKEnv37hV9+/YVb7/9dp1p2QfJ1CQlJYnQ0FC1aer7u1DF399fLF68WPHv5cuXCxcXF7Fv3z4hhBAJCQliyJAhStscPHhQABD5+fla7YtU01fbqmq7+tTcT2pqqnB2dhbfffddnWW9ceOGACCys7O12hfVpklfiIiIELNnz9YqX7arYemrXTXJt6aa54Xs7Gzh5uYmFixYIIRQ/RtULpcLa2trsXHjRq32RbU9SZuFhYWJsWPHqk3D73LTxJEaJqK8vBzHjh1DVFSU4j0LCwtERUUhKytL5TZZWVlK6QEgJiamzvSm7EnqDwCKiorg7+8PPz+/ep8oJWXsfw0nLCwMXl5eeOGFF5CZmWno4jQJVU9MtWjRos407IPqaVKHAM+Dqsjlcqxfvx7FxcXo1auXyjTsf3XTpP4A9j1VJk2ahIEDB9bqW6qwD5IpysnJgbe3N9q1a4eRI0fi+vXrtdKsWbMGbm5u6NKlC959912UlJRonP+nn36KWbNm4YcffkD//v1VpikqKsJ3332HwMBAtGzZ8omPhZTpq23T09Ph7u6O4OBgTJw4EQUFBRqXae3atRgxYgTWrFmDkSNHqkxTVlaG5ORkuLq6Ijg4WOO8qW7q+kJ+fj5++uknuLu7o3fv3vDw8EDfvn21ejqf7WoY+mpXTc4ddTlw4ABeeOEFfPzxx5g5c6bKNHK5HKtXrwYAdOvWTeO8qW7atNmxY8dw8uRJjBs3TuP8+V1uOqwMXQBqGHfu3IFcLoeHh4fS+x4eHvj5559VbnPr1i2V6c1xPv4nqb/g4GCsWrUKXbt2xf3797Fo0SL07t0b586dg6+vb2MU26jV1f8ePHiA0tJSNGvWzEAlMx5eXl746quv8NRTT6GsrAwrVqxAZGQkfvrpJ7P+QVVZWYnExEQ888wz6NKlS53peA6sm6Z1yPOgsjNnzqBXr154+PAhHB0dsXXrVnTq1EllWva/2rSpP/a92tavX4/jx4/jyJEjGqVnHyRTExERgZSUFAQHByumo+jTpw/Onj0LJycnAMArr7wCf39/eHt74/Tp05g5cyYuXryo0VzoM2fOxLfffouMjAx07txZ6bPU1FQ4OjoCkKYT9fLyQmpqqs7T3ZBEX20bGxuLl156CW3btkVubi7ee+89DBgwAFlZWbC0tFRbpqVLl+L999/Hf/7zH/Tt21fpszNnzij6Q0lJCZycnLBhwwY4OzvrWBNUX1+4fPkyAGle/kWLFiEsLAzffPMN+vfvj7NnzyIoKEht/mxXw9BXu2py7qjL1q1bMXr0aKxYsQLDhw9X+uz+/fuKvlBaWgpra2v861//QkBAQAPUhnnTts1WrlyJjh07onfv3hrlz+9yE2PooSLUMG7evCkAiMOHDyu9P336dNGzZ0+V21hbW4u1a9cqvbd06VLh7u6ut3I2VU9SfzWVl5eLgIAArYdEmiJoMHVSUFCQmDdvntJ7O3bsEABESUmJHktnHDSpQ1Wee+45MWrUqIYvkBF54403hL
+/v7hx44badDwH1k3TOqzJ3M+DZWVlIicnRxw9elTMmjVLuLm5iXPnzqlMy/5Xmzb1V5O5973r168Ld3d3cerUKcV79U3Fwj5Ipu7evXvC2dm5zmnshBBi//79AoC4dOlSnWn8/f2Fr6+vaN68ucjNza31eUJCgoiKihI5OTkiJydHZGdnizFjxgh3d3dx9erVBjkWUtZQbVtTbm6uAKCYjkSVvn37Ch8fH2Ftba1y2qGkpCTRsWNHRX84fvy4mDlzpnB0dBRHjhzRuCykmZp9ITMzUwAQ7777rlK6kJAQMWvWrDrzYbs2LQ3VrvXlq0pCQoLw9PQUlpaWKq/Hk5OThZOTk6IvnDp1Snz22WfC1tZWfP/99xqXhTSjrs1KSkqEi4uLWLRoUb358LvcNDHcZCLc3NxgaWmJ27dvK71/+/ZteHp6qtzG09NTq/Sm7EnqryZra2uEh4fj0qVL+iiiyamr/zk7O3OUhg569uxp1n1w8uTJSE1NRVpaWr1Pa/McqJo2dViTuZ8HbWxsEBgYiO7du2P+/PkIDQ3F559/rjIt+19t2tRfTebe944dO4b8/Hx069YNVlZWsLKyQkZGBv7xj3/AysoKcrm81jbsg2TqXF1d0b59e7XnhYiICACo99zRp08fyOVybNy4UeXnDg4OCAwMRGBgIHr06IEVK1aguLgYy5cvf/IDoDo1ZNtW165dO7i5udW7TXh4OFq1aoVVq1ZBCFHr86rvs8DAQISHh2PBggXw8fHBkiVLNC4LaaZmX/Dy8gKAWiM9O3bsWO+0Q2zXpqMh21VdvnUJCAhAhw4dsGrVKlRUVNT63MLCQtEXunbtinfeeQeRkZH45JNPNC4LaUZdm23evBklJSUYPXq0Rnnxu9z0MKhhImxsbNC9e3fs379f8V5lZSX2799f53zUvXr1UkoPAHv37lU7f7WpepL6q0kul+PMmTOKL1xSj/1PP06ePGmWfVAIgcmTJ2Pr1q04cOAA2rZtW+827IPKnqQOa+J5UFllZSXKyspUfsb+Vz919VeTufe9/v3748yZMzh58qTi9dRTT2HkyJE4efKkymlU2AfJ1BUVFSE3N1fteeHkyZMAUO+5o2fPnti1axfmzZuHRYsW1btvmUwGCwsLlJaWalVm0kxDtm11v/76KwoKCurdJiAgAGlpadi+fTumTJmiUd6WlpbsD3pQsy+0adMG3t7euHjxolK6X375Bf7+/mrzYrs2HQ3ZruryrYubmxsOHDiAS5cuYdiwYSoDGzWxL+iHujZbuXIlXnzxRbRq1UqjvPhdboIMO1CEGtL69euFra2tSElJEefPnxevvfaacHV1Fbdu3RJCCBEfH680NC8zM1NYWVmJRYsWiQsXLoikpCRhbW0tzpw5Y6hDMCht62/u3Lliz549Ijc3Vxw7dkz85S9/EXZ2dhpPlWFqCgsLxYkTJ8SJEycEAPH3v/9dnDhxQly7dk0IIcSsWbNEfHy8Iv3ly5eFvb29mD59urhw4YJYunSpsLS0FLt37zbUIRictnW4ePFisW3bNpGTkyPOnDkj3n77bWFhYaF2yLypmjhxonBxcRHp6ekiLy9P8ao+lRnPgeo9SR3yPPjYrFmzREZGhrhy5Yo4ffq0mDVrlpDJZOKHH34QQrD/1Ufb+mPfq1/N6afYB8nUTZ06VaSnp4srV66IzMxMERUVJdzc3ER+fr4QQohLly6JDz/8UBw9elRcuXJFbN++XbRr104899xzavP19/cXixcvFkIIcfDgQeHo6Kj4txDSlBWxsbGK783z58+LN998U8hkMpGWlqanozUvDdW2wcHBYsuWLUII6Xf3tGnTRFZWlrhy5YrYt2+f6NatmwgKChIPHz6ssyzVz60///yz8PT0VDrXJiUlic6dOyv6wy+//CI++ugjAUCsXr26YSvGDNXXF4SQrpGcnZ3Fpk2bRE5Ojpg9e7aws7NTOxUZ29WwGqpd+/XrJ7744gut8q0pISFBDB
kyRAghRF5enujQoYN4+eWXRUVFhRBCmn7K2dlZ0RcuX74svv76a2FpaSnmzp3bwDVjfjRts5ycHCGTycSuXbs0ypff5aaJQQ0T88UXX4jWrVsLGxsb0bNnT/Hf//5X8Vnfvn1FQkKCUvqNGzeK9u3bCxsbG9G5c2exY8eORi5x06JN/SUmJirSenh4iD/96U/i+PHjBih105CWliYA1HpV1VlCQoLo27dvrW3CwsKEjY2NaNeunUhOTm70cjcl2tbhJ598IgICAoSdnZ1o0aKFiIyMFAcOHDBM4Q1MVb0BUOpTPAeq9yR1yPPgY2PHjhX+/v7CxsZGtGrVSvTv319xQ14I9r/6aFt/7Hv1qxnUYB8kUzd8+HDh5eUlbGxshI+Pjxg+fLjSza7r16+L5557TrRo0ULY2tqKwMBAMX36dHH//n21+Va/ESKEEBkZGcLBwUH84x//EEJIv8+qf286OTmJHj16iM2bN+vlOM1RQ7Vt9d81JSUlIjo6WrRq1UpYW1sLf39/MWHCBMUDbXWpeW49f/68cHd3F++8844QQrr5Xb0/2Nvbi5CQELFs2bKGqQwzV19fqDJ//nzh6+sr7O3tRa9evcTBgwfV5st2NayGald/f3+RlJSkdb7VVQ9qCCHEb7/9Jtq3by+GDRsmHj16JJKTk5X6gq2trWjfvr34+OOPxaNHj3SqB9K8zd59913h5+cn5HK5Rvnyu9w0yYRQMVkgERERERERERERERFRE8M1NYiIiIiIiIiIiIiIyCgwqEFEREREREREREREREaBQQ0iIiIiIiIiIiIiIjIKDGoQEREREREREREREZFRYFCDiIiIiIiIiIiIiIiMAoMaRERERERERERERERkFBjUICIiIiIiIiIiIiIio8CgBhERERERERERERERGQUGNYiIiJ5AZGQkEhMTDV0MIiIiIiKjw9/SmouPj8e8efP0uo/z58/D19cXxcXFet0PEVFDYVCDiIiarDFjxkAmk0Emk8Ha2hpt27bFjBkz8PDhQ0MXjYiIiIjIJFX/DW5jY4PAwEB8+OGHePTokcHKdPXqVchkMlhaWuLmzZtKn+Xl5cHKygoymQxXr141TAH15NSpU9i5cyfeeustxXuRkZGQyWRYsGBBrfQDBw6ETCbDnDlzaqWvenl4eGDo0KG4du2aIk2nTp3w9NNP4+9//7tej4eIqKEwqEFERE1abGws8vLycPnyZSxevBhff/01kpKSDF0sIiIiIiKTVfUbPCcnB1OnTsWcOXOwcOFCQxcLPj4++Oabb5TeW716NXx8fAxUovqVl5c/8bZffPEFhg4dCkdHR6X3/fz8kJKSovTezZs3sX//fnh5edXKZ8KECcjLy8Nvv/2G7du348aNGxg1apRSmldffRXLli0zaPCKiEhTDGoQEVGTZmtrC09PT/j5+SEuLg5RUVHYu3cvAKCsrAxvvfUW3N3dYWdnh2effRZHjhxRbJuSkgJXV1el/LZt2waZTKb495w5cxAWFoZvv/0Wbdq0gYuLC/7yl7+gsLBQkaa4uBijR4+Go6MjvLy88Nlnn+n3oImIiIiIDKjqN7i/vz8mTpyIqKgofP/99wCk3+DTpk2Dj48PHBwcEBERgfT0dMW2BQUFGDFiBHx8fGBvb4+QkBCsW7dO7f527NgBFxcXrFmzRm26hIQEJCcnK72XnJyMhISEWmnPnj2LAQMGwNHRER4eHoiPj8edO3cUn0dGRmLKlClITExE8+bN4eHhgeXLl6O4uBivvvoqnJycEBgYiF27dinlm5GRgZ49e8LW1hZeXl6YNWuWUiAgMjISkydPRmJiItzc3BATE4OxY8di0KBBSvlUVFTA3d0dK1euVHmscrkcmzdvxuDBg2t9NmjQINy5cweZmZmK91avXo3o6Gi4u7vXSm9vbw9PT094eXnh6aefxuTJk3H8+HGlNC+88ALu3r2LjIwMleUhImpKGNQgIiKjcfbsWRw+fBg2NjYAgB
kzZuDf//43Vq9ejePHjyMwMBAxMTG4e/euVvnm5uZi27ZtSE1NRWpqKjIyMpSGc0+fPh0ZGRnYvn07fvjhB6Snp9e6CCAiIiIiMlXNmjVTjDiYPHkysrKysH79epw+fRpDhw5FbGwscnJyAAAPHz5E9+7dsWPHDpw9exavvfYa4uPjkZ2drTLvtWvXYsSIEVizZg1Gjhypthwvvvgi7t27h0OHDgEADh06hHv37tW68f/HH3+gX79+CA8Px9GjR7F7927cvn0bw4YNU0q3evVquLm5ITs7G1OmTMHEiRMxdOhQ9O7dG8ePH0d0dDTi4+NRUlICQBoN8ac//Qk9evTAqVOnsGzZMqxcuRJ/+9vfauVrY2ODzMxMfPXVVxg/fjx2796NvLw8RZrU1FSUlJRg+PDhKo/19OnTuH//Pp566qlan9nY2GDkyJFKAZ6UlBSMHTtWbf0BwN27d7Fx40ZERETUyjMsLAwHDx6sNw8iIkNjUIOIiJq01NRUODo6ws7ODiEhIcjPz8f06dNRXFyMZcuWYeHChRgwYAA6deqE5cuXo1mzZnU+7VSXyspKpKSkoEuXLujTpw/i4+Oxf/9+AEBRURFWrlyJRYsWoX///ggJCcHq1as5LJuIiIiITJ4QAvv27cOePXvQr18/XL9+HcnJydi0aRP69OmDgIAATJs2Dc8++6ziBruPjw+mTZuGsLAwtGvXDlOmTEFsbCw2btxYK/+lS5fizTffxH/+859aIxlUsba2xqhRo7Bq1SoAwKpVqzBq1ChYW1srpfvyyy8RHh6OefPmoUOHDggPD8eqVauQlpaGX375RZEuNDQUs2fPRlBQEN59913Y2dnBzc0NEyZMQFBQEP7617+ioKAAp0+fBgD885//hJ+fH7788kt06NABcXFxmDt3Lj777DNUVlYq8g0KCsKnn36K4OBgBAcHo3fv3ggODsa3336rSJOcnKxyaqkq165dg6WlpcqRFwAwduxYbNy4EcXFxfjxxx9x//79Ouvwn//8JxwdHeHg4ICWLVvi4sWLijqsztvbW2mtDSKipsrK0AUgIiJS5/nnn8eyZctQXFyMxYsXw8rKCi+//DJOnz6NiooKPPPMM4q01tbW6NmzJy5cuKDVPtq0aQMnJyfFv728vJCfnw9AGsVRXl6u9CRTixYtEBwcrOORERERERE1TVUPFlVUVKCyshKvvPIK5syZg/T0dMjlcrRv314pfVlZGVq2bAlAmjZp3rx52LhxI27evIny8nKUlZXB3t5eaZvNmzcjPz8fmZmZ6NGjh8ZlGzt2LHr37o158+Zh06ZNyMrKqvXA0alTp5CWlqYyYJCbm6sof9euXRXvW1paomXLlggJCVG85+HhAQCKa4MLFy6gV69eStPZPvPMMygqKsKvv/6K1q1bAwC6d+9ea7/jx4/Hv/71L8yYMQO3b9/Grl27cODAgTqPs7S0FLa2tkr7qi40NBRBQUHYvHkz0tLSEB8fDysr1bf5Ro4ciffffx8AcPv2bcybNw/R0dE4duyY0nVQs2bNFKNSiIiaMgY1iIioSXNwcEBgYCAA6Ums0NBQrFy5UqMLHwsLCwghlN6rqKiola7mk10ymUzpSSsiIiIiInNS9WCRjY0NvL29FTfLi4qKYGlpiWPHjsHS0lJpm6oAwsKFC/H5559jyZIlCAkJgYODAxITE2stmB0eHo7jx49j1apVeOqpp+q8eV9TSEgIOnTogBEjRqBjx47o0qULTp48qZSmqKgIgwcPxieffFJr++oLaau6Dqj+XlWZtL02cHBwqPXe6NGjMWvWLGRlZeHw4cNo27Yt+vTpU2cebm5uKCkpQXl5uWL63ZrGjh2LpUuX4vz583VO7wUALi4uimuqwMBArFy5El5eXtiwYQPGjx+vSHf37l0EBARoephERAbD6aeIiMhoWFhY4L333sPs2bMREBCgmKe2SkVFBY4cOYJOnToBAFq1aoXCwkIUFxcr0tS84KlPQE
AArK2t8dNPPyneu3fvntKwdSIiIiIiU1L1YFHr1q2Vnv4PDw+HXC5Hfn4+AgMDlV6enp4AgMzMTAwZMgSjRo1CaGgo2rVrp/K3c0BAANLS0rB9+3ZMmTJFq/KNHTsW6enpda4h0a1bN5w7dw5t2rSpVU5VAQdNdezYEVlZWUoPTmVmZsLJyQm+vr5qt23ZsiXi4uKQnJyMlJQUvPrqq2rTh4WFAQDOnz9fZ5pXXnkFZ86cQZcuXRTXQJqoCkiVlpYqvX/27FmEh4drnA8RkaEwqEFEREZl6NChsLS0xLJlyzBx4kRMnz4du3fvxvnz5zFhwgSUlJRg3LhxAICIiAjY29vjvffeQ25uLtauXYuUlBSt9ufo6Ihx48Zh+vTpOHDgAM6ePYsxY8bAwoJfoURERERkXtq3b4+RI0di9OjR2LJlC65cuYLs7GzMnz8fO3bsACCtJ7F3714cPnwYFy5cwOuvv47bt2/XmV9aWhr+/e9/IzExUeNyTJgwAb///rvSKIPqJk2ahLt372LEiBE4cuQIcnNzsWfPHrz66quQy+VaH3eVN998Ezdu3MCUKVPw888/Y/v27UhKSsI777yj0fXB+PHjsXr1aly4cAEJCQlq07Zq1QrdunVTLIquSvPmzZGXl6dYD7AuJSUluHXrFm7duoVTp05h4sSJsLOzQ3R0tCLN1atXcfPmTURFRdV7HEREhsY7MkREZFSsrKwwefJkfPrpp/j444/x8ssvIz4+Ht26dcOlS5ewZ88eNG/eHIC09sV3332HnTt3IiQkBOvWrcOcOXO03ufChQvRp08fDB48GFFRUXj22WdVzpNLRERERGTqkpOTMXr0aEydOhXBwcGIi4vDkSNHFOtJzJ49G926dUNMTAwiIyPh6emJuLi4OvMLDg7GgQMHsG7dOkydOlWjMlhZWcHNza3ONSS8vb2RmZkJuVyO6OhohISEIDExEa6urjo9nOTj44OdO3ciOzsboaGheOONNzBu3DjMnj1bo+2joqLg5eWFmJgYeHt715t+/PjxWLNmjdo0rq6u9Y4+Wb58Oby8vODl5YXnn38ed+7cwc6dO5XWCVy3bh2io6Ph7++v0bEQERmSTNScbJyIiIiIiIiIiIgaVFFREXx8fJCcnIyXXnqp3vSlpaUIDg7Ghg0b0KtXL72Vq7y8HEFBQVi7di2eeeYZve2HiKihcKFwIiIiIiIiIiIiPamsrMSdO3fw2WefwdXVFS+++KJG2zVr1gzffPMN7ty5o9fyXb9+He+99x4DGkRkNDhSg4iIiIiIiIiISE+uXr2Ktm3bwtfXFykpKejfv7+hi0REZNQY1CAiIiIiIiIiIiIiIqPAhcKJiIiIiIiIiIiIiMgoMKhBRERERERERERERERGgUENIiIiIiIiIiIiIiIyCgxqEBERERERERERERGRUWBQg4iIiIiIiIiIiIiIjAKDGkREREREREREREREZBQY1CAiIiIiIiIiIiIiIqPAoAYRERERERERERERERmF/w8Km31IYuzIRwAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "==== Performance Summary ====\n", + "\n", + "Memory Usage Comparison:\n", + " 4-bit Avg: 55770.34 MB\n", + " 8-bit Avg: 57204.05 MB\n", + " Difference: 2.6% more memory with 8-bit\n", + "\n", + "Evaluation Loss Comparison:\n", + " 4-bit Avg: 0.4618\n", + " 8-bit Avg: 0.4909\n", + " Difference: 6.3% higher loss with 8-bit\n", + "\n", + "Efficiency Analysis: 8-bit provides more efficiency memory usage relative to loss\n" + ] + } + ], + "source": [ + "# Visualize memory usage across quantization methods\n", + "plot_memory_metrics(flflow_4bit, flflow_8bit)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "63f3d4e1", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAABdUAAASmCAYAAADI7kK9AAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQABAABJREFUeJzs3Xl4VOXd//HPmX0PWQmBkIQgirggggi4YEVFrdW674rdrHWrtWpr3arWpdUqVWvrr4pb3X1says+ikUFZREREFER2bOQdSaTyezz+yMyD5EgAZNMMnm/ritXmTn3OfOdyeT4PJ9zn+9tpFKplAAAAAAAAAAAwE6ZMl0AAAAAAAAAAAD9BaE6AAAAAAAAAABdRKgOAAAAAAAAAEAXEaoDAAAAAAAAANBFhOoAAAAAAAAAAHQRoToAAAAAAAAAAF1EqA4AAAAAAAAAQBcRqgMAAAAAAAAA0EWE6gAAAAAAAAAAdBGhOgAAGLBmzZolwzDSP92hvLw8fbybb765W44JoHdsez6YNWtWpssBAABAH0WoDgAAetW2oXNXf+bOnZvpsrPKhRde2O0XE/qqSy+9dLvv08cff5zpsvqsr19o2vpjsViUn5+vgw8+WLfddpv8fn+mSwUAAAAyxpLpAgAAADJlwoQJ+v3vf9+tx7z++uvTgePkyZO79djYNZFIRM8888x2z8+aNUt/+MMfMlBR/5VIJNTY2KiFCxdq4cKFevrpp7Vo0SJ5vd5MlwYAAAD0OkJ1AADQq7YNnSWpqalJv/vd79KPjzrqKB199NEd9qmsrNzh8QKBgHw+327VMmbMGI0ZM2a39t2RH/3oR916POy+f/7zn2psbNzu+aefflp33nmnLJae/T+Fv813s6+4+OKLVVlZqYaGBj377LNat26dJOnTTz/VY489pssvvzyzBQIAAAAZQPsXAADQq370ox/p6quvTv98PYSePHlyh+2nnnqqhg8f3qEVzN/+9jeNGzdOTqdThx12mCRp7dq1uvLKK3XooYeqtLRUbrdbdrtdQ4cO1QknnKB//etf29XyTT3Vp06dmn7+wgsv1OrVq3XWWWepoKBADodD48aN0z/+8Y/tjrmjnupz587t8FpffvmlHnroIe23335yOBwqKirSD3/4QzU1NW
13zFAopF/96lcaPny4HA6HxowZo4cfflhr167t1TY5c+bM0amnnqphw4bJbrfL5/Np3LhxuummmzoNr9evX6+f/OQn2mOPPeR0OuVwODR06FBNmTJFV111lVatWtVh/KxZszR16lQVFBTIarUqNzdXe+65p8444ww99NBDu1zvtj2xR40alf53TU2NZs+evcP9GhoadOutt+rggw9Wbm5u+nt0zDHH6Lnnnutw/G0//1AopOuvv14jRoyQ1WrVjTfemB7b1tamP/7xj5oyZYpyc3Nls9k0ePBgHXfccXr++ec7reOf//ynpk+frsGDB8tqtcrn86myslInnXSS7rjjDiWTyfTY+vp6XX311RozZozcbrdsNpuKi4t10EEH6dJLL9WCBQt2+fOTpDPOOENXX3217rjjDr322msdtn3yySed7vPSSy/p+OOPV3FxsWw2m3JzczV58mTdc889CoVCHcauW7fuG7/DX/87/Kb9nn32WU2cOFEul0u5ubk67bTTtHHjxu3qi8fjuvPOO7XHHnvIbrersrJSt912m2Kx2Dd+Ft39/QQAAEA/lgIAAMigtWvXpiSlf2666aZv3H7ooYd2eLz//vunUqlU6l//+leH5zv7ueWWWzoc+7HHHuuwfVuHH354+vn99tsv5fV6tzueYRipN998s8N+ZWVlnb6X//73vx32PeSQQzqt8bDDDutwvGg0ut173vpzwgkndHj83//+t0uf+QUXXLDD970jV1111Td+tkOHDk19/PHH6fG1tbWpwsLCb9znz3/+c3r8TTfd9I1jBw8e3KU6t6qqqkqZzeb0/n/9619TBxxwQPrxySef3Ol+ixYtShUXF++wjhNPPDE99uvfn6//nq644opUKpVKVVdXp8aMGfON7++UU05JxWKxHR67s5+2trZUKpVKtbW1pfbcc89vHHvttdd26XP7+utu+50KBAIdtl1//fUd9o3H46nTTz/9G+sYPXp0qqqqKr3P1/++v/4d3vbv8IILLtjhfjv6e9pjjz3Sn9NWZ555Zqdjjz/++A6PH3vssfQ+3f39BAAAQP9G+xcAANCvvPvuuyorK9Mpp5wil8ulLVu2SJIsFovGjh2r8ePHq7CwUD6fT62trZo/f77++9//SpJuvfVW/eAHP9DQoUN36TWXL1+u3Nxc/fznP1dbW5seeeQRJRIJpVIp/f73v9eRRx65y+9j3rx5OvLIIzV58mS98sorWrFihSTpnXfe0YIFC3TwwQdLku6//369++676f32228/nXjiiVq2bJn++c9/7vLr7o4nn3xS9957b/rxmDFj9P3vf19VVVV6/PHHlUgktHnzZp188slauXKlLBaLXnrpJdXV1UmScnNzNWPGDOXn56uqqkqffvpph/ckSX/+85/T/542bZqmTp2q1tZWbdy4UfPmzVNbW9su15xIJCRJVqtVp5xyipqamrR06VJJ0quvvqqGhgbl5+en92lpadH3vvc91dTUpJ/7zne+oylTpigQCGjevHnf+JrvvvuuJk6cqKOOOkqtra0aPny4JOmcc87RypUr0+NOPfVU7b333nrjjTf0/vvvS2qf3f273/0uPbt9289jwoQJ+u53v6t4PK6NGzdq4cKFHWb5//e//9Vnn30mSXI4HOnveE1Njb744gu9/fbbu/TZdaaxsVF33XVX+rFhGDrttNM6jPnd737XYdb9wQcfrKOPPlqrVq3SCy+8IElatWqVzjnnHL311lvfuqZtzZs3TxMmTNAxxxyj//73v5o/f74kafXq1XrllVd05plnSpJefPFFPfvss+n9Ro4cqdNPP12bN2/Wk08+ucPjd/f3EwAAAP0boToAAOhXKioq9OGHH2rQoEEdnp8+fbqmT5+uzz//XEuXLlVdXZ2sVquOO+44LVy4UKFQSPF4XG+99ZbOO++8XXpNwzA0Z84cHXDAAZLag8v77rtPkrR48eLdeh/f//739dJLL8kwDF155ZUqKipKh8
CLFy9Oh+r/7//9v/Q+5eXlWrBggZxOpyTpwgsv1OOPP75br78r7rnnng41LF68OF3D+PHjdckll0iSPv/8c7366qs66aSTFA6H0/ucfvrpHY4hSa2trQoGg+nH245/8sknVVxc3GH8l19+uUs1b/u5HH300crLy9OZZ56p6667TqlUStFoVH//+9912WWXpcfNmjWrQ6B+++2369e//nWX6zj55JP1wgsvyGT6vw6LH330UYcA+ZprrkmH0zfeeKMOPfTQdLB+//336ze/+Y1MJlOHz2PmzJnp78NW69atk81mk9Txszv88MP1wAMPdBgbiURUX1+/w7q/yRFHHLHdc7m5ufrTn/6k/fffP/1cMplM/01I0qRJk/Tuu+/KbDZLkq699lrdfffdktovAnz00UcaO3bsbtXUmYMOOkjz5s2T1WpVLBbTsGHD0hfcFi9enA7Vt/17ysnJ0cKFC5WXlyepvUXQ9ddf3+nxu/v7CQAAgP6NnuoAAKBf+dnPfrZdoC61h4xTpkzRnnvuqTPPPFOXXXaZrr76av3yl7/s0Md506ZNu/yakyZNSgfqkrTnnnum/91ZD/Su+OlPf5ru456Xl6eCgoLtjhkMBtMzkCXptNNOS4fZkjRjxozdeu1dEQqFtHz58h3WcP7553cYvzUgnjJlSvr9/eUvf9GBBx6o8847T7fddptmz54ti8WiwYMHp/c79NBD0//eZ599dPzxx+vKK6/UI488oi+++EIjRozocs2LFi3q0O97a6A6fPhwTZo0Kf38Y4891mG/bWeie71eXXvttdsd+5vq+PWvf90hUJf+7/PY6oILLkj/22w269xzz00/bmxsTP++t/08ti7e+7Of/UwPPvigVqxYofLy8vRrTZgwQXa7XZL0+uuva8yYMTrrrLN000036ZVXXlE0Gt3luzO+yUUXXaTTTz+9w3OfffZZh7765557bjpQ//r7lrb/XL6tH/7wh7JarZLa70yoqKhIb9v2b/SDDz5I/3v69OnpQH1rzTvSnd9PAAAA9H/MVAcAAP3KXnvt1enzJ510kpYtW7bT/SORyC6/Znl5eYfHWwNMSUqlUrt8vJ0dc+sClM3NzR3GfH127Ncf94SmpqYO73HbIFyS3G63PB5Petb51gDzoIMO0r333qsbbrhBwWBQH374oT788MP0fgUFBXrhhRc0depUSe3tNU4//XQtWLBADQ0N+s9//tPhdU4//XQ988wz24XWndk2LHc6nTrxxBPTj8866yy99957kqSlS5dqxYoV2nfffSWpQyhcWlraIRTuis6+m19fwPXrn9/XH2/9/H73u9/pyy+/1GuvvaZgMKg33nhDb7zxRnrc4Ycfrn//+99yu90aNmyYZs2apcsuu0z19fX65JNPOlxU8Hg8euSRR9IXF3bFxRdfrKFDh+p///d/0y177rnnHjU0NHT4nHf3fX7d1/+euvr32pW/J6nj31RRUdE31rit7vx+AgAAoP/j/+oDAAD9itvt3u65zz77rEOgfvbZZ2vTpk1KJpNKpVIqLCz8Vq+5dQbsVltnYPf0MXNycjo83trOYqttW5X0lNzc3A611dbWdtj+9TYuubm56X9feeWVqq2t1Zw5czRz5kxddtll2mOPPSRJ9fX1HWYvl5aW6v3339fq1av19NNP6+abb9Ypp5wii6V9Dsjzzz/fpVY3kUikQ8/strY2+Xw+GYYhwzA6tHuROgbw285a3rhxY7odT1d19t3c9pjS9p/f1x9v/fx8Pp/+85//aOPGjXrhhRd0++2365xzzpHL5ZIkvf322+l2KlL7bPyqqirNmzdPf/7zn3XVVVel764IBoP6wQ9+0OH31FVnnHGGfvOb32ju3Lk69thj08/PmjWrQ1/83X2fXw+ht+1NnkwmtWbNmi7V2dW/0W3vcvn639PXa9xWd30/AQAAkB0I1QEAQL/X0NDQ4fGpp56qoUOHyjAMzZ07N71gZn/j9Xo7tJp5+eWXFY
1G04+/3r6kJ7hcrg69s1944YUOwecTTzzRYfzkyZMlSVVVVaqtrZXL5dJ3vvMdXXbZZZo5c6aee+659NgNGzakf3fLli1TMpnUyJEjdfbZZ+umm27Siy++qOOOOy49ftuZ7jvyyiuvbDfD/5s8/fTTisfjkqRDDjkk/XxLS4t+//vfbzd+/fr1XT629H+fx1bbBq+JREJPPfVU+nFeXl769/3xxx+ne4Ofeuqp+vWvf62nnnpKP/zhD9Pjt34ejY2NWr9+vaxWq6ZMmaKLL75Y99xzj+bMmZMeGwqFOrQS2lUmk0kzZ87sMHt/66KqUntLpG2D9aeeeqrDRYmvB85bP5evt3JasGBB+t+PPPJIt//tjh8/Pv3v2bNnd5hhv+3v4uu66/sJAACA7ED7FwAA0O+NHDlSJpMp3ebhiiuu0EcffbRdi4r+6Ec/+pGuvvpqSdLq1as1adIkffe739WyZcv0j3/8o1teY9ugcVs//vGP9eMf/1i/+MUv0ou7rlu3ThMmTND3v/99VVVVdQhLR40apeOPP16S9M477+icc87RIYccotGjR6ukpESJREIvv/xyerzNZkvPvD7jjDPk9/t1xBFHaOjQocrLy9OaNWs6tNnorJf+1237+3a73frud7+73Zja2lrNnTtXUvts5X//+9868cQTdeGFF+r2229Pz1j+1a9+pTlz5mjSpEkKhUJasGCBCgoK9Morr+y0jq32339/HXnkkemA++6779aXX36pMWPG6H//93879Ba/4oor0jO3r776ai1atEhHHnmkSktLVVhYqKqqqg7vb+vn8fnnn2vSpEmaMGGC9t9/f5WUlMhisWj27NkdaunK5/dNRo4cqTPOOEN///vfJUlz587Ve++9p8mTJ8tkMunnP/+5brjhBkntPdMPOeQQHX300fr000/1/PPPp49zxBFHpC/U+Hw+jRo1Sp9//rmk9sVhly5dqra2tg4LvHaXH/zgB3r99dclSX6/XxMnTtQZZ5yhTZs26cknn9zhft31/QQAAEB2IFQHAAD9XlFRkX784x/r4YcfltTeuuO3v/2tJOnII4/Up59+qs2bN2eyxN12+eWX6x//+Ee61ca2vcmPPfZYvfbaa+mxu9vPecmSJZ0+X1VVJal9AcelS5fq3nvvlSStXLlSK1eu7DC2pKREL7/8crodhtTevuOdd97RO++80+nxL7300g6LntbU1OiZZ57pdGxeXl6HWdqd2bx5c4e+42eddZYeeeSR7ca1tLSouLg4vYDtrFmzdOKJJ8rr9epf//qXTjjhhHSw/uabb+rNN99M77ttf/aueuqpp3TkkUem+5y/+OKLevHFFzuMOeWUU/TrX/+6w3NNTU3bjdvK4XDo8ssv7/Dc4sWLtXjx4k7Hn3zyyaqsrNzl2r/uV7/6lZ555pl07/PbbrstHSz/6le/0vLly/XCCy9Iap91vu3Mc0kaPXr0djPCr7nmmvTvNplM6tVXX5XUviiszWbTp59++q3r3uq0007Taaedlq7xiy++0O233y5Jmjp1avpiS2e+7fcTAAAA2YP2LwAAICv86U9/0m9/+1uVlZXJarVq+PDh+uUvf6l//etfHYLe/sZqtWr27Nm69tprNWzYMNlsNu2555764x//qN/85jcdxvbkTNl77rlHb7zxhk455RSVlJTIarXK4/Fo7NixuuGGG7R8+XKNGTMmPf6QQw7R7bffruOPP16VlZXyer2yWCwqLCzUkUceqVmzZumee+5Jj7/jjjt08cUX68ADD1RxcbGsVqtcLpf22msvXXLJJVqyZInKysq+scYnn3yyw6KUF110UafjvF6vTj311PTjf//73+k2IxMmTNDKlSt1yy23aMKECfL5fLJYLCoqKtJ3vvOd3Vrss7i4WIsXL9Y999yjSZMmKScnJ/1ZTJ8+Xc8++6xefPHFDt/TX/7yl7riiit08MEHa+jQobLZbLLb7RoxYoQuuOACLVq0SBMmTJDU3n
rlnnvu0cknn6xRo0YpJydHZrNZubm5mjJliu6///4Ofea/jX322UcnnHBC+vFrr72WvshjNpv1/PPP64UXXtBxxx2noqIiWSwW5eTkaOLEifr973+vxYsXq6SkpMMxf/CDH+iRRx7R6NGjZbPZVFxcrJ/+9KdatGjRNy4euruefvpp3X777RoxYoSsVqvKy8t1/fXXd7hA9XXd8f0EAABA9jBSW6eZAAAAoE9qa2vrMKN7q6uvvjodTHs8HjU0NMhms/V2eQAAAAAwoPTfaVsAAAADxBFHHKERI0bo0EMPVWlpqZqamjR79uwOrSh+8pOfEKgDAAAAQC9gpjoAAEAfN3bsWC1btmyH248//ni99NJLstvtvVgVAAAAAAxM9FQHAADo4y699FIdc8wxGjp0qBwOh+x2u4YNG6aTTjpJL774ol599VUCdQAAAADoJcxUBwAAAAAAAACgi5ipDgAAAAAAAABAFxGqAwAAAAAAAADQRZZMF5Atksmkqqqq5PV6ZRhGpssBAAAAAAAA+pRUKqWWlhaVlJTIZGKuL/ovQvVuUlVVpdLS0kyXAQAAAAAAAPRpGzdu1LBhwzJdBrDbCNW7idfrldR+UvD5fBmuBgAAAAAAAOhbAoGASktL0zka0F8RqneTrS1ffD4foToAAAAAAACwA7RORn9H8yIAAAAAAAAAALqIUB0AAAAAAAAAgC4iVAcAAAAAAAAAoIvoqQ4AAAAAAAAAGZRIJBSLxTJdxoBms9lkMnVtDjqhOgAAAAAAAABkQCqVUk1NjZqbmzNdyoBnMplUUVEhm82207GE6gAAAAAAAACQAVsD9aKiIrlcLhmGkemSBqRkMqmqqipVV1dr+PDhO/09EKoDAAAAAAAAQC9LJBLpQD0/Pz/T5Qx4hYWFqqqqUjwel9Vq/caxLFQKAAAAAAAAAL1saw91l8uV4UogKd32JZFI7HQsoToAAAAAAAAAZAgtX/qGXfk9EKoDAAAAAAAAANBFhOoAAAAAAAAAgN0yd+5cGYah5ubmHn2d8vJy3XfffT36Gl1FqA4AAAAAAAAAfdTGjRt10UUXqaSkRDabTWVlZbriiivU0NDQ67VMnTpVV155ZYfnJk+erOrqauXk5HTLa8yaNUuDBg3a7vnFixfrxz/+cbe8xrdFqA4AAAAAAAAAfdCXX36p8ePHa/Xq1XrmmWf0xRdf6OGHH9acOXM0adIkNTY2ZrpE2Ww2FRcX93hv+MLCwj6zqCuhOgAAAAAAAAD0QT/72c9ks9n0v//7vzr88MM1fPhwHXvssXrzzTe1efNmXX/99ZLaF9l85ZVXOuw7aNAgzZo1K/342muv1ahRo+RyuTRixAjdcMMNisVi6e0333yzxo4dqyeffFLl5eXKycnRmWeeqZaWFknShRdeqLffflv333+/DMOQYRhat27ddu1fpk6dmt6+7c+6deskSffee6/23Xdfud1ulZaW6pJLLlEwGJTU3kpmxowZ8vv96f1uvvlmSdu3f9mwYYNOPPFEeTwe+Xw+nX766aqtre3y+/k2CNUBAAAAAAAAoI9pbGzU66+/rksuuUROp7PDtuLiYp1zzjl67rnnlEqlunQ8r9erWbNm6ZNPPtH999+vRx55RH/84x87jFmzZo1eeeUVvfrqq3r11Vf19ttv684775Qk3X///Zo0aZJ+9KMfqbq6WtXV1SotLd3udV5++eX09urqap188snac889NXjwYEmSyWTSzJkztXLlSj3++ON66623dM0110hqbyVz3333yefzpfe/+uqrt3uNZDKpE088UY2NjXr77bf1xhtv6Msvv9QZZ5zR5ffzbVi+9REAAAAAAAAAAN1q9erVSqVSGj16dKfbR48eraamJtXV1XXpeL/5zW/S/y4vL9fVV1+tZ599Nh1oS+1h9axZs+T1eiVJ5513nubMmaPbb79dOTk5stlscrlcKi4u3uHr5OXlpf/9xz/+UW+99ZYWLlyYvjCwbU/28v
Jy3Xbbbbr44ov10EMPyWazKScnR4ZhfONrzJkzRytWrNDatWvTwf4TTzyhMWPGaPHixZowYcJO38+3QagOAAAAAAAAAH3Uzmai22y2Lh3nueee08yZM7VmzRoFg0HF43H5fL4OY8rLy9MBtCQNGTJEW7Zs2fWiJb322mu67rrr9K9//UujRo1KP//mm2/qjjvu0KeffqpAIKB4PK5wOKxQKNTlnumrVq1SaWlph5nye++9twYNGqRVq1alQ/XufD/bov0LAAAAAAAAAPQxI0eOlGEYWrVqVafbV61apcLCQg0aNEiGYWwXvm/bL/3999/XOeeco+OOO06vvvqqli5dquuvv17RaLTDPlartcNjwzCUTCZ3ufZPPvlEZ555pu68804dffTR6efXrVun7373u9pvv/300ksvacmSJXrwwQclabtaukN3vZ+vI1QHAAAAAAAAgD4mPz9fRx11lB566CG1tbV12FZTU6Onn35aF154oSSpsLBQ1dXV6e2rV69WKBRKP37vvfdUVlam66+/XuPHj9cee+yh9evX73JNNptNiUTiG8fU19frhBNO0CmnnKKf//znHbYtWbJEyWRS99xzjw4++GCNGjVKVVVVu/wao0eP1saNG7Vx48b0c5988omam5u199577+K72nWE6gAAAAAAAADQBz3wwAOKRCI65phj9M4772jjxo2aPXu2jjrqKI0aNUo33nijJOk73/mOHnjgAS1dulQffPCBLr744g6ztPfYYw9t2LBBzz77rNasWaOZM2fqf/7nf3a5nvLyci1cuFDr1q1TfX19p7O+TznlFLlcLt18882qqalJ/yQSCY0cOVKxWEx/+tOf9OWXX+rJJ5/Uww8/vN1rBINBzZkzR/X19R0uDmw1bdo07bvvvjrnnHP04YcfatGiRTr//PN1+OGHa/z48bv8vnYVoTq6pC2a0PqGVoVj33yVCAAAAAAAAED32GOPPbR48WKNGDFCp59+usrKynTsscdq1KhRmj9/vjwejyTpnnvuUWlpqQ499FCdffbZuvrqqzv0J//e976nn//857r00ks1duxYvffee7rhhht2uZ6rr75aZrNZe++9twoLC7Vhw4btxrzzzjv6+OOPVVZWpiFDhqR/Nm7cqP3331/33nuv7rrrLu2zzz56+umndccdd3TYf/Lkybr44ot1xhlnqLCwUHffffd2r2EYhv7xj38oNzdXhx12mKZNm6YRI0boueee2+X3tDuM1M463aNLAoGAcnJy5Pf7t2vwnw0aghEt3disQU6rKgs9ynV3bQEEAAAAAAAAQMr+/GxXhcNhrV27VhUVFXI4HF3e76abbtK9996rN954QwcffHAPVjiw7Mrvw9JLNSELJJIp+dtiWr6pWSMKPRo6yCmTych0WQAAAAAAAMCAccstt6i8vFwLFizQQQcdJJOJZiS9jVAdu6TI61CgLaZPqvwKhGMaUeCR02bOdFkAAAAAAADAgDFjxoxMlzCgEapjl/mcVjmsZm1sbFNLOK6RRR4VeOyZLgsAAAAAAAAAehz3BmC32CwmleQ4FIoktHxTs9bWBRVPbL/aLwAAAAAAAABkE0J17DbDMFTotctltejzLS1aWR1QMBLPdFkAAAAAAAAA0GNo/4JvzW23yGYxqbq5TcGv2sEUee0yDBYxBQAAAAAAAJBdmKmObmE1m1SS41Q8kdKKTX6t2RJUNE47GAAAAAAAAADZhVAd3cYwDOW5bfI5rFq9JaiVVX4FwrFMlwUAAAAAAAAA3aZfhuoPPvigysvL5XA4NHHiRC1atGiHY19++WWNHz9egwYNktvt1tixY/Xkk092GHPhhRfKMIwOP9OnT+/pt5G1nDazhuQ4VdcS0bKNzapqblMqlcp0WQAAAAAAAMCA1xZNqC2aUCyRTP8bu6bf9VR/7rnndNVVV+nhhx/WxIkTdd999+mYY47RZ599pqKiou3G5+Xl6frrr9dee+0lm82mV199VTNmzF
BRUZGOOeaY9Ljp06frscceSz+22+298n6yldlkaEiOU82hqD7e7FegLabyArccVnOmSwMAAAAAAAAGnFA0rlA0ocfmrdXslTUKtMXlc1o0fUyxZhxSIZfNLJctM3GxYRj6n//5H5100kmdbl+3bp0qKiq0dOlSjR07tldr60y/m6l+77336kc/+pFmzJihvffeWw8//LBcLpceffTRTsdPnTpV3//+9zV69GhVVlbqiiuu0H777ad58+Z1GGe321VcXJz+yc3N7Y23k/UGuWzKddm0rqFVH2/2q6k1mumSAAAAAAAAgAElHEvor+98qYNuf1MPzl2jNXWtqgtGtKauVQ/OXaODbn9Tf33nS4VjPT9r/c4775RhGLryyiu7vE9paamqq6u1zz77SJLmzp0rwzDU3NzcM0XuRL8K1aPRqJYsWaJp06alnzOZTJo2bZref//9ne6fSqU0Z84cffbZZzrssMM6bJs7d66Kioq055576qc//akaGhq+8ViRSESBQKDDDzrnsLa3g/G3xbRsU7M2NrYqkaQdDAAAAAAAANDTQtG4Hn57je57c7V2FMklU9J9b67Ww2+vUSga77FaFi9erL/85S/ab7/9dmk/s9ms4uJiWSx9o/FKvwrV6+vrlUgkNHjw4A7PDx48WDU1NTvcz+/3y+PxyGaz6fjjj9ef/vQnHXXUUent06dP1xNPPKE5c+borrvu0ttvv61jjz1WicSOr8zccccdysnJSf+UlpZ++zeYxUyGoSKvQ1aTSSurAlpVHaBfEwAAAAAAALAbksmUGoKRnf74Q1GFIgnNnLO6S8edOWe1QtGE/KHoTo+d3MVJs8FgUOecc44eeeSRTruEVFdX69hjj5XT6dSIESP04osvpretW7dOhmHoo48+0rp163TEEUdIknJzc2UYhi688MJdquXb6hvRfg/zer366KOPFAwGNWfOHF111VUaMWKEpk6dKkk688wz02P33Xdf7bfffqqsrNTcuXN15JFHdnrMX/3qV7rqqqvSjwOBAMF6F/icVjmsZm1qalMwEtfIIo8KPPSvBwAAAAAAALqqKRTVgbe9udNxD5x1gFZWBXY4Q/3rkinp0XlrtfcQny59Zuk3jl3ym2nK34Vc72c/+5mOP/54TZs2Tbfddtt222+44Qbdeeeduv/++/Xkk0/qzDPP1IoVKzR69OgO40pLS/XSSy/plFNO0WeffSafzyen09nlOrpDvwrVCwoKZDabVVtb2+H52tpaFRcX73A/k8mkkSNHSpLGjh2rVatW6Y477kiH6l83YsQIFRQU6IsvvthhqG6321nMdDfZLCaV5DhUH4xq+aZmVeS7VZrnksXcr26cAAAAAAAAAPq0gyry9Mc3P9+lfV5fWaMLJpd3ax3PPvusPvzwQy1evHiHY0477TT98Ic/lCTdeuuteuONN/SnP/1JDz30UIdxZrNZeXl5kqSioiINGjSoW2vtin6VYtpsNh144IGaM2dO+rlkMqk5c+Zo0qRJXT5OMplUJBLZ4fZNmzapoaFBQ4YM+Vb1YscMw1Ch1y6X1aLPt7RoZVVAwUjP9WsCAAAAAAAABhq33aJA265lboFwXG57983F3rhxo6644go9/fTTcjgcOxz39Xx30qRJWrVqVbfV0Z361Ux1Sbrqqqt0wQUXaPz48TrooIN03333qbW1VTNmzJAknX/++Ro6dKjuuOMOSe29z8ePH6/KykpFIhH95z//0ZNPPqk///nPktp7+dxyyy065ZRTVFxcrDVr1uiaa67RyJEjdcwxx2TsfQ4UbrtFdotJNf5wuh1MkdcuwzAyXRoAAAAAAADQr7VG4vI5LaoL7niC8df5HBa1duPk1yVLlmjLli0aN25c+rlEIqF33nlHDzzwwDdOfu6r+l2ofsYZZ6iurk433nijampqNHbsWM2ePTu9eOmGDRtkMv3fBPzW1lZdcskl2rRpk5xOp/baay899dRTOuOMMyS13y6wfPlyPf
7442publZJSYmOPvpo3XrrrbR36SUWs0lDchxqCsW0YpNf5fkuDc93y2bpVzdSAAAAAAAAAL0i12XTkt9M2+k4q9nQMWOK9dDcNV0+9vR9hshpNe30+LkuW5eOd+SRR2rFihUdnpsxY4b22msvXXvttTKbzZKkBQsW6Pzzz0+PWbBggQ444IBOj2mztb92IpHoUg3drd+F6pJ06aWX6tJLL+1029y5czs8vu222zptfL+V0+nU66+/3p3lYTcYhqE8t01t0YS+qGtVSySuEYUe5TitmS4NAAAAAAAA6FNMJqPLi4RedEiFHn57TZcWKzUZ0kVTyuVzdi0w7wqv16t99tmnw3Nut1v5+fkdnn/hhRc0fvx4HXLIIXr66ae1aNEi/e1vf+v0mGVlZTIMQ6+++qqOO+44OZ1OeTyebqt5Z5gKjD7FaTOr2OdQfTCi5ZuaVdXcplSqi8sTAwAAAAAAAOjAZTPr8iP36NLYK47cQ06buYcr6twtt9yiZ599Vvvtt5+eeOIJPfPMM9p77707HTt06FDdcsstuu666zR48OAdTsDuKUaKxLJbBAIB5eTkyO/3y+fzZbqcbtcQjOiD9U0qyXH22ms2h6JqiyU0PM+l8gK3HNbM/EEDAAAAAADg28v2/GxXhcNhrV27VhUVFd+4gGe3vFYsoYffXqOZc1Z3OmPdZEiXH7mHLj68csBmcLvy++iX7V8wMAxy2eSIJbSuob0dzMhCj3Ld3XfrCQAAAAAAADAQOKxm/fiwETrv4DI9Nn+dXvu4WoFwXD6HRcfuM0QzppTLaTMP2EB9VxGqo09zWM0akuNUXUtEyzY1a0SBW0NzXTKbjEyXBgAAAAAAAPQbLptFLptFPztipH52xEhZzIbiifZp65lq+dJfEaqjzzMZhgb7HGoJx/RJdUCBcFyVhR7+2AEAAAAAAIBdtG2mxsT03UOojn7D67DKYTVrU1ObgpG4RhZ5VNDFVY4BAAAAAAAAoDuYMl0AsCusZpNKchwKRRJavqlZa+uCiieSmS4LAAAAAAAAwABBqI5+xzAMFXrtclkt+nxLi1ZWBRSMxDNdFgAAAAAAAIABgPYv6LfcdovsFpNq/OF0O5gir12GwSKmAAAAAAAAAHoGM9XRr1nMJg3JcSieSGnFJr/WbAkqGqcdDAAAAAAAAICewUx19HuGYSjPbVNbNKEv6lrVEolrRKFHOU5rpksDAAAAAAAA+pZYW/v/mixS8quWylZn5urph5ipjqzhtJlV7HOoPhjRso3NqmpuUzKZynRZAAAAAAAAQOZFQ1JrnfTO76W/HCr9ce/2/33n9+3PR0MZK628vFz33XffN44xDEOvvPJKr9SzM4TqyCpmk6Fin1OGpI83+/V5bYvCsUSmywIAAAAAAAAyJ9YmvTdT+sMo6d17pPrVUnBL+/++e0/78+/NlGLhbn/pRCKhG264QRUVFXI6naqsrNStt96qVGrXJsNWV1fr2GOPlSStW7dOhmHoo48+6vZ6u4L2L8hKg1w2OWIJrWtobwczstCjXLct02UBAAAAAAAAvSsaag/M596x4zGp5P9tn3y5ZHN128vfdddd+vOf/6zHH39cY8aM0QcffKAZM2YoJydHl19+eZePU1xc3G01fVvMVEfWcljNGpLjlD8U07JNzdrQ0KoE7WAAAAAAAADQ3yWTUmv9zn/amqVoUHr7rq4d9+27pFhr+347O3Yy2aVDvvfeezrxxBN1/PHHq7y8XKeeeqqOPvpoLVq0qMO4lpYWnXXWWXK73Ro6dKgefPDBDtu3bf9SUVEhSTrggANkGIamTp3atffXTZipjqxmMgwN9jnUEo7pk+qAAuG4Kgs9ctrMmS4NAAAAAAAA2D1tjdLvK3c+7tTHpJrl7TPRuyKVlN5/SCreV3pxxjeP/eUayV2w00NOnjxZf/3rX/X5559r1KhRWrZsmebNm6d77723w7jf//73+vWvf61bbrlFr7/+uq644gqNGjVKRx
111HbHXLRokQ466CC9+eabGjNmjGy23u1QQaiOAcHrsMphNWtTU5uCkbhGFnlU4LFnuiwAAAAAAACg55RNlub+btf2+fRf0sQfd1sJ1113nQKBgPbaay+ZzWYlEgndfvvtOuecczqMmzJliq677jpJ0qhRozR//nz98Y9/7DRULywslCTl5+dnpC0M7V8wYFjNJpXkONQWTWj5pmatrQsqnujiVToAAAAAAACgv7F7pLB/1/YJBySbp9tKeP755/X000/r73//uz788EM9/vjj+sMf/qDHH3+8w7hJkyZt93jVqlXdVkd3YqY6BhTDMFTgsas1EtfnW1ra28EUeeSx86cAAAAAAACALBMJSo4cKbil6/s4fO192LvJL3/5S1133XU688wzJUn77ruv1q9frzvuuEMXXHBBt71ObyJJxIDktltkt5hU4w+n28EUee0yDCPTpQEAAAAAAADfzJnX3tN8Z0xWafT3pHfv6fqxR58oWVw7P74zr0uHC4VCMpk6Nkwxm81Kfm2h0wULFmz3ePTo0Z0ec2sP9UQi0aUauhuhOgYsi9mkITkONYViWrHJr/J8l4bnu2Wz0BUJAAAAAAAAfZjJ1KVFQiVJB/9UmvfHri1Wapjaxztzvl192zjhhBN0++23a/jw4RozZoyWLl2qe++9VxdddFGHcfPnz9fdd9+tk046SW+88YZeeOEF/fvf/+70mEVFRXI6nZo9e7aGDRsmh8OhnJzuq3lnSA8xoBmGoTy3TT6HVV/UterjzX7522KZLgsAAAAAAADoHla3dPi1XRt7+LWS1dmtL/+nP/1Jp556qi655BKNHj1aV199tX7yk5/o1ltv7TDuF7/4hT744AMdcMABuu2223TvvffqmGOO6fSYFotFM2fO1F/+8heVlJToxBNP7Naad8ZIpVKpXn3FLBUIBJSTkyO/3y+fz5fpcrpdQzCiD9Y3qSSne/+o+pJEMqW6YFh2i1kjizwq9jlkMtEOBgAAAAAAoDtke362q8LhsNauXauKigo5HI6efbFYWJp/n/T2XZ3PWDdM7YH6lCslaw/X0kftyu+D9i/AV8wmQ8U+p/xtsa9mrEdVUeCRw2rOdGkAAAAAAADA7rM6pMmXSxN+KC34s7TqH1I40L4o6egT21u+WJ0DNlDfVYTqwNfkOK1yWE1a3xBSMJLQyEKPct22TJcFAAAAAAAA7D6bq/3nsKulw34hmSxSMi7J6PaWL9mOnupAJ+wWs4bkOOUPxbRsU7M2NLQqkaRTEgAAAAAAAPo5q1OyuiSzrf1/CdR3GaE6sAMmw9Bgn0M2s0mfVAe0qjqgtmgi02UBAAAAAAAAyCDavwA74XVY5bCatampTcFIXJWFHhV67ZkuCwAAAAAAAEAGMFMd6AKr2aSSHIfaogkt39ystXVBxROdrJQMAAAAAAAA7IJkkoypL0ilut76mZnqQBcZhqECj12tkbg+q21RIBxXZZFHHjt/RgAAAAAAANg1NptNJpNJVVVVKiwslM1mk2EYmS5rQEqlUqqrq5NhGLJarTsdTxoI7CK33SK7xaQaf1jBSFwjizwq8to56QEAAAAAAKDLTCaTKioqVF1draqqqkyXM+AZhqFhw4bJbDbvdCyhOrAbLGaThuQ41BSKacUmv8ryXSrLd8tmoaMSAAAAAAAAusZms2n48OGKx+NKJBKZLmdAs1qtXQrUJUJ1YLcZhqE8t01t0YTW1LWq5at2MDnOnd8iAgAAAAAAAEhKtxzpStsR9A1MqwW+JafNrGKfQw2tES3b2Kyq5jYlk11f2AAAAAAAAABA/0GoDnQDs8lQsc8pk2Ho481+fVYbUDjGLTsAAAAAAABAtqH9C9CNcpxWOawmrW8IKRhJaGShR7luW6bLAgAAAAAAANBNmKkOdDO7xawhOU4FQjEt29SsDQ2tStAOBgAAAAAAAMgKhOpADzAZhop8DtnMJq2qadGq6oDaorSDAQAAAAAAAPo72r8APcjrsM
phNWtTU5uCkbgqCz0q9NozXRYAAAAAAACA3cRMdaCHWc0mleQ41BZNaPnmZq2tCyqeSGa6LAAAAAAAAAC7gVAd6AWGYajAY5fbatFntS1aWRVQMBLPdFkAAAAAAAAAdhHtX4Be5LZbZLeYVOMPKxiJa2SRR0VeuwzDyHRpAAAAAAAAALqAmepAL7OYTRqS41A8kdKKTX59sSWoaJx2MAAAAAAAAEB/wEx1IAMMw1Ce26a2aEJr6lrVEo6rssijHKc106UBAAAAAAAA+AbMVAcyyGkzq9jnUENrRMs2Nmtzc5uSyVSmywIAAAAAAACwA4TqQIaZTYaKfU6ZDEMrN/v1WW1A4Vgi02UBAAAAAAAA6ATtX4A+IsdplcNq0vqGkIKRhEYWepTrtmW6LAAAAAAAAADbYKY60IfYLWYNyXEqEIpp2aZmbWhoVYJ2MAAAAAAAAECfwUx17FRbNCGnzaypexYqnkipNRJXMBxXa5QWJT3BZBgq8jnUEo5pVU2LAuG4Kgs9ctrMmS4NAAAAAAAAGPAI1bFDoWhcoWhCj81bq9kraxRoi8vntOiYMcWaMaVCuW6bagNhxRLMpO4JXodVDqtZG5tCCkbag/VCrz3TZQEAAAAAAAADmpFKpUhEu0EgEFBOTo78fr98Pl+my/nWwrGEHn57jWbOWa3Ouo+YDOnyI/fQjw4doY2NIYL1HpRKpdTQGlVSKVXkuzU8zyWLmc5NAAAAAACgf8m2/AwDF8kcthOKxvXw22t035udB+qSlExJ9725Wo+8+6UG+xy9W+AAYxiGCjx2ua0WfV7bopVVAQUj8UyXBQAAAAAAAAxIhOrYTiia0Mw5q7s0duac1YonU3LR77vHue0WDfY6VOMP66ONzaoNhMWNJgAAAAAAAEDvIlRHB23RhB6dt3aHM9S/LpmSZr23Tl4H7fl7g8Vs0pAch5KJlFZs8uuLLUFF48lMlwUAAAAAAAAMGITq2M7rK2t2afzsj6vlshOq9xbDMJTrtsnnsGpNXas+3uyXvy2W6bIAAAAAAACAAYFQHR1YzIYCbbvWrzsQjstqNnqoIuyI02ZWsc+hhtaIlm1s1ubmNiW7eosBAAAAAAAAgN1CqI4O4omUfM5dm3Xuc1gUiiYUT9CGpLeZTYaKfU6ZDEMrN/v1WW1A4Vgi02UBAAAAAAAAWYtQHduZPqZ4l8YfM6ZY81fX69JnlmrBlw0snpkBOU6r8tw2rW8IacVmv5pao5kuCQAAAAAAAMhKhOrowGkza8YhFTJ1sZuLyZDOnjhcTyxYr83Nbbr9P6t0/Ssf64stwZ4tFNuxW8wakuNUIBTTsk3N2tDQqgTtYAAAAAAAAIBuRaiO7bhsZl1+5B5dGvuzI0aqsTWqRWsb08+t2OzXVc9/pD+++bkagpGeKhOdMBmGinwO2cwmrapp0arqgELRXeuRDwAAAAAAAGDHdq15NgYEl82iiw+vlCTNnLNanU12NhnS5UfuoYumVOiSp5dstz0l6a1Pt2jeF/X6/gFDdcoBw+S0mXu4cmzldVjlsJq1qSmkYCSuykKPCr32TJcFAAAAAAAA9HtGigbY3SIQCCgnJ0d+v18+ny/T5XSLUDSutmhCj81fp9c+rlYgHJfPYdH0fYbowsnlspgM1QbCisSTend1vR5/f53qWjqfmZ7rsurcg8t05F6DZe5qbxl8a6lUSg2tUSWVUkW+W8PzXLKYuUEFAAAAAAD0vmzMzzAwEap3k2w+KbRFE0opJbNhKJZMKRSJqyUcVyia6DAuEk/on8uq9MIHm9QWS3R6rPJ8l35wyAiNLR3UC5Vjq9ZIXM1tUQ3JcaqyyCOPnZtUAAAAAABA78rm/AwDC6F6N8n2k0JDMKIP1jepJMe507HNoaj+vmiDXl9Z02nrGEkaX5arGVMqNDzP1c2VYkfiiaS2tETkdli0R5FHRV67DIO7BgAAAAAAQO/I9vwMAwd9INDtBrlsumTqSP3prHEaX5bb6ZgP1j
fpsmc+1ENzv1BzKNrLFQ5MFrNJQ3IcSiZSWrHJry+2BBWNJzNdFgAAAAAAANCvEKqjxwzPc+mmE8bot98bo/L87WekJ1PSax/X6CdPLdGLSzYR8PYCwzCU67bJ57BqTV2rPt7sl78tlumyAAAAAAAAgH6DUB097oDhubrvjAN06REjleuybrc9FE3o8ffX6adPL9E7n9eJjkQ9z2kzq9jnUENrRMs2Nmtzc5uSO+rVAwAAAAAAACCNUB29wmwydMyYYv3l3PE6Y0KpbJbtv3pbWiL6/f9+pl++uFyrqgMZqHJgMZsMFfucMhmGVm7267PagMI7WGAWAAAAAAAAQDtCdfQqp82scyeW6S/nHqjv7FnU6ZjPalt0zUvLdefsT1XjD/dyhQNPjtOqPLdN6xtCWrHZr8ZWetwDAAAAAAAAO9IvQ/UHH3xQ5eXlcjgcmjhxohYtWrTDsS+//LLGjx+vQYMGye12a+zYsXryySc7jEmlUrrxxhs1ZMgQOZ1OTZs2TatXr+7ptzGgFXjs+vlRo/TH08dqn5LOV3ue/0W9fvr0Ej06f62CkXgvVziw2C1mDclxKhCKafnGZm1oaFWCdjAAAAAAAADAdvpdqP7cc8/pqquu0k033aQPP/xQ+++/v4455hht2bKl0/F5eXm6/vrr9f7772v58uWaMWOGZsyYoddffz095u6779bMmTP18MMPa+HChXK73TrmmGMUDjNLuqeNLPLod9/fV9cfN1olOY7ttseTKf3P0s368ZMf6NXlVYonWMy0p5gMQ0U+h2wWk1bVtGhVdUChKBczAAAAAAAAgG0ZqX62KuTEiRM1YcIEPfDAA5KkZDKp0tJSXXbZZbruuuu6dIxx48bp+OOP16233qpUKqWSkhL94he/0NVXXy1J8vv9Gjx4sGbNmqUzzzyzS8cMBALKycmR3++Xz9f5zOv+rCEY0Qfrm1SS4+yx14glknrt4xo9u2iDWnYwM33oIKdmTCnXQeV5Mgyjx2oZ6GKJpLa0hDXIZVNloUeFXnumSwIAAAAAAP1ctudnGDj61Uz1aDSqJUuWaNq0aennTCaTpk2bpvfff3+n+6dSKc2ZM0efffaZDjvsMEnS2rVrVVNT0+GYOTk5mjhxYpeOie5jNZv0vf1L9NfzxuuksSWymLYPzTc3t+m2f6/Sb175WGvqghmocmCwmk0qyXGqLZrQ8s3NWrMlyF0CAAAAAAAAgCRLpgvYFfX19UokEho8eHCH5wcPHqxPP/10h/v5/X4NHTpUkUhEZrNZDz30kI466ihJUk1NTfoYXz/m1m2diUQiikQi6ceBQGCX3w8653FY9INDRui4fYdo1nvr9N6ahu3GLN/s18+f+0jf2atI5x1cpnwPM6m7m2EYKvDYFYrGtXpLi4KRuCqLPPLY+9VpAwAAAAAAAOhWAyId83q9+uijjxQMBjVnzhxdddVVGjFihKZOnbrbx7zjjjt0yy23dF+R2M6QHKd+dexorazy62/z1mr1lo4z01OS5ny6RfO+qNf3Dxiqkw8YJqfNnJlis5jLZpHNbFKNP6yWSFwjCz0a7LPTfgcAAAAAAAADUr9q/1JQUCCz2aza2toOz9fW1qq4uHiH+5lMJo0cOVJjx47VL37xC5166qm64447JCm9364e81e/+pX8fn/6Z+PGjbv7trATY0py9IfT9tcvjhrVaW/vSDypZxdv1MVPLdEbn9QokexXywT0CxazSSWDnEomUvp4s19fbAkqGqcdDAAAAAAAAAaefhWq22w2HXjggZozZ076uWQyqTlz5mjSpEldPk4ymUy3bqmoqFBxcXGHYwYCAS1cuPAbj2m32+Xz+Tr8oOeYDENT9yzSn88Zp/MPLpPTuv2M9MZQVDPf+kI/f/4jLdvY3PtFDgC5bpt8DqvW1LVqxeZm+dtimS4JAAAAAAAA6FX9rv3LVVddpQsuuEDjx4/XQQcdpPvuu0+tra2aMWOGJOn888/X0KFD0zPR77
jjDo0fP16VlZWKRCL6z3/+oyeffFJ//vOfJbX3jb7yyit12223aY899lBFRYVuuOEGlZSU6KSTTsrU28QO2C1mnTa+VNP2Hqy/L9yg//2kRl+fmL62vlW/+cfHmlCeqxlTKlSa68pMsVnKaTOr2OJQfTCiZRubVVnk0RCfQ6ZOFpYFAAAAAAAAsk2/C9XPOOMM1dXV6cYbb1RNTY3Gjh2r2bNnpxca3bBhg0ym/5uA39raqksuuUSbNm2S0+nUXnvtpaeeekpnnHFGesw111yj1tZW/fjHP1Zzc7MOOeQQzZ49Ww6Ho9ffH7om12XTz44Yqe/uN0SPvbdOS9Y3bTdm8bomLVnfpOn7DNHZBw1XjtOagUqzk9lkaLDPIX9bTB9v9ivQFlVFgUeOTu4gAAAAAAAAALKJkUqlaEDdDQKBgHJycuT3+7OyFUxDMKIP1jepJMeZ6VI69eGGJj06b63WN4Y63e6ymXX6+FKdsF+JbJZ+1fWoz4vEE6oPRpTvsauy0KM8ty3TJQEAAAAAgD4o2/MzDByki8gK44bn6v4zD9ClR4zUINf2M9JD0YRmvbdOP316id5dXSeuJXUfu8WsITlOBUIxLd/YrA0NrSwWCwAAAAAAgKxFqI6sYTYZOmZMsf5y7oE6Y3ypbObtv95bWiK6+/XP9MsXl+vT6kAGqsxOJsNQkc8hu8WsVTUtWlUdUCgaz3RZAAAAAAAAQLcjVEfWcdksOvfgMj187oE6Ys/CTsd8VtuiX760XHfN/lQ1gXAvV5i9PA6LCj12bWoKafkmv+paIpkuCQAAAAAAAOhW9FTvJtneE6qv91T/JqtrW/S3+Wu1sqrzmekWk6Hv7V+i08aXymPvd2v39kmpVEoNrVEllVJ5nltl+S5ZOrlzAAAAAAAADBzZnp9h4CDlQtbbY7BXd3x/X/36uNEakuPYbns8mdLLSzfrx09+oH8vr1I8kcxAldnFMAwVeOzy2CxavaVFK6sCagnHMl0WAAAAAAAA8K0RqmNAMAxDk0bk68Gzx+lHh1Z0OiO9JRzXw+98qUufWapFaxtZzLQbuGwWDfY6VBMIa9kmv2r8YT5XAAAAAAAA9GuE6hhQrGaTvrf/UP31vAN14v4lspiM7cZsbm7Trf/+RL/5x8f6si6YgSqzi8VsUkmOU8lESis2+7W6NqhonLsBAAAAAAAA0D8RqmNA8jqs+uGhI/Tg2eM0aUR+p2OWb/Lryuc+0v1zPldDkAU3v61ct02DnFZ9Wd+qFZub5Q/RDgYAAAAAAAD9DwuVdpNsX2ihPy9U2hUrq/z6f/PW6ostnc9Mt1tMOmXcMH3/gKFyWM29XF12SSRTqg9GZLOYVFnk0RCfQ6ZO7hgAAAAAAADZJdvzMwwczFQHJI0pydE9p+2vXxw1SgUe+3bbI/Gk/r5og37y1BK9+UmtEkmuRe0us8nQYJ9DJsPQys1+fVYbUDiWyHRZAAAAAAAAQJcQqgNfMRmGpu5ZpIfPHafzDi6Ts5MZ6Y2tUd3/1mpd9fxHWrapufeLzCI5Tqvy3Datbwhp+aZmNbZGM10SAAAAAAAAsFO0f+km2X77Sra3f+lMUyiqpxdu0Buf1GhHE9MPKs/ThVPKVZrr6t3iskgylVJ9S0Rmk6GKQreG5bpkph0MAAAAAABZJ9vzMwwczFQHdiDXZdOlR4zUzDMP0Ljhgzods2hdoy79+4d6+O018rex8ObuMBmGinwO2S1mrapu0arqgELReKbLAgAAAAAAADrFTPVuku1X2gbiTPWvW7K+SY/OX6sNjaFOt7ttZp0+vlQn7F8iq5nrVbsjlkhqS0tYg5w2VRZ5VOjdvr89AAAAAADon7I9P8PAQfIHdNGBZbmaeeYB+tnUkRrktG63vTWa0GPvrdNPn16id1fXietVu85qNqkkx6lwLKHlm5u1ZktQsUQy02UBAAAAAAAAaYTqwC4wmwxN36dYfznvQJ124DDZOpmRXhuI6O7XP9M1Ly3Xpz
WBDFTZvxmGoXyPXR6bRau3tOiTqoBawrTWAQAAAAAAQN9AqA7sBpfNovMnlevP547T1FGFnY75tKZFv3xxue5+/VPVBsK9XGH/57JZNNjrUE0grGWb/Krxh5n9DwAAAAAAgIwjVAe+hSKvQ784ek/dc9r+2ntI573A3l1dr58+vUSz3lur1ggLcO4Ky1ftYJKJlFZs9mt1bVDROO1gAAAAAAAAkDmE6kA3GDXYqztP3le/OnYvDclxbLc9lkjppQ8368dPfqB/r6hWIsmM612R67ZpkNOqL+tbtWJzs/wh2sEAAAAAAAAgMwjVgW5iGIYmVxbowbPH6QeHVMhtN283JhCO6+G31+jSZz7U4nWNtDPZBQ6rWcU+h5paY/poU5M2N7cpycUJAAAAAAAA9DJCdaCbWc0mnTR2qP567nh9b/8SmU3GdmM2NbXpt69+ohv+8bHW1gczUGX/ZDYZGuxzyGyYtHKzX5/VBhSOJTJdFgAAAAAAAAYQQnWgh/icVv3o0BF68KxxOnhEXqdjlm3y64pnP9LMOavVEIz0coX9V47Tqjy3TesbQlq+qVmNrdFMlwQAAAAAAIABwkjRf6JbBAIB5eTkyO/3y+frfMHK/qwhGNEH65tUkuPMdCn91orNfj06b62+qOt8ZrrdYtIp44bp+wcMlcO6fesYbC+ZSqm+JSKzyVBFoVvDcl2d3hkAAAAAAAAyL9vzMwwczFQHesm+Q3N0z+n76+fTRqnAY9tueySe1N8XbdDFTy3RnFW1SnK9a6dMhqEin0N2i1mrqlu0qjqgUDSe6bIAAAAAAACQxQjVgV5kMgx9Z68i/fmcA3XuxOFyWLf/E2xojeq+Oav18+c/0vJNzb1fZD/kcVhU5LVrU1NIyzf6VddCKx0AAAAAAAD0DNq/dJNsv32F9i89o6k1qqcXrtcbq2qV3MFf4sSKPF04uVzDcl29W1w/lEql1NgaVUIplee5NTzfJauZa4cAAAAAAPQF2Z6fYeAgbQIyKNdt06Xf2UP3n3GAxpYO6nTMwrWNuvSZpfrLO2sUaIv1boH9jGEYyvfY5bFZtHpLiz6pCqglzGcGAAAAAACA7sNM9W6S7VfamKne81KplJZsaNKj89dpY2Oo0zFuu1lnjC/Vd/crYQb2TsQTSW0JRuS2WzSy0KPBPrsMg0VMAQAAAADIlGzPzzBwkMoBfYRhGBpflqc/nXmALplaqRyndbsxrZGEHp2/Tpc8/aHmf1EvrontmMVsUkmOU8lESis2+7W6NqhoPJnpsgAAAAAAANDPEaoDfYzZZOjYfYbor+cdqNMOHCarefvZ1TWBsO6c/amufXmFPqtpyUCV/Ueu26ZBTqu+rG/Vis3N8odoBwMAAAAAAIDdR/uXbpLtt6/Q/iVztgTCemLBer39ed0Oxxy2R6EumFSmIp+jFyvrXxLJlOqDEVkthkYWeTXE55DJRDsYAAAAAAB6S7bnZxg4mKkO9HFFPoeuPnpP3XPa/ho9pPP/4Lyzuk4XP71Ej7+3TqFovJcr7B/MJkODfQ6ZDZNWbvbrs9qAwrFEpssCAAAAAABAP0OoDvQTowZ7ddfJ++q66XupuJMZ6bFESi9+uEk/fnKJXvu4WokkN6F0JsdpVZ7bpvUNIS3f1KzG1mimSwIAAAAAAEA/QqgO9COGYWjKyAI9dM44/WBKhdx283Zj/G0xPTR3jS57dqk+WNfIYqadsFvMGpLjVEtbXMs3Nmt9QysXIQAAAAAAANAlhOpAP2Q1m3TSAUP113PH64T9hsjcSW/wjY0h3fLqJ7rxnyu1tr41A1X2bSbDUJHPIbvFrFXVLfqkKkDrHAAAAAAAAOwUoTrQj/mcVv34sEo9eNY4TazI63TMRxubdeVzS/Wnt1ariVYn2/E4LCry2rW5OaTlG/3a0hLOdEkAAAAAAADow4wUvSG6RbavXtwQjOiD9U0qyXFmuhR8gxWbmvW3+Wu1pq7zmekOq0mnjBumk8YOlcO6fe
uYgSyVSqmxNap4KqWKfLeG57tkNXPdEQAAAACA7pLt+RkGDhIjIIvsO2yQ7j19rH4+bQ/lu23bbQ/Hknp64QZd/NQSvfVprZJcU0szDEP5Hru8dotWb2lvB9MSjmW6LAAAAAAAAPQxhOpAljEZhr6z12A9fO6BOmficDms2/+ZN7RG9cc3V+uq5z/Sis3+DFTZd7lsFg32OlQTCGvZJr9q/GEWewUAAAAAAEAa7V+6SbbfvkL7l/6rsTWqpxau15uf1GpHf+wTK/I0Y3KFhuby+91WU2tUkURSZXkulRW4ZLfQMgcAAAAAgN2V7fkZBg5mqgNZLs9t0+Xf2UP3n3mAxpYO6nTMwrWN+tkzH+qv76xRoI2WJ1vlum0a5LRqTX1QH2/2yx/iswEAAAAAABjomKneTbL9Shsz1bNDKpXSkvVNenT+Wm1saut0jNtu1pnjh+v4/YawUOdXEsmU6oMRWS2GRhZ5NcTnkMlkZLosAAAAAAD6lWzPzzBwkJgBA4hhGBpfnqc/nTVOPz28UjlO63ZjWiMJ/W3+Wl3y9Iea/0U9/cQlmU2GBvscMhsmrdzs16e1AYVjiUyXBQAAAAAAgAwgVAcGILPJ0HH7DtFfzj1Qp44bJqt5+1nXNYGw7pz9qa57eYU+r23JQJV9T47Tqjy3TRsaQlq+qVmNrdFMlwQAAAAAAIBeRvuXbpLtt6/Q/iW71QbCeuL99Xpndd0Oxxw+qlDnTypTkdfRi5X1TclUezsYs2GootCtYbkumWkHAwAAAADAN8r2/AwDBzPVAWiwz6FfHrOn/nDq/hpd7O10zNuf1+nip5boiffXKRSN93KFfYvJMFTkdchuMWtVdYs+qQoM+M8EAAAAAABgoCBUB5C2Z7FXd52yn66bvpcG++zbbY8lUnphySb95Mkleu3jaiWSA/tGF4/DoiKvXZubQ1q+0a8tLeFMlwQAAAAAAIAeRqgOoAPDMDRlZIH+fM6BumhKudw283ZjmttiemjuGl327FItWd+UgSr7DqvZpJIcp8KxhJZv8mvNlqBiiWSmywIAAAAAAEAPoad6N8n2nlD0VB+4/G0xPbt4g177uGaHM9MPKB2ki6ZUqLzA3cvV9S2haFzNoaiKc5waUeiW12HNdEkAAAAAAPQZ2Z6fYeBgpjqAb5TjtOonh1XqgbMO0MSKvE7HLN3YrCueW6oH3lqtptZoL1fYd7hsFhV5HaoJhLVsY7Nq/GFx3RIAAAAAACC7MFO9m2T7lTZ/KKYVVX4Fw3E5rWZ5HRZZzVyTGYiWb2rW3+av1Zd1rZ1ud1rNOuXAYTpx/xI5rNu3jhkomlqjiiSSKstzqazAJbtl4H4WAAAAAABI2Z+fYeAgVO8mA+Gk0BqJqykUVbU/LH8opkQqJZ/dKrfdLMMwMl0eelEyldJ/P92iJxasV+MOZqYXeGw67+ByTd2zUKYB+v0IxxKqb42oyGvXyEKvcly0gwEAAAAADFwDIT/DwECo3k0G0kkhkUzJ3xZTfUtEW1rCao3GZTWZ5XNamI07wIRjCf3P0s166cNNisQ7X5xzZKFHPzikQvsMzenl6vqGRDKl+mBEVouhykKPSnKcMpkG5kUGAAAAAMDANpDyM2Q3QvVuMlBPCuFYQk2hqGr9YTWGoorFU3LbLfLYLTITHA4YDcGInl64QW+uqtWOTiiTRuTrwsnlKhk0MBe79bfFFIrGNSzPqREFngHdGgcAAAAAMDAN1PwM2YdQvZsM9JNCKpVSIBxXYzCiGn9ELZGYTIYhr8Mil82S6fLQS9bWB/W3eWu1bJO/0+1mk6Hj9x2iMyeUyusYeK1QIvGE6oMR5bltqiz0KN9jz3RJAAAAAAD0moGenyF7EKp3E04K/yeWSKopFNWWQET1wYjCsYRcVou8DossLG6a9VKplD5Y36RH56/Vpqa2Tsd47BadMaFUx+87ZMAteJtMtbeDMRuGKgrdGpbr4q4OAAAAAMCAQH6GbEGo3k04KXQuGI
mrqTWqan+b/KG4UkrJ67DKbWNx02wXTyT1+ie1+vvC9QqE452OGZLj0IWTyzVpRP6A+z4Ew3H5wzGVDHJoZJGHOzoAAAAAAFmP/AzZglC9m3BS+GaJZErNoajqgxFtaYmoNRKX3WKWz2GVzTKwZioPNK2RuF5YslH/+KhK8WTnp5sxJT5dNKVCowZ7e7m6zIolktrSEtYgp00jitwq8joyXRIAAAAAAD2G/AzZglC9m3BS6LpwLKHG1qhqA+2Lm8YTSbltVnkdFpkG2GzlgaQmENYT76/Tu6vrdzhm6qhCnTepbECFy6lUSo2tUcVTKVXkuzU83zXgWuIAAAAAAAYG8jNkC0L1bsJJYdelUikF2uKqD0ZUGwirJRyTxWSS12GV02bOdHnoIZ9WB/T/5q3VZ7UtnW63mU06cWyJTj1w2IBqiRKKxtUcimqwz6HKIs+AXMgVAAAAAJDdyM+QLQjVuwknhW8nGk+qORRVTSCsxtZo++KmNou8dhY3zUapVErzvqjXrPfWaUtLpNMxg5xWnTOxTEftPXjALOQZTyS1JRiR22bWyCKvBvvsA67XPAAAAAAge5GfIVsQqncTTgrdpyUcU1NrVFX+9tnrkuS1W+VicdOsE40n9eryKj33wUaFoolOxwzPc+kHUyo0riy3l6vLnKZQVJF4UmV5LpUVuGS3cOcGAAAAAKD/Iz9DtiBU7yacFLpfPJFUc1tM9S3ti5uGonE5LGb5nFZ6TmcZf1tMzyzaoNc+rtYO1jLVuOGDdNGUCpXlu3u3uAwJxxJqaI2o0GvXyEKvcly0gwEAAAAA9G/kZ8gWhOrdhJNCz2qLJtQYiqrG36amUEyJZFIem1UeFjfNKhubQpo1f50WrWvsdLvJkI7au1jnTByuXJetl6vrfYlkSvXBiKwWQ5WFHpXkOGUaIK1wAAAAAADZh/wM2YJQvZtwUugdyWRKgXDsq8VNIwpG4rKYDPkcVjmstMjIFss2NevReWv1ZX1rp9udVrNOPXCYThxbMiBao/jbYgpF4xqW69SIQg/fdQAAAABAv0R+hmxBqN5NOCn0vkg8oeZQTDX+sJpaowrHE3LbLPI6rANmYctslkim9N9Pt+jJBevVGIp2OqbAY9f5k8p0+KjCrL9jIRpPqi4YVp7bpspCj/I99kyXBAAAAADALiE/Q7YgVO8mnBQyJ5VKKRiJq7E1quqvFjc1ZMjrsMhpZXHT/i4cS+jlDzfp5aWbFYknOx0zssijHx5SoTElOb1cXe9KptrbwZgMaUShR8NyXVxAAgAAAAD0G+RnyBaE6t2Ek0LfEE8k1RSKqS4YVl1LROFYUnazicVNs0BDMKKnFq7XnFVbtKOT1qQR+bpwcrlKBjl7tbbeFgzH5Q/HVDLIoZFFHrlslkyXBAAAAADATpGfIVsQqncTTgp9TyjaPnu9xh9WcyimRDIlr8Mit53FTfuzL+uC+tv8tVq+yd/pdovJ0HH7DtGZE0rldVh7ubreE0sktaUlrEFOm0YUuVXkdWS6JAAAAAAAvhH5GbIFoXo34aTQdyWTKfnbvlrctCWiYDguq5nFTfuzVCqlxesa9ej8ddrc3NbpGI/dojMnlOq4fYdk7V0KqVRKja1RxVMpVeS7NTzflbXvFQAAAADQ/5GfIVv0y/TlwQcfVHl5uRwOhyZOnKhFixbtcOwjjzyiQw89VLm5ucrNzdW0adO2G3/hhRfKMIwOP9OnT+/pt4FeYjIZynXbtMdgr8aX5Wps6SAVeOwKRmLa3Nwmf1v7LHb0H4Zh6KCKfD1w1gG6+LAR8jq2b38SjMT1/+at1c/+/qHeX1OvbLx+aBiG8j12ee0Wrd7SopWb/WoJxzJdFgAAAAAAQFbrd6H6c889p6uuuko33XSTPvzwQ+2///465phjtGXLlk7Hz507V2eddZb++9//6v3331dpaamOPvpobd68ucO46dOnq7q6Ov3zzDPP9MbbQS9zWM0qznFov2E5OrA8T3sVe2
QxGdrSElZNIKxQNJ7pErELLGaTjt+vRH89b7xOPmCoLJ0s2lntD+t3r32qX/3PCn2xJZiBKnuey2ZRsc+p2paIlm1sVrW/LSsvIgAAAAAAAPQF/a79y8SJEzVhwgQ98MADkqRkMqnS0lJddtlluu6663a6fyKRUG5urh544AGdf/75ktpnqjc3N+uVV17Z7bq4faX/iiWSag7FtCUQVn2wfXFTh9Usr8NCK41+piYQ1uPvrdO8L+p3OOaIPQt13sHlKvTae7Gy3tMUiioST6osz6WyApfsFlocAQAAAAD6BvIzZIt+lRhGo1EtWbJE06ZNSz9nMpk0bdo0vf/++106RigUUiwWU15eXofn586dq6KiIu2555766U9/qoaGhm88TiQSUSAQ6PCD/slqNqnQa9eYoTkaX56nvUt8ctnNamyNqsrfpmA4zqzffqLY59C10/fS3afspz0Hezsd89/P6nTxU0v01IL1WXlnQq7LpkFOq76sD+rjzX75Q7SDAQAAAAAA6E79KlSvr69XIpHQ4MGDOzw/ePBg1dTUdOkY1157rUpKSjoE89OnT9cTTzyhOXPm6K677tLbb7+tY489VolEYofHueOOO5STk5P+KS0t3b03hT7FbbeoNM+lccNzNa4sVxX5biVSSVUH2lTXElEkvuPvBPqO0UN8+v2p++maY/ZUUScz0qOJpJ77YKN+8tQSvb6yJut66jusZhX7nGpqjemjTU3a1BRSMsveIwAAAAAAQKb0q/YvVVVVGjp0qN577z1NmjQp/fw111yjt99+WwsXLvzG/e+8807dfffdmjt3rvbbb78djvvyyy9VWVmpN998U0ceeWSnYyKRiCKRSPpxIBBQaWkpt69koXAsoaZQVLX+sBpDUcXiKbntFnnsFpk76eGNviUaT+pfy6v0/AcbFYp2flGkPN+lGVMqNG54bi9X1/P8bTGFonENy3VqRKFHDivtYAAAAAAAmUH7F2QLS6YL2BUFBQUym82qra3t8Hxtba2Ki4u/cd8//OEPuvPOO/Xmm29+Y6AuSSNGjFBBQYG++OKLHYbqdrtddnt29mRGRw6rWUNynCr2ORQIx9UYjKjaH9aWlrBMhiGfwyqnjaCyr7JZTDpl3DBNGz1Yf1+0QbM/rtbXJ22vawjppn+u1IFluZoxuVxl+e7MFNsDcpxWOa1mbWgIKRiJq7LQo3wP5y4AAAAAAIDd1a/av9hsNh144IGaM2dO+rlkMqk5c+Z0mLn+dXfffbduvfVWzZ49W+PHj9/p62zatEkNDQ0aMmRIt9SN7GAYhnKcVlUUejShIk9jhw9ScY5DoVhcm5tDamqNKp5IZrpM7ECO06qfHl6pB84ap/Flnc9IX7K+SZc/u1QPzf1CzaFoL1fYc2wWk4YMcqolHNfyTc1a39CadS1vAAAAAAAAeku/av8iSc8995wuuOAC/eUvf9FBBx2k++67T88//7w+/fRTDR48WOeff76GDh2qO+64Q5J011136cYbb9Tf//53TZkyJX0cj8cjj8ejYDCoW265RaeccoqKi4u1Zs0aXXPNNWppadGKFSu6PBud21cGrmAkrqbWqKr9bfK3tS9q6nVY5baZZRi0h+mrlm1s1t/mr9Xa+tZOtzutZp124DB9b2yJ7JbsuRMhGI7LH46pZJBDI4s8ctn61Q1LAAAAAIB+jPwM2aLfheqS9MADD+j3v/+9ampqNHbsWM2cOVMTJ06UJE2dOlXl5eWaNWuWJKm8vFzr16/f7hg33XSTbr75ZrW1temkk07S0qVL1dzcrJKSEh199NG69dZbt1sQ9ZtwUkAimVJzKKq6lojqghG1RuKyW8zyOayyWfrVTSEDRiKZ0luf1uqpBRvUuIOZ6YVeu84/uEyHjSqUKUsuksQSSdUFw8px2DSiyK0iryPTJQEAAAAABgDyM2SLfhmq90WcFLCtcCyhxtaoagPti5vGE0m5bVZ5HZasCWazSVs0oZeXbtLLSzcrGu
+8hc+owR794JAR2ntIdvx9p1IpNbZGFU+lVJHv1vB8l6xmLv4AAAAAAHoO+RmyBaF6N+GkgM6kUikF2uKqD0ZUEwgrGI7JYjLJy+KmfVJDMKInF6zXW59u0Y5OjJMr83Xh5HINyXH2am09JRSNqykUVbHPocoij7wOa6ZLAgAAAABkKfIzZAtC9W7CSQE7E40n1RyKqiYQVmMwqnA8IZfNIq/dIgszhPuUNXVBPTpvrZZv9ne63WIy9N39huiM8cPlcfT/nuSJZEq1LWG5bWZVFnlU7HOwHgAAAAAAoNuRnyFbEKp3E04K2BUt4ZiaWqOq8ofVEo5Jkrx2q1wsbtpnpFIpLVrXqMfmr9Pm5rZOx3jtFp150HAdt09xVlwYaQpFFYknVZbnUlmBK6sWaAUAAAAAZB75GbIFoXo34aSA3RFPJNXcFlNdS1h1LVGFonE5LGb5nFb6W/cR8URSs1fW6O+LNqglHO90TEmOQzOmVGhiRV6/vygSjiXU0BpRgceuPYq8ynHRDgYAAAAA0D3Iz5AtCNW7CScFfFtt0YQaQ1HV+NvUFIopkUjJY7fIw+KmfUIwEtfzH2zUv5ZVKZ7s/LS5T4lPPzhkhEYWeXq5uu6VSKZUH4zIajFUWehRSY5TJhPfQQAAAADAt0N+hmxBqN5NOCmguySTKQXCMdUHI6oNRBSMxGUxGfI5rHJYaceRaTX+sGa9v07zv6jvdLsh6Yg9i3TepDIVeOy9W1w387fF1BqJqTTPpRGFHr5/AAAAAIBvhfwM2YJQvZtwUkBPiMQTag7FVOMPq7E1okg8KbfNIq/DKjMzhzPqk+qAHp23Vp/VtnS63WYx6fsHDNUpBwyT09Z/w+hoPKm6lrDyPDZVFnqU388vFAAAAAAAMof8DNmCUL2bcFJAT0qlUmqJxNXUGlW1P6xAW0wmw5DXYZHTyuKmmZJKpfTu6no9/v46bWmJdDom12XVuQeX6ci9BvfbCyHJVHs7GJMhjSj0aFiuq9++FwAAAABA5pCfIVsQqncTTgroLfFEUk2hmOqCYdW1RNQWTcphMbG4aQZF40n9c1mVnv9go9piiU7HlOe7dNGUCh0wPLeXq+s+wUhc/raYSgY5VFnokdtuyXRJAAAAAIB+hPwM2YJQvZtwUkAmhKJxNbZGVeMPqzkUUyKZktdhkdvO4qaZ0ByK6u+LNuj1lTXawVqmGl+WqxlTKjQ8z9W7xXWTWCKpumBYPodVlUUeFXrs3CkBAAAAAOgS8jNkC0L1bsJJAZmUTKbkb/tqcdOWiILhuGxmk7wOC4tLZsCGxpAem79WH6xv6nS7yZCOGVOssw8arkEuWy9X9+2lUik1tkYVT6VUnu9SWb6buyQAAAAAADtFfoZsQajeTTgpoK8Ix9oXN60NbF3cNCWP3SKP3UIf7F62dEOTHp2/VusaQp1ud1rNOm38MJ24/1DZLP0vlA5F42oKRVXsc6iyyCOvw5rpkgAAAAAAfRj5GbIFoXo34aSAviaVSikQjqupNaIaf0SBcEyGYcjnsMhloxd2b0kkU5rzaa2eWrBeTaFYp2OKvHadP6lch+1R0O9aqSSSKW1pCctlM6uyyKNin6PfvQcAAAAAQO8gP0O2IFTvJpwU0JfFEkk1haKqC0RUH4woHEvKYTXL67DQtqOXtEUTemnpJv3P0s2KxpOdjtlzsFc/OKRCo4f0v3NIUyiqSDyhsjy3ygpcsltoOwQAAAAA6Ij8DNmCUL2bcFJAf9Ea+Wpx00BY/lBMiVRKPrtVbruZGca9oD4Y0ZPvr9dbn23Z4Zgplfm6cHKFinMcvVjZtxeOJdTQGlGBx66RRZ5+2S8eAAAAANBzyM+QLQjVuwknBfQ3ia2Lm7ZEtKUlrNZoXFaTWT6nhVnGveCLLUE9On+tVmz2d7rdYjL03f1KdMaEUnns/addTyKZUn0wIqvZUGWRRyU5Tpno5Q8AAAAAEPkZsgehejfhpID+LBxLqCkUVY0/rKZQVN
F4Uh67lcVNe1gqldLCtY16bP5aVfnDnY7xOiw6a8JwHbtPsSz9qFVPoC2mYCSm0jyXRhR65LByoQYAAAAABjryM2QLQvVuwkkB2WDr4qaNwYiq/WEFI3GZDEM+h1VOG6FoT4klknrt4xo9u2iDWiLxTscMHeTUjCnlOqg8r9+06YnGk6prCSvXbdPIIo/yPfZMlwQAAAAAyCDyM2QLQvVuwkkB2SYaT6o5FNWWlq2LmybkslrkdVj61Yzp/iQYjuu5Dzbo1eXViic7PzXvOzRHF02p0MgiTy9Xt3uSqfZ2MCZDqihwqzTPzd0PAAAAADBAkZ8hWxCqdxNOCshmwUhcTa1RVfvb5G+LK5VKyeuwym1jcdOeUO1v06z31um9NQ2dbjckHbFXkc4/uKzfzP4ORuLyt8VUMsihykKP3P2oTzwAAAAAoHuQnyFbEKp3E04KGAgSyZSaQ1HVtURUF4woFInLZjHL57DKZmH2endbWeXX3+at1eotwU632ywmnXzAUJ18wLB+0Z4nlkiqLhiWz2FVZZFHhR47F2UAAAAAYAAhP0O2IFTvJpwUMNCEYwk1tkZVE2hTUyimeCIpt80qr8MiE0Fpt0mmUnrn8zo9sWC96loinY7Jc9l07sHD9Z29Bvf51iqpVEqNrVHFUymV57tUlu+WlXZCAAAAADAgkJ8hWxCqdxNOChioksmUWsJx1QcjqgmEFQzHZDGZ5HNa5bD2/dnT/UUkntA/P6rSC0s2qS2W6HRMeb5LPzhkhMaWDurd4nZDKBpXUyiqYp9DI4o88jmsmS4JAAAAANDDyM+QLQjVuwknBeD/FjetCYTVGIwqHE/IZbPI57D2+RnU/UVTKKq/L9yg//2kRjtYy1Tjy3J10ZQKlea5ere4XZRIprSlJSyXzawRhR7luW1ciAEAAACALEZ+hmxBqN5NOCkAHbWEY2psjaraH1ZLOCZJ8tqtcrG4abdY39Cqx95bpyXrmzrdbjKk6fsM0dkHDVeOs2/PAm8KRRWKJuS2m5XvsanAY1eO0yq7hYAdAAAAALIJ+RmyBaF6N+GkAHQunkiquS2mupaw6lqiCkXjcljM8jmt9NLuBh9uaNKj89ZqfWOo0+0um1mnjy/VCfuV9OnFZJOplELRhFojcSWSSbkdVhW4bcr/KmDvy7UDAAAAALqG/AzZglC9m3BSAHauLZpQYyiqGn/74qaJREoeu0UeFjf9VhLJlN5cVaunFq5XcyjW6Zgir10XTCrXoXsU9Pk7BZKplEKRhILRmFIpyW23qMhrV67bphwuxgAAAABAv0V+hmxBqN5NOCkAXZdMphQIx75a3DSi1khcFpMhn4PFTb+NUDSulz/crP9ZulnRRLLTMXsO9uqHh1RoryH94zyVSKbUGomrNRqXYUhum0VFPrvy3Hb5HBZZCNgBAAAAoN8gP0O2IFTvJpwUgN0TiSfUHIqpxh9WY2tEkXhSbptFXhY33W11LRE9uWCd/vtZ3Q7HHDKyQBdMLlexz9GLlX07iWRKwUhcrZG4TIYhr9OiwV67Brls8jn5vgAAAABAX0d+hmxBqN5NOCkA304qlVJLJK6mrxY3DbTF2oNTh0VOK4ub7o7VtS362/y1WlkV6HS7xWTohP1LdPr4Unnsll6u7tuJJ5JqjSbUGo3JbJjkcVhU7PsqYHdYZSJgBwAAAIA+h/wM2YJQvZtwUgC6TzyRVFPoq8VNgxG1RZNyWEwsbrobUqmUFqxt1GPz16raH+50jNdh0dkHDdf0McX9sp1KLJFUMBJXWzQhs9lQjtOqoq9msHvtFgJ2AAAAAOgjyM+QLQjVuwknBaBnhKJxNbZGVeMPqzkUUyKVktdukdvO4qa7IpZI6rWPq/XMoo0KRuKdjhk6yKmLppRrQnlev70zIJZIKhiOKxSLy2oxKcdh1WCfQzkuq7x2S799XwAAAACQDcjPkC0I1bsJJwWgZyWTKfnb2hc3rW2JKBiOy2Y2yeuwsLjpLmgJx/
Tc4o3694pqxZOdn/73G5qjiw6pUGWhp5er617R+Fcz2ONx2c3tdzoM9jk0yGWT20ZLIQAAAADobeRnyBaE6t2EkwLQe8Kx9sVNawNhNbRGFI2n5LFb5LFbWKyyi6qa2zTrvXV6/8uGTrcbkr6zV5HOO7hM+R577xbXAyLxhILhuCLxpOwWkwa5rCr0OTTIaZW7n/WTBwAAAID+ivwM2aJHQ/UNGzZow4YNOuSQQ9LPLVu2TPfcc48ikYjOOussnXTSST318r2KkwLQ+1KplALhuJpaI6rxRxQIx2QYhnwOi1w2gtKuWFnl1/+bt1ZfbAl2ut1uMenkA4bq5HHDsuaOgHAsoWAkrmiiPWDPddlU5LMrx2nlewMAAAAAPYj8DNmiR0P1k046ScFgUG+++aYkqba2VqNHj1Y0GpXX69WWLVv0wgsv6OSTT+6pEnoNJwUgs2KJpJpCUdUFIqoPRhSOJeWwmuV1WFjcdCeSqZTe+bxOj7+/XvXBSKdj8lw2nXdwmY7Yqyir7gZoiybUEokplkjKaTMrz21TocehQS5r1lxEAAAAAIC+gvwM2aJHk6ZFixbpqKOOSj9+4okn1NbWpmXLlmnz5s068sgj9Yc//KEnSwAwQFjNJhV5HRozNEfjy/O0d4lPLrtZja1RVfnbFAzHRberzpkMQ1P3LNLD547TeQeXydlJmNwYiur+t1br589/pGUbm3u/yB7itJlV5HWoJMcph8WsLf6Ilm1q1qK1jVq52a/aQFjhWCLTZQIAAAAAgD6kR0P1xsZGFRUVpR+/+uqrOvzww1VZWSmTyaSTTz5Zn376aU+WAGAActstKs1zadzwXI0ry1VFvluJVFLVgTbVByOKxAlJO2O3mHX6+FL95bwDdcyYYnU2IX1tfat+84+P9dtXV2pjU6j3i+whhmHIZbOoyOfQEJ9DNrNJ1f6wPtrYrA/WNeqTar+2tIT57gAAAAAAAPVo89jCwkKtX79ektTc3KwFCxbozjvvTG+Px+OKx+M9WQKAAcxsMpTntinPbdPwfJeaQlHV+MNqCkUVjUfksVtZ3LQTuS6bLj1ipE7Yb4genb9WH25o3m7M4nVNWrK+ScfuM0RnHTRcOU5r7xfaQwzDkNtukdtuUTKVUiiaUFVTWBsbQnI7rCr02JTnbu/BbrPQWggAAAAAgIGmR0P1adOmaebMmfL5fJo7d66SyWSHhUk/+eQTlZaW9mQJACBJcljNGpLjVLHPoUBbXA2tEdX4w6oNtMlsMsnnsMppo4f2tsry3brle/vow/VN+tv8tdrQ2HFmejIl/XtFteZ+tkWnjy/VCfuXZF3/epNhyGO3yLM1YI8ktKExpPUNIbntFhV57cpz2+RzWrPuvQMAAAAAgM716EKltbW1Ovnkk/X+++/LZrPprrvu0hVXXCFJikQiGjp0qM4++2zNnDmzp0roNSy0APQ/0XhSzaGoalvCaghGFY4l5LJa5HVYZCEg7SCRTOmNT2r19ML1am6LdTpmsM+uCyaV65CRBTKM7J79n0im1BqJqzUal2FIHptVhb72Gew+vj8AAAAA0CnyM2SLHg3Vt/L7/XI6nbLZbOnn2tra9Pnnn6u0tFR5eXk9XUKP46QA9G/BSFxNrVFV+9vkb2tf1NTrsMptM2d9QLwrQtG4XlyySf/4qErRRLLTMXsVe/WDQyq0V/HAOBcmkikFI3G1RuIyGYa8TosGe+0a5GqfwU57IQAAAABoR36GbNErofpAwEkByA6JZErNoajqWiKqC0YUisRls5jlc9A/e1tbWsJ6csF6zf2sbodjDt2jQBdMKtdgn6MXK8useCKpYCSuUCwus2GSx2FRsc+uXJddXodFJgJ2AAAAAAMY+RmyRY+G6nPmzNGHH36oX/7yl+nnHn30Ud18882KRCI6++yz9Yc//EFmc//vY8xJAcg+4VhCjV/NXm9uiymeSMpjs8rjsMjE7HVJ0ue1LXp0/lqtrAp0ut1qNvS9/Ut02o
Glcts7LuPhtpnlcbQvCGo1mxRLJNUaiSsYjqs1muiN8ntU7KuAvS2akNlsKMdp1WCvQzkuq7x2AnYAAAAAAw/5GbJFj4bqhx56qMrKyvTUU09JklasWKFx48Zpv/3208iRI/Xiiy/qd7/7na699tqeKqHXcFIAslcymVJLOK76YEQ1gbBawjFZTSb5nFY5rP3/ouC3lUql9P6XDZr13jpV+8OdjvE5LDp7YpmmjymWw2rSYJ9D8WRKj81fq9dX1ijQFpfPadExY4o1Y0qFLCZDtYGwYonsuJkqlkgqGG6fwW61mJTjsGqw7/8CdloMAQAAABgIyM+QLXo0VC8oKNBvfvMbXXnllZKka665Rn/729+0ceNGuVwuXXzxxXr33Xe1cuXKniqh13BSAAaGaDypplBUtYGwGoNRheMJuWwW+Rz0zo4lkvrPimo9u3ijgpF4p2PGl+XqbxdO0GPz12rmnNVKdvJfIJMhXX7kHvrRoSO0sTGUNcH6VtF4Ui3hmMKJhOxmk3JcVhV5HRrkstHDHwAAAEBWIz9DtrDsfMjua21t7fAHMnv2bE2fPl0ul0uSNGHChPQsdgDoD2yW9lnWRV67gpH4V+1hwqoLts/Q9tqtcg3QYNRqNunEsUP1nb2K9Ozijfr3imolvpaa33TCGP2/d7/Un976YofHSaak+95cLUm6YFK5NjW19Wjdvc1mMSnfY5ckReIJBUJx1QUCsltMynXbVOC1a5DTul27HAAAAAAA0Df06Kp7paWlWrx4sSTpiy++0Mcff6yjjz46vb2xsVF2u70nSwCAHmEYhrwOq8ry3RpflqsDhudqaK5T0URSVf42NQQjiiWSmS4zI7wOq3506Ag9dPY4TRqRn35+YkWect1WPfjfHQfq25o5Z7XiyZRctuxtsWO3mJXvsatkkFNuu0WNrVGt2NSsD9Y1atnGZlX72xSKdj7rHwAAAAAAZEaPToM755xz9Nvf/labN2/WypUrlZubqxNPPDG9fcmSJRo1alRPlgAAPc5iNqnAY1eBx662/IQaQ1FVN7epMRRVIpGSx24ZkIublgxy6tfHjdbHm/3627y1Ou/gMj29YEOnLV86k0xJs95bpwsmlSmUBQuX7ozDak736G+LJlQfjKja3yanzaw8t02FHocGuejjDwAAAABApvVoqH799dcrGo3qP//5j4YPH65Zs2Zp0KBBktpnqc+dO1dXXHFFT5YAAL3KaTNrqM2pIT6HAuHYV4ubti9wajEZ8jkGXii6z9Ac3XP6/irPd+mPb36+S/vO/rhaP51aKSnSM8X1UU6bWU6bWalUSm2xhLb4I9rcFJbLZlaBx6YCj52FcgEAAAAAyJAeXah0IGGhBQA7Eokn1ByKqcYfVmNrRJFYUm67Rd4BtrjpnsVeTbnzLdUFux6QF3rteveaI/R5TcuA7FO/rVQqpVA0oWAkrkQqJbfNrHyPTfkeu3KcVtktBOwAAAAA+jbyM2SLXlsFLRgMauPGjZLae617PJ7eemkAyCi7xazBPrOKvHa1ROJq/Gr2+paWsEyGIa/DIqc1+xc3jSWS8jktuxSq+xwW+dtiuujxxZo0Il+TKgu09xDfgLoYsZVhGHLbLXLbLUp+FbBvbgprQ0NIbodVhR6b8tztAbvN0qNLpgAAAAAAMKD1eKi+ePFiXXPNNZo3b56SyfZF+0wmkw499FDdfffdGj9+fE+XAAB9gmG0t3/xOawalutSUyimupaw6oIRNYViclhM8jmtspqzMxBtjcR1zJhiPTR3TZf3OWZMsRavbVR9MKp/La/Wv5ZXK8dp1cEVeZpcWaB9h+Vk7ef1TUyG0d6r/6uAvTUS14bGkNY3hOS2W1TktSvPbcvq7xMAAAAAAJnSo+1fFi5cqKlTp8pms+nss8/W6NGjJUmrVq3SM888o2g0qrlz5+qggw7qqRJ6DbevANhdoWhcja1R1fjDag7FlEil5P1qRnI2LW7qtpnldVo18XdvdmmxUp
MhvXPNEbrq+WVatLax82PazTqoPE+TKgs0bvigAd8CJZFsD9hbo3EZhuSxWVXoa5/B7nNYZCFgBwAAAJBB5GfIFj0aqk+bNk3r1q3TvHnzVFxc3GFbbW2tpkyZooqKCr3xxhs9VUKv4aQA4NtKJlPyt7UvblobCCsYSchmNsnrsGTNgpTDcp16/P11uu/N1Tsde/mRIzV9zBAdN/PdLh3bbjFpfFmuJlUWaEJ5rly2Xutw1iclkikFI3G1RuIymwx5HBYN9to1yNU+g30gttABAAAAkFnkZ8gWPRqqe71e3XjjjfrlL3/Z6fa7775bt956q1paWnqqhF7DSQFAdwrH2hc3rQ2E1dAaUTSeSrf76M9hqNVsqDTPpUfe/VIz56zudMa6yZAuP3IP/ejQEdrQ0Ko1da16b02D3v+yQWvrW7v0OhaTobGlgzSlskAHVeTJ57R28zvpX+KJpIKRuEKxuMyGSR6HRcU+u3JddnkdFpn68XcKAAAAQP9BfoZs0aPT+Ewmk+Lx+A63JxIJmUzcig4AX+ewmlWcY9Zgn12BcPviprVfLW7a3pvd0i9nYscSKW1sDOmCSeU6Z2KZZr23TrM/rlYgHJfPYdH0fYbowsnlspgMbWwMKZ6UyvLdKst366yDhqva36b31zTovTUN+qx2xxdk48mUPljfpA/WN8lkSPsOzdGkygIdXJGnfI+9F99x32AxmzTIZdMg2RT7KmD/rCYos7lVOU6rin0O+ZxWee0E7AAAAAAA7EyPzlQ/9thjtWLFCs2fP19lZWUdtm3YsEFTpkzRvvvuq//85z89VUKv4UobgJ4WSyTVFIpqSyCihmBEbbGEnFaLvA5Lv1yM0mUzy+uwyGW3yGo2FEukFIrE1RKOKxRN7HT/+mBEC75sD9hXVvm71KfdkLRXsVeTKvM1qbJAxT7Ht38j/VgskVQw3D6D3WYxyeewarDPoRxXe8BuZFFPfwAAAACZR36GbNGjofrSpUt12GGHKR6P6/vf/75GjRolSfrss8/0j3/8Q2azWfPmzdP+++/fUyX0Gk4KAHpTa+SrxU0DYflDMSVTKXntVrnt5gEZhPrbYlrwZXuLmGUbmxXvSsIuaUShW5MrCzR5RL5K81w9XGXfFo0n1RKOKZxIyG42KcdlVZHXoUEum9y2gfm9AgAAANC9yM+QLXo0VJekTz75RNdff73eeOMNhUIhSZLL5dLRRx+tm2++WQUFBSopKenJEnoFJwUAmZDYurhpS0S1LWGFonHZzO0zwO2W7FjcdFe1RuJavK5R761p0JINTYrGk13ab1iuU5MrCzRpRL4qC90DOkQOxxJqjcQVjiflsJiU67ap0GtXjtMqt73/tR0CAAAA0DeQnyFb9HiovlUymVRdXZ0kqbCwUCaTSbfffrtuvPFGJRI7v82/r+OkACDTwrGEmkJR1fjDagxFFYsn5bFb+/3ipt9GOJbQhxua9N6aBi1e19iltjKSVOS1a3JlviZXFmjPYq9MAzxgD0biisQTclrNGuSyqcjXHrD3x77+AAAAADKH/AzZotf+v2GTyaTBgwf31ssBwIDjsJo1JMepYp9Dgba4GlojqvGHVRtok9nU3i/baRtYs9cdVnN7e5fKAsUSSS3b1Kz31jRowZcNagnveCHtLS0RvfJRlV75qEp5LpsOrszX5BH52mdozoC7QOGwmuWwtn9v2qIJ1Qcjqgm0yWE1K99tV4HHrkEua3oMAAAAAADZjilmAJBlDMNQjsuqHJdVw3Jdag5FVdsSVkMwqsa2iFyW9sVNLf1wcdNvw2o2aXxZnsaX5elnU0dqZZVf769p0HtfNqixNbrD/RpDUf1nRbX+s6JaXrtFE0fkadKIAh0wfFC/XCD223DazHLazEqlUmqLJVTrD2tTU5tcNrMKPDYVeOzyOQnYAQAAAADZjVAdALKYzWJSkc+hIp9DwUhcTa1RVfvbVN8aVSqVktdhHZCLUJpNhvYbNkj7DRukHx02Qp
/Xtui9NQ16f02DagLhHe7XEonrzVVb9OaqLXJazZpQnqvJlQUaNzx3QN0FYBiGXDaLXDaLUqmUQtGEqprD2tjUJrfNrAKPXXkem3Kc1gHb2x8AAAAAkL0I1QFggPDYLfLYLSoZ5FRzKKq6lojqghFV+6OyW8zyOqyyWQbWzGtJMhmG9ir2aa9in2ZMLte6hlbN/ypg39AY2uF+bbGE3lldr3dW18tmNumA4YM0ubJAB5XnyeMYOP95NQxDbrtFbrtFya8C9k1NbdrQGJLLblGRx65ct02DXNYBN7MfAAAAAJCduv3/6//www+7PLaqqqq7Xx4AsBNmk6F8j135HrvKou2Lm1b729QUiiqRTMpjs8rjsAzIxTkNw1BFgUcVBR6dO7FMm5pC7S1i1jToi7rgDveLJpJauLZRC9c2ymwytP+wHE0aUaCDR+RpkMvWi+8gs0yGkb54k0yl1BqJa31jq9Y1tMptt6jIa1eeu30G+0BrPwQAAAAAyB5GKpVKdecBTSZTl9sIpFIpGYahRCLRnSVkBKsXA+jPksmUAuGYGoJR1QTCagnHZDWZ6I+9jS2BsN7/sj1gX1UdUFf+42kypNFDfJpcma9JIwpU6LX3eJ19USLZHrAHo3GZDMljs6rI1z6D3TcA+/sDAAAAAxX5GbJFt4fqjz/++C7vc8EFF3RnCRnBSQFAtojGk2oKRVUbCKsxGFU4npDLZpHPYZXZNPBmr3emqTWqBWvbA/blm5qV7OJ/Sfco8mhyZYEmV+arZJCzZ4vsoxLJlIKRuFoj8f/P3p9HyXUWdv7/5y51a+291a3VWlqyMTay8SJZMg52MDiEMDhwAmQYbBjIb5LBAWMYgnNmsGGGGMbkYIhJIMmAPMmXAFkgmRDsYIPZLDDY2JZXZC3WvvRae93198ftLknW4mqpWt1d9X6do4O7uu7tp0pXD9WfeurzyDIN5VK2BjuS6s446kxzjQEAAACtjPwMraLpoXq7YlIA0GqiKA4/R0uu9k/Eq9clqSOZUKYNNzc9mULV08M7RrV5+4ge3TUmL2js/1ZX9GW0YVWfNg71a3lfpi2fTz8IVaz5Knu+LMNULmVrYWdSPZmkOlK2TAJ2AAAAoKWQn6FVEKo3CZMCgFbmB6HGK54OFaoaLrgqu75StqXONJtPHq3s+nrkhTE9tG1Ev3hhVFUvbOi4RV0pbRyKA/Y1A7m2DNi9qYDdDWRbhrrSCS3sTKkznVBnym7L5wQAAABoNeRnaBXzMgn5whe+oBUrViiVSmn9+vV6+OGHT3rfv/qrv9JVV12lnp4e9fT06Nprrz3u/lEU6WMf+5gWLVqkdDqta6+9Vlu3bp3phwEA84ZtmerPJfXyRV26fEWvXrG0W7mUrdGyq33jFRWqnkLeo1XGsXXVmgX6o994mf72Pev1399wvn79ZQPKJU+9L/j+iar+8dG9+tDfP67/fM/P9Zc/3KYteycUNNor0wISlqmejKMl3Wn1ZhxVaoGe2jehR14Y1SMvjGn3aFmFqifWAgAAAAAAZtu8W6n+9a9/XTfccIO++MUvav369brrrrv093//93ruuec0MDBw3P3f8Y536Morr9TGjRuVSqX06U9/Wt/85jf11FNPacmSJZKkT3/607rjjjt0zz33aOXKlfof/+N/aMuWLXr66aeVSqUaGhfvtAFoN2EYaaLiaaRU04F8TcWqr4RlqDPF5qYv5gehtuyd0ObtI9q8fUTjZa+h47rSCV2xslcbhvq1dmlXW34qwPVDFaqeqkGgpGWqO+NowWQHe5YaIgAAAGBeIT9Dq5h3ofr69et1+eWX6+6775YkhWGoZcuW6Q//8A/10Y9+9CWPD4JAPT09uvvuu3XDDTcoiiItXrxYH/rQh/ThD39YkjQxMaHBwUFt2rRJb3/72xsaF5MCgHZW8wONlbx4c9NSTTUvVDZpq4PNTY8ThJGePZDXQ9vigP1wodbQcVnH0uUre7VxqF+vXNbdlm9cVL
1ApZqvqh8qZZvqycYBe1c6oexLfBoAAAAAwOwjP0OrmFe/gbquq0ceeUS33npr/TbTNHXttddq8+bNDZ2jXC7L8zz19vZKknbs2KEDBw7o2muvrd+nq6tL69ev1+bNm08aqtdqNdVqR4KQfD5/Og8JAFpC0ra0sMvSYGdShZqv0WK8ev1QoSrTMNSRspVx5tX/5cwYyzR0weIuXbC4S+991Uo9f6iozdtH9NC2Ee0dr5z0uJIb6MHnDuvB5w4raZu6bHmPNgz16/IVPW3z3KYSVv3NhKoXTG6iW1E6Yak742igM6nutKO0035vOAAAAAAAzp559Vv48PCwgiDQ4ODgMbcPDg7q2Wefbegcf/RHf6TFixfXQ/QDBw7Uz/Hic05970TuuOMOffzjH5/O8AGg5RlGXP/SmUpoaU9GY2VPhwtVHS7WNFb2lLJNNjc9imEYWjPYoTWDHXrnFcu1a7RcD9h3DJdOelzND/WTbSP6ybYR2aahi5d1a+NQn9av7FNnOnEWH8HsOTpgr7iBhos1HchXlEpY6ssm1Z9LqjtDFREAAAAAoPnmVah+pj71qU/pa1/7mh588MGGu9JP5tZbb9Utt9xS/zqfz2vZsmVnOkQAaBm2ZWpBR1ILOpIqu/7kquKqRkuugihSR9JWNmnLpBNbUhywL+/LanlfVm+//Bztn6ho87Y4YH/uYOGkx/lhpF+8MKZfvDAm03heFy7p0sahfl2xsld9ueRZfASzJ+1YSjuWoihSxQt0cKKqPWMVZRxL/TlH/bmkOtME7AAAAACA5phXoXp/f78sy9LBgwePuf3gwYNauHDhKY/9zGc+o0996lO6//77tXbt2vrtU8cdPHhQixYtOuacF1988UnPl0wmlUy2R1gBAGcq48T1L4u70pqoeBou1nQwX9X+iYocy1JHyibwfJFFXWm9+ZKlevMlSzVSrMWbnG4b0ZP7JhSeZDeUMJKe2DOhJ/ZM6Es/2KaXLezQhqE+bRjq18LOM3szeT4wDKN+rUVRpLIbaO94VbtGy8olbfXnkurNOepOO3JsPi0BAAAAADg98ypUdxxHl156qR544AFdf/31kuKNSh944AHddNNNJz3uf//v/61PfvKTuu+++3TZZZcd872VK1dq4cKFeuCBB+ohej6f189+9jP9wR/8wUw9FABoS6ZpqCfrqCfraFlvRmNlVwcnqhoruxotu7JNQ7mkrXTCksEK9rq+XFK/tXaxfmvtYk1UPP1sR7yC/fHd4/JPkrBHkp45UNAzBwr68k92atWCrDYO9Wvjqj4t682c3QcwCwzDUHby0xDhZMC+Z6yiXaNlZZK2BnJJ9WQddWeoIwIAAAAATI8RRdFJ1rvNTV//+td144036ktf+pLWrVunu+66S9/4xjf07LPPanBwUDfccIOWLFmiO+64Q5L06U9/Wh/72Mf01a9+VVdeeWX9PLlcTrlcrn6fT33qU7rnnnu0cuVK/Y//8T/0xBNP6Omnn264JobdiwHg9ERRpGLNV6Hq63ChpomKp4rryzQNZRJxKGqZBOwnUqr5+vnOUT20bUSP7BqT64cNHbe0J62NQ/3asKpPQwuybfUGRhhFKtV8lVxfUSTlkrYWdCTVm3XUlU7IJmAHAAAAZgz5GVrFvAvVJenuu+/WnXfeqQMHDujiiy/W5z//ea1fv16SdPXVV2vFihXatGmTJGnFihV64YUXjjvHbbfdpttvv11SHOjcdttt+su//EuNj4/rVa96lf78z/9c5557bsNjYlIAgOaoeoHyFU9jZVcjRVelWqBQodK2rUzSUtKmJuZEql6gR3eNafO2ET28c1RlN2jouIGOpDYO9WnjUL/OW9jRVh33QRgH7EXXl2lIOSehgc54BXtnyiZgBwAAAJqM/AytYl6G6nMRkwIANJ8XhCpUfeUrrg4XXRWqnjw/UsIylU1a1MSchBeEenzPuB7aNqKfbR9Rvuo3dFxvxtEVQ33auKpPFy7paqtPCA
Rh/ImJUs2XZRrKpWwNdsQBe0cq0VbPBQAAADBTyM/QKgjVm4RJAQBmVhhGKrpxTcxwoabxiquqF8qUoWzSUsahJuZEgjDS0/sm9NC2ET20fUSjJbeh4zqSttav6tWGVf165TndbdU77gehijVfZc+XZZjqTNsa6EiqJ5NUR8qWyXUGAAAAnBbyM7QKQvUmYVIAgLOr4gYqVD2NllyNlFyV3UBBGCrj2Mo6thy7fULgRoVRpF8dLGjztnij0wP5akPHpROWLl/Ro41D/brknB6lnfap4PGmAnY3kG0Z6kontLAzpc50Qp0pm09KAAAAANNAfoZWQajeJEwKADB7vCBUvuIpX/F0uFhToebLC0IlLUsZh5qYE4miSDtHSvEK9m0j2jVabug4xzL1ynO6tXGoX+tW9CqXsmd4pHPHVB1RxfPl2KY6UwkNdqbUnUkolyRgBwAAAF4K+RlaBaF6kzApAMDcEIaRCjVfhaqnkaKr8bKrihfGPdmOrbRjURNzAnvGyvEK9u0jev5QsaFjLNPQRUu7tGFVv65Y1avujDPDo5w7XD9UoeqpGgRKWqa6M44WdCTVnXGUdXgTBwAAADgR8jO0CkL1JmFSAIC5qTzZwz5Sqmm05Kni+gpDKeNYyibttuoKb9ShfFWbt8cr2J/Zn1cjLxRMQzp/Uac2DvVpw6p+LehIzvg454qqF6hU81X1Q6USpnomA/audELZZPus5AcAAABeCvkZWgWhepMwKQDA3De1uni8HNfElFxfnh8qZccbnaYSJiuMX2Ss5OqnO+KAfcveCQVhYy8b1gzktHGoXxuH+rS4Oz3Do5w7ql6gQtWXGwRKJyx1ZxwNdCbVnXbaqoseAAAAOBHyM7QKQvUmYVIAgPklCCMVa77yFU/DxZomKp5qfiDLMJWlJuaEClVPD+8Y1ebtI3p015i8oLGXECv6Mtqwqk8bh/q1vC/TNm9cVNxAhZonPwyVSljqyybrK9hTCQJ2AAAAtB/yM7QKQvUmYVIAgPkriiKV3XiF8Wi5ptGiq7IbKJKUdWxlHIuamBcpu74eeWFMm7eP6Bc7x1TxgoaOW9SV0sahOGBfM5Bri4A9iiJVvEDFqi8vjJRNWurLOurPJdVJwA4AAIA2Qn6GVkGo3iRMCgDQOmp+HLCPl10NF10Va778IK6JySZtQtAXcf1Qj+0e00+2jejhHaMq1vyGjuvPOdqwqk8bhvr18kWdbfHJgPobODVfQRgql7TVn0uqN+eoO+3IsXnzBgAAAK2L/AytglC9SZgUAKA1BWGkYtVXvurpUKGqQtVXzQtkm6YyyXgVu9kGq60b5Qehtuyd0ObtI9q8fUTjZa+h47rSCV2xslcbhvq1dmlXW3wyIIwilWuBSq6vMIqUSdoamAzYu9KJtngOAAAA0F7Iz9AqCNWbhEkBAFrf1CrjfNXTaNHVaNlVxY1rTzKOraxjySYIrQvCSM8eyGvzthE9tH1Ehwu1ho7LOpYuX9mrjUP9euWy7rb4ZEAYRSrVfJVcX1Ek5ZK2FnQk1ZuNA3auKwAAALQC8jO0CkL1JmFSAID2U/XimpiJiqvhwmRNTBgpZZvUxLxIFEXadrikh7YN66FtI9o7XmnouKRt6rLlPdow1K/LV/Qo49gzPNLZF4RxwF50fZmGoY7JgL1nMmBvh5ocAAAAtCbyM7QKQvUmYVIAgPbmB6GKNV/5iq9DhaqKVV9VP1DCMpV1bKWpiamLoki7RstxRcy2EW0fLjV0nG0aunhZtzYO9Wn9yj51phMzPNLZF4SRijVfpZovyzSUS9kanAzYO1IE7AAAAJhfyM/QKgjVm4RJAQAwJYoildxA+YqnkVJN42VP5Zov0zSUTlAT82L7JypxRcy2ET13sNDQMaYhXbikSxuH+nXFyl715ZIzPMrZN/XGTdnzZRmmOtO2BjtT6k476kjZMgnYAQAAMMeRn6FVEKo3CZMCAOBkql7cwz5R9jRccFVyfflhGAfsSUtJm5
qYKSPFWn0F+5P7JhQ2+Crl/IUd2jDUpw1D/VrYmZrZQc4B3lTA7gZK2IY6Uwkt7EypM51QZ8qWwaciAAAAMAeRn6FVEKo3CZMCAKARfhCqUPWVr3o6VKipWPVU8yMlLEO5pK10wiIQnTRR8fSzHXHA/tjucfkNJuyrFmS1cVWfNg71a1lvZoZHOfu8yWuq4vlybFOdqUS8gj2TUC5JwA4AAIC5g/wMrYJQvUmYFAAA0xVFcV92vupruFDTRMVT2fVlm6bSCUvZpE1n9qRSzdfPd47qoW0jemTXmFw/bOi4pT1pbRzq14ZVfRpakG35gLnmB/U+/6RtqjvjaKAzpa50QlmHN2wAAAAwu8jP0CoI1ZuESQEAcKaqXtzDPlZ2NVJ0VaoFChUqbdvKJm05Nj3sUvw8PbprTJu3jejhnaMqu0FDxw10JLVxsiLmZQs7Wn7j2KoXqFTzVfVDpRKmejKOFnQk44A9ac/28AAAANCGyM/QKgjVm4RJAQDQTFOVHvmKq8NFV4WqJ8+PlLBMZZMWNTGTvCDU43vG9dC2Ef1s+4jyVb+h43oyCV2xqk9XDvXrgsWdLb9xbNULVKj68oJAqYSlnmwcsHenHaUdOv0BAABwdpCfoVUQqjcJkwIAYKaEYaSi66swWRMzXnFV9UKZMpRNWso41MRIUhBGenrfhB7aNqKHto9otOQ2dFxH0ta6lb3aONSvi5d1t/QnAqIoUsULVKzFm+WmEpb6ssn6CvZUgoAdAAAAM4f8DK2CUL1JmBQAAGdLxQ1UqHoaLbkaKbkqu77CUEo7lrIONTGSFEaRfnWwoM3bRvTQthEdyFcbOi6dsHT5ih5tGOrXpef0tPQq7nrAXvXlhZGySUt9WUf9uaQ6CdgBAAAwA8jP0CoI1ZuESQEAMBu8IFS+4ilf8XSoUFPR9eUFoZJWHLCnEmbb18REUaSdI6V4Bfu2Ee0aLTd0nGOZeuU53do41K91K3qVS7VuD3kURSq7gQo1X2EYKpu01Z9LqjfnqDvt8EYNAAAAmoL8DK2CUL1JmBQAALMtDCMVar4KVU8jRVfjZVdVP5RpGMo5ttKORU2MpD1j5XgF+/YRPX+o2NAxlmlo7ZIubRzq1/pVverJODM8ytkTRpHKtUAl11cYRcokbQ1MBuxd6YQSLd4/DwAAgJlDfoZWQajeJEwKAIC5pjzZwz5Sqmm05KkyWROTcSxlkzbhqKRD+ao2b49XsD+zP69GXhQZkl6+uFMbh/q0YVW/FnQkZ3qYsyYII5VdX8VavAFsLmlroDOpnkwcsLf6Bq8AAABoLvIztApC9SZhUgAAzGWuH6pQ9TRe9nS4WIs3qgxCpex4o1NqYqSxkquf7ogD9i17JxSEjb1EWjOQ04ahPl051K/F3ekZHuXsCcJIpZqvouvLNAx1JG0t6EiqJxsH7HwKAgAAAC+F/AytglC9SZgUAADzRRBGKlZ95auehos1TVQ81fxAlmEqm7SVcSyZbR6wF6qeHt4xqs3bR/TorjF5QWMvl5b3ZuIV7EP9WtGXadk3KoIwUrHmq1TzZZmGcilbg5MBe0eKgB0AAAAnRn6GVkGo3iRMCgCA+ai+QWXV10ixprGyq7IbKJKUdeKAvd1rYsqur0deGNPm7SP6xc4xVbygoeMWdaW0cahPG4f6tWYg17IBux+EccDu+rJNU51pW4OdKXWnHXWkbJkE7AAAAJhEfoZWQajeJEwKAIBWUPPjgH287Gq46B5TE5NN2kolrNke4qxy/VCP7R7TT7aN6OEdo/Wu8ZfSn3N0xao4YH/5os6WXcntTQbsZTdQwjbUmUpoYWdKXZmEOpJ2y76xAAAAgMaQn6FVEKo3CZMCAKDVHF0Tc6hQVaHiq+YHsk1TGWpi5Aehtuyd0ObtI9q8fUTjZa+h47rSCV2xslcbhvq1dmlXy34SwAtCFaq+Kp4vxzbVlU5ooCOl7kxCOQJ2AACAtkR+hlZBqN4kTAoAgFYWRZFKbqBC1d
No0dVo2VXFjWtQMo6trGPJbtFwuBFBGOnZA3lt3jaih7aP6HCh1tBxWcfS5St7tXFVn155Tk/LfhKg5gcqVn1V/UBJ21R3xtFAZ0pd6YSyjkXADgAA0CbIz9AqCNWbhEkBANBOqt5RNTEFVyXXlx9GSicsZRyrZcPhRkRRpG2HS3po27Ae2jaiveOVho5L2qYuXd6jjUP9unxFjzKOPcMjnR1VL1Cp5qvqh0olTPVkHC3oSKo7k2jZxwwAAIAY+RlaBaF6kzApAADa1dRGlRMVT4cLNRWrvmpBKNs0lHVspdu4JiaKIu0aLccVMdtGtH241NBxtmno4mXd2jjUp3Ur+9SVTszwSGfH1JszXhAolbDUk50M2NOO0k77vjEDAADQqsjP0CoI1ZuESQEAgCM1MfmKp5FSTWMlTxXXl2kaSieoidk/UYkrYraN6LmDhYaOMQ3pwiVd2riqT1es6lNfLjnDozz7oihSxQvijXHDUKmEpb5sUgs6kupKJ9r6kw8AAACthPwMrYJQvUmYFAAAOF7VC5Svepoox6vYy24gPwyVSdjKJC0l7fYNS0eKtfoK9if3TShs8BXZyxZ2aONQnzYM9WthZ2pmBzkLpgL2QjWuFMomLfXlHPVnk+rKJNr6mgEAAJjvyM/QKgjVm4RJAQCAU/ODUIWqr4mKq8NFV8Wqp5ofybFMZZOW0on23bByouLpZzvigP2x3ePyG0zYVy3IauOqOGA/pzczw6M8+6IoUtkNVKj5CsNQ2aSt/lxSfbl4Bbtjt++nHgAAAOYj8jO0CkL1JmFSAACgcVEUqVjzla/6Gi7UNFHxVPECWYahdMJSNmnLMtszYC/VfP1856ge2jaiR3aNyfXDho5b2pPWhlV92jjUr6EF2ZZ7gyKMIpVrgUqurzCKlE3aWpBLqjfnqCudUKKNa4UAAADmC/IztApC9SZhUgAA4PRVvbiHfazsarjoqlwLFCpU2raVTdptuyK56gV6dNeYNm8b0cM7R1V2g4aOG+hI1itiXrawo+U2ig3CSGXXV7HmS5JySVsDnUn1ZOKAvZ17+wEAAOYy8jO0CkL1JmFSAACgObzJmph8xdWhQk3Fmi/Pj5Ro85oYLwj1+J5xbd42op9uH1G+6jd0XE8moSsmV7BfuLiz5QLnIIxUqvkqur5Mw1BH0taCjqR6s44604m2/cQDAADAXER+hlZBqN4kTAoAADRfGEYqur4KkzUx4xVXFTeUZRrKOpYyTnvWxARhpKf3TeihbSPavH1EIyW3oeM6krbWrezVxqF+Xbysu+U+ARCEca1QqebLMg3lUrYGO5LqSCeUS9pKJdjkFAAAYDaRn6FVEKo3CZMCAAAzr+IGylc9jZVcjZRclV1fYSilHUu5pN2WvdphFGnrwaIe2jash7aN6EC+2tBx6YSly1b0aONQvy49p0dpp7UCZz8IVaz5KruBDCO+RjpTCfXlHHUkE8omrZZbtQ8AADDXkZ+hVRCqNwmTAgAAZ5frhypUPeUrXlwT4/ryglBJy1LWsZVKmG1XExNFkXaOlPTQthE9tG1Eu0bLDR3nWKZeeU63Ng71ad2KPuVS9gyP9OwKo0gVN1DZDeSGgWzTVDZhqzeXUHfGUS5pK+O0Z60QAADA2UR+hlZBqN4kTAoAAMyeMIxUqPkqVD0dLtSUr3iq+qFMw1DOsZV2rLasidkzVtbm7XHA/vyhYkPHWKahtUu6tHGoX+tX9aon48zwKM8+LwhVdgNV3EChIiVtU7mkrf6co45UQrmUraTdWiv3AQAA5gLyM7QKQvUmYVIAAGDuKE/2sI+UahoteSrXfEWSMglL2TatiTmUr2rz9riD/el9eTXyAtCQ9PLFndo41KcrVvVpoCM108M866IoUs0PVXEDVbwTV8XkUu3Z3Q8AANBs5GdoFYTqTcKkAADA3OT6ofJVTxNlT4eLNRVrvvwgVMqOA/ak3X41MWMlVz/dEa9g37J3QkHY2MvBNQM5bRjq05
VD/VrcnZ7hUc6Ok1XF9OUcdWUSVMUAAACcAfIztApC9SZhUgAAYO4LwkjFqq981dNwsaaJiqeaH8gyTGUnw1KzzcLSQtXTz3eO6qFtI3p015i8oLGXhst7M9ow1KeNQ/1a0Zdp2ZCZqhgAAIDmIT9DqyBUbxImBQAA5pcoilR2g7gmpljTWNlV2Q0USco6ccDebjUxFTfQL14Y1ebtI/rFzjFVvKCh4xZ1pbRxqE8bVvVrzWCuZd+YmKqKKbuBqr4vQwZVMQAAANNAfoZWQajeJEwKAADMbzU/DtjHy66Gi66KVV9+GCllx6vYU4n2Wo3s+qEe2z2mh7aN6Gc7RlWs+Q0d159zdMWqPm1c1aeXL+5q6YA5CCNVPapiAAAAGkV+hlZBqN4kTAoAALSOIIxUqHoqVH0dKlRVqPiq+ZOhadJWus1qYvwg1JP78npo27A2bx/ReNlr6LiudELrV/Zq41C/1i7tavmV/0dXxQRRpFSCqhgAAICjkZ+hVRCqNwmTAgAArSmKIpXcQIWqp5Giq7Gyq4ob16JkHFtZx5Ld4mHx0YIw0rMH8tq8bUSbt4/oUKHW0HFZx9LlK3u1cVWfXnlOT8uv/H9xVYxpGEonLHWmE+rLJZVL2solqYoBAADthfwMrYJQvUmYFAAAaA9V76iamIKrkhvXxKQTljKO1fJh8dGiKNK2wyU9tG1YD20b0d7xSkPHJW1Tly7v0YZVfVq3slcZx57hkc4+qmIAAADIz9A6CNWbhEkBAID24wehijVfExVPhws1Faqe3CCSbRrKOu1VExNFkXaPVeKKmG0j2j5caug42zR08bJubRzq07qVfepKJ2Z4pHPDVFVM1Qvi7n6qYgAAQBsgP0OrIFRvEiYFAADaWxRFKtZ8Faq+Rko1jZU8VVxfpmkok7CVbbOqj/0TlXpFzLMHCg0dYxrShUu6tHFVn65Y1ae+XHKGRzk3nKwqpiuTUG+WqhgAANA6yM/QKgjVm4RJAQAAHK3qBcpXPY2XPA0Xayq7gfwwVCZhK5O02moV8kixpp9uH9FD20b05L4JhQ2++nzZwg5tWNWnjUP9WtiVmtlBziFBGKnixRueemEoyzSoigEAAC2B/AytglC9SZgUAADAyfhBqELV10TF1eGiq2LVU82P5FimsklL6UT7BKQTFU8/2zGizdtG9NjucfkNJuyr+rPaONSnDUP9Oqc3c8r7Zh1LuVT86YCEZcoLQpVqvopVX6XJTWbnE6piAABAqyA/Q6sgVG8SJgUAANCIMIxUcn3lq76GCzWNV1xVvVDWZOVHO9XElGq+fr5zVJu3j+iRF8ZU88OGjlvak66vYB9akK2/IZGwDA12puSHkb7ykx2676kDyld8daZtXXfBQr37ypWyTUMH81V5wfx8CUxVDAAAmM/Iz9AqCNWbhEkBAACcjoobqFD1NFZ2NVx0Va4FChUqnbCVdWw5tjnbQzwrql6gX+4a00PbRvTwzlGVG1xRPtCR1IZVfbr25YP69ZcN6K9+tF2ff2DrCStmTEN6/2vW6PeuWqXdo+V5G6wfbaoqpuz68oNItn1UVUw6oSxVMQAAYA4hP0OrIFRvEiYFAABwprypmpiyq8PFmoo1X64fKmlbyjjtUxPjBaGe2DOhh7YN66fbR5Sv+i95zP+76VX696cP6M++9/xL3vfma9foxg0rtGes0ozhzilUxQAAgLmM/AytglC9SZgUAABAM4VhpKLrK1/xNFJ0NV5xVXEnN610LGWc9qj4CMJIT++b0EPb4x72kZJ73H3Wr+zVn771Iv3a//5+Q5ugmob0sz++VvmK1/CK+PmIqhgAADDXkJ+hVRCqNwmTAgAAmEkVN1C+6mms5Gqk5Krs+gpDKe1Yyk1uyNnqwijS1oNFPbRtWA9tG9GBfFWSdPfvvlJP7cvrL36wreFzve+a1bpxw3IdzNdmarhzzgmrYhxbfdm4KiaXstvm0xAAAGB2kJ+hVdizPQAAAAC8tLRjKe
1YGuxMyfVDFaqe8hVPBws1jVVceUGolBWvYE8lzJYMRk3D0HkLO3Tewg69a+MK7Rwp66Ftw7piVZ8+e/+vpnWue5/cr/e8aqWe3Duh8xZ2tMWbEpZp1FenS0eqYl4YKVMVAwAAAEwDoToAAMA849im+nJJ9eWSWt6XVaHmq1D1dLhQU77iaawSyDJMZR1bacdqyXoPwzC0sj+rlf1ZdaYTyldeunf9aPmqL8c2des3t8ixTb18UacuWtqttUu7NLQg15LP2YslLFNdaVNd6US9KqZUCzRSKlAVAwAAAJwCoToAAMA8ZpqGutIJdaUTWtqTUdn1la/4Gi3XNFrydKhQVSQpk7CUbdGaGC8I1Zm2dbjYeJVLZ8pWqRYH8a4f6rHd43ps97gkKetYunBJl9Yu7dLaJd06py8jswVX/h/NMAylEpZSCUuSU6+KOZSvad9YlaoYAAAA4CiE6gAAAC0k49jKOLYWdsU1Mfmqp4myp8PFmkbLrvwgVMqOA/Y4QJ3/SjVf112wUH/+YOOd6tddsFA/3zF64vO5gX62Y1Q/m/x+VzqhV0yG7Bct7dairlTLh8knq4rZOVJSEIqqGAAAALQ1NiptEjZaAAAAc1kQRipWfeWrng4Xq8pXfNX8yZqYpK2MY83b1dhZx1JHOqH1f3K/wgZe2ZqG9JOP/ro+/Z1n9c+P7dN0Xwz355KTAXuX1i7tVn8ueVrjnq+mqmLKbqCq71MVAwAAGkZ+hlZBqN4kTAoAAGC+iKJIZTdQoeprpFjTWNlVyQ1kaGqluzXvamKW9qR1z+aduuv+rS9535uvXaMbN6zQnrGK8hVPT+6b0BN7JvTEnnHtHqtM+2cv7kpp7WQf+9ql3epKJ07nIcxbU1UxZdeXH0RUxQAAgJMiP0OrIFRvEiYFAAAwX9X8QPmKr4mKq+Giq2LVlx9GStnmvKmJSViGlvVm9Fc/2q7PP7D1hCvWTUN6/2vW6PeuWqXdo2V5wfF3GinWtGVvHLI/vmdchwqN97RPWdGX0dql3bpoaZcuWNylbLK9GhenqmIqnq8wklK2qVzKVn8uGVfFJG059vx60wYAADQH+RlaBaF6kzApAACAVhCEkQpVT4Wqr0OFqgqTNTG2GQfs6TlcE5OwDA12puSHkTY9tFP3Prlf+aqvzpSt37hwkd61cYVs09DBfPWEgfqJHMhX9cSe8fpK9rGyN60xmYa0eiCntUvileznL+qcF29SNAtVMQAA4GjkZ2gVhOpNwqQAAABaTRRFKrmBClVPw8WaxsueKm4gKa6JyTqW7DlYE5NxLHWkbGWSthKWIS+IVK75KlR9lSfHfzqiKNKesYqe2DOux/dMaMveCRVr/rTOYZuGzlvYoYsm62LOHeyYd1U7Z4KqGAAA2hv5GVoFoXqTMCkAAIBWV/XiHvbxsqvhgquS6ysII6USljKO1VYrsKU4IN4xXIpXsu+d0FP7JlT1wmmdI2mbumBxZ9zJvqRLqxbk2mrVNlUxAAC0F/IztIp5Gap/4Qtf0J133qkDBw7ooosu0p/92Z9p3bp1J7zvU089pY997GN65JFH9MILL+izn/2sbr755mPuc/vtt+vjH//4Mbedd955evbZZxseE5MCAABoJ34QqlD1la96OlyoqVD15AaRbNNQLtmeq439INTWQ8V6XcwzB/IN18xMyTqWLlzSVe9kP6c30zbPYxRFqnqhyq6vqh/INAxlHEvdGUc9WYeqGAAAWgD5GVrFvNs16etf/7puueUWffGLX9T69et111136brrrtNzzz2ngYGB4+5fLpe1atUq/c7v/I4++MEPnvS8F1xwge6///7617Y9754aAACAs8a2TPVk47DznN6MipP1KlM1MeNlV6ZpKJOwlW2TINS2TJ2/qFPnL+rU2y6PN4B99kBBj++OQ/athwon3ED1aCU30M92jOpnO0YlSd3phF6xtEtrl3TromVdWtiZatmQ3TAMpR1LaSf+xMNUVczBfFV7xy
pUxQAAAGDOmHcr1devX6/LL79cd999tyQpDEMtW7ZMf/iHf6iPfvSjpzx2xYoVuvnmm0+4Uv1b3/qWHnvssdMeF++0AQAAxKpeoHzV03gp7mIvu4H8MFQmYSuTtJS026smZkrZ9fXUvnx9Jfv24dK0z7GgI6m1kyvZ1y7tUn8uOQMjnZuoigEAYP4jP0OrmFfLsV3X1SOPPKJbb721fptpmrr22mu1efPmMzr31q1btXjxYqVSKW3YsEF33HGHzjnnnDMdMgAAQNtJJeJ+9YGOlFYFWRWqviYqrg4XXeWrnjzfVcIylU1abbXSOOPYunxFry5f0StJmqh4enLvhJ7YO6HHd49r73jlJc9xuFDTA88e0gPPHpIkLelOa+3SOGR/xZIudaUTM/oYZlPCMtWVNtWVTtSrYgoVX4cLNVmGqbRjqmeqKiZlK+u0xyckAAAAcPbNq1B9eHhYQRBocHDwmNsHBwen1X/+YuvXr9emTZt03nnnaf/+/fr4xz+uq666Sk8++aQ6OjpOeEytVlOtVqt/nc/nT/vnAwAAtKpja2IiFd3JmphCTeMVV+MVT6YMZZOWMm0WgnalE7pydb+uXN0vSRop1vTE3gk9sWdcj++Z0OFC7SXOIO0dr2jveEXfefKAJGlFX0YXLe3W2qXdunBJpzLOvHq537CTVcUcyFe1e6yiBFUxAAAAmEGt+Sp7ml7/+tfX/3vt2rVav369li9frm984xt6z3vec8Jj7rjjjuM2NwUAAMDJmaahzlRCnamElnSnVXEDFaqeRkuuRkquDhdqChUqnYhXGbdblUdfLqlrzhvQNecNKIoiHczX9PhkVcwTe8c1XvZe8hw7R8raOVLWPz++T6YhrRnoqK9kf9nCDqUSrVm9Y01ukJtLxr/eTFXF7BwpHVMVsyCXUi5lUxUDAACAMzKvQvX+/n5ZlqWDBw8ec/vBgwe1cOHCpv2c7u5unXvuuXr++edPep9bb71Vt9xyS/3rfD6vZcuWNW0MAAAArW5qpfFAZ0peEMY1MWVXh4s1TVRduX6opG0p47RXTYwUr8Re2JXSwq6Fuu6ChYqiSLvHKpOr2Me1Ze+ESrXglOcII+m5gwU9d7Cgv39kj2zT0MsWdtT72M8d7FDCas1g+eRVMRNxVUzSUk86QVUMAAAATsu8CtUdx9Gll16qBx54QNdff72keKPSBx54QDfddFPTfk6xWNS2bdv0zne+86T3SSaTSibbZ2MoAACAmZSwTPVmHfVmHS3vy6ro+spXPI0UXY2XXY2VPVmmoazTfjUxUhyyn9Ob0Tm9Gf3W2sUKwkg7hkv1qpin9k2o5oenPIcfRnpyX15P7svrqw9LqYSply/q0kWTK9lX9mdb8nk9YVWMG+jABFUxAAAAOD3zKlSXpFtuuUU33nijLrvsMq1bt0533XWXSqWS3v3ud0uSbrjhBi1ZskR33HGHpHhz06effrr+33v37tVjjz2mXC6n1atXS5I+/OEP641vfKOWL1+uffv26bbbbpNlWfrd3/3d2XmQAAAAbezompilPRlV3ED5o2tiilWFoZRxLGWTdsuutj4VyzS0eiCn1QM5vfmSpfKCUFsPFfXEZF3MM/vz8sPolOeoeqEe3TWmR3eNSZKySUuvWNKltUvilezn9GZaMli2TCOugEmdvCqmI22rP0tVDAAAAE7MiKLo1K+256C7775bd955pw4cOKCLL75Yn//857V+/XpJ0tVXX60VK1Zo06ZNkqSdO3dq5cqVx53j1a9+tR588EFJ0tvf/nb98Ic/1MjIiBYsWKBXvepV+uQnP6mhoaGGx5TP59XV1aWJiQl1dnae8WMEAADA8Vw/VKHqaaLi6VChppLry/NDpex4BXsqYbZkEDxdNT/QM/sL9ZB966GCXiJjP053JqG1S+JV7Bct7dZgZ7Lln9upqpiy66vmhzIN47iqmJxjy2zBFf0AAJwN5GdoFfMyVJ+LmBQAAADOrjCMVKjFNTHDxZ
ryFU9VP5BlmMo6ttKO1ZJ1JqejVPP11L58HLLvndCO4dK0zzHQkaxverp2SZf6cq1fhThVFVN2fflRJNuKq2L6c4460wl1JBO8kQMAwDSQn6FVEKo3CZMCAADA7ImiSBUvUL7ia7Rc02jJU7nmK5KUsuONTgk/j5ioeHpy74Qen1zJvne8Mu1zLOlOa+3SLl20tFsXLulSVzoxAyOdW6aqYiqeryCMlE5YVMUAADAN5GdoFYTqTcKkAAAAMHfU/ECFqq9CxdNIyVWpFtd5RJGUtE2lHUuphCWTkF2SNFys6Yk9E/WNT4eLtWmfY1V/tr6S/YLFnco48277pmmhKgYAgOkjP0OrIFRvEiYFAACAuWkq/CzWfJVqnkbLrgpVXzUvVKRIScsiZD9KFEU6kK/WQ/Yn9kxovOJN6xymIa0Z6KivZH/Zog4lbWuGRjw3vLgqJmEZyryoKibttPZzAADASyE/Q6sgVG8SJgUAAID5o+oFkyG7r9HSZMjuBwpDybHNyboYOtmlOGTfNVrWE3viupgn906o5AbTOodtGjp/UWc9ZF8zkJNttXZNiuvHq9grXqAwoioGAACJ/Aytg1C9SZgUAAAA5q+qF6hU81V2A42WXOWrnqpuqCCK5FhxXUyakF1SvCJ7++Gintgbr2R/al9eNT+c1jlSCVMXLO7S2iVxXczK/mxLP7dUxQAAECM/Q6sgVG8SJgUAAIDWUfMDlWvxavZ4JbunqhfKD0MlLFOZhK1Uwmz51daN8IJQvzpYqNfFPHugID+c3q8YuaStVyzpqneyL+tJt/SmslTFAADaFfkZWgWhepMwKQAAALSuqSqPYs3XWNlVvuKr4gbyo1AJ48jGpwlCdlW9QM/sz8ch+95xPX+oqGlm7OrJJLR2aXc9ZF/YmZqZwc4RJ6uKWZBLKZe0laUqBgDQIsjP0CoI1ZuESQEAAKB9eEEYr2R3fU2UXY2XPZW9QEEYyTIMpRPx5qeE7FKp5uupfRN6fHIl+86R8rTPMdCR1EWTIfsrlnSpL5ecgZHODVTFAABaGfkZWgWhepMwKQAAALQvPwhVcuNe9omKp/GSp7LvKwgiGVMhe8JitbGkiYqnLZN97I/vHte+ieq0z7G0Jx2vZF8Sh+yd6cQMjHRuoCoGANBKyM/QKgjVm4RJAQAAAFOCMFLJ9VWq+cpXPI2VvTgUDUIZmqqLMZW0CUMPF2rasne8vpJ9uOhO63hD0soFWa1d0q2Llnbp5Ys7lXHsmRnsHHB0VUwURUo5ljpTCfXnklTFAADmPPIztApC9SZhUgAAAMDJBGGksuurVAtUqHoaKbmquL7cIJJpSCk77mRP2mZLb9D5UqIo0v6Jar2P/Yk9E5qoeNM6h2lI5w521DvZX7awo2XfvIiiSBUvUMUNqIoBAMwL5GdoFYTqTcKkAAAAgEaFYaSyF9fFFKqeRopuHIwGoQxJqcm6GEL2SLtGy3p8Txywb9k7obIbTOscCcvQ+Qs7tXZply5a2q3VAznZLdp1T1UMAGCuIz9DqyBUbxImBQAAAJyuKIpUdqdCdl+jZVelmq+aF0iGoZQdh+ypRHuH7EEYadvhYrySfc+4ntqfl+uH0zpHOmHpgsVxyL52abdW9mdltuhz+lJVMbmUzWa6AICzivwMrYJQvUmYFAAAANAsU7UexZqvUtXXSGkyZPdDRZGUtKd62a2WDYQb4QWhfnWwoCf2TOjxPeN67kBBfji9X286krYuXNKliyZD9qU96ZZ84+LoqpiqH8qarIrpzcRVMdkkVTEAgJlHfoZWQajeJEwKAAAAmClRFKnqhXHIXos72Ys1XzUvVKRIScsiZJdU9QI9vT9fX8m+7XBR08zY1ZtxJlexxyH7YGdqZgY7y6iKAQDMBvIztApC9SZhUgAAAMDZVJ1ayV7zNVpyVaj6qvmBwlBybHOyLsaS1cYrj4s1X0/tm6iH7DtHytM+x2BnMt70dEkcsvdmnRkY6eyjKgYAcDaQn6
FVEKo3CZMCAAAAZlN1cuPTshtotOQqX/VUdUMFUSTHiuti0m0eso+XXW3ZO1Gvi9k/UZ32OZb1pOOQfWmXXrGkSx2pxAyMdHa9uCrGNg2lHUs92YR6Mo5ySVtZqmIAAKeB/AytglC9SZgUAAAAMJfU/EClWnDUSnZPVS+UH4ZKWKYyCVuphCm7jVcfHypUtWXPkZB9pORO63hD0qoF2XrIfsGirpasTKEqBgDQLORnaBWE6k3CpAAAAIC5bKreo1jzNVZ2la/4qriB/ChUwjiy8Wm7VnxEUaT9E1U9vme8XheTr/rTOodlGjp3IFcP2V+2sFOO3XrPJ1UxAIDTRX6GVkGo3iRMCgAAAJhPvCBUuRao6PqaKLsaL3sqe4GCMJJlGEon4s1P2zUcDaNIu0bK9ZD9yX0TKrvBtM6RsAydv6hTF02G7GsGOlqufoeqGADAdJCfoVUQqjcJkwIAAADmMz8IVXLjupiJiqfxkqey7ysIIhlTIXvCasmV140IwkjbDhfrIfvT+/Ny/XBa50gnLF2w+EjIvqI/K9NorbA5CCOV3bjb3w9DObZJVQwAoI78DK2CUL1JmBQAAADQSoIwUsn1Var5ylc8jZW9uFM7CGVoqi7GVNJuz4DUC0I9d6CgJ/aM64m9E3ruQEF+OL1frTqStl6xtKteF7O0Oy2jxUJ2qmIAAEcjP0OrIFRvEiYFAAAAtLKpFcilWqBC1dNIyVXF9eUGkUxDStlxJ3vSNlsuGG5E1Qv09L68ntg7rsf3TGj74aKmmbGrN+to7dIuXbQkDtkHOlMzM9hZQlUMAID8DK2CUL1JmBQAAADQTsIwUtmL62IKVU8jRVcVN1AtCGVISk3WxbRryF6s+npyX7zh6RN7JvTCaHna51jYmdLaqZXsS7rUk3VmYKSz5+iqmOCYqphkXBWTspVKtOcnIQCgVZGfoVUQqjcJkwIAAADaWRRFKrtTIbuv0bKrUs1XzQskw1DKjkP2VKI9Q/axsqsn907o8T1x0L5/ojrtcyzrzeiiJV1au7RLr1jSrVzKnoGRzp4TVcV0pRz15RyqYgCgRZCfoVUQqjcJkwIAAABwxFTVR7Hmq1T1NVKaDNn9UFEkJe2pXnar5TbrbMShfFVP7I0D9sf3TGi05E7reEPS0IJcfSX7yxd1ttQGoFTFAEBrIj9DqyBUbxImBQAAAODkoihS1QvjkL0Wd7IXa75qXqhIkZKW1bYhexRF2jderfexb9kzrnzVn9Y5LNPQuYMdk53sXTpvYaccu3VWdVMVAwCtgfwMrYJQvUmYFAAAAIDpqU6tZK/5Gi25KlR91fxAYSg5tjlZF2PJarPVyGEU6YWR8uQq9nE9uTevihdM6xyOZer8RR26aGm31i7t1uqBXEs9jzU/UNkN4ueFqhgAmDfIz9AqCNWbhEkBAAAAODPVyY1Py26g0ZKrfNVT1Q0VRJEcK66LSbdhyB6EkZ4/VIw3Pd07oaf35eUG4bTOkU5YunBJp9Yu7dZFS7u0vC/bMp8ICKNIVapiAGBeID9DqyBUbxImBQAAAKC5an6gUi04aiW7p6oXyg9DJSxTmYStVMKU3WYrkr0g1LMHCnHIvmdCzx0sKAin92tdR8rW2iVxH/vapV1a0p1umQ1kT1QVk03a6stSFQMAs438DK2CUL1JmBQAAACAmeX6ocqur2LN11jZVb7iq+IG8qNQCePIxqftVvtRcQM9vT9fD9m3HS5qur/k9WWd+qana5d2aaAjNSNjnQ2nqorpSNnKJqmKAYCzhfwMrYJQvUmYFAAAAICzywtClWuBiq6v8ZKriYqnshcoCCNZhqF0It78tN0C02LV15Z9E/WQfddoedrnWNSVqq9kf8XSLvVknBkY6dl3dFVMLQjj64SqGAA4a8jP0CoI1ZuESQEAAACYXX4QquTGdTETFU/jJU9l31cQRDKmQvaEJcdur5B9rORqy96Jeif7/onqtM9xTm9Ga5d26aKl3bpwSZdySX
sGRnr2+UGoihdQFQMAZwn5GVoFoXqTMCkAAAAAc0sQRiq5vko1X/mKp7Gyp7Lryw9CGZqqizGVtNsrND2Ur+qJPRN6fG+8kn205E7reNOQVi3I6aKlXVq7pFsvX9zZMsHz0VUxhqRUwlJXOqHeLFUxANAM5GdoFYTqTcKkAAAAAMxtUxtYlmqBClVPIyVXFdeXG0QyDSllx53sSdtsmU07X0oURdo7XtETe46sZC9U/WmdwzYNnTvYUe9kf9nCjpYInqeqYspuoJofyDZNqmIA4AyRn6FVEKo3CZMCAAAAML+EYaSyF9fFFKqeRopuvWt7apVyus1C9jCK9MJISY/vmdDju8f11L58vMHnNDi2qZcv6qzXxQwtyMlqgeD5RFUxacdWdzqhrkxCGcdW1rFkt8AbCgAwU8jP0CoI1ZuESQEAAACY38IwUqUesvsaLbsq1XzVvEAyDKUnV7KnEu0TsvtBqOcPF+sr2Z/ZX5AbhNM6R8axdOHirvpK9uV9GZkt8Py5fhyyV7xAYRTJMuPe/q60re6Mo6xjK5O02q5eCABOhfwMrYJQvUmYFAAAAIDWEkVxyF6s+SpVfY2UJkN2P1QUSUl7qpfdaomQuBGuH+q5A3k9vndCT+yZ0K8OFhSE0/uVsjNl6xVLu+ud7Iu7Uy3xJoUXhKpOhuxeEMo2TaUSpjpScV1MJmkpl7Tb6pMPAPBi5GdoFYTqTcKkAAAAALS2KIpU9cI4ZK/FnezFmq+aFypSpKRltV3IXnZ9Pb0/X1/Jvv1wSdP9BbM/52jtku76SvYFHckZGevZFoRxJ3vVC1T1A5mGoWTCVNax1Zd1lE3FnewZxyJkB9A2yM/QKgjVm4RJAQAAAGg/1amV7DVfoyVXhaqvmh8oDONu8XQiDtlboVO8EYWqpycnV7E/vndCu0fL0z7Hoq6U1k6uZH/Fki51Z5wZGOnZN7XxadULVfUDRVGklB2/EdObddSRSiiTtJR17La5XgC0H/IztApC9SZhUgAAAABQnexkL7uBRkuu8lVPVTdUEEVyrLguJt1GIftoydWWvfEq9if2TOhAvjrtcyzvzcSbni7r1gWLu5RL2jMw0rMviiLV/LgypuaHCqJQCctU2rbVnU2oK52o97In2PwUQIsgP0OrIFRvEiYFAAAAAC9W8wOVasFRK9k9Vb1QfhgHqJmErVTClN0moenBfLUesD+xZ0KjZXdax5uGNLQgp7VL47qYly/qVCrROhuBTm1+WvUCBWG8+WnKmdz8NO0om4zrYlrpMQNoL+RnaBWE6k3CpAAAAADgpbh+qFLNV8n1NVZ2la/4qriB/ChUwjiy8Wk7rEyOokh7xiv1PvYteyZUqPnTOodtGjpvYYfWLon72M9b2NFSz50fhKp6cdDuhaFMGUo5pnJJWz2ZhHKpeDV7KsHmpwDmB/IztApC9SZhUgAAAAAwXV4QqlwLVHR9jZdcTVQ8ladWKRuG0om4c7uVguKTCaNIO4dLenxyJftT+/KqeMG0zuHYpl6+qFMXTa5kH1qQa6mqnSCMVPMDVdxAtSCQZChlm8okbfVmHHWkbGWStjIJS2YLPW4ArYP8DK2CUL1JmBQAAAAAnCk/CFVy47qY8YqriZKvsu8rCCIZUyF7wpJjt37I7gehnj9U1OOTnezP7M/LC6b362vWsXThki6tXdqltUu6dU5fRmYLreiOoije+NQLVPUDhZGUsk2lHCsO2dN2XBmTsNqmYgjA3EZ+hlZBqN4kTAoAAAAAmi0II5VcX6War3zF01jZU9n15QehDE3VxZhK2q3fse36oZ49kK/XxTx3sKBwmr/NdqUTesVkyH7R0m4t6kpNqzYl61jKpeKgOmGZ8oK4zqdY9VVyp7eqfiZEUSR3qjLGDRRGoWzbVNq21J1JqCvjKOtYyjh2W7wxA2DuIT9DqyBUbxImBQAAAAAzLQgjlV1fpVqgQtXTSMlVxfXlBpFMQ0rZcSd70m79ju2y6+vpfXk9vm
dCT+wd147DJU33l9v+XHIyYI872ftzyRPeL2EZGuxMyQ8jfeUnO3TfUweUr/jqTNu67oKFeveVK2Wbhg7mq9NeTT/TvCAO2KteIC8KZRvxSvaOpK3erKOMYymbtNn8FMBZQX6GVkGo3iRMCgAAAADOtjCMVPbiuphC1dNI0Z3s2w5lSEpN1sW0Q8ier3h6ct9EfSX77rHKtM+xuCultZN97GuXdqsrnVDCMrSsN6O/+tF2ff6BrSdcHW8a0vtfs0a/d9Uq7R4tz7lg/WhBGKnixSF7zQ9lmVLStpRLxZUx2WRC2WR83bT6NQPg7CM/Q6sgVG8SJgUAAAAAsy2cDEzjkN3XaNlVqear5gWSYSg9uZI9lWj9kH205OqJPeN6Yu+EHt89rkOF2rTPsaIvo6+8e52+9cu9+twDW1/y/jdfu0Y3blihPacR6M+WMIpU9SY3P/VDSZGSCUsZx1Jv1lFHKhGvZndsNj8FcMbIz9Aq7NkeAAAAAACgOUzTUDYZd34PdEqrojhkL9Z8laq+Rkru5CaooaJIStpTvexWS23gKUm9WUdXnzegq88bkCQdyFfjkH1yJftY2XvJcwx2ppSwDP3Z9146UJekzz+wVe9Yv1wZx1J5DnSsN8I0DGUcWxknjgeiKFLND1XxAu0YLimMJMcylHZsdacT6soklHFsZR02PwUAtC9CdQAAAABoUcbRgWmHtKI/UtUL45C9FneyF2u+JiqeIkVKWlbLhuwLO1Na+PKFet3LFyqKIu0Zq+iJPeN6fM+EtuydULHmH3fMO69Yrv/vp7sa3hA1jKRND+3UjRuWz5tQ/cUMw5j8NMORjnV3MmTfPVbRC6NlWaahdMJSV9pWd8ZR1rGVSVptsWEuAAASoToAAAAAtA3DMJR24uB8QUdSK/ql6tRK9pqv0ZKrQtVXvuopDCXHNpWeDFitFqr+MIy4J31Zb0ZvWLtYQRhp50hJj++O62Ke2jehqhdq3cpeffb+X03r3Pc+uV/vedUKPfDMQa0eyGlpT2beP3eObcqxTXWlE5LizU+rXqCD+Zr2jFVkm6ZSCVMdqYR6Mo4ySUu5pN0WXf4AgPZEqA4AAAAAbWxqVXJ/LqnlfVlVJzvZy26g0ZKrfNVToeAriCI5VlwXk26xkN0yDQ0tyGloQU5vvmSp/CDU1kNFdaQTyleOX8F+KvmqL8e2dNdkB3s6YWnVgqzWDHTo3MGc1gx0aLAzOa/D5oRlKmHFIboUb35a9eLrZf9ERaZhKJkwlXVs9WUdZVN2vJrdYfNTAEBrIFQHAAAAANRNhex9kpb1ZlTzA5VqwVEr2T0NF335YaiEZSqTsJVKmC3Vr21bps5f1KkwjNSZtnW42Pgmp50pW6WjqmQqXqCn9uX11L58/baOpK3VAzmtGezQmoGc1gzk1JdLNvUxnE3WUV3+0pHNT0u1QCOloqIoUsqOPyFR3/w0GW9+2kpvzgAA2gehOgAAAADgpJJ23JXdm3W0rDcj1w9Vqvkqub7Gyq7ylXgDVD8KlTCObHyaaIGQvVTzdd0FC/XnD25r+JjrLlion+8YPeV9CjVfv9w9rl/uHq/f1pt16gH7moEOrR7IqXOybmW+ObL5afz11OanVS/QCyNlBVH8hkzattWdTagrnaj3srfCdQMAaH1GFEUNbrmCU8nn8+rq6tLExIQ6OztnezgAAAAAcFZ4wVTIHmi85Gqi4qnsBQrCSJYRb2iZduZnWJp1LHWkE1r/J/c3tFmpaUgPffQ1+tIPtun/PbFfw9NY4X4iCztTWjOY0+oF8ar2oQXZeNPZFjC1+Wl16loxDaWcyc1P046yybgu5ugNUwHMf+RnaBWE6k3CpAAAAAAAkh+EKrlxXcx4xdVEyVfZ9xUEUbxRaiLuZHfs+RGyL+1J657NO3XX/Vtf8r43X7tGN25YoT1jFUnSWMnV1kNFbT1UiP/3YEH56vQ62o9mSFram9GagZzOnayPWdGXnTfP5an4QaiqFwftXh
jKlKGUYyqXtNWTSSiXilezpxJsfgrMZ+RnaBWE6k3CpAAAAAAAxwvCSCXXV6nmK1/xNFb2VHZ9+UEoQ1N1MaaS9txckZywDC3rzeivfrRdn39g6wlXrJuG9P7XrNHvXbVKu0fL8oIT/5odRZEOF2pHgvaDRT1/uKiyG5z2+GzT0PK+jNYMdGjN5Eao5/Rm5n1XeRBGqvmBKm6gWhBIMpSyTWWStnozjjpStjJJW5mEJXOeP1agnZCfoVUQqjcJkwIAAAAAvLQgjFR2fZVqgQpVTyMlVxXXlxtEMg0pZceVH0l77qxITliGBjtT8sNImx7aqXuf3K981VdnytZvXLhI79q4QrZp6GC+etJA/WTCKNLe8Yqen1zJvvVQUdsPl+QG4WmP17FNDS3IHdPRvqg7JXOOPJ+nI4oiVb24l73qBwojKWWbSjlWHLKn441SMwmrpTbNBVoN+RlaBaF6kzApAAAAAMD0hWGkshfXxRSqnkaK7uTq5FCGpNRkXcxcCNkzjlVfIZ2wDHlBpHLNV6Hqn9Fq8xfzg1C7Rsv1ypith4vxBp+NFLufRNaxtHoyYF8zmNPqgZwW5JKz/pyeriiK5E5VxriBwiiUbZtK25a6Mwl1ZRxlHUsZx26JehygVZCfoVUQqjcJkwIAAAAAnLkwjFSph+y+RsuuSjVfNS+QDEPpyZXs7datXfMD7RguaevBIx3te8cqOpNf6LvTCa0eyOncwQ6tGYiD9u6M07Qxn21eEAfsVS+QF4WyjXgle0fSVm/WUcaxlE3abH4KzCLyM7QKQvUmYVIAAAAAgOaLojhkL9Z8laq+RkqTIbsfKoqkpD3Vy27N63qT01F2fW07VNTWQ0X9anJV+6FC7YzOuaAjWa+MWTOY0+oFOWWTdpNGfHYFk2/QVL1ANT+UZUpJ21IuFVfGZJMJZZPxJyHa6Q0aYDaRn6FVEKo3CZMCAAAAAMy8qW7tYs1XqRZ3shdrvmpeqEiRkpbVtiG7JE1UPG09VJjsaI9XtY+VvTM655LudBy0T26EumpBds5uLHsqYRSp6k1ufuqHkiIlE/Hq9Z5MQh2pRLya3bHZ/BSYIeRnaBWE6k3CpAAAAAAAs6M6tZK95mu05KpQ9VXzA4VhvGlnOhGH7FYbBqVRFGmk5NY3QY3/FFSqnX4HvGlIy/uy9RXtqwdyWtGXmXcbhEZRpJofquIF8fUSSY5lKO3Y6k4n1JVJKOPYyjpsfgo0C/kZWgWhepMwKQAAAADA3FCd7GQvu4FGS67yVU9VN1QQRXKsuC4m3aYhuxSHyfsnqkc2Qj1U1LbDxcnV26cnYRla1Z87ZkX7kp70vPu0gDsZsle8QGEUyTINpROWutK2ujOOso6tTNKalyv1gbmA/AytglC9SZgUAAAAAGBuqvmBSrXgqJXsnqpeKD8MlbBMZRK2UgmzrVcjB2GkPWNlbT1Y1K8mN0LdOVySH55+ZJBOWBpakNWayY1Q1wx2aLAjOa/6y70gjCtjvEBeEMo2TaUSpjpSCfVkHGWTcX1M0m6vjXOB00V+hlZBqN4kTAoAAAAAMD+4fqhSzVfJ9TVWdpWv+Kq4gfwoVMI4svFpoo1DdikOlHcMl+J+9kMFbT1Y1O6xss4gZ1dHyo43QT1qRXtv1mneoGdYEEb1kL3mBzINQ8mEqaxjqy/rKJuy49XsDpufAidCfoZWQajeJEwKAAAAADA/ecFUyB5ovORqouKp7AUKwkiWEdd/pB1CdkmquIG2Dx/ZBHXroaL2T1TP6Jx9WUdrBnNaPRW2D+TUkUo0acQza2rz06oXquoHiqJIqURcL9SbdeLNT5Px5qftWjcEHI38DK2CUL1JmBQAAAAAoDX4QaiSG9fFjFdcTZR8lX1fQRDJmArZE5YSlsFqZEnFql8P2KdWtI+U3DM656KuVH0j1DWDOa3qzyntzP0e86nNT6teoJofKojiiqG0bas7m1BXOlHvZedNGrQj8jO0CkL1JmFSAAAAAIDWFI
SRSq6vUs3XRNnTWNlT1fPlBvGv05ZhyLFNJW1TjtXe3exTRkuunj9U0K8OHVnVXqj6p30+05CW9mTq3exrBnJa2Z+dF8H01Oan1alPP5iGUs7k5qdpR9lkXBeTSsz9Nw2AM0V+hlYxL0P1L3zhC7rzzjt14MABXXTRRfqzP/szrVu37oT3feqpp/Sxj31MjzzyiF544QV99rOf1c0333xG5zwRJgUAAAAAaA9BGNVD0qoXr2jPV/0jq5PDSIqkhGXWw/aEZbZ1/UcURTpYqGnrwcJkR3tRzx8qquIFp31O2zS0oj9br4xZM9ChZb2ZOf88+0GoqhcH7V4YypShlGOqI5lQTzahbDLuZU8l2PwUrYf8DK3Cnu0BTNfXv/513XLLLfriF7+o9evX66677tJ1112n5557TgMDA8fdv1wua9WqVfqd3/kdffCDH2zKOQEAAAAA7csyDeWStnLJY3+lrvlxt3ZtsmM7X3VVqAYqu75cP1KoSIYkZzJsdyZXtrdDcGoYhhZ2prSwM6Wr1iyQFPeR7x2r1Ctjth4qavtwUV7Q2No/P4z0/GQ4/53J25K2qaEFuWNWtC/qSs2p59i2TOUsU7lUfP1MbX46XnZ1sFCRZChlm8okbfVmHHWkbGWStjIJS+Ycf8MAANrFvFupvn79el1++eW6++67JUlhGGrZsmX6wz/8Q330ox895bErVqzQzTfffNxK9TM55xTeaQMAAAAAvFgYHunYrvqBKm6gQjWuknH9UG4QKlIk0zCVPCpsnw+1JjPBD0K9MFo+ZiPUF0ZKCs8gucgmrbibfXJF++qBDvXnnDkVtB8tjCLVvCPXTCQpaZlKOVYcsqftuDImYVE1hHmH/AytYl6tVHddV4888ohuvfXW+m2maeraa6/V5s2bz+o5a7WaarVa/et8Pn9aPx8AAAAA0LpM01DasY7bZNMPwqPC9lDlmq+JihfXyVR8+UEkKZJtmkraVn11+1yvNjlTthWvNB9akNNvaKGk+BMAOw6X4n72yVXte8crDZ+zVAv02O5xPbZ7vH5bdyZxzEaoawY61JVONPvhnBbTOPaaiaJI7mRlzAsjZYVRKNs2lbYtdWcS6so4yjqWMo4txyZkB4CzYV6F6sPDwwqCQIODg8fcPjg4qGefffasnvOOO+7Qxz/+8dP6mQAAAACA9mZPbmiafVGFjOuHqvpxV3vNC1WoespXfVV8X/lqpHDyw+btVCGTtC29bFGnXrboyKrWUs3X84fj6petB+MV7YcKtVOc5VjjZU8/3zmmn+8cq9820JE8pjZm9UBOGWf2YxPDMJS0LSVtqx78e0Goihto/3hVu8bKso14JXtH0lZv1lHGsZRN2mx+CgAzZPb/32GeuvXWW3XLLbfUv87n81q2bNksjggAAAAAMN9NBeWdqSOrpqPoqAoZL1TFjTdGLdV8Fau+3CBUqEiWjgTtyRavkMkmbV20tFsXLe2u3zZeduuboP5qckPU8YrX8DkPFWo6VKjpJ9tGJEmGpCU96XplzLkDOa1ckFXSnv2gOmGZSqRNdU6G7FOb546WXO2fqMoy4zcjcqm4MiabTCibtJROWC39BgwAnC3zKlTv7++XZVk6ePDgMbcfPHhQCxcuPKvnTCaTSiaTp/UzAQAAAABolGEYSiWs41YdT21weXSFTL7qqeKFGq+48vxQkiHbNI4K262WrZDpzji6bEWvLlvRKyl+M2K46B61EWoctJfcoKHzRZL2jFW0Z6yi7z93WFK8Se3y3swxK9rP6c3Merf5izfPDaP42siXfR3Ou5IiJRPx6vWeTEIdqUS8mt2x2fwUAE7DvArVHcfRpZdeqgceeEDXX3+9pHhT0QceeEA33XTTnDknAAAAAAAzzTINZZP2cRUyXnBkVXvVC1Ss+cpXPNX8UIWqrzAKFcmIK2SOqpExW2wFs2EYWtCR1IKOpDYO9UuKw+b949X6JqhbDxW17XBRrh82dM4gjLR9uKTtwyXd93
S8OM+xTK3sz9a72dcM5rSkOz2rz6dpGMo4dr2+ZurTDhU30HjZVRhJjmUo7djqTifUlUko49jKOmx+CgCNmFehuiTdcsstuvHGG3XZZZdp3bp1uuuuu1QqlfTud79bknTDDTdoyZIluuOOOyTFG5E+/fTT9f/eu3evHnvsMeVyOa1evbqhcwIAAAAAMF8krLj6pSN15LapULXmxZ3t9QqZaqCS62usHCpSXHniWJObo9qmEpbRUnUhpmFoSU9aS3rSuvq8AUlxUL5rtFxfyb71YFE7R0ryw6ihc7pBqOcOFvTcwYKk/ZKkjGNp9YK4l31qRftAR3LWnssTfdrB9UNVvEC7xyp6YbQsyzSUTlj1kD3r2MokrTlRdwMAc40RRVFj/y8xh9x999268847deDAAV188cX6/Oc/r/Xr10uSrr76aq1YsUKbNm2SJO3cuVMrV6487hyvfvWr9eCDDzZ0zkbk83l1dXVpYmJCnZ2dL30AAAAAAACzLAyjyY1R41XtZTeukCm7oVw/kOfHq9qnKmSSkxujtvpqZtcPtXOkpK0HC/rV5Ir2PaNlnUmA0pmytWawQ6sHcjp3IF7V3pN1mjbmMzX1CYeKF8gPQ1mGqVTCVEcqoZ6Mo2wyro9J2q29MS5mFvkZWsW8DNXnIiYFAAAAAECreHGFTKnmq1DzVfUC1fxQQRhJUbwqvh62t2CFzNEqbqBth4tHqmMOFnUgXz2jc/bnnLgyZnJF++oFOeVSc6NUYKqzv+IFqvmBTMNQMmEq69jqyzrKpux4NbvD5qdoHPkZWgWhepMwKQAAAAAAWlkURXKDUFUvVG0ycM9XXRWqgVw/kOtHChXJNKSE2boVMkcrVL16N/vWg3HYPlpyz+ici7pS9W72NQM5DS3IHbdJ7WyY2vy0OlkhFEWRUglL6YSl3qwTb36ajDc/bdXNcHHmyM/QKgjVm4RJAQAAAADQjsIw7muveoGqfqByLVCh6qvk+nHYHkSKokiWaSp51MaoiRatkBkp1rT1UDHuZz9U0NaDRRVq/mmfzzSkc3ozcT/75Kr2Ff3ZWX/+pnr6659eiEIlLFNp21Z3NqGu9JFe9tkeK+YO8jO0CkL1JmFSAAAAAADgCD8IVZ0K2ycrZPLVIxUyfhDJMCLZU6vaJwP3VlvlHEWRDuZr2nqooF8djIP2bYeLqnrhaZ/TNg2t7M/Gm6AuyGnNYE5LezKz/txNbX5a9QIFYRRvfupY6kzb6k47yibjupi5sPIes4P8DK2CUL1JmBQAAAAAAHhptcmNUacqZApVT4War5ofyPMjhZMxhXPUqnbHaq3NMYMw0t7xSr0yZuuhgrYfLskPTz+iSSVMDS2IK2PWDMQboi7qSs3q8+ZP1gVVvEBeGMqUoZRjqiOZUE82oWwy7mVPJVrr7xcnR36GVkGo3iRMCgAAAAAAnJ6jq0SqXqiKG69qL9V8uX4oNwgVKZJptG6FjBeEemGkfNRGqAXtGi3rDHJ25ZL2ZG1MvBHquQM59eWSzRv0NE1tflr1AtWCQJKhlG0qk7TVm3HUkbKVSdrKJCyZLfaJBcTIz9AqCNWbhEkBAAAAAIDmOjqErfqhyjVfExVvMpSNK2SiKFLCPBK0J21r1mtQmqXqBdo+XNLWg4XJjvai9o5XzuicvRknDtoHj3S0d6YTTRrx9IRRpJp3pI8/kpS0TKUcKw7Z03ZcGZOwZLfQGyjtjPwMrYJQvUmYFAAAAAAAODtcP1TVn1zxPFkhk68eXSETKpIRV8gctbLdbIGKkWLN17ZDRf1qchPUrYeKGi7Wzuicg53JesC+ZiCnoYGcMo7dpBE3LooiuVOVMW6gMApl26bStqXuTEJdGUdZx1LGseXYhOzzEfkZWgWhepMwKQAAAAAAMHtOViFTrsVVI64fKpJkyjhqVXtrVMiMld14JXu9o72oiYp32uczJC3tScdB++SK9pX92VkJsr0gDtirXiAvCmUb8Ur2jqSt3qyjjGMpm7
TZ/HSeID9DqyBUbxImBQAAAAAA5p4gjOqbo1a9QGXXV77qqeyGcv1Anh9KMmSbRstUyERRpMPFWn0l+9ZDcX1M2Q1O+5yWaWh5X6a+ov3cwZzO6c2e9ecpCCNVpnrZ/VCWKSVtS7lUXBmTTSaUTVpKJyw2P52DyM/QKgjVm4RJAQAAAACA+cMLjqxqr3qBijVfhaqnqhfK9UMFUShFhhLWkVXt87lCJowi7RuvaOuhYn1V+7bhklw/PO1zOrapof7sZEd7HLYv7k6f1ecojOLe/Yobh+xSpGQiXr3ek0moI5WIV7M7NpufzgHkZ2gVhOpNwqQAAAAAAMD8NlUhU/OOdLbnq56K1UCuH8j1I8Vt7ZJjxSvaHdtUwjLm5aroIIy0a7SkXx21ov2FkbKC8PSjooxjxSH7wJGNUBd0JM/a8zP1d1jxAtX8QGEkOZahtGOrO51QVyahjGMr67D56WwgP0OrIFRvEiYFAAAAAABaUxhGkyF7WF8VXaj6Krl+HLYHcbRiGcaRVe2WOS9DW9cPtWO4pK31jVAL2jNW0ZmER13pRH0T1DWDHVo9kFNPxmnamF+KOxmyV7xAYRTJMg2lE1Y9ZM86tjJJS0mbXvaZRn6GVkGo3iRMCgAAAAAAtBc/CFWtb44aqFSLN0ed6vsOwkiKdEyFTMIy511fe9n1te1Qsb4J6tZDBR3M187onP255GTIntO5Ax0aGsgpl7SbNOJTm6r+qXiB/DCUZZhKJUx1pBLqyTjKJtn8dKaQn6FVEKo3CZMCAAAAAACQVN8YtTbZ2Z6vuiqcpEJmanNUxzLnVYXMRMWLu9knN0HderCo0bJ7Rudc0p0+Uh0z2KFV/dmzEmwHYVQP2Wt+INMwlEyYyjq2+rKOsik7Xs3usPnpmSI/Q6sgVG8SJgUAAAAAAHAyYRh3fVe9QFX/qAqZmi/XD+UGoSJFMg1TyaPC9sQ8qpAZKdb0q8lNUKc2RC3W/NM+n2lI5/Rm6pugrhno0PK+zIw/J1Obn1Ynu/WjKFIqYSmdsNSbdeLNT5Px5qfz7VMHs438DK2CUL1JmBQAAAAAAMB0+UF4VNgeqlzzNVHx4gqZIJQfRJIi2ebkxqiTgft8CHOjKNKBfLXezb71UFHbDhdV9cLTPmfCMrSyP1vfBHXNYIeWdKdn9PmY2vx06g2RMIqUsEylbVvd2YS60kd62efTmyCzgfwMrYJQvUmYFAAAAAAAQLO4fji5OWqgmheqUPWUr/qq+YE8P1I4GefMtwqZIIy0Z6x8pJ/9YEE7hkvyw9OPp9IJS6sWxEH7uYPxivbBzuSMPhdTm59WvUBBOLn5qWOpM22rO+0om4zrYuhlPxb5GVoFoXqTMCkAAAAAAICZdMyKaS9UxY03Rj26QiZUJEtHgvbkPKiQ8YJQO4dL9cqYrYcK2jVa1hnk7OpI2nE/e706Jqe+XLJ5g34RPwjjvxMvkBeGMmUo5ZjqSCbUm3XqdTGpxNx/42MmkZ+hVRCqNwmTAgAAAAAAmA1TG20eXSGTr3qqeKFcP5Dnh5IM2aZxVNhuzekKmaoXaNvhqdXscdC+f6J6RufszTr1gH3NQIdWD+TUmU40acTHOvrvpBYEkgylbFOZpK3ejKOOlK1M0lYmYcmcw38PzUZ+hlZBqN4kTAoAAAAAAGAu8YIjq9qrXqBizVe+4qnmh3L9UGEUKpIRV8gcVSNjztGV1MWqr+cPH9kIdeuhgoaL7hmdc2FnSmsGjwTtQwtySjvNr2wJo0g170gveyQpaZlKOVYcsqftuDImYcme458sOBPkZ2gVhOpNwqQAAAAAAADmuqkKmZoXd7bXK2Sq8Ypq1w8VSTIU97UnbUuObSphGXOytmSs5NY3QZ3qaM9X/dM+nyFpaW9GawZyOneyPmZlf7bpFTpRFMmdqoxxA4VRKNs2lbYtDXQmtXqgo6k/b64gP0OrIFRvEiYFAAAAAAAwX4VhNLkxaryauuzGFTJl90iFTHRUhUxycm
PUubaqOooiHSrU6gH785Nhe8ULTvuctmloRV92sqM9XtF+Tm+m6fU5XhBqrOwq49i6YlXvnHwT40yRn6FVEKo3CZMCAAAAAABoNS+ukCnVfBVqftwV7ocKwkiKpIR1ZGPUuVYhE0aR9o5X6t3sWw8WtWO4JDcIT/ucSdvUqgW5YzraF3WnTvtxZx1LuZStjGMrYRsKJ4c2E1U0s4n8DK2CUL1JmBQAAAAAAEA7OLq6pDYZuOerrgrVQK4fyPUjhYpkGlLCnJsVMn4Qatdoub6ifeuhonaOlBSeQUqWdax4NftAh9YM5rR6IKcFueQpH3PCMjTYmZIfRvrKT3bovqcOKF/x1Zm29RsXLNS7X7VSGcdSxrFPf2BzCPkZWgWhepMwKQAAAAAAgHYWhnFf+9RmnOVaoELVV8n147A9iBRFkSzTVPKojVGb3Vd+ump+oB3DpSMr2g8VtXesojMJzrrTCa0eyOncwY54Rftgh7rSCUlxoL6sN6O/+tF2ff6BrScM9E1Dev9r1uj3Xz2kVGL+r1onP0OrIFRvEiYFAAAAAACA4/lBqOpU2D5ZIZOvHqmQ8YNIhhHJnlrVPhm4N7uz/HSUXb/eyz61qv1QoXZG51zQkdSagZw+9ea1+sdH9+hzD2x9yWNuvnaN/n+/tmrer1gnP0OrmN//EgEAAAAAADCn2ZapnGUqlzw2hqpNbow6VSFTqHoq1HxVfF/5aqRwch2oc9Sqdscyz2qFTMaxtXZpt9Yu7a7fNlHx6t3sUyvax8tew+c8XKhpVX9Wpin92fdeOlCXpM8/sFXvvGL5vA/VgVbBv0QAAAAAAACcdUnbUtK2pMk6FCnua69XyHihKm68qr1U81Ws+nKDUJEimcbsVch0pRO6bHmvLlveWx/zSMmtd7PHfwoq1YKTnuOdVyzX//fTXQ13uIeR9JWf7NT7rlndcpuXAvMRoToAAAAAAADmBMMwlEpYx/WHB2FUr4+p+qHKNV8TFS+uk6n48if72hPmkaA9aVtnpULGMAz155LqzyW1YahfUhy075+o6lcHC/X6mG2Hi6r5oSRp3cpeffb+X03r53znyf163zWrmz5+ANNHqA4AAAAAAIA5zTINZZO2si+qkHH9UFU/DttrkxUy+aqvqh9vkhqEoWQYcYXMUSvbzRmukDEMQ4u701rcndbV5w1Iit8Y2D1a1tZDBXWkEspX/GmdM1/1ZVuz3zMPgFAdAAAAAAAA89RUSN6ZeukKmXItUMn1NVYOFUkyZRy1qn3mK2Qs09CK/qxW9GcVRpE607YOFxvf9LQzZcsPIiVofwFmHaE6AAAAAAAAWsapKmSmNketeoHKrq981VPZDTVeceX5oSRDtmnMeIVMqebrugsW6s8f3NbwMa+/cFHTxwHg9BCqAwAAAAAAoOVZpqGMYyvjHHu7FxxZ1V71AhVrvgpVT1UvjCtkolCKDCWsI6vaz7RCplj19e4rV+qLP9jW0GalpiG9+8oVbFIKzBGE6gAAAAAAAGhbCSuufulIHbltqkKm5h3pbJ+oeCpVj62QMSQ5Vryi3bFNJSxDRgNhe8kN1JN19P7XrNFd9299yft/4DVrCNSBOYRQHQAAAAAAADjK0RUyXTrS1x6G0WTIfqRCplD1VXYD5auu3CBedm4ZxpFV7ZYp+wR97QfzVf3eVaskSZ9/YOsJV6ybhvT+16zRf3n10HF1NgBmjxFFUQMfMsFLyefz6urq0sTEhDo7O2d7OAAAAAAAADhL/CBUtb45aqBSLd4cteoFqvmhgjCSIh1TIZOwTKUSpgY7U/LDSJse2ql7n9yvfNVXZ8rW6y9cVK98yTitsS6W/AytglC9SZgUAAAAAAAAcLSpjVFrk53t+aqrQjWQ6wdy/UihIhmSFuSSGuxKqSNly7FNhWF8fKtVvpCfoVW0xttcAAAAAAAAwByTtC0lbUtKT1XIZBSGcV971QtU9QNV3ECFqq+dwyW5fqhcytaly3sa6mYHMDsI1Q
EAAAAAAICzxDQNpR3ruFXoUxUykgjUgTmOUB0AAAAAAACYZbZlKneCDU0BzD38SwUAAAAAAAAAoEGE6gAAAAAAAAAANIhQHQAAAAAAAACABhGqAwAAAAAAAADQIEJ1AAAAAAAAAAAaRKgOAAAAAAAAAECDCNUBAAAAAAAAAGgQoToAAAAAAAAAAA0iVAcAAAAAAAAAoEGE6gAAAAAAAAAANIhQHQAAAAAAAACABhGqAwAAAAAAAADQIEJ1AAAAAAAAAAAaRKgOAAAAAAAAAECDCNUBAAAAAAAAAGgQoToAAAAAAAAAAA0iVAcAAAAAAAAAoEGE6gAAAAAAAAAANIhQHQAAAAAAAACABtmzPYBWEUWRJCmfz8/ySAAAAAAAAIC5Zyo3m8rRgPmKUL1JCoWCJGnZsmWzPBIAAAAAAABg7ioUCurq6prtYQCnzYh4a6gpwjDUvn371NHRIcMwZns4TZfP57Vs2TLt3r1bnZ2dsz0czHFcL5gOrhdMB9cLpoPrBdPB9YLp4HrBdHC9YDpa/XqJokiFQkGLFy+WadJKjfmLlepNYpqmli5dOtvDmHGdnZ0tOaljZnC9YDq4XjAdXC+YDq4XTAfXC6aD6wXTwfWC6Wjl64UV6mgFvCUEAAAAAAAAAECDCNUBAAAAAAAAAGgQoToakkwmddtttymZTM72UDAPcL1gOrheMB1cL5gOrhdMB9cLpoPrBdPB9YLp4HoB5gc2KgUAAAAAAAAAoEGsVAcAAAAAAAAAoEGE6gAAAAAAAAAANIhQHQAAAAAAAACABhGqAwAAAAAAAADQIEJ11H3hC1/QihUrlEqltH79ej388MOnvP/f//3f62Uve5lSqZRe8YpX6N/+7d/O0kgxF0znetm0aZMMwzjmTyqVOoujxWz54Q9/qDe+8Y1avHixDMPQt771rZc85sEHH9Qll1yiZDKp1atXa9OmTTM+TswN071eHnzwwePmFsMwdODAgbMzYMyqO+64Q5dffrk6Ojo0MDCg66+/Xs8999xLHsfrl/Z0OtcLr1/a11/8xV9o7dq16uzsVGdnpzZs2KDvfOc7pzyGuaV9Tfd6YW7B0T71qU/JMAzdfPPNp7wfcwww9xCqQ5L09a9/Xbfccotuu+02Pfroo7rooot03XXX6dChQye8/0MPPaTf/d3f1Xve8x798pe/1PXXX6/rr79eTz755FkeOWbDdK8XSers7NT+/fvrf1544YWzOGLMllKppIsuukhf+MIXGrr/jh079IY3vEHXXHONHnvsMd18881673vfq/vuu2+GR4q5YLrXy5TnnnvumPllYGBghkaIueQHP/iB3ve+9+mnP/2pvvvd78rzPL3uda9TqVQ66TG8fmlfp3O9SLx+aVdLly7Vpz71KT3yyCP6xS9+oV//9V/Xm970Jj311FMnvD9zS3ub7vUiMbcg9vOf/1xf+tKXtHbt2lPejzkGmJuMKIqi2R4EZt/69et1+eWX6+6775YkhWGoZcuW6Q//8A/10Y9+9Lj7v+1tb1OpVNK//uu/1m+74oordPHFF+uLX/ziWRs3Zsd0r5dNmzbp5ptv1vj4+FkeKeYSwzD0zW9+U9dff/1J7/NHf/RH+va3v33MC8S3v/3tGh8f17333nsWRom5opHr5cEHH9Q111yjsbExdXd3n7WxYW46fPiwBgYG9IMf/EC/9mu/dsL78PoFUxq5Xnj9gqP19vbqzjvv1Hve857jvsfcghc71fXC3AJJKhaLuuSSS/Tnf/7n+l//63/p4osv1l133XXC+zLHAHMTK9Uh13X1yCOP6Nprr63fZpqmrr32Wm3evPmEx2zevPmY+0vSddddd9L7o3WczvUixS8ali9frmXLlr3kyg20L+YWnI6LL75YixYt0mtf+1r95Cc/me3hYJZMTExIioOMk2GOwZRGrheJ1y+QgiDQ1772NZVKJW3YsOGE92FuwZRGrheJuQXS+973Pr3hDW84bu44EeYYYG4iVI
eGh4cVBIEGBwePuX1wcPCkvbQHDhyY1v3ROk7nejnvvPP05S9/Wf/8z/+sv/3bv1UYhtq4caP27NlzNoaMeeRkc0s+n1elUpmlUWGuWrRokb74xS/qH//xH/WP//iPWrZsma6++mo9+uijsz00nGVhGOrmm2/WlVdeqQsvvPCk9+P1C6TGrxdev7S3LVu2KJfLKZlM6vd///f1zW9+Uy9/+ctPeF/mFkznemFuwde+9jU9+uijuuOOOxq6P3MMMDfZsz0AAK1vw4YNx6zU2Lhxo84//3x96Utf0v/8n/9zFkcGYD4777zzdN5559W/3rhxo7Zt26bPfvaz+pu/+ZtZHBnOtve973168skn9eMf/3i2h4J5oNHrhdcv7e28887TY489pomJCf3DP/yDbrzxRv3gBz84aVCK9jad64W5pb3t3r1bH/jAB/Td736XDWqBeY5QHerv75dlWTp48OAxtx88eFALFy484TELFy6c1v3ROk7nenmxRCKhV77ylXr++ednYoiYx042t3R2diqdTs/SqDCfrFu3jmC1zdx0003613/9V/3whz/U0qVLT3lfXr9gOtfLi/H6pb04jqPVq1dLki699FL9/Oc/1+c+9zl96UtfOu6+zC2YzvXyYswt7eWRRx7RoUOHdMkll9RvC4JAP/zhD3X33XerVqvJsqxjjmGOAeYm6l8gx3F06aWX6oEHHqjfFoahHnjggZP2wG3YsOGY+0vSd7/73VP2xqE1nM718mJBEGjLli1atGjRTA0T8xRzC87UY489xtzSJqIo0k033aRvfvOb+t73vqeVK1e+5DHMMe3rdK6XF+P1S3sLw1C1Wu2E32NuwYud6np5MeaW9vKa17xGW7Zs0WOPPVb/c9lll+kd73iHHnvsseMCdYk5BpirWKkOSdItt9yiG2+8UZdddpnWrVunu+66S6VSSe9+97slSTfccIOWLFlS7/z6wAc+oFe/+tX60z/9U73hDW/Q1772Nf3iF7/QX/7lX87mw8BZMt3r5ROf+ISuuOIKrV69WuPj47rzzjv1wgsv6L3vfe9sPgycBcVi8ZhVNzt27NBjjz2m3t5enXPOObr11lu1d+9e/d//+38lSb//+7+vu+++Wx/5yEf0n//zf9b3vvc9feMb39C3v/3t2XoIOIume73cddddWrlypS644AJVq1X99V//tb73ve/p3//932frIeAset/73qevfvWr+ud//md1dHTUe0W7urrqn2zh9QumnM71wuuX9nXrrbfq9a9/vc455xwVCgV99atf1YMPPqj77rtPEnMLjjXd64W5pb11dHQct59HNptVX19f/XbmGGB+IFSHJOltb3ubDh8+rI997GM6cOCALr74Yt177731zTB27dol0zzywYaNGzfqq1/9qv77f//v+uM//mOtWbNG3/rWt0652RNax3Svl7GxMf3e7/2eDhw4oJ6eHl166aV66KGH6KRsA7/4xS90zTXX1L++5ZZbJEk33nijNm3apP3792vXrl31769cuVLf/va39cEPflCf+9zntHTpUv31X/+1rrvuurM+dpx9071eXNfVhz70Ie3du1eZTEZr167V/ffff8w50Lr+4i/+QpJ09dVXH3P7V77yFb3rXe+SxOsXHHE61wuvX9rXoUOHdMMNN2j//v3q6urS2rVrdd999+m1r32tJOYWHGu61wtzC14KcwwwPxhRFEWzPQgAAAAAAAAAAOYDOtUBAAAAAAAAAGgQoToAAAAAAAAAAA0iVAcAAAAAAAAAoEGE6gAAAAAAAAAANIhQHQAAAAAAAACABhGqAwAAAAAAAADQIEJ1AAAAAAAAAAAaRKgOAAAAAAAAAECDCNUBAACAabr99ttlGMZsDwMAAADALCBUBwAAwKzYtGmTDMOo/7FtW0uWLNG73vUu7d27d7aHBwAAAAAnZM/2AAAAANDePvGJT2jlypWqVqv66U9/qk2bNunHP/6xnnzySaVSqdkeHgAAAA
Acg1AdAAAAs+r1r3+9LrvsMknSe9/7XvX39+vTn/60/uVf/kVvfetbZ3l0AAAAAHAs6l8AAAAwp1x11VWSpG3bttVv+973vqerrrpK2WxW3d3detOb3qRnnnnmmOPe9a53acWKFced70T954Zh6KabbtK3vvUtXXjhhUomk7rgggt07733Hnf8j3/8Y11++eVKpVIaGhrSl770pSY8SgAAAADzFSvVAQAAMKfs3LlTktTT0yNJuv/++/X6179eq1at0u23365KpaI/+7M/05VXXqlHH330hEF6I3784x/rn/7pn/Rf/+t/VUdHhz7/+c/rLW95i3bt2qW+vj5J0pYtW/S6171OCxYs0O233y7f93XbbbdpcHCwGQ8VAAAAwDxEqA4AAIBZNTExoeHhYVWrVf3sZz/Txz/+cSWTSf3Wb/2WJOm//bf/pt7eXm3evFm9vb2SpOuvv16vfOUrddttt+mee+45rZ/7zDPP6Omnn9bQ0JAk6ZprrtFFF12kv/u7v9NNN90kSfrYxz6mKIr0ox/9SOecc44k6S1veYte8YpXnOnDBgAAADBPEaoDAABgVl177bXHfL1ixQr97d/+rZYuXar9+/frscce00c+8pF6oC5Ja9eu1Wtf+1r927/92xn93KlAfeqcnZ2d2r59uyQpCALdd999uv766+uBuiSdf/75uu66687oZwMAAACYv+hUBwAAwKz6whe+oO9+97v6h3/4B/3mb/6mhoeHlUwmJUkvvPCCJOm888477rjzzz9fw8PDKpVKp/Vzjw7Kp/T09GhsbEySdPjwYVUqFa1Zs+a4+51oPAAAAADaAyvVAQAAMKvWrVunyy67TFJc6/KqV71K//E//kc999xz0zrPizcjnRIEwQlvtyzrhLdHUTStnwsAAACgvbBSHQAAAHOGZVm64447tG/fPt19991avny5JJ0wYH/22WfV39+vbDYrKV5lPj4+ftz9pla7T9eCBQuUTqe1devW47433cAfAAAAQOsgVAcAAMCccvXVV2vdunW666671NPTo4svvlj33HPPMYH5k08+qX//93/Xb/7mb9ZvGxoa0sTEhJ544on6bfv379c3v/nN0xqHZVm67rrr9K1vfUu7du2q3/7MM8/ovvvuO61zAgAAAJj/CNUBAAAw5/y3//bfdPDgQW3atEl33nmnRkZGtGHDBn3mM5/R//yf/1O//uu/rq6uLt1+++31Y97+9rcrm83qt3/7t/W5z31Od9xxh9avX69zzz33tMfx8Y9/XJJ01VVX6dOf/rQ++clP6pprrtEFF1xwpg8RAAAAwDxFqA4AAIA5581vfrOGhob0mc98Rtdcc43uvfde9fX16WMf+5g+85nP6IorrtBPfvITrVy5sn5MX1+fvvnNbyqTyegjH/mI7rnnHt1xxx164xvfeNrjWLt2re677z4tWLBAH/vYx/TlL39ZH//4x/Xbv/3bzXiYAAAAAOYhI2InJgAAAAAAAAAAGsJKdQAAAAAAAAAAGkSoDgAAAAAAAABAgwjVAQAAAAAAAABoEKE6AAAAAAAAAAANIlQHAAAAAAAAAKBBhOoAAAAAAAAAADSIUB0AAAAAAAAAgAYRqgMAAAAAAAAA0CBCdQAAAAAAAAAAGkSoDgAAAAAAAABAgwjVAQAAAAAAAABoEKE6AAAAAAAAAAANIlQHAAAAAAAAAKBBhOoAAAAAAAAAADSIUB0AAAAAAAAAgAYRqgMAAAAAAAAA0CBCdQAAAAAAAAAAGkSoDgAAWtKDDz4owzDqf3bu3DnbQzqp+TRWAMe6+uqr6/923/Wud832cAAAAHAWEKoDAIDT9uIw+GR/2jloapXAfNOmTcc8jgcffHC2hzRj/uEf/uG4a/juu++e7WHNWTt37jzhv3vTNNXZ2akLL7xQN910k55//vnZHioAAADQFPZsDwAAAKDdDQ0N6c4776x/3dvbO4ujwVe+8pXjbtu0aZNuuummWRjN/BVFkQqFgp
566ik99dRTuueee/SDH/xAl1xyyWwPDQAAADgjhOoAAKBp3va2t+myyy477vYLL7xwFkYzfyxbtkwf/vCHZ3sYkHTgwAHdd999x93+yCOP6Mknn5zxazmfz6uzs3NGf8ZMe+1rX6vXve51KpVK+u53v6uf/OQnkqRisaj/9b/+l/7pn/5plkcIAAAAnBnqXwAAQNP8xm/8hj784Q8f9+c3fuM3JEkPPPDAMfUQO3bsOOb4MAy1ePHi+vc/+clPSpJGR0f1kY98RK95zWu0YsUKdXR0yHEcDQ4O6rWvfa3+5m/+RlEUNTzOU3Ugv7jm5GgPPvig3vOe9+iSSy7RokWLlEwmlclktHr1ar373e/Wli1bjrm/YRi65pprjrlt5cqVx/3sl6qICYJAX/7yl/Wa17xG/f39SiQS6uvr0zXXXKO/+qu/ku/7x9z/xXUcDz74oL72ta9p/fr1ymQy6unp0e/8zu9o9+7dDT9np+tXv/qV/uAP/kDnnXeeMpmMMpmMzj33XP2X//Jf9Oyzzx53/1KppE984hO65JJL1NHRoUQioYGBAV188cX6vd/7Pd17773H3P9HP/qRfvu3f1tLliyR4zjK5XJasWKFXv/61+v222/XxMTEtMb7N3/zNwqCQJKUy+W0ePHi+vc2bdp00uN839eXv/xlve51r9Pg4KAcx9GCBQt0xRVX6OMf/3j9fif6u/k//+f/6JJLLlE6ndav/dqvHXPef/zHf9Qb3vAGLVy4UI7jqKenRxs3btSf/umfqlwuHzeOLVu26D/9p/+kFStWKJlMKp1O65xzztGv//qv69Zbb9XevXuPGfNdd92lDRs2qLu7W7Ztq6+vTxdccIFuuOEGfe1rX5vWczdl48aN+vCHP6zbbrtN3//+97V06dL6955++ukTHvPII4/ohhtu0MqVK5VKpZTL5XThhRfqQx/6kPbs2XPc/VesWFF/Dm+//fZjvnf77bfXv7dixYpTHvfII4/ot37rt9Td3a1MJqOrrrpKP/7xj084xm9+85tat26d0um0BgcH9Z73vEeHDh065XPR7OsTAAAAc0QEAABwmr7//e9Hkup/vvKVr5zy/mEYRsuXL6/f/0/+5E+O+f4DDzxQ/55pmtHu3bujKIqiLVu2HPNzTvTn3e9+9ynHtmPHjvr3Xv3qV9dvv/HGG4857itf+coxxx3tQx/60CnH4DhO9N3vfrd+/5ca89TPPtVYi8Vi9Gu/9munPM+rXvWqqFAo1I/ZsWPHcd8/0XFr1qyJKpXKKf/OTva8fP/733/JY77xjW9EqVTqpONOJpPR3/3d3x1zzNVXX33Kx/q2t72tft/7778/sizrlPd/5plnGnp8U17+8pfXj/2P//E/Rh/84AfrXw8ODkae5x13zMjISHT55ZefdAxdXV31+7747+aqq6465uuLLrooiqIo8n0/eutb33rKx3b++edH+/btq5/7qaeeijKZzCmP+c53vlO//4033njK+65fv76h5+zFj+m222475vuXXHJJ/XtXXnnlccd/9rOfjUzTPOXz9+Lr7eh55MU/77bbbqt/b/ny5Sc9bt26dVEikTjhdfn0008fc9xf/MVfnHBsK1euPOaaOXo+mYnrEwAAAHMD9S8AAKBp7r33Xg0PDx93+9ve9jYtW7ZMhmHoxhtv1Cc+8QlJ0le/+lXdeuut9ft99atfrf/3a1/72voKV9M0df7552vdunVauHChuru7Va1W9ctf/lL/7//9P0VRpK985Sv6/d//fa1bt27GHl82m9WrX/1qveIVr1Bvb6/S6bRGRkb07W9/W88884xc19X73//++mrcO++8U9u2bdMXv/jF+jn++I//WD09PZIaq8V5//vfrx/+8If1r1/3utdpw4YN+ulPf1qvKfnxj3+s97///fryl798wnP8+Mc/1uWXX67rrrtO3//+9+t1HFu3btW3vvUtvf3tbz+9J+QUnn/+eb3zne9UrVaTJPX19enGG2
+UYRi65557NDw8rFqtphtvvFGXXnqp1qxZo2eeeaa+Aappmrrhhht07rnnanh4WDt27Dhuc9S//Mu/rK8qf9nLXqbf+Z3fkW3b2rVrlx577DE9+uij0xrzww8/fMxK6re//e0aHBzUZz/7WUnSwYMH9Z3vfEdvfOMbjznune98p37+85/Xvz7//PP1m7/5m0omk/rlL3+pn/3sZyf9mT/60Y+0fPlyveUtb1Emk6mvfP6TP/kTfeMb36jf74orrtDrXvc6PfPMM/r7v/97SdIzzzyjd7zjHfre974nSbrnnnvqq9eXLl2q//Sf/pOy2az27NmjJ598Uj/96U/r5ysWi/rbv/3b+tdvectbdMkll2hiYkIvvPCCfvCDH0zruTuRUqmkf/u3f9Pjjz9ev+2tb33rMff54Q9/qFtuuaX+SZNzzjlHv/u7v6tisaivfOUrKpfLmpiY0Fve8hY9//zz9X87zfDwww9r6dKlesc73qHdu3fX559arabPfe5z9X+3e/bs0Qc/+MH6cR0dHXrPe94j0zT15S9/+bhP3Exp9vUJAACAOWS2U30AADB/vXiF9cn+HL3KdPv27ZFhGPXvbdmyJYqiKKrValFPT0/99q9//evH/bwXXngh+od/+Ifo7rvvjj7zmc9Ed955Z7RkyZL6MZ/4xCdOOrZmrFSPoigKgiD62c9+Fm3atCm66667ojvvvDO65ZZbjjlm165dDY3jpe4zPDx8zErXt771rcccd/RKZsuyouHh4SiKjl85vG7dush13SiKosh13WhgYKD+vVtuueX4v9gTmO5K9Q984AP1+5qmWf97jqL4kwdHr0z+wAc+EEVRFD366KPHrMIOw/CYc/q+H+3cubP+9X/4D/+hfv8Xr3iPoijav39/VCqVGnp8URRFf/AHf1A/X09PT1Sr1aIoiqKhoaH67W9+85uPOeaJJ5445nn5zd/8zfpzPWXbtm31/37x383KlSujsbGxY+4fBEHU29tbv8+GDRsi3/fr3//IRz5yzDl++ctfRlEURe9///vrt91xxx3HPb7R0dFodHS0/t9T9+3s7Kw/1ilhGEbbt29v6Hl78WM60Z9EIhF96EMfioIgOObYN73pTfX7dHR0RAcPHqx/79/+7d+OOcdnP/vZ+veasVI9m81Ge/furX/v+uuvr3/vkksuqd9+xx13HDOO+++/v/69n/zkJ8d87+j5pNnXJwAAAOYOVqoDAICzauXKlbr66qv1/e9/X5L0d3/3d/rkJz+pe++9V2NjY5Kk3t5evelNb6ofMzIyohtvvFHf/va3T3nuE3UvN9N3v/tdvfe979WuXbtechzLli0745/38MMP11e6StKNN954zPdvvPHG+mrmIAj08MMP6/Wvf/1x53nve9+rRCIhSUokElq5cmV9RfTUc95smzdvrv/3pZdeesyq/AsvvFCXXnppfXX31H3PP/989fX1aWRkRM8884xWr16tV77ylTr33HO1du1aXXvttVq+fHn9PFdddZX+5V/+RZL0rne9S1/60pd07rnn6rzzztOVV16pdevWHdeLfzK1Wu2YDvE3v/nN+v+zd+dhbpUF+8e/2ZfJTGbvdLovQHeWlpYWoSAogiIoKiAKVEFQeBVwAV5fFcRXUBQQEYoLi/5QFlcExRdQikLZylIotED3ZfYlmezJOef3x5lOO51MO6Wz5/5cV65Ock6SJ5lMZnqfJ/fj9XoB+5MW3//+9wF45JFHaGlpoaKiAqBH//Z3vvOdrud6p6lTp/Z6v5dccgmlpaXdLlu3bh2tra1d5z/zmc/gcrm6zp933nn88Ic/7Dq/cuVKDjvsMI455hhuvfVWAP7nf/6Hhx9+mBkzZnDIIYewaNEijjnmmK7bKSsrY/bs2axZs4ZoNMqUKVM48sgjOeigg5g7dy4nnHACU6ZM2fuTth+OO+44vv71r+N0dl/SaffXyYc+9CGqq6u7zp988slUVVXR1NTUte9ll13Wb2M67b
TTunXmH3LIIV1f7/5z8dJLL3V9PWbMGE444YSu80uWLGHKlCl5Z6v35+tTRERERIYXLVQqIiIi/ebuu+/Gsqwep+OOO67bfp/73Oe6vv7d734HdK9++fSnP43P5+s6//nPf36fgTrQVTWyP6w9Fjjt7TZ27NjB6aefvs9A/b2OI5/dg1WwA729ne8tIN9zscbdn1vTNA9ghL3bfex7jnPPy3aO2+/38+CDDzJx4kQANmzYwB/+8Aeuv/56zj77bMaNG8dNN93Udb3LLruMz372s7hcLtLpNE899RQ///nP+epXv8pRRx3FvHnzqKur69N4//znP3d7/navxDn77LO7vs5kMtx33315Hyew30H0jBkzelz2Xr/vn/jEJ/ja176Gz+fDMAxWrlzJ3XffzVVXXcXxxx/PtGnTWLNmTdf1fvvb3zJr1izAfn3/5S9/4Uc/+hHnnXceEydO5Iorrtivx7LTBz7wAW644QY+/elPd4XGjz/+OCeccEKPxVXfy+tkT339Gd5TX38u2tvbu77ePfTPN8bd9efrU0RERESGF4XqIiIiMujOOOMMSkpKANi4cSNPPPEEf/3rX7u2L1u2rOvreDzOI4880nX+hBNOYP369eRyOSzL4sgjj9zv+999tmwymey27Z133sl7nb/+9a/dAsEf//jHtLe3Y1lWt6CyP5WXl3c739DQsNfzvfVN7zlzejBmx+4+9j3Huedlu4/7/e9/Pxs3buTFF1/kF7/4BVdeeSXHHHMMYAfaX//613n33XcBcLvd/PrXv6auro4///nP/OAHP+Bzn/tc1+298cYbXHXVVX0a7z333NPt/Ac+8AEcDgcOh4O5c+f2uu+e36Pe+rV7U1RU1OOyA/m+33jjjTQ0NPC3v/2Nm266iYsvvrhrNvbmzZv50pe+1LXvvHnzWLNmDatXr+aee+7hm9/8ZtcnHUzT5Oabb+76RMn+WLJkCVdeeSX33Xdf1wx/gDVr1vDjH/+418e6P6+T9/IzvKe+/lzs/kmCnZ/w6G2Mu+vP16eIiIiIDC8K1UVERGTQBQKBbjOBv/CFL3QF1oceeihHHHFE17ZIJNKtAuXDH/4wU6dOxeVysW7dOlavXr3f9797SPbKK6+QyWQA2L59O/fee2/e67S0tHQ7v2zZMsLhMEC3BSX3tGdwt+dM3b1ZuHBht9qPPce2+3mXyzWgi7TuryVLlnR9vWrVqm4HHt544w1WrVrVY99UKsVbb72F0+lkwYIFXHDBBdxwww2sWLGi67k2TbNr4ct169aRSCSoqqritNNO4xvf+Aa/+tWv+Na3vtV1231ZDHLHjh08/vjjfX5sr7zyStfr7n3ve1+3bddddx25XK7bZZs3b+7zbYNdQ7J72Pz//t//6/YzsOfrYOfzt3HjRtrb2wmHw5x88slcfvnl3HHHHdx2221d++7+fLz66qsAzJ07l/POO4/vfe97/O1vf2PevHl5938vvva1rzF9+vSu8zfddBPRaLTH2MFe6Hj30Prvf/97V/XLnvvu/jP8wgsvdM1Wf/3117sdoOsPCxYs6Pq6oaGBJ598suv8s88+2+uBlP56fYqIiIjI8KNOdREREek3jz32GM3NzT0uD4fDXHjhhd0uW7ZsGT//+c+B7rN7d5+lDnbdQmlpaVcFw/e+9z0aGxvJ5XLcdddd76lq5cgjj+RPf/oTAO+++y5HHHEEM2fO5F//+leP8Hyn3fuWwQ73Tz75ZFavXs3vf//7Xu9r3Lhx3c5fcsklnHTSSbjdbj760Y9y8MEH93rdiooKzj//fH71q18Bdnjf3t7O4sWLee655/jHP/7Rte+5557b1fM9GC666CKKi4t7XD5//nzuvPNOLrnkEu644w7S6TSmabJ06VLOO+88HA4H9957b1e9htfr5ZJLLgHsmo1Zs2Yxe/ZsFi5cSG1tLYFAgP/85z9EIpGu+9gZqN5888385je/6er/HjNmDK2trfz617/use/e/PrXv+
4WWp966qkEg8Fu+5imyUMPPdR1/u677+bmm29m7ty5nHLKKfztb38D7M71Qw89lFNOOQW/38+aNWt4+umn8/5c9MbpdHL55Zd3ha8rV67kfe97Hx/84AdZu3Ztt4M4xx9/PIceeigADzzwAN/5znc47rjjOOiggxg7dizxeLyrYmnP5+Ooo46itraWY445htraWkpKSnjttde6Hajqy/O3N263m2984xt84QtfAOzv8W233cZ///d/A3D55Zfzl7/8Bcuy6Ojo4Mgjj+TTn/40sViMu+66q+t2ysvLu60pcOSRR/LKK68AsGLFiq7H8sQTT3QdJOsv55xzDtdcc03Xe83HPvYxLrjgAhwOR7cx7qm/Xp8iIiIiMgwN0QKpIiIiMgr861//soB9niZNmjeT7hAAAQAASURBVJT3+jNnzuy2n9frtZqbm3vsd8MNN+S93Tlz5ljz58/vOn/eeef1OraNGzd2bWtoaLAqKip63J7T6bROOumkbpftlMlkrLlz5+Ydx3nnndft/L/+9a9u4z/88MPzXu+hhx7a51hjsZh17LHH7vX5Pfroo62Ojo6u62zcuHGv41m6dGne52xv7r777j59r5cuXdp1nQcffNDy+/297uvz+azf/e53XfvX1dXt8/YXLlxoZbNZy7Is66KLLtrrvk6n0/rTn/60z8c2Y8aMruscdNBBve53zDHHdO1XXV3dNY7m5mbryCOP7HUc4XC46zb29b3ZKZfLWZ/85Cf3+vhmzpxpbd++ves6119//T6fv1tvvbVrf5/Pt9d9p0yZYrW3t+/z+dvzMX3nO9/ptj2dTlvjxo3r2l5ZWWnF4/Gu7TfffLPldDr3+vzt+TytWbMm7/gDgYB13HHHdZ3f871n0qRJvY7zO9/5Tq/Xu+222/KOrba21jrooIPy/jz11+tTRERERIYf1b+IiIjIkNlzVvqpp56ad7b1lVdeyc9+9jMOPvhgPB4PNTU1XHjhhaxYsYJQKLTf91tdXc2KFSs4+eSTCYVCFBUV8f73v5+nnnqqWy3N7jweD//85z85//zzqaiowOfzMWfOHH7+859zzTXX7PX+/vjHP/Kxj32M8vLy/e4zLyoq4sknn+SXv/wlxx9/POXl5bjdbsrKyli6dCl33nknTz311Ht6HgbaJz/5SV599VUuvvhipk+fjt/vx+/3M23aNC688EJeeeWVbs93WVkZt912G2effTazZs2ivLwcl8tFSUkJCxYs4LrrruPJJ5/E7bY/bPn5z3+eK6+8kmOPPZYJEybg9/vxer1MmDCBT37yk6xYsYLTTz99r2N87rnnWLt2bdf5PV+Tu9t9W2NjY9fiuRUVFTzzzDP88pe/5MQTT6SqqqrrezR//nwuu+yy/X7uXC4XDz74IA899BCnnHIK1dXVuN1uwuEwixYt4sYbb+TFF1/s6ksHOP300/n2t7/NiSeeyOTJkwkGg7jdbsaOHcuHP/xhHn74Yf7rv/6ra/877riDZcuWMW/evK4xh0Ih5s2bxze+8Q2ef/75rtqdA+H1evnqV7/adb65uZnly5d3nb/ssst4/vnn+exnP8ukSZPwer0EAgFmzpzJ5Zdfzuuvv95jseNZs2bxxBNPcMwxxxAIBCgpKeHUU0/l+eefZ+nSpQc85j1dcskl/P73v2f+/Pn4fD4qKyv57Gc/y/PPP9/te7C7/nh9ioiIiMjw5LCszgJCERERERERERERERHZK81UFxERERERERERERHpI4XqIiIiIiIiIiIiIiJ9pFBdRERERERERERERKSPFKqLiIiIiIiIiIiIiPSRQnURERERERERERERkT5SqC4iIiIiIiIiIiIi0kcK1UVERERERERERERE+sg91AMYLUzTZMeOHRQXF+NwOIZ6OCIiIiIiIiIiIsOKZVl0dHRQW1uL06m5vjJyKVTvJzt27GDChAlDPQwREREREREREZFhbevWrYwfP36ohyHynilU7yfFxcWA/aZQUl
IyxKMREREREREREREZXqLRKBMmTOjK0URGKoXq/WRn5UtJSYlCdRERERERERERkV6oOllGOpUXiYiIiIiIiIiIiIj0kUJ1EREREREREREREZE+UqguIiIiIiIiIiIiItJH6lQXERERERERERERGUKGYZDNZod6GAXN6/XidPZtDrpCdREREREREREREZEhYFkW9fX1tLe3D/VQCp7T6WTKlCl4vd597jvsQvWf/exn3HjjjdTX13PooYfy05/+lIULF/a6f3t7O9/85jf54x//SGtrK5MmTeKWW27hlFNO6fNtplIpvvrVr3L//feTTqc56aSTuP322xkzZsyAPlYREREREREREREpXDsD9erqaoLBIA6HY6iHVJBM02THjh3U1dUxceLEfX4fhlWo/sADD3DFFVewfPlyFi1axC233MJJJ53EunXrqK6u7rF/JpPhAx/4ANXV1fz+979n3LhxbN68mdLS0v26zcsvv5xHH32Uhx56iHA4zKWXXsrHP/5xnnnmmcF66CIiIiIiIiIiIlJADMPoCtQrKiqGejgFr6qqih07dpDL5fB4PHvd12FZljVI49qnRYsWceSRR3LbbbcB9hGCCRMm8F//9V9cddVVPfZfvnw5N954I2vXru31ge7rNiORCFVVVfz2t7/lE5/4BABr165l5syZrFy5kqOOOqpPY49Go4TDYSKRCCUlJe/l4YuIiIiIiIiIiIxays+6S6VSbNy4kcmTJxMIBIZ6OAUvmUyyadMmpkyZgt/v3+u+fWteHwSZTIZVq1Zx4okndl3mdDo58cQTWblyZd7rPPzwwyxevJhLLrmEMWPGMGfOHL7//e9jGEafb3PVqlVks9lu+8yYMYOJEyf2er8A6XSaaDTa7SQiIiIiIiIiIiKyP1T5Mjzsz/dh2ITqzc3NGIbRo8d8zJgx1NfX573Ohg0b+P3vf49hGPztb3/jW9/6Fj/+8Y/53ve+1+fbrK+vx+v1dquM2df9Alx//fWEw+Gu04QJE/b3IYuIiIiIiIiIiIjICDNsQvX3wjRNqqur+fnPf878+fM588wz+eY3v8ny5csH/L6vvvpqIpFI12nr1q0Dfp8iIiIiIiIiIiIiw8lTTz2Fw+Ggvb19QO9n8uTJ3HLLLQN6H301bEL1yspKXC4XDQ0N3S5vaGigpqYm73XGjh3LwQcfjMvl6rps5syZ1NfXk8lk+nSbNTU1ZDKZHt/0vd0vgM/no6SkpNtJREREREREREREpD9t3bqVz33uc9TW1uL1epk0aRJf+cpXaGlpGfSxHHfccVx22WXdLluyZAl1dXWEw+F+uY977rmnR6sIwIsvvsgXvvCFfrmPAzVsQnWv18v8+fN58sknuy4zTZMnn3ySxYsX573O0Ucfzbvvvotpml2Xvf3224wdOxav19un25w/fz4ej6fbPuvWrWPLli293q+IiIiIiIiIiIjIQNuwYQMLFizgnXfe4Xe/+x3vvvsuy5cv78o3W1tbh3qIeL1eampqBrwbvqqqimAwOKD30VfDJlQHuOKKK/jFL37Bvffey1tvvcUXv/hF4vE4y5YtA+Dcc8/l6quv7tr/i1/8Iq2trXzlK1/h7bff5tFHH+X73/8+l1xySZ9vMxwO8/nPf54rrriCf/3rX6xatYply5axePFijjrqqMF9AkREREREREREREQ6XXLJJXi9Xv7v//6PpUuXMnHiRE4++WSeeOIJtm/fzje/+U3AXmTzz3/+c7frlpaWcs8993Sdv/LKKzn44IMJBoNMnTqVb33rW2Sz2a7t11xzDYcddhi/+c1vmDx5MuFwmLPOOouOjg4Azj//fFasWMFPfvITHA4HDoeDTZs29ah/Oe6447q2737atGkTADfddBNz586lqKiICRMm8KUvfYlYLAbYVTLLli0jEol0Xe+aa64Beta/bNmyhdNOO41QKERJSQmf+tSnujWW7OvxHIhhFaqfeeaZ/OhHP+Lb3/42hx12GK+++i
qPPfZY10KjW7Zsoa6urmv/CRMm8I9//IMXX3yRefPm8eUvf5mvfOUrXHXVVX2+TYCbb76Zj3zkI5xxxhkce+yx1NTU8Mc//nHwHriIiIiIiIiIiIjIblpbW/nHP/7Bl770JQKBQLdtNTU1nHPOOTzwwANYltWn2ysuLuaee+7hzTff5Cc/+Qm/+MUvuPnmm7vts379ev785z/zyCOP8Mgjj7BixQpuuOEGAH7yk5+wePFiLrzwQurq6qirq2PChAk97uePf/xj1/a6ujo+/vGPc8ghh3TlsU6nk1tvvZU1a9Zw77338s9//pNvfOMbgF0lc8stt1BSUtJ1/a997Ws97sM0TU477TRaW1tZsWIFjz/+OBs2bODMM8/s8+M5EO4DvoV+dumll3LppZfm3fbUU0/1uGzx4sU899xz7/k2Afx+Pz/72c/42c9+tl9jFRERERERERERERkI77zzDpZlMXPmzLzbZ86cSVtbG01NTX26vf/5n//p+nry5Ml87Wtf4/777+8KtMEOq++55x6Ki4sB+OxnP8uTTz7J//7v/xIOh/F6vQSDwb2uRVleXt719c0338w///lPnn/++a4DA7t3sk+ePJnvfe97XHzxxdx+++14vV7C4TAOh2Ov9/Hkk0/y+uuvs3Hjxq5g/9e//jWzZ8/mxRdf5Mgjj9zn4zkQwy5UFxERERERERERERHbvmaie73ePt3OAw88wK233sr69euJxWLkcjlKSkq67TN58uSuABpg7NixNDY27v+ggb///e9cddVV/PWvf+Xggw/uuvyJJ57g+uuvZ+3atUSjUXK5HKlUikQi0efO9LfeeosJEyZ0myk/a9YsSktLeeutt7pC9f58PLsbVvUvIiIiIiIiIiIiIgLTp0/H4XDw1ltv5d3+1ltvUVVVRWlpKQ6Ho0f4vntf+sqVKznnnHM45ZRTeOSRR3jllVf45je/SSaT6XYdj8fT7bzD4cA0zf0e+5tvvslZZ53FDTfcwAc/+MGuyzdt2sRHPvIR5s2bxx/+8AdWrVrV1R6y51j6Q389nj0pVBcREREREREREREZZioqKvjABz7A7bffTjKZ7Latvr6e++67j/PPPx+AqqqqbmtRvvPOOyQSia7zzz77LJMmTeKb3/wmCxYs4KCDDmLz5s37PSav14thGHvdp7m5mVNPPZUzzjiDyy+/vNu2VatWYZomP/7xjznqqKM4+OCD2bFjx37fx8yZM9m6dStbt27tuuzNN9+kvb2dWbNm7eej2n8K1UVERERERERERIYBw7RIZfceJkphue2220in05x00kk8/fTTbN26lccee4wPfOADHHzwwXz7298G4P3vfz+33XYbr7zyCi+99BIXX3xxt1naBx10EFu2bOH+++9n/fr13HrrrfzpT3/a7/FMnjyZ559/nk2bNtHc3Jx31vcZZ5xBMBjkmmuuob6+vutkGAbTp08nm83y05/+lA0bNvCb3/yG5cuX97iPWCzGk08+SXNzc7eDAzudeOKJzJ07l3POOYeXX36ZF154gXPPPZelS5eyYMGC/X5c+0uhuoiIiIiIiIiIyBCyLIvmWJrV29p5c0d0qIcjw8hBBx3Eiy++yNSpU/nUpz7FpEmTOPnkkzn44IN55plnCIVCAPz4xz9mwoQJHHPMMXz605/ma1/7Wrd+8o9+9KNcfvnlXHrppRx22GE8++yzfOtb39rv8Xzta1/D5XIxa9Ysqqqq2LJlS499nn76ad544w0mTZrE2LFju05bt27l0EMP5aabbuIHP/gBc+bM4b777uP666/vdv0lS5Zw8cUXc+aZZ1JVVcUPf/jDHvfhcDj4y1/+QllZGcceeywnnngiU6dO5YEHHtjvx/ReOKx9Nd1Ln0SjUcLhMJFIpEfBv4iIiIiIiIiISD7tiQzb2pLUR1KkcyblRV6OmlqOw+EY6qH1O+Vn3aVSKTZu3MiUKVPw+/19vt53vvMdbrrpJh5//HGOOuqoARxhYd
mf74d7kMYkIiIiIiIiIiIinWLpHNtaE9RFkmQNi/IiL+nsgS+gKKPftddey+TJk3nuuedYuHAhTqfKSAabQnUREREREREREZFBksoa7GhPsrUtQTJjUB70EfC6ABSqS58tW7ZsqIdQ0BSqi4iIiIiIiIiIDLBMzqQhmmJLa4KOVJaw30t5qW+ohyUi74FCdRERERERERERkQFimBZNHWk2t8Zpi2cI+TzUhgOjsjNdpFAoVBcREREREREREelnlmXRHMuwrS1BYzSFz+2ipiSAy6kwXWSkU6guIiIiIiIiIiLSj9oTGba0JmiMpnE4oLrYj9ulxSRFRguF6iIiIiIiIiIiIv2gI5Vle1uSHZEkhmlRHvThdStMFxltFKqLiIiIiIiIiIgcgGTGYEd7km3tCZIZg/Kgj4DXNdTDEskrmTEAcLsc5AwLQK/X/aRQXURERERERERE5D3I5EzqI0m2tCWJpbKEA17KS31DPSyRvBKZHImMwd3/2chja+qJJnOUBNx8aHYNy943haDXRdA7NHGxw+HgT3/6E6effnre7Zs2bWLKlCm88sorHHbYYYM6tnz0+RMREREREREREZH9kDNM6iJJXt7Sxlt1UbCgNhwg5NP8VRmeUlmDnz+9gYX/+wQ/e2o965viNMXSrG+K87On1rPwf5/g509vIJU1BnwsN9xwAw6Hg8suu6zP15kwYQJ1dXXMmTMHgKeeegqHw0F7e/vADHIf9JMuIiIiIiIiIiLSB6Zp0RxPs7U1QVNHmoDHTU04gNPh6J87sCz7JNKPEpkcP396A7c88U6v+5gWXdu/cOzUAZux/uKLL3LnnXcyb968/bqey+WipqZmQMb0XmimuoiIiIiIiIiIyF5YlkVbPMMbOyK8tjVCeyLLmGI/5UXe/QvULQuHkcaZ6cCVasUdr8fTsQVv29v4m16jpPEFilreGLgHIqOGaVq0xNL7PEUSGRJpg1uf7D1Q392tT75DImMQSWT2edumuX8HgGKxGOeccw6/+MUvKCsr67G9rq6Ok08+mUAgwNSpU/n973/ftW3Tpk04HA5effVVNm3axPHHHw9AWVkZDoeD888/f7/GcqA0U11ERERERERERKQXHaks29oS7IiksEyoKPLicfUyT9UycRgZHGbWPhkZHGYGRy6FMxvHmUviMHOdpyxgAQ4shwPL6caVSWM6sGer99fsdxmV2hIZ5n/viX3ud9vZh7NmR5S+5t+mBXf9ZyOzxpZw6e9e2eu+q/7nRCpCfV9D4JJLLuHDH/4wJ554It/73vd6bP/Wt77FDTfcwE9+8hN+85vfcNZZZ/H6668zc+bMbvtNmDCBP/zhD5xxxhmsW7eOkpISAoFAn8fRHxSqi4iIiIiIiIiI7CGZMdjenmB7W5JUzqA86MPvdthBeSaDw9gZnKdx5lI4cgmcuSROMwc9QnMnltON5XRjurzgKcJyunsE50a2FUXp0p8WTinn5ife3q/r/GNNPectmdyv47j//vt5+eWXefHFF3vd55Of/CQXXHABANdddx2PP/44P/3pT7n99tu77edyuSgvLwegurqa0tLSfh1rXyhUFxERERERERERATAN0ukkjW0d7GiJkEgmKfXCWGcGZ2sCh5HqDM2zOMwcdmgOlsNlh+YuD8ZeQnORwVbkcxNN5vbrOtFUjqJ+XHR369atfOUrX+Hxxx/H7/f3ut/ixYt7nH/11Vf7bRz9SaG6iIiIiIiIiIgUBtMAIwO5NBhZ+2sjDZkkuXSMtmiU5kiMeDJFsdOi2uvCkaFrlrnldGO4fVjOInAoNJfhL57OURJw0xRL9/k6JX438fT+BfF7s2rVKhobGzniiCO6LjMMg6effprbbruNdLrvYxsuFKqLiIiIiIiIiMjoYOQ6g/I9Tuk4ZON2mG5m7UDdNAAwLYtoxqIhZhLNWnjcPkrKS3E4XeQUmsswVRb0sup/Ttznfh6Xg5Nm13D7U+v7fNsfmjOWgMe5z9svC3
r7dHsnnHACr7/+erfLli1bxowZM7jyyitxuVwAPPfcc5x77rld+zz33HMcfvjheW/T67Xv2zCMPo2hvylUFxERERERERGRkaErNN9tpnkuDZlEr6E5AC43ON3g9IAnCH4PlsNNNJWjIZqiNZ7G5XBSEvbg7GUNUpHhxOl09HmR0M+9bwrLV6zv02KlTgd87ujJlAT6Fpj3RXFxMXPmzOl2WVFRERUVFd0uf+ihh1iwYAHve9/7uO+++3jhhRf41a9+lfc2J02ahMPh4JFHHuGUU04hEAgQCoX6bcz7olBdRERERERERESGh65KlgzkdptpnonZwXnX7PMcWHuG5h5w7QrNcfYee8UyORqjMZpjGSwTwgEvbpdmpcvoFPS6+PIJB3HLE+/sc9+vnHAQAa9rEEbV07XXXsv999/Pl770JcaOHcvvfvc7Zs2alXffcePGce2113LVVVexbNkyzj33XO65555BG6vDsqw+HKOQfYlGo4TDYSKRCCUlJUM9HBERERERERGR4cfIdvaZZzoD9LQdnmfjdkWLkdk109wy7es4HJ2zzN12aO7y7Dq/n5JZg+aONA0dabI5k5KAB697+ExNT0VbcbjczF58Mo5ROGVe+Vl3qVSKjRs3MmXKlL0u4Nkv95U1WL5iPbc++U7eGetOB3z5hIO4eOk0/J6hCdWH2v58PzRTXUREREREREREDpxldZ9pvvOUTdszzbOJzlqWPUNzds0yd7rBE7LPO/sv2MsYJi2xDPWRFMlMjmK/h3DA02+3LzLc+T0uvnDsVD571CTufmYTf3+jjmgqR4nfzclzxrLs6MkEvK6CDdT3l0J1ERERERERERHZt67QfI8+81waMp0LgRpZMDt7z3eWI+ycab5zlrnH3znTfODDu5xp0Ra3w/SOdI6gx0VVsd8O8kUKTNDrJuh1c8nx07nk+Om4XQ5yhv1zOlSVLyOVQnUREREREREREekMzfP0mWdTdmCeTUCuc6a5me0emrs8nbPN3eAJ2OcdQ1dfYprQnrTD9PZkFr/bRWXIh0Nhuki3AF0T098bheoiIiIiIiIiIoVgZ2ie222muZHuDM0T9mxzI9cZmmdgZ++yw7lrIVC3B5xDH5r3xrIgmsrREE3REk/jdjqpKPIxCuvJRWQIKVQXERERERERERkNTLNnn7mRgUzCDs2zyV2d5mZ2j9C8c5a52wOuoF3PMgxD873pSOdoiqZoitkHBEoDXtwuTU0Xkf6nUF1EREREREREZCQwzc4+84wdjuc6v84m7YVAcyl7prmRsXvNd3I6dy0E6vaBq8g+P0q6UJJZg6aONI3RNFnTpMTvweseWQcERGRkUaguIiIiIiIiIjIcmEb+TvNMwu40z6U6a1uyPUNzl9eeXe72gS/UOdN8dITmvUnnTFpiaRqiaZKZHMUBD2GPZ6iH9Z4EisKUlpUTKDoUh9vXefAEu59eRIYdheoiIiIiIiIiIoPBNHbNLjeyu2adZ5KQ6bC3mTtnmhv2dRyAw7VrIVCPH3zFBRGa9yZrWrTFMzREUnSkcxR5XVSV+Id6WO+J2+tj3ISpuDFwPL8cx9q/QioC/jDM/Cgc9UXwFIE3ONRDFZHdKFQXEREREREREekPO6tXjN0WAs2ld5tpnra7zI3srtAcOhcBde8Kzf0l9nnpxjChPZmhPpIikszg97ipDPlG7LEFt9fHxKkzcay8DefTPwTL3LUx1gj//jH852ZYeiUcfZn92hDpD9mk/a/TvetTL/pUxH7RO7SIiIiIiIiISF8Yufyd5pmE3WluZOzQPJcFK09o7vKCJwh+j0Lz/WBZEElmaYimaE1kcDudVBT5cY7w2vRxE6bagfqKG3rfyTLhqevtr5d8WTPW5cDsPMD33B3w1sPD6lMRkydP5rLLLuOyyy7rdR+Hw8Gf/vQnTj/99EEbV2/0Di4iIiIiIiIiAt1nl+/+dTZuh1FdM81ze4TmnSG5ywPeIvC7FZr3k45UjoZoip
ZYBoDSgBe3a4ROTd9NoCiMG8Oeod4XK34AR16gUF3eu2wSnr3Vfi0N8qciDMPgmmuu4f/9v/9HfX09tbW1nH/++fzP//wPjv34qEldXR1lZWUAbNq0iSlTpvDKK69w2GGH9et4+0Lv8CIiIiIiIiIy+lnWrqB891MuA+kOyCY6a1k661l2hk4O7FqWnaG5p3MRUIXmAyqZMWjsSNPUkSZrmoT9HjzuET41fTelZeU4nl/ePdzcG8u0Zxcf+zXVdMj+yyTsQH3npx7yGcBPRfzgBz/gjjvu4N5772X27Nm89NJLLFu2jHA4zJe//OU+305NTU2/jelA6TeAiIiIiIiIiIx8vYXm2RRk4p2heaZzIdA8ofnO2eaezkVAna4hfTiFKp0zaYmlqY+mSWUNiv1uwh7PUA/rgDiMDJ7YDryxbXg7tuKJbaNo+i/tRUn3x1t/gWO/OjCDlJHHNCHZuu/9nG77vW/FD/p2uyt+AEd+ftf75d4EyulLD9Ozzz7Laaedxoc//GHArnr53e9+xwsvvNBtv46ODs4++2wefvhhSktL+e///m8uueSSru27179MmTIFgMMPPxyApUuX8tRTT/XtMfYDheoiIiIiIiIiMvxZ1m5h+W595rm03We+c6b5ztnmlmVfz+HY1WfuctuzfBWaDztZ06I1lqY+kiKeyVHkdVNV7BvqYfWdZeHKRPB0bOsKz+1/t+FONOLA6r6/N2j3We+PVFSfkJBdkq1w47R97/eJu6F+9f59KmLl7VAzF36/bO/7fn09FFXu8yaXLFnCz3/+c95++20OPvhgXnvtNf7zn/9w0003ddvvxhtv5L//+7+59tpr+cc//sFXvvIVDj74YD7wgQ/0uM0XXniBhQsX8sQTTzB79my8Xm/fHl8/0U+iiIiIiIiIiAy9naH57n3mRtqeaZ5N2LPNjZwdmO8Zmrs8nbPNO0Nzlwcco6cqZDQzTGhPZqiLJOlIZfG73VSF/PYnCIYjM4cnXt8VmHtj2/B0BuiubLzvt5OO2QtExhr7fh1/iT1z2DW44aGMcJOWwFPf37/rrP0rLPpCvw3hqquuIhqNMmPGDFwuF4Zh8L//+7+cc8453fY7+uijueqqqwA4+OCDeeaZZ7j55pvzhupVVVUAVFRUDEktjEJ1ERERERERERl4ptl9prmR3q2eJWYvomfkwMx0huad13M47bDc5QG3B5wKzUcDy4L2ZJaGaIq2RAaP00l50N+XJolB4czEOgPzbd0D9Hgdjt0XqX2vNj8LMz5iLw7ZR9bM03AM26MNMmz5Qu/tUxHeUL8N4cEHH+S+++7jt7/9LbNnz+bVV1/lsssuo7a2lvPOO69rv8WLF3e73uLFi7nlllv6bRz9SaG6iIiIiIiIiBy4rtB8t5nmubQdlmcTnfUsnTPNjQxdU5Gdzl2zzN1ecAXt8w6Fh6NVRypHQzRFSyyDAygNeHG7huD7bRm4E017zDq3/3Wn2/v97rKBKrKhcWRC43FFI4QWXYzjmZ/0rZbD4YSjLtYipbL/3uunIjKxfhvC17/+da666irOOussAObOncvmzZu5/vrru4XqI4lCdRERERERERHZN9PYNdM8t9tCoJkEZOOQS9mh+Z6L23WF5h5w+8BVpNC8QCUyBo0daZo6UuRMi7Dfg8c98FPTHbkk3th2vB3b8OweoMd24DQz/XpfptPTFZxniseT7fw3ExqH5d4ViJeNP4RMzkP4mG/gfvqGfd5u9phvEM24CXoMAl6tByDYi4R+ff2+93N6YOZH4d8/7vttzzwN3MF9336gvE83l0gkcO7xMRSXy4Vpdj+g9Nxzz/U4P3PmzLy3ubND3TD64ZMj74FCdRERERERERHZFZrv2WmeSdqheTZph+VGdo/Q3NXZae4Gj8+uGnC6FZpLl3TOpKkjTWNHmlQ2R7Hfg9/Tz8GwZeFKteyadd4VnG/Hk2zq3/sCcr5SMqHxZIvHdwXomdB4csEqcOz7sQVCYS
749cvcfuYlFAPuf/8w/4x1h5PcMd8gNv8SLn1gDXedv7DfH4uMUE5nnxYJBeCoL9pVQ33+VMQXIRA+sPHt5tRTT+V///d/mThxIrNnz+aVV17hpptu4nOf+1y3/Z555hl++MMfcvrpp/P444/z0EMP8eijj+a9zerqagKBAI899hjjx4/H7/cTDvffmPdlWIbqP/vZz7jxxhupr6/n0EMP5ac//SkLF+Z/07jnnntYtqz7SrQ+n49UKtV13tHLL/If/vCHfP3rXwdg8uTJbN68udv266+/vqscX0RERERERGRE2zmLfM9TOt450zzdWc2StQP2nbpCcw94/HYtgMOl0Fz2KWtYtMTT1EdSJDI5Ql4PVcX+A7pNh5HBE9/RY9a5N7YdZy7ZTyO3WQ4X2aKx3ULznSG6eYB90x6Xi3cb45y6/CXu+cznGXfE5/C+dCeutQ/bfdb+EowZHyWz4CK2xeD8O14iY5hDU5MjI5+nCJZeCU9dv+99l17Z7zVDP/3pT/nWt77Fl770JRobG6mtreWiiy7i29/+drf9vvrVr/LSSy9x7bXXUlJSwk033cRJJ52U9zbdbje33nor3/3ud/n2t7/NMcccw1NPPdWv496bYReqP/DAA1xxxRUsX76cRYsWccstt3DSSSexbt06qqur816npKSEdevWdZ3fM0Svq6vrdv7vf/87n//85znjjDO6Xf7d736XCy+8sOt8cXHxgT4cERERERERkcHRFZrv0WneVc/SS2juctszy50e8ATB3znrXOQ9MkxoS2SojyaJJrMEPG6qQn76vMamZeHKRPF0bO3edx7bjifegIM+zLbdn/F6ijoD8wndA/SimgH5WUjnLOKZHCUBN+ub4pz40xdZOKWcC486l4ULL8Pv85JKZ3hhYyu/eGADL2xsBWBaVRE5w6K/J/lLAfAG4ejL7K9X/KDXT0Ww9Ep7P8+BHfzaU3FxMbfccsteFx3dtGnTPm/Hsqxu5y+44AIuuOCCAxzdezPsfkvedNNNXHjhhV2zz5cvX86jjz7KXXfd1euscYfDQU1NTa+3uee2v/zlLxx//PFMnTq12+XFxcV7vR0RERERERGRIdNVybJnp3nMDs67Zp/nwNozNO/sNFdoLgPIsqA9maUhmqItnsbjclFR5MfZW226aeBJ1HeG5ls7Fwm1u89d2Y7+HRsOcsHqrsA80xmgZ4vHY3jDg/bJi+d3ZLltVYr/8bTywVk13LHC7qx+YWNrV3jem5PnjB2MIcpo5fHDki/DkRfAc3fAW3/p+lQEM0+zK188gX4P1EerYfVbNJPJsGrVKq6++uquy5xOJyeeeCIrV67s9XqxWIxJkyZhmiZHHHEE3//+95k9e3befRsaGnj00Ue59957e2y74YYbuO6665g4cSKf/vSnufzyy3G7h9VTJCIiIiIiIqOVke3sM890BuhpOzzPxu2KFiOza6b5zlmGDkfnLHO3HZrv7DNXaC6DrCOVoz6aojWWwQGUBX24OqtKnJkYntj2zlnnW7tmnXtjdTis3N5veD+ZLj+Z0LjdFgntDM9DY7Fcvn69r/3REDe5/eUUz263H+9vntvMjz91KHc+vR7T2seVAacDlh09WYuUyoHxBu3TsV+DY79q/64wc4Cj3ytfRrth9Vu2ubkZwzAYM2ZMt8vHjBnD2rVr817nkEMO4a677mLevHlEIhF+9KMfsWTJEtasWcP48eN77H/vvfdSXFzMxz/+8W6Xf/nLX+aII46gvLycZ599lquvvpq6ujpuuummvPebTqdJp9Nd56PR6P4+XBERERERESkUltV9pvnOUzZtzzTPJjprWfYMzdk1y9zpBk/IPu9UsCbDQzyTozGaprkjiTvRxNhsA4HEdnvmeeesc3e6rd/vN+uv2GOR0Alki8eT81cMq77/rGHxh3UZ/t+aNOndPkDy/MZW2uJZLjl+Oj/957v7vJ2vnHCQAnXpP7sH6C7v0I1jBBtWofp7sXjxYh
YvXtx1fsmSJcycOZM777yT6667rsf+d911F+eccw5+f/ePMlxxxRVdX8+bNw+v18tFF13E9ddfj8/X80jm9ddfz7XXXtuPj0RERERERERGrK7QfI8+81waMp0LgRpZe0agkbH3h10zzV0761n8nTPNFZ7JMJVNQmQbmZZNJBs3YrRtoTq2jUmJHTjNTL/elen0kA3V2sH5zkVCiyeQKarF8gT79b4GwisNOX66KsXWaP4O+Jv/9go/+cxRuBxw6z/fzTtj3emAL59wEBctnYZfZeoiw8awCtUrKytxuVw0NDR0u7yhoaHPXecej4fDDz+cd9/teZTv3//+N+vWreOBBx7Y5+0sWrSIXC7Hpk2bOOSQQ3psv/rqq7sF8dFolAkTJvRpjCIiIiIiIjKCZZOQbO+caZ6yA/NsAnKdM83NbPfQ3OXpnG3utmcHujz2gnAiw5VlQaIF2rfsOkW22v/G7MzG23nqDzlveI9Z53Z4ngtWgWPkBcktSZOfv5rin5vzV9uU+hx84TAfJ0520rxpNecvmslnFk3k7mc389iaeqKpHCV+NyfPGdtV+aJAXWR4GVahutfrZf78+Tz55JOcfvrpAJimyZNPPsmll17ap9swDIPXX3+dU045pce2X/3qV8yfP59DDz10n7fz6quv4nQ6qa6uzrvd5/PlncEuIiIiIiIio1QqAh0NEN1uzz4HOxzfuRCo2wNOheYyghgZiGzvGZy3b7EPFPUjy+EkWzS2KzjP7hagm97ifr2voWKYFg+/m+Ge19Mksj23O4CPTPewbJ6fYq9dUZPLpGna8Cr+ohKWLRjLF4+djMfj7py17lDlS4GwrD4U68uA25/vw7AK1cGuYTnvvPNYsGABCxcu5JZbbiEej7Ns2TIAzj33XMaNG8f1118PwHe/+12OOuoopk+fTnt7OzfeeCObN2/mggsu6Ha70WiUhx56iB//+Mc97nPlypU8//zzHH/88RQXF7Ny5Uouv/xyPvOZz1BWVjbwD1pERERERESGJ9OEZBtEd0Cs3q5z8ZdASe2w6m0W2atUe/dZ5+2d4XlH3a7+/n5iuIt6hOaZ4vFki2rsg0+j1JvNOW59KcX69vzP5yHlTv5rQYBDyvOH5Kl41D5FW3G43MxefDIOpw7OjXYej/0zkUgkCAS0UOhQy2TsCiuXa98Hs4ZdqH7mmWfS1NTEt7/9berr6znssMN47LHHuhYv3bJlC87d3lTa2tq48MILqa+vp6ysjPnz5/Pss88ya9asbrd7//33Y1kWZ599do/79Pl83H///VxzzTWk02mmTJnC5Zdf3q3eRURERERERAqIkYN4kz0rPdYEmBAohaLKoR6ZSH5mDjrq9wjPO0/paL/elYWDXLC6W13LzvoWw1daUAecommTX76W5u8b8kxNB0Ie+Nyhfk6Z6sHlLJznRfrG5XJRWlpKY2MjAMFgEEcB/fwMJ6Zp0tTURDAYxO3ed2TusPT5gn4RjUYJh8NEIhFKSkqGejgiIiIiIiLyXmRTEG+0g8hku13lEigFV3+1R4scoExs10zznf9GtkBkmx2s9yPD6SNVVEuueAK5kgm7AvRQLZarsCtxTcvisQ1Zfvlamo5M/mjtg1M8XHCojzJ/32ecj/aZ6srPerIsi/r6etrb24d6KAXP6XQyZcoUvN59/84fdjPVRURERERERAZdKtrZl74N0h3gDUJxDTjVZyxDwDIh1pi/6zzR0v/3F6yE0onkSibQ4R9Lk2sMHYGx+MM1eD2Kjvb0bpvBrS+leKvFyLt9ctjJVxb4mVOl5072zeFwMHbsWKqrq8lm83/iQQaH1+vt1pCyN/rpFhERERERkcJkWbv60jvqIJcCXwmExxdUfYUMoVxqt9nmW7t3nhvp/r0vpwfC46B0YvdTeAIZV4CWWIb6aIpkJkfI56FEC2T2EM9Y3PNGmoffyXQuJNpdwA3nzvFx+sFe3Kp6kf3kcrn61OUtw4NCdRERERERESksRg
4SzRDZble9mAYEytSXLgPDsiDZmr/rPNbQ//fnD/cMzksnQqjnJy9ypkVbPEN9tIOOVJaAx01VyA/Kg7uxLIt/bc5x56spWlP5q16WTnBz8eF+KoOjr7JFRHpSqC4iIiIiIiKFIZuyFx9t3wqpNjtgDJSBu7C7oaWfGFl7Ydvdu853nrLx/r0vhxNKaiG8Z3g+wQ7V98E0oT2VoSGSoj2ZxetyURny6wMaeWyOGNy2KsWrjfmrXsYVO7n0CD8LxipiEykk+okXERERERGR0S3d0dlPvbWzLz0AoWpw6r/E8h6kIj2D88gWu0bIMvv3vjxF+Wedl9Tai+juJ8uCaCpHQzRFazyNy+GkPOhjFK6HecCSOYvfrknz+3UZcnm+rV4XfHqWj0/O8OJ16WiESKHRXxAiIiIiIiIy+nT1pddBrA4ySfAX253Smo4r+2LmoKN+j57zzlMq0v/3FxqTPzwPlPfb6zWWydEYTdEcy2CZEA54cSsM7sGyLJ7dnuP2l1M0JvJXvSyqdXPJEX7GhnQ0QqRQKVQXERERERGR0cM0IN4MkW12X7plgL8UghVDPTIZjjJxe8Z5ZI/gPLIdzGz/3pfLZ9ez9FgodDy4/f17X7tJZg2aO9I0dKTJ5kxKAh68boXB+dTFTH72cornd+Tybq8OOrjkCD+Lx7lx6OCcSEFTqC4iIiIiIiIjXy69qy890aK+dNnFMjtfG3m6zhPN/X9/wYo8wfkEu3LIMXhhdsYwaYllqI+kSGZzFPs8hAP7XxlTCDKGxYNvZfjdW2kyearT3U74xCFePj3bR8CtMF1EFKqLiIiIiIjISJaO2X3pka2QjtozfovHqC+9EOXSnXUtewTnka2QS/XvfTnd9gzzHguFjgdvqH/vaz/lTIu2uB2md6SzBD1uqkJ+UBac10t1OX66KsWOWP4+/MOqXfzXAj8TS1wDMwDLwmFmcORSOI007myKnLt8YO5LRPqN/soQERERERGRkcWyINVud15Hd9gVHv5ie/HGQZwJLEPAsiDZ2nOR0PYt0NEA5O/Afs98Jfm7zotrht2BG9OE9qQdprcns/jdLipDfi0h0IumhMnyV1I8vTV/1Uu538FFh/s5fmI/V72YOZy5FA4jhdPIAA4slxfT5SNbVEs8EMT0l2jtB5Fhbnj9BhARERERERHpjWnY1S7R7XaAauYgEIagZnWOOkbWPmCyZ9d5+xb7IEp/cjiheGye8HyC3cc/zFkWRFM5GqIpWuNpXE4nFUU+nDq+lFfOtPjT2xl+/UaaVJ483emA0w7yct4cH0XeAwy2LROHke4M0dM4LBPL4cZy+zF85WR9pZieIKY7iOkJgsNJOpnF6VSgLjLcKVQXERERERGR4S2XsTuxI5196Q5HZ1/6wC3uKIMkFe2sbNkjOI9ut7vQ+5MnmL/rPDwOXN7+va9B0pHO0RRN0RTLgAXhgBe3S4Fsb15vzHHrqhSbIvlfW7Mq7KqX6WXvrerFYWTsGei5FA7DACdYLj+mK0C2aCymtxjTHcByB7FG6GtORGwK1UVERERERGR4ysTtvvT2LXb46vHZiz0Os9oN2QfTgFh9z67z9i12jU9/C43JH54HK0ZNpUYya9DUkaaxI002Z1IS8OB1a2p6b9pSJr94Nc3jm7J5t5d4HVxwqI+Tpnpw9vU1Yho4jVRnF3oGAMvlwXT5yQVrMHzhrhnolss/al57ImLTXyIiIiIiIiIyfFgWpCK79aXHwFcMYfWlD3vZRP7gPLrNrnPpTy6vXc+y50Kh4fHgCfTvfQ0j6ZxJSyxNQzRNMpOjOOAhHPAM9bCGLcO0+Nv6LHetThHr5SV4ylQPnz/UR4lvL+8vlmXXuHSG6HaNi8uehe4r7V7j4g6Cc4AWNRWRYUOhuoiIiIiIiAw90+zsS99hz2o2suAvsUNSzfAcPizLruLZPTTf2Xseb+7/+wuU518oNFRdUAdZsqZFWzxDQyRFRzpH0OOiqkT1R3uzrsXg1l
VJ3m7NX/UyrdTJlxf4mVXZMxpzGNldNS6mXbxuuX2YrgC54hpMb6hzFnoAy+Ub0MchIsOTQnUREREREREZOrkMJJrtXu14C+CAYKn60odaLm33mu8567x9C+RS/XtfTjeUjOu5SGh4gv0phQJmmtCWzFAfSRFJZvF7XFSGfDrOtBcdGYu7V6d45N0sVp7tQQ8sm+vn1OkeXE4HWAbOXLozRE8DYDk9WC4/uWD1rhoXdxDLHdBBPhEBFKqLiIiIiIjIUMgkINYAkW2QbAe3F4oqwaUqi0FjWXaneb7gvKMe8kaSB8BXDKWTenadl4xVT/4eLAsiySyNHWla4mncTicVRT6chTM5f79ZlsXjm7L84tU07en8r90TJrn5whwHVd4kjmRbZ42Lw65x8RSTDU20a1w8O2tc9LoUkfz07iAiIiIiIiKDJxWBjgaIbIdMB/hCnaGqOogHjJmza3XyheeZWP/el8MJxTV5FgqdCP6wZvn2QUc6R2MkRXPMXvyyNODF7dLztjcb2w1+uirF601G3u2TQhaXzUlxeKWJ5fJiOgPkiqswvcWds9ADWPp0jIjsB4XqIiIiIiIiMrBME5KtdpAea7CrRQJh9aX3t3RH/uA8ugOs/GHje+YJ7ArLd69sKRkHbnVMvxfJjEFjR5qmjjRZ0yTs9+Bxa2r63iSyFr95I80f385g5pmc7ndZfHaGg4/NLMYZnEjSvXMWeqCgOvlFpP8pVBcREREREZGBYWTtxSsj2+zFLQECpRCqGtJhjWimAbHG/AuFJtv6//6KqvMsFDoBgpU6INJP0jmTllia+miaVNag2O8m7FEN0t5Ypsm/t6S449Uszan8r8MlE4u48OjxVJaVYjg99PNhpQHTz6VLIjJAFKqLiIiIiIhI/8omdwW/qXZweaGowv53JPOG7F5wb8jufjeydn1KuqP/a1SyCftgxJ6zziPbwMj07325vHa3eY+FQseDJ9i/9yVdsqZFayxNfSRFPGNQ5HVRVaxZ/nmZOZy5FA4jxfZIlp+84eelZhfQM1AfU+LjomOnceTk8sEf5wGKp3PE0lnGlwVx6KCVyLCmUF1ERERERET6RyoCHY0Q3WYHzd4iKB4Ffekur/04zCw8txzW/tV+rP4wzDgVjroYghXQscMO2vvKsiDRnL+yZefM/v4UKMvfdR6qHvnfoxHEMKE9maE+kiKayuB3u6kK+fLlw4XJMnEY6c4QPd25mKiblMPP/3u3iAfXZsmaPa/mdjr4xPzxfGL+eHzukfV6Ni2L5lgahwMOGVPM+HIdzBIZ7hyWZemTJf0gGo0SDoeJRCKUlJQM9XBEREREREQGh2natSPRHRCrh1wKfCX2jO7RMNPS5YXSSbDyp7Dih2DlSfMcTlj6DVj8X9C+qWewnktDdHueWedb7Vn9/cnhgvC4PYLzzlnovuL+vS/ZL5YF7cksjdEUrYkMHqeTYr8HZ4FXezuMDA4j1RmiG+AEy+XHdAXI+UsxvcU8tyPH8mfraehI572NwyeUcvHSadSWBgZ59AculTVoiacpL/IyrSpERWh0f1pB+ZmMFpqpLiIiIiIiIvvPyNmzqaPbIdYEmHZfelHlUI+sfxWPtQP1p27ofR/LtLdbwPzz4cVfdu8776jPH8YfCG8IyiblWSi0Fpz6r/5w05HK0RBN0RKzq3tKA17crlFw0Gl/mQZOI4Ujl8LZWWNkuTyYLj+5YA2GL4zZuZio5fLT2JHmFys28NyG1rw3V1Hk5cJjprJkWsWIrEtpS2RI5wwmVxQxubIIv2dkzbAXKWT6TSsiIiIiIiJ9l01BvLMvPdkOLjcUlY/8vvR8vCG78mXFD/u2/9M/hMPPgS3PwuZn+2EADjvU37PrvHQi+EtHxycBRrlExqCpI01jR4qcaRH2e/C4C2RqumXZNS6dIbpd4+KyZ6H7Ssn6SjE9QTtEdwe7VRBlDZM/v7yN+1/cSibX84CU0wGnHTaOs46cQNA78qKtnGHS2J
GmyO9mzrgwNSX+EXlQQKSQjbx3HhERERERERl8qai9+GhkG6Sj4A1C8ZjRPSvaV2x3qPd1lrllwkt3w5EX7l+o7vb37DovnQgl48A9uqsgRqt0zqS5I01DR5pUNkex3zPqZyE7jOyuGhczB4Dl9tk1LsU1mN5Q5yz0AJar99f16m3t3LFiPdva8lcjza4t4YtLpzGpomhAHsdAi6VyRFIZxoYDTKsOEfKN4vdQkVFMP7kiIiIiIiKSn2Xt6kvvqOvsSy+G8PjCmCXtDdmLku6PtX+FRV/Iv62oKn/XeVFVYTyfBSBrWLTE09RHUiQyOUJeD1XF/qEeVv+zDJy5dGeIbvecW04PlstPLli9q8bFHcRyB/r0+m6NZ7jrmY2seDv/Ir3hgIfPHT2Z4w+pHpGzuk3LorkjjcvpYObYEsaVBnC7CuRTCyKjkEJ1ERERERER6c7IQaIZItvtqhfTgEDZ6OtL35tEK1S5IBXZv+ulovaBh6nHdQ/PwxPs2f0yKhmm3Y9dH00STWYJeNxUhfww8rLfnrpqXNK71bg47BoXTzHZ0ES7xsWzs8Zl/6Imw7R49PU67nt+M4mM0WO7Azh57lg+u2gSIf/IjLF2LkZaEfIxvSpEWdEorMsSKTAj891IRERERERE+l82ZS8+2r4VUm12x3GgrLAqSNq3wOqH4J3H4LI3wB+2a2/6yl9iz/A/8ZoBG6IMH5YF7cksDdEUbfE0HpeLiiI/zpE8AdnM4sx1dqEbWQAslxfTHSBXXIXpLe6chR7Ach/YLPy19VHueGo9G5rjebdPrw7xpaXTOGhM8QHdz1CxLIu2RJasYTK1MsSkyiA+9+iuARIpFArVRURERERECl26Y1dfeipiz6gOVY/uvvTdWRY0vA6vPdDZhW7Zl29+FmZ8BP5zc99va+ZHIZM/IJTRpSOVoz6aojWWwQGUBX24XCNsarpl4MjZs9CdRhosc1eNi78Cw1eK5d45Cz0Ajv45WhBNZrl35Sb+782GvNuLfC7OWzyZD86qweUcYc9pp6xh0hRLEfJ5mDE2THWxb0TW1ohIfgXyF5KIiIiIiIh009WXXgexOsgkwR+C8Lh+C86GPdOATf+B1Q9A45s9t7/4S/jYHfDMT/q2WKnDCYsuhlR7vw9Vho9ExqAhmqI5lsYwLcJ+L273CAhLLQuHmcGRS9lVLqaBhQPcfkxPEenQOExPCNMdwPQEwenp9yGYlsXjbzZw78pNdKRyefc5YUY15y+ZTGlw5FakdKSydKRz1IaDTK0qokiLkYqMOvqpFhERERERKSSmAfFme1Z6vAmsHPhLIVgx1CMbPLkUrHsMXn/QXoS1N1tfgFwGjv06rPjBvm936TfsyhzNVB+V0jmTxmiaxliKdNagxO/F5xnGB6DMHM5cyl5M1MgADrvGxeUjW1SL6S3p6kE/0BqXvtjQFOP2p9azrqEj7/ZJ5UG+eNw0ZteGB3wsA8UwLZpjaTwuB7PGFlNbGhyxM+1FZO8UqouIiIiIiBSCXHpXX3qipTD70pPtsOZP9ikd7X0/bwhmnQZzPg5GGpZ8GRwOWPHD/DPWHU47UF/8X9C+aaBGL0Mka1i0xNPUR1IkMjlCXg8lxf0/i/uAWKa9mGguhcNIdy4m6sZy+zF85WR9pZ2LiRb1a41LX8TTOe57fjOPvl6HafXcHvC4+PTCiXxk3ljcrmF8kGIfkhmD1kSGqmIv06pCI3qmvYjsm8OyrDxvabK/otEo4XCYSCRCSUnJUA9HRERERETElo519qVvtYNktx8CpYXTlw72gYTXH4S3/wFGpvf9QmNg7ifhkFPsXvmdXF4oHgtmDp5fDm89DKmovSjpzI/alS9OF3TUQefCjjLyGSa0JuwwvSOVJeBxE/K5YRhMPHYYGXsGei6FwzDACZbLj+kKkPOX7lpMdIBqXPrCsixWvN3EXc9spC2R/+fifdMrueB9U6gIjdyDe5Zl0RrPkDMtJlcEmVhRhNc9cg8ODD
TlZzJaKFTvJ3pTEBERERGRYcOy7F7vjnq73iQTB38x+EoKpy8doP4NWH0/bHqGrsVH86k8GOadCVOX7v1gg7fIfg69RXbQbmTs5zYdVeXLKGKaEEllqY8kaU9k8LhcFPs9OIfqR8c0cBqpzi50+6CQ5fJguvyY3hIMX7gzQC/CcvnsT1UMsa2tCZavWM/q7ZG822vDfi5aOo0jJpYN8sj6VyZn0hRLUxrwMLW6iKqQFiPdF+VnMloU0NQEERERERGRUc407GqX6HZ7drqRhUAYguVDPbLBYxqw+Vk7TG9Ys/d9JyyCQ8+CsYf1LYjMxBWej3LRZI6GjhQtsQxOB5QFfbhcgxiSWpZd49IZots1Li57FrqvdFeNi7uzxsXpGryx9UEqa/DAi1v586vbyeXpevG6nHxqwXg+fsR4PCO46gUgkswST2eZWB5kSmWIgHd4fS9EZGApVBcRERERERnpchm7Lz3S2ZfucHT2pQ/84oPDRi4Nbz8Grz9kL8LaG6cbpp8I8z4F5VMHb3wyrMUzORqjKZpjGQzTIuz34nYPfJjuMLK7alzMHACW22fXuBTXYPiKMd0BLHcQyzV8O7oty+K5ja384t8baOpI591nwaQyLjp2GjXhkf2+ZJgWTbEUPreL2ePC1IYDOLUYqUjBUaguIiIiIiIyUmXi9oz09q2QioDHB6HqwupLT7XDmj/bi4+m8ldNAHZly8yPwpwzoKhysEYnw1wqa9LUkaKxI03GMCj2efF5BmgGtWXgzKU7Q3Q7eLacHiyXn1ywurPGpQjTE8Ry+YdFjUtf1EdS3Pn0el7a3JZ3e1Wxjy8cM5VFU8pHfDVKIpOjLZFhTImfqVUhwoFhtmCtiAyaAvpLS0REREREZBSwLDs87upLj4GvGMK1hdWXHtlmz0pf9xgY+WfGAlBUbS8+OuPD3RcflYKWMUxaYhnqoymSmRwhn4eS/gxIu2pc0rvVuDjsGhdPCdlQKaa3yO5CdwdG5IGwTM7kj69s46GXtpExzB7b3U4HHzt8HJ9aMAG/Z2RXo5iWRUssg+WwOKi6mIkVwRFfXyMiB2bkvWuLiIiIiIgUItPs7EvfAbEGe5FMfwmEx4+YGa39omENvPYAbPo3e118tGK63Zc+9bgRGVjKwMiZFm2JDPWRFB2pLAGPm6qQHw70R8jM4sx1dqEbWQAslxfTHSBXXIXpLelcTDRgLyY6wr28pY3lK9ZTF0nl3T5vfJiLl05jQtnIP5CVzhk0d6QpLfIyvTpEZWjkf/9E5MDpLwsREREREZHhLJeBRLPdlx5vARz24qOewFCPbPBYpr346Gv3Q8Mbe993/JF2mF57RGEdbJC9Mk1oT2VoiKRoT2bxulxUhvzv7SViGThy9ix0p5EGy9xV4+KvwPCXdc5AD2K5/aPqEyTNsTS//M9Gnnm3Oe/2sqCHz79vKsceVDniq14A2hMZklmDSZX2YqQjfca9iPQfheoiIiIiIiLDUSYB8d360l0euwvcVUAdvrk0vPN/sPpB+6BCbxyuXYuPVkwbvPHJsGdZ0JHKUR9N0RrP4HI4KA/6cPY157YsHGYGRy5lV7mYBhYOcPsxPUWkQ+MwPSFMz84al9H585kzTP66ege/fWELqWzPqhenAz4yr5ZPL5xIkW/kR005w6QxlqbI62LOuDA1JX4tRioi3Yz8dzoREREREZHRJBWBjgaIbIdMB/hCUFwDzgKaIZmKwJt/gTf+aC9E2htPEcw8FeZ83F6gVWQ3sUyOxmiK5lgGy4RwwIPbtY9g1MzhzKXsxUSNDOCwa1xcPrKhcbtqXNwBexZ6AVizI8IdT61nc2si7/ZDxhTzpeOmMbUqNMgjGxixdI5IMsPYcICpVUUU+0fngRIROTAK1UVERERERIaaaUKy1e5L76i3Z2gHwoXXlx7dDqsfgnV/38fio1Uw9xMw4yPgLRq88cmIkMzaHdiNHWkyOZOSgAevO8/UdMu0FxPNpXAY6c7FRN1Ybj
+Gv4KMrxTLHdw1C30U1bj0RXsiw93PbuKfaxvzbi/2uzl/yWROnDkG5yh4nzIti+ZYGqcDZtQUM74siFuLkYpILxSqi4iIiIiIDBUjC/FmiGyDeJN9WaAUQlVDOqxB1/jmrsVHrZ7VEl0qpsG8M2Ha+7X4qPSQMUxaYvYipMlsjmKfh5LArlnGDiNjz0DPpXAYBjjBcvkxXQGyRbWY3mJMdwDTExy1NS59YZgW/1hTz6+f20Q8beTd56RZYzh38eRuz+9IlsoatMTTVIR8TKsKUV7kHeohicgwp79CREREREREBls2CbFGaN9i15u4vFBUYf9bKCwTNq+E1Q9A/eq97ztugb346Lj5hTVzX/okZ1q0xTPUR1N0pLIEPG6qgh6cZgpHcmeNC1guD6bLT65oLIY33DkDPYjl8ul11emdhg5uX7GedxtjebdPrSzii8dNY0ZNySCPbGBYlkV7IkvaMJlcUcTkyiItRioifTIsP8fys5/9jMmTJ+P3+1m0aBEvvPBCr/vec889OByObie/v3uv2fnnn99jnw996EPd9mltbeWcc86hpKSE0tJSPv/5zxOL5f8lIiIiIiIi8p6kotD0Dmx5DupeAyMDxWPtPvBCCdRzaXjrEXjwPPi/b/YeqDtccNAH4Yxfwod/BOMXKPiUbkwTWuMZ3q6LsL6uGSveRo2jnbJcI+5UMw4jh+ErJV12CMnqw0mMWUiiZhGpitlki8dj+MvtXnS9roilctz+1Lt89aHX8gbqQa+LLxwzlZs+ddioCdSzhkldJIXT5WDuuDCH1BQrUBeRPht2M9UfeOABrrjiCpYvX86iRYu45ZZbOOmkk1i3bh3V1fkXnikpKWHdunVd5x15fiF+6EMf4u677+467/P5um0/55xzqKur4/HHHyebzbJs2TK+8IUv8Nvf/rafHpmIiIiIiBQk04Rkm92XHquHXAp8JYXXl56KwJsPw5o/2s9HbzxBmPkRmPMJLT4qeVm5DB2xGM3tESLxBC6cVBYFcXhCZP2lXTUuljuIVSgHq94jy7L459pG7n52E5FkNu8+xx1cxbKjp4yqSpRYKkcklaW21M/UqhAh37CLx0RkmBt27xo33XQTF154IcuWLQNg+fLlPProo9x1111cddVVea/jcDioqanZ6+36fL5e93nrrbd47LHHePHFF1mwYAEAP/3pTznllFP40Y9+RG1t7QE8IhERERERKUhGDhKdfemxJsC0+9KLKod6ZIMrWgevP2gvPppL9b5fUaUdpM/8CHhDgzc+Gd5Mw37d5FKQTRHPGDQnTZpSDnKeMkJjp+P0hUh5glguzTrfH5tb4tyxYj1rdkTzbh9fFuCLS6cxb3zp4A5sABmmRUs8jcvpYOZYezFSl1OvGRHZf8MqVM9kMqxatYqrr7666zKn08mJJ57IypUre71eLBZj0qRJmKbJEUccwfe//31mz57dbZ+nnnqK6upqysrKeP/738/3vvc9KioqAFi5ciWlpaVdgTrAiSeeiNPp5Pnnn+djH/tYj/tMp9Ok07tWo49G8/8SEhERERGRApNNQbyzLz3ZDi43FJUXTr3LTo1r7b70jSv2vvho+dRdi4+6Rseih/IeWRYYaftnKJe0P+XhcII7QNJZRIOzih2Wi4TPT7iqhCKfDwvIv5Sm9CaRyfG7F7by8GvbMa2e231uJ2cdOZHTDqvF4xqWrcHvSTJj0JpIU9m5GGnZKJp5LyKDb1iF6s3NzRiGwZgxY7pdPmbMGNauXZv3Oocccgh33XUX8+bNIxKJ8KMf/YglS5awZs0axo8fD9jVLx//+MeZMmUK69ev57//+785+eSTWblyJS6Xi/r6+h7VMm63m/Lycurr6/Pe7/XXX8+1117bD49aRERERERGhXQHdDTYM9PTUfAGoXgMOIfVf7sGlmXaffGrH7A74/dm3Hw7TB9/pGYXFyoj2zkDPWl/7XDYB588AQiNAX8JKYefhpSTLRGDeCZHWYmXMd
4C+pnqR5Zl8cz6Fn757w20xDN59zlqajkXvm8q1SX+vNtHIsuyaI1nyJkW06pCTKwI4nOrO11EDsyI/020ePFiFi9e3HV+yZIlzJw5kzvvvJPrrrsOgLPOOqtr+9y5c5k3bx7Tpk3jqaee4oQTTnhP93v11VdzxRVXdJ2PRqNMmDDhPT4KEREREREZkSxrV196R50dDvoLsC/dyMA7j8PqB6F9c+/7OZz2jPR5Z0LlQYM3Phl6pmEvUptL2v9aln3AyROAomoIloGnyD4Y5Q6QtaCxI83W5gTtyQzFPje1pYG8a6jJvu1oT7J8xXpe2dqed/uYEh8XHTuNIyeXD+7ABljWMGnsSBEOeJhVFaKq2KfXkIj0i2EVqldWVuJyuWhoaOh2eUNDwz4703fyeDwcfvjhvPvuu73uM3XqVCorK3n33Xc54YQTqKmpobGxsds+uVyO1tbWXu/X5/P1WOxUREREREQKhJGDRIs9Kz3eaM/Q9pcWXl96ugPe/Au88UdItva+nycAM06FuWfYM5BldLMs+0BLLmlXuZiGfZDJHbD78sOTwBeyA3RPkV2R1Mk0LZpjaTa3JmiJZQh6XYwN+3EqCH1P0jmDh1Zt4w+rtpHL0/Xidjr4xPzxfGL++FE3ezuazNKRzjK+LMjUqiKC+oSDiPSjYfWO4vV6mT9/Pk8++SSnn346AKZp8uSTT3LppZf26TYMw+D111/nlFNO6XWfbdu20dLSwtixYwF7tnt7ezurVq1i/vz5APzzn//ENE0WLVp0YA9KRERERERGj1waYo3QvhVSbfbM60AZuAtswk1HHbz+e1j76N4XHw1Wwpwz7MVHfcWDNz4ZXGbO/pRGLgW5zloRtw/cfghPtD+94S0CTxA8+WtFdlZ0bGtL0hBN4XE5qSnxaxHJA/DiplbufHo9DdF03u2HTyjl4qXTqC0NDPLIBpbReWDG43YwuzZMbWlAryMR6XfDKlQHuOKKKzjvvPNYsGABCxcu5JZbbiEej7Ns2TIAzj33XMaNG8f1118PwHe/+12OOuoopk+fTnt7OzfeeCObN2/mggsuAOxFTK+99lrOOOMMampqWL9+Pd/4xjeYPn06J510EgAzZ87kQx/6EBdeeCHLly8nm81y6aWXctZZZ1FbWzs0T4SIiIiIiAwf6Zgdpke2Qipiz7ANVRdWXzpA0zp47f59Lz5aNgUOPROmnaDFR0cby+zsQU/Z/1oWOF32LPRgpX2QqStAD4Jz3wtdRpJZtrcl2BFJ4QAqQ75RtUDmYGuMpvjFfzbw3Ib8nx6pKPJy4TFTWTKtYtRVoSQyOdoSGaqL/UyrChEO6v1HRAbGsPsL8Mwzz6SpqYlvf/vb1NfXc9hhh/HYY491LV66ZcsWnLv9Um5ra+PCCy+kvr6esrIy5s+fz7PPPsusWbMAcLlcrF69mnvvvZf29nZqa2v54Ac/yHXXXdetvuW+++7j0ksv5YQTTsDpdHLGGWdw6623Du6DFxERERGR4aOrL70OYnWQSYI/BOFx9gz1QmGZsPUFO0yve3Xv+9YeYYfp4xcWVqf8aJZLd85AT9m1Rw6HPQPdE4TwBLvGxRO0g/T9PIAST+fY3pZkR3uStGFSHvTi94yuCpLBlDVM/vzqdu5/cSuZXM+DXk4HfPTQcZy9cMKoq0KxLIuWeAbDspheHWJieRFedwG9T4vIoHNYltWzVEv2WzQaJRwOE4lEKCkpGerhiIiIiIjIe2Uau/rSY41g5ey+dG/RUI9scBkZePdJWP0AtG3qfT+HE6Yeb4fplQcP2vBkAJi5XbPQjTRYgMtrV7b4yyAQ3m0xUf97PnCSyhrUR5JsbUsSTxuUBT2jLuQdbKu3tXPHivVsa0vm3T67toSLj53G5MrR9z6WyZk0xdKUBj1M61yMVIYv5WcyWui3loiIiIiICNgzcuNNdl96osWurQiUF15feroD3nrYXnw00dL7fm4/zPiIvfho8djBG5/0D8vcNQs9m7
LPO1x2gB4os1/73s4A3RO0K14OUNYwaYim2NqaIJLMUeJ3M26U9XkPttZ4hrue2ciKt5vybg8HPHzu6Mkcf0j1qKt6Abs6KJHJMbE8wJTKEAGvPukgIoNDobqIiIiIiBS2dKwzTN8C6agdFhePKby+9I56e/HRdY/ai072JlBuB+kzP6rFR0cSI7OrB93I2pd5/OAOQnGt/b30Bu2Z6G5v/95158KRW1oTtMbTBDxuasP+URnyDhbDtHj09True34ziYzRY7sDOHnuWD67aBIh/+h7LzNMi8aOFAGvi9njwowt8ePUYqQiMohG3zuriIiIiIjIvlgWpNrtIDm6A7IJ8IagpLaw+tIBmt+G1x6ADf/ax+Kjk2HemTD9BLsSRIYvM9d9FjrYfeduPxTX7Koz8gTBExiw/vudPdfbWhM0dqTxupyMKQ7gUvh5QNbWR7njqfVsaI7n3T69OsSXlk7joDGj86BXPJ2jPZmhpsTP1OoQJX4tRioig0+huoiIiIiIFA7TgEQrRDv70o2s3RMdLB/qkQ0uy4JtL9hh+o6X977v2MPg0LNgwsLCO+AwEliW3X+eTUEuCYZhV7W4A+ArgbIpuwJ0b1G/1Lj0RSSRZWtbgvqIHepXhnx4XHr9HIhoMsu9Kzfxf2825N1e5HNx3uLJfHBWzag8cGFa9iceHA44eEwxE8qDek2JyJBRqC4iIiIiIqNfLmNXvEQ6+9IdDrs32u0f6pENLiML65+0w/S2jb3v53DClKX24qNVMwZvfLJvRrZzBnrSfl07HHbvvycAoTHgL9kVoA/BegCxdI4dbUm2tyfIGhblRV58bvVcHwjTsnj8zQbuXbmJjlQu7z7vn1HNsiWTKQ2Ozk+RpLIGLfE05UVeplWFqAgV2FoXIjLsKFQXEREREZHRKxO3Z6RHtkGyHTw+CFUXXl96ugPeegTe+AMkmnvfz+2HGR+GOZ+AEi0+OuRMo7PGJdlZ42KB02MH6EXVECyzO9C9QXtmunPoZu2msgZ17Um2tiWJZ3KUB70EvQX2czYANjTFuP2p9axr6Mi7fVJ5kC8eN43ZteFBHtngaUtkSOcMJlUEmVIZwu/RQRoRGXr6DSciIiIiIqOLZUEqsqsvPRMDXwjCBdiXHmu0Fx9d+4jdG9+bQBnM6Vx81F8yeOOTXSzLXkx0Z4Bu5uzXqztg9/2HJ9mv452LibqGx3/nMzmTxo4Um1sSdKSyhP1expcGh3pYI148neO+5zfz6Ot1mFbP7QGPi7MXTuDUebW4R2kFSs4waexIU+RzMWdcmJoSLW4rIsPH8PgtLCIiIiIicqBM0652ie6AWIMdUPpLIDx+wBZiHLaa34HVD8D6f4Fl9L5f6cTOxUdPHJKqkIJm5jorXFJ2jQvY3wO3H8IT7ddu12Kiw6+myDAtmjrSbGmN05bIEPS4qQ0HFHoeIMuyWPF2E3c9s5G2RDbvPu+bXskF75syqitQYqkc0XSGmnCAqVVFFGsxUhEZZhSqi4iIiIjIyGZkd/Wlx1sAh734qCcw1CMbXJYF21+C1+6H7av2vu/YQ+0wfeJRhTd7fyhYZmcPesr+1zTtmebuAAQr7U8KdAXowSGtcdkXy7JoiWfY2pqgqSON1+VkTHFgVC6MOdi2tiZYvmI9q7dH8m6vDfu5aOk0jphYNsgjGzymZdHckcbldHDImGLGlwVH7Ux8ERnZFKqLiIiIiMjIlEnYYXr7FrvuxeWBokr730JiZO0Z6asfgNb1ve/ncMKUY+0wvXrm4I2vEOXSuxYTNXJ2SO7224F5eIJd47JzMdER9HptT2TY1pakPpLCAVSFfAo8+0Eqa/DAi1v586vbyeXpevG6nHxqwXg+fsR4PKP4+d65GGlFyMe0qhDlRaNz0VURGR0UqouIiIiIyMiSikBHA0S2Q6bDDiiLa8BZYIvXZWKdi4/+HuL7WHz0kJNh7iehpHbwxlcozNyuAH1njYvLa1e2lIzv/NTEzsVE/SOyii
iWzrGtNUFdJEnOtCgP+vC6R2+4O1gsy+K5ja384t8baOpI591nwaQyLjp2GjXh4VcB1F8sy6ItkSVjmEytDDGpMojPXWDv5yIy4ihUFxERERGR4c80IdkK0TqI1ds1GoFwYfalxxrhjT/AW3/d9+Kjsz8Gs04Df3jwxlcIMnFIRe2+eofbDtAD5fbJ2xmge4Ij/kBPKmuwvS3JtvYEyYxBedBHwDuyH9NwUR9JcefT63lpc1ve7VXFPr5wzFQWTSkf1T31WcOkKZYi5PMwY2yY6mLfqH68IjJ6KFQXEREREZHhy8jas7Aj2+yqF4BAqV3zUmha3oXVD8K7T+598dHwBLvi5aAPaPHR/paJQaIdvAEonWS/Fr1Beya6e/RUVWRyJg3RFFtaE3SksoT9XspL9VrqD1nD5A8vb+Ohl7aRMcwe291OB6cfNo4zj5yA3zO6D2B0pLJ0pHOMDQeYVhWiyKeISkRGDr1jiYiIiIjI8JNN2jOyu/WlV9i1GoXEsuxFR1c/ANte3Pu+NfPsMH3SYi0+2t92D9MrD7JrdPwlQz2qfmeYFo0dKTa3JGhPZAj5PNSGA5o53E9e3tLGnSvWsyOSyrt93vgwFy+dxoSy4CCPbHAZpkVzLI3H5WBmTTHjyoJa6FZERhyF6iIiIiIiMnykonZfenSbHWR6goXZl27mYP0/7TC9ZS+Lj+KAKcfAoWdB9axBG17BKJAw3TQtWuIZtrYmaOpI4XO7qCkJKOjsJ82xNL/8z0aeeTf/2gdlQQ+ff99Ujj2octQfwEhmDFoTGaqKvUyrClEaLLADpSIyaihUFxERERGRoWWakGyDjjr7lEuBrwRKxhVeX3omDmsfgdd/v6vuJh+Xz158dN4n7edJ+lcmBsl28HSG6eFx4Cse6lENiLZ4hq1tCRqiaZwOqC7243bpkw79IWeY/HX1Dn77whZS2Z5VL04HfGReLZ9eOHHUV59YlkVrPEPOtJheVcTEiiItdisiI9roftcWEREREZHhy8hBorMvPdYEmIXblx5vgtd3Lj4a730/f6m9+Ojs0+yvpX+lO+y6IU8QKkZ3mN6RyrK9LcmOSBLDtCgP+hRy9qM1OyLc8dR6NrfmX0z4kDHFfOm4aUytCg3yyAZf1jBp7EhRGvAytbqIqpAWIxWRkU+huoiIiIiIDK5sCuKN0L4VUu12tUuwrDAX1WzdAK89AO8+0YfFRz8FB32wMJ+ngbZ7mF55CJSMHbVhejJjsKM9yba2BKmcQVnAR8BbYPVKA6g9keHuZzfxz7WNebcX+9ycf/RkTpw5BmcBBMuRZJZ4Osv4siBTq4oIehVDicjooHczEREREREZHOkOuy89sg3SUfAGIVQNzgL7b4llwY6X7b70rS/sfd8xc+DQM2HS0Vp8dCAUUJieyZnUR5JsaUsSS2UJB7yUF+kATX8xTIt/rKnn189tIp7Of4Dsg7PGcO7iyYQDnkEe3eAzTIummN3PP3tcmNpwAKc6+kVkFCmwv15FRERERGRQWZbdlx7dYfelZ5P2Qo/h8YXXl27mYMNT9sz0lnf2sqMDJh9jh+ljZg/W6ApLusPuTPcWdYbpteAbnTUcOcOkKZZmc0uC9kSGkM9DbTig+o1+9E5DB7evWM+7jbG826dWFvHFpdOYMXb0LXKbTyKToy2RYUyJn6lVoYI4iCAihUehuoiIiIiI9D8jB4kWe1Z6vBEs0+4AL8S+9EwC1j4Kb/weYg297+fy2ouPzv2kfdBB+l9XmB6CqhmjOkw3TYvmeJqtrQmaOtIEPG7GhgMFUTkyWGKpHL9+bhOPvVGPlWd70OviM4smccrcsbgKYJa22bkYqYnFQdXFTKwI4tGityIySilUFxERERGR/pNLQ6yzLz3ZavelBwq0Lz3eDG/8Ad56GDJ7W3w0bC8+Out0e6FW6X/pDkhG7JnpVTM7a15GZ5huWRbtiSxb2xI0RNO4HA5qSgIFEeoOFsuy+OfaRu5+dhORZDbvPs
cdXMWyo6dQXuQd5NENjXTOoLkjTWmRl+nVISpDBfieLyIFRaG6iIiIiIgcuHTMDtMjW+2Oam8QiscUXl86QOtGuy/93SfsypfelIyzFx89+CRw+wdvfIWkW5g+Y1SH6QDRVJbtbQl2RFJYJlQUeTVTuJ9tbolzx4r1rNkRzbt9fFmALy6dxrzxpYM7sCHUnsiQzBpMrAwytTKE36OFb0Vk9CvAv3BFRERERKRfdPWl10Gszq458RdDeFzhLappWVD3Krx2P2x9fu/7jpkN886CSUvsmfzSvywLMrHOML2z5iVcawfro1QyY7C9PcH2tiTJrEFFkU/BZj9LZHL87oWtPPzadsw8XS8+t5OzjpzIaYfVFsyBjJxh0hhLE/S6mDMuTE2JX4uRikjBUKguIiIiIiL7xzTsvvToduhoACtn96UHK4Z6ZIPPzMGGFfbM9Oa397KjAyYfbYfpNXMGbXgFxbLsmempCHiLCyJMT+cM6iMptrYmiKVzlAa8lBepdqM/WZbFM+tb+OW/N9ASz+Td56ip5Vz4vqlUlxTOJ07i6RztyQxjwwGmVhVR7NdipCJSWBSqi4iIiIhI3+TSEG+y+9ITLeB0QqC8MPvSswlY+zd4/aF9LD7qgYM/BHM/BaUTBm98hWTPML16ll3zMorD9Jxh0tiRZktrgvZEhpDPQ204gEOLkParHe1Jlq9Yzytb2/NuH1Pi46Jjp3Hk5PLBHdgQMi2L5lgapwMOGVPMhPIg7gKZmS8isjuF6iIiIiIisneZeOfio1sgHbX7vwu1Lz3RAm/8Ed78i10x0htfib346OzT7YVapf8VYJhumnaguaU1QUs8Q8DtYmw4gFNher9K5wweWrWNP6zaRi5P14vb6eCM+eP55Pzx+NyFU7OTyhq0xNOUF3mZVhWiQouRikgBK8C/gkVEREREZJ8sC1Lt0FEP0R32zGxvCEpqC68vHaBtE6x+EN55HMxs7/uV1Nqz0g/5kBYfHSg7w/R01H5NjpkNxTWjOky3LIu2RJatrQkaO1K4HE7GFPtxqb+63724qZU7n15PQzSdd/vhE0q5eOk0aksDgzyyoWNZFu2JLGnDYHJFEZMri9TZLyIFT6G6iIiIiIjsYhqQaIXoNnt2upGFQBiChVNv0MWyoO41WH0/bHlu7/tWz7T70ie/T4uPDhTLsoP0VBR8nTPTi8eCNzjUIxtQ0VSWba0JdkRSYEFFka9gFsIcTI3RFL/4zwae29Cad3tFkZcLj5nKkmkVBVWzkzVMmjrSFPndzK0pZUyJr6Aev4hIbxSqi4iIiIgI5DJ2X3qksy8dBwTLCnO2tZmDjf+2w/SmdXvfd9LRcOiZMGYuKGgaGHuG6WPmdM5MH91heiKTY3tbku3tSdJZk/Iir2YHD4CsYfLnV7dz/4tbyeTMHtudDvjooeM4e+EEgt7CilBiqRyRVJbaUj9Tq0KEfIX1+EVE9kbviCIiIiIihSyTsBfajGyDZDt4fBCqLsy+9GwC1j1mLz7aUdf7fi4PHHQSzPsUlE4cvPEVmgIN09M5g/pIii2tCeKpHGVFXiqK1F09EFZva+eOFevZ1pbMu33W2BK+uHQakytHb7VQPoZp0RJP43I6mDm2mPFlQVUNiYjsoQD/UhYRERERKXCWZS/uuLMvPRMDXwjCBdqXnmiBNX+yFx9Nd/S+n68EZp1mL0BaiHU4g8WyIB2BVIf9nBdImJ41TBo70mxtSdCezFLsc1NbGlDVxgBojWe465mNrHi7Ke/2cMDDsiWTef+M6oJ7/ncuRloZ8jGtKkRZkXeohyQiMiwpVBcRERERKRSmCclWiGy3Z6cbGfCXQHh8YVaXtG+2Fx99+//2vvho8ViY+0k45GTwFM7ihINuzzC9Zq4dpo/y59w0LZpjaba0JmiOZQh6XIwN+3EW4s/kADNMi0dfr+O+5zeTyBg9tjuAD82p4dyjJhPyF1ZcYlkWrfEMOdNiamWISZVBfG7VDYmI9Kawfk
uIiIiIiBQiI7urLz3eYl8WKB31YWVelgX1q+G1B2DLs3vft2oGHHoWTD5Gi48OpAIN03eGmNvakjREU3hcTmpK/KrZGCBr66Pc8dR6NjTH826fXh3iS0uncdCY4kEe2dCzPyWRIhzwMKsqRFWxFiMVEdkXheoiIiIiIqNVJmGH6e1b7LoXlweKKu1/C41pwKZ/22F601t733fiEnvx0Zp5hTmDf7AUaJgOEElm2d6WYEckBRZUhnx4XAVYvTQIosks967cxP+92ZB3e5HPxXmLJ/PBWTUFeUAjmszSkc4yvizI1KqigluMVUTkvdK7pYiIiIjIaJOKQEeDXfOS6QBvkR1WFuJs62wS1v0dXv89dOzofT+nBw76AMw7E8omDd74CtHOTv90B/jDBRWmx9M5drQn2d6WJG2YlAe9+D0F+HM5CEzL4vE3G7h35SY6Urm8+7x/RjXLlkymNFh4veFGZ+2Qx+1gdm2Y2tJAQR5UEBF5rxSqi4iIiIiMBqYJyTZ74dFYPWRTEAgXbl96onW3xUejve/nK95t8dGKwRtfIcobpo8Fj3+oRzbgUlmD+kiSrW1J4mmDsqCHCq9vqIc1am1oinH7U+tZ15B/4eFJ5UG+eNw0ZteGB3lkw0Mik6M9maWqczHScLAAP70kInKAFKqLiIiIiIxkRhbizRDZZle9YNl96UWVQz2yodG+BVY/BO88Zj83vSmu2W3x0eDgja8QFXCYbndVp9nSEieSzFHidzOudPTPyB8q8XSO+57fzKOv12FaPbcHPC7OXjiBU+fV4i7Aup2uxUgti2lVRUwsL8LrLrznQUSkPyhUFxEREREZibLJXX3pyfbOvvQKcBVejQGWBQ2v233pm5/Z+76VB9uLj045Fpz679CAskxIRSET260zvTDC9J3VGltaE7TE0gS9bsaG/TgL8VMjg8CyLJ5+p5lf/WcDbYn8B9OOnl7JBe+bQmWoMD8hkMmZNMXSlAY9TOtcjFRERN47/RUpIiIiIjKSpKJ2X3p0mz3z1xss3L5004BN/4HVD0Djm3vfd+JRdl/62MMKsw5nMFlm58z0mD0zfcycggnTd84E3tqaoKEjjc/lpKZEXdUDaWtrguUr1rN6eyTv9tqwn4uWTuOIiWWDPLLhI5LMksjkmFgeYEpliIC3AH9fiIj0M4XqIiIiIiLDnWXZHeEddfYpl7Jn/hZqX3ouBeseg9cftDvke7Nz8dG5n4TyKYM3vkK1Z5g+9lAIjSmIMB0gksiytS1BfSQFQFXIh6cAK0YGSypr8MCLW/nzq9vJ5el68bqcfGrBeD5+xPiC/T4YpkVjRwq/x8XscWHGlvhx6gCPiEi/UKguIiIiIjJcGTlINENkO8QaARP8pYXbl55stxcfXfOnvS8+6g3Zi4/O+bgWHx0M+cL04hpwF0a9RDydY3tbkh3tSTKGSXmRF59bM4EHimVZPLexlV/8ewNNHem8+yyYVMZFx06jJlwYB3TyiadzRJIZxpT4mVodosSvxUhFRPqTQnURERERkeEmm+rsS98KqTa72iVYVjAhZQ/tW+1Z6W//A4xM7/uFxnQuPnqKXYsjA6tbmF5acGF6KmtQ155ka1uSRMagLOihwlsYj32o1EdS3Pn0el7a3JZ3e1Wxjy8cM5VFU8pxFOKneADTsmiJZcBhMb26mIkVwYKdqS8iMpAUqouIiIiIDBfpDntGevvWzr70AISqC3dBzfo3YPX9sOkZoGe9Q5fKg+2+9KlLC/e5GkyWaX9qIJOAQGnBhemmaVEfTbGpOU40laXE72FcaWCohzWqZQ2TP7y8jYde2kbGMHtsdzsdnH7YOM48cgJ+T+F+SiCdM2iOpSkLeplWHSrYRVlFRAaD/uIUERERERlKlgXJNojWQazODir9JRAeV5h96aYBm5+1w/SGNXvfd8IiOPQsLT46WPYM02sPsj8dUCBhOkBHKsvG5jj1EbunujYcKNgZ0YPl5S1t3LliPTs6u+r3NG
9cmIuXTmNCeWF/OqUtkSGdM5hUEWRKZaigDy6IiAyGYfkZoJ/97GdMnjwZv9/PokWLeOGFF3rd95577sHhcHQ7+f27etOy2SxXXnklc+fOpaioiNraWs4991x27Oi+oNHkyZN73M4NN9wwYI9RRERERAqckYOOBtj+Mmx9Hto3gdsPpRPsXupCC+pyaXjzL/DQefD4t3oP1J1uOPhD8Im74OQfQO3hhfdcDTbLtBfKjWwHlxdqD4UJC6F0YsEE6oZpsa0twatb26lrT1EZ8lEW9CpQH0AtsTQ3PLaW7zy8Jm+gXhb08NUPHMz3Tp9T0IF6zjDZEUnidMCccWFm1JQoUBcRGQTDbqb6Aw88wBVXXMHy5ctZtGgRt9xyCyeddBLr1q2juro673VKSkpYt25d1/nd/7BJJBK8/PLLfOtb3+LQQw+lra2Nr3zlK3z0ox/lpZde6nY73/3ud7nwwgu7zhcXF/fzoxMRERGRgpdL2xUvkW2QaLH70gMF3Jeeaoc1f7YXH01Fet/PWwQzP2ovPlpUNVijK2xdM9Pj9mu09iAI1YDbO9QjG1SRZJZNzXHqIilCPje1qnoZUDnD5JHVdfz2hS0ks0aP7U4HfGReLZ9eOJEi37CLNAZVLJ0jmspQUxJgalURxVqMVERk0Ay730A33XQTF154IcuWLQNg+fLlPProo9x1111cddVVea/jcDioqanJuy0cDvP44493u+y2225j4cKFbNmyhYkTJ3ZdXlxc3OvtiIiIiIgckHSsM0zfaofH3iAUjyncDvDINnj9IVj3GBjp3vcrqrYXH53xYS0+Olgs064kyibtBUgLNEzPGiY72pNsao6TMUzGFPtwa8HHAbVmR4Q7nlrP5tZE3u2HjCnmS8dNY2pVaJBHNryYlkVzRxqX08EhY4oZXxbUa1NEZJANq7/gM5kMq1at4uqrr+66zOl0cuKJJ7Jy5cperxeLxZg0aRKmaXLEEUfw/e9/n9mzZ/e6fyQSweFwUFpa2u3yG264geuuu46JEyfy6U9/mssvvxy3e1g9RSIiIiIyknTrS6+3Z/z6izv70gs0AGlYA689AJv+zV4XH62YbvelTz2ucA88DDbTsD85sDNMrzykszO9sMJ0gLZ4ho3NcRo7UoT9XsqLCvSTJIOkPZHh7mc38c+1jXm3F/vcnH/0ZE6cOQZngVfupLIGLfE0FSEf06pClBcV3s+niMhwMKz+Om1ubsYwDMaMGdPt8jFjxrB27dq81znkkEO46667mDdvHpFIhB/96EcsWbKENWvWMH78+B77p1IprrzySs4++2xKSkq6Lv/yl7/MEUccQXl5Oc8++yxXX301dXV13HTTTXnvN51Ok07vmlETjUbfy0MWERERkdHINOxql+h2uzfdytkhZbB8qEc2NCzTXnz0tfuh4Y297zv+SDtMrz1CXemDRWF6l0zOZGtrgi2tcUwLakoCuJx6HQ4Uw7T4x5p6fv3cJuLpnlUvAB+cNYZzF08mHCjsahPLsmhLZMkYJlMrQ0yqDOJzqztdRGSoDKtQ/b1YvHgxixcv7jq/ZMkSZs6cyZ133sl1113Xbd9sNsunPvUpLMvijjvu6Lbtiiuu6Pp63rx5eL1eLrroIq6//np8vp6zEq6//nquvfbafn40IiIiIjKi5dIQb4L2rZBstUPhQHnh9qXn0vDO/8HqB+3am944XDD9RJj3KaiYNnjjK3S7h+mBsoIO0y3LoqVzdnpLLENpwFPwfd0D7Z2GDm5fsZ53G2N5t0+tLOKLS6cxY2xJ3u2FJGuYNMVShHweZowNU13s0yK5IiJDbFj9lVBZWYnL5aKhoaHb5Q0NDX3uOvd4PBx++OG8++673S7fGahv3ryZf/7zn91mqeezaNEicrkcmzZt4pBDDumx/eqrr+4WxEejUSZMmNCnMYqIiIjIKJOJ233p7VsgHQW3H0LVhVtbkmqHNx+GN/5of90bTxHMPNVefDRUPVijk51heiYJwc
IO08Gu09jSkmBLWwIHUFPi1+z0ARRL5fj1c5t47I36vAVQQa+LcxZN4sNzx+r7AHSkskRTOWpLA0yrCulgj4jIMDGs3o29Xi/z58/nySef5PTTTwfANE2efPJJLr300j7dhmEYvP7665xyyildl+0M1N955x3+9a9/UVFRsc/befXVV3E6nVRX5//j3ufz5Z3BLiIiIiIFwrLsYLKjHqI7IJsAbwhKagu3Lz26HVY/BOv+vo/FR6tg7idgxkfAWzR44yt0e4bpVTPsMN1VmLUalmXR1JFmQ3Oc9kSG8qCPgFd1GgPFsiz+ta6Ru57ZRCSZzbvP0oOr+NzRU9QTjl2N0xxL43E5mDW2mHFlQR1kEBEZRoZVqA52Dct5553HggULWLhwIbfccgvxeJxly5YBcO655zJu3Diuv/56AL773e9y1FFHMX36dNrb27nxxhvZvHkzF1xwAWAH6p/4xCd4+eWXeeSRRzAMg/r6egDKy8vxer2sXLmS559/nuOPP57i4mJWrlzJ5Zdfzmc+8xnKysqG5okQERERkeHJNCDRagfIsQYwshAIF25fOkDjm7sWH7XM3vermAbzzoRp7y/cWfxDQWF6D8mMwaaWGNvaUnicDsaGAwW/AOZA2twS544V61mzI/9aZOPLAnxx6TTmjS8d3IENU8mMQWsiQ1Wxl2lVIUqDOsggIjLcDLu/ZM8880yampr49re/TX19PYcddhiPPfZY1+KlW7ZswencNfOnra2NCy+8kPr6esrKypg/fz7PPvsss2bNAmD79u08/PDDABx22GHd7utf//oXxx13HD6fj/vvv59rrrmGdDrNlClTuPzyy7vVu4iIiIhIgctl7L70yFY7VMcBwVK76qUQWSZsXgmrH4D61Xvfd9wCe/HRcfO1+OhgMg1ItkE2BcEKhemAaVo0dKTY2BQnmspRUeTF79Hs9IGSyOT43Qtbefi17Zh5ul58bidnHTmR0w6rxeMq0E/47MayLFrjGXKmxbSqIiZVFOF163kRERmOHJZl5asxk/0UjUYJh8NEIpF99rWLiIiIyAiSSdgz0iPbINkOHh/4Sws3mMyl4Z3H7TB9n4uPntC5+Oj0wRuf9AzTyybZnfWF+prtFEvn2NQcZ0d7Er/bRWnQo8UeB4hlWTyzvoVf/nsDLfFM3n2OmlrOhe+bSnVJgR6Y3EPWMGnsSFEa8DK1uoiqkBYjldFJ+ZmMFsNuprqIiIiIyLCQbN/Vl56JgS8E4QLuS09F7MVH1/zRDmx74wnCzI/AnE9o8dHBZubs120uDYFyqJ6lMB27m7o+mmJjU4x4JkdlkV+zfwfQjvYky1es55Wt7Xm3jynxcdGx0zhycgFXZu0hkswST2cZXxZkalURQa+iGhGR4U7v1CIiIiIiO5kmJFshsrMvPQP+EgiPL9zakmgdvP6gvfhoLtX7fkWVdpA+8yP2gq0yeBSm9yqayrKxKU5dJEnI56E2HBzqIY1a6ZzBQ6u28YdV28jl6XpxOx2cMX88n5w/Hp9blTtgH/BpiqXwup3MHhemNhzAqcVIRURGBIXqIiIiIiJGtrMvfZv9Lw4IlIInMNQjGzqNa+2Kl40r9r74aPnUXYuPKsQdXGbO/tRALmPXvChM75IzTHa0J9nYEieVNaku9quzewC9tKmV5U+vpyGazrv98AmlXLx0GrWlBfyeuodEJkdbIsOYEj9Tq0KEA/q5FREZSRSqi4iIiEjhyiYh1gjtW+x6E5cHiqoKN5S0TNjynB2m1722933HzbfD9PFHFu4s/qGyZ5g+ZhIUVYNL/70DaE9k2Ngcp7EjRcjroTbsG+ohjVqNHSl+8e8NPLehNe/2iiIvFx4zlSXTKtQP3smyLFriGUwsDqouZmJFUAd8RERGIP3VJSIiIiKFJxWBjga75iXTAd4iKK4BZ4FWEhiZzsVHH4T2zb3v53DaM9LnnQmVBw3e+MSmMH2vMjmT7W0JNrckyJkW1SE/boWVAyJrmP
z51e088OJW0rmen2RxOuCjh47j7IUT1A++m3TOoLkjTWmRl2lVIaqKdcBHRGSk0m83ERERESkMpmkHktEdEKuHbAoC4cLuS093wJt/gTf+aHfJ98YTgBmnwtwzIDRm8MYnNoXp+9QSS7OxOU5zLE044KXCp+dmoKze1s4dK9azrS2Zd/ussSV8cek0JlcWDfLIhrf2RIZk1mBiZZCplSH8ngI9iCsiMkroLw0RERERGd2MLMSbd+tLt+y+9KLKoR7Z0Omog9d/D2sf3fvio8FKmHOGvfior3jwxic2MweJVvs1HKyEMRMVpu8hnTPY2pJgc1sCTKgpCeDSQo8DojWe4a5nNrLi7aa828MBD8uWTOb9M6pV9bKbnGHSFEsT8LqYMy5MTYlfi5GKiIwC+mtMREREREYnI2uHx+1bINne2ZdeAS7vUI9s6DStg9fu3/fio2VT4NAzYdoJhdsvP5R2D9OLKqF0UmfXv/77tpNlWTTF0mxqjtMaz1AW9KpmZIAYpsWjr9dx3/ObSWSMHtsdwIfm1HDuUZMJ+fU92F08naM9mWFsOMCUqiJK/Ho/FREZLfQbT0RERERGn3SHHSBH68AbKOy+dMuErS/YYXrdq3vft/YIuy99wsLCrcQZSjvDdDNn17woTM8rlTXY1BxnW1sSl9PB2HAAp16vA2JtfZQ7nlrPhuZ43u3Tq0N8cek0Dh6jT7LszrQsmmNpnA44ZEwxE8qD6vcXERll9NeZiIiIiIwusUZoXAvpKJTUgLNA/+Q1MvDuk7D6AWjb1Pt+DidMPR7mfQqqDhm04cludg/TiyohPBFC1YV7IKgXlmXR2JFmY1Oc9mSGiiKfeqkHSDSZ5d6Vm/i/Nxvybi/yuTj3qMmcNLtGdTt7SGUNWuJpyjsXI60IaTFSEZHRqED/hyEiIiIio45p2FUvzW/bs6xLagtztnW6A9562F58NNHS+35uP8z4iL34aPHYwRuf7KIwvc/i6RybW+Jsb0/idbmoDQfU2z0ATMvi8TcbuHflJjpSubz7vH9GNcuWTKY0WMBVWr1oi2dIGwaTK4qYXFmkgz4iIqOYQnURERERGfmyKWh+B9o3gT9cmItqdtTbi4+uexSyyd73C5TbQfrMjxbm8zQc7Bmm76x5UZjeg2la1EdTbGiOE0tlqQz58Ln1PA2EDU0xbn9qPesaOvJun1Qe5IvHTWN2bXiQRzb8ZQ2Tpo40RX43c2tKGVPi00EfEZFRTqG6iIiIiIxsyXZoWmvXvoSqwV1gH7VvfhteewA2/Gsfi49OtvvSp59Q2Iu1DqXdFyANVSlM34eOVJaNzXHq2pMEvG7NTh8g8XSO+57fzKOv12FaPbcHPC7OXjiBU+fVqhc8j1gqRySVpbbUz9SqECGfYhYRkUKgd3sRERERGZksCzrqoHEd5JJ23UuhhJOWBdtesMP0HS/vfd+xh8GhZ3UuPqpAbEgYWUi2gpFTmN4HOcOkLpJiY3OcZMagqtiPR2Fuv7Msi6ffaeZX/9lAWyKbd5+jp1dywfumUKle8B4M016M1O10MHNsMePLguqXFxEpIArVRURERGTkMXLQugFa3rVnppcUSCe4kYX1T9phetvG3vdzOGHKUjj0TKiaMXjjk+66heljoHSCwvR9iCSybGqJUxdJEfK5qS0NDPWQRqWtrQmWr1jP6u2RvNvHhv1cvHQaR0wsG+SRjQw7FyOtDPmYVhWirEif/hERKTQK1UVERERkZMnE7cqT9q0QrABvcKhHNPDSHfDWI/DGHyDR3Pt+bj/M+DDM+UThHGgYjnaG6aYBRdVQOtHuTleY3qusYbK9LcHmlgQZw2RMsU9VIwMglTV48KWt/OmV7eTydL14XU4+uWA8Hz98PF63nv89WZZFWyJL1jCZWhliUmVQHf8iIgVKobqIiIiIjBzxFmh6CxJtUFwDLs9Qj2hgxRrtxUfXPgLZRO/7BcpgTufio/6SwRufdKcw/T1pjWfY2ByjqSNN2O
+lvEhVI/3Nsiye39jKz/+9gaaOdN59Fkwq46Jj/z97dx4nV13n+/9V+17V+5qtk7CFQBBCAgkg2wg6o6JcRUFlG3Tmd3GB8TriwlXQgesyVx0dmesV3AV1ZtRxHLwa9hDCviSQkKU7na3Ta+37Oef3xwkxSVcgS3d1dfX7+Xj4kNT3VJ1vdzrJqc/51PuzgI6Yv8q7mx5KhslgKk/U7+GkzhitEQ0jFRGZyVRUFxEREZHaZ5qQ3AFDG8EyINYN9VzMGN4EL94HWx60v95DaZizd/joxTNvQGstMUqQHbEHxe4rpreCU52+r6dQNtgxmqN/NINhQkc0oEzqSTCQyPMvj2zh6W1jFddbIz4+fO58lvc0qUh8CMlciXSxzKzGIPNbQwS9KqWIiMx0+pdARERERGpbuWgXmeN94A2Bv2WqdzQ5LAt2Pg0v3As7n3n9YzuX2MX0OWdp+OhU2r+YHm6H2GwV0w+DZVmMZIr0DmcYSRdpDHpUpJwEJcPkX5/dwS+f3kHRMMetu50OLjutmyvOnI3fo09TVPLaMFKPy8GizghdDRpGKiIiNl25iIiIiEjtyift7vTUbgi32pnh9cYo2R3pL94Ho1sOfZzDCT3n2cX0tpOqtz8ZzyhCdtS+ERLe25kebFEx/TDkSwb9I1n6x7I4cdAR9atIOQme7R/jXx7ewq5EvuL6qd0x/ubNC5jdNANmUhylbLHMWLZEa8TLwtYIsWCdx42JiMgRUVFdRERERGpTag8MbYBiyh666ayzS9dieu/w0V9B5g2Gj57wVjjlPRDtqt7+ZDwV04+aZVkMpQpsHc4QzxZpCvoIeNUdPdFG0gW+91gvqzdX/julMejhupU9vPn4VkW9HIJlWYxmipQti4VtIeY0hTS0VURExqmzdyYiIiIiMu2ZBoxtg+FX7QGPka76yk9PD8K6f4VX/uONh4+e/C5Y9E7wx6q3PxnPKO6NecGOeWmYrWL6EcgWy2wbybBjLI/H6aAzFsBZT3+ma0DZMPndi7v52ZP95Erj5zA4HfCXp3Ry1fK5hHwqAxxKsWwylC7QEPQwvzVEa1jDSEVEpDL9ayoiIiIitaOUg6FXId4PwQbwhqd6RxNnZDO8cB9seeD1h4/GZtsRL8f9hYaPTrVxxfQ5EGxWMf0wmabFnlSerUMZUvkyzSGvsrsnwfpdCb770Ba2jVa+SXdCe4S/PX8BC1rr6O/TSZDIlcgWy8xpCtDTEtYnKURE5HWpqC4iIiIitSE3BoMbIDMEkXZwead6R8fOsuyhoy/eBzueev1jO061i+lzz9bw0an2WjEdB4RUTD8a6UKZvuEMO+M5Am4XXTG/On4nWDxb5J7H+3hgw2DF9YjPzTUr53HxSe36ZMDrMEyLwVQev8fFyd0xOqN+nMr5FxGRN6CiuoiIiIhMLcuC5C57IGm5ALHu6V9UNst2R/qL98HI6wwfxQE958KS90HboqptTw6hXIDcqDrTj4FhWuxO5OgbzpAplmkJ+ZVHPcEM0+IP6wf40RN9ZAqVP/XylkXtfOjsecQCGq75euxhpEU6on7mt4WJ+vX9EhGRw6OiuoiIiIhMHaMEI1thdAt4/BDtmOodHZtiBjb8Dl76ld1xfygunz189NT3QLS7evuTysoFyI4BlorpxyCRK9E3nGEgmSfkddMVC071lurOpj0p/vnhLWweTFdcn98S4m/fvIATO6NV3tn0YloWI+kiOCyOa4swpzmIx6U/7yIicvhUVBcRERGRqVFI293pyZ0QagbPNC7AZYbgpdeGj2YOfZy/wR4+evI77f+WqXVwMb1xLgSaVEw/QmXDZGc8R99IhkLJpDXsU4FygqXzZX70RB/3rxvAqrAe9Lq4avlc/vKUTlyKLnldhbLBcLpAQ9DLwrYwLWHNrhARkSOnorqIiIiIVF9mGAZfgXwCop3gnKaXpaNb7eGjm/90GMNH3wvHvUXDR2vBvmI69qcjYrPtznTlTh+xeLZI73CGwV
SeiM9DU0w/3xPJsiwe3DjI3av7SORKFY958/GtXLeyh6ZQHcyhmGRj2SL5ksHc5iA9LWENzhURkaM2Td+9iIiIiMi0ZJoQ3wYjm8EyIdo1/QqZlgW7nrXz0rc/+frHti+GJVfA3JXTPye+HqiYPmGKZZMdY1n6R7KUTYv2SEAd0hNs20iG7z68hfW7khXXZzUG+Js3L2DJrIbqbmwaKhsmg+kCIa+LU2bFaI9oGKmIiBwbFdVFREREpDrKBRjeBGN94IuAf5pl/ppl2PqQ3Zk+sul1DnTAvHPtYnr7ydXanbyecgGyo4BDxfQJMJIu0DucYThdIBbw0uzT28qJlCsa/Pypfn7z/E7MClkvPreTK86czWWndStm5zCkC2WS+SId0QDzW0NENIxUREQmgK5+RERERGTy5RMwtAFSeyDcNr0iUIpZ2PCfsO5XkN5z6ONcXnv46Cnvgdis6u1PDu21YrrDYccMxeZAsEnF9KOULxlsH83SP5YFCzqi6k6fSJZlsXrLCP/30a2MZIoVjzlrfhM3nDOftqi/yrubfkzLYjhVwOmEE9ojzGoM4tZNCBERmSAqqouIiIjI5ErutgeSFtN23ItzmmTYZoZh3b/CK7+F4usNH43Zw0cXXQaBhmrtTl6PiukTyrIshtIFtg5liGeLNAa9BL16KzmRdsVz3PXwFp7bHq+43h718ZHzFnDmvKbqbmyaypcMRjIFmsM+FrSGlTcvIiITTldCIiIiIjI5TANGe+2oFJcHYt1TvaPDM9pr56Vv/pMd+XIo0W57+Ojxl4BbXaM14YBietfemBcV049FrmiwbSTDjrEcLqeDzlgAp76fE6ZQNvjlMzv412d2UK6Q9eJ2Orj8jFm854xZ+NzT5IbkFLIsi3i2RMEw6WkJM7c5qGGkIiIyKVRUFxEREZGJV8za+emJfgg0gjc01Tt6fZYFu5+HF+6F7Wtf/9j2k+HU98HcFdOn677elfN7i+lO+2ZHbJaK6cfINC0GUwX6hu3u9OawT8XJCfZ03yh3PbKFPclCxfU3zW7gb968gK6GQJV3Nj2VDJOhdJ6wz8OpnTHaIj4c+jtAREQmiYrqIiIiIjKxsqMwuAFyI3Z+uqvKH7v3hu1BqN6w3SFvlOzomULK/v/9mWXY+rDdmT786uu8qAPmrYRTr4COUyZ1+3IE9hXTXRCdBQ2z7Zs4KqQdk0yhzLaRDDvjObwuF10NARUnJ9BgKs/3Ht3KE1tHK643h7z89bnzWbmgWd/3w5TKl0jmy3Q1BFjQGiak4bkiIjLJ9C+NiIiIiEwMy4LEDhjeCEbRjt9wVHEonMsLkU4wS/DEXbDhP+wBqf4YnPh2OOtvINgMqV324xt+Dy/98g2Gj3rg+Evt4aMNc6r3tcjrUzF9UhimxUAyT+9whnS+RGvYj9etwY4TpWSY/Ob5Xdz7VD+Fsjlu3emAdyzp5v3LZiuz/jAZpsVwuoDb5WBRZ4TuxqCG54qISFU4LMsaH9wmRyyZTBKLxUgkEkSj0anejoiIiEh1GSUY2QyjW8ETrP7ATpcXGubCmn+Ch78C1viCFQ4nnPcpWPZhuOetdvH/UHxRe/joyZfZxVqpDeU8ZEbA6bZvoKiYPmFS+RK9wxl2x3MEvW5iAY+6pCfQizvi3PXwFraP5SquL+qM8rdvXsC8lhqPyqohuaLBaLZIS9jLwrYwDUENIxWZDlQ/k3qh298iIiIicmwKKRjaCMldEGoBzxTk/0Y67YL6Q3ce+hjLhIfvBMuAd/8f+D9vHn9MtAtOeS+ccKmGj9aS/YvpDXPszHQV0ydE2TDZncjRO5wlVzJojfjxuNSdPlFGM0XuXt3Lw68OVVyPBTxcu2IeF57YppsYh8myLEYzRcqmxYLWEHObQ/pEhYiIVJ2K6iIiIiJy9NKDdn56IQnRTrvoWW3esB358vBXDu/4R78Op3/IHjS67XH7sbaT7OGj887R8NFaUsrZMS8uj/1JhFi3iukTKJEt0T
ucZiBZIOJz0xXTQMyJYpgW//nSbn66dhvZojFu3QFcuriDD501j7Bfb8sPV8kwGUzlaQh4md8aolXDSEVEZIroX28REREROXKmCfFtfx7uGe2aukKnL2JnqFeKfKnEMuHpe+DMGwAHLLkC2k9RobaWlHKQG9vbma5i+kQrGSY7x7L0jWQpGSbtER9udacflpDXRdjvJuRz43E5KRkmmUKZdL5MZm/xfMNAku8+tIWtw5mKr7GwLczfvnkBx7dHqrn1aS+RK5EplJjVGGR+a0i58yIiMqX0r5CIiIiIHJlS3s5PH+u1h4D6prgw5A3bQ0mPxIb/gHM+AU09k7IlOUqlHORGwemB2BwV0yfBaKZI73CaoVSBmN9Lc8g31VuaFjwuB+1RP2XT4p7Vvfxh/QDJXJlowM0lJ3dw7coeYkEPX/3DRu59anvF1wj5XHzorHlccnKHhmkegdeGkXrcDk7ujtEVC+DU909ERKaYiuoiIiIicvhycRjaYMe+hNvAXQMFOZcH8okje04+qcz0WnJAMX0uNMwCf4OK6ROoUDbYPpqlfzSLaUJHNKDC7mHyuBzMbgryvUe38q1VmzCtP68NpQv880NbuOvhLfz3CxbyiYuP59FNw+yMHziQ9MIT27h2xTwN0zxC2WKZsWyR9qif+a1hYgHPVG9JREQEUFFdRERERA6HZUFqNwxuhHLOjnuplexxo2R3zKcHD/85/igYxcnbkxyeUnZvzIsXGub9uTNdJoxlWQyn7e700UyJxqBHsRlHqD3q53uPbuUbf9p0yGNMC/7pgc0A3PWBM3j7tx8DYG5TkL89fwEnd8Wqstd6YVkWI5kiJhbHtUWY0xzUAF0REakpupoSERERkddnlGG0F0Y22Z3p0c6p3tGfFbOw6zk48a/gsf99+M876R1QrJx3LFWgYnpV5EsG/SNZ+seyOHHQGfPjVPf/EQl5XZRNi2+tOnRBfX/feXAzV5w5m3MXtnBce5i3n9qlvPojVCybDKXyNIS8LGgN0xqpgU9EiYiIHERFdRERERE5tGIWhjdCfDsEm8EbnOod/dnQRlh1GzTOg3d9F1Z/8/CGlTqcsPxvIB+f7B3KwUpZyI6BS8X0yWRZFoOpAr3DGeLZIk1BHwFvjXyyZJoJ+93cs7r3gMiX12Na8LO1/dzx7lMYy5Ymd3N1KJErkS2WmdMcZH5rGL9HP7ciIlKbavKW+Xe+8x3mzZuH3+9n+fLlPPnkk4c89gc/+AEOh+OA//n9B+ZjWpbFrbfeSmdnJ4FAgIsvvphNmw7sNBgdHeWqq64iGo3S0NDA9ddfTzqdnpSvT0RERGRayIzArmchvgMiHbVTULdMePE++M1/h+RO2LbaLtSe+3eH9/w3f8qOrlGnevWUspDYCYWMfRNk9jLoWKyC+iTIFsu8MpDkxR0J8kWDrlhABfVjEPK5+cP6gSN6zh/WD9AQUnb6kSgbJrsTOSwsFnfHOLEjqoK6iIjUtJorqt93333cfPPN/M//+T959tlnWbJkCZdccgmDg4fOyIxGo+zevXvf/7Zt23bA+le+8hW+9a1vcdddd7F27VpCoRCXXHIJ+Xx+3zFXXXUV69ev549//CO/+93veOSRR/jwhz88aV+niIiISM0yTYj32wX1YtruJnbVyHC47Cjc/2l44rtglv/8+H0fgKXXw5v/3u5Er8ThhPM/DWd/1M6Hl8lX3FtML2ahsWe/YnrDVO+s7pimxe5Ejuf64/SP5GgMemgO+3Ao7uWYeFxOkrnyGx+4n2S+jMel7/vhyhTK7EnlaYv4WDK7ga6GAE4N0RURkRrnsCzrMD/IVh3Lly/nzDPP5Nvf/jYApmkye/ZsPvrRj/LpT3963PE/+MEP+MQnPkE8Hq/4epZl0dXVxd/93d/xyU9+EoBEIkF7ezs/+MEPeN/73scrr7zCokWLeOqpp1i6dCkA999/P29729vYsWMHXV1db7jvZDJJLBYjkUgQjUaP8qsXERERmW
LlIoxshtGt4AvbA0BrxY6n4cEv21ncB2tbBJf8A3QusYvta++CV34L+aQ9lPSkd9iRL06XXVA3FMswqYp7M9PdPoh07Y15aZjqXdWtdKFM33CGXfEcfo+LhoBHxfQJMJYpsnhWjPfc9Thbhg7/ky0LWkP85sZz6D2C58xEpmUxnC7gcEBPc4jZTUHlz4vMAKqfSb2oqX+xisUizzzzDBdffPG+x5xOJxdffDFr1qw55PPS6TRz585l9uzZvPOd72T9+vX71np7exkYGDjgNWOxGMuXL9/3mmvWrKGhoWFfQR3g4osvxul0snbt2on8EkVERERqVz4Ju1+wi+qh5topqBslWPsv8PtPViioO+C0q+Ad37KLtvFtdlb6sg/DDQ/CJ16y/3/Zh+3H4/0qqE+m1zrTS3s702edCR0nq6A+SQzTYsdYluf6x9g5lqMl7KMx6FVB/RjlSwb3PtXPh3/yNI9vHuYtizqO6PmXLu4kWziy7vaZJl8y2J3IEfG7WTKrgZ7WsArqIiIyrdTUoNLh4WEMw6C9vf2Ax9vb29mwYUPF55xwwgncfffdnHrqqSQSCb72ta+xYsUK1q9fz6xZsxgYGNj3Gge/5mtrAwMDtLW1HbDudrtpamrad8zBCoUChUJh36+TyeSRfbEiIiIitSS1B4Y2QDEF0U5w1shlYnInrLrd3tvBgs1wwWeg+4wDHy9mlJdebft3pjf2QMOs2rkpU6cSuRJ9wxkGknlCXjddDYGp3tK0Z5gWD24Y5MdrtzGaKQLw4ye28fX3LuFfHtlyWMNKnQ64ZsU8kjndvDuUsWyRQtlgXnOIeS0hZaeLiMi0VCPvlo7e2Wefzdlnn73v1ytWrOCkk07iX/7lX7j99tsn7bx33HEHX/ziFyft9UVERESqwjRgbBsMv2pHo0S6oFa6XDf/CR79R7vr+WCzz7Lz0dUBPbWKGcjFVUyvopJhsiueo284Q6Fs0hr24VGH7zF7tn+Me1b30jdy4N83a3tHGcuU+O8XLOSfHtj8hq/zsYuOw+10kC0ak7XVaatkmAylCoT8bk7pbqA9qsx/ERGZvo6pqN7f309/fz/nnHPOvsdeeOEFvv71r1MoFHj/+9/PZZdddtiv19LSgsvlYs+ePQc8vmfPHjo6Du8jdx6Phze96U1s3mxf8Lz2vD179tDZ2XnAa5522mn7jjl4EGq5XGZ0dPSQ573lllu4+eab9/06mUwye/bsw9qjiIiISE0o5WF4E4z1QSAGvshU78hWysLqb8Krfxi/5vTA8o/A4strp/g/E+1fTG9eANEuFdOrYCxTpHc4w2AqT9TvoSnkm+otTXu9wxl+8Hgvz/bHK647gJ88sY3P/dVJuJwOvrVqU8WOdafDLqjfcO58to9WuBE4w6XzZRL5Ip0NARa0hgn7pn1/n4iIzHDH9C/Zxz72MdLpNH/6058Au1B9wQUXUCwWiUQi/OpXv+KXv/wl7373uw/r9bxeL2eccQarVq3aV4w3TZNVq1Zx4403HtZrGIbBSy+9xNve9jYAenp66OjoYNWqVfuK6MlkkrVr1/K3f/u3gN3tHo/HeeaZZzjjDPvjww888ACmabJ8+fKK5/H5fPh8uogVERGRaSo3BoMbIDMMkTZwead6R7bhV2HVbZDYMX4tNhsuuhVajqv+vsRWzEB2DDx+FdOrqFg22T6aZftolrJp0REN4HLqptKxGEkX+Onafv70yh4Olepy+pwGrlnRQ09LiJ1jOa4+ex5XLZ/LDx7v4/51u0nmy0T9bi5d3Mk1K+bhdjrYPpqlZBxGTswMYZj2MFK308FJnVG6GwLKThcRkbpwTEX1J598ko9//OP7fv2jH/2IXC7HunXr6Onp4dJLL+VrX/vaYRfVAW6++Wauvvpqli5dyrJly/jGN75BJpPh2muvBeBDH/oQ3d3d3HHHHQDcdtttnHXWWSxcuJB4PM5Xv/pVtm3bxl//9V8D4HA4+MQnPsGXvvQljjvuOH
p6evj85z9PV1fXvsL9SSedxKWXXsoNN9zAXXfdRalU4sYbb+R973sfXV1dx/ItEhEREaktlgXJXTC0EcoFiHWBowYKHJYFL/0KnvwXMCsM+Dv+rbDyo+AJVn9v8udiujcALQsh2g3+6FTvakYYSRfoHc4wlCrQGPQSUofvMckWy/zbczv59XM7KZTNisfMaw5y7YoeTp/buO+xkmGxYyxH0Ovi6rPn8rfnL8DjclAyLLKFMslcSZEvB8mXDEYyBVrCPha0hmkM1cjNWxERkQlwTFdko6OjBwz4/N3vfseb3/xmFixYAMC73/1uPvOZzxzRa15xxRUMDQ1x6623MjAwwGmnncb999+/b9Bof38/Tuef3/iNjY1xww03MDAwQGNjI2eccQaPP/44ixYt2nfMpz71KTKZDB/+8IeJx+Occ8453H///fj9/n3H/PSnP+XGG2/koosuwul0cvnll/Otb33rqL4vIiIiIjXJKMFoL4xstjuNo4cXrzfpcmPw0P+C7U+MX/ME4dy/g4UXVX9fAsU0ZOMqpk+BfMlg+2iW/rEsWNAZU3f6sTBMiz++vIefPrmNeLbyENGmkJcPLJ/DhSe2H/J7nS0ae4vnhUnc7fRmWRZj2RIlw2R+S5i5LUF8bg0jFRGR+uKwLOuoP5s2e/Zsrr/+er7whS8Qj8dpb2/nzjvv5KabbgLg29/+Np/5zGdIJpMTtuFalUwmicViJBIJolG90RAREZEaU8zYcS/JnRBqrp2O7x1Pw4P/ALnR8WutJ8FFn7cjRqS69i+mR2ftjXnRNW41WJbFULrA1qEM8WyRxqCXoFfd6UfLsiye3jbGPY/3HTLr3O9xcvnps7jstG78HhV/j0XJMBlK54n4PCxoC9MW0TBSETmQ6mdSL47p6uziiy/mW9/6FtFolIceegjTNA8YTPryyy9reKeIiIjIVMsMw+ArkE9AtBOcNVCgM8vw1PfhhXuhUqLxkvfDmdfXxl5nkmLaHkDqCdjZ9SqmV1WuaLBtJMOOsRxup4POWACnCpJHbctQmrtX9/LijkTFdacD/mJRB1ctm6NokgmQzJVIF8t0xYLMbw0pqkhEROraMf0rd+edd/Lqq6/yyU9+Eq/Xy9e+9jV6enoAKBQK/OIXv+DKK6+ckI2KiIiIyBEyTUj0w/AmsEy7QFoLBbrkLlh1Owy9Mn4t0AQXfAZmLa3+vmayQsq+6eIJQvNxEOsGX2SqdzVjmKbFYKpA73CaRLZEc9injuljMJQq8OMn+nho49Ahh5AundvINSvmMbc5VNW91aPXhpF6XA4WdUboaggqqkhEROreMRXV29vbWb16NYlEgkAggNf757v7pmmyatUqdaqLiIiITIVyAYY3Q7wPvOHa6TbevAoe/TqUKsQwzF4O538aAo3j12RyHFBMP94eXKtielVlCuV93ek+t4uuhoDiMo5StljmV8/s4DfP76JoVB5COr81xHUre1gyq6G6m6tT2WKZsWyJ1oiXha0RYkHPVG9JRESkKibk81ixWGzcY4FAgCVLlkzEy4uIiIjIkcgnYGgDpPZAuA3cvqnekV1EX/1P8Op/jV9zumHZR+CUy8HhHL8uE2//YnrLCXYskIrpVWWYFgPJPL1DadKFMq1hP163fv6PRtkw+cP6AX7+1HYSucpDSFvCXj541jzOP6FVkToTwLIsRjNFypbFwrYQc5pC+vkVEZEZ5ZiK6qtWreLZZ5/lf/yP/7HvsbvvvpsvfOELFAoFrrzySr72ta/hcumjiyIiIiJVkRqwB5IW03bci7MGrsOGN8Gq2yCxffxabBZcdCu0HF/9fc1EhZSdme4NqZg+hZL5En3DGXbHcwS9brpi6k4/GpZlsbZ3lB883sfOeK7iMQGPi/csncU7lnThc9fA34d1oFg2GUoXaAh6mN8aojWsYaQiIjLzHFNR/Qtf+AJz587d9+uXXnqJj3zkI5x66qksXLiQb33rW3R0dPD3f//3x7xREREREX
kdpgFjfTD8Krg8dib2VLMsWPevsPZfwKzQPXr8JbDy43a3tEyufcX0MLSeaN9w8YWnelczTtkw2RXP0TuSIV8yaY348bjU3Xs0Xt2T4u7Vvazflay47nTAWxd38r4zZ9MQ1BDSiZLIlcgUSsxpCtLTEibg1Y0KERGZmY6pqP7KK69w+eWX7/v1j3/8Y6LRKI8++ijBYJC/+Zu/4Uc/+pGK6iIiIiKTqZSDoVch3g/BBrtwOtVycXj4Tuh/YvyaJwjn3ATH/UXVtzXjFFKQS9id6a0n7e1Mr4GfjxkokS3RO5xmIJkn4vPQFauBWKZpaE8yz4/WbOORTUOHPOas+U1cffY8ZjXqht1EMUyLoXQen9vF4lkNdEb9ODWMVEREZrBjKqpnMhmi0T8Pvbr//vu59NJLCQbti5czzzyTn/zkJ8e2QxERERE5tOyoHfeSG4FIG7hqoCNz5zPw4D9AdmT8WuuJcNHnIVoDnfT17IBi+okqpk+hkmGycyxL30iWsmHRHvHjVnf6EUvny/zime38xwu7KJtWxWOOawtz3coeFnePn/klR88eRlqkI+qnpzVMLKBhpCIiIsdUVJ89ezZPPfUU1113HZs3b2bdunX83d/93b710dFRfD51YIiIiIhMOMuC5E4Y2ghGwY7zmOohn2YZnr4Hnv8ZUKHoteT9sPQ6O55GJp5l2Vn6KqbXjNFMkd7hNEOpArGAl+bQMb39mpFKhsl/rdvNvU9uJ1UoVzymLeLj6rPncc5xLRpCOoFMy2IkXQSHxXFtEeY0BxVXJCIistcxXdVdddVV3HbbbezcuZP169fT2NjIO9/5zn3rzzzzDMcfr6FTIiIiIhPKKMHIFhjdYkepRDqnekeQ3A0P3A6DL49fCzTCBZ+BWWdWf18zgWXZnen5BHgjdjE91mUX1mVKFMoG20ez9I9msUzoiAZwKSrjiFiWxeNbRvjhmj52J/IVjwn5XLz3jNn81aldeN0q9k6kQtlgOF2gIehlYVuYlrCa5URERPZ3TEX1z372sxSLRX7/+98zZ84cfvCDH9DQ0ADYXeoPPfQQH//4xydinyIiIiICUEjb3enJnRBqAU9gqncEWx6AR74Opcz4tVlnwvm3QLCp+vuqdwcX09sW2Z3pKqZPGcuyGE7b3ekjmSJNQS9Br7rTj9SG3Um+v7qXDQOpiutup4O3ndLJFUtnE1UUyYSLZ4vkSgZzm+1hpH6PhpGKiIgczGFZVuVAOjkiyWSSWCxGIpE4IGdeREREZMKkh2Bog11EjbSDc4qLdaUcPP5PsPH349ecblh2A5zynqmPpak3BxfTG2armF4D8iWD/hG7O93pcNAc9iqK5AjtTuT44eN9rN5SYR7DXisXNPOhs+fR1VADNxTrTNkwGUwXCHldzG8N06FhpCIyCVQ/k3oxYe/E0uk027dvB+ys9XBY2Y0iIiIiE8I0Ib4NhjcB1t789CkudAxvglW3QWL7+LVotz2MtPXE6u+rnr1WTC8kwRuG9pMh0qFi+hSzLIvBVIHeoQzxXJHmkE+dvUcomStx39Pb+f1Luw85hPTEjgjXrezhpE4VYCZDulAmmS/SEQ0wvzVExK9PAIiIiLyeYy6qP/XUU3zqU5/isccewzRNAJxOJ+eeey5f+cpXWLp06TFvUkRERGTGKhfsAvZYH/ij4ItM7X4sC9b/GzxxF5il8evHvQVWfgK8wapvrW5Zll1Izyft3/+2RXaOvr7HUy5bLNM3nGFnPIfH6aQrFsAx1Te8ppFi2eR3L+7iF09vJ1M0Kh7TEfVzzYp5rFjQrO/tJDAti+FUAacTTmiPMKsxiFvDSEVERN7QMRXV165dy/nnn4/X6+Wv//qvOemkkwB45ZVX+PnPf855553HQw89xLJlyyZksyIiIiIzSi4OwxshtQfCbeCe4kFx+Tg89L+gf834NU8AVt4Ex7+l6tuqWwcX09sX7+1MVzF9qpmmxUAyz9bhDKl8idawD59b3emHy7
QsHt00zI/W9DGYKlQ8JuJz875ls3nr4k48KvJOinzJYCRboDnkY0FrmKaQd6q3JCIiMm0cU6b6xRdfTF9fH4899hgdHR0HrO3Zs4eVK1fS09PDH//4x2PeaK1TJpSIiIhMGMuC1ICdn17KQrgdnFNcsNv1HDzwJchWyDpuPQEu/DzEZlV/X/Xo4GJ6w1wV02tIKl+ibzjL7kQOv8dFQ8CjDuojsG5ngrtX97JpMF1x3e108PYlXbz3jNmE/RryOhksyyKeLVEwTOY0BZnbHFRkkYhUjepnUi+OuVP91ltvHVdQB2hvb+fDH/4wt99++7GcQkRERGRmMcow2gsjm8HtsfPTp5JZhmd+AM/9FKjQi3HqFXDmX4NL+bvH7IBielSd6TXGMC12J3L0DmfIFgxaIz51UB+BHWNZfrimjye2jh7ymPOOa+WDZ8+lI+qv4s5mlpJhMpjKE/F7OKUjRnvUp5tCIiIiR+GYiupOp5NyuXzIdcMwcDp1oSkiIiJyWIpZO+4lvh2CTVM/gDK12+5O37N+/FqgEc6/BWYr5u+YWRYUEpBP/bmYHu20I3WkJiRyJfqGM+xO5An73HQ16PfmcCVyJX7+ZD//tW43h5hBysldUa5b2cPx7VM8M6LOpfIlkvkyXQ0B5reGCfv0SQAREZGjdUz/iq5YsYLvfOc7XHnllcydO/eAtf7+fv75n/+ZlStXHtMGRURERGaEzAgMvQLZUbs7eao7v7c+BI98FYqZ8WuzzoTzPw3B5qpvq64cXEzvOMX+vVcxvWaUDJNd8Rx9wxkKZZP2iE9DHA9ToWzw2+d38ctndpArVR5C2t0Q4JoV81je06Ru6UlkmBYjmQIup4NFnRG6G4O4nPp+i4iIHItjylR/7rnnOO+88yiXy7zrXe/i+OOPB2Djxo385je/weVy8dhjj7FkyZIJ23CtUiaUiIiIHBXLgsR2GH4VjJI9kNQxhUW7Ug7WfBs2/Of4NYcLlt0Ap753avc43R1cTG+cq2J6DRrLFOkdzjCYyhPze5XvfZhMy+KhjUP8+IltDKcrDyGN+t1cuWwOl5zcoZsUkyxXNBjNFmkJe1nYFqYhqGGkIjK1VD+TenFMV4ZvetObWLt2LZ/97Gf57W9/SzabBSAYDHLppZfyhS98gZaWlgnZqIiIiEjdKRdhdAuMbAFfaOo7v0c2w6rbIN4/fi3aBRfeCm0nVn9f9cKyIJ+AQgr8MXWm16hi2WT7aJb+0QyGCR3RgLp6D9MLO+LcvbqXrUMVPuECeF1O3nlaF5efPouQokcmlWVZjGaKlE2LBa0h5jQH8bk1jFRERGSiHFOn+v5M02RoaAiA1tZWnE4nX/7yl7n11lsxjMof96snutMmIiIiR6SQgqGNkNwF4VZwT+FgPsuC9f8Oa79rd8sfbOHFcM5NU5/xPl0dXExvmKNieg2yLIuRvd3pI+kiDQGPCr+HqX80yz2re3l629ghj7nwhDY+cNZcWiO+Ku5sZnptGGlDwMv81hCtEQ0jFZHaofqZ1IsJu0p0Op20t7dP1MuJiIiI1K/UHhjaAMWUPZDSOYWFu3wCHv4KbFs9fs3th3NuhuPfUv191YOKnemd4JnCGyhSUb5k0D+SpX8siwPoiPrVnX4YxjJFfvZkP//v5YFDDiE9dVaMa1f0sLAtXN3NzVDJXIl0ocSsxiDzW0MEvboxJCIiMhn0L6yIiIhItZgGjG2z89OdToh0wVR2D+56Dh78MmSGx6+1HA8X3QqxWdXf13RnmZBPQjG93wBSFdNrkWVZDKUKbB3OMJYp0hzyEfAqIuON5EsGv35+J//67A7yJbPiMbObgly3Yh5nzG1Ul3QVGKbFcLqAx+3g5O4YnTHFFomIiEwmFdVFREREqqGUh+FNEN8G/ij4IlO3F7MMz/wQnvsJUKG99JT32gNJXZ6qb21as8y9nelpuzO9fbGK6TUsVzToG0mzYyyPx+mgqyGAU8Xf12WYFg9s2MNP1vYzmi
lWPKYh6OGqZXP5i0XtKupWSbZYZixbpC3iZ0FrmFhQf3eLiIhMNhXVRURERCZbbgwGN0JmEMJt4J7CTOHUADzwJdizbvyavwHOvwXmLK/6tqa1g4vpnUsg3K5ieo0yTYvBVIGtQ2mS+TLNIS9+j7rT38iz/WPcs7qXvpFsxXWv28m73tTNu9/UrciRKnltDoCJxcK2MHOaQnjdzqneloiIyIxwxFc7zz777GEfu2vXriN9eREREZH6YVn2INKhjVDOQ6wbHFNY8Nj6EDzyVShmxq91L4ULboFgc9W3NW2pmD7tpAtl+oYz7Irn8LtddMX8iiZ5A73DGe5Z3ctz2+MV1x3AxSe1c9XyOTSHNYS0Woplk6F0gYaghwWtYQ2AFRERqbIjLqovXbr0sC88LcvSRaqIiIjMTEYZRrfC6BZwee2BpFOlnIfHvwMb/mP8msMFZ/41LLliagv+08kBxfQGu5ge6ZjaTyDI6zJMi4Fknt6hNJlimZaQXx29b2AkXeCna/v50yt7KoVEAXD6nAauWdFDT0uoqnub6RK5EtlimTlNAXpawpoDICIiMgWOuKh+zz33TMY+REREROpHMWMPI41vtzu/vcGp28vIFlh1m53lfrBIF1z0eWg7qfr7mo4sE3JxKGYh0KBi+jSRzJfoHcqwO5Ej7PPQFZvCP4/TQLZY5t+e28m/P7eTYrnyENJ5zUGuXdHD6XMbq7y7ma1s2N3pAa/LHkYa9eNUbr2IiMiUOOKi+tVXXz0Z+xARERGpD5lhGHzFLr5GO8E5RdnClgUv/xqe+GcwSuPXF1wE594MXnWYvqGDi+ldx9kxLyqm17SyYbIrnqN3JEO+ZNIW8eNxqTv9UAzT4o8v7+GnT24jnq3wdwbQFPLygeVzuPBEDSGttkyhTDxXpCPqZ35bmKhfw0hFRESmkibIiIiIiEwE04TEdrtD3TL25qdPUdEpn4CHvwLbVo9fc/vhnE/AcZdM3f6mi33F9AwEGlVMn0bi2SK9wxkGU3nCXg9dMf2eHYplWTy9zR5Cun0sV/EYv8fJ5afP4rLTujXUtcpMy2I4XcDhgBPaI8xqCurmkIiISA1QUV1ERETkWJULMLwZ4n1257e/Zer2sut5ePDLkBkav9Z8HFx0KzTMrvq2phUV06etYtlk51iWbSNZyqZFW9iPWwXIQ9o8mOaex3t5cUei4rrTAW9Z1MGVy+bQGPJWeXeSLxmMZAo0hbwsaA1rEKyIiEgNUVFdRERE5FjkkzC0EVK7Idxqd4JPBbMMz/4YnvuxXRQ+2CnvgWU32ENTpTLLhNwYlHL2ANKu4yDcAW59z6aDkXSB3uEMw+kCsYCXZp/e6hzKUKrAj5/o48GNFW6+7bV0biPXruxhTpMy6KfCWLZIoWwwrznEvJaQPiEgIiJSY3SlKSIiInK0UgMwuAFKGYh2gXOKih7pPbDqdtizbvyavwHO/zTMOavq25o2TAPy8T8X01tO2NuZrmL6dFAoG2wfzbJtNAsmdEQDyvs+hEyhzK+e2cFvX9hF0ag8hHR+a4jrVvawZFZDdTcngD0LYDBVIOR3s7g7RkfUj0NRXSIiIjVHRXURERGRI2UaMNZn56e7PHZBfapsfRge+SoU0+PXuk+HCz4Lwebq72s6UDF9WrMsi+F0kd7hNKOZIo1BL0Gv3t5UUjZM/rB+gJ892U8yX654TEvYywfPmsf5J7TiVBF3SqTzZRL5Ip2xAAvawoT1aQsREZGapX+lRURERI5EKQdDr0K8H4IN4A1PzT7KeVjzHXjlP8avOZxw5vWw5P32f8uB9i+mBxpVTJ+G8iWDvuEMO8ZyuJwOOmMBFYIrsCyLtb2j/ODxPnbGKw8hDXhcvGfpLN6xpAufWxEjU8G0LIZTBVxOByd1RuluCGgWgIiISI1TUV1ERETkcGVH7fz0zBBE2qcun3x0K6y6ze6WP1ikEy78HLSfXPVt1bzXiunFHARVTJ+OLMtiMFWgdyhDPFekOe
RT1vQhvLonxd2re1m/K1lx3emAty7u5P3L5hALeKq8O3nNa8NIW8I+FrSGNRBWRERkmlBRXUREROSNWBYkd9oFdaMIse6p6QC3LHjlt3aHulEcv77gQjj35qnrnq9VBxfTW0+EUJuK6dNMplBm20iGnfEcXpeLrlhAWdMV7Enm+dGabTyy6dBDSM+a38TVZ89jVqOGkE4Vy7IYy5YoGSbzW8LMbQnqkwIiIiLTiIrqIiIiIq/HKMHIFhjdAp4gRDqmZh/5pJ2d3vfo+DW3H1Z+HI6/FFRk/LNKxfRwu52DL9OGaVoMJPNsHc6QzpdoCftUfKwgnS/zi2e28x8v7KJsWhWPOa4tzHUre1jcHavy7mR/JcNkKJ0n4vNwYmeMtohPN4hERESmGRXVRURERA6lkLa705M7IdRsF9Wnwu4X4IEv2bEzB2teCBfdCg1zqr+vWpZPQj5hD2lVMX3aSuVL9A5n2B3PEfC61Z1eQckw+a91u7n3ye2kCpWHkLZFfFx99jzOOa5F2fNTLJUvkSqU6YoFmd8aIqRhpCIiItOS/gUXERERqSQ9BEMb7E7naCc4p+CyySzDsz+G534Mljl+ffHlsPwjU5ftXovKBcgMgycA7YshNksxL9NQ2TDZncjTO5whVzRojfjxaHDjASzL4vEtI/xwTR+7E/mKx4R8Lt57xmz+6tQuvG59/6aSYVoMpwt4XA4WdUboagjicuoGh4iIyHSlorqIiIjI/kwT4ttgeBNgQbR7aiJV0oN2d/rAi+PX/DF489/D3BXV31etMg3Ijtj/H5sNTT3gj071ruQoJLIl+kYy7E7kCfvcdDUEpnpLNWfD7iTfX93LhoFUxXW308HbTunkiqWziWoI6ZTLFQ1Gs0VaI14WtIZpCOpGn4iIyHSnorqIiIjIa8oFu5g+1mcXZH2RqdlH76PwyFegUKFg1vUmuOCzEGqp/r5q1b6olxZoXgDhNmXLT0Mlw2TnWJZtI1mKhkl7xIdb3ekH2BXP8aM1fazeMnLIY1YuaOZDZ8/TzYgaYFkWo5kiZctiYWuIOc0hfWJARESkTqioLiIiIgJ2UXZoA6T22EVZt6/6eygX4Il/hpd/M37N4YSl18OS94FTQxoBRb3UkbFMka3DaYZSBWJ+L02hKfjzV8OSuRL3Pb2d37+0+5BDSE/siHDdyh5O6tQnNGpBsWwylC7QEPAwvy1Ea1jDSEVEROqJiuoiIiIys1kWpAbsgnopC9GuqSlaj/bCqi/aXfIHi3TAhZ+H9pOrvq2apKiXulEoG+wYzdE/msEwoSMaUM70foplk9+9uItfPL2dTNGoeExnzM/VZ89jxYJmFW1rRCJXIlMoMacpSE9LmIBXN0JFRETqjYrqIiIiMnMZZbuYPbIZ3B67oF5tlgWv/Aes+TYYxfHr8y+Ac2+euiiaWlNIQS6+N+plPoTbFfUyDVmWxUimSO9whpF0kcagh6BXb01eY1oWj24a5kdr+hhMFSoeE/G5ed+y2bx1caeGuNYIw7QYSufxuV2c3B2jKxbAqZtEIiIidakmr76+853vMG/ePPx+P8uXL+fJJ588rOfde++9OBwOLrvssgMedzgcFf/31a9+dd8x8+bNG7d+5513TuSXJSIiIrWkmIU9L8HwBvBHINhc/T3kk/DH/wmP/eP4grrbD+f9D7joVhXUwY56SewEo2RHvcxaanfwq6A+7eRLBpv2pHl+e5xUrkxH1K+C+n7W7UzwyV++wNf+38aKBXW308G73tTN//ngUt6xpFsF9RqRLZYZSOZoCftYMruBWY1BFdRFRETqWM1dvd53333cfPPN3HXXXSxfvpxvfOMbXHLJJWzcuJG2trZDPq+vr49PfvKTnHvuuePWdu/efcCv/+u//ovrr7+eyy+//IDHb7vtNm644YZ9v45E9AZWRESkLmVHYXAD5EbsTmeXp/p7GHgRVn0JMoPj15oXwIW3QuPc6u+r1limnZu+L+plHvhjU70rOQ
qWZTGUKrB1OEM8W6Qp6FMsxn52jGX5weN9rO0dPeQx5x3XyofOnkt71F/FncnrMS2LkXQRy2FxXFuEOc1B3egQERGZAWquqP6P//iP3HDDDVx77bUA3HXXXfznf/4nd999N5/+9KcrPscwDK666iq++MUv8uijjxKPxw9Y7+joOODXv/nNb7jggguYP3/+AY9HIpFxx4qIiEgdsSxI7IDhjXbHc7TLHgBaTaYBz/0Env2hXTA+2OLLYdmHp2ZQaq05OOol1AZOFaumo2yxzLaRDDvG8nicDjpjAZz6lAFg52///Ml+/mvdbg4xg5STu6Jct7KH49vV9FNLCmWD4VSBhpCXhW1hWsL6e1tERGSmqKmierFY5JlnnuGWW27Z95jT6eTiiy9mzZo1h3zebbfdRltbG9dffz2PPvro655jz549/Od//ic//OEPx63deeed3H777cyZM4crr7ySm266Cbe78reoUChQKPz545jJZPKNvjwRERGZSuUijG6BkS3gC01N3Et6EB78Mux+YfyaLwrnfxrmrqj+vmpNuWB3p3sCdtRLbBa4vVO9KzkKpmmxJ5Vn61CGVL5Mc8iL36PudLALsr99fhe/fGYHuVLlIaTdDQGuWTGP5T1NGkJaQ0qGSSJXomSYzG2xh5Hq51pERGRmqami+vDwMIZh0N7efsDj7e3tbNiwoeJzHnvsMb7//e/z/PPPH9Y5fvjDHxKJRHj3u999wOMf+9jHOP3002lqauLxxx/nlltuYffu3fzjP/5jxde54447+OIXv3hY5xQREZEpVkjB0EZI7oJQi12srba+R+Hhr0Khwo34ztPgws9CqLXq26oplgnZEXuArKJepr10oUzfcIad8RwBt4uumF+FYey4kIc2DvHjJ7YxnK48hDTqd3PlsjlccnIHbkWJ1IxC2SCeLWEBTSEPsxqDtIZ9yk4XERGZgWqqqH6kUqkUH/zgB/ne975HS0vLYT3n7rvv5qqrrsLvPzCH8Oabb97336eeeiper5ePfOQj3HHHHfh84z/Gd8sttxzwnGQyyezZs4/yKxEREZFJkx6089MLSYh2grPKlz/lAjzxXXj51+PXHE4441o47UpwzvAux/2jXjoU9TKdGabF7kSOvuEMmWKZlpAfr1u/lwAv7Ihz9+petg5lKq57XU7eeVoXl58+i5BvWr9VqyvZYplEroTL5aAt6qMzFqAp5MWlYrqIiMiMVVNXai0tLbhcLvbs2XPA43v27KmYdb5lyxb6+vp4+9vfvu8x07SzSd1uNxs3bmTBggX71h599FE2btzIfffd94Z7Wb58OeVymb6+Pk444YRx6z6fr2KxXURERGqEaUC8H4ZfBYdjb356lQsgY32w6jYY3Tp+LdwOF34eOhZXd0+1plyA7DC4FfVSD5L5Er1DGXYncoR9HrpiwaneUk3oH81yz+pent42dshjLjyhjQ+cNZfWiN5j1ALLssgUDBL5En6Pk9lNATpjAWIBjz5xISIiIrVVVPd6vZxxxhmsWrWKyy67DLCL5KtWreLGG28cd/yJJ57ISy+9dMBjn/vc50ilUnzzm98c1zn+/e9/nzPOOIMlS5a84V6ef/55nE4nbW1tR/8FiYiIyNQo5WF4E8S3gT8KvioP97Ms2PA7ePzbYFSId5h/Ppz7d9XfVy3ZF/VSgqiiXqa7smGyK56jdyRDoWTSFvHjUWwJY5kiP32ynz++PHDIIaSnzopx7YoeFraFq7s5qci0LFL5MulCiZDPzcK2EO1RPxG/Z6q3JiIiIjWkporqYMewXH311SxdupRly5bxjW98g0wmw7XXXgvAhz70Ibq7u7njjjvw+/0sXnxgd1dDQwPAuMeTySS//OUv+frXvz7unGvWrGHt2rVccMEFRCIR1qxZw0033cQHPvABGhsbJ+cLFRERkcmRi8PQBjv2JdwG7ip3fRZS8MjXoPfh8WsuH6z8KJzwl9Xvmq8linqpK/Fskd7hDIOpPBGfh6
aYOq3zJYNfP7+Tf312B/mSWfGY2U1BrlsxjzPmNqrzuQYYpkU8W6RgGER8HhZ1RmmN+Al4Z3g0l4iIiFRUc0X1K664gqGhIW699VYGBgY47bTTuP/++/cNL+3v78d5FG+67r33XizL4v3vf/+4NZ/Px7333ssXvvAFCoUCPT093HTTTQdkpouIiEiNsyxI7YbBjVDOQazbziyvpoGX4IEvQXrP+LWmBXDRrdA4t7p7qiX7ol78e6Neuqt/00MmTLFssmMsS/9IlrJp0R4JzPiMacO0eGDDHn6ytp/RTLHiMQ1BD1ctm8tfLGqf8d+vWlAyTMayRQzTojHk5fiGCM1hLz63iukiIiJyaA7Lsg7xQUQ5EslkklgsRiKRIBqNTvV2REREZhajbOeWj24BlxeCTdU9v2nA8z+FZ35gx5oc7OR3wfK/mbkF5AOiXmYp6qUOjKQL9A5nGE4XiAW8hDVUk2f7x7hndS99I9mK6163k3e/qZt3vamboFffr6mWLxkkciUAWsJeuhrs4aNuxRaJiEwq1c+kXuhqTkRERKa3YsYeRhrfDsFm8FZ5MGJ6EB78Mux+YfyaLwpv/hTMO6e6e6olinqpK/mSwfbRLP1jWTChI6ru9N7hDPes7uW57fGK6w7g4pPauWr5HJrDM/TGWg3JFMok8yU8LgcdMT+dMT+NQS/OGf5zLCIiIkdGRXURERGZvjIjMPQKZMcg0gGuKg+S61sND/8vKCTHr3UugQs+a+e6z0SKeqkrlmUxlC6wdShDPFukMeid8d3WI+kCP13bz59e2cOhPvp7+pwGrlnRQ09LqKp7kwNZe4ePpgolAl4X85pDtMf8RP1u5dmLiIjIUZnZV8IiIiIyPZkmJHfA0EawjL356VUsjJQLsPYuWP/v49ccTjjjGjjtKnDOwEzeA6JeZivqpQ7kigbbRjLsGMvhcjrojAVwzuBCZLZY5t+e28m/P7eTYrnyENJ5zUGuXdnD6XMaq7w72Z9hWiTzJbLFMmG/hxPaI7RF/YQUVyQiIiLHSFcTIiIiMr2UizC8CeJ94A2Bv6W65x/bBqtus/PbDxZuhws/Bx2nVHdPtWJf1EsTdCxU1Ms0Z5oWg6kCfcN2d3pz2IffMwNvFO1lmBb/7+UBfvZkP/FsqeIxTSEvH1w+lwtObJvxsThTqWyYxHMlioZJLOBhfmuMlhn+8ysiIiITS0V1ERERmT7ySbs7PbUbwq12tEi1WBZs/E9Y/U9gFMav95wH5/0P8EWqt6dacUDUy8kQm6Wol2kuUyizbSTDzngOr8tFV0NgxsZkWJbF09vsIaTbx3IVj/F7nFx++iwuO61bhdspVCgbxLMlLCyaQl66G4I0h714NHxUREREJpiK6iIiIjI9pPbA0AYopiDaCc4qXsYUUvDo12HrQ+PXXD5Y8VE48S+rG0FTCxT1UncM02Igmad3OEM6X6Il7MPnnrlF4s2Dae5Z3cuLOxMV150OeMuiDq5cNofGkLfKu5PXZItlErkSTqeD1oiProYATSGvPi0gIiIik0ZFdREREaltpmFHrgy/ameUR7qqW7weWAcP3A7pPePXGnvg4v8JjfOqt59asX/US/sCO/pGUS/TWipfonc4w+54jqDXTVds5nanD6by/OSJbTy4ceiQxyyd28i1K3uY0xSs4s7kNZZlkSkYJPJF/B4Xs5oCdEYDNAQ9M/bnVkRERKpHRXURERGpXaUcDL0K8X4INoA3XL1zmwY8/zN45h67I/tgiy6Ds/525sWcKOql7pQNk92JHL3DWXIlg9aIf8bGZWQKZX71zA5+88JOSoZV8Zj5rSGuW9nDklkN1d2cAGBaFql8mXShRMjnZmFbmLaon6jfM9VbExERkRlERXURERGpTbkxGNwAmSGItIOritEKmSF48B9g13Pj13wRePOnYN651dtPLTgg6mUWNPUo6qUOJLIleofTDCQLRHx2d/pMVDZM/rDeHkKazJcrHtMS9vLBs+Zx/g
mtONUJXXWGaZHIlciXy0R8Hk7qjNIW8RPwztx4IhEREZk6KqqLiIhIbbEsSO6y89PLRYh1g6OKXbPbHoeH7oRCcvxa5xK44LMQbqvefmqBol7qTskw2TmWpW8kS8kwaY/4cM/A7nTLsniid5QfPt7HznjlIaQBj4v3LJ3FO5Z0zeh8+alSMkzGskUM06Ih6GFhewMtYa9+L0RERGRKqaguIiIitcMowcgWGN0KHj9EO6p37nIBnvw/sO5fx685nHD61fCmD9i57jOFUbS79hX1UldGM0V6h9MMpQrE/F6aQzPz9/TVPSnuXt3L+l0VbqBhDyF96+JO3r9sDrGAokWqLV8ySORKADSHvXTvHT46E2/+iIiISO1RUV1ERERqQyENQxshuRNCzeCp4vC/+DZYdZtd0D9YqA0u+hx0nFq9/Uw1y4TsqH2jIdZtD2QNNEz1ruQYFcoG20ez9I9mMU3oiAZwOWdejMlAMs+P1/TxyKbhQx5z1vwmrj57HrMaNYS02rLFMvFsCY/bQUfMT2fMT2PQi3MG/qyKiIhI7VJRXURERKZeesiOe8knINoJzipdolgWbPw9PP5PUM6PX593Hpz3SfBHq7OfWnBA1MtiRb3UAcuyGE7b3emjmRKNQQ9B78x7G5DOl/nFM9v5jxd2UTYrDyE9ri3M9ef0cHKX5gVUk7V3+GiqUCLgddHTEqI96icacONQfr2IiIjUoJl3NS0iIiK1wzTtLvHhTYAF0S6oVgGlmIZHvg5bHxy/5vLC2TfCSW+v3n6m2v5RL22LoGG2ol7qQL5k0D+SpX8sixMHnTH/jBuyWTJMfv/Sbu57ajupQuUhpG0RH1efPY9zjmuZcd+fqWSYFsl8iVzJIORzc0J7hNaon7BPb1NFRESktulqRURERKZGuWAX08f6wBepbjf4nvXwwO2QGhi/1jgPLroVmuZXbz9TSVEvdcmyLAZTBXqHM8SzRZqCPgLeGTQPAPt78PiWEX64po/diQqfRAFCPhdXLJ3NX53ahUdZ3VVTNkwSuRIFwyQW8NDTEqI14sPvmVk/oyIiIjJ9qaguIiIi1ZdP2HEvqT0QbqteR7RpwAv3wtPft4vJB1v0Tjjr/5s5HdqKeqlL2WKZvpEMO8fyeJwOOmOBGdd9/cruJHev7mXDQKriutvp4G2ndHLF0tlENYS0aoplk3i2iIlFY9DLCY0BWsI+3dAQERGRaUdFdREREamu5G67oF7M2HEvzip1JmaG4cF/gF3Pjl/zhuHNn4Ke86qzl6lmFO3vh8urqJc6YpoWe1J5tg5lSOXLtIS9+Nwzq/N3VzzHj9b0sXrLyCGPWbmwhavPnktnLFDFnc1suaJBPFfE6XTQGvHR1RCgKeSdkYNyRUREpD6oqC4iIiLVYZTtqJfhTeD22FEj1dK/Bh660+6QP1jHKXDh5+wu7XqnqJe6lS6U6RvOsCuew+9x0RXzz6gBj8lcifue3s7vX9p9yCGkJ3ZEuG5lDyd1zqDBw1PIsiwyRYNErojf7WJWU4DOaICGoGdG/WyKiIhIfVJRXURERCZfMWsX0xP9EGgEb6g65zWKsPZfYN2/jl9zOOFNH4TTPwjOGXBJpKiXumSYFrsTOXqHM2QKZdoi/hkVpVEsm/zuxV384untZIpGxWM6Y36uPnseKxY0q5hbBaZlkcqXSRdKBL1uFraFaYv6ifoVsyMiIiL1Ywa8gxQREZEplR2FwQ2QG7ELua4qFVbi/bDqNhjZPH4t1Gp3p3cuqc5eptL+US+tJ0HjHEW91IlErkTfcIaBZJ6Q1013Q3Cqt1Q1pmXx6KZhfrSmj8FUoeIxEZ+b9y2bzVsXd86oGw1TxTAtErkSuVKZqN/DSZ1RWiM+gl695RQREZH6oyscERERmRyWBYkdMLzRLuxGu+zu8Gqc99X7YfU3oZwfvz7vHDjvU+Cv8wgIRb3UrZJhsiueo284Q6Fs0jrDBj2u25ng7tW9bBpMV1x3Ox28fUkX7z
1jNmG/3u5MtpJhEs+WKBkGjSEvC9sbZmSev4iIiMwsusoUERGRiWeU7A7x0a3gDUKwszrnLabh0f8NW1aNX3N54Owb4aR3QL1HQLwW9RJoVNRLnRnLFOkbybAnmSfq99AUmjmfOtgxluUHj/extnf0kMecd1wrHzp7Lu1RfxV3NjPlSwaJXAmA5rCXroYozSEv7hl0g0dERERmLhXVRUREZGIVUjC0EZK7INQCnkB1zjv4Mqy6HVK7x681zoOLboWm+dXZy1RR1EvdKpZNto9m2T6apWxadEQDuJx1fnNor0SuxM+f7Oe/1u3mEDNIObkrynUrezi+PVLdzc1A2WKZeLaEx+2gI+anI+anKejFOUN+HkVERERARXURERGZSOlBOz+9kIRoZ3UGgFomvPBzeOpusCoMKjzp7XD2fwd3HXeuKuqlro2kC/QOZxhKFWgMegn5ZsYlfKFs8Nvnd/HLZ3aQK1UeQtrdEOCaFfNY3tOkIaSTyLIs0oUyyVyJgNfF3OYgnbEA0YBb33cRERGZkWbGFbmIiIhMLtOE+DYYftWOVol2VSdiJTsCD/4D7Hxm/Jo3DOd9EuafP/n7mErFNGTH9ka9nAzhDkW91Il8yWD7aJb+sSxY0BmbGd3ppmXx0MZBfvxEP8PpykNIo343Vy6fyyWL2hU3MokM0yKZL5EtGoR9Lo7viNAW9ROeITd2RERERA5FV0MiIiJybEp5GN4E8T7wx8BXpfiF/ifgoTsgnxi/1r4YLvwcRDqqs5epYBQhPWTHu7SeBA2zwVPH3fgziGVZDKULbB3KEM8WaQx6CXpnxmX7Czvi3L26l61DmYrrXpeTd57WxeWnz5oxHftToWyYJHIl8mWThqCHnpYQrREffo+Gj4qIiIiAiuoiIiJyLHJxGNpgx76E26qT320U4cnvwUu/HL/mcMKbPginf7A60TNTQVEvdS1XNNg2kmHHWA6300FnLIBzBsRr9I9muWd1L09vG6u47gAuOKGND5w1l9aI5gRMlmLZJJ4tYlgWTSEvJzQGaA758Lr1aQARERGR/dXpu00RERGZVJZlDwQd3AjlnB334qxCB2N8O6y6DUY2jV8LtcAFn4Ou0yZ/H1NFUS91yzQtBlMFeofTJLIlmsMzoyt4LFPkp0/288eXBw45hPTUWTGuXdHDwrZwdTc3g+SKBol8EYfDQWvER2eDn+aQb0bEDYmIiIgcDRXVRURE5MgYZRjttQvbbp89kHSyWRZs+gM89g0o58evz10Jb/6UHT9TjxT1UtcyhfK+7nSf20VXQ6Duhz/mSwb//txO/u25HeRLZsVjZjcFuW7FPM6Y21j334+pki6USeSK+N0uuhsCdMQCNAY9+n6LiIiIvAEV1UVEROTwFTP2MNL4dgg2gzdYnXM+9r9h85/Gr7k8cNb/B4suq85g1GqzTMiN2bn1sW5onGd3qUtdMEyLgWSe3qE06UKZ1rC/7mM2DNPigQ17+MkT/YxmixWPaQh6uGrZXP5iUbs6pSeBaVmk8mXSxRJBj5sFrWHaY36ifs9Ub01ERERk2lBRXURERA5PZgSGXrHjRyIddkF7sg2+Aqtuh9Su8WsNc+GiW6F5weTvYyrsH/XStkhRL3UmmS/RN5xhdzxH0OumK1b/3enPbhvjnsd76RvJVlz3up28+03dvOtN3TNmMGs1GaZlDx8tlQn7PZzYHqEt6tf3WkREROQo6ApKREREXp9pQnIHDG0Ey7A7pie7+GeZ8MJ98NT/tc95sBP/Cs7+7+AJTO4+poKiXupa2TDZFc/RO5IhXzJpjfjxuOr7ZknvcIZ7Vvfy3PZ4xXUHcPFJ7Vy1fA7NYQ0hnWglwySeLVEyDGJBLwvbG2gJe/G56z+zX0RERGSyqKguIiIih1YuwshmGN0KvjD4Wyb/nNkRePAO2Pn0+DVvCM77HzD//MnfR7W9FvVSLijqpU4lsiV6h9MMJPNEfB66YvVdQB5JF/jJ2m2semWQQ8wg5f
Q5DVyzooeellBV9zYT5EsGiVwJC2gOeelujNIU8tb9TRwRERGRalBRXURERCrLJ+3u9NRuCLeCuwrd0v1r4aE7IB8fv9a+GC78nB09U28U9VLXSobJzrEsfSNZyoZFe8SPu44Lm9limX97bif//txOiuXKQ0jnNQe5dmUPp8/RjaOJli2WiedKeFwO2qI+uhoCNAW9OJVPLyIiIjJhVFQXERGR8VJ7YGgDFFMQ7QTnJF8yGCV48nvw0i8qLDrgTR+AM66e/H1Um1GEzDC4vIp6qVOjmSK9w2mGUgViAS/NoTr7Gd6PYVr8v5cH+NmT/cSzpYrHNIW8fHD5XC44sU1DSCeQZVmkC2WS+TIBj5O5TUE6Yn5iAU/dZ/WLiIiITIX6vaoXERGRI2caMLYNhl8FpwsiXZOfn57YAatus895sGALXPhZ6HrT5O6h2vaPeol2KeqlDhXKBttHs/SPZrFM6IgG6raIbFkWT28b457VvWwfy1U8xu9xcvnps7jstG78HmV5TxTTskjmSmSKBmGfi+PawrTH/IR9epsnIiIiMpl0tSUiIiK2Ug6GN8NYHwRi4ItM/jlf/QM89r+hnB+/NmcFnP8p8DdM/j6qSVEvdc2yLIbTdnf6SKZIU9BL0Fu/l9ybB9Pcs7qXF3cmKq47HfCWRR1cuWwOjSFvlXdXv8qGSSJXIl82aQh4OLkrRGvEpxsWIiIiIlVSv1f4IiIicvhyYzC4ATJDEGm340gmUzFrF9M3/3H8mtMDZ/0tnPyuye+SryajZH9/XV5oPREa5ijqpc7kSwb9I3Z3utPhoCsWwFlPP8P7GUzl+ckT23hw49AhjzlzXiPXrOhhTlOwijurb8WySTxbxLAsGkNeTmgM0Bzy4XXrxpyIiIhINamoLiIiMpNZFiR32QNJywWIdYNjkoszQxvsuJfkrvFrDXPgoluheeHk7qGa9o96CXdA83xFvdQZy7IYTBXoHcoQzxVpDtVvx3CmUOZXz+zgNy/spGRYFY+Z3xriupU9LJnVUN3N1bF8ySCeK+LAQWvER2eDn+aQr24jhURERERqnYrqIiIiM5VRgtFeGNlsd0xHOyb3fJYJL/7CHkhqGePXT/xLOPtG8AQmdx/VpKiXupctlukbzrAznsPjdNIVC9TlYMiyYfKH9fYQ0mS+XPGYlrCPD541l/NPaK3bDv1qs4ePFvG6nHQ3BOiIBWgIeHCqmC4iIiIypVRUFxERmYmKGTvuJbkTQs3gmeR4huwIPHQn7Hhq/Jo3BOd+EhZcMLl7qCZFvdQ907QYSObZOpwhlS/RGvbhc9dfd7plWTzRO8oPH+9jZ7zyENKAx8V7ls7iHUu66vJ7UG2WZZHMl0kXSgS9bua3hGmL+okFPFO9NRERERHZS0V1ERGRmSYzDIOvQD4B0U5wTvLlwPYn4aE77AiUg7Utgos+D5HOyd1DtSjqZUZI5Uv0DWfZncjh97jortPu9Ff3pLh7dS/rdyUrrjsd8NbFnbx/2RwVfCeAYVokcyVypTJhv4cTOyK0RvyEfHrLJiIiIlJrdIUmIiIyU5gmJPpheJNd/I12Te4gUKMET/1fePG+CosOOO0qWHrN5Bf1q6WYgeyoXURvPQkiHeBU1249MUyL3YkcvcMZsgWD1ogPj6v+4nwGknl+vKaPRzYNH/KYs+Y3cfXZ85jVqCGkx6pkmCRyJYplg1jQy/y2GC3h+s3lFxEREakHdfIuVkRERF5XuWAX08f6wBcBf3Ryz5fYAQ/cbg9APViwGS74LHSfPrl7qBZFvcwIiVyJvuEMuxN5wj43XQ11lP2/Vzpf5r6nt/O7F3dRNisPIT2uLcz15/RwclesyrurP4WywVi2BEBTyEN3Q5TmsLcub9SIiIiI1BsV1UVEROpdPgFDGyC1B8Jt4PZN7vk2/T947H9DqUL+8pyz4fy/B3/D5O6hGiwLcqOKeqlzJcNkVzxH33CGQtmkPeLDXWdFz5Jh8vuXdnPvU9tJFy
oPIW2L+Lj67Hmcc1yLhpAeo2yxTDxXwu1y0B710dUQoCno1fBRERERkWmkJt8RfOc732HevHn4/X6WL1/Ok08+eVjPu/fee3E4HFx22WUHPH7NNdfgcDgO+N+ll156wDGjo6NcddVVRKNRGhoauP7660mn0xP1JYmIiEyN1ADsfA7SQ3bcy2QW1ItZePAf7P8dXFB3emDFR+GSf6iPgnoxY3fju3zQeRp0naaCeh0ayxR5aUeCV3Yn8bpcdMYCdVVQtyyLxzYP899/9iz/97HeigX1kM/FdSvncdcHzuC841tVUD9KlmWRypfYGc+RKxnMaQpwxpxGTum2o15UUBcRERGZXmquU/2+++7j5ptv5q677mL58uV84xvf4JJLLmHjxo20tbUd8nl9fX188pOf5Nxzz624fumll3LPPffs+7XPd2BR4aqrrmL37t388Y9/pFQqce211/LhD3+Yn/3sZxPzhYmIiFSTacBoL4xsApcHYt2Te76hjbDqNkjuHL8Wmw0X3Qotx03uHqpBUS8zQrFssn00S/9oBsOEjmgAV50VPV/ZneTu1b1sGEhVXHc7HbztlE6uWDqbqIaQHjXTsoePZooGYZ+L49rCtEV9RPz6noqIiIhMZw7LsioHJk6R5cuXc+aZZ/Ltb38bANM0mT17Nh/96Ef59Kc/XfE5hmFw3nnncd111/Hoo48Sj8f59a9/vW/9mmuuGffY/l555RUWLVrEU089xdKlSwG4//77edvb3saOHTvo6up6w30nk0lisRiJRIJodJJzakVERF5PKQdDr0K8H4KN4A1N3rksE176JTz5PTArxEac8Da7Q90zzfOnX4t6KeUh0glNPRBsmupdyQSzLIuRTJHe4Qwj6SINAQ8hX831oByTXfEcP1zTx+NbRg55zMqFLVx99lw6Y9P8z+0UMkyLeLZIwTCI+b10NfhpjfgJeDV8VEREZjbVz6Re1NS7hGKxyDPPPMMtt9yy7zGn08nFF1/MmjVrDvm82267jba2Nq6//noeffTRisc89NBDtLW10djYyIUXXsiXvvQlmpubAVizZg0NDQ37CuoAF198MU6nk7Vr1/Kud71rgr5CERGRSZYdhcENkBuBSJvdUT2Z53r4TtheIabNE4Jzb4aFF03e+aulmLG/1kAjtJ4EkQ5wqjBWb/Ilg+2jWbaNZnEAHVF/XXWnJ3Ml7nt6O79/afchh5Ce2BHhupU9nNSpN7hHq2SYjGWLmKZFQ8jL8Q0RWsI+vO76iQ0SERERkRorqg8PD2MYBu3t7Qc83t7ezoYNGyo+57HHHuP73/8+zz///CFf99JLL+Xd7343PT09bNmyhc985jO89a1vZc2aNbhcLgYGBsZFy7jdbpqamhgYGKj4moVCgUKhsO/XyWTyML9KERGRSWBZdvTK0EYwCnZ+umMSizg7noYHvwy5sfFrbSfBhZ+39zCdGSXIDNvxOa0nQsPs6d9xL+NYlsVQqsDW4QxjmSLNIV9ddRMXyya/e3EXv3h6O5miUfGYzpifq8+ex4oFzTiUmX5U8iWDeK6IAwctYa89fDTkrasMfhERERH5s5oqqh+pVCrFBz/4Qb73ve/R0tJyyOPe97737fvvU045hVNPPZUFCxbw0EMPcdFFR9dBd8cdd/DFL37xqJ4rIiIyoYwSjGyB0S3gCdrxJJN5rqfvhhd+XmHRAaddCUuvBec0vsRQ1MuMkSsa9I2k2TGWx+N00NUQqJtBnKZl8eimYX60po/BVKHiMRGfm/ctm81bF3fiUfH3qGQKZRL5Il6Xk65YgM6GAA0BjwaPioiIiNS5mnrH29LSgsvlYs+ePQc8vmfPHjo6OsYdv2XLFvr6+nj729++7zHTNAG703zjxo0sWLBg3PPmz59PS0sLmzdv5qKLLqKjo4PBwcEDjimXy4yOjlY8L8Att9zCzTffvO/XyWSS2bNnH/4XKyIiMhEKabs7PbkTQi2T20md3AmrboehCp8eCzbDBZ
+B7jMm7/zVoKiXGcE0LQZTBbYOpUnmyzSHvPg99fP7vG5ngrtX97JpMF1x3e108I4lXbxn6WzCdZYZXw2WZZHMl0kXSgS8Lnqaw7TH/ET9bnX6i4iIiMwQNXUV7fV6OeOMM1i1ahWXXXYZYBfJV61axY033jju+BNPPJGXXnrpgMc+97nPkUql+OY3v3nIIveOHTsYGRmhs9Pu5Dv77LOJx+M888wznHGGXQx44IEHME2T5cuXV3wNn8+Hz+c72i9VRETk2KWH7AJ3PgHRzsntDt/0R3jsf0MpO35t9llw/qch0DB5559sr0W9ON2Keqlz6UKZvuEMu+I5/G4XXTF/3RRCd4xl+cHjfaztHT3kMecd18qHzp5Le9RfxZ3VB8O0SOZKZEtlwn4PJ3ZEaI34626YrYiIiIi8sZq7Arz55pu5+uqrWbp0KcuWLeMb3/gGmUyGa6+9FoAPfehDdHd3c8cdd+D3+1m8ePEBz29oaADY93g6neaLX/wil19+OR0dHWzZsoVPfepTLFy4kEsuuQSAk046iUsvvZQbbriBu+66i1KpxI033sj73vc+urqmeR6siIjUH9OE+DYY3gRYe/PTJ6koWMrC6m/Cq38Yv+Z0w/KPwOL/Nnnnn2yKepkxDNNiIJmndyhNplimJeSvm+GR8WyRnz+1nfvX7eYQM0g5uSvKdSt7OL49Ut3N1YGSYZLIlSgaJrGAh/ltMVrCvrr6dIOIiIiIHJmaK6pfccUVDA0NceuttzIwMMBpp53G/fffv294aX9/P07n4b8BcrlcvPjii/zwhz8kHo/T1dXFW97yFm6//fYDOs1/+tOfcuONN3LRRRfhdDq5/PLL+da3vjXhX5+IiMgxKRfsYvpYL/hj4JvEAtnwq7DqNkjsGL8Wmw0XfR5ajp+88082Rb3MGMl8id6hDLsTOUJeN12x4FRvaUIUyga/fX4Xv3xmB7lS5SGk3Q0Brlkxj+U9TXXTkV8thbJBPFvCwqIp5KW7IUhz2Kv8eRERERHBYVnWIfpZ5Egkk0lisRiJRIJoNDrV2xERkXqUi8PwRkjtgXAbuCcphswy4aV/hSf/Bczy+PXjL4WVH7OHok5H+0e9NMyFxjmKeqlT+ZLBYDJP70iGfMmkNeyri4KoaVk8tHGQHz+xjeF0seIxUb+bK5fP5ZJF7bjr4GuupmyxTCJXwuVy0Br20RkL0BTy4tLwURERkWOm+pnUi5rrVBcREZGDWBakdsPgRihn7biXyeqozo3BQ3fC9rXj1zxBOPdmWHjx5Jx7sinqpe6ZpkW6WCaVLzOcKpDIlcgWy0R8Hrpi9TEL54Xtce5+vJetQ5mK616Xk3ee1sXlp89S1vcRsCyLTMEgkS/h9ziZ3RSgMxYgFvCow19ERERExtGVtoiISC0zyjDaCyObwe2xC+qTZcfT8OA/2IXng7WeZMe9TOb5J9NrUS/+BuhS1Es9KZZNUvkSyVyJoXSBdKFMqWzhcTkJ+VzEAgGcdVAU3TaS4QeP9/H0trGK6w7gghPa+MBZc2mN1McNhGowLYtUvky6UCLkc7OwLUR71E/E75nqrYmIiIhIDVNRXUREpFYVs3bcS3w7BJvBO0lxK2YZnvo+vHAvUCEVbsn74czr7biU6Wb/qJeW46FxrqJepjnLssgWDVL5MqPZAqOZErliGdOEgNdFzO+tmwGkAGOZIj99sp8/vjxwyCGkp86Kce2KHha2hau7uWnMMC3i2SIFwyDi87CoM0prxE/Aq5ttIiIiIvLGpuG7YxERkRkgMwJDr0B2zO6qdk1S12RyF6y63T7XwQJNcMFnYNbSyTn3ZFLUS10pGybpgh3rMpjKk8qXKZQNXA4nIa+b1rC/7vKu8yWDf39uJ//23A7yJbPiMbObgly3Yh5nzG1URMlhKhkmY9kihmnRGPJyfEOE5rAXn1vFdBERERE5fCqqi4iI1BLLgsR2GNpod5DHusAxSV23m1fBo1+HUnb82uxlcP4tEG
icnHNPpgOiXk60i+qKepl28iWDZL5EIltiOFUkUyxTNi38bichn5vmUH1GnBimxaoNe/jpE/2MZisPIW0Ierhq2Vz+YlF73d1MmCz5kkEiVwKgJeylq8EePqohriIiIiJyNFRUFxERqRXlIoxugZEt4AtBqGVyzlPKwup/glf/a/ya0w3LPgKnXD55xfzJoqiXae21IaPJXImRdJFEzo51cTodBDxummdAAfTZbWPc83gvfSMVbnQBPreTd72pm3e9qZugV5fxhyNTKJPIlfC6HXTE/HTG/DQGvTh1M0JEREREjoGuxkVERGpBIWV3pyd3QbgV3P7JOc/wJlh1m90Nf7BoN1x0K7SeMDnnniyKepm2CmU7Gz2RLTGcLpApHjhktCEQmBGxJr3DGe5Z3ctz2+MV1x3AxYvauWrZHJrD9dmhP5GsvcNHU4USAa+LnhZ7+Gg04J4RP08iIiIiMvlUVBcREZlqqT0wtAGKKYh2Ts5AUMuCdb+Ctf8HzNL49eMvgRUfn7xhqJOlmIXsCPhjinqZBizLIlM0SOVLjKaLjGWLZIsGllWfQ0bfyEi6wE/WbmPVK4OVRgQDcPqcBq5Z0UNPS6iqe5uODNMimS+RLZYJ+z2c0B6hLeon5NNbHhERERGZWLrCFBERmSqmAWPbYPhVcDoh0gWT0UWZi8PDd0L/E+PXPAE452Y47i8m/ryTySxDekhRL9NA2TDtruG9Q0bT+TL5soHbaQ8ZbYt4ZlwueLZY5t+e3cm/P7+TYrnyENJ5zUGuXdnD6XOm4VyDKisbJvFciaJhEgt46GmJ0Rrx4ffoBpuIiIiITA4V1UVERKZCKW9HsYz1QSAGvsjknGfnM/DgP9jd3AdrPREu+rwd+zJdWBbkxqCUU9RLDcvt7UaP7411yRaNA4eMztAIE8O0+H8vD/Cztf3EcxU+MQI0hbx8cPlcLjixbcbdbDhShbJBPFvCwqIp5KW7IUhz2IunzrP3RURERGTqqaguIiJSbbkxGNwImaG9+emTUGA0y/D0PfD8z6BSsMSp74MzrweXZ+LPPVkOiHo5TVEvNcQ0LVKFMqm8PWQ0niuSKxq4nA6CM2TI6OuxLIunt41xz+peto/lKh7j9zi5/PRZXHZatzqs30C2aA8fdTodtEZ8dDUEaAp5dRNCRERERKpGRXUREZFqsSx7EOnQRijnIdYFjkkoNCZ3wwO3w+DL49cCjXDBZ2DWmRN/3smiqJeatP+Q0aHXhowaJj6Xi6DXRWPAq6GQwObBNPes7uXFnYmK604HvGVRB1cum0NjyFvl3U0flmWRKRgk8kX8HhezmgJ0RgM0BD36ORMRERGRqlNRXUREpBqMMoxuhZHN4PHbA0knw5YH4JGvQykzfm3WmXD+LdMnLmVf1Et2b9TL/Omz9zp0yCGjQNBjF9EVu/Fng6k8P3liGw9uHDrkMWfOa+SaFT3MaZpmA4KryLQsUvky6UKJkM/NwrYwbVE/Uf80+pSNiIiIiNQdFdVFREQmWzEDgxvsLvVQE3gmoYBWysHj/wQbfz9+zemGZTfAKe+ZnM74yXBA1MubFPUyRV4bMprMlxhKFUjnyxT2DhkN+mbmkNE3kimU+dUzO/jNCzspGRWil4D5rSGuW9nDklkN1d3cNGKYFolciXy5TMTn4aTOKK0RH0Gv3r6IiIiIyNTTVamIiMhkygzD4CuQi0O0wy5wT7ThTbDqNkhsH78W7baHkbaeOPHnnQxmGdLD4HTaUS8Nc8CrLt5qem3I6Fi2yEja7kY3TAu/xzWjh4y+kbJhcv/6AX7+ZD/JfLniMS1hHx88ay7nn9CKU5ElFZUMk7FsEcO0aAh6WNjeQEvYi8+tm2oiIiIiUjtUVBcREZkMpmkXuYdfBcuEWDdMdBHNsmD9v8ETd4FZGr9+3Ftg5SemR1FaUS9TxjAt0gcNGc2XTFwOBwGPa8YPGX0jlmXxRO8oP3y8j53xykNIAx4X71k6i3cs6VJx+BDyJY
NEzv57rDnspXvv8FH97ImIiIhILVJRXUREZKKVCzC8GeJ94A2DPzrx58jH4aH/Bf1rxq95ArDyJjj+LRN/3smgqJeq05DRifHqnhTff6yXl3cnK667nA7eenIH71s2h1hAGeCVZAplErkSHreDjpifzpifxqAXp2KFRERERKSGqaguIiIykfJJGNoAqQEIt4F7EqIydj4LD37ZLkQfrOV4uOhWiM2a+PNONEW9VI2GjE6sgWSeH6/p45FNw4c85qz5TVx99jxmNepn+mDW3uGjqUKJgMdFT0uI9qifaMCtmzkiIiIiMi2oqC4iIjJRUgP2QNJiGqJdE99tbZbh6R/A8z8FKgxAPPUKOPOvwVXjHbGKeqmKkmGSfp0ho+1Rj3K9j1A6X+a+p7fzuxd3UTYrDyE9ri3M9ef0cHJXrMq7q32GaZHMl8iVDEI+Nye0R2iN+gn79JZERERERKYXXcGKiIgcK9OAsT47P93lsfPTJ1pqNzzwJdizfvxaoBHOvwVmL5v48060Yhayo3YkjqJeJtxrQ0ZHM0VGM3Y3etk0CXjcGjJ6DEqGye9f2s29T20nXag8hLQt4uPqs+dxznEtullxkLJhksiVyJdNGoIeelpCtEZ8+D36sy8iIiIi05OK6iIiIseilIOhVyHeD8EGO0N9om15EB79GhQz49e6l8IFt0CweeLPO5EOiHpZCA1zFfUyAQzT2teNPpqxh4zmivaQ0aDXRUvYh0vZ1EfNsixWbxnhh4/3MZDMVzwm5HNxxdLZ/NWpXYrQOUixbBLPFjGxaAx6OaExQHPIh9et75OIiIiITG8qqouIiByt7CgMbYTMEETaweWd2Ncv5WDNt2HDf45fc7hg2Q1w6nvBUcMFqn1RLzmIdCjqZQLkS3uHjOaKDKeLpAtlyqaGjE60V3YnuXt1LxsGUhXX3U4HbzulkyuWziaqIaQHyBUN4rkiTqeD1oiPzgY/zSHd4BERERGR+qGiuoiIyJGyLEjutAvqRtGOe5nowvbIZlh1m90Bf7BoF1x4K7SdOLHnnGj7R710LpmcnPkZwLIs0oUyqXyZkUyBeLZEtmgA9pDRpqCGjE6kXfEcP1zTx+NbKgwC3mvlwhauPnsunbFAFXdW214bhpvIFfG7XcxqDNAZC9AQ9Ogmj4iIiIjUHRXVRUREjoRRgpEtMLoFPEG7+3oiWRas/3dY+137XAdbeDGccxN4QxN73omkqJdjVjJMUvkyyVyRoXSRdL5EoWzi2TtktENDRidcMlfivqe38/uXdh9yCOmJHRGuW9nDSZ3RKu+udpmWRSpfJl0oEfS6WdgWpi3qJ+pX976IiIiI1C8V1UVERA5XIW13pyd3QqgFPBPcpZqPw0Nfgf7Hx6+5/XYx/fhLJvacE+ngqJfGHgjVeNZ7DckW7W70sUyRkUyRbMHAsOwhoxG/hxa3uvwnQ7Fs8rsXd/GLp7eT2fsJgIN1xvxcffY8VixoVtf1XoZpkciVyJXKRP0eTuqM0hrxEfTq7YWIiIiI1D9d9YqIiByO9BAMbbAL39FOcE7wP6G7noMHvgzZ4fFrLcfDhZ+HhtkTe86JVMpCZgT8MTvqJdIJLl1mvJ79h4wOpwskcyVyJROX00HI66I1ogzqoxXyugj73YR8bjwuJyXDJFMok86X9xXOTcvi0U3D/GhNH4OpQsXXifjcvG/ZbN66uFMRO3uVDJN4tkTJMGgMeVnY3kBL2ItPN31EREREZAbRu10REZHXY5oQ3wbDmwALot0wkZ2qZhme+SE89xP79Q92ynth2V9P/BDUiXJA1Mtxinp5AxWHjBomfreLoNdNY9CpTuhj4HE5aI/6KZsW96zu5Q/rB0jmykQDbi45uYNrV/bQGPLy8KtDfPvBzWweTFd8HbfTwTuWdPGepbMJ+3S5DPbPbiJnR1I1h710NURpDnlx62aDiIiIiMxADsuyKodGyhFJJpPEYjESiQTRqHI2RUTqQrlgF9PH+uxhm77IxL
5+agAe+BLsWTd+zd8A598Cc5ZP7DknimXZXfvFrKJeXkelIaO514aMet0EvS51QE8Qj8vB7KYg33t0K99atYlKsehOB9x44UKuXDaXy7/7ODvjuXHHnHdcKx86ey7tUX8Vdl37ssUy8WwJj9tBa8RHZyxAU9CLU5+iEBERkaOg+pnUC7XeiIiIVJJP2HEvqT0QbgO3b2Jff+tD8MhXoZgZv9Z9BlzwGQjWaJFaUS+vq9KQ0aJh4XY6CHndRDVkdFK0R/1879GtfONPmw55jGnBt1ZtxrLgrg+cwdu//di+tZO7oly3sofj2yf45tk09NrNoGSuRMDrYm5zkM5YgGjArU9SiIiIiIigorqIiMiBLMvuIB/aYBePo13gnMCs4HIeHv8ObPiP8WsOF5z517DkCnDUYPeyol4qsiyL3N5Yl/2HjJqYBNz2kFHlTU+ukNdF2bT41qpDF9T3950HN3PFmbNZ1tPEzrEc16yYx/KephlfMDZMi2S+RLZoEPa5OK49QnvMrwgcEREREZGD6ApZRETkNUYZRnthZDO4PXZBfSKNbIFVt9kZ7QeLdMFFn4e2kyb2nBNh/6iXcDs0zZ/xUS8aMlpbwn4396zurRj5Uolpwc/W9vP5v1xE2TBnfC542TBJ5ErkyyYNQQ89LSFaIz78Ht0MEhERERGpREV1ERERsAvGwxshsQMCjeANTdxrWxa8/Gt44p/BKI1fX3ARnHvzxJ5zoijqZR8NGa1NyVyJOc1B/rB+4Iie94f1A/x/Fyykd6hCBNMMUSybxLNFDMuiKeTlhMYAzSEfXvfMvskgIiIiIvJGZua7YhERkf1lR2HwFciO2EM3XZ6Je+18Ah7+CmxbPX7N7YeVH4fjL4VaK8aaZcgMA44ZG/VimhaZ4qGHjDYFvRoyOgXGMkXW7UqwfleSdTsTbBvNsv6Ll5DMlY/odZL5Mh5Xjf25q5Jc0SCRL+Jg7/DRBj/NIX26QkRERETkcKmoLiIiM5dl2Z3pwxvtDvJY98Rmme96Hh78MmSGxq81H2fHvTTMmbjzTYTXol4KGfsGwwyLeimWzb0DGjVktFYMpQqs35Vg3c4E63Yl2RnPjTsmUygTDbgZShcO+3Wjfjcl4zDzYupEumB/0sLvdtHdEKAjFqAx6NEnLEREREREjpCK6iIiMjOVizC6xc4594UgOIGFY7MMz/4YnvsxWOb49cX/DZZ/GFzeiTvnRNg/6qXrtBkR9fLakNFkrsxotsBYxh7SaFoaMjoVLMtiT7LAup0JXtqVYP2uBHuSb1wof7J3lLcs6uC7D2857HNduriTbOHIutunI9OySOXLpIslgh43C1rDtMf8RP0T+IkcEREREZEZpr7fKYuIiFRSSMHQRkjuglALeAIT99qpAXjgS7Bn3fg1fwzO/zTMOXvizjcRzPLebnrnjIh6MUyLVL5EKl9mOF0gkStRKBs4HU57yGhYMRjVYlkWO+I51u38c5zLSKZ4xK/z86e285X/dir/8siWwxpW6nTANSvmkcxVmHFQJwzTsoePlsqE/R5ObI/QFvUT9OryX0RERETkWOmqWkREZpb0oJ2fXkhBtBOcE/hP4daH4ZGvQjE9fq37dLjgsxPbEX+sZlDUS75kkMyXSGRLjLw2ZNT885DRpqBXERhVYFoW20aye4vodiE9fhSF7YDHxaKuKCd3RTmlK8aCtjBBj4uPXXQc3/jTpjd8/scuOg6300F2b0Z+PSkZJvFsiZJhEAt6WdjeQHPIi9+jT1yIiIiIiEwUFdVFRGRmMA2I98Pwq/ZQ0GjXxA0HLedhzXfglf8Yv+ZwwpnXw5L3T2xe+7F6LerFF63LqBfTtEjvHTI6nLK70XPFMg6Hwy6ihzRktBoM02LrUNruQt9bRE8fReRK2Ofm5K4oi7tiLO6O0dMSGvdpgj3JPDecOx+Ab63aVLFj3emwC+o3nDuf7aPZo/qaalW+ZJDIlbCA5pCX7saofs5FRERERCaJw7KsmT
WhaZIkk0lisRiJRIJoNDrV2xERkf2V8jC8CeLbwB8FX2TiXnt0K6y6Dcb6xq9FOuDCz0P7yRN3vmO1f9RLwxxonAve0FTvakIUyyapfIlkrsRQukC6UKZYtvC4HIR9bvwel4aMTrKSYbJ5MM26XQnW7Uzyyu4kudKRd4M3BDx2Eb07xsldMeY2Bw/r987jctAe9VM2LX7weB/3r9tNMl8m6ndz6eJOrlkxD7fTwZ5kvm6GlGaLZeK5Em6Xg9awj66GAE1BL05FGImIiEgNUv1M6oWK6hNEfymIiNSo3BgMboTMIITbwO2bmNe1LHjlt3aHulEhA3r+BXDe34E3PDHnO1aVol6CTRPXrT8FLMsiWzRI5e0ho6MZuxvdMC2CXjchrxuvW126k6lYNtm4J8W6nQnW7UqwYSBFsVxhOO8baA55WdwdY3FXjJO7o8xqCBxTHE/Q6yLidxP0ufG4HJQMi2zB/uRCPUS+WJZFulAmmS8T8Dhpj/rpiPmJBTyKMRIREZGapvqZ1Iv6+Zy3iIjI/iwLUrvtgno5B7HuiYtfySft7PS+R8evuf2w8uNw/KW1U7A+IOplCUS6pm3US9kwSe8tjg6l8yRzZQplA5fDScjrpjXs15DRSZQrGmwYSLJuV5L1uxJsHEhRPpzJoAdpj/rsKJe9cS7tUd+EFoOzRWNv8bwwYa9ZC0zLIpkrkSkahH0ujmsL0xb1EfF7pnprIiIiIiIzyvR8Ry0iIvJ6jLIdyzK6BVxeeyDpRNn9Ajzwpb0RKgdpXggX3WrHqtSC/aNemhdO26iX/YeMDqeKZIplyqaF3+0k5NOQ0cmUKZR5eXdy72DRJJuH0hhHUUTvbgjs7USPcnJXjNbIBH1iZIYoGyaJXIl82dwbjROiJewj4NXwURERERGRqaCiuoiI1Jdixh5GGt8OwWbwBifmdc0yPPtjeO7HYFWIt1h8OSz/iF3En2rTPOrlUENGnU4HAY+b5pAXt4YvTopErsTLuxKs2ztYtHcow9HkBM5rDnLy3i70k7uiNAZr4M/FNFQsm8SzRQzLojHk5fiGAC1hn2KNRERERESmmIrqIiJSPzIjMPgy5OJ2Mdk1QZEI6UG7O33gxfFrviic/2mYu2JiznWsSlnIjoI3Mq2iXioNGS2VLTwuJyGfi1ggoCGjk2AsU7SHiu6yu9H7R7NH/BpOB8xvCe8bLLqoM0o0oDiSY5EvGcRzRRw4aI346Gzw0xzyKdpIRERERKRG1P67bBERkTdimpDYbneoW8be/PQJKj71PgqPfAUKqfFrXW+CCz4LoZaJOdex2D/qpXE+NM2r6aiXQw0ZNU0IeF3E/F51406CwVSe9bv+HOeyM5474tdwOR0sbA3vi3M5qTNKyKdLyolgDx8t4nU56W4I0BEL0BDw4FQxXURERESkpugdkIiITG/lIgxvgnifXUT2T1CBu1yAJ/4ZXv7N+DWHE5ZeB0veD84pzjS2LMgnoJCGSPveqJfmmox62X/I6GAqTyqvIaOTybIsdifyrN+VYN1OO85lMHXkgzs9LgfHt0f2FtFjnNgRwe9RlvdEsSyLZL5MulAi6HUzvyVMW9RPTN3+IiIiIiI1qyaL6t/5znf46le/ysDAAEuWLOGf/umfWLZs2Rs+79577+X9738/73znO/n1r38NQKlU4nOf+xy///3v2bp1K7FYjIsvvpg777yTrq6ufc+dN28e27ZtO+D17rjjDj796U9P6NcmIiITKJ+EoY2Q2g3hVnD7J+Z1R7fCqttgrG/8WrjdHkbafvLEnOtYlHKQHanpqJc3GjLaHNLAyoliWRY7xnJ2nMtOO9JlNFM84tfxuZ2c1BndN1T0+PaIPjUwCQzTIpkrkSuVCfs9nNgRoTXiV9e/iIiIiMg0UHNX7ffddx8333wzd911F8uXL+cb3/gGl1xyCRs3bqStre2Qz+vr6+OTn/wk55577gGPZ7NZnn32WT7/+c+zZMkSxsbG+PjHP8473v
EOnn766QOOve2227jhhhv2/ToSiUzsFyciIhMnNWAX1ItpiHaCcwL+SbMseOW3sOY7YFQoRs6/AM69GXxT/O+DWYbMsP3fNRb1oiGj1WNaFttGMry0M8n6XXacSyJXOuLXCXpdLOqM7h0sGmVBaxiPfo8mTckwSeRKFMsGsaCX+W0xWsI+df+LiIiIiEwjDsuyrKnexP6WL1/OmWeeybe//W0ATNNk9uzZfPSjHz1k17hhGJx33nlcd911PProo8Tj8X2d6pU89dRTLFu2jG3btjFnzhzA7lT/xCc+wSc+8Ymj2ncymSQWi5FIJIhGo0f1GiIichhMA8a2wfBGu5A+UXnm+SQ88jXoe2T8mtsPKz4KJ7xtamNVDoh6aYOmBTUR9fLakNFErsRQqkCmeOCQ0YDHhaMG42imG8O02DKU3peHvn53gkzBOOLXifjcnNy9t4jeFaOnJaTYnSoolA3GsvZNj6aQh+6GIM1hr25giIiIyIyi+pnUi5rqVC8WizzzzDPccsst+x5zOp1cfPHFrFmz5pDPu+2222hra+P666/n0UcffcPzJBIJHA4HDQ0NBzx+5513cvvttzNnzhyuvPJKbrrpJtzuyt+iQqFAofDnXNJkMvmG5xURkWNUysHQqxDvh2ADeMMT87q7X4QHvgSZwfFrTQvsuJfGuRNzrqNVQ1EvlmWRKRqk8iVG00XGskWyRQPL0pDRiVQyTDYNplm/M8G6XQle2Z0iVzryInpD0MPJXTFO2RvnMqc5iFM3OaomWywTz5Vwuxy0R310NQRoDHp1I0NEREREZBqrqaL68PAwhmHQ3t5+wOPt7e1s2LCh4nMee+wxvv/97/P8888f1jny+Tx///d/z/vf//4D7oh97GMf4/TTT6epqYnHH3+cW265hd27d/OP//iPFV/njjvu4Itf/OLhfWEiInLssqN23EtmyB7I6fIe+2uaBjz3E3j2h2CZ49dPfjcs/wi4pzD3u0aiXsqGSSr/5yGj6XyZfNnA7bSHjLZFPCoSHqNC2WDjQIr1u5Ks25lgw54UxXKFn8s30BL2srgrxuLuGCd3ReluCOiTAlVmWRbpQplkvozf42ROU4DOWIBYwKPfCxERERGROlBTRfUjlUql+OAHP8j3vvc9Wlre+OP/pVKJ9773vViWxXe/+90D1m6++eZ9/33qqafi9Xr5yEc+wh133IHPN76YcssttxzwnGQyyezZs4/hqxERkYosC5I77YJ6uQixbnBMQBd0ehAe/DLsfmH8mi8K538a5q449vMcrRqIesnt7UaPZ0sMpwtki8aBQ0bDGjJ6LHJFg1cGkvviXF7dk6JsHnkqX0fUz+LX4ly6Y7RHfCrcThHTsoePZooGYZ+L49rCtEV9RPyeqd6aiIiIiIhMoJoqqre0tOByudizZ88Bj+/Zs4eOjo5xx2/ZsoW+vj7e/va373vMNO2OLrfbzcaNG1mwYAHw54L6tm3beOCBB94wt2n58uWUy2X6+vo44YQTxq37fL6KxXYREZlARglGtsDoFvAEIDr+34Kj0vcoPPxVKFSI7uo8DS78LIRaJ+ZcR2Nf1Eu4qlEvpmmRKpRJ5UuMpIvEc0VyRQOXhoxOiHShzMu7kqzblWD9rgSbB9McRQ2dWY0BFnfZXeiLu+0hlzK1DNMini1SMAxifi+LOoO0RvwEvBo+KiIiIiJSj2qqqO71ejnjjDNYtWoVl112GWAXyVetWsWNN9447vgTTzyRl1566YDHPve5z5FKpfjmN7+5r3P8tYL6pk2bePDBB2lubn7DvTz//PM4nU7a2tqO/QsTEZEjV0jb3enJnRBqBk/w2F+zXIAnvgsv/3r8msMJZ1wLp10JzikqhE1B1EuhbJDKl0lkSwylXxsyauJzuwh6XTQGvOp6PkqJXIn1uxL7OtF7hzMczXT4ec3BfXEui7qiNAYnIPpIJkTJMBnLFjFMi8aQl+MbIrSEfZopICIiIiJS52qqqA52DMvVV1
/N0qVLWbZsGd/4xjfIZDJce+21AHzoQx+iu7ubO+64A7/fz+LFiw94/mvDR197vFQq8d/+23/j2Wef5Xe/+x2GYTAwMABAU1MTXq+XNWvWsHbtWi644AIikQhr1qzhpptu4gMf+ACNjY3V++JFRMSWHoKhDZCPQ7QTnBPwz9VYH6y6DUa3jl8Lt8OFn4eOxePXqqGKUS+HHDIKBD12Ed2jbvSjMpopsm7vUNF1u5JsH80e8Ws4HTC/NczivV3oizqjig6pQfmSQTxXxIGDlrCXroYATfokh4iIiIjIjFFzRfUrrriCoaEhbr31VgYGBjjttNO4//779w0v7e/vx+k8/DcsO3fu5Le//S0Ap5122gFrDz74IOeffz4+n497772XL3zhCxQKBXp6erjpppsOyEwXEZEqME2Ib4PhTYAF0e5jLyxbFmz4HTz+bTAK49fnnw/n/h34Isd2nqNVhaiX14aMJvMlhlIF0vkyhb1DRoM+DRk9WoPJ/L4C+rqdCXYn8kf8Gi6ng+Pawvs60U/qjBD01tzlmeyVKZRJ5It4XU46YwE6Y34ag16c+vMjIiIiIjKjOCzLOppPIstBkskksViMRCLxhnntIiJSQblgF9PH+uwCt38C/i4tpOCRr0Hvw+PXXD5Y8VE48S+rOvxzn/2jXmJzJjzq5bUho2PZIiNpuxvdMC38HjvWxe9R1vORsCyL3Ym9RfSddiF9KFXhJs0b8LgcnNAeYXF3jMVdMU7oiOj3osZZlkUyXyZdKBHwuuiMBmiP+Yn63YpGEhERETlCqp9JvVAr1P/f3p3HSVbX9/5/n6VOnarqrp7pGXobBhgWWWZYZBtAURGUqDFifDyu5Hoj4BZzwYRg5Mb8DLglqOgvgBAhGjMkhpgbvRATEwjCBUFxATO/MIAEkc3pZWaY6a7equos398f51R19TJM18z0/no+Hm11VZ2qOtV9KGve9en3FwCw8MpDSd3L8IDU0iG5B2Hhxf7Hpfs/K40MTL+u/Ujp/Guk1Ucc+OM0a46qXqLYaGTKIqPlIJZjWcplHBYZbZIxRi/uHtMTtYVFt5e0e6za9P1kXVvHdxfrdS6v6mylXmeJiGKj0nigsSBUi5/RsZ2t6ij6KmR5+wwAAACsdPyrAAAwf+I4qWAJy1JQTk4rI8nEdjgmFXsOfJHQOJK2/p302BbJxNOv3/hOafOHD05w36xgPHmu2Vap+6Sk3uYAql5mXGQ0ipV1WGS0WVFs9MLLo+kkeklP9A6pVA6bvp+85+iE7mJ9Ev2oQwp8mLHEBFGsofFA1ShWWy6jIzvatLYly18UAAAAAKgjVAcAHHxxlATIYRqgh2WpXJKqI1JUTS43kSQrCZVdPwnUD9TIDun//qnU9/9Nvy5blF5/tXTEaw/8cZrVWPXSfpS0+nAp29L03bDI6METRrF+uWu0vrDok70ljVajpu+n1Xe1qadNG9NJ9CPWFOinX6IqYaTBsUBGRu0FT+tW5bWmhf+mAAAAAExHqA4A2H9hNQ3Na9Pn40m1STCWhOdRRaqt3OFkJNeTMn7Sl24f5P8Lev5h6cEvSJXS9Ou6T5bO+3+Sapn5VKt6qY4kj736SKmwtqmqlyCKNfIKi4x2FjOymUbfpyCK9V8Dw0mdy/YhPdVfUjmY4S8Z9mFVPlNfVHRTT1Hr2/P8/Je4sWqoofFAjmOpo5hVd1tO7QWPD0cAAAAA7BWhOgDglRnTMHFekcJxqTKahNdhOQ3Pg2Rby5IcL/nKtkpOu2TN8ZRnWJF+fKv0xJ3Tr7Ns6bRLpVPec+C1Ms1qrHrpaq7qZaZFRsM4Vi7jqpB1taZlAaprlphyEOnpgWE9kS4q+nT/sKpR8yH62pasNq0rJkF6T5t6VvlU6iwDxhiNViINlQP5GVvr23PqKua0Kp/h9wsAAABgnwjVAQCJOJ6oaql9VYaTrzCdOo/SjmnHTYJz15O8vGRnDnihzf2y5wXpvk9Lu5+dfl1Lp/TGT0hdJ8
7vPk2qejkyWQx1H1UvtUVGS+OBdo9OLDJqy1Lec7S2JcvU7D6MVUM91TesJ3qHtG37kJ7ZMaIwNvu+4RTdbf6kOpfOoj8He4uFEhuj4XKokUqgQtbV0R0FdRZ9tfqZhd41AAAAAEsIoToArDRRMLmypTouVYeTipKwkkyeGyNZSsJyx0sW9fRbD35ly/4yRnr6u9IPvpyE/VNteJ30uo8lU+LzuU+VoWTh1VlUvZSDdJHR8ap2jVRZZLRJI+VQT/YN6fHtJW3rHdIvd45oPzJ0rV+d06Z1bdrYk9S58FcAy1MUGw2OVVWJIrVmMzq+u6iOVl85j8VHAQAAADRvkaQjAICDyph0QdDG8HxMKg9KQa2ypZpsa2missUrSM7q+a9KaUZlWHroS9IvH5h+neNJ53xEOu7X53dyPhiXRl9OJtK7TkoWXXUmT74ak0yjD5dD7R6dWGRUYpHR2RgcqyZ96L1DeqK3pOd3jarZDN2SdMTaQjKFnk6jr8p7c7G7WATCKFY1ijVSCRXFRqsLnl61qlVrWjxl3UX8GgcAAABg0SNUB4ClLI6TSe1aeB6MJ5PSleF06ryS1JFISb+4m4bnmWJyutQmofu3Sfd/RhoZmH7d6g3S+ddI7Rvmb38mVb1smFb1EkSxhsuhSuNV7RypaqQcqBLGyqSLjHaxyOhevTxS0bbeUr3O5aU9403fh21JRx3Soo09bTpxXVEndLepxeetz3JjjFEQGVXDWJUwUiWKZcnItm1lXVtrCp56ViWLj7p8cAUAAADgIOBflgCwFMRREpjXFwwtS+VSUtkSVZPOc1PrO89M9J1nC9MmppekOJK23iE99teSmWGxyRMuks763aSmZr6UB2esehmrJtPoe0arenm0qrFKpMgki4y2+hmtZUJ2RgOlsrZtH6pPo/cNlZu+D9e2dExHizatSxYVPa67VXmPtzrLSWxq4XkSoAdRLEuS59ryXEerC56Kvqt81pWfceRnbKbSAQAAABx0/EsTABaTcEplSzAuVUpSdTStbKmo3nnhZJLgPONLfnHx9J0fbKM7pf/7Z1Lvf0y/Ltsqve5qacO587c/YVka2VWveolaujUSWCrtGdeukYpK44HGg1iOZamQdXRIK4uMTmWMUe9gWdt6h+p1LjuHZ+jG3wfPsXVsV6s29RS1cV2bju1slZ8hQF0uotgkk+dpiB6ZZPFez02C8q6ir2IuUw/PcxmHSXQAAAAA82KZJjAAsIgZ0zBxnp5WRpLwPKz1nQfJtpY10XeebZWc9qTGZaV44YfSA59LfjZTdZ0kvfETyaT4fIhDaexlKTaqtB2ukr9OQ4GnXS8Na6QSKoxi+a6jvOdqdd5mkdEGsTF6afeYtvWW0mn0Ie0ZC5q+Hz9j6/iuYrqwaFGv6mylh36ZCKJYlSCZPq9GyV+j2FYSoOe8JEAv+K5yaYDuu45sPqwCAAAAsEAI1QFgrsTxRFVL7asynPadp1PncbJQpWxbcrLJ5LmXl+zM0us7b5bXknxQ4LUkU/dRkNTZVIaT8Ponfylt+/b021m2dOol0qv/x7wtqGrGB1UeGdJIpl07vR69PFrU2GAgS4Hynqv2PIuMNopio+dfHtW27ROT6MPlsOn7KXiOTkgXFd20rk1Hri0wibzEGWNUCeN6hUsQR5IsZRxLnuuoLZ9RWy6jvJcE6NlM0ovOh1QAAAAAFhNCdQA4UFEohQ1950E5rWyp9Z1XkoDdtpKw3PGS7m+/dflWtrwSx5Nau6U4kH50q/Tzf5bKQ5LfJh33dmnz7yQ/t5d+Mv22hY6fNv51AAA700lEQVRkOr37pDnfzSA2GhsdVWWwT7vDrPZkD9Ww3Skn8FTwHHXlPBYZTYVRrGd3jiZ1LtuH9FRfSaPVqOn7KfquNva0adO6JEg/fE2B6pwlrFbfUgvQwziuT59nHUcdRU9F35Pv2ekEusOHUwAAAACWBMsYY/a9GfalVCqpra1NQ0NDKhaLC707AA42Y9
KAvKGypTomVYak6nha2VJNtrU0UdlS+5qniepFz/GkVYdLj3xZevALMy86atnSuR+VTn+f9FdvloZeSi4/4lzpdR9L+uPngDFSOYw0Vo1UGi1rbHBA1SDSWL5bpu0I+S1FFjxMBVGs/xoYTifRS/p5f0nlYIbf5T6szmfqi4pu7ClqfXueDyqWqCCKGybQIxlJjmUp41ryXUdtuYxa6vUtjnIZ6lsAAABWIvIzLBcrcEQSAF5BHCe1LLXwPBhP+86Hk/NRJenWlpLw101D80wxOSUQfGWt3Umg/sDn9r6NiaXvX598/+5vSH/1Junsy6Xjf+Og/3yjWBqrhhqthhocCzRaCRWNDcqLRmW3dCjTfZSK+bUr/vdaDiI93T9cn0R/emBYQdT8Z/KHtGa1qadYD9K723xqPZYYY4yqaf95NUpCdFmS61jKOrZac47W+b4K2YkAnfoWAAAAAMsNoTqAlSmOJqpaan3n5cbKlqpk0voKx00rWzwpW0j6v9E8ryWpfHnwC7Pb/qEvSaddKr3nWwd1Or0SxhqrRhopBxocDzRWjRTFsXyFao0G5RRaFBRPUdDSLWOvzN/1WDXUk30lPbG9pG29Q3pmx4iiuPkQvbvNTwP0pM6lo+jPwd5irkSxUTWM0xA9UhAbWZaUdWx5rq01LZ7acpkkPPcc+a4jz6W+BQAAAMDyR6gOYHkLp1S2BONp3/loWtlSkWpZoZNJgvOMn4S4K7HvfK6YOOmR/9GtM1e+7O02j35dOvND0nDffj90HEvjYaixSqTB8UAj5VDlMPnAxHcdFT1bfjAoGaOguEHl4mGKM4X9frylaLgc6Mm+UlLnsr2kX+4a0X5k6Frfnq8H6Bt7ilrTkj34O4s5EU6qb4kVG5P0n2csZV1Ha1pyasm6ynlOfQKdvnsAAAAAKxWJEYClz5iJ0Lzedz6ahOdBre88SLa1rIme82yL5LQnNS44cMGYNNwvlXqlUp803HA63CdduS1ZlLQZT31Heu0fNL8rsdFYJdJoJdCesUBj1VBhbORYlnIZR2sKWVmWZFeG5JRHFOUPUaV4hCJ/zYqoetkzVtWTvWmI3jukF14eU7MZuiVpw9qCNqZ1Lht72tSWW5mT/UuJMUZB1LCAaBRLMnJsW1nXVj7rqGe1r7w3Ud/iZ6hvAQAAAIBGhOoAlo44nqhqCStSOJ50nVeGk4n0qJLUukiSbaeVLVnJy0t2ZkWEpXMqjqSxXTOH5qU+qTz4yrfPtkjloeYes1xKfo/70LjI6NB4VaXxUOUgllGsrOOq4LnKNNRSWGFZbnm3Yjev8poTFLT0JMfIMvXySEWPp4uKPtE7pF/tGW/6PmxLOrqjRRt7kj70E7qLavF5G7GYxcbUJ88rYaQwMpKMPNeW5zpaXfBU9F3ls249PGcxXgAAAADYN/41DGDxicIkMK9XtpTTypaGvvM4SlI+OzMRnvutVLYcqOpIEpCX0uny2ulwXzKFXlukdX9URiS/TRrZMfvb+MXkdz6DmRYZrUaRLNnKZWytznuyp/4Rgonkjr8sGaNq62EKiocvu6oXY4wGhivatn1IT/QmdS79pXLT9+Palo7pbK0vLHpcV6vyHv99LVZRnEyfV9IQPTKxbFnKurayGUftBV+tfkZ+xlHOc+S7tlyHv9IBAAAAgP3Bv44BLJxplS1jE5UtYWUiTLWspO/c8aRMXvJXSTbTlPslDpNQe6bQvNSX/Pznygs/lI77denhP5/9bY7/jaTKJ1UJY41WQ42UQw2NpYuMmliek9RUFP1M0ksyA7syJKeaVL1UWw9XmFu7LP56wRij7YPj2rY9mULf1jukXSMzfxDxSjzH1nFdrWmVS1HHdrUytbxIBVGsSpBMn1ejWMak9S0ZWznPUXebr3w2qW/JZRxlXVs2/ecAAAAAcNAQqgOYW8ZMqWwpJxPL5VIanFcmpp8tO1ko1PGSCWXHWxah57wyJgnGZ+o0L/VJIwOzXyh0f+XXSsVuqb
UnPe1OTlcdJh3xGukHN85uHyxbZvOHNT78skaGKxocCzRSCVUJQsmykkVGcxm5zisfI8ut6iU2Ri++PKZtvRN1LoNjQdP3k8s4Or67NVlUdF2bjuloUYbJ5UXFGFOfPK+GsYI4kmQp4ySLh67Ke2rLucrV+s896lsAAAAAYD4QqgM4OOJoIjgPxpPvy1MqW0yt79xJ6lpcT8oWkil0zF5UlYYHGkLzvskhejC67/s4EK4vFXuSsLy1O/m+FqK3diW/272xM9Lrr5Ye+Nw+H8a87mpVYktPPNerIDJy7WSR0ZYWf3aftUytemk9TLHXMvvnuUhEsdFzu0aTEH37kJ7sLWm40nwNTyHraGN3W31h0aMOaZHD9PKi0VjfUg1jhXEs27LkubayjqOOoqei78n37PoConwIAgAAAAALg1AdQHOiYKKeJSwn31dKSUVHVE2+TCwprWxxPSnjJ5Pn9J3PjjHS+J6GWpYpC4KO7pRk5nAHLKmloyE0754I0Ys9Sf3O/v4FwXCvdPZHku8f/MLME+uWrfh1Vys66wr94qnH1eJl5LrNPZ5dLcmpDC/JqpcwivWLnSPatr2kbb1DeqqvpLFq1PT9tOUy2thT1MaeNp24rqjD2guE6ItEEKWLhwaRgjhWbCQnDdD9jK3OYlaFWn2L58h3HepbAAAAAGARIeECMJ0xM/Sdj070nUfVJFyX0r7ztLIl25JWtjA9uU9hJVn4sxaUT+04D5tfWLIpmUJDUD4lNG/pSH6PByiMjaLYKIxjRZEUmlhRVJEZ/i+tPu2Dck57n6yf3Cbrqe8kf9XgFxUf/xvSmb+jwDjq/eVTytrpgrSztBSrXqphrP8aGNa23iE90VvSU30lVcLmK3ra8542rSumnehtWr86J2uJfJCwXBljVE37z6tRMoFuJGXcpL6lmHfV5nvKZ5369HnWtfm9AQAAAMAiR6gOrGRxPKXvfFyqDCdfYTXtO69VtthJ0OpmJS+fBJUEP3tnYmls995D87GX5/bxLVtq6ZzSbd5wmm3dr9+fMVJojKLIpCG5FMVxPUCvhJGqYXIaxUaRSS6P4tpkvZFkye7fo7ZVq9V94mVqOedKWa4nEwUaHxnSnv5+lUebXDC1XvUSq9q6XkHr4Yu26qUcRPp5/7C2bU8WFf2vgWEFUfN/edDRmtWmnjZtWpdMo3e3+YSxCyiKjaphunhoGCs0ye806yQ952taPK3KefIztnwvCdGpbwEAAACApYlQHVgJorAhPC9LQTmtbGnsO08nY53MRHjut1LZ8kqCsWTafGo9y3BvcnlUndvHzxb3Hpq3HNLU786YZLI8rAXkURKIh1GsMDLplK1RNYoUxkZxQ2Ce3oMkS44s2bYlJ/3KOLZsy5JrW9LUvDcc1Y7eUe04wB9Dveolt1bV4hGLrupltBLqqb6StvWWtG37kH6xc6Th5zZ7PW1+fQp9U09RHUV/DvYWsxHW6lvS/vPYmKT/PJNMoK9tzarVd+VnJibQqd4BAAAAgOWDtAxYTqZVtoxNrmwJK8l2ljURnmfySUe27Szori9KcSSN7Zo5NC/1SeXBuX18200W/mydUs9Sq2yZxST21LA8jCYqWcLIqBpGqkSxgtBMmiyPjVEtKFfa99wYlnsZW46VfD8tLJ8nSdXLHsVublFVvZTGAz3RV9IT6ST6c7tGtR8Zug5rz2vTuiRA39jTpvbCgVfyoDnGGAVRwwKiUSzJyLFt+a6tQtbRutW+8t5E/zn1LQAAAACw/BGqA0uNMVMqW8pSpdZ3Xk4rW8JkW8tOFgp1vKTuI79mUU3wLgrVkZnrWYb7kmnz2s9yruRW7z00z6/d64cdcZxMy0aRSUPzibA8SCfKq2EaljdUsMS1BU7TvDwJxm3ZluTaljx74cPyfTKR3PHdkolUbT10wate9oxV9UQ6hb5t+5Be2D3W9H1YkjYcUkjqXHqKOqGnTW25hf+AYCWJTa2+JalwCaPkgyXPTepb2gue2n
Kuct7EBLrnUt8CAAAAACsRoTqwWMXRRHAejCffl6dWttT6zp2krsX1pGwhmUJHIg6lkR1TQvOGBUIrTXZ3N8vxJkLyaTUtXclfCjSIYiULe8ZGUdUojIP6+aChbiKMTNJtHseKYyk2cfqBiZGliWDctiTXsZR17SQ8X+IZ4GKoetk5XNETvUNpJ3pJ2wfHm74P25KO6WjVxp5kYdHju4tqyfJ/yfOl1v9fCWJVoqS+xZKUdW1lM47aC75a/UwSnnuOfNeWS/85AAAAACDFv+CBhRYFaWieTp0H42nf+WgSnke1vvO0ssX1pIwv+UX6zqVkcr9Saugy75tc0TIyMNEXP1fya6eE5Q0her5dsuxJk+RRpGSRz7JROFZWEDWG5bEio+lhuZEsa6J+xbGkjGMp52Zk29aSD8v3pVb1Yua56sUYo4FSRdu2D+nx3iE90TukgVKl6ftxbUvHdrXW+9CP6yoq51G5NB+CKE7C8zBSNYplTFLfks3YymUdded8FbIT0+dZ15ZN/zkAAAAA4BWQyAHzwZgZ+s6nVLZEQbKtZSXTzY4nZVuSU2uZJ6b7ElWl4YGJBUCndpwHo3P7+K4/cz1La4/CQqci21OQhuVRbaHP2KhSjlQdGVMljOp95XFsFMWSaahhaQzLXcuS51hy0oUNV3xbzzxXvRhj9KvBcW3bPlSvdHl5tPkFZz3X1nFdrfU6l1d1tSrrEqLPpWn1LXEsY1Svb1mVn6hvyWUc+Z7N7wQAAAAAsF8I1YGDKY5n6DsfTsLzsJr2ndcqW+yJ8NxblUzdrtQE1RhpfM9El/nU0Hx0p6T9WOlx1iyppaMempuWLkWt3YoK3QpbOhVmVikyE73l5SBSNTSqjkUKR8qK4/F6b3n6hCRZstU4WW7JdWw5GYuwfJbqVS/+GlXbNsxJ1UtsjF54eSwN0ZMgfXA8aPp+chlHx3cXtWldUZt62nR0R4sy1IXMmXp9SxqixyaWJaseoHcWs2rLeck0ejqBTn0LAAAAAOBgIVQH9kcUNgTn48m0eWVq33laOeK4kpNNOs/91pVb2RJWJneZT10YNCzP7eNnCjLFHpnWLkUt3YoKXQoLXQoK3ar6axQqo2oUqxqaicny0CjabRTHJclKV/aU5MiSbU8E5l7Glm1Zchfz4p5LiBVV5I7vTqpe2k9Q2NIt43gH5b6j2OiXO0eSKfQ0RB+pNL8YbUvW1caeYtKJ3tOmIw9pSRZ3xUFXq0eqBJGCdPrctpIAPeclAXqLn5Gfnvddh/oWAAAAAMCcWqHpHjBLYTUJzeuVLWNpZct42ndekWKTTp1nkqnzTF7yVyWLh64kJpbGdu89NB97eW4f3rJlWjoVF5LQPAnMu1TJdWrc79C4nVcQqV7DEsVGsTHSuKSxQLKCdHFPO1nc07bk2RMLfhKWz4M5qHoJoli/2DGibb1D2ra9pKf6ShoPoqbvpy2X0aaeYtKJvq5Nh6/Jy+bPDQ4qY4yq9f7zWEEUS/WFdh0V867afC/tP7flp/3nFr8HAAAAAMA8I1QHjJmhsiXtO69dFqeTrJadLBTqeFK2VXLWrKzKlmBs5k7zWtd51Hz3dDPibHFiwjzfqXKuU+N+p8aynSpn1yiSnYTlDX3lsiSnasm1TRKWO5aylp2E57RBLA4mkh2MyqmOKMq2H1DVSzWM9fTAsLZtH9K23iH9vH9Y1bD5hWrbC17Sh57WuRy6Okd4exBFca3/PKlwiUzy32zWSepb1rZ6WpXz5Ht2fQFR6nQAAAAAAIsFoTpWjjiaHJwH42nf+XBDZUut79xJgnM3K3mFZAp9JYgjaWzX3kPz8T1z+vDGdhXmO1TNJxPmZT8Jzsf8DpX9DgVOLg1ajSxNTJFPhOUWYfliYYxkQllxJCsOJRPJMsn3lokkI2WLa5XtOEbZtkOUcV0FUazRSqiRSqywOrtp8vFqpJ/3l7Stt6Qneof0dP+wwrj5/v2O1qw2rWvTiT1t2riuqK6iT4h+kA
RRPGkBUSPJliUvY8l3Ha1tzarVd+vhuZ8u0gsAAAAAwGJFqI7lJwrS0LzcEJ6XpOpoWtlSTQI/KQnLXU/K+JJfXBl959WRmetZhvuS4Dxuvl+6GaHXpkq+S+Vcpyq5Do2nwXk516mqt1qW7TYs7inZtiXXstVqW4TlC80YycSy0rC8HpqbSIpDWSZWY0+OsV3JdmQsV8ZyFLsFGTcr22/Tmp4jFBhHX/vh87rniWdVGg9VzLm6cGOXLnvNBq0ueBoolRVEkwPy0UqoJ/tK6cKiJf1i50jDArGzt25VLqlzWdemjT1FdbT6B/rTWfGMMQqiiQVEq1HyFwKObcl3bbX4jtbl/KS+xXWU86hvAQAAAAAsTZYxpvk0AtOUSiW1tbVpaGhIxWJxoXdn+TNmYuK83neeVrYE5YnwXEomm520sqVW3WIt43Q2DqWRHZPD8sap80ppbh/e9lTJddSD8nKuQ5Vcl6q5ToX5TlleXo41eaFPMrUFZqIkJE+DccuEyfe1wNxY9YVajeWkQbmThOaOp9jxFTvZZDFROyNjuzL10+RLlitZljKOpfXteX31oV/qpvue0Ux5uG1Jv3f+MfrguUfqyd6S/uPFPdqWLiz63M5R7c//aR3entemNEDf1NOm1YWDs/DpShUbMzF9HkQKYyPLkjJp/3kh66ot5yrvucqmE+ieu4xfdwEAAADMCvkZlosVMJaLJS2OZ+g7H0n7zivpQqG1yhZ7Ijz32iQ7szz7zo1Jnn+pNl0+paplZCBZNHQOVbPtKuc6VE7D8mq+S2GhU2GhSybXLtt25DYs7ulIys3pHmEaE9fD8nrlSnpaq1+plc4by0r+SsNyk6lyO6vYa5NxfBk3OxGQW249MFc9LG8uKO0s+vrqQ7/UDd97Zq/bxEa64XvPKIqN3nxCl/7s337e1GPYlrRhbaG+qOjG7qKKuRVS4TQHwiietIBoLCNLUta1lc04ai/4avUzynlOvcKF+hYAAAAAwHJGqI7FIQpn6DsvJVUl9b7zNCh2XMnJJn3nfuvyrGyJqtLwQMOk+ZTgPBid24d3fFVynarkOhXkuxQUktA8KnQrbumUcbPTbmNJIracY8akQXlatTKts3yifsUoDcptux6Gx5kWyckqdnwZJzNpmlyTAnNnTnY/79kK4lg33bf3QL3RLf/3F3r3Get15oZ2/eS53XvdzrEtHX1IS31R0eO7iypkl+HrwjyohvHEAqJRLEtGtmUrm7GVzzrqXpXWt9T7z6lvAQAAAACsPKQOmF9hVQrHGypbxtLKlvG0sqWSDs9aSd+540mZvOSvmrOgb0EYkyz6ubfQfHSntF8lF7N8eFkK/LWq5juTwDzfpaglCc+DQpcir215TvkvVpPqVxpC8lr9iqzkmLEa61dcGduRyfiKHF+x68s4XhKKW05DYJ5JQ3PngH+nxhiVg1ij1TBdUDTUaCWqn08uS86PVUKNVqN0m+Trz37zRD2xvTRj5ctMYiPd8eMX9d6zDp8Uqru2pWO7WrUpnUQ/trNVOW8ZvT7Mg0n1LWGkMI4lI2VcW1nX0eqCp6LvKp/2n/tecjkAAAAAACBUx1wwZobKlrTvPKz1nQfJtpY90XOebZWcNcsnzA0rycKftaB86sKgYXlOHz5y8woK3QoKnQrTafOgFqLnOmQc5srnlIn3spjnTPUrdvKhUa1+xckpdpOgXGlQPqmn3HLT+pXmKo6MMRoPoiQIr4QN4Xhyfqw6EYqP1sPw5HwtHN+PNUHrzjyiXX9+7381dZt7nujXJeccoZMPTatcepIQnX7u2YviicVDK2Gs2MSyLUsZJwnKu4q+irmM/HTyPJdx5Dr8fAEAAAAA2BtCdey/OJpS2VKeqGwJK2llS63v3JlYKNQrJFPoS52JpbHdew/Nx16e24e3bAW5DoWFiYqWJDhPTuNMy/L5gGKxmFS/0rCwZ71+xagelNfrV2pT5RnFmRYZ10+6yqcu5DkpMJ95IrgWio+UQ4
1WAo1WxieC8IaQfLSSTog3nK9dfyCh+IEqZF2VxsOmblMqh1qVz+izF504R3u1vATRxOKh1SipzLItS55rK+c56ixm1eJn6tUtvuvIpv8cAAAAAICmLMpQ/ZZbbtH111+v/v5+nXzyyfryl7+sM888c5+3++Y3v6nf+q3f0jve8Q7ddddd9cuNMbr22mv11a9+VYODg3rNa16jr3zlKzrmmGPq2+zevVsf+chH9M///M+ybVvvete7dOONN6qlpWUunuLSE0dJXUkw3tB3PppOnVeTsFFKwnLXkzK+5BeXft95MJZMmzfUs8SlXmm4X9Zwn6yoOqcPH3nF+nR5/TQNzsPcIcurEmchxZEsE6anU0PzySl0bXHOWv1K7BZkXE+xk6v3lMuqTZVP1LDIdhUbo3IQTdSm1IPvikYqYxPT4dWJ60cagvGxBQ7FD9RoJVQx52rnSGXWtyn6roJoCT/pOWKMUaXefx6rGkWyLEuuYynrOmrLZ9SWyyjvucplHGUztrIu/ecAAAAAABwMiy7x/Id/+AddddVVuvXWW7V582bdcMMNuvDCC/X000+ro6Njr7d7/vnn9Yd/+Ic699xzp133hS98QTfddJNuv/12bdiwQX/yJ3+iCy+8UE8++aR835ckvec971FfX5/uvfdeBUGgyy67TB/60Id0xx13zNlzXVLGB6Xe/0gm0C0rmTp3PCnbkpxaS7QqII6ksV0yQ72KSr3SUK/MSJ+sUp/skT7Z5cFpNzmYz9RYroJ8x7Qp8yCfLAwaZwoH8dFWmGn1K5NDcxlLshrrV9ykrzytX4ncWk95Np0idxRZrsYiWyOBpZFAGqlKo+VoUhA+UhmfHoyn58cq0Rw25c+/vOeokHVVSE9bsq4Knqt8Nj3vuSqk3xeyrkYroS7c2KW/eODZWT/Gr23q1liluen25aZW31IL0CNjZEny0vqWQ4qe2nxPvmfXFxDNUN8CAAAAAMCcsYwxiyrj2bx5s8444wzdfPPNkqQ4jrV+/Xp95CMf0R/90R/NeJsoivS6171O73vf+/TQQw9pcHCwPqlujFFPT48++tGP6g//8A8lSUNDQ+rs7NSWLVt08cUX66mnntIJJ5ygn/70pzr99NMlSXfffbfe+ta36le/+pV6enr2ud+lUkltbW0aGhpSsVg8CD+JRWZ0l/Tij6S2dQu9J01J6t2HFQ1tlyn1SkN9skb6ZA33yRnplzM6kIStcyjMrmqYNu9SOGnavD1ZQBKzY8zMHeVxmITojfUrliVZjfUrrkInq9HY03DkaSS0NBJYGg5sjYTSaNVoJDDJaTWqT42PVaNlG4rXwvDGYLwwJQifGozXLst5jpwma0MKnqPWXEab/+x7s5q4ty3px398gUrjgcaq0X4+y6UliOJJC4gaSY5lKeNa8l1HbbmMWnw3rW9Jvpr9PQAAAADAQln2+RlWjEU1qV6tVvXYY4/p4x//eP0y27Z1wQUX6JFHHtnr7T796U+ro6ND73//+/XQQw9Nuu65555Tf3+/LrjggvplbW1t2rx5sx555BFdfPHFeuSRR7Rq1ap6oC5JF1xwgWzb1o9//GO9853vnPaYlUpFlcpEhUGpVNqv54z9E8dSaGJFkVEYBoqHd6Td5r2yRvplD/fJGe1TZnRAmWBYc9ngHtvelMVAa/3mnQrynTJubg4ffZlIJ8lnXtgzlpSEhpGRRiNXI5Gj4dDVcGhrJMpqOHI1ErpJUF6VRoNaQB5rpBprtBql9SkjyyYUt6RkItybEorXJ8anBuGTz+cWIIwdrUZaXfD0e+cfoxu+98w+t/+984+Ra1vLMlA3xqjaEKBXw1iyJMe25Lu2WnOO1vl+/XflZxzqWwAAAAAAWCQWVai+a9cuRVGkzs7OSZd3dnbq5z//+Yy3efjhh/VXf/VX2rp164zX9/f31+9j6n3Wruvv759WLeO6rtrb2+vbTHXdddfpU5/61D6f07IQjEuZnHTMm6QoSBYirQwnpw
dZFEthHCuKjaLYKIyNwihSXC4l1SzDyZS5O9ovb6xf3tiAWso70+B17oR++7R6llrPeeSvXrr1N3PJRGlIHtXrV6IomfZOpsItjYRKpsVDW6ORo5HA1nBoaSTMaCTIptPk6RR5NdZYMPX3bCRV06+lpTEUb6mH3RMhecuU88llE+dzniN7CQasA6WyPnjukZKkm+57ZsaJddtKAvUPnnukXto9Ns97ePDFxqgSxKpGyQKiQWxkWZKX9p+vafFU9F3lPVe+58h3HXkurykAAAAAACxWiypUb9bw8LB++7d/W1/96le1du3aeX3sj3/847rqqqvq50ulktavXz+v+zDnqmNSMCr96CvSU9+RykOS3yYd93bprA9L+TXJdHgU7POuZgzL41hhZBREsSrVilQakDPSJ3esX5nRfmXHBpQd71fr+A650dwGa7HjN4TlnZOC8zDfkfRqQ1EU1ae+RyuhRoPaJHjyVe8aD6wkEA9r/eOWRgJbY6E3y0eK06/Fy7akfEM1Sos3PRivB+H1WpXk+pasKz+zNEPxAxVERi/tHtMlZx+h92w+XFt++Lzu3tanUjlU0Xf1a5u6dek5R8i1Lb20e2zJLVIaRvGkBURjY2RbljzXUjbjqL3FV2s2o5zn1PvPqW8BAAAAAGBpWVSh+tq1a+U4jgYGBiZdPjAwoK6urmnbP/vss3r++ef19re/vX5ZHCdBnOu6evrpp+u3GxgYUHd396T7POWUUyRJXV1d2rFjx6T7DsNQu3fvnvFxJSmbzSqbXcZBazAu/fAm6cHPS41T4CM7pIf/X+kHN0ivv1rm7I+ouus5hUFZUdRQyRKnYXnaCxyGsazKoDKj/cqMDSg7NiB/fED58eTUq7wsaw6LOYwshbm1U8LyzvoCoZHXlizAusxFsUmqUarSSGA0Up1SkVKNNZJOhScVKpoIyQNL49HUn5ElyUm/lhbb0qSguxZ8t0w5v7dgfKWG4gdDEBn9as+48p6jS84+XL/7hqOUcSwFkdFYJVwSHerGGAWRScPzSJUoliUj27aVdW3ls466V02ub/Ez1LcAAAAAALAcLKpQ3fM8nXbaabrvvvt00UUXSUpC8vvuu09XXHHFtO2PO+44Pf7445Mu+8QnPqHh4WHdeOONWr9+vTKZjLq6unTffffVQ/RSqaQf//jH+t3f/V1J0tlnn63BwUE99thjOu200yRJ999/v+I41ubNm+fuCS9W1bEkUH/gur1vY2Lpgc/JGCk+9QN68on/VBxUlC3vUHa8X/7YDvnjA2otJ6F5dnxAdlTZ+/0dBJGbV1DontJvngTnYa5DxpnLZvX5EU4JxWuLa45Ua5fXusSlkVpY3rB9eVoovjeLv3rCttTQHz5lMU2vIQhvvL4hFM9lHALOBTZWjdLwfG5fGw5UbMykxUODKJYlyXNtea6j1YW0viX9CwQ/YyvrLr0PmgAAAAAAwOwsqlBdkq666ipdcsklOv3003XmmWfqhhtu0OjoqC677DJJ0nvf+16tW7dO1113nXzf16ZNmybdftWqVZI06fIrr7xSn/3sZ3XMMcdow4YN+pM/+RP19PTUg/vjjz9ev/Zrv6YPfvCDuvXWWxUEga644gpdfPHF6unpmZfnvagEo8mE+izY3/+C/FN/W69+7i/lPnP3nO6WsWwFuQ6FDWF54+R5nGlZ9NPmQWQaFtHUjIH4xPlkwc3G68rhgTz64vrZOLY1q8U18w2heEtDSM7UL+ZCFJtk8jwN0SMTy5Ylz7XlZxy1F3y1+pmkusVz5Lu2XGfxfwgFAAAAAAAOnkUXqr/73e/Wzp07dc0116i/v1+nnHKK7r777vpCoy+++KJsu7kA4+qrr9bo6Kg+9KEPaXBwUK997Wt19913y/f9+jZ/93d/pyuuuELnn3++bNvWu971Lt10000H9bktCcG49MhfTK58eSUmlvXTv5J78rulgxCqR15xIjCfEpyHuUMke2GnP4PINEyIa6+B+EzT5K
NVo/LibrRoimNbew3CC9lkardlynUFbyIYz7qE4lhYQRSrEiTT59UoljFGjm3Lc23lPEfdbb7yaX1LLuMo69qy6T8HAAAAAGDFs4wxS2sVuEWqVCqpra1NQ0NDKhaLC707+y8Yl247V9r1zOxvs/YY6ZJ/lr503D43NZarIN8xbco8yHcpLHQqzhQOYOf3rRpNDcHTafAZa1Q07fLKMgrF3Voo3rC45r6C8EJDiE4ojqXCGFOfPK+GsYI4kmQp41jyXEctnqtizlXeSwL0bMbm+AYAAACAObBs8jOseItuUh0LzHal8lBztymXJK+lfjbMrmqYNu9K6lrq0+btkrX/0+bVqCH4bgjFR6pmLx3jk6fFF/nah01xbUst/kx94WmfeMP5QjYJDhvPew6hIZafWn1LrQM9jGPZVlLfknUcdRQ9FX1PvmfXFxDNUN8CAAAAAACaQKiOyeJQ8tukkR2zv41fVByFeumNNyvId8q4ub1umoTicUPwrVcIxKdPiwezbKVZCjzHVj4Nv1tq0+L1/vCGDvGG80m9SnLecwkCsbIF0cT0eSWMZCQ5lqWMaymXcdRRzKpQq2/xHPmuQ30LAAAAAAA4YITqmMYc/xuyHvpSE9u/Q31DZd3+q+40EB+fEpRPdI8vr1DcaqhEaZgQn0UwXvAIxYHZMsaomvafV6MkRDeSMq6lrGOrNefoUD+nfNapT59T3wIAAAAAAOYKoTomqciTfcbvKPPwn89usVLLVnjG7+iFHba+/p+Vud/BgyjrWCp49qRO8YKfUSGbmegQ9xomxNPztelyQnHg4ItiU588T/rPjSxLyjrJAqJrWjytynnyM7b8dPqc/xYBAAAAAMB8IlTHJLGRBsZs9Zx7tdzvf26f2wfnXq3eMUtHdbTsc9uDzXekQkZq8WwVPDsNvRsX2cwo73tq8TPK+1kVsplJITk9ysCBi42RMck0uZl0XjKa/H3csN3U28RxcmpblryMpazraG1rVi1ZVzlvYgLdob4FAAAAAAAsMEJ1TOI6li7/x6f1N++9XK2S3Ie+MPPEumUrPPdqjZx2ua74m//U33/orKYfy3eMWjJGLRlNfHm2ChmrPhmez3rK+xkVsl46Re6p4GeV8z25maxk7/+ip8ByYcz0wDo2RlJDyN2w3aTLG4JtGSlOt1N6P5Ikq/HEyNQuMJJtWbKs5ErbkmxZkiVZVnqdJMuyZMuSY0uObcm2LDm2lX4v2ZYtz7WU9yb6z6lvAQAAAAAAixWhOiYJI6Oxaqi33/qotvyP92vdqe+T9+htcn7+HalckvyiouN+Q9XTf0e/GpEu/cqj8jO2ytVQZ3eEaTgeq+BKLZ6lFs9SIWOn3eLJxHg+l1M+l5XrZGRsV8ZuPHUky00SOWAJmTGwTie1JwLuNMROQ+56kD1lejuW6pdLkiyjerItyaoF2ya5fGqwXQuyLUuylATXVhpw14Jsx7brAffE+eR6x7Jk2xP3a1m18LshJLcmnzaG6BO3U3r/6eVMmQMAAAAAgGXAMqY+i4gDUCqV1NbWpqGhIRWLxYXenf02Xo108/3P6JYHnpUknbmhXR88q1tnbmiXn/VUrlT1k+d266s/6tNPntstSbr8DUfpsjPW6uWB7RMBueXWg3LZyfeyqFvB3HrFSe2GCpLJQfZegu/G+7GU3EhW/XRqsF0LkOtT2poIsmvhsyxr4nJLcm1Lti05lp0G20mY7TZMc1u1+7YnAmu7HqBbk4NsWw3bTA6265cTbAMAAAAAFshyyc8AJtUxSc5zdNlrN+grDz6r2Eg/eW53PTyfiW1Jl75mg4bGA1WLR8zfjmJRm23P9tRgu7GiZGofd1pIIhmrPrldi7hljNQQHEvJZzh2ulVt2ro+da2JENqx00DbspOAOw20G4PtGSevZwqs01C9PtXdcFqbGG+c/gYAAAAAAMDSQ6iOafKeo987/xjd8L1n9rnt751/jFzb0lg1moc9QzOmBtv1ELvJBSQbw+
4kwE4fYGqwXbtYDUF2bXJajfUkVn2CuxZs1wJp106C7XodSTqh7dr29CB7H4F1Yz1JLWi3rem1JQAAAAAAAEAzCNUxTd5z9eHXHyVJuum+Z+o90Y1sKwnUP3jukXpp99g87+HSMRcLSJqpCXb95MAWkJy8eGTtsoPTs21bE0G7PfVygm0AAAAAAAAsIXSqHyTLsRNqrBpqvBrpr3/wvP5tW59K5VBF39WvberWpeccIde2NFAqK4gW9yF0MBeQ3FvP9kTTdvq/+7GAZC2Mdmbo2Z4cdlsTk9f2RGC91yDbZgFJAAAAAACw8JZjfoaViVD9IFnOLwrj1UhGRo5lKYiNxiqhhsthU5Uvr1gtchB6tmvLRk7t2a5NQTe7gGQSYifB9sSCkum0tmXJdQ7+ApL0bAMAAAAAgOVsOednWFmof8E+5TxHL49U9NPn9yjr2jP2bFtKgm5ZJo21NXG5JgJlafoCkq/Us53UjkxeQNKp9W2zgCQAAAAAAACAeUaojlnxM44OXZ2TZGRbtlxb9cltx7ZZQBIAAAAAAADAikCojlkpZF1tWte20LsBAAAAAAAAAAvKXugdAAAAAAAAAABgqSBUBwAAAAAAAABglgjVAQAAAAAAAACYJUJ1AAAAAAAAAABmiVAdAAAAAAAAAIBZIlQHAAAAAAAAAGCWCNUBAAAAAAAAAJglQnUAAAAAAAAAAGaJUB0AAAAAAAAAgFkiVAcAAAAAAAAAYJYI1QEAAAAAAAAAmCVCdQAAAAAAAAAAZolQHQAAAAAAAACAWSJUBwAAAAAAAABglgjVAQAAAAAAAACYJUJ1AAAAAAAAAABmiVAdAAAAAAAAAIBZIlQHAAAAAAAAAGCWCNUBAAAAAAAAAJglQnUAAAAAAAAAAGbJXegdWC6MMZKkUqm0wHsCAAAAAAAALD613KyWowFLFaH6QTI8PCxJWr9+/QLvCQAAAAAAALB4DQ8Pq62tbaF3A9hvluGjoYMijmP19vaqtbVVlmUt9O4cdKVSSevXr9dLL72kYrG40LuDRY7jBc3geEEzOF7QDI4XNIPjBc3geEEzOF7QjOV+vBhjNDw8rJ6eHtk2rdRYuphUP0hs29ahhx660Lsx54rF4rJ8Ucfc4HhBMzhe0AyOFzSD4wXN4HhBMzhe0AyOFzRjOR8vTKhjOeAjIQAAAAAAAAAAZolQHQAAAAAAAACAWSJUx6xks1lde+21ymazC70rWAI4XtAMjhc0g+MFzeB4QTM4XtAMjhc0g+MFzeB4AZYGFioFAAAAAAAAAGCWmFQHAAAAAAAAAGCWCNUBAAAAAAAAAJglQnUAAAAAAAAAAGaJUB11t9xyi4444gj5vq/NmzfrJz/5yStu/4//+I867rjj5Pu+TjzxRP3rv/7rPO0pFoNmjpctW7bIsqxJX77vz+PeYqF8//vf19vf/nb19PTIsizddddd+7zNAw88oFNPPVXZbFZHH320tmzZMuf7icWh2ePlgQcemPbaYlmW+vv752eHsaCuu+46nXHGGWptbVVHR4cuuugiPf300/u8He9fVqb9OV54/7JyfeUrX9FJJ52kYrGoYrGos88+W//2b//2irfhtWXlavZ44bUFjT73uc/JsixdeeWVr7gdrzHA4kOoDknSP/zDP+iqq67Stddeq5/97Gc6+eSTdeGFF2rHjh0zbv/DH/5Qv/Vbv6X3v//9+o//+A9ddNFFuuiii7Rt27Z53nMshGaPF0kqFovq6+urf73wwgvzuMdYKKOjozr55JN1yy23zGr75557Tm9729t03nnnaevWrbryyiv1gQ98QPfcc88c7ykWg2aPl5qnn3560utLR0fHHO0hFpMHH3xQl19+uX70ox/p3nvvVRAEevOb36zR0dG93ob3LyvX/hwvEu9fVqpDDz1Un/vc5/TYY4/p0Ucf1Rvf+Ea94x3v0BNPPDHj9ry2rGzNHi8Sry1I/PSnP9Vtt92mk0466RW34zUGWJwsY4xZ6J
3Awtu8ebPOOOMM3XzzzZKkOI61fv16feQjH9Ef/dEfTdv+3e9+t0ZHR/Uv//Iv9cvOOussnXLKKbr11lvnbb+xMJo9XrZs2aIrr7xSg4OD87ynWEwsy9Kdd96piy66aK/b/K//9b/03e9+d9IbxIsvvliDg4O6++6752EvsVjM5nh54IEHdN5552nPnj1atWrVvO0bFqedO3eqo6NDDz74oF73utfNuA3vX1Azm+OF9y9o1N7eruuvv17vf//7p13HawumeqXjhdcWSNLIyIhOPfVU/cVf/IU++9nP6pRTTtENN9ww47a8xgCLE5PqULVa1WOPPaYLLrigfplt27rgggv0yCOPzHibRx55ZNL2knThhRfudXssH/tzvEjJm4bDDz9c69ev3+fkBlYuXluwP0455RR1d3frTW96k37wgx8s9O5ggQwNDUlKgoy94TUGNbM5XiTev0CKokjf/OY3NTo6qrPPPnvGbXhtQc1sjheJ1xZIl19+ud72trdNe+2YCa8xwOJEqA7t2rVLURSps7Nz0uWdnZ177aXt7+9vanssH/tzvBx77LH6+te/rn/6p3/SN77xDcVxrHPOOUe/+tWv5mOXsYTs7bWlVCppfHx8gfYKi1V3d7duvfVWffvb39a3v/1trV+/Xm94wxv0s5/9bKF3DfMsjmNdeeWVes1rXqNNmzbtdTvev0Ca/fHC+5eV7fHHH1dLS4uy2aw+/OEP684779QJJ5ww47a8tqCZ44XXFnzzm9/Uz372M1133XWz2p7XGGBxchd6BwAsf2efffakSY1zzjlHxx9/vG677TZ95jOfWcA9A7CUHXvssTr22GPr58855xw9++yz+vM//3P97d/+7QLuGebb5Zdfrm3btunhhx9e6F3BEjDb44X3Lyvbscceq61bt2poaEjf+ta3dMkll+jBBx/ca1CKla2Z44XXlpXtpZde0u///u/r3nvvZYFaYIkjVIfWrl0rx3E0MDAw6fKBgQF1dXXNeJuurq6mtsfysT/Hy1SZTEavfvWr9Ytf/GIudhFL2N5eW4rFonK53ALtFZaSM888k2B1hbniiiv0L//yL/r+97+vQw899BW35f0LmjlepuL9y8rieZ6OPvpoSdJpp52mn/70p7rxxht12223TduW1xY0c7xMxWvLyvLYY49px44dOvXUU+uXRVGk73//+7r55ptVqVTkOM6k2/AaAyxO1L9AnufptNNO03333Ve/LI5j3XfffXvtgTv77LMnbS9J99577yv2xmF52J/jZaooivT444+ru7t7rnYTSxSvLThQW7du5bVlhTDG6IorrtCdd96p+++/Xxs2bNjnbXiNWbn253iZivcvK1scx6pUKjNex2sLpnql42UqXltWlvPPP1+PP/64tm7dWv86/fTT9Z73vEdbt26dFqhLvMYAixWT6pAkXXXVVbrkkkt0+umn68wzz9QNN9yg0dFRXXbZZZKk9773vVq3bl298+v3f//39frXv15f+tKX9La3vU3f/OY39eijj+ov//IvF/JpYJ40e7x8+tOf1llnnaWjjz5ag4ODuv766/XCCy/oAx/4wEI+DcyDkZGRSVM3zz33nLZu3ar29nYddthh+vjHP67t27frb/7mbyRJH/7wh3XzzTfr6quv1vve9z7df//9+t//+3/ru9/97kI9BcyjZo+XG264QRs2bNDGjRtVLpf1ta99Tffff7/+/d//faGeAubR5ZdfrjvuuEP/9E//pNbW1nqvaFtbW/0vW3j/gpr9OV54/7JyffzjH9db3vIWHXbYYRoeHtYdd9yhBx54QPfcc48kXlswWbPHC68tK1tra+u09TwKhYLWrFlTv5zXGGBpIFSHJOnd7363du7cqWuuuUb9/f065ZRTdPfdd9cXw3jxxRdl2xN/2HDOOefojjvu0Cc+8Qn98R//sY455hjdddddr7jYE5aPZo+XPXv26IMf/KD6+/u1evVqnXbaafrhD39IJ+UK8O
ijj+q8886rn7/qqqskSZdccom2bNmivr4+vfjii/XrN2zYoO9+97v6gz/4A91444069NBD9bWvfU0XXnjhvO875l+zx0u1WtVHP/pRbd++Xfl8XieddJK+973vTboPLF9f+cpXJElveMMbJl3+13/917r00ksl8f4FE/bneOH9y8q1Y8cOvfe971VfX5/a2tp00kkn6Z577tGb3vQmSby2YLJmjxdeW7AvvMYAS4NljDELvRMAAAAAAAAAACwFdKoDAAAAAAAAADBLhOoAAAAAAAAAAMwSoToAAAAAAAAAALNEqA4AAAAAAAAAwCwRqgMAAAAAAAAAMEuE6gAAAAAAAAAAzBKhOgAAAAAAAAAAs0SoDgAAAAAAAADALBGqAwAAAE365Cc/KcuyFno3AAAAACwAQnUAAAAsiC1btsiyrPqX67pat26dLr30Um3fvn2hdw8AAAAAZuQu9A4AAABgZfv0pz+tDRs2qFwu60c/+pG2bNmihx9+WNu2bZPv+wu9ewAAAAAwCaE6AAAAFtRb3vIWnX766ZKkD3zgA1q7dq0+//nP6zvf+Y7+23/7bwu8dwAAAAAwGfUvAAAAWFTOPfdcSdKzzz5bv+z+++/Xueeeq0KhoFWrVukd73iHnnrqqUm3u/TSS3XEEUdMu7+Z+s8ty9IVV1yhu+66S5s2bVI2m9XGjRt19913T7v9ww8/rDPOOEO+7+uoo47SbbfddhCeJQAAAIClikl1AAAALCrPP/+8JGn16tWSpO9973t6y1veoiOPPFKf/OQnNT4+ri9/+ct6zWteo5/97GczBumz8fDDD+v//J//o//5P/+nWltbddNNN+ld73qXXnzxRa1Zs0aS9Pjjj+vNb36zDjnkEH3yk59UGIa69tpr1dnZeTCeKgAAAIAliFAdAAAAC2poaEi7du1SuVzWj3/8Y33qU59SNpvVr//6r0uSPvaxj6m9vV2PPPKI2tvbJUkXXXSRXv3qV+vaa6/V7bffvl+P+9RTT+nJJ5/UUUcdJUk677zzdPLJJ+vv//7vdcUVV0iSrrnmGhlj9NBDD+mwww6TJL3rXe/SiSeeeKBPGwAAAMASRagOAACABXXBBRdMOn/EEUfoG9/4hg499FD19fVp69atuvrqq+uBuiSddNJJetOb3qR//dd/PaDHrQXqtfssFov65S9/KUmKokj33HOPLrroonqgLknHH3+8LrzwwgN6bAAAAABLF53qAAAAWFC33HKL7r33Xn3rW9/SW9/6Vu3atUvZbFaS9MILL0iSjj322Gm3O/7447Vr1y6Njo7u1+M2BuU1q1ev1p49eyRJO3fu1Pj4uI455php2820PwAAAABWBibVAQAAsKDOPPNMnX766ZKSWpfXvva1+u///b/r6aefbup+pi5GWhNF0YyXO44z4+XGmKYeFwAAAMDKwqQ6AAAAFg3HcXTdddept7dXN998sw4//HBJmjFg//nPf661a9eqUChISqbMBwcHp21Xm3Zv1iGHHKJcLqdnnnlm2nXNBv4AAAAAlg9CdQAAACwqb3jDG3TmmWfqhhtu0OrVq3XKKafo9ttvnxSYb9u2Tf/+7/+ut771rfXLjjrqKA0NDek///M/65f19fXpzjvv3K/9cBxHF154oe666y69+OKL9cufeuop3XPPPft1nwAAAACWPkJ1AAAALDof+9jHNDAwoC1btuj666/Xyy+/rLPPPltf/OIX9ZnPfEZvfOMb1dbWpk9+8pP121x88cUqFAp65zvfqRtvvFHXXXedNm/erFe96lX7vR+f+tSnJEnnnnuuPv/5z+tP//RPdd5552njxo0H+hQBAAAALFGE6gAAAFh0fvM3f1NHHXWUvvjFL+q8887T3XffrTVr1uiaa67RF7/4RZ111ln6wQ9+oA0bNtRvs2bNGt15553K5/O6+uqrdfvtt+u6667T29/+9v3ej5NOOkn33HOPDjnkEF1zzTX6+te/rk996lN65zvfeTCeJg
AAAIAlyDKsxAQAAAAAAAAAwKwwqQ4AAAAAAAAAwCwRqgMAAAAAAAAAMEuE6gAAAAAAAAAAzBKhOgAAAAAAAAAAs0SoDgAAAAAAAADALBGqAwAAAAAAAAAwS4TqAAAAAAAAAADMEqE6AAAAAAAAAACzRKgOAAAAAAAAAMAsEaoDAAAAAAAAADBLhOoAAAAAAAAAAMwSoToAAAAAAAAAALNEqA4AAAAAAAAAwCz9/w0GsJs7PqJRAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "==== Loss Comparison: 4-bit vs 8-bit ====\n", + "\n", + "Training Loss (4-bit): 0.1754 ± 0.0877\n", + "Training Loss (8-bit): inf ± nan\n", + "\n", + "Eval Loss (4-bit): 0.4618 ± 0.0725\n", + "Eval Loss (8-bit): 0.4909 ± 0.0776\n" + ] + } + ], + "source": [ + "# Visualize training and validation loss across quantization methods\n", + "plot_loss_metrics(flflow_4bit, flflow_8bit)" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "c3be3b11", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAABjUAAAJOCAYAAAD/KYUYAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAA6xFJREFUeJzs3XmcjeX/x/H3ObMbhmEW+05IjIhQochS1rL0s04lS0pGCwnJGkKLiBCK0LdQ36K+U5OdskQ1tFkSw9hmbDPDnPv3x90cc2ZhmDNzzsy8no/Hecx9rvu67/O5z7kP93U+93VdFsMwDAEAAAAAAAAAALg5q6sDAAAAAAAAAAAAyAqSGgAAAAAAAAAAIE8gqQEAAAAAAAAAAPIEkhoAAAAAAAAAACBPIKkBAAAAAAAAAADyBJIaAAAAAAAAAAAgTyCpAQAAAAAAAAAA8gSSGgAAAAAAAAAAIE8gqQEAAAAAAAAAAPIEkhoAALtDhw7JYrHYH1FRUdneZ79+/ez7a968ebb3h5tXsWJF+2fw6quvujqcAin19+qDDz5wdTgAACADOXEtfCteffVVewwVK1Z0SQyu9sEHHzh8Fsh/3OX7BiBvIqkBIJ3atWs7XFyUKlVKV69edXVY+VbqH/2z+uCHaedK22jigtr50r7HKQ9PT0+VKFFCd999tyZMmKC4uDhXhwoAAPKBqKioLF1X9+vXz9WhOk3aYwsJCVFiYmK6emfPnpW/v79DXWclTwrCjRyGYejLL79U7969Vb16dQUEBMjLy0uhoaF64IEH9Prrr+v48eOuDhMA8jVPVwcAwL388MMP+uWXXxzKYmJitG7dOj388MMuigq5pXjx4po2bZr9eZUqVbK9zx49eqh27dqSpHLlymV7f8hfkpOTdebMGW3fvl3bt2/XRx99pB07dqhIkSKuDg0AABQwOXEt7EqxsbFavnx5usTN/PnzdenSJdcElUV33XWXw2fhLv7++2/93//9nzZt2pRu3cmTJ/Xtt9/q22+/VXR0dL5N6jhLfvu+AchdJDUAOMjswuuDDz5w26TGxYsX5efnJ6s1b3Y+S/2jf4pJkybp7NmzkqTKlStr0KBBDuubNGmS6f6y834EBATo+eefv+ntrqdNmzZq06aNU/eJvG/gwIGqUqWKTp8+rY8//liHDh2SJO3fv1+LFi3Ss88+69oAAQBAvtK9e3c1aNAgXXnq6/CcuBZ2tbffftshqZGcnKx3333XdQHdQHx8vAICAnT77bfr9ttvd3U4Dk6cOKFmzZrp4MGD9rJKlSqpQ4cOCg0N1dmzZ7Vt27YMEx64JikpSYZh5Mvv
G4BcZADAvxISEozAwEBDkiHJqF69un3Z29vbOHXqlL3u77//bl8nyfjuu+/S7a9hw4b29U8++aTDuj179hjh4eFG5cqVDV9fX8Pf398ICwszJk6caFy4cCHdvipUqGDf19ixY42NGzcaDzzwgBEQEGBIMs6ePWtcuXLFeOWVV4y2bdsalStXNooWLWp4enoaxYsXN+655x7jrbfeMpKSkjI89vnz5xu1a9c2fHx8jLJlyxrDhw83Lly4kO5107rZ48iq1K/brFmzXHs/Dh48mOnnOnbsWHt5hQoVjHPnzhnPP/+8Ub58ecPLy8uoVKmSMXHiRMNmsznss2/fvpkeS+rXWrRokfH1118bzZs3N/z9/Y3ChQsbbdq0MX7++ecM36Nb/cwysmjRohuezxk5cOCAMXDgQKN69eqGn5+f4efnZ1SrVs146qmnjOjo6HT1L1y4YIwbN86oV6+eUbhwYcPT09MIDg426tatazz55JPGV1995VB/w4YNRqdOnYzSpUsbXl5ehr+/v1GhQgWjTZs2xtixY41z585lKc6078nOnTuNNm3aGAEBAUbhwoWNBx980Pjxxx/t9f/44w/DarXat1m/fn26fTZo0MC+fuDAgTeM4XrvcXR0tMO6AQMGZLiPTz75xGjXrp0RGhpqeHl5GcWKFTMaN25sTJ8+3bh48aJD3eudy4ZhGM2aNbOv69u373W3W758udGwYUPDz8/PKFasmPHoo48aR44cSRfflStXjMmTJxtVq1Y1vL29jcqVKxvjx483kpKS0p3rAAAgZ3333Xc3/f9vTlwL79692xg0aJDRsGFDo3Tp0oavr6/h4+NjlC9f3ujWrZuxcePGdHGkfa2sSh176mu51K/xySef2Ms9PDyu+zoJCQnG22+/bdx7771GYGCg4eXlZZQsWdJ49NFHjS1btjjUTX1tldEj9f7Tfi6rV682GjdubPj7+xtFixY1DCP9tWNaV65cMRYsWGC0atXKCAkJMby8vIygoCCjUaNGxquvvupQ11nX1D169HCIadCgQcaVK1fS1fvtt9+MDz/8MF35zVzLZvQ+LVmyxKhbt67h6+trVKlSxZgxY4b9vRg/frxRsWJFw9vb26hRo4Yxb968dPtLe/0bHR1tdOnSxQgMDDT8/PyMpk2bGt9880267T799FOjV69exh133GF/r/39/Y2aNWsaTz/9tHHw4MEbvta+ffuMjh07GsWLFzckGbt3777u9+3KlSvGzJkzjbvvvtsoWrSo4eHhYRQvXtyoVauW0bt3b2P58uXpXvPo0aPG888/b9SuXdvw9/c3fHx8jAoVKhg9e/Y0tm/fnq7+rX6nAbgHkhoA7FasWOFwUbF161bDy8vL/vytt95yqH/vvffa1z311FMO6/744w+HfaW+6H333XcNT0/PTC94a9WqZRw/ftxhf6l/lG3cuLHDBXjKj/jnz5+/7oW0JKNly5bG1atXHfY9YsSIDOs2bNjQCA0NzfQH8ls5jqy6maSGM9+PrDbkSpQoYdSsWTPDfY4ePdoh1qwmNZo2bWpYLJZ0+ytRooRx8uRJp3xmmbmVpMbKlSsNX1/fTN9bHx+fdBfbzZs3v+7n0b17d3vd//3vf+k+17SPjBInGUl9vjzwwAOGj49Pun35+fk5NHgfeugh+7quXbs67O+vv/5y2HbHjh03jOF673F8fLzDulGjRjlse/XqVaNbt27XfS9q1qxpHDt2zL6Ns5Ia99xzT4avV61aNePy5csO+0zb0E15pH4vJZIaAADkhpxMatzMtfDbb7993WsYi8WSLjZnJDU6dOhgv7ZOfS1333332a9V27Ztm+nrnDx50ggLC8s0bqvVasyaNcte/1aTGqnblJKylNQ4ffq0cdddd2X6Win7MAznXVMfO3bMoa0SFhZmJCcnZ+lzuZVr2bTvU/369TM93zp27JjhugULFjjsL/VnVL9+ffsNcWk/15UrVzps98gjj1w39oCAAGPv3r2Z
vla9evUMf39/h21ulNRI3YbM6NGoUSOH1/v+++8dbtDM6LjeeOMNh21u9TsNwD0w/BQAu9RDT9155526++671bJlS3311Vf29c8884y9Tnh4uDZu3ChJ+uSTT/TOO+/Iy8tLkrR8+XJ7vRo1aqhx48aSpC1btmjIkCGy2WySpLvvvltt2rTR+fPntXjxYp06dUq//vqr+vTpo6+//jrDOLdu3apChQqpV69eKlOmjHbv3i0PDw9ZLBZVrlxZd999t8qUKaPAwEBduXJF+/fv16pVq3T16lX973//03/+8x9169ZNkjmHyOuvv27fd0hIiPr27avz589r4cKFSkpKyjAGZxyHszjz/ciq06dP6+zZs+rTp49Kly6t999/X6dOnZIkvfnmm3rllVfk7e19U/vcvHmzatSooS5dumjPnj368ssv7a+1YMECjRgxQtKtf2bO9Mcff6h37972iRdLlCihvn37ymKx2D//xMRE9e3bV/Xr11e1atUUHR1tn4DcarWqT58+ql69uk6dOqWDBw+mm5x83rx5Sk5OlmR+h7p27SpPT08dOXJEe/bs0a5du24p9sjISFWvXl1du3bV0aNHtXTpUtlsNl2+fFnh4eHav3+/PDw89Mwzz+i///2vJGnNmjU6deqUgoKCJEmrVq2y7+/222/XXXfddUuxSNKZM2ccPk+LxaKuXbs61Jk0aZJWrlxpf3733XfrwQcfVHR0tD2W6Oho9ezZU99+++0tx5KRTZs26a677lLr1q313XffafPmzZKk33//XatXr1aPHj0kmf8Gfvzxx/btqlatqm7duumff/7R0qVLnRoTAAC4eevWrbNfr6bWvXv3m5737WauhX18fHT33XcrLCxMJUqUUOHChRUXF6fIyEj98MMPMgxDw4cPV/fu3eXn55f9A/1XtWrV1K5dO/33v//VZ599pqNHj+r06dPasGGDJNmvYTLTu3dv7dmzR5JUpEgR/d///Z/Kli2rzZs3a926dbLZbBo2bJgaNGigpk2batCgQXr44Yf1wgsv2PeResivokWLZvg6GzduVFBQkHr06KESJUqkm98xs9h++OEH+/OaNWuqXbt28vHx0e7du7V9+3b7OmddU3/33XcyDMP+vG/fvlke7tcZ17I7d+5U48aN1apVK61YsUIHDhyQJI0fP16S1KxZM913332aP3++YmJiJElTp07V448/nun+SpcurUGDBun8+fNasGCBEhMTZbPZ9NRTT+nBBx+0f2bFihXTgw8+qJo1ayowMFDe3t46ceKEPvvsMx05ckTx8fF66aWX7O23tHbv3i1PT0/17t1b1apV0/79++Xr65vp+3XhwgV9+OGH9uePPPKI7rzzTsXFxenw4cP6/vvvHeqfO3dOXbp0sQ/f7Ofnp/DwcAUEBGj58uU6fPiwbDabnn/+edWvX1/NmjVL95o50b4FkMNcnVUB4B6OHTvmcAfLtGnTDMMwjCVLljjcpZD6DowLFy4YhQsXtq/7/PPP7etq1aplL3/99dft5Z07d7aXN2/e3OHulh07dji81k8//WRfl/pOcw8PD2Pnzp2ZHsuJEyeMNWvWGO+++64xffp0Y9q0aUbt2rXt2z/++OP2ugMGDHC4eyP1UEdp7w5Kfdf/rR5HVt1MTw1nvh9ZvTtNksOdWatXr870PMlqT41y5coZ8fHx9nX16tWzr+vSpYu9/FY/s+u52Z4aQ4cOdYhh37599nX79u1z6O4/dOhQwzAMY9euXQ53YqXtxnz16lXj0KFD9ucdOnSw18+oe/Xx48cz7KaekdTnS1BQkEMX+4kTJzoce0qXc5vN5jAEXeo7m1LfKZb2jqfMpH2PM3oEBgam66qfnJxs76YumT2TUvcuevHFF9Pd9WUYzuup0bBhQ/swbUlJSUZISIh9XUREhH271q1b28uLFi1qnD59OtP3mJ4aAADkvLQ9NTJ7pL5GyIlr4RQ//fST8eGHHxpvvvmmMW3aNGPChAkO
22zYsCHD17rVnhrDhw83vv76a/vzkSNHGuHh4fbnO3fudLhOT/06P/30k8O+vv32W4fXadeunX1d586dM40hs2ue1HUCAgKMw4cPp6uTWU+NvXv3OpS3a9cu3ZC6f/75p33ZWdfUU6dOdXjdtMPGZuZWr2UNw/F9qlWrlv04169f77Cubt269n3OnTvXYV3q9lXq618vLy+HYaM++ugjh+3mz5/vcBxJSUnGhg0bjAULFhgzZ840pk2b5nA++fj4OHwOaXvurF69Ot17k9n37cyZMw7nR2JiosN2NpvN+Ouvv+zPZ86c6bCfL7/80r7uxIkTDr9ZdOzY0b4uu99pAK5FTw0AkqSlS5fa72CxWCzq3r27JKlTp07y9fVVQkKCJGnRokWaMWOGJMnf319du3bVokWLJJm9Mx5++GHt3btXv/76qyTJw8NDvXv3tr9Oyl3OkhQVFSUPD49MY9qyZYvq1KmTrrxt27a6884705VfvnxZgwcP1pIlS+w9KDJy9OhR+/KPP/5oX65fv77DZHS9evVS//79dfXq1XT7cMZxOIsz34+s8vDw0IABA+zPb7vtNof1KXfJ3IzevXurSJEi9ufVq1fX7t270+3vVj8zZ9q6datDDKknmKxdu7bq169vv3sspW7NmjVVokQJnT59WtHR0apatarq1aun6tWrq06dOmrZsqUqVKhg38+9996rtWvXSpL69eun9957T9WrV9dtt92mpk2bqmHDhrJYLDcde4cOHRzulOvVq5dGjRplf75z5061bNlSFotFQ4YMsU/Y/f777ysiIkIHDx7Uzp07JUleXl7q1avXTceQmccffzxdr6EDBw7ozJkzDvGm/r717dtXU6dOtT/funWrwsLCnBbTk08+ae+B5uXlpUqVKunkyZOSMj8v27Rpo+LFizvEnPo9BgAAedvNXAvv2rVLffr0uWEPhFu5Jr+RVq1aqVatWvr11181b948Xbx4UZLUtGnTDNsPKVK3dSTp/vvvz7Tuli1bshVjnz59VL58+SzXTzsJ99ixY+3XaikqV65sX86pa+qscta1bLdu3ezHWbFiRYd1Xbp0se+zSpUqDuvOnj3r0MZKce+99zrsp3v37urXr5+uXLkiyWwTPPnkk5Kkjz76SM8991yGPZ1SJCYm6tSpUypVqlS6dbVr11bHjh0z3TatwMBA3X777frll18UHx+vSpUq6a677lK1atV0xx136IEHHlClSpXs9VO3zYKDg9W2bVv785CQELVt29beIyZ13dRyon0LIGdlra8cgHwv9dBTTZo0sXfBLlKkiB566CH7uo8++sjhB+PU3VnXrFmjS5cuadmyZfaytm3bOlzYpL6gu5HY2NgMy2vUqJFh+ciRI/XBBx9c9wd8SfYhgySzq2qKkiVLOtTz9PS0D7eTljOOw1mc+X5kVWhoqEOXYR8fH4f1N3rNjKS9OE+9z9T7u9XPzJlSf/6hoaHp1qcuS7kA9vX11cqVK+2Ntr/++kv/+c9/NHnyZD322GMqU6aMPWEoSc8995x69+4tDw8PJSYmKioqSvPmzdPw4cN19913q06dOjp+/PhNxx4SEpJprJLj+9uvXz97Iyg6OlqbN2926Dr/0EMPpdtfVg0cOFDjx4/Xvffeay9744039NRTTznUS/tdSxtv2ueZNTiMVMMFSFk/72/lvLzRewwAAHLfokWLZJjzijo8mjdvftP7yuq18OXLl/Xwww9naUilW7kmz4qU4YNPnz5tv1Ft6NCh190mN9s6mbVlMpM2ttQ/bmfEWdfUZcqUcXi+f//+W4r3Vq9lS5cubV9OOwxS6nWeno73LmfWLkt7verh4aESJUrYn6dc26Yk5a6X0EiR2Tl8s5+xJC1btky1atWSJB07dkxr1qzR9OnT1bdvX5UvX14RERH2urfSNsuojrPbtwByFj01AGj79u2Kjo62P9+8eXOmd6ucPHlSX375pTp06CBJuueee1StWjX9/vvvunjx
otasWeMwrnx4eLjD9sWLF7ff5XzPPfdc946NJk2aZFju7++fYfmKFSvsy3fccYeWL1+u2267TZ6enurWrZvDPAApihUr5nBsqV29ejXTizdnHIezOPP9yKq0d0M54+6mrO7zVj8zZ0p9F/6JEyfSrU9dFhgYaF++//77dfDgQe3atUt79uzRH3/8oS1btmjjxo1KSkrSCy+8oA4dOqhq1ary9PTUkiVL9MYbb2jLli06cOCADhw4oM8++0xnz57Vzz//rBEjRmjx4sU3FXva9yxt/Knf3yJFiqhfv356++23JZm9Nfbt22dfn/b7fTO6d++u5s2b6+WXX9bDDz/sMHfP448/bk92pH6vM4o37fOU9zvtGMeXL1+2L9tsNv35559ZivNmzsvTp09LuvF7DAAA8rasXh9s2LDB4Qfz4cOHa8SIEQoKCtKlS5cyvY53pj59+ujll1+2/5hbrlw5de7c+brbpL3+eu2115w630dqN/sepI3t4MGDCg4OzrS+s66pW7RoIYvFYr9RZsmSJXr22WdvOK/GrV7LppX2nEstbSIjK9JeryYnJ9uvZaVrbYJVq1bZf9C3WCxatmyZ2rdvL39/f3355ZcON0Bm5lbO8zp16uiXX37Rvn37tGvXLv3+++/atWuXvvrqK9lsNs2cOVPt27dXixYtbrltllpOtG8B5CySGgAcemlktX5KUkMyf9h8+eWXJUmjRo3S4cOHJUlBQUFq3769w7ZNmjTR6tWrJUkxMTF66qmnFBAQ4FDn8uXLWrVq1U0nA1JfhLVo0cI+LFFsbGy6SZhTNGjQwD6Uzo8//qg//vhDVatWlSR9+OGHmQ5jlJPH4Sy38n7kBbf6mTlTkyZNtGPHDklm1+xffvnF/v7+/PPP9vhS6kpSQkKCDh48qJo1a6pBgwb2SRMNw1BgYKDi4uJks9n0008/qWrVqjpw4IDKlSun4OBgh6RZ7dq17Xcm3cpk4WvXrlV8fLz9fE09CZ9kDqeV2pAhQ/TOO+/IMAwtX77cfgdWaGio2rVrd9Ovn5bVatVbb72lGjVq2IfAGzNmjL777jtJZtfv4sWL2+/A+vDDDzVgwAB7F/u0DdCU9zt1ckaStm3bZo93/vz5Tu9B1aBBA61fv16SORHpmTNn7A2stO8xAAAoGFJfj0tSz5497b2KU/d+zUmFChXSk08+qWnTpkmSBg0adMMfwdO2X4KCgjRo0KB09X755Zd0d757enrar8cvXbqUndAzdM899zg8Hz9+vD777DOHYzp8+LB9WFdnXVOXKlVK3bp1s984tnv3bg0dOlSzZs1KNxTx77//rh07dqhnz563fC2b0zZu3KhDhw7ZeyWvWLHCPvSUdK1NkPocLlq0qLp162ZP5OTkObxnzx6FhYXpjjvu0B133GEvr1u3rvbu3SvJ/NxatGihJk2a2GOJjY3VV199ZR+C6uTJk/abp6Tce38B5DySGkABl5CQ4NCzolKlSmrYsGG6evv27bPPk/HFF1/o1KlT9gvyPn36aPTo0UpOTtbBgwft2/Tq1SvdHQ/Dhw/XmjVrZBiG/vjjD9WuXVtdunRRaGio4uLitG/fPn3//fe6ePGi+vTpc1PHctttt+nnn3+WZP5oabVaVahQIS1dujTTHzCfeOIJzZs3T4ZhKDk5Wffdd5/69Omj+Ph4LViwINPXysnjcJZbeT/yglv9zG7GgAEDMhx7tn79+nrvvff09NNPa86cOUpMTJTNZlOzZs3Ut29fWSwWLV682H43k7e3t55++mlJZhfuWrVq6fbbb1fDhg1VunRp+fn5adOmTYqLi7O/RsqP8TNnztTSpUvtY8aGhobqzJkzWrJkSbq6N+PUqVO666671LVrVx09elRLly61r6tSpYpatGjhUL969ep68MEHtX79eocu5b17976lu8IyUrVqVXXv3t0+dF1UVJS2bNmiJk2ayGq1atiwYRo9erQkcxzce+65Rw8++KD279/v0Jhq0aKF
6tatK0kKCAhQ9erV9dtvv0mSJk6cqN27d+vy5cv69ttvnRJ3ak888YQ9qREXF6dGjRqpe/fu6d5jAABQcKQdl79Xr17q3r27Dh06lKvXBy+++KL9x9y013oZqVu3rlq1aqVvvvlGknmTy1dffaX69evLarXq8OHD2rJli6KjozV27FiHREOZMmXsN7m98cYbOn36tPz8/FSvXj098MAD2T6WO+64Q+3atdOXX34pyWyb1q1bV+3atZOvr69++eUXbdiwwd5725nX1DNnztS2bdvsx/fOO+/oq6++Uvv27e373b59uzZu3Kg+ffqoZ8+et3wtm9OuXLmipk2bqnfv3jp//rxDO6po0aLq2rWrJMdz+Ny5c3rooYfUpEkTbdq0SV9//XWOxXf33XerdOnSuvfee1W6dGkFBATop59+sic0pGufW9++fTV+/Hh7AuaRRx7R448/roCAAC1btkwXLlyQZPa+eO6553IsZgC5zCXTkwNwG8uXLzck2R8ffvhhhvUiIyMd6s2aNcthfdu2bR3WSzL27t2b4b5mz55teHp6pquf9pFahQoV7OVjx47N0rGkPEqVKmW0atXK/rxZs2YO240YMSLD7e68804jNDTU/nzcuHHZPo6sSn28aePNyffj4MGDDnW/++47+7qxY8fayytUqODwWtfbrm/fvpkeS+ptFi1a5LDuetvd6meWmUWLFt3wc0wbx8qVKw1fX99M6/r4+BjLly+31z9+/PgN99+wYUPjypUrhmEYxoABA65b12q1Gp999lmWji/1+dKkSRPDy8sr3f58fX2N77//PsPtv/jii3T1f/nllyy9dmbvcepzxDAMY9++fYbFYrGvb9u2rX3d1atXja5du173/ahZs6bxzz//OOzz/fffz7Bu5cqVjRo1atif9+3b177N9c5lwzCMZs2aZbidYRiZxti8efPrnusAAMD5vvvuu5v+/zcnroXbtGmT4fVB6mvdtPFd77WuJ/X+hg8ffsP6qWNI+zonTpwwwsLCbnj9mrYtMmzYsAzrPf300xnGmdnnkvbaMbVTp04Zd911V6YxFS1a1F7XmdfUhmEYhw4dMho3bnzD9yX1deKtXstm9j6lPd9Sr0t73h88eNC+LvV17N13320UL148w/cjdRvm9OnTRunSpbN0Dmf2WmmvmTM7jtTfGx8fn+u+X5UqVTLOnTtnr//9998bxYoVu+7nPH36dIfXv9XvNAD3wEThQAGXeuipokWLqkuXLhnWa9GihcOEuWmHrEo7tn79+vUduommNnjwYO3evVtPPfWUqlevrkKFCsnT01OhoaFq1qyZRo8erZ9++ummj6VHjx5auXKl6tatKy8vL5UoUULdu3fXtm3bHCZPS2vy5MmaN2+ebr/9dnl7e6tUqVIaMmSIIiMjFR8fb6+X9g6enDoOZ7nV9yMvuNXPzJm6du2qPXv2aODAgapatap8fX3l6+urKlWqqH///tq9e7d69Ohhrx8YGKh33nlHjz32mGrVqqXixYvLw8NDAQEBatCggcaPH6/IyEh774cnnnhCL730ku677z6VK1dOvr6+8vb2Vrly5dS1a1d9//336tSp003H3apVK23YsEGtWrVSkSJF5O/vby+77777MtymXbt29iG+JKlRo0b2ifucpXbt2g7D1X311Vf2oQA8PDy0cuVKrVq1Su3atVNISIg8PT1VtGhRNWrUSNOmTdMPP/yQ7rx+4oknNH/+fNWsWVPe3t4qWbKkBg0apB07duTI5N0fffSRJk6cqMqVK8vLy0sVK1bUqFGjHLq8AwCAguU///mPnnvuOZUqVUre3t6qWrWqJk2a5LQexjklJCRE27dv15w5c3T//fcrKChIHh4e8vf3V40aNdSrVy999NFHeuGFFxy2mzhxooYOHaqyZcumG5bJWUqUKKHNmzfr/fffV8uWLRUcHCxPT08FBgaqfv36DnfjO/uaukKFCtq8ebM+//xz9ezZU1WrVpW/v788
PT0VEhKili1bavbs2Zo6dap9m1u9ls1Jt912m3bs2KFHH31UgYGB8vPzU5MmTfTll186tGGKFy+uTZs2qUuXLgoICJCfn5/uuusuffrpp+rXr1+OxTdnzhyFh4erTp069s+3cOHCqlOnjl588UVt375dRYsWtde/77779PPPP2v48OG6/fbbVahQIXl7e6t8+fLq2bOntmzZouHDh+dYvAByn8Uw/p3lCAAKqMuXL2c48d0XX3zh8CPr5s2bGYPTTfCZ5b42bdrYh1eaO3euBgwY4OKIAAAAAGRV8+bN9f3330syh2y62bk1AcCdMKcGgALv5Zdf1p49e9S+fXtVqlRJV69e1Y8//qh3333XXqdBgwZq3LixC6NEanxmuWP//v36559/tG3bNvuYucWKFVPPnj1dHBkAAAAAACioSGoAKPAMw1BUVJSioqIyXF+1alWtWrVKFosldwNDpvjMcseUKVO0ePFih7KJEyeqcOHCLooIAAAAAAAUdCQ1ABR4nTp10okTJ7R9+3bFxsYqISFBxYoVU+3atdW5c2c9+eSTKlSokKvDRCp8ZrnLx8dHVatW1bBhw/TEE0+4OhwAAAAAAFCAMacGAAAAAAAAAADIE6yuDgAAAAAAAAAAACArSGoAAAAAAAAAAIA8gTk1ssBms+nYsWMqUqQIk84CAAAAmTAMQ+fPn1fp0qVltXL/VAraEwAAAMCNZbU9QVIjC44dO6Zy5cq5OgwAAAAgT/j7779VtmxZV4fhNmhPAAAAAFl3o/YESY0sKFKkiCTzzQwICHBJDDabTbGxsQoODuauN9wSziFkF+cQsotzCNnFOeT+4uPjVa5cOfv1M0zu0J6Ac/HvEZD38T0G8j6+x/lPVtsTJDWyIKWLeEBAgEuTGgkJCQoICOBLilvCOYTs4hxCdnEOIbs4h/IOhlhy5A7tCTgX/x4BeR/fYyDv43ucf92oPcGnDQAAAAAAAAAA8gSSGgAAAAAAAAAAIE8gqQEAAAAAAAAAAPIE5tQAAABAgZOcnKwrV664Oow8x8vLSx4eHq4OAwAAAHAp2hO3xlntCZIaAAAAKDAMw1BMTIzOnTvn6lDyrGLFiqlkyZJMBg4AAIACh/ZE9jmjPUFSAwAAAAVGSgMkJCREhQoV4of5m2AYhi5duqSTJ09KkkqVKuXiiAAAACBJV65c0bBhw/TRRx/JYrGoZ8+emjlzpjw90//0269fPy1btkze3t72sm+++UaNGzeWJP3zzz96+umntXHjRlksFt1///2aPXu2goODJUmFCxd22F9iYqJq1qypvXv3Zmn/eR3tiVvnzPYESQ0AAAAUCMnJyfYGSIkSJVwdTp7k5+cnSTp58qRCQkLy7FBUs2fP1rRp0xQTE6O6devq7bffVsOGDTOtf+7cOY0aNUqffvqpzpw5owoVKmjWrFlq165dLkYNAACQsQkTJmjTpk369ddfJUlt27bVpEmTNGbMmAzrDx48WLNmzcpw3dNPPy1JOnz4sAzDUM+ePfXss89q+fLlkqQLFy441K9Tp4569OiR5f3nZbQnss9Z7QkmCgcAAECBkDLmbaFChVwcSd6W8v7l1TGEV6xYoYiICI0dO1a7du1S3bp11bp1a/sdY2klJSWpVatWOnTokD755BMdOHBA8+fPV5kyZXI5cgAAgIwtXLhQr7zyikqVKqVSpUpp1KhRWrBgwS3t66+//lK3bt1UuHBhFSlSRN27d9e+ffsyrLtjxw79+uuv6tevXzaizztoTziHM9oTJDUAAABQoNBFPHvy+vs3Y8YM9e/fX+Hh4apVq5bmzp2rQoUKaeHChRnWX7hwoc6cOaPVq1eradOmqlixopo1a6a6devmcuQAAADpnT17VkePHlVYWJi9LCwsTEeOHFFcXFyG2yxZskTFixfX7bffrjfeeEM2m82+LiIiQqtWrVJcXJzOnTun5cuXq3379hnuZ8GCBWrbtq1Kly6d5f3nB3n9etjVnPH+kdQA
AAAAUCAkJSVp586datmypb3MarWqZcuW2rp1a4bbrF27Vo0bN9bTTz+t0NBQ1a5dW5MmTVJycnJuhQ0AAJCplOGgihUrZi9LWT5//ny6+s8++6wOHDig2NhYLViwQG+++abefPNN+/qmTZvq5MmTCgwMVPHixXX27FmNHDky3X4uXryojz/+WE8++eRN7R9wBubUAAAAALIoIUFatUpavVo6fVoqUULq1Enq2lXy9XV1dNccOnRIlSpV0u7dux3u2kstKipKLVq00NmzZx0awfnZqVOnlJycrNDQUIfy0NBQ7d+/P8Nt/vrrL3377bfq2bOnvvzyS/3xxx8aPHiwrly5orFjx2a4TWJiohITE+3P4+PjJUk2my3f3alYUNlsNhmGwecJ5GF8j5FfpAzlc/bsWRUvXty+LEn+/v7pzvHU14YNGzbUSy+9pKVLl2ro0KGy2Wxq1aqVunbtqvXr10uSxo0bpwcffFBbtmxx2M+KFStUqFAhtW3b1uE1rrd/Z8vJ7/GVK1cUERGhZcuWyWKx6P/+7/80adIkGYZhf6Q4dOiQzpw549ADoXr16vL397fv68iRI/YEVJEiRVS+fHn7RO6GYejvv//WmTNnJEklSpRQ2bJl7fu70fY54dChQ6pcubJ27dp13fbE/fffrzNnztxUeyLl/cvo2jirnyVJDQAAACAL1q6V+vWTzp6VrFbJZjP/fvqpNHSotHixlEnPfKeaMmWKRo4cqaFDh2ZrAsYmTZro+PHjKlq0qCTpgw8+0HPPPadz5845J9B8wmazKSQkRPPmzZOHh4fq16+vf/75R9OmTcs0qTF58mSNGzcuXXlsbKwSEhJyOmTkApvNpri4OBmGIauVARCAvIjvMfKT0qVL6/vvv1eRIkUkSRs2bFDp0qWVmJiY6bxhKS5cuKArV67o5MmTOn36tA4fPqzHHnvM/gN6jx49NH36dEVHRztMjj137lw9+uij9h/is7J/Z8vJ7/G0adP0/fffKyoqSpLUs2dPvfnmm+rcubOuXr2qq1evOsTx4os15ZvuLicz8XH5cpKkkvZJsi9fviwpSX5+5iTZiYmJunq1uAoVKitJunTpkry8EuXt7ZPh9jNn/qnDhw+rQoUKt3RsU6dO1SuvvKJnnnlGb7zxRoZ1Uo4v7bGm1rBhQx05ckT+/v66evWqlixZouHDhys2Nva6r3/16lXZbDadPn1aXl5eDusy6l2UEZIaAAAAwA2sXWv2yEiRcgNRyt9z56SOHc0eHB065FwcP/zwg9577z3VqVMn2/vy9vZWyZIlnRBV3hEUFCQPDw+dOHHCofzEiROZvhelSpWSl5eXPDw87GU1a9ZUTEyMkpKS5O3tnW6bkSNHKiIiwv48Pj5e5cqVU3BwsAICApx0NHAlm80mi8Wi4OBgfgwF8ii+x8hPHn/8cb377rtq166dJOndd9/VU089pZCQkHR1V65cqTZt2qhIkSLauXOn5syZo8GDByskJEQhISGqWrWqVq5cqTFjxkiSZs6cqbJly6pmzZr2fRw4cEA//vijli5dmu41rrd/Z8vJ7/HKlSv1xhtv6I477pAkjR49Wm+++aYeeeQReXp6OvSSsFqtslgssloznivCZjPk4+Njj9HLy1uJiYn2+leuXJGfn688PMz1vr4+unw5wZ4kSbt9iRIlFBMTc0s9NX744Qe9//77qlOnjiwWS6b7SClPe6xp66SeND0lvhvF5enpKavVqhIlSqRLBKVPDGWyjyzVAgAAAAqohASzh4Ykpepl7sAwJIvFrHfsWM4MRXXhwgX17NlT8+fP14QJE7K0zf79+zV48GDt2rVLVatW1ezZs9WsWTNJjsNP7dmzR+Hh4ZKuTdw3duxYvfrqq84/EBfy9vZW/fr1FRkZqU7/ZqlsNpsiIyM1ZMiQDLdp2rSpli1bJpvNZm+o/fbbbypVqlSGCQ1J8vHxkY+PT7pyq9XKD2f5iPnjBZ8pkJfxPUZ+MWbMGJ05
c0a33367JKlXr14aNWqUrFarBg4cKMnsWSGZCY+BAwfq6tWrKlOmjAYPHqznn3/e/j1Ys2aNhg0bpnLlyslms6levXpau3atw/dk0aJFuvfee3Xbbbeli+VG+3e2nPgep0y+fuedd9r3e+edd+rYsWMyDEMWiyXdZNdJSUlKSkqS1WqRt7f3v9eCZh0fHx9duXJFXl7mT/FXriT9u2yRYZhDMJk30Jj1PTw87ENrWSzWdNufOXNGRYsWvekJty9cuKBevXrZ2xMZHUeKlPIDBw7o6aefzlJ74vHHH5d0LbmRWXsi5XUz+tyy+jmS1HBzKeM2f/aZRTExgSpZ0qLOnd1v3GYAAID8atUqc8ipGzEMs94nn0i9ejk/jqeffloPPfSQWrZsmeWkxgsvvKBZs2apVq1amjFjhtq3b6+DBw86DB0gmUNRzZo1S2PGjNGBAwckSYULF3b6MbiDiIgI9e3bVw0aNFDDhg01a9YsXbx40Z7U6dOnj8qUKaPJkydLkgYNGqR33nlHQ4cO1TPPPKPff/9dkyZN0rPPPuvKwwAAALDz8vLS7NmzNXv27HTrUpIZKTZs2HDdfdWqVcs+n0Zmpk6dmum6G+0/L7je5OsZzfkQEhKiIkX8ZLVadPVqsi5duiTJYr/JxdPTQ0lJSYqLi//3uad8fMwfdq/dNJU6uWCxr7NY0m+fnJysUqVK3fRx5af2BEkNN5Z+3GYfWa2GPvssd8dtBgAAyK8aNJBiYq5f5/Tpm9tn//7SiBHXr1OypPTjj1nf58cff6xdu3bphx9+uKlYhgwZokceeUSSNGfOHK1bt04LFizQiy++6FDP29vbfrdXfh+Sqnv37oqNjdWYMWMUExOjsLAwrVu3zj55+JEjRxzuECtXrpzWr1+vYcOGqU6dOipTpoyGDh2ql156yVWHAAAAgByU8mN8XFycgoKC7MtSxj0J/P39lVJsJix8dOVK0r9JDUMXLlyUt7eXfeLwxMQEXbx4QYULF9G1jhKpu4Sby+a69NsXLnxBv/32m8OQYDeS39oTJDXcVPpxmy0Of3Nr3GYAAID8LCZG+ucf5+4zIcG5+/z77781dOhQffPNNxmOMTtw4EB9+OGH9ucpd5ZJUuPGje3Lnp6eatCggaKjo50XXB41ZMiQTIebSpkMMrXGjRtr27ZtORwVAAAA3EFgYKDKli2rPXv2qEqVKpKkPXv2qFSpUjc9zJXNZshms8nb28c+pJO3t48SEhLtw5tarVYlJyfLajXncDOXrbJYrLLZbOm2DwkJUUxMzL9DUnll+top8mN7gqSGG3KXcZsBAADyu6zcRHT6tHl9llW+vlKa3ti39Lopdu7cqZMnT+rOO++0lyUnJ2vDhg1655139M8//+j555/P+g4BAAAAXFd4eLgmTpyopk2bSpImTZqknj17Zlj3zJkzMoxAWSzmdXpiYqJ8fMy511KSFklJifYhp5KSEh3mk/D29lJCQqI8PMyf6hMSEu1zt2W0/cmTJ+Xt7Z2lhIaUP9sTJDXckLuM2wwAAJDfZWUIqKVLpT59sr7P+fOde232wAMPaN++fQ5l4eHhqlGjhl566SWFhobah05Ka9u2bbrvvvskSVevXtXOnTsz7aHg7e2t5ORk5wUOAAAA5FGjR4/W6dOn7UM89erVSwMGDNDff/+tf/75R56enqpQoYIkM8kQH2/2srBaLfLx8bbPpyGZw1MlJFzW+fPxMgxzIvCUoaQkycfHV4ZxWefPn5dkXpf7+ma+/cWLF1W1atUsH0t+bE+Q1HBDq1enzKFx47pWq/TZZyQ1AAAAckrXruZ8ZufOZd6LVjJ70RYrJj36qHNfv0iRIqpdu7ZDmb+/v0qUKJGuPK3Zs2erWrVqqlmzpmbOnKmzZ8/q8ccfz7BuxYoVdeHCBUVGRqpu3boqVKiQChUq5LTjAAAAAPKKjCZfT/i3+3aZMmUchnGqUaOGihbNfF9mEiPzSbMtFov8/ArJzy9r2992
23VeLAP5sT1xc4OAIVecPp21hIZk1jtzJmfjAQAAKMh8faXFi83laxP5OUopX7zYvYYFnTJliqZMmaK6detq06ZNWrt2rX2yw7SaNGmigQMHqnv37goODtbUqVNzOVoAAAAA7sRd2xP01HBDJUrcXE+N4sVzPiYAAICCrH17szdtv37m8J8p12opf4sVMxMa7dvnTjwZTWadWsWKFWX8263ksccey7BO8+bN7XVSzJkzR3PmzHFKjAAAAHCUW9eKBYXFIs2b5+ooTJ9/7uoIbk5eb0+Q1HBDnTpJn36atbo2m9S5c46GAwAAAEkdOkjHjpnzmX32mdlbtnhx81rs0Ufdq4cGAAAAAORXJDXckKvHbQYAAEDGfH3NucyYzwwAAAAAXIM5NdxQVsZtTuFu4zYDAAAAAAAAAJBTSGq4qZRxm4sVM59brSldNq513Vi4kLH4AAAAAAAAAAAFB8NPubHU4zZ/+qkUE5Oo06e99dtv5vpdu8zJKgEAAAAAAACgwBk/Xvr77+uP4e8sQUHmj7EWi+ThkfOv5wrVqrk6giyhp4abSxm3+ZNPDH366Vlt2GCoUCFz3bx5ZtIDAAAAAAAAAICCgKRGHhMcLD39tLmcmChNneraeAAAAAAAAAAAyC1umdSYPXu2KlasKF9fXzVq1Eg7duzItO6nn36qBg0aqFixYvL391dYWJiWLl3qUKdfv36yWCwOjzZt2uT0YeSY55+XvbfGe+9Jx4+7Nh4AAAAAAAAAAHKD2yU1VqxYoYiICI0dO1a7du1S3bp11bp1a508eTLD+sWLF9eoUaO0detW7d27V+Hh4QoPD9f69esd6rVp00bHjx+3P5YvX54bh5MjQkKkQYPM5YQEado018YDAAAAAAAA05UrVzRkyBAFBgaqePHieuaZZ3T16tUM6/br10/e3t4qXLiw/bF169Ysr5ektWvXKiwsTP7+/ipdurTmzp0rSTp58qR69uypsmXLKiAgQPXq1dPatWtz7sABIJe4XVJjxowZ6t+/v8LDw1WrVi3NnTtXhQoV0sKFCzOs37x5c3Xu3Fk1a9ZUlSpVNHToUNWpU0ebNm1yqOfj46OSJUvaH4GBgblxODnmhRckPz9zee5c6cQJ18YDAABQICQkSEuXSo88IjVvbv5dutQsdzMWi0WrV6/OdP2hQ4dksVi0Z8+eXIsJAICCYMKECdq0aZN+/fVX/fLLL9q4caMmTZqUaf3BgwfrwoUL9kfjxo2zvH7dunUaPHiwZs2apfj4eP3yyy9q3ry5JOnChQuqV6+etm3bpnPnzum1117TY489pl9//TVHjhtA/uLO7QnPXH/F60hKStLOnTs1cuRIe5nValXLli3TZaEzYhiGvv32Wx04cECvv/66w7qoqCiFhIQoMDBQ999/vyZMmKASJUpkuJ/ExEQlJiban8fHx0uSbDabbDbbrRxattlsNhmGYX/94GBpwACLZs2y6PJlaepUQ9OmGS6JDXlD2nMIuFmcQ8guziFkV3bPoZTtUx43be1aKTxclrNnZVitsths5t9PP5UxdKj0wQdS+/a3FNv1JCcn69VXX9VHH32kmJgYlS5dWn379tUrr7wii8Vy3W2vd6xly5bVsWPHFBQUJMMwFBUVpfvvv19nzpxRsWLFbrjPjK6N+X4DQN505coVDRs2TB999JEsFot69uypmTNnytMz/c9G/fr107Jly+Tt7W0v++abb+w/tN9o/TPPPKPVq1crLi5ORYoUUdeuXTV16lSH+pJ04sQJ1axZU+XLl89zCfiFCxdq5syZKlWqlCRp1KhRev755zVmzBinv9bo0aM1ZswYeyIjMDDQfiNv5cqV9fzzz9vrtm/fXrfddpu2bdumWrVqOT0WoEAbMCD3Xuu9926qenJysl59+219uHatYmJjVTokRP26dNErgwffsD1xPeXKldPx48cVFBQkyfz9vUWLFjp79ux12xPO4FZJjVOnTik5OVmhoaEO5aGhodq/
f3+m28XFxalMmTJKTEyUh4eH3n33XbVq1cq+vk2bNurSpYsqVaqkP//8Uy+//LLatm2rrVu3ysPDI93+Jk+erHHjxqUrj42NVYKL7sKz2WyKi4uTYRiyWs0ONuHhVs2dG6yEBIvefVcKDz+loCAakshYRucQcDM4h5BdnEPIruyeQ1euXJHNZtPVq1czHQIiM5bPP5fHo49ee/7vj/cpf3XunNSpk5I/+USGkxMbU6ZM0dy5c7VgwQLVqlVLO3fuVP/+/VWkSBENGTLkutsmJydf91hTGiBXr15VcnKyffl621y9elU2m02nT5+Wl5eXw7rz589n9bAAAG4kdc8CSWrbtq0mTZqU6Y/wKT0DMnO99YMHD9aUKVPk7++vU6dO2ZMar7zyikO9IUOGqF69ejp9+vQtHZOrnD17VkePHlVYWJi9LCwsTEeOHFFcXJyKFi2abpslS5ZoyZIlKlWqlB5//HENGzbM4Vons/UXL17Uzp071a5dO1WvXl3x8fG699579dZbb9kTKqmdPHlS0dHRqlOnTo4cOwD39Pq8eZqzbJkWv/66bq9WTT/+/LPCR45U0SJF9GyfPre8Xw8PD5UsWdKJkWadWyU1blWRIkW0Z88eXbhwQZGRkYqIiFDlypXtWeoePXrY695xxx2qU6eOqlSpoqioKD3wwAPp9jdy5EhFRETYn8fHx6tcuXIKDg5WQEBAjh9PRmw2mywWi4KDg+3/sYWESE89Jb31lpSQYNHixcF6/XV6ayBjGZ1DwM3gHEJ2cQ4hu7J7DiUkJOj8+fPy9PTM8M7T62woPfmkJMmSSa8Hi2HIsFjk8eST0j//SL6+Nx1fZrZv364OHTqoQ4cOkqSqVatq1apV2rlz5w2P4+TJk+rQoYOioqJUqlQpvf7663r03+TMoUOHVLlyZe3atUvFihWz3xQUEhIiSerbt68WLVqUbp+enp6yWq0qUaKEfNMcZ9rnAIC8ITd7FtSsWdO+nHKjwu+//+5QZ82aNTpz5ox69+593eSJO7pw4YIkOdylnLJ8/vz5dEmNZ599VtOmTVPx4sX1ww8/qFu3brJarRo2bNgN1589e1aGYWj16tX65ptvVKJECQ0cOFC9evVSZGSkw+skJSWpR48e6tatmxo0aJBzbwAAt7Nl9251bNlSD7VoIUmqWLasln/xhXbs3XvDbY8fP662bdva2xNTp051aE9UqlRJu3fvVrFixdTi3/2n9Bbr27evPvjggxw5JrdKagQFBcnDw0Mn0kwQceLEietmfaxWq6pWrSrJzH5HR0dr8uTJ9qRGWpUrV1ZQUJD++OOPDJMaPj4+8vHxyfB1XPkjjMViSRfDSy+ZPY4SE6V337XoxRctCg52WYhwcxmdQ8DN4BxCdnEOIbuycw5ZrVZZLBb7I8s++UQ6e/bGsRmGWe8//5F69brp+DLTpEkTzZs3T7///ruqV6+un376SZs2bdKMGTNueBxjxozRlClT9Oabb2rp0qV67LHHVLt2bdWsWdO+rcViUfny5fWf//xHjzzyiA4cOKCAgAD5+flluP+U9y+jz4HvNgDkPbnZsyDFlClTNGHCBF28eFElSpRwGEI8Li5OERERWrdunTZv3pwzB52DChcuLMk8jpQekXFxcZLMm3LTuvPOO+3Ld999t0aMGKElS5bYkxrXW5/yWs8++6wqVKggSRo3bpyqVaumixcvyt/fX5KZ0Hj00UdVqFAhzZ8/39mHDMDNNalXT/NWrtRvBw+qeqVK+ik6Wpt27tSMVFNAZGb06NEO7YkePXpo3759DglqyRyKKqP2RE5xq1aHt7e36tev75BNttlsioyMTDdJ0vXYbDaHOTHSOnr0qE6fPp1hV7y8pnRps7eGJF26JL3xhmvjAQAAyFMaNJDKlr3+I+ViK6v697/xPm/iDskRI0aoR48eqlGjhry8vFSvXj0999xz6tmz5w237dq1q5588klVr15d48ePV4MGDfT222+nq+fh4aHi
xYtLMntqlCxZMsMfsQAA+c+Nehak9eyzz+rAgQOKjY3VggUL9Oabb+rNN9/M8nrJ/L/twoUL+vXXXzVw4ECHG1lffPFF9evXT9WqVXPiUeaewMBAlS1b1mEekD179qhcuXJZ+r/1RjcIpF5frFgxlS9fPsN6KXNqJSUlqWvXrkpKStJ//vOfdHOXAMj/RgwYoB7t2qlGmzbyqlVL9Tp10nN9+6rnvz3Br8dd2xNuldSQpIiICM2fP1+LFy9WdHS0Bg0apIsXLyo8PFyS1KdPH4eJxCdPnqxvvvlGf/31l6Kjo/XGG29o6dKl6vXv3XEXLlzQCy+8oG3btunQoUOKjIxUx44dVbVqVbVu3dolx+hsL70kpfyf9M470qlTro0HAAAgz4iJMYeLut7jZudUS0i48T5jYrK8u5UrV+qjjz7SsmXLtGvXLi1evFjTp0/X4sWLJUmTJk1S4cKF7Y8jR47Yt017Y1Djxo0VHR19c8cDAMjXUvcsSHGjngXBwcHy8PCw9xxYsWJFltenVrNmTdWtW1f9+vWTJG3cuFGbN2/WSy+95KzDc4nw8HBNnDhRMTExiomJ0aRJk/Tkv0NZprVy5UrFx8fLMAz9+OOPmjJlih555JEsr3/qqaf09ttv659//tHly5f12muv6YEHHlDhwoV15coVdevWTRcvXtTq1aszHJUEQP638ssv9dHnn2vZG29o12efafHrr2v6woVa/OmnkqRJc+aocFiY+cgj7Qm3Gn5Kkrp3767Y2FiNGTNGMTExCgsL07p16+yThx85csQhK33x4kUNHjxYR48elZ+fn2rUqKEPP/xQ3bt3l2Rmifbu3avFixfr3LlzKl26tB588EGNHz8+3/xjXqaMeUPg7NnSxYvSjBnSpEmujgoAACAPyMrEdqdP31xiw9dXKlEi+6/7rxdeeMHeW0My54g7fPiwJk+erL59+2rgwIHq1q2bvX7p0qWzHisAoMBL3bOgSpUqknKuZ0FGrly5Yp9TIzIyUn/99Zf9/7LExERdvnxZQUFB2rdvX54ZcWP06NE6ffq0fXiWXr166eWXX5YkDRw4UJI0d+5cSdI777yjp556SlevXlWZMmU0ePBgDR8+3L6vG60fMWKEzpw5o7p160qSWrRooaVLl0qStmzZojVr1sjX19c+FJYkvfzyy/Z4AOR/L0ydqhFPPaUeDz8sSbrjttt0+NgxTX7vPfXt0kUDH3tM3dq1MytXrJgn2hNul9SQpCFDhmjIkCEZrouKinJ4PmHCBE2YMCHTffn5+Wn9+vXODM8tjRghzZ8vJSVJb78tDR9+47Y0AABAgffjjzeus3Sp1KdP1vc5f75T59S4dOlSuh+EPDw8ZLPZJEnFixe3d/VOa9u2beqTKvZt27apXr16GdZNGY4iOTnZGWEDAPKQlJ4FTZs2laQb9ixo06aNihQpop07d2rKlCl6+umns7T+woULWrVqlTp37qyiRYvq559/1oQJE+wjaURERDi87qpVq/T+++9r/fr1CgkJyanDdzovLy/Nnj1bs2fPTrcuJZmRYsOGDdfd143We3h46I033tAbGYxH3qxZM/swVAAKrksJCenbE1arbP/++1C8WDEVTxmC8N95q1O4a3vC7Yafwq0pW1Z6/HFz+cIFaeZM18YDAACQb3TtKgUGSjeaXNxiMes9+qhTX759+/aaOHGi/vvf/+rQoUP67LPPNGPGDHXu3PmG265atUoLFy7Ub7/9prFjx2rHjh2Z3jxUoUIFWSwWffHFF4qNjbWPsQ4AyP9Gjx6txo0bq2bNmqpZs6aaNm3q0LMgpXeBZPYcKF++vIoUKaKePXtm2LMgs/UWi0XLli1TlSpVVKRIEXXs2FEPPfSQZs2aJUkKCAhQ2bJl7Y/AwEB5eXmpbNmy8vDwyL03BADykfYtWmjinDn673ff6dDRo/rs6681Y9EidW7V6obbumt7wi17auDWjBwpLVggXbkivfWWFBEhZXLTHgAA
ALLK11davFjq2NFMXGR0x2NKwmPxYrO+E7399tsaPXq0Bg8erJMnT6p06dIaMGCAxowZc8Ntx40bp48//liDBw9WqVKltHz5ctWqVSvDumXKlNG4ceM0YsQIhYeHq0+fPvrggw+ceiwAAPeUWz0L/P399c0332Q5rn79+tnn2wAA3Jq3R4/W6Dff1OBx43Ty9GmVDgnRgB49NCZVL7vMuGt7wmLQD+2G4uPjVbRoUcXFxSkgIMAlMdhsNp08eVIhISHXHY9ywABp3jxzefRo6bXXcilAuL2snkNAZjiHkF2cQ8iu7J5DCQkJOnjwoCpVqiTfW0k8rF0r9esnnT0rWa2SzXbtb2CgmdBo3/7m95vHXO99dIfrZnfE+5L/8H8akPfxPYYrFIBLxVxlsdg0r9wzCvn7b1lz4SfuhKAgHezXT5VCQ+WbX3uPVauW4y/hjPYE/2rnMyNHSp7/9r95802zzQ0AAAAn6NBBOnbMnGOjUyepeXPz79KlZjmtVAAAAADIcQw/lc9UrGjeQPj++1J8vJnYePVVFwcFAACQX/j6mpOAO3EicAAAAABA1tFTIx96+eVrvTVmzZLi4lwaDgAAAAAAAAAATkFPjXyoUiWpTx9p4UIzofHWW+b8GgAAAAAAAPjX+PHS339LTDfrPJ9/7uoIABQA9NTIp0aNklLmq5k50xyKCgAAAAAAAACAvIyeGvlU5cpS797SBx+Yk4W//baZ6AAAACjobDabq0PI03j/AMA12rd3dQT5i8UizSvn6igA5Ck2m2QY4mo4e5zRniCpkY+NGiUtXSolJ0szZkjPPisVKeLqqAAAAFzD29tbVqtVx44dU3BwsLy9vWWxWFwdVp5hGIaSkpIUGxsrq9Uqb29vV4cEAAAA5BrvuDhZz5/XMX9/BRcuLG+LRfmuNZGQkGO7dmZ7gqRGPla1qtSzp7RkiXTmjPTOO9LIka6OCgAAwDWsVqsqVaqk48eP69ixY64OJ88qVKiQypcvL6uVkWwBAABQcFiTk1Vp+XIdv/9+Hatc+drY//lJLswx5Iz2BEmNfO6VV6QPPzR7R73xhvTMM1Lhwq6OCgAAwDW8vb1Vvnx5Xb16VcnJya4OJ8/x8PCQp6cnPVwAAABQIHmfP6/ya9fqqp+fkv38zLHs8pM5c3J0985qT5DUyOeqVZP+7//MxMbp09Ls2dJLL7k6KgAAANexWCzy8vKSl5eXq0MBAAAAkMdYDENely7J69IlV4fifL6+ro4gS+gzXgC88oqU0ptn+nTpwgXXxgMAAAAAAAAAwK0gqVEA3Hab1KOHuXzqVI73IgIAAAAAAAAAIEeQ1CggXnnl2hBv06ZJFy+6Nh4AAAAAAAAAAG4WSY0ComZNqXt3czk2Vpo717XxAAAAAAAAAABws0hqFCCpe2tMnSrlx7lsAAAAAAAAAAD5F0mNAuT226VHHzWXT56U3nvPtfEAAAAAAAAAAHAzSGoUMKNHX1ueOlW6fNl1sQAAAAAAAAAAcDNIahQwd9whPfKIuRwTI82b59p4AAAAAAAAAADIKpIaBdCYMdeWX39dSkhwXSwAAAAAAAAAAGQVSY0CqE4dqXNnc/n4cWn+fNfGAwAAAAAAAABAVpDUKKBS99aYMoXeGgAAAAAAAAAA90dSo4AKC5M6djSXjx2TFixwaTgAAAAAAAAAANwQSY0CLG1vjcRE18UCAAAAAAAAAMCNkNQowO68U2rf3lw+elRatMi18QAAAAAAAAAAcD0kNQq4sWOvLU+eLCUluS4WAAAAAAAAAACuh6RGAVe/vvTQQ+bykSPSBx+4NBwAAAAAAAAAADJFUgMOvTUmTaK3BgAAAAAAAADAPZHUgO66S2rb1lw+fFhassS18QAAAAAAAAAAkBGSGpDk2Ftj4kTpyhXXxQIAAAAAAAAAQEZIakCS1KiR1Lq1uXzokLR0qUvDAQAAAAAAAAAgHZIasKO3BgAAAAAAAADAnZHUgF3jxlKrVubyX39J
H33k2ngAAAAAAAAAAEiNpAYcpO6tMWGCdPWq62IBAAAAAAAAACA1khpw0LSpdP/95vKff0rLlrk2HgAAAAAAAAAAUpDUQDr01gAAAAAAAAAAuCOSGkjnvvuk5s3N5d9/lz7+2KXhAAAAAAAAAAAgiaQGMpG2t0ZysutiAQAAAAAAAABAIqmBTDRvbvbYkKQDB6QVK1waDgAAAAAAAAAAJDWQudS9NcaPp7cGAAAAAAAAAMC1SGogUy1aSPfcYy7v3y+tWuXaeAAAAAAAAAAABRtJDWTKYknfW8Nmc108AAAAAAAAAICCjaQGruuBB6QmTczlX3+VPvnEtfEAAAAAAAAAAAoukhq4LnprAAAAAAAAAADcBUkN3FCrVtLdd5vLP/8sffaZa+MBAAAAsmP27NmqWLGifH191ahRI+3YsSPTuh988IEsFovDw9fXNxejBQAAAJAaSQ3cUNreGq+9Rm8NAAAA5E0rVqxQRESExo4dq127dqlu3bpq3bq1Tp48mek2AQEBOn78uP1x+PDhXIwYAAAAQGokNZAlrVtLDRuay3v3SmvWuDYeAAAA4FbMmDFD/fv3V3h4uGrVqqW5c+eqUKFCWrhwYabbWCwWlSxZ0v4IDQ3NxYgBAAAApEZSA1mSUW8Nw3BdPAAAAMDNSkpK0s6dO9WyZUt7mdVqVcuWLbV169ZMt7tw4YIqVKigcuXKqWPHjvrll19yI1wAAAAAGfB0dQDIO9q2lRo0kH78UdqzR1q7VurY0dVRAQAAAFlz6tQpJScnp+tpERoaqv3792e4zW233aaFCxeqTp06iouL0/Tp09WkSRP98ssvKlu2bIbbJCYmKjEx0f48Pj5ekmSz2WRjHNd8wWazyTAMPk/kKovF1RHkLxaLTYbFIhtvrHPx7+J1cbo5F9/jHODi73BWr61IaiDLUnprtG9vPh83TurQgX+QAQAAkH81btxYjRs3tj9v0qSJatasqffee0/jx4/PcJvJkydr3Lhx6cpjY2OVkJCQY7Ei99hsNsXFxckwDFmtDICA3FGunKsjyG9sOhcUZH6PXR1KfnKdOarA99j5+B47nYu/w+fPn89SPZIauCkPPSTVry/t3Cnt3i198cW1JAcAAADgzoKCguTh4aETJ044lJ84cUIlS5bM0j68vLxUr149/fHHH5nWGTlypCIiIuzP4+PjVa5cOQUHBysgIODWgodbsdlsslgsCg4OJqmBXPP3366OIH+xWGwqZjml4KNHZWV8becJCXF1BG6N77Fz8T3OAS7+Dvv6+mapHkkN3BSLRRoz5tqwU+PGSQ8/TG8NAAAAuD9vb2/Vr19fkZGR6tSpkyTzx+nIyEgNGTIkS/tITk7Wvn371K5du0zr+Pj4yMfHJ1251WrlB/B8xGKx8JkiV/F7nfNZDEPWfx9wEv5NvC5ONefje+xkLv4OZ/W6in9pcNPat5fCwszlnTulL790aTgAAABAlkVERGj+/PlavHixoqOjNWjQIF28eFHh4eGSpD59+mjkyJH2+q+99pq+/vpr/fXXX9q1a5d69eqlw4cP68knn3TVIQAAAAAFGj01cNNSemt06WI+HzdOateO3hoAAABwf927d1dsbKzGjBmjmJgYhYWFad26dfbJw48cOeJwh9jZs2fVv39/xcTEKDAwUPXr19eWLVtUq1YtVx0CAAAAUKCR1MAt6dhRqlNH2rtX+uEHad06qW1bV0cFAAAA3NiQIUMyHW4qKirK4fnMmTM1c+bMXIgKAAAAQFYw/BRuidVq9tZIMW4c4wICAAAAAAAAAHIWSQ3css6dpdq1zeXt26Wvv3ZtPAAAAAAAAACA/I2kBm4ZvTUAAAAAAAAAALmJpAay5ZFHpNtvN5e3bpX+9z/XxgMAAAAAAAAAyL9IaiBbrFZp9Ohrz+mtAQAAAAAAAADIKSQ1kG2PPirVrGkub94sffuta+MBAAAAAAAAAORPJDWQbR4e9NYAAAAAAAAAAOQ8khpwim7d
pBo1zOWNG6WoKJeGAwAAAAAAAADIh0hqwCk8PKRXXrn2/LXXXBcLAAAAAAAAACB/IqkBp+nRQ6pe3VyOipI2bHBpOAAAAAAAAACAfMYtkxqzZ89WxYoV5evrq0aNGmnHjh2Z1v3000/VoEEDFStWTP7+/goLC9PSpUsd6hiGoTFjxqhUqVLy8/NTy5Yt9fvvv+f0YRQ4aXtrjBvnulgAAAAAAAAAAPmP2yU1VqxYoYiICI0dO1a7du1S3bp11bp1a508eTLD+sWLF9eoUaO0detW7d27V+Hh4QoPD9f69evtdaZOnaq33npLc+fO1fbt2+Xv76/WrVsrISEhtw6rwHjsMalqVXP522+lTZtcGw8AAAAAAAAAIP9wu6TGjBkz1L9/f4WHh6tWrVqaO3euChUqpIULF2ZYv3nz5urcubNq1qypKlWqaOjQoapTp442/ftrumEYmjVrll555RV17NhRderU0ZIlS3Ts2DGtXr06F4+sYPD0pLcGAAAAAAAAACBnuFVSIykpSTt37lTLli3tZVarVS1bttTWrVtvuL1hGIqMjNSBAwd03333SZIOHjyomJgYh30WLVpUjRo1ytI+cfN69pSqVDGX//c/acsW18YDAAAAAAAAAMgfPF0dQGqnTp1ScnKyQkNDHcpDQ0O1f//+TLeLi4tTmTJllJiYKA8PD7377rtq1aqVJCkmJsa+j7T7TFmXVmJiohITE+3P4+PjJUk2m002m+3mD8wJbDabDMNw2evfDKtVGjlSevJJM2f26quG1q0zXBwV8tI5BPfEOYTs4hxCdnEOuT8+GwAAAAA5za2SGreqSJEi2rNnjy5cuKDIyEhFRESocuXKat68+S3tb/LkyRqXwbhJsbGxLpuHw2azKS4uToZhyGp1qw42GXrwQal8+SAdOeKpb76x6Kuvzqh+/SuuDqtAy2vnENwP5xCyi3MI2cU55P7Onz/v6hAAAAAA5HNuldQICgqSh4eHTpw44VB+4sQJlSxZMtPtrFarqv47O3VYWJiio6M1efJkNW/e3L7diRMnVKpUKYd9hoWFZbi/kSNHKiIiwv48Pj5e5cqVU3BwsAICAm718LLFZrPJYrEoODg4zzTiR42SBgwwl99+u7i+/JLeGq6UF88huBfOIWQX5xCyi3PI/fn6+ro6BAAAAAD5nFslNby9vVW/fn1FRkaqU6dOkszGa2RkpIYMGZLl/dhsNvvwUZUqVVLJkiUVGRlpT2LEx8dr+/btGjRoUIbb+/j4yMfHJ1251Wp1aQPaYrG4PIab0a+fNGmSdPiwtH69RT/+aFHDhq6OqmDLa+cQ3A/nELKLcwjZxTnk3vhcAAAAAOQ0t2t1REREaP78+Vq8eLGio6M1aNAgXbx4UeHh4ZKkPn36aOTIkfb6kydP1jfffKO//vpL0dHReuONN7R06VL16tVLktnwfe655zRhwgStXbtW+/btU58+fVS6dGl74gQ5w9tbevnla88zGNELAAAAAAAAAIAsc6ueGpLUvXt3xcbGasyYMYqJiVFYWJjWrVtnn+j7yJEjDneAXbx4UYMHD9bRo0fl5+enGjVq6MMPP1T37t3tdV588UVdvHhRTz31lM6dO6d77rlH69ato3t8LujXT5o4UTpyRPryS+mHH6S77nJ1VAAAAAAAAACAvMjtkhqSNGTIkEyHm4qKinJ4PmHCBE2YMOG6+7NYLHrttdf02muvOStEZJG3tzRypJQy0tdrr0mff+7amAAAAAAAAAAAeZPbDT+F/Cc8XCpb1lz+4gtp507XxgMAAAAAAAAAyJtIaiDH+fiYvTVS0GEGAAAAAAAAAHArSGogVzzxhFSmjLm8dq20e7dr4wEAAAAAAAAA5D0kNZArfHykESOuPae3BgAAAAAAAADgZpHUQK558kmpdGlzefVq6aefXBoOAAAAAAAAACCPIamBXOPrK7300rXn9NYAAAAAAAAAANwMkhrIVf37SyVLmsuffirt3evaeAAA
AAAAAAAAeQdJDeQqPz/H3hoTJrguFgAAAAAAAABA3kJSA7luwAApNNRc/uQT6ZdfXBsPAAAAAAAAACBvIKmBXOfnJ734orlsGNL48a6NBwAAAAAAAACQN5DUgEsMHCiFhJjLK1dKv/7q2ngAAAAAAAAAAO6PpAZcolAh6YUXzGXDYG4NAAAAAAAAAMCNkdSAywwaJAUFmcsffyzt3+/aeAAAAAAAAAAA7o2kBlzG3196/nlzmd4aAAAAAAAAAIAbIakBl3r6aalECXN5+XLpt99cGw8AAAAAAAAAwH2R1IBLFS4sDR9uLtts9NYAAAAAAAAAAGSOpAZcbsgQqXhxc/mjj6Tff3dtPAAAAAAAAAAA90RSAy5XpIgUEWEu22zSxImujQcAAAAAAAAA4J5IasAtPPOMFBhoLn/4ofTnn66NBwAAAAAAAADgfkhqwC0EBEjDhpnLycn01gAAAAAAAAAApEdSA27j2WelYsXM5SVLpL/+cmk4AAAAAAAAAAA3Q1IDbqNoUem558zl5GRp0iSXhgMAAAAAAAAAcDMkNeBWhg41kxuStHixdOiQS8MBAAAAAAAAALgRkhpwK8WKmYkNSbp6ld4aAAAAAAAAAIBrSGrA7Tz3nDlxuCQtWiQdPuzScAAAAAAAAAAAboKkBtxOYKA5abhk9taYPNm18QAAAAAAAAAA3ANJDbilYcOkIkXM5YULpSNHXBsPAAAAAAAAAMD1SGrALRUvLj3zjLl85Yr0+uuujQcAAAAAAAAA4HokNeC2IiKkwoXN5fffl44edW08AAAAAAAAAADXIqkBt1WihDRkiLmclERvDQAAAAAAAAAo6EhqwK0NHy75+5vL8+dLx465Nh4AAAAAAAAAgOuQ1IBbCwqSnn7aXE5MpLcGAAAAAAAAABRkJDXg9oYPlwoVMpfnzZOOH3dtPAAAAAAAAAAA1yCpAbcXEiINGmQuJyRIU6e6Nh4AAAAAAAAAgGuQ1ECe8MILkp+fuTx3rhQT49p4AAAAAAAAAAC5j6QG8oTQUGngQHM5IUGaNs218QAAAAAAAAAAch9JDeQZL74o+fqay3PmSCdOuDYeAAAAAAAAAEDuIqmBPKNkSWnAAHP58mVp+nTXxgMAAAAAAAAAyF0kNZCnvPii5ONjLr/7rnTypGvjAQAAAAAAAADkHpIayFNKl5aeespcvnRJeuMN18YDAAAAAAAAAMg9JDWQ57z0kuTtbS7Pni2dOuXaeAAAAAAAAAAAuYOkBvKcMmWk/v3N5YsX6a0BAAAAAAAAAAUFSQ3kSSNGXOutMXOm1L691Ly59Mgj0tKlUkKCS8MDAAAAAAAAAOQAkhrIk8qWle6/31xOTJT++1/p+++l1aulPn3MuTc+/9ylIQIAAAAAAAAAnIykBvKktWul9euvPTcM86/NZv49d07q2NGsBwAAAAAAAADIH0hqIM9JSJD69bt+nZQkR79+DEUFAAAAAAAAAPkFSQ3kOatWSWfPXktcZMYwzHqffJI7cQEAAAAAAAAAchZJDeQ5q1dL1iyeuVar9NlnORoOAAAAAAAAACCXkNRAnnP69LW5M27EZpPOnMnZeAAAAJC3zJ49WxUrVpSvr68aNWqkHTt2ZGm7jz/+WBaLRZ06dcrZAAEAAABkiqQG8pwSJW6up0bx4jkbDwAAAPKOFStWKCIiQmPHjtWuXbtUt25dtW7dWidPnrzudocOHdLzzz+ve++9N5ciBQAAAJARkhrIczp1urmeGg0b5mg4AAAAyENmzJih/v37Kzw8XLVq1dLcuXNVqFAhLVy4MNNtkpOT1bNnT40bN06VK1fOxWgBAAAApOXp6gCAm9W1qzR0qHTu3I0nC5ekceOk4GApPFyyWHI8PAAAALippKQk7dy5UyNHjrSXWa1WtWzZUlu3bs10u9dee00hISF64okntHHjxhu+TmJiohITE+3P4+PjJUk2m022rN6dA7dms9lkGAafJ3IV7VnnslhsMiwW2Xhj
nYt/F6+L0825+B7nABd/h7N6bUVSA3mOr6+0eLHUsaP5n0FGiY3U5ZcvS088IUVGSnPnSkWK5G68AAAAcA+nTp1ScnKyQkNDHcpDQ0O1f//+DLfZtGmTFixYoD179mT5dSZPnqxx48alK4+NjVVCQsJNxQz3ZLPZFBcXJ8MwZM3q2LhANpUr5+oI8hubzgUFmd9jV4eSn9xgOMeCju+xs/E9djoXf4fPnz+fpXokNZAntW8vrV4t9esnnT1rzp1hs137W6yYNG+e9M035l9JWrZM2rFDWrFCuvNOFwYPAACAPOH8+fPq3bu35s+fr6CgoCxvN3LkSEVERNifx8fHq1y5cgoODlZAQEBOhIpcZrPZZLFYFBwcTFIDuebvv10dQf5isdhUzHJKwUePypqVYSCQNSEhro7ArfE9di6+xznAxd9hX1/fLNUjqYE8q0MH6dgx6ZNPpM8+k86cMScF79xZevRRs0fHo49KDzwg9e8vxcdLf/whNW4sTZ8uDRlCtz8AAICCJCgoSB4eHjpx4oRD+YkTJ1SyZMl09f/8808dOnRI7du3t5eldIn39PTUgQMHVKVKlXTb+fj4yMfHJ1251WrlB/B8xGKx8JkiV/F7nfNZDEPWfx9wEv5NvC5ONefje+xkLv4OZ/W6in9pkKf5+kq9ekn/+Y/03Xfm3169zPIU3bpJu3dLDRqYz5OSpGefNZMfZ864Jm4AAADkPm9vb9WvX1+RkZH2MpvNpsjISDVu3Dhd/Ro1amjfvn3as2eP/dGhQwe1aNFCe/bsUTnGkAAAAAByHT01UCBUrixt3iyNHCnNmGGWrVkjhYVJH38sNWni0vAAAACQSyIiItS3b181aNBADRs21KxZs3Tx4kWFh4dLkvr06aMyZcpo8uTJ8vX1Ve3atR22L1asmCSlKwcAAACQO0hqoMDw9pbeeEO6/36pb1/p9GlzLMP77pPGj5deesnlPawAAACQw7p3767Y2FiNGTNGMTExCgsL07p16+yThx85coThhAAAAAA3RlIDBc5DD0l79kg9e0obNkjJydLLL5vDVy1dKv3bngUAAEA+NWTIEA0ZMiTDdVFRUdfd9oMPPnB+QAAAAACyjFuQUCCVLStFRkpjxlybLPybb6S6dc2/AAAAAAAAAAD3Q1IDBZanpzRunJncKFXKLDtxQmrdWho1Srp61bXxAQAAAAAAAAAckdRAgdeihTkcVZs25nPDkCZNkpo3l44ccWVkAAAAAAAAAIDUSGoAkkJCpP/+V5o61ezBIUmbN0thYdKaNS4NDQAAAAAAAADwL5IawL+sVumFF6SNG6WKFc2ys2elTp2kZ5+VEhNdGR0AAAAAAAAAgKQGkMbdd0u7d0uPPHKt7O23pSZNpN9/d11cAAAAAAAAAFDQkdQAMlCsmLRqlfTuu5KPj1m2a5d0553SsmUuDQ0AAAAAAAAACiySGkAmLBZp0CBp+3bpttvMsgsXpJ49pSeekC5edG18AAAAAAAAAFDQuGVSY/bs2apYsaJ8fX3VqFEj7dixI9O68+fP17333qvAwEAFBgaqZcuW6er369dPFovF4dGmTZucPgzkE3XrSj/+KPXte61s4ULprrukfftcFxcAAAAAAAAAFDRul9RYsWKFIiIiNHbsWO3atUt169ZV69atdfLkyQzrR0VF6bHHHtN3332nrVu3qly5cnrwwQf1zz//ONRr06aNjh8/bn8sX748Nw4H+UThwtIHH0hLlkj+/mZZdLTUsKE0b55kGC4NDwAAAAAAAAAKBLdLasyYMUP9+/dXeHi4atWqpblz56pQoUJauHBhhvU/+ugjDR48WGFhYapRo4bef/992Ww2RUZGOtTz8fFRyZIl7Y/AwMDcOBzkM717Szt3mr03JCkhQRowQOrRQ4qLc21sAAAAAAAAAJDfuVVSIykpSTt37lTLli3tZVarVS1bttTWrVuztI9Lly7pypUrKl68uEN5VFSUQkJCdNttt2nQoEE6
ffq0U2NHwXHbbdK2bdLTT18rW7nSnET8hx9cFxcAAAAAAAAA5Heerg4gtVOnTik5OVmhoaEO5aGhodq/f3+W9vHSSy+pdOnSDomRNm3aqEuXLqpUqZL+/PNPvfzyy2rbtq22bt0qDw+PdPtITExUYmKi/Xl8fLwkyWazyWaz3cqhZZvNZpNhGC57fTjy9pbeektq3lzq39+ic+cs+usvqUkTQ5MnG3ruOcnqVilDziFkH+cQsotzCNnFOeT++GwAAAAA5DS3Smpk15QpU/Txxx8rKipKvr6+9vIePXrYl++44w7VqVNHVapUUVRUlB544IF0+5k8ebLGjRuXrjw2NlYJCQk5E/wN2Gw2xcXFyTAMWd3t1/IC7J57pK+/9tCgQUW1c6e3rl616IUXLFq/PkGzZsWpRAn3mWyDcwjZxTmE7OIcQnZxDrm/8+fPuzoEAAAAAPmcWyU1goKC5OHhoRMnTjiUnzhxQiVLlrzuttOnT9eUKVP0v//9T3Xq1Llu3cqVKysoKEh//PFHhkmNkSNHKiIiwv48Pj5e5cqVU3BwsAICAm7iiJzHZrPJYrEoODiYRrybCQmRNm+WxowxNHWqRZL0v//5qnVrH334oaH77nNxgP/iHEJ2cQ4huziHkF2cQ+4v9Y1FAAAAAJAT3Cqp4e3trfr16ysyMlKdOnWSJPuk30OGDMl0u6lTp2rixIlav369GjRocMPXOXr0qE6fPq1SpUpluN7Hx0c+Pj7pyq1Wq0sb0BaLxeUxIGM+PtLrr0v3329OJh4bK/3zj0UPPGDR2LHSqFFSBiOd5TrOIWQX5xCyi3MI2cU55N74XAAAAADkNLdrdURERGj+/PlavHixoqOjNWjQIF28eFHh4eGSpD59+mjkyJH2+q+//rpGjx6thQsXqmLFioqJiVFMTIwuXLggSbpw4YJeeOEFbdu2TYcOHVJkZKQ6duyoqlWrqnXr1i45RuRfrVtLP/1kJjckyWaTxo6VWraUjh1zbWwAAAAAAAAAkNe5XVKje/fumj59usaMGaOwsDDt2bNH69ats08efuTIER0/ftxef86cOUpKStKjjz6qUqVK2R/Tp0+XJHl4eGjv3r3q0KGDqlevrieeeEL169fXxo0bM+yNAWRXqVLS119L48dfmyw8KkqqW1dat86loQEAAAAAAABAnuZWw0+lGDJkSKbDTUVFRTk8P3To0HX35efnp/Xr1zspMiBrPDykV16RmjWTHntM+ucf6dQpqW1b6YUXpIkTJS8vV0cJAAAAAAAAAHmL2/XUAPKTe+81h6N6+OFrZdOmmeUHD7ouLgAAAAAAAADIi0hqADmsRAlp7Vpp5sxrvTO2b5fq1ZM++cS1sQEAAAAAAABAXkJSA8gFFov03HPSli1S5cpmWVyc1LWrNHiwlJDg0vAAAAAAAAAAIE8gqQHkogYNpF27pO7dr5XNmSM1aiTt3++6uAAAAAAAAAAgLyCpAeSyokWl5cul+fMlPz+zbO9eqX59afFi18YGAAAAAAAAAO6MpAbgAhaL9OST0o4dUq1aZtmlS1K/flKfPtKFCy4NDwAAAAAAAADcEkkNwIVq15Z++EF64olrZUuXmr029uxxWVgAAAAAAAAA4JZIagAuVqiQ9P770rJlUpEiZtlvv0l33y3Nni0ZhmvjAwAAAAAAAAB3QVIDcBOPPWZOIl6/vvk8MVEaMkR65BHp7FnXxgYAAAAAAAAA7oCkBuBGqlaVNm+WnnvuWtlnn0n16klbt7osLAAAAAAAAABwCyQ1ADfj4yPNnCmtWSMVL26WHT4s3Xuv9Prrks3m2vgAAAAAAAAAwFVIagBuqkMHc7Lwe+4xnycnSyNGSO3aSSdPujQ0AAAAAAAAAHAJkhqAGytXTvruO+mVVySLxSxbv16qW1eKjHRtbAAAAAAAAACQ20hqAG7O01MaP1765hspNNQsi4mRWrWSRo+Wrl51bXwAAAAAAAAAkFtIagB5xAMPSD/9JD34
oPncMKQJE6T775eOHnVtbAAAAAAAAACQG0hqAHlIaKj01VfSlCmSh4dZtnGjORzV55+7NjYAAAAAAAAAyGkkNYA8xmqVXnrJTGaUL2+WnTljTiw+bJiUmOja+AAAAAAAAAAgp5DUAPKoxo2lPXukzp2vlc2aJTVtKv3557WyhARp6VLp0Uct6tIlUI8+atHSpWY5AAAAAAAAAOQlJDWAPCwwUPrPf6S335a8vc2ynTulevWkjz+W1q6VSpeW+vSR1qyRtm710Zo15vPSpRmyCgAAAAAAAEDeQlIDyOMsFmnIEGnbNqlaNbPs/Hnpscekjh2lc+fMMpvN4vD33Dlz/dq1uR8zAAAAAAAAANwKkhpAPlGvntlLo1cvx3LDyLh+Snm/fgxFBQAAAAAAACBvIKkB5CNFikhLlkj9+2etvmFIZ89Kn3ySs3EBAAAAAAAAgDOQ1ADyGYtFOn1asmbx2221Sp99lrMxAQAAAAAAAIAzkNQA8qHTpyWbLWt1bTbpzJmcjQcAAAAAAAAAnIGkBpAPlShxcz01ihfP2XgAAAAAAAAAwBlIagD5UKdON9dTo3PnHA0HAAAAAAAAAJyCpAaQD3XtKgUGmvNrZEV8vDlpOAAAAAAAAAC4M5IaQD7k6ystXmwuZyWx8fTTUpcu0qlTORsXAAAAAAAAAGQHSQ0gn2rfXlq9WipWzHxutRoOf4sVk1q3vlZ/9WqpTh3p669zM0oAAAAAAAAAyDqSGkA+1qGDdOyYtHSp1LGj1Lhxojp2NJ8fPy6tW2cmM4KCzPrHj5uJjogIKSHBpaEDAAAAAAAAQDokNYB8ztdX6tVL+uQTQ59+elaffGKoVy+zXDKTHXv3Sg8+eG2bmTOlRo2kX35xTcwAAAAAAAAAkBGSGgBUqpT01VfSrFmSj49ZtnevVL++9PbbTCIOAAAAAAAAwD2Q1AAgSbJapaFDpR9+kGrXNssSE6Vnn5XatZNiYlwbHwAAAAAAAABkK6lx5MgRbdq0yaHsp59+Up8+fdS9e3etXr06O7sH4AJ33GEmNoYOvVa2bp05ifgXX7guLgAAUPDQ3gAAAACQlmd2Nn722Wd14cIF/e9//5MknThxQi1atFBSUpKKFCmiTz75RKtWrVKXLl2cEiyA3OHraw5F1bat1K+f2UsjNlZq314aNEiaPl0qVMjVUQIAgPyO9gYAAACAtLLVU2PHjh1q1aqV/fmSJUt0+fJl/fTTT/rnn3/0wAMPaPr06dkOEoBrtG5tzq3RocO1sjlzzLk2du1yXVwAAKBgoL0BAAAAIK1sJTXOnDmjkJAQ+/MvvvhCzZo1U5UqVWS1WtWlSxft378/20ECcJ3gYGn1amnuXMnPzyzbv1+6+25p2jTJZnNpeAAAIB+jvQEAAAAgrWwlNYKDg3X48GFJ0rlz57Rt2za1bt3avv7q1au6evVq9iIE4HIWizRggNk74847zbIrV6QXX5RatZKOHnVtfAAAIH+ivQEAAAAgrWzNqdGyZUu99dZbCggIUFRUlGw2mzp16mRf/+uvv6pcuXLZjRGAm6hRQ9q6VRozRpo6VTIM6dtvzUnE33tP6trV1RECAID8hPYGAAAAgLSy1VNjypQpqlmzpp5//nl9/fXXmj59uipVqiRJSkxM1MqVK/XAAw84JVAA7sHbW5oyxUxmlC1rlp09K3XrJj3+uHT+vGvjAwAA+QftDQAAAABpZaunRmhoqDZv3qy4uDj5+fnJ29vbvs5msykyMpI7p4B8qnlzcxLxAQOkVavMskWLpA0bpA8/NOfcAAAAyA7aGwAAAADSylZPjRRFixZ1aGBIkp+fn+rWravixYs74yUAuKHAQGnFCumDD6TChc2yP/+U7rlHeu01iSGuAQCAM9DeAAAAAJAiW0mNyMhITZs2zaFs4cKFKl++vEJDQzVs2DAlJydnK0AA7s1ikfr2lfbsudY7IzlZGjtWatZMOnjQpeEBAIA8jPYGAAAAgLSyldR49dVX
9dNPP9mf79u3TwMGDFBwcLCaN2+ut956S9OnT892kADcX5Uq0saNZjLD+u+/LFu2SHXrmsNRGYZr4wMAAHkP7Q0AAAAAaWUrqREdHa0GDRrYny9dulQBAQHauHGjVqxYof79+2vJkiXZDhJA3uDpKb36qpnc+HcOT50/L/XuLf3f/0nnzrkyOgAAkNfQ3gAAAACQVraSGhcvXlRAQID9+bp169SmTRsVKlRIknTXXXfp8OHD2YsQQJ7TpIk5HFWfPtfKPv7Y7LWxYYPLwgIAAHkM7Q0AAAAAaWUrqVGuXDn98MMPkqQ//vhDP//8sx588EH7+jNnzsjHxyd7EQLIkwICpMWLzWRGsWJm2ZEjUvPm0qhR0pUrrowOAADkBbQ3AAAAAKSVraRGz549NW/ePHXo0EGtW7dWYGCgOnbsaF+/c+dOVa9ePdtBAsi7uneXfvrJnDRcMufWmDTJ7M3x22+ujQ0AALg32hsAAAAA0spWUmPUqFEaMWKE/v77b5UvX16rV69WsX9vyT5z5oyioqLUoUMHZ8QJIA8rX16KjJQmTzbn3ZCkH3+U6tWT5s9nEnEAAJAx2hsAAAAA0vLM1saenpo4caImTpyYbl3x4sUVExOTnd0DyEc8PKQRI6RWrcxJw3/7Tbp0SXrqKenLL83kRlCQq6MEAADuhPYGAAAAgLSy1VMjtQsXLig6OlrR0dG6cOGCs3YLIJ+pX1/atUsaMOBa2erVUp060jffuCwsAADg5pzZ3pg9e7YqVqwoX19fNWrUSDt27Mi07qeffqoGDRqoWLFi8vf3V1hYmJYuXZqt1wcAAABw67Kd1Pjhhx/UokULBQYGqnbt2qpdu7YCAwN1//3368cff3RGjADyGX9/ae5cM5lRooRZdvy49OCDUkSElJDg0vAAAIAbcXZ7Y8WKFYqIiNDYsWO1a9cu1a1bV61bt9bJkyczrF+8eHGNGjVKW7du1d69exUeHq7w8HCtX78+u4cGAAAA4BZka/ip7du3q3nz5vL29taTTz6pmjVrSpKio6O1fPly3XfffYqKilLDhg2dEiyA/KVjR6lhQ6lfP+nrr82ymTPN+TeWLZNuv92l4QEAABfLifbGjBkz1L9/f4WHh0uS5s6dq//+979auHChRowYka5+8+bNHZ4PHTpUixcv1qZNm9S6detbPzgAAAAAtyRbSY1Ro0apTJky2rRpk0qWLOmw7tVXX1XTpk01atQofcOYMgAyUaqU9NVX0ttvSy++KCUlSXv3msNUTZsmDRkiWSyujhIAALiCs9sbSUlJ2rlzp0aOHGkvs1qtatmypbZu3XrD7Q3D0LfffqsDBw7o9ddfz7ReYmKiEhMT7c/j4+MlSTabTTabLUuxwr3ZbDYZhsHniVxFu8i5LBabDItFNt5Y5+LfxevidHMuvsc5wMXf4axeW2W7p8aYMWPSNTAkKTQ0VE899ZTGjx+fnZcAUABYrdLQoVKLFlLPntLPP0uJidKzz5oJj4ULpQz+mQEAAPmcs9sbp06dUnJyskJDQ9Pta//+/ZluFxcXpzJlyigxMVEeHh5699131apVq0zrT548WePGjUtXHhsbqwTG2cwXbDab4uLiZBiGrFanTVUJXFe5cq6OIL+x6VxQkPk9dnUo+UkmwznCxPfY2fgeO52Lv8Pnz5/PUr1sJTWsVquuXr2a6frk5GQu8ABkWZ060g8/SCNGSG++aZZ99ZVZvnCh9PDDro0PAADkLndpbxQpUkR79uzRhQsXFBkZqYiICFWuXDnd0FQpRo4cqYiICPvz+Ph4lStXTsHBwQoICMjxeJHzbDabLBaLgoODafMi1/z9t6sjyF8sFpuKWU4p+OhRWQ3D1eHkHyEhro7ArfE9di6+xznAxd9hX1/fLNXLVlKjSZMmmj17tv7v//5PFSpUcFh35MgRvfvuu2ratGl2XgJAAePrK82aJbVta861ERMjxcZK7dtLgwZJ06dLhQq5OkoAAJAbnN3eCAoK
koeHh06cOOFQfuLEiQx7g6SwWq2qWrWqJCksLEzR0dGaPHlypkkNHx8f+fj4ZLgffgDPPywWC58pchW/1zmfxTBk/fcBJ+HfxOviVHM+vsdO5uLvcFavq7KV1Jg0aZLuu+8+1ahRQ507d1b16tUlSQcOHNCaNWvk4eGhyZMnZ+clABRQrVubc2s8+aS0dq1ZNmeO9N135iTi9eq5Nj4AAJDznN3e8Pb2Vv369RUZGalOnTpJMu+4j4yM1JAhQ7K8H5vN5jBnBgAAAIDck62kRr169bR9+3aNGjVKa9eu1aVLlyRJhQoVUps2bfTqq68qKCjIKYECKHiCg6XVq6V586Rhw6TLl6X9+6VGjaSJE6Xhw12eQAYAADkoJ9obERER6tu3rxo0aKCGDRtq1qxZunjxosLDwyVJffr0UZkyZezJksmTJ6tBgwaqUqWKEhMT9eWXX2rp0qWaM2eOcw8WAAAAQJZkK6khSbVq1dJnn30mm82m2NhYSbKPKzpx4kSNGTNGycnJ2Q4UQMFksUgDBkjNmpmTiO/aJV25Ir34orRunbR4sVS2rKujBAAAOcXZ7Y3u3bsrNjZWY8aMUUxMjMLCwrRu3Tr75OFHjhxx6PZ+8eJFDR48WEePHpWfn59q1KihDz/8UN27d3fugQIAAADIkmwnNVJYrVZ7QwAAnK1GDWnrVmnMGGnqVHMcym+/NScRf+89qWtXV0cIAABykjPbG0OGDMl0uKmoqCiH5xMmTNCECROc8roAAAAAso+BWwDkGd7e0pQpUmTktd4ZZ89K3bpJjz8unT/v2vgAAAAAAAAA5CySGgDynBYtzEnEU/fOWLTInDx82zbXxQUAAAAAAAAgZ5HUAJAnBQZKK1ZIH3wgFS5slv35p3TPPdJrr0lXr7o0PAAAAAAAAAA54Kbn1Ni1a1eW6x47duxmdw8AWWaxSH37momMXr3MXhrJydLYsdL69dKHH0qVKrk6SgAAcDNobwAAAAC4nptOajRo0EAWiyVLdQ3DyHJdALhVVapIGzdKEyZI48dLNpu0ZYtUt6707rtSz55mAgQAALg/2hsAAAAAruemkxqLFi3KiTgAIFs8PaVXX5UefNDstXHwoDlxeO/e0n//K82ZIxUr5uooAQDAjdDeAAAAAHA9N53U6Nu3b07EAQBO0aSJtGeP9Mwz0pIlZtnHH5s9N5Yule67z6XhAQCAG6C9AQAAAOB63HKi8NmzZ6tixYry9fVVo0aNtGPHjkzrzp8/X/fee68CAwMVGBioli1bpqtvGIbGjBmjUqVKyc/PTy1bttTvv/+e04cBwEUCAqTFi81kRtGiZtmRI1Lz5tKoUdKVKy4NDwAAAAAAAMAtcrukxooVKxQREaGxY8dq165dqlu3rlq3bq2TJ09mWD8qKkqPPfaYvvvuO23dulXlypXTgw8+qH/++cdeZ+rUqXrrrbc0d+5cbd++Xf7+/mrdurUSEhJy67AAuED37tLevVKzZuZzw5AmTTJ7c/z2m1mWkGD24HjkETPp8cgj5nP+eQAAAAAAAADcj9slNWbMmKH+/fsrPDxctWrV0ty5c1WoUCEtXLgww/offfSRBg8erLCwMNWoUUPvv/++bDabIiMjJZm9NGbNmqVXXnlFHTt2VJ06dbRkyRIdO3ZMq1evzsUjA+AK5ctLkZHS5MnmvBuS9OOPUr160tNPS6VLS336SKtXS99/b/7t08cs//xzV0YOAAAAAAAAIK2bnlMjJyUlJWnnzp0aOXKkvcxqtaply5baunVrlvZx6dIlXblyRcWLF5ckHTx4UDExMWrZsqW9TtGiRdWoUSNt3bpVPXr0SLePxMREJSYm2p/Hx8dLkmw2m2w22y0dW3bZbDYZhuGy10feV5DPIYtFevFF6f77pd69LfrtN4suXZLefVeSDEkWpbwtKX/PnTPUsaP06aeGOnRwUeBupiCfQ3AOziFkF+eQ++OzAQAAAJDT3CqpcerUKSUnJys0NNShPDQ0
VPv378/SPl566SWVLl3ansSIiYmx7yPtPlPWpTV58mSNGzcuXXlsbKzLhqyy2WyKi4uTYRiyWt2ugw3yAM4hs9fGV19Z9MorRbR8eaF/Sy0Z1jUMiywWQ/36SXv2nJSvb66F6bY4h5BdnEPILs4h93f+/HlXhwAAAAAgn3OrpEZ2TZkyRR9//LGioqLkm41fIEeOHKmIiAj78/j4eJUrV07BwcEKCAhwRqg3zWazyWKxKDg4mEY8bgnn0DWtW0vLl9+4nmFYFBdn0YYNIerVK+fjcnecQ8guziFkF+eQ+8vONTgAAAAAZIVbJTWCgoLk4eGhEydOOJSfOHFCJUuWvO6206dP15QpU/S///1PderUsZenbHfixAmVKlXKYZ9hYWEZ7svHx0c+Pj7pyq1Wq0sb0BaLxeUxIG/jHDKtXStZrdeGmroeq1Vas8aqPn1yPq68gHMI2cU5hOziHHJvfC4AAAAAcppbtTq8vb1Vv359+yTfkuyTfjdu3DjT7aZOnarx48dr3bp1atCggcO6SpUqqWTJkg77jI+P1/bt26+7TwD51+nTWUtoSGa9M2dyNh4AAAAAAAAAWeNWPTUkKSIiQn379lWDBg3UsGFDzZo1SxcvXlR4eLgkqU+fPipTpowmT54sSXr99dc1ZswYLVu2TBUrVrTPk1G4cGEVLlxYFotFzz33nCZMmKBq1aqpUqVKGj16tEqXLq1OnTq56jABuFCJElnvqSFJhQrduA4AAAAAAACAnOd2SY3u3bsrNjZWY8aMUUxMjMLCwrRu3Tr7RN9Hjhxx6NY+Z84cJSUl6dFHH3XYz9ixY/Xqq69Kkl588UVdvHhRTz31lM6dO6d77rlH69atY8xfoIDq1En69NOs14+MlCZPloYNExOGAwAAAAAAAC7kdkkNSRoyZIiGDBmS4bqoqCiH54cOHbrh/iwWi1577TW99tprTogOQF7Xtas0dKh07pxkGDeun5govfyyNH++NG2a1KWLZLHkeJgAAAAAAAAA0nCrOTUAIDf4+kqLF5vLmSUnLBbz0batOVSVJB08KD36qHT//dKePbkSKgAAAAAAAIBUSGoAKJDat5dWr5aKFTOfpyQuUv4WKyatWSN9+aWZwHjggWvbRkVJd94pDRggnTyZayEDAAAAAAAABR5JDQAFVocO0rFj0tKl5jwbzZubf5cuNcvbtzfr3XGH9M03ZhKkShWzzDCkefOkatWkN96QkpJccwwAAAAAAABAQUJSA0CB5usr9eol/ec/0nffmX979Uo/IbjFInXsKP3yi/T661KRImZ5fLz0/PNS7drS559nbY4OAAAAAAAAALeGpAYA3AQfH+nFF6Xff5eeeOLanBy//272/Gjd2kx8AAAAAAAAAHA+khoAcAtCQ6X335d+/FG6995r5d98I9WtKz3zjHTmjOviAwAAAAAAAPIjkhoAkA133il9/720cqVUoYJZlpwsvfOOVLWq9Pbb0pUrro0RAAAAAAAAyC9IagBANlksUteuUnS0NH68VKiQWX72rPTss2bPjfXrXRsjAAAAAAAAkB+Q1AAAJ/Hzk155RfrtN6l372vl0dFSmzZS+/bmOgAAAAAAAAC3hqQGADhZmTLSkiXStm3S3XdfK//iC6l2bWn4cOncOZeFBwAAAAAAAORZJDUAIIc0aiRt3ix9+KGZ6JDM+TVmzJCqVZPee8+cfwMAAAAAAABA1pDUAIAcZLVKPXtKBw5Io0dLvr5m+alT0sCBUv36UlSUS0MEAAAAAAAA8gySGgCQC/z9pddek/bvl7p3v1b+009SixbSI49If/3luvgAAAAAAACAvICkBgDkogoVpI8/ljZulO6881r5p59KNWtKI0dK58+7Lj4AAAAAAADAnZHUAAAXuOce6YcfpAULpNBQsywpSZoyRapeXVq0SLLZXBsjAAAAAAAA4G5IagCAi1it0uOPS7/9Jr30kuTtbZbHxJjlDRuaE40DAAAAAAAAMJHUAAAX
Cwgwe2j8+qvUufO18p07zR4djz0mHTniuvgAAAAAAAAAd0FSAwDcRJUq5twakZFSnTrXyj/+WKpRQxo7Vrp40XXxAQAAAAAAAK5GUgMA3Mz990u7dklz50pBQWbZ5cvSa69Jt90mffSRZBiujREAAAAAAABwBZIaAOCGPDykAQOk33+Xhg2TPD3N8n/+kXr1kpo2NScaBwAAAAAAAAoSkhoA4MaKFZNmzJB+/ll66KFr5Vu3mhOJ9+0rHTvmsvAAAAAAAACAXEVSAwDygNtuk774Qlq3TqpZ81r5kiVS9erSxInmEFUAAAAAAABAfkZSAwDykNatpZ9+kt58UwoMNMsuXpReecVMdnzyCfNtAAAAAAAAIP8iqQEAeYyXl/Tss+Z8G08/bc6/IUmHD0tdu0rNm0u7d7s0RAAAAAAAACBHkNQAgDzq/9u77/gaz/+P469zEhKxEhHUTMSMlZhF0dZI7VFaWlWKDqNUVUtttUr71aI21aKl1OqwYpPaUWLPmJHYQRLJOb8/7l9OhGiVJCfj/Xw8ziPOdV/3fT533Pc5Offnvq6PuztMmmSM3KhfP75982aoVAm6doXQUPvFJyIiIiIiIiIiktSU1BARSePKlIHVq2HFCihWzGizWmHmTCheHMaPh+ho+8YoIiIiIiIiIiKSFJTUEBFJB0wmaNoUgoONJEaOHEb77dvwySdG4mPFCtXbEBERERERERGRtE1JDRGRdCRzZvj4Y6PeRteuRrID4MQJaN4cGjSAgwftG6OIiIiIiIiIiMjTUlJDRCQdypMHpk+HvXuhTp349nXroEIFo8B4eLj94hMREREREREREXkaSmqIiKRjvr6wYQMsXgyenkabxQLffWfU2/jmG7h/354RioiIiIiIiIiIPDklNURE0jmTCV59FQ4fhpEjIWtWo/3GDejdG8qXh1Wr7BmhiIiIiIiIiIjIk1FSQ0Qkg3B2hgED4NgxePvt+PYjR6BhQ2jcGI4etV98IiIiIiIiIiIi/0ZJDRGRDCZ/fvj+e9i5E6pXj2//4w8oWxY++giuX49vj4yEH3+E1q1NtGrlRuvWJn780WgXERERERERERFJSUpqiIhkUFWqwLZtMH8+FCxotMXEwIQJRr2NqVNh6VIjCdKhAyxfDoGBTixfbjzPnx9WrrTrLoiIiIiIiIiISAajpIaISAZmMsEbbxhTUA0ZAlmyGO1Xr8IHH0CrVkbtDQCLxZTg540b0Lw5rFiR8nGLiIiIiIiIiEjGpKSGiIiQNSsMHWrU1GjXLuEyqzXxdeLaO3bUVFQiIiIiIiIiIpIylNQQERGbQoVgwQIYNOjJ+lutRv2NxYuTNy4RERERERERERFQUkNERBIRHAzmJ/yEMJuN2hsiIiIiIiIiIiLJTUkNERF5xNWrYLE8WV+LBa5dS954REREREREREREQEkNERFJhLv7k4/UAKPguIiIiIiIiIiISHJTUkNERB7RosWTj9QA2LAB2reHkJBkC0lERERERERERERJDREReVSbNuDm9t9GYMyfDyVLwoABcOtW8sUmIiIiIiIiIiIZl5IaIiLyCGdnmDvX+PfjEhsmk/Ho2hVy5TLaIiNh9GgoVgymTIGYmJSJV0REREREREREMgYlNUREJFFNm8KyZeDqajw3m60Jfrq6wvLlMH06nDwJfftC5sxG37Aw6NYNypWD334DqzXFwxcRERERERERkXRISQ0REXmsZs3g4kX48Udo3hyqV4+ieXPj+cWLRuIDjATHuHFw5Ai8/nr8+keOGH3q1oV9++yyCyIiIiIiIiIiko4oqSEiIv/I2dkoAr54sZVff73O4sVW2rc32h/m5QU//wyBgVCjRnz7hg1QqRK8/TacP59ysYuIiIiIiIiISPqipIaIiCS555+HrVth8WLw9jbarFb44QcoUQIGDoTbt+0bo4iIiIiIiIiIpD1KaoiISLIwmeDVV+HQIfjf/8DNzWi/dw9GjjSKiU+bpmLiIiIi
IiIiIiLy5JTUEBGRZJU5M/TuDSdOwEcfQaZMRvuVK/D++1ChAvzxh4qJi4iIiIiIiIjIv1NSQ0REUkSuXPD113D4MLRuHd9+6BA0bgwNGsD+/faLT0REREREREREUj8lNUREJEV5e8Mvv8C2bUbtjTjr1oGfH7zzDly4YL/4REREREREREQk9VJSQ0RE7KJGDdi+HRYuBC8vo81qhTlzjGLiQ4ZARIR9YxQRkfRp8uTJeHp64uzsTLVq1di5c+dj+86YMYNatWrh5uaGm5sb9erV+8f+IiIiIiKSvJTUEBERuzGZ4LXXjCmpxo8HV1ej/e5dGD4ciheHmTMhNtauYYqISDqycOFC+vTpw5AhQ9i7dy8VKlTA39+fK1euJNp/48aNtGvXjg0bNhAYGEihQoVo0KABFzSsUERERETELpTUEBERu3Nygo8/NoqJ9+oFjo5G++XL0LUr+PrC6tV2DVFERNKJr7/+mq5du9KpUyd8fHyYOnUqLi4uzJ49O9H+8+fPp1u3bvj6+lKqVClmzpyJxWIhICAghSMXEREREREAR3sHICIiEsfdHSZMgO7d4bPP4NdfjfaDB+GVV8DfH8aNg3Ll7BqmiIikUdHR0ezZs4f+/fvb2sxmM/Xq1SMwMPCJtnH37l3u379Prly5HtsnKiqKqKgo2/Nbt24BYLFYsFgsTxm9pCYWiwWr1ar/T0lRJpO9I0hfTCYLVpMJi36xSUvvi/9Ih1vS0nmcDOx8Dj/p31ZKaoiISKpTvDgsWQJbthgjOHbtMtpXr4a1a41i4sOHw3PP2TdOERFJW8LDw4mNjSVv3rwJ2vPmzcuRI0eeaBuffvop+fPnp169eo/tM3r0aIYNG/ZIe1hYGJGRkf8taEmVLBYLN2/exGq1YjZrAgRJGYUK2TuC9MbCjdy5jfPY3qGkJ4+ZzlEMOo+Tms7jJGfnc/j27dtP1E9JDRERSbVq1YK//jKKiffvD2fPGjcNzJwJP/0E/foZSY+sWe0dqYiIZARjxozh559/ZuPGjTg7Oz+2X//+/enTp4/t+a1btyhUqBAeHh7kyJEjJUKVZGaxWDCZTHh4eCipISnm3Dl7R5C+mEwWXE3heJw/j9lqtXc46UeePPaOIFXTeZy0dB4nAzufw//0N/aDlNQQEZFUzWyGdu2gZUv49lsYORJu3YI7d2DIEJg2Db74Ajp0AAcHe0crIiKpWe7cuXFwcCA0NDRBe2hoKPny5fvHdcePH8+YMWNYt24d5cuX/8e+Tk5OODk5PdJuNpt1ATwdMZlM+j+VFKXrdUnPZLVi/v+HJBG9J/4jHWpJT+dxErPzOfykf1fpnUZERNIEZ2djZMbJk9CjR3wx8YsXjemoKlWCdevsG6OIiKRumTNnplKlSgmKfMcV/a5evfpj1/vyyy8ZMWIEq1atonLlyikRqoiIiIiIPIaSGiIikqbkzg0TJxrFw5s3j2/fvx/q14dGjSA42H7xiYhI6tanTx9mzJjB3LlzOXz4MB988AF37tyhU6dOAHTo0CFBIfGxY8cyaNAgZs+ejaenJ5cvX+by5ctERETYaxdERERERDI0JTVERCRNKlkSli2DjRuNURpx/vwTypeH996Dh2YXERER4fXXX2f8+PEMHjwYX19fgoKCWLVqla14eEhICJcuXbL1nzJlCtHR0bRu3ZrnnnvO9hg/fry9dkFEREREJENTTQ0REUnT6tSBnTuNwuH9+xuF1ywWmD4dFiyATz+FPn3AxcXekYqISGrRo0cPevTokeiyjRs3Jnh+5syZ5A9IRERERESemEZqiIhImmc2w5tvwtGjMGoUZM9utEdEwKBBUKIEzJ1rJDtERERERERERCTtUlJDRETSjSxZjNEaJ05At27g4GC0X7gAHTtC5cqwYYNdQxQRERERERERkWegpIaIiKQ7efLA5Mlw4AA0bRrfvm8fvPyy0Xb4sP3iExERERERERGRp6OkhoiIpFulS8OKFRAQAH5+8e2/
/QblyhmjOa5csV98IiIiIiIiIiLy3yipISIi6d7LL8Pu3UZdjQIFjLbYWJgyBYoVg9Gj4d49+8YoIiIiIiIiIiL/LtUlNSZPnoynpyfOzs5Uq1aNnTt3PrZvcHAwr776Kp6enphMJiZMmPBIn6FDh2IymRI8SpUqlYx7ICIiqZHZDB06wLFj8MUXkC2b0X77NgwYACVLwrx5KiYuIiIiIiIiIpKapaqkxsKFC+nTpw9Dhgxh7969VKhQAX9/f648Zm6Qu3fvUrRoUcaMGUO+fPkeu90yZcpw6dIl22Pr1q3JtQsiIpLKubjA55/D8ePw3ntGsgPg3Dl46y2oWhU2bbJvjCIiIiIiIiIikrhUldT4+uuv6dq1K506dcLHx4epU6fi4uLC7NmzE+1fpUoVxo0bR9u2bXFycnrsdh0dHcmXL5/tkTt37uTaBRERSSPy5YOpU+Hvv6FRo/j2PXvgxRehRQs4etRe0YmIiIiIiIiISGIc7R1AnOjoaPbs2UP//v1tbWazmXr16hEYGPhM2z5+/Dj58+fH2dmZ6tWrM3r0aAoXLvzY/lFRUURFRdme37p1CwCLxYLFTvOSWCwWrFar3V5f0j4dQ/Ks0usxVLo0rFwJ69bBJ5+Y+PtvEwDLl8Pvv1t57z0YNMiKh4edA00H0usxJClHx1Dqp/8bERERERFJbqkmqREeHk5sbCx58+ZN0J43b16OHDny1NutVq0a33//PSVLluTSpUsMGzaMWrVqcfDgQbJnz57oOqNHj2bYsGGPtIeFhREZGfnUsTwLi8XCzZs3sVqtmM2paoCNpBE6huRZpfdjqHx5+OMP+OWXLIwdm43Llx2IiTExeTL88IOVXr3u0LnzHZyd7R1p2pXejyFJfjqGUr/bt2/bOwQREREREUnnUk1SI7k0bNjQ9u/y5ctTrVo1ihQpwqJFi+jcuXOi6/Tv358+ffrYnt+6dYtChQrh4eFBjhw5kj3mxFgsFkwmEx4eHvoSL09Fx5A8q4xyDH34IXTuDF9/bWHcOBN37pi4fdvMF19k58cfszFypJXXX4+vxREZCb/8AsuXm7h2DXLlgubNrbRpgxIgD8kox5AkHx1DqZ+z3vhERERERCSZpZqkRu7cuXFwcCA0NDRBe2ho6D8WAf+vXF1dKVGiBCdOnHhsHycnp0RrdJjNZrt+gTaZTHaPQdI2HUPyrDLKMZQ9OwwZAu++C4MHw+zZYLHA2bMm2rc38c038PXXcO0adOwI168bSQ6Lxfi5dKmJjz6CuXOhaVN7703qklGOIUk+OoZSN/2/iIiIiIhIcks13zoyZ85MpUqVCAgIsLVZLBYCAgKoXr16kr1OREQEJ0+e5LnnnkuybYqISPr03HMwYwYEBYG/f3z7rl1QqxY0bw43bhhtcdPIx/28ccNYvmJFCgYsIiIiIiIiIpLOpZqkBkCfPn2YMWMGc+fO5fDhw3zwwQfcuXOHTp06AdChQ4cEhcSjo6MJCgoiKCiI6OhoLly4QFBQUIJRGH379mXTpk2cOXOG7du307JlSxwcHGjXrl2K75+IiKRN5crBqlXGo1y5hMus1sTXiWvv2NGYokpERERERERERJ5dqpl+CuD1118nLCyMwYMHc/nyZXx9fVm1apWteHhISEiCIe0XL17Ez8/P9nz8+PGMHz+eOnXqsHHjRgDOnz9Pu3btuHr1Kh4eHrzwwgv89ddfeHh4pOi+iYhI2ufvD/XqwXvvwaxZ/97fajWmplq8GNq3T/74RERERERERETSu1SV1ADo0aMHPXr0SHRZXKIijqenJ9bH3SL7/37++eekCk1ERAQHh4Q1NP6NUWNDSQ0RERERERERkaSQqqafEhERSQuuXn2yhAYY/a5dS954REREREREREQyCiU1RERE/iN3d2MExpM6cQL27Uu+eEREREREREREMgolNURERP6jFi2efKQGwPnzULEiNGgAAQGPLy4uIiIiIiIiIiL/TEkNERGR/6hNG3Bz
A5Pp3/s+2GftWqPQeOXKsHAhxMQkX4wiIiIiIiIiIumRkhoiIiL/kbMzzJ1r/PtxiQ2TyXgsWgTffQdFi8Yv27sX2raFEiVg8mS4ezf5YxYRERERERERSQ+U1BAREXkKTZvCsmXg6mo8j6uxEffT1RWWL4fWreGDD+DYMSPBUalS/DZOn4YePaBIERg+3ChALiIiIiIiIiIij6ekhoiIyFNq1gwuXoQffzTqbLz4ovHzxx+N9qZN4/s6OBjTVu3aZdTV8PePXxYeDkOGQOHC8OGHcOZMyu6HiIiIiIiIiEhaoaSGiIjIM3B2hvbtYckS2LDB+Nm+vdGeGJMJXn4ZVq2CoCB4800j4QHGNFQTJ0KxYvDGG7BvX4rthoiIiIiIiIhImqCkhoiIiJ1UqADz5sHJk9CrF7i4GO2xsfDTT1CxojGiIyAArFb7xioiIiIiIiIikhooqSEiImJnRYrAhAkQEmLU1sidO37ZmjVQrx5UrgwLF0JMjN3CFBERERERERGxOyU1REREUgl3dxg0CM6ehe++g6JF45ft3Qtt20KJEjB5sjFVlYiIiIiIiIhIRqOkhoiISCrj4gIffADHjhmjMypVil92+jT06GGM7hg+HK5etV+cIiIiIiIiIiIpTUkNERGRVMrBAV57DXbtMupq+PvHLwsPhyFDoHBh+PBDOHPGbmGKiIiIiIiIiKQYJTVERERSOZMJXn4ZVq2CoCB44w0j4QHGNFQTJ0KxYkZ7UJA9IxURERERERERSV5KaoiIiKQhFSrA/Plw4oQxQsPFxWiPjYWffgI/P2NER0AAWK32jVVEREREREREJKkpqSEiIpIGeXrCN99ASIhRWyN37vhla9ZAvXpQubJRkyMmxm5hioiIiIiIiIgkKSU1RERE0jB3dxg0CM6ehcmToWjR+GV790LbtlCyJHz3nTFVlYiIiIiIiIhIWqakhoiISDrg4gLdusHRo8bojEqV4pedOgXdu0ORIsaojqtX7ReniIiIiIiIiMizUFJDREQkHXF0hNdeg127jLoa/v7xy8LDYcgQKFzYqMdx5ozdwhQREREREREReSpKaoiIiKRDJhO8/DKsWgX79sEbb4CDg7Hs7l2YOBGKFYM334SgILuGKiIiIiIiIiLyxJTUEBERSed8fWH+fDhxwhih4eJitMfGwoIF4OdnjOgICACr1a6hioiIiIiIiIj8IyU1REREMghPT/jmGwgJMWpr5M4dv2zNGqhXD6pUgUWLICbGbmGKiIiIiIiIiDyWkhoiIiIZjLs7DBoEZ8/C5Mng5RW/bM8eeP11KFkSvvvOmKpKRERERERERCS1UFJDREQkg3JxgW7d4NgxWLgQKlaMX3bqFHTvDkWKGKM6rl61X5wiIiIiIiIiInGU1BAREcngHB3htddg925Ytw4aNIhfFh4OQ4ZA4cLQqxecOWO3MEVERERERERElNQQERERg8kEdevC6tWwbx+88QY4OBjL7t6Fb7+FYsXgzTchKMiuoYqIiIiIiIhIBqWkhoiIiDzC1xfmz4cTJ6BnT8iSxWiPjYUFC8DPD/z9ISAArFa7hioiIiIiIiIiGYiSGiIiIvJYnp7GCI2QEBg2DHLnjl+2Zg3UqwdVqsCiRRATk/g2IiPhxx+hdWsTrVq50bq1iR9/NNpFRERERERERP4LJTVERETkX+XODYMHw9mzMHkyeHnFL9uzB15/HUqWhO++M6aqirNiBeTPDx06wPLlEBjoxPLlxvP8+WHlypTfFxERERERERFJu5TUEBERkSfm4gLdusGxY/Dzz1CxYvyyU6ege3coUgRGjIB586BFC7hxw1husZgS/LxxA5o3NxIfIiIiIiIiIiJPQkkNERER+c8cHY3RGbt3w7p10KBB/LLwcGNUx1tvGfU2HldzI669Y0dNRSUiIiIiIiIiT0ZJDREREXlqJhPUrQurV8O+fdCuHTg4PPn6Vitcvw6LFydfjCIiIiIiIiKSfiipISIiIknC
1xcWLIDjx8Hb+8nXM5th6dJkC0tERERERERE0hElNURERCRJeXlBwYJP3t9igWvXki8eEREREREREUk/lNQQERGRJOfubozAeBImE+TIkbzxiIiIiIiIiEj6oKSGiIiIJLkWLYwRGE/CajWKjQ8YAJcvJ2tYIiIiIiIiIpLGKakhIiIiSa5NG3BzM0ZhPIm7d2H0aChSBLp0gSNHkjc+EREREREREUmblNQQERGRJOfsDHPnGv9+XGLDZDIedetCpkxGW3Q0zJoFpUtDs2awZYsxkkNEREREREREBJTUEBERkWTStCksWwaursZzs9ma4KerKyxfbkw9dfo09OuXsLbGypVQuzZUrw5LlkBsbIqGLyIiIiIiIiKpkJIaIiIikmyaNYOLF+HHH6F5c6hePYrmzY3nFy8aiQ+AAgVg7Fg4dw7Gj4eCBeO3sWMHtG4NpUrBlClw75599kVERERERERE7E9JDREREUlWzs7Qvj0sXmzl11+vs3ixlfbtjfaH5cgBH38Mp07BDz9AuXLxy06cgG7doHBhGDYMwsNTbh9EREREREREJHVQUkNERERSnUyZ4K23YP9+WLXKqLsRJzwchg41khvdu8PJk3YLU0RERERERERSmJIaIiIikmqZTODvb9Td2LMH2rUDBwdj2b178N13UKIEtGkDO3faN1YRERERERERSX5KaoiIiEiaULEiLFhgTEPVqxdkzWq0WyyweDFUqwZ16sDvvxttIiIiIiIiIpL+KKkhIiIiaYqnJ0yYACEhMHIk5M0bv2zzZmjSxKjFMWcOREXZK0oRERERERERSQ5KaoiIiEialCsXDBgAZ87AjBlQsmT8skOH4J13wMsLxo6FGzfsFaWIiIiIiIiIJCUlNURERCRNc3aGLl2MRMby5fDCC/HLLl2Czz6DQoXg44/h3Dn7xSkiIiIiIiIiz05JDREREUkXzGZo1gy2bIHAQGjVyig0DhARAV9/DUWLwltvwf799o1VRERERERERJ6OkhoiIiKS7jz/PCxZAkePwvvvG6M5AGJiYN488PUFf39Ytw6sVruGKiIiIiIiIiL/gZIaIiIikm4VLw5TpsDZszB4MLi7xy9bswbq14eKFWHBArh/335xioiIiIiIiMiTUVJDRERE0r08eWDYMAgJgUmTjGmo4gQFwZtvQrFiMGGCMVWViIiIiIiIiKROSmqIiIhIhuHiAt27w7FjsGgRVKkSvywkBD76yCgqPmAAXL5svzhFREREREREJHFKaoiIiEiG4+AAbdrAjh2wcSM0bhy/7MYNGD0aihSBLl3gyBF7RSkiyWXy5Ml4enri7OxMtWrV2Llz52P7BgcH8+qrr+Lp6YnJZGLChAkpF6iIiIiIiDxCSQ0RERHJsEwmqFMHfvsNDh6ETp0gUyZjWXQ0zJoFpUtDs2awZYuKioukBwsXLqRPnz4MGTKEvXv3UqFCBfz9/bly5Uqi/e/evUvRokUZM2YM+fLlS+FoRURERETkYUpqiIiIiABlysDs2XDmDHz6KeTMGb9s5UqoXRuqV4clSyA21m5hisgz+vrrr+natSudOnXCx8eHqVOn4uLiwuzZsxPtX6VKFcaNG0fbtm1xcnJK4WhFRERERORhSmqIiIiIPCB/fhgzxqix8dVXULBg/LIdO6B1ayhVCqZMgXv37BeniPx30dHR7Nmzh3r16tnazGYz9erVIzAw0I6RiYiIiIjIk3K0dwAiIiIiqVGOHNCnD/TsCQsXwrhx8PffxrITJ6BbNxg8GHr0MIqP585t33hF5N+Fh4cTGxtL3rx5E7TnzZuXI0lYQCcqKoqoqCjb81u3bgFgsViwWCxJ9jpiPxaLBavVqv9PSVEmk70jSF9MJgtWkwmLfrFJS++L/0iHW9LSeZwM7HwOP+nfVkpqiIiIiPyDTJmgfXt4801Yu9ZIbqxbZywLD4ehQ2HsWKMeR58+4O1t13BFJBUYPXo0w4YNe6Q9LCyMyMhIO0QkSc1i
sXDz5k2sVitmsyZAkJRRqJC9I0hvLNzInds4j+0dSnrymBpVYtB5nNR0Hic5O5/Dt2/ffqJ+SmqIiIiIPAGTCRo0MB779sH48cYIjthYYxqq776DqVOhVSv45BOoWtXeEYvIw3Lnzo2DgwOhoaEJ2kNDQ5O0CHj//v3p06eP7fmtW7coVKgQHh4e5MiRI8leR+zHYrFgMpnw8PBQUkNSzLlz9o4gfTGZLLiawvE4fx6z1WrvcNKPPHnsHUGqpvM4aek8TgZ2PoednZ2fqJ+SGiIiIiL/kZ8fzJ8Po0bBhAkwYwbcuWOM1F282HjUrm0kNxo1Al3vEkkdMmfOTKVKlQgICKBFixaAcXE6ICCAHj16JNnrODk5JVpU3Gw26wJ4OmIymfR/KilK1+uSnslqxfz/D0kiek/8RzrUkp7O4yRm53P4Sf+u0juNiIiIyFMqUgT+9z/jjqtRo+DBG703b4amTaFsWZg9Gx6YXl9E7KhPnz7MmDGDuXPncvjwYT744APu3LlDp06dAOjQoQP9+/e39Y+OjiYoKIigoCCio6O5cOECQUFBnDhxwl67ICIiIiKSoSmpISIiIvKM3Nygf384cwZmzoRSpeKXHT4MnTuDlxeMGQM3biRcNzISfvwRXn0VXnzR+Pnjj0a7iCS9119/nfHjxzN48GB8fX0JCgpi1apVtuLhISEhXLp0ydb/4sWL+Pn54efnx6VLlxg/fjx+fn506dLFXrsgIiIiIpKhafopERERkSTi5GQkMDp1gt9/N4qKb9liLLt0yUh8jBwJXbtC794QFAQdO8L168YoX4vF+Pnrr9CrF8yda4z2EJGk1aNHj8dON7Vx48YEzz09PbFqOgMRERERkVRDIzVEREREkpjZbCQjNm+Gv/4yRl+YTMayiAhjyiovL2jePH7khsWS8OeNG8byFStSOnoRkf/m/v379OjRAzc3N3LlykXPnj2JiYl5qr7ZsmVL8MiUKRPly5cHICoqiq5du+Ll5UX27NkpVaoUs2fPtq0bEhLyyPqOjo40a9YseX8BIiIiIpKilNQQERERSUbVqhmFw48dgw8+AGdnoz0uefG4G8Dj2jt21FRUIpK6ffHFF2zdupVDhw4RHBzMli1bGDVq1FP1jYiISPAoXbo0bdu2BSAmJobnnnuOdevWcevWLb7//ns+/vhj1qxZA0DhwoUTrHvt2jVcXV1t64uIiIhI+qCkhoiIiEgKKFYMvvsOQkKgRYsnW8dqNaamWrw4WUMTEXkms2fPZuDAgTz33HM899xzfP7558yaNeuZ++7cuZNDhw7RsWNHALJmzcrw4cPx9vbGZDLx/PPP89JLL7F169ZE11+2bBkWi4VWrVolyX6KiIiISOqgpIaIiIhICvLwMKanMj/hX2FmMyxdmrwxiYg8revXr3P+/Hl8fX1tbb6+voSEhHDz5s2n7gswa9YsGjZsSP78+RN97cjISHbu3Gmbniqx9d98802c44bIiYiIiEi6kOqSGpMnT8bT0xNnZ2eqVavGzp07H9s3ODiYV199FU9PT0wmExMmTHjmbYqIiIgkt6tX46ef+jcWC5w5k6zhiIg8tYiICABcXV1tbXH/vn379lP3vXPnDj///DNdunRJ9HWtVitdunShePHiiY7EOHv2LOvWrXvs+iIiIiKSdqWqpMbChQvp06cPQ4YMYe/evVSoUAF/f3+uXLmSaP+7d+9StGhRxowZQ758+ZJkmyIiIiLJzd39yUdqAOzdCzVqwMKFcP9+8sUlIvJfZcuWDSDBSIu4f2fPnv2p+/7yyy+4uLjQuHHjR17TarXSrVs3jh49yrJlyzAn8oY6Z84c/Pz8qFChwtPsloiIiIikYqkqqfH111/TtWtXOnXqhI+PD1OnTsXFxYXZs2cn2r9KlSqMGzeOtm3b4uTklCTbFBEREUluLVo8+UiNOIGB0LYtFC0Ko0dDeHiyhCYi8p+4ublRsGBBgoKCbG1BQUEUKlSInDlzPnXf
mTNn8vbbb+Po6Jig3Wq10r17d3bs2MGaNWseWQ/AYrEwZ84cjdIQERERSadSTVIjOjqaPXv2UK9ePVub2WymXr16BAYGppptioiIiDyrNm3AzQ1Mpn/uZzKBiwv4+MS3nT8PAwZAoULQpQscOJC8sYqI/JtOnToxcuRILl++zOXLlxk1atRjEwpP0vfo0aNs376dzp07P7J+jx492LZtG2vXrsXNzS3R11i7di3h4eG0a9fu2XdORERERFIdx3/vkjLCw8OJjY0lb968Cdrz5s3LkSNHUnSbUVFRREVF2Z7funULMO74sfzX2yqTiMViwWq12u31Je3TMSTPSseQPCsdQ/EyZ4Y5c6BlSxMmE1itj2Y3TCYrAAsWWGnSBDZuhG+/NbFypdE/MhJmzTIeL71kpWdPo5+DQwrvTArSMZT66f8mYxo0aBBXr16ldOnSALRv354BAwYA8P777wMwderUf+0bZ9asWdSqVYvixYsnaD979izfffcdTk5OFClSxNbevn172/bj1m/dunWiozhEREREJO1LNUmN1GT06NEMGzbskfawsDAiIyPtEJHxBfHmzZtYrdZE54wV+Tc6huRZ6RiSZ6VjKKFq1WDOHCd69crJzZsmzGYrFkv8zxw5rHz77U2qVYsiLAzKlIFp02DAAAdmz3bhp5+ycPu28XvcsMHEhg0mihSJoVOnu7Rrd48cOax23sOkp2Mo9Xu42LNkDJkyZWLy5MlMnjz5kWUPJhv+rW+cL7/8MtH2IkWKYLX++3vbokWL/rWPiIiIiKRdqSapkTt3bhwcHAgNDU3QHhoa+tgi4Mm1zf79+9OnTx/b81u3blGoUCE8PDzIkSPHU8XyrCwWCyaTCQ8PD32Jl6eiY0ielY4heVY6hh711lvGVFSLF1tYtszEtWtWcuWCFi0stG4Nzs6P3mWcJw9UqQJffgk//GBh4kQTx48bIz3OnnVk6NAcjB+fnbffhh49rJQokdJ7lXx0DKV+zs7O9g5BRERERETSuVST1MicOTOVKlUiICCAFi1aAMYX14CAAHr06JGi23Ryckq08LjZbLbrF2iTyWT3GCRt0zEkz0rHkDwrHUOPcnGBDh2MR7x/KbYB5MwJPXtC9+6wahV8+y2sXm0si4gwMXkyTJ5somFD6NULGjT49xoeaYGOodRN/y8iIiIiIpLcUtW3jj59+jBjxgzmzp3L4cOH+eCDD7hz5w6dOnUCoEOHDvTv39/WPzo6mqCgIIKCgoiOjubChQsEBQVx4sSJJ96miIiISFpmNkOjRkZi49Ah+OADI1ES588/4ZVXjGLjU6bAnTv2i1VERERERETkWaWqpMbrr7/O+PHjGTx4ML6+vgQFBbFq1Spboe+QkBAuXbpk63/x4kX8/Pzw8/Pj0qVLjB8/Hj8/P7p06fLE2xQRERFJL0qXhu++g/PnYdw4eKCOLkeOQLduULAg9O0LZ87YLUwRERERERGRp5aqkhoAPXr04OzZs0RFRbFjxw6qVatmW7Zx40a+//5723NPT0+sVusjj40bNz7xNkVERETSGzc3I3Fx4gQsWQK1a8cvu3EDvvoKvL2hVSvYtAmeoO6uiIiIiIiISKqQ6pIaIiIiIpI0HB3jExf79kGnThBXNsxigaVL4cUXwc8P5syByEi7hisiIiIiIiLyr1JNoXARERERST6+vjB7NowdC9OmGdNUxc3quX8/vPMOfPopvPeeUZcjf367hisiyaBpU3tHkH6YTDB9ur2jEBEREcmYNFJDREREJAPx8ICBA42aGvPnQ9Wq8cvCwuCLL4xaHG+8ATt22C1MERERERERkUQpqSEiIiKSAWXOHJ+4+OsvaNfOmK4KICYGfvoJnn/eePz0E9y/b994RUREREREREBJDREREZEMr1o1WLDAGL3x+eeQO3f8sh07jOSHpyeMHGmM5hARERERERGxFyU1RERERASAAgWM6adCQmDWLChfPn7ZxYvGtFWFChn1N/bvt1+c
IiIiIiIiknEpqSEiIiIiCWTJYiQugoJgwwZo0QLM//9XY1QUzJljFB5/8UVYuhRiY+0Xq4iIiIiIiGQsSmqIiIiISKJMpvjExYkT8PHHkDNn/PJNm6BVKyhWDL76Cm7csFekIiIiIiIiklEoqSEiIiIi/8rLC8aPh/PnYfJkKFkyftmZM9C3rzF9VbducOSI3cIUERERERGRdE5JDRERERF5YtmyGYmLQ4fgzz/hlVfil929C1OmQOnSRvuff4LFYr9YRUREREREJP1RUkNERERE/jOzOT5xcfiwkejImjV++erV0KgR+PgYIzsiIuwXq4iIiIiIiKQfSmqIiIiIyDMpVcpIXJw/b9TW8PSMX3b0KPToYUxN1acPnDpltzBFREREREQkHVBSQ0RERESShKurkbg4ccIoLv7ii/HLbt2C//3PKCreogVs2ABWq50CFRERERERkTRLSQ0RERERSVIODvGJi/37oXNncHIyllmtsHw5vPwy+PrCrFlw7549oxUREREREZG0REkNEREREUk25cvDzJnG1FQjR0L+/PHL/v4bunSBQoXg88+NPomJjIQff4TWrU20auVG69YmfvzRaBcREREREZGMRUkNEREREUl2uXPDgAFw5gz89BM8/3z8sqtXYdQo8PKCtm0hMDB+aqoVK4xESIcOxgiPwEAnli83nufPDytX2mV3RERERERExE6U1BARERGRFJMpU3ziYscOeOMNcHQ0lsXEwMKFUKMGVKtm1Odo0QJu3DCWWyymBD9v3IDmzY3Eh4iIiIiIiGQMSmqIiIiIiF1UrQrz58PZszBoEHh4xC/btcsoLG61Pr6geFx7x46aikpERERERCSjUFJDREREROwqf34YPhxCQmDOHKOA+JOyWuH6dVi8ONnCExERERERkVRESQ0RERERSRWcnY1RF3v3Qu3aT76e2QxLlyZbWCIiIiIiIpKKONo7ABERERGRB5lMxuNJWSxGwfBWraB6daMmR6VKRpJERERERERE0hclNUREREQk1XF3N0ZgWCxP1v/+fWO0RtyIjUyZjMRGXJKjRg1jmisRERERERFJ2zT9lIiIiIikOi1aPHlCAyBLloTP79+Hv/4yio23aQMFCoCnJ7RrBxMnwp49Rh8RERERERFJWzRSQ0RERERSnTZtoFcvuHHDKAb+OCYTuLrC+fNw+jQEBsL27cbj6NGEfc+eNR4//2w8d3GBKlXiR3I8/zzkzp1ceyQiIiIiIiJJQUkNEREREUl1nJ1h7lxo3txIXCSW2IiruzF3rpGgKFPGeHTpYrRfvWqM1ohLcuzcCXfvxq9/9y5s2mQ84pQoEZ/kqF4dfHyMabBEREREREQkdVBSQ0RERERSpaZNYdky6NgRrl8Hs9mKxWKy/XR1NRIaTZsmvr67OzRubDwAYmLg77/jkxzbtxsjNx507Jjx+P5743nOnMYIjrgkR7VqkCNH8uyviIiIiIiI/DslNUREREQk1WrWDC5ehMWL4ddf4fLlKPLly0yrVtC6tTGi40k5OkLFisajRw+j7eLFhFNW7d0L0dHx69y8CatXGw8wRoeUK5ewALm3d/yoEREREREREUleSmqIiIiISKrm7Azt28Mbb1i5cuU6efLkwWxOmixC/vzw6qvGAyAy0khsPDiaIzQ0vr/Vaoz2+PtvmDbNaPPwSJjkqFz50cLlIiIiIiIikjSU1BARERER+X/OzvHJCTCSGGfOGMmNuBEd+/eDxRK/TlgYrFhhPMAYEeLnF7+dGjWgYMEU3xUREREREZF0SUkNEREREZHHMJnAy8t4vPmm0RYRYRQdj0tyBAYaNT/ixMTArl3G45tvjLaCBRMWIPf1hcyZU3x3RERERERE0jwlNURERERE/oNs2eDll40HGKM2jh5NWJvj8OGE65w/D4sWGQ8wRoRUqRKf5KheHfLkebp4IiPhl1+MoupXrxoF0lu0gDZt/lvNERERERERkbRASQ0RERERkWdgNkPp
0sbjnXeMtmvXYMeO+CTHjh1w5078OpGRsGWL8YhTrFh8kqNGDShTBhwc/vm1V6yAjh2NkSJms5FgMZuNouq9esHcudC0aZLvsoiIiIiIiN0oqSEiIiIiksRy5YKGDY0HGFNSHTyYsAD56dMJ1zlxwnj88IPxPHt2eP75+CRHtWrg6hrff8UKY0RGnLg6H3E/b9yA5s2NERzNmiX9PoqIiIiIiNiDkhoiIiIiIsnM0dGoo+HrC926GW2XLyecsmrPHoiKil/n9m1Yu9Z4gFHfw8fHSHBUqQJ9+xrtVmvir2m1Gut07AgXL2oqKhERERERSR+U1BARERERsYN8+aBlS+MBRkJj37744uPbtsGlS/H9rVYIDjYeM2Y82WtYrcbUVIsXQ/v2Sb8PIiIiIiIiKU1JDRERERGRVMDJyZhu6vnnjedWK4SExCc5tm+HoCCIjf1v2zWbYelSJTVERERERCR9UFJDRERERCQVMpmgSBHj0a6d0XbnDuzaZRQkf7gmx+NYLEbhchERERERkfTAbO8ARERERETkyWTNCi++CH5+xgiMJ2E2G4XLRURERERE0gMlNURERERE0pgWLYwRGE/CYomv2yEiIiIiIpLWKakhIiIiIpLGtGkDbm7GFFX/xGQy+rVunTJxiYiIiIiIJDclNURERERE0hhnZ5g71/j34xIbce1z5xr9RURERERE0gMlNURERERE0qCmTWHZMnB1NZ7H1diI++nqCsuXG/1ERERERETSC0d7ByAiIiIiIk+nWTO4eBEWL4alS+HaNaMoeMuWxpRTGqEhIiIiIiLpjZIaIiIiIiJpmLMztG9vPERERERERNI7TT8lIiIiIiIiIiIiIiJpgpIaIiIiIiIiIiIiIiKSJiipISIiIiIiIiIiIiIiaYKSGiIiIiIiIiIiIiIikiYoqSEiIiIiIiIiIiIiImmCkhoiIiIiIiIiIiIiIpImKKkhIiIiIiIiIiIiIiJpgpIaIiIiIiIiIiIiIiKSJiipISIiIiIiIiIiIiIiaYKSGiIiIiIiIiIiIiIikiYoqSEiIiIiIhnK5MmT8fT0xNnZmWrVqrFz585/7P/LL79QqlQpnJ2dKVeuHH/88UcKRSoiIiIiIg9TUkNERERERDKMhQsX0qdPH4YMGcLevXupUKEC/v7+XLlyJdH+27dvp127dnTu3Jl9+/bRokULWrRowcGDB1M4chERERERASU1REREREQkA/n666/p2rUrnTp1wsfHh6lTp+Li4sLs2bMT7f/NN9/wyiuv8Mknn1C6dGlGjBhBxYoVmTRpUgpHLiIiIiIioKSGiIiIiIhkENHR0ezZs4d69erZ2sxmM/Xq1SMwMDDRdQIDAxP0B/D3939sfxERERERSV6O9g4gLbBarQDcunXLbjFYLBZu376Ns7MzZrNyUfLf6RiSZ6VjSJ6VjiF5VjqGUr+4v5fj/n5ObcLDw4mNjSVv3rwJ2vPmzcuRI0cSXefy5cuJ9r98+fJjXycqKoqoqCjb85s3bwJw48YNLBbL04b/zGJi7PbS6Y7JZOHWrVtkzpxZ70eSYnQOJy2TycKt+/fJHBODOZV+bqVJN27YO4JUTedx0tJ5nAzsfA4/6fcJJTWewO3btwEoVKiQnSMREREREUn9bt++Tc6cOe0dht2MHj2aYcOGPdJepEgRO0QjyeXPP+0dgYg8K53GycDNzd4RSAaj8ziJpZJz+N++Tyip8QTy58/PuXPnyJ49OyaTyS4x3Lp1i0KFCnHu3Dly5MhhlxgkbdMxJM9Kx5A8Kx1D8qx0DKV+VquV27dvkz9/fnuHkqjcuXPj4OBAaGhogvbQ0FDy5cuX6Dr58uX7T/0B+vfvT58+fWzPLRYL165dw93d3W7fJyRp6f1IJO3TeSyS9uk8Tn+e9PuEkhpPwGw2U7BgQXuHAUCOHDl0ksoz0TEkz0rHkDwrHUPyrHQMpW6peYRG5syZqVSpEgEBAbRo0QIwEg4BAQH06NEj
0XWqV69OQEAAvXv3trWtXbuW6tWrP/Z1nJyccHJyStDm6ur6rOFLKqT3I5G0T+exSNqn8zh9eZLvE0pqiIiIiIhIhtGnTx/efvttKleuTNWqVZkwYQJ37tyhU6dOAHTo0IECBQowevRoAHr16kWdOnX46quvaNy4MT///DO7d+9m+vTp9twNEREREZEMS0kNERERERHJMF5//XXCwsIYPHgwly9fxtfXl1WrVtmKgYeEhCQo/FyjRg0WLFjAwIEDGTBgAMWLF2fZsmWULVvWXrsgIiIiIpKhKamRRjg5OTFkyJBHhrGLPCkdQ/KsdAzJs9IxJM9Kx5AklR49ejx2uqmNGzc+0tamTRvatGmTzFFJWqL3I5G0T+exSNqn8zjjMlmtVqu9gxAREREREREREREREfk35n/vIiIiIiIiIiIiIiIiYn9KaoiIiIiIiIiIiIiISJqgpIaIiIiIiIiIiIiIiKQJSmqkAZMnT8bT0xNnZ2eqVavGzp077R2SpBGjR4+mSpUqZM+enTx58tCiRQuOHj1q77AkDRszZgwmk4nevXvbOxRJQy5cuED79u1xd3cnS5YslCtXjt27d9s7LEkjYmNjGTRoEF5eXmTJkgVvb29GjBiBysKJiIiIiEh6YrVasVgs9g4jTVBSI5VbuHAhffr0YciQIezdu5cKFSrg7+/PlStX7B2apAGbNm2ie/fu/PXXX6xdu5b79+/ToEED7ty5Y+/QJA3atWsX06ZNo3z58vYORdKQ69evU7NmTTJlysSff/7JoUOH+Oqrr3Bzc7N3aJJGjB07lilTpjBp0iQOHz7M2LFj+fLLL5k4caK9QxMRSTesVisxMTG6kCKSRlgsFmJiYnSTh0g6ERsbi9VqxWQyYTbrcv2TMFn1DpiqVatWjSpVqjBp0iTA+OAqVKgQPXv25LPPPrNzdJLWhIWFkSdPHjZt2kTt2rXtHY6kIREREVSsWJHvvvuOL774Al9fXyZMmGDvsCQN+Oyzz9i2bRtbtmyxdyiSRjVp0oS8efMya9YsW9urr75KlixZmDdvnh0jExFJn6Kiorh8+TJFihSxdygi8gRu3bqFk5MTTk5O9g5FRJ5A3A0EiSUvzp49y9atW3Fzc6NRo0a2RIc8SqmfVCw6Opo9e/ZQr149W5vZbKZevXoEBgbaMTJJq27evAlArly57ByJpDXdu3encePGCd6PRJ7EihUrqFy5Mm3atCFPnjz4+fkxY8YMe4claUiNGjUICAjg2LFjAOzfv5+tW7fSsGFDO0cmIpK2WCwWYmNjE1125coVhg4dSqlSpfDy8mLw4MFERkamcIQi8qDY2NjHnrP79u2jc+fOFC9enGrVqrFmzRoAjdwQScUeTGY8nND4448/KFu2LD4+PkyePJn169cTExOjhMY/cLR3APJ44eHhxMbGkjdv3gTtefPm5ciRI3aKStIqi8VC7969qVmzJmXLlrV3OJKG/Pzzz+zdu5ddu3bZOxRJg06dOsWUKVPo06cPAwYMYNeuXXz44YdkzpyZt99+297hSRrw2WefcevWLUqVKoWDgwOxsbGMHDmSN998096hiYikKQ9eQDl58iTh4eFUrFiRTJkyMWHCBDZt2kSfPn2oWbMmly5d4t69ezg7O9sxYpGMzcHBwfbvo0eP4uLiQqFChTh69Ch9+/bFzc2NMWPGkD17drJmzQqgC6AiqYTFYnkkcRH3fP369WzYsIGCBQvyzjvvYLVaGT9+PA0bNmTIkCFky5aNs2fP6nz+F0pqiGQQ3bt35+DBg2zdutXeoUgacu7cOXr16sXatWv1pVaeisVioXLlyowaNQoAPz8/Dh48yNSpU5XUkCeyaNEi5s+fz4IFCyhTpgxBQUH07t2b/Pnz6xgSEXlI3F3dD14MjRMSEsLYsWNZuHAhJpOJRo0aMWjQINzd3fntt99o27Yt7777LgBlypQBEr8oIyJJw2q1Ehsbm+hd22Bc+Bw9ejQ7duygUKFCdO3a
ld69e7Nr1y62b9/OvXv37BC1SMb1b5+JcSOlHlcXY/ny5fTr14+IiAiqVKlC5syZuX79OlevXuXs2bN06tSJbNmyceHCBU0B+QSU1EjFcufOjYODA6GhoQnaQ0NDyZcvn52ikrSoR48e/Pbbb2zevJmCBQvaOxxJQ/bs2cOVK1eoWLGirS02NpbNmzczadIkoqKiEv3SLBLnueeew8fHJ0Fb6dKlWbJkiZ0ikrTmk08+4bPPPqNt27YAlCtXjrNnzzJ69GglNUREHhL3d1lMTAw3btwgd+7ctmULFixg7969/PLLL5QrV47w8HDy5MnD3bt3qVSpEvPmzePy5cu4u7uTP39+vL29efnll+21KyLpnslkwtHRuCx37949HB0dyZQpE2Bc9xkyZAh+fn6MHDmSPHny2K4NeXh4kCVLFj799FOcnJwoUqQIhQsXxtfXFw8PD7vtj0h6929J/riRFRcvXmTTpk0ULVrUNiLy9u3bzJ07lzp16jB9+nTAmCI+Z86cWK1WWrRoQdeuXfnqq6/w8fEhIiICPz8/hg0bluz7lVbplotULHPmzFSqVImAgABbm8ViISAggOrVq9sxMkkrrFYrPXr0YOnSpaxfvx4vLy97hyRpTN26dTlw4ABBQUG2R+XKlXnzzTcJCgpSQkP+Vc2aNTl69GiCtmPHjunOE3lid+/efeQLhIODg21OWhGRjOLixYvs27fvse9/sbGxLFy4kAoVKuDu7k67du349ttvAbh8+TIbNmygXLlyvPTSS2TPnp1SpUqRI0cO8uXLxxdffEGNGjW4e/cuFy5cYNKkSTRr1ozVq1en5C6KpAtxd2ufOnXKlohIrNZFeHg4AwYMsF347NOnDxcuXABg9+7d7N+/nyFDhlC1alUKFChAtWrVAPD39+ezzz7j77//5ty5c8yfP59WrVrRrVs3Wx1NEUl648aNY968eY/9HA4LC+Ott96iWLFiDB06lI4dO/Lee+8BxvkeGhqKm5sbkZGR/P3337brKXnz5mXw4MEsW7aMQYMGUbVqVXx8fBg7diy//fZbiu1fWqORGqlcnz59ePvtt6lcuTJVq1ZlwoQJ3Llzh06dOtk7NEkDunfvzoIFC1i+fDnZs2fn8uXLAOTMmZMsWbLYOTpJC7Jnz/5IDZasWbPi7u6u2izyRD766CNq1KjBqFGjeO2119i5cyfTp0+33Z0i8m+aNm3KyJEjKVy4MGXKlGHfvn18/fXXvPPOO/YOTUQkRcRNd9GzZ0+OHTvGpk2byJUr1yP9/v77b4YPH85rr71Gw4YNWb58OZ999hnOzs68++67VK1aldmzZ1OvXj28vLzImTMnRYsWpX379hQoUICZM2cCEBERQbZs2ShfvjwrV67E398/pXdZJE24e/cuLi4uWK3WBHPfm0wmjh8/ToUKFZg0aRLvvPNOonPj/+9//yMgIICBAweSK1cuevToQUhICJMnT6ZQoUIUK1aMJk2aULx4cby8vHB1daV69eo8//zz9OvXj379+nHt2jVy5crFxo0befnll/nyyy/JmTNnSv4aRNKFqKgofv31V+rWrUuePHkSnNcxMTE4Ojryxx9/YLFYaNasGTly5HhkG0uWLGHr1q3s2LGDkiVLsnTpUj755BMGDhzIF198wdtvv81HH33E4sWLKVOmDGfPnqVEiRJMnjyZPHny8MorryTY3syZMzl+/HiK7H9apKRGKvf6668TFhbG4MGDuXz5Mr6+vqxateqR4uEiiZkyZQoAL774YoL2OXPm0LFjx5QPSEQynCpVqrB06VL69+/P8OHD8fLyYsKECSryLE9s4sSJDBo0iG7dunHlyhXy58/Pe++9x+DBg+0dmohIioi7w7t9+/Z07dqVK1euPJLUsFgsLFiwgMyZM9O7d29y5sxJ1apVuX37NrNnz6Zhw4aMGDGCvHnzcufOHSIjI9m9ezfz588nLCyMIUOGEBISQpYsWfDw8GDVqlU4OTlRpUoVe+yy
SKo3d+5cPvzwQ27evJlowqJ48eIUL16c06dPExkZ+Uh9wpCQEH766Se6du1qu1EjT5489O7dm2nTpjFixAjGjBnDn3/+iYuLC6dOnWL//v1MmjSJHTt24OrqSmhoKAUKFOD+/fts2bKFypUr4+rqmhK7L5LuHD16lHfffZehQ4fy8ccfY7FYbCMp4s7xd999l969e3PlypVHkhpRUVEsXryYRo0aUa5cOcC4prt//37Wrl1Lt27dePfdd6lbty43b97k3LlzhIWFMWrUKKZMmcKQIUNYtWoVrq6uREZGMmfOHCpWrEjjxo1T9heRhiipkQb06NGDHj162DsMSYMSG+Iq8qw2btxo7xAkjWnSpAlNmjSxdxiSRmXPnp0JEyYwYcIEe4ciIpLkrFarbSRG3EWTh+/6jruo4u/vz7Vr1zh06BClSpVKsA2z2cymTZt48cUXE9yl/corr9iKCr/++uuPfK98/fXXCQgIYMiQIcyZM4fAwECCg4OJiIigS5cuvP7668m5+yJpVokSJYiIiODYsWOUKFEiwbLY2FgcHBzw8/Nj7969XLlyhcKFCycoIhwcHEzmzJmpU6eObb0yZcpQq1Yt1q5dy4gRI2jQoAENGjTg/v37ZMqUiYsXL1KwYEEuX77M4cOHmT9/PkeOHCE4OBg3NzcmTpyIm5tbiv4eRNK6uM/cAgUK0LhxYzZv3szHH3+coE/c5/Arr7xCWFgYR48epVixYgm24eTkxIkTJ6hbt26C7VatWpW1a9eyd+9eW70qgIoVKxIdHc2UKVO4du0a0dHRbN68mV9++YXbt29Ts2ZNBg4c+Mj7i8RTTQ0REREREREROzCZTDg4ONiSGEePHk30rm+LxYKLiwslSpRgy5YtREdHJ1gG4OPjQ3BwcIJlxYoVIyYmhitXrgAQFBTEiRMnCA8PZ8GCBZw9e9Y233f9+vVp2rQp8+fP5/r164wbN+6Ru8tFMqLY2FhiY2MTtBUvXpxChQrZ5rtP7IZCf39/Tpw4wdmzZ21tced3mTJluHTpErdu3bKtmzNnTvLly0dkZCQ3b97k3r17HDp0iIiICMLCwvjf//7Hiy++SO7cufHx8cHX15emTZuyevVqjh07pqniRP5f3Dl1/vx5zp8/n6DNYrEkOJ/jzklXV1eqVq3Kjh07AB6pH2qxWHBzc6NIkSJs3bqV+/fvP/J6VapUITAwkKioKNt2CxQoQEREBE5OTkRGRvLDDz+wfPlypk2bRsuWLcmWLRsfffQRmTNn5oMPPmDDhg1cvnyZJUuWULt27eT49aQbSmqIiIiIiIiIJJOHL6A8KDQ0lG+//ZbPPvuMefPmUbp0aTZt2mRb78FtADRq1IiNGzfaigE/OKqjVatWtpEWcW7fvs2+fft46aWXAPjtt9/o0qULxYoV47PPPqNhw4a0bNkSgBo1atC9e3fbRZTY2FiN/JYMY8mSJbRp04YzZ84ACZMUDg4Oj1zgzJEjBy+88EKiSY24vi+99BJ37tzh6NGjAAlGYxUuXJh8+fIREBBATEyMbd2//voLb29vcubMSVBQEN9++y0vvPACXl5eBAQE8Omnn5IvXz7c3d15//33+fjjj6lYsSLAY4sXi2Q0JpOJgwcPUqdOHX788ccEy8xms+0cPXLkCLdu3QKM87ZcuXJERkayd+9eIPHP4QYNGrBx40bbeg+e+23btmXr1q2sX7/e1rZ9+3bOnTtHjRo1cHJy4vLly3zxxRdMmTIFHx8fpkyZgqenJwCFChWiYMGCSfzbSL+U1BARERERERF5SlFRUXz88cfs2rXL1vbgRcoHL6A86OjRozRr1oypU6cSGxvL/PnzAeOiJiS8mGI2G1/dW7VqxaFDhzh37hxgXLiJW9asWTPKli1L586dWb16NQcOHODLL7+kUaNGFChQAIDXXnuNzz//nODgYEJCQhgyZAguLi6214mbDgtIMIJEJL2KO95v3LjB8ePH2b9/P4AtEXn79m1mzZpF
06ZN6dChA8uWLQMgc+bM1K1bl507d9qmkHuQ1WolX758FChQgP3793P79u0EywC6du3K77//zqeffsqVK1f4448/OHr0KPXr1weM0RytWrVi7NixnD9/nr179z4yGsNisdi293AMIhlRXEK+dOnSeHl5cenSJe7fv2/7PDt9+jQdO3YkZ86c+Pv789prr7Fo0SIAvL29KVy4MCtXrgQS/xxu3bo1+/fv5+LFi0DCz+FWrVrRpEkT2rdvz8iRI+nbty+TJ09m7NixZMmSBZPJxHvvvcf27dsJCgpi3Lhx+Pj4pNjvJr3RO56IiIiIiIjIU3JycuLkyZM4OsaXrHzw35s3b2bAgAEsWrQowYXNefPmERoayrZt2xg3bhxjxoyhSpUqLF26FEh4gTLu3y+88AKA7cJrnNu3b3Pv3j1mz55N2bJl6d27N1WrViU0NJShQ4fa5tkvUaIE9evXp0CBAlit1kdGYzx4cUYkvbNYLLYLnS+++CK5cuWyJScdHR2xWq18+umnTJ48GW9vb7y9vWnfvj2TJ08mNjaW559/nqioKNt0NQ9vG6BmzZocOHCAy5cv25aZzWZiYmJ455136N27N+vXr6d8+fK8/vrrNGjQgDfffBMwRoM0aNCAJk2a4Orqmuiorwfr8YhIfELewcGBUqVKcerUKU6fPm1bPn36dM6fP8/atWvZs2cPFStW5JtvviEgIICiRYtSqVIlVq9eDST+OVy3bl2io6M5ePCgbdm1a9cICAggKCiISZMmMXz4cNavX8+BAwcYOHAgnTt3tq2fM2dOMmXKlBK/inRPhcJFRERERERE/oOHi3nH3b0dZ82aNWzatIkyZcowatQocuXKxaJFi1i4cCFLlizh1q1bHDlyhPr169sSDhUqVODtt9+mf//+REREkC1btgTbjCs+XKVKFTZt2kT16tU5cOAAf/zxB0uWLKFRo0b89NNPzJgxg7Nnz1K8ePFEL3bGxR530Ucko4q7yLh3714iIiLw9vbm0KFDtpEXv//+O5s2bWLlypUULVoUgCxZsvDVV19RoUIFXnjhBUqUKMGKFSuoXr16ghEbcefeK6+8wsqVKzl69CiOjo6sXbuW3377jQMHDvDtt9/y7rvvUq9ePe7cuUO5cuUSjTPunFXCUTKKuFGDiX1GxS17+JyIO0927NjBlClTKFy4MA4ODly/fp3Dhw9TokQJ9uzZw6pVq/jzzz/Jly8f165dI3/+/Ozbt4+ffvqJunXrUqNGDZYtW0Z0dDSZM2dO8Npxn8MVK1Zk0aJFXLhwgXXr1rF7926uXbvGwIEDGT58ON27d6d79+7J/nvK6PSOKCIiIiIiIvKAHTt22O6stlqttkfc3dcPJwtOnz5N165dbXeDXr16ldGjRzNt2jSWL1/O5s2b+fLLL1m6dCl79uwhR44cXL16FRcXlwSjN0qWLInZbGbjxo0AidbiaNSoET/88AO+vr68++673LhxgwkTJjB9+nRMJhNOTk6UKFECk8mUaIFj3dUtGUVsbGyC6WMerhGzcuVKChcuTKNGjZg0aRJbtmzh5MmTthoYCxYsoFatWly+fJn333+fsmXLMmzYMPLly2c7j+rVq8eff/4JJDy34i621qpViwsXLtCmTRu8vb1tI6e+/vprGjduDEDRokVtCY3EatnonJWM5p+S7nHLHkxoxCU59uzZQ7t27bh16xaurq62aZ6OHTsGGOfl/v37GTRoEEWLFsXT05OZM2fSp08fWxLCx8eHTJky2T6HE3sPeeWVV1i+fDkzZ87Ey8uLRYsWYbFYGD58eHL8OuQxNFJDRERERERE5P8tW7aMKVOm8Pnnnye4eAnGxZTIyEh27dqFh4cHpUqVAiBbtmzMmjWL5s2b4+Xlxcsvv0z27NmpWbMm3t7egDHXtpeXF0uWLKFSpUqUK1eOo0ePcvHiRUqWLAkY02bcuXOH9evX06RJkwQXN+Mu4HTu3JkcOXLQuHFjvLy8/nFfNBJD
0pOHR0jFiY2N5a+//qJIkSIJiuzGHf9Xr17l9OnTVKhQgUyZMmG1Wrl37x4TJkygdu3afP/995w7d44vvviCxYsXs3fvXkqXLk3p0qUZMmQI69atw8/Pj759+/Liiy/aivoCvPzyy0yfPj3R0VUWi4UsWbIwatQo3N3deeWVV/Dw8PjHfdM5KxlFXD2YxI75w4cPM2DAAKpUqcKAAQMSLLt06RKLFy/myJEj1K5dmzZt2mA2m7l//z5TpkwhX758zJ8/nyxZsvDuu+/i4+PD/v37uXv3LpkyZSJ//vycO3eOiRMnUqFChUcKc3t5eeHj48Pvv/9OgwYNEozAiov1008/pV+/fo+c85KyNFJDRERERERE5P81adKE1atXU7t2bVtS4dixYwQGBjJp0iQKFCjAG2+8QYsWLVi3bh0xMTF4eHjg6elJYGAg0dHR5M2bl6JFixIREQHE3+nZoEED1q1bBxiFvcPCwpg8ebKtz6ZNm8iePbutz4MXe+Iu5ubNm5cePXrYEhoxMTGJjugQSW8eN2KhYsWK1KpVi+XLlye4qzogIABfX18KFixIu3bt6NSpEzt27MBkMrFr1y7Onj3LG2+8gaOjI15eXgwaNIiSJUva6mqULl0aR0dHAgIC+OWXX+jYsSOenp7cvHmT3bt3c//+ffz8/IiKirKN1kgsEdm7d2/eeustPDw8sFgsiZ6zGo0hGY3ZbE7wGRc3IhJg7dq1LF++nB9++IH79+/b+uzZs4eXXnqJOXPmEBERQc+ePenSpQsRERFkypSJgIAAXnvtNbJkyUJsbCzZsmWjTZs2hISEEBISQqFChciTJw+FCxemcePGtoTGpUuXmDt3LgcOHCBPnjx4e3uzePFiIGGNrLjzNGvWrEpopAJKaoiIiDxk6NCh+mIhIiKSDjw8jcuD4i4uPtzH0dGRsLAwfv/9d6KjowHo2bMnHTp0YOvWraxdu5YDBw7w3HPP8dVXX3Ho0CHAKDS8YcMG23RS9erVY/PmzURHR9subjZr1ozg4GDOnj3LSy+9RN++fZk9ezb16tWjQoUK7NixgxEjRnDr1i1CQ0Mf+/dI3B2ucfHq7m5J7y5fvszEiRP566+/bG1Wq5Vr167ZRkPt27ePqKgoAC5evMioUaPw9fXl3LlzfPvtt5w/f553330XAA8PD0JCQhLcpV2wYEGKFy/OkSNHuHPnDq1atcLNzY1hw4axe/duAC5cuMD//vc/fv31V27dukXevHnp0qULzs7OwOOTE3FJDLPZrHNWBDh+/DgVKlTg+eef58aNG7ZaTzExMRw6dIhmzZpx4sQJTpw4YVunV69elC1blu3btzN37lymTZvGb7/9xty5cwHjHI6baiomJgaAmjVrcvz4cQ4dOkTOnDnp27cvv/76K2+88Qbbtm1j3rx59OzZk8WLF5M5c2YcHR3p168fAQEBKf9Lkf9ESQ0REUkW33//ve0PE5PJhKOjIwUKFKBjx45cuHDB3uGJiIhIOvRg3QtIeIHx4eRF3MVFk8mU4E5QgOnTp9OrVy927twJQNOmTbl06RLlypWjYsWKuLq68vnnn3Pt2jUCAwMBY3qp/fv3c/HiRQAaNmzI4cOHE/zdU6dOHe7du0dgYCBms5l27doREBBAjRo16N69O3/88Qd3797F1dWVGzduPHY/zWazbsCQDCHufP7ss8/o1asXAwcOZOvWrYBxfh8/fhwHBwc++OAD/vzzT65evQrAkSNH2LRpE6NGjSJ37tw0bNiQ77//ngMHDvDnn39SsmRJMmfOzN9//w0Y7w+Ojo44OTlx9OhRDh8+jNlsZvr06Rw5coT3338fX19fSpYsyapVq6hSpQo5c+bEycmJ6dOn07Rp03/cDyUxJKPZtWsXs2bNAhLWpYhjsVg4cOAAx44dY+DAgURGRgJGon737t3Ur1+fYsWKsXr1agDOnTvH1atXadq0qS2J2LJlS1q2bMlPP/0EQN26dVm9ejWxsbE4OTkB
4O7uztWrVzlw4AAAb7zxBtOmTeP+/ft07NiRQYMGUbBgQYYPH26bCrJkyZK26SUl9VJSQ0REktXw4cP58ccfmTp1Kg0bNmTevHnUqVPH9keLiIiIyNOyWq0JCuuaTCbbqIgdO3bw9ddfc+fOHduyB23dupV3332XcuXK8dZbb7Fs2TJbcqNatWoUKFDAdsGzYsWKFCxYkKxZs9rWL1++PDly5CA4OBgwCodGRkZy8OBB2zayZs1qS3pYrVZcXFyoXr06V69etcVcpUoVhg8fzvvvv09YWBgrVqygTJkyFCtWLFl+ZyJpSdx50qJFC8CoX/Ppp5/alufMmZOAgAD69evHpUuXOHnyJACnTp2icOHCtnP2/v37eHp6UrZsWQICAnBwcOCll15i3rx5HDt2DJPJxJUrVzh+/DgRERFs3rwZgObNm7NmzRo+//xzPv74Y44dO8Zff/1Fy5YtE0xLE3dXuIgY58PcuXMZMWJEgpoUDypWrBhly5bl5ZdfZseOHbYEyP3793F3d8dsNtOgQQN+++03AMLCwnB1deXmzZtA/OinBg0asHfvXgDeeecdLl68yKBBgwgPD+f27dtMnTqV7NmzExgYSGhoKACvvvoqs2fPJigoiNOnTzNhwgT8/PyS/fciSUtJDRERSVYNGzakffv2dOnShZkzZ9K3b19OnjzJihUr7B2aiIiIpHFxhXVNJhMnT55k1qxZ1K1bF2dnZ+rWrcuZM2dsd0jv27ePb775hrNnz7J9+3Z69uxJdHQ0H330EW5ubvTr14958+YBxlz6OXLkICgoCABfX1/y5MnDuXPnbHec5smThyJFinD69GkuXLiAo6MjFSpUYPPmzdy7d4+sWbNSoEABVq1aZYsVYNu2bXTv3t32/MyZM7Rv3x5/f3/b3eODBg3Snd2S7j2clIxre1DcefDSSy8B0Lp1a0JDQ5k0aRL379/n2rVrlCxZEjc3N4oVK8b69esBY877vHnzJpiuCqBs2bIcPnwYgE8++YQbN27w2muv8c0339C7d2+8vb1p0KBBgiRFtmzZaNmyJW+99Rb58+e3xf2gBxMcIhmdo6Mj1atXJ1u2bOzZswd4dLSGg4MDfn5+mEwmOnfuzPz589m0aRPXrl0jJiaG8uXLU6VKFXbt2sX9+/cpXrw4rq6utvM37r0hJiaGnDlzcunSJQoXLsxXX33F0qVLqVOnDkWKFKFw4cKMHz+eXr164ebmBhjvM9mzZ09wo4KkPUpqiIhIiqpVqxaA7S4qgPXr11OrVi2yZs2Kq6srzZs3t/2xEieuMN/DEqt/YTKZ6NGjB8uWLaNs2bI4OTlRpkwZ20WFB23dupUqVarg7OyMt7c306ZNS4K9FBERkZQQFBREq1atyJEjB2XLlqVr166cPHmSv/76i4iICD766CMGDx7MggUL6NChA9u2bSMyMpJChQrRt29fvv/+e9555x2++eYbKlasyLx587BarRQoUABPT0/OnDnDlStXcHFxoVixYpw4cYLz58/bXr9SpUrcuHGD/fv3A8bojFWrVtkKhK9YsYKZM2c+EveDF0Q9PT3x9vamTp06rFu3jnXr1mnaC8kQHkxKgjHHfmLTqlksFnLmzEmhQoW4cOECAwYMYNGiRWzfvp2DBw9SoEABcuTIQe3atW3z4JcvX54sWbKwdOlSADJlysS5c+c4dOiQ7fyqVasW8+fPp1q1asyZM4c8efIwfPhw5s+fT79+/RKN48G4RTK6uNpUD4r7fCtevDjZs2dnw4YNQOI1rho3bszGjRupW7cujRo14qOPPiJbtmzs2LGDEiVKUL16de7cucPOnTvJnj07NWvWZPXq1bbv9bGxsUydOpV69eqRM2dOALp168Yvv/zChx9+yG+//cbYsWPp1KkTDRs2JHPmzMDja99I2qJUsoiIpKgzZ84A2O6SWLduHQ0bNqRo0aIMHTqUe/fuMXHiRGrWrMnevXsTTWQ8ia1bt/Lrr7/SrVs3smfP
zrfffsurr75KSEgI7u7uABw4cIAGDRrg4eHB0KFDiYmJYciQIeTNmzcpdlVERESSSdx0FtevX6d48eK8//771K9fnzlz5jBixAhKlCgBwN27dxk/fjwlSpRg8uTJ1K1b17aNN998k19++YVp06axd+9erFYr2bJlY/fu3VSpUgUfHx927drFvn378Pf3p1q1avz4448cPXqUwoULA1CuXDmuXbvGqVOnABg5ciTDhg3Dw8MDAC8vr0Tjf/CCqNlsZtiwYcnyexKxt7gLmVar9ZEpaK5cucIvv/zCpUuXKF26NG+99RaBgYFUq1YtwZQ1cf9u2LAhy5cvZ82aNZw5c4axY8dSuHBh7t69CxgjxBcuXEhISAilS5emU6dOvP3222TLlo2GDRvy66+/Yjab6dWrly0Gb29vpkyZ8khsiU2Zk9gUOiIZidVqTZAQMJvNtvPi0qVLODk5kStXLsBI2BctWpRt27bZ+j6sdu3a3Lhxg9OnT/P555/z008/8c4771C4cGEiIiIoVqwYxYoV488//6RmzZp89NFHHDt2jM6dO1O1alVOnDiB2Wxm1KhRuLi42LZbtmxZypYtm5y/CkkF9I4sIiLJ6ubNm4SHh3P+/HmWLFnCsGHDcHJyokmTJoAx7DtXrlwEBgbyySefMHjwYAICArh58yZDhgx56tc9fPgwW7ZsYeDAgfTq1YulS5dy9+5dWxExgMGDB2O1WtmyZQufffYZAwcOZMOGDba5sUVERMT+Hi7+DfEXR1566SXGjh1LgwYNMJlMVK9enbNnz3Lo0CEAypQpQ4ECBShQoAA1a9a0bQ9g4cKFjB07lipVqrB582Z++eUXsmXLZptLP+5O73379gFQtWpVQkJCbMXDAapXr86aNWvo0aMHALly5SJPnjzJ+NsQSR0enn7pcctNJpOt1s2Dd2ofPnyYJk2aMGnSJO7du8cPP/wAkKAGTZy4871Nmzbs3buXixcv0rt3b9zd3Zk+fToFCxYEoH79+sTGxrJ//35MJhNvvfUWM2bMIDg4mDfeeIMjR47YEiFxscVt32q1EhMTY3tdJTBEDBaLJcH5HMdqtbJjxw7efvtt8ubNS506dejQoQNbtmwBjCkafXx8OHXqFLdu3Up0dES+fPnw8fHhjz/+wGQyMX78eLZu3Yq7uzs5cuQAwN/fnz///BMwppX77rvvmDlzJgULFqRXr15s3LiR6tWrJzoSRNI3vUuLiEiyqlevHh4eHhQqVIjWrVuTNWtWVqxYQcGCBbl06RJBQUF07NjRdkcHGBcR6tevzx9//PFMr+vt7Z1gmzly5LDdSRkbG8vq1atp0aKF7YsNGHNo+/v7P/XrioiIyLN7XPHvf1OyZEkKFizIxo0bbetXrlwZi8Viu5vbZDJx+/Zt5s2bR758+Rg9ejRly5Yla9asXLhwwVZHw8fHBwcHBzZt2gSAn58fw4YNo127drbXc3BwoECBAkm12yJpxr9NvxS3fMeOHRw+fJhOnTolmNJpzpw5XLt2jb/++ouvvvqKkSNH4ufnx6+//grwyN3gAHXr1iUmJoadO3eSK1cuhg4dyocffkjPnj2xWCzkyJEDV1dXli5dakuEdu7cmYULFxIaGkpAQAD169dPNF6TyYSjo6OmpRF5iNlsxsHBgZiYGDZv3mwbeQEwa9YsnJ2dmTt3LitWrCBr1qwMHDiQY8eOAVCiRAliY2NtycqHb1AAo9D3hg0bsFqt1K9fnw0bNvDbb7/ZZleoVasWp06d4vbt2wC2kVcTJ06kS5cuthkgdO5mPEpqiIhIspo8eTJr165l8eLFNGrUiPDwcJycnAA4e/YsYFyAeFjp0qUJDw/nzp07T/W6DyYq4ri5uXH9+nUAwsLCuHfvHsWLF3+kX2LxiIiISPJ5eE7uuHn2L1y4wMmTJ/n666/54IMPCAsL+8ftmM1mateuzZo1a2zbbNasGcHBwba/AeJeL2vWrLYLLNevX2fhwoU899xzrF69
mps3b+Lm5ka3bt0YNGgQYNyV2qlTJ4oWLZqUuy6S5litVnr16oWfnx+7du0CHr1YOXfuXLJnz07btm0ZN24cP/74o63v9evXOXHiBPXr17fNg1+5cmU6dOjAgQMHiI6OfiSRGXeneMWKFW21cby9vZkwYQLlypWzXdCcOXMm3bt3T5B0ibvjOzY29l9HmIhkNA/XxXh4xMOxY8d49dVXyZ49O126dOHnn38mLCwMk8nEq6++ysSJE3nllVcoWbIktWrV4u+//7bVvPD29iZv3rz/WFejSZMmHD16lNOnT+Po6EiJEiVs7wsALVq04Pr162TPnj05dl/SMCU1REQkWVWtWpV69erx6quvsmLFCsqWLcsbb7xhK6D5pB5358Xjvpg87u4xDUsVERFJGf/l4qGjY8Jyj9u2bcPX1xdvb29GjhzJxIkT2bVrl+2GiH/6PG/SpAk7d+7k2rVrADRt2pTw8HDbnaNg3OjQqlUrgoODqVChAt7e3ly/fp2JEycyZ84cnJ2dsVqttGrViho1agDxf4vobwnJ6C5dukRYWBg3btxg7Nix3L59G7PZbEtsnD17luHDh9O3b18OHz5Mz549adCgAdu3b+fWrVu4ubkRHh5OlixZEtzAVLp0aSwWCxs3bgQSJkrizrsXX3yRn3/+matXr9r6PDjPf6NGjahUqVKicTs4OKjAt8hDzGaz7TP49u3btnPJYrFgsVgYO3YsERERBAYGsm/fPnr27EnWrFkBY2qoB+tUfvPNN7i5ubF06VIAihQpQokSJWwJzcTOP19fX6Kjozl8+HCi8T3894FIHCU1REQkxTg4ODB69GguXrzIpEmTKFKkCABHjx59pO+RI0fInTu37Q8mNzc3bty48Ui/uIsb/5WHhwdZsmTh+PHjjyxLLB4RERH5b/7LxcMVK1bQs2dPQkNDsVqtfPfdd+TKlYtLly7Rt29f/P39CQ4O5siRI/+6rdq1a3Pnzh0OHjwIQO7cufH09GTbtm3cv3/f1u+1117j559/plu3bqxdu5a5c+fSoEEDGjdujJOTU4ILOw/SFBeS0V29epXTp0/z888/A0adOoifJurUqVOEhYXx/vvv4+zsjJ+fH0OHDsXR0ZG1a9cCxsjoY8eOcfnyZdt2zWYzd+7csd3V/eC5F7ftDz74gC+//BIPDw9b+8PnZGJT3IhkVEeOHPnHZPzff//Nm2++Sf78+WnUqBHTp0/n+vXrmM1m1q9fz6pVq3j77bfx9fXFxcWFEiVK2IpyX7p0iY8++oiiRYuyfv16WxHv/fv3ExMTg7u7O2XKlOHKlStcuHABePTGgJw5c3L16lUaN26cfL8ESZeU1BARkRT14osvUrVqVSZMmICbmxu+vr7MnTs3QcLi4MGDrFmzhkaNGtnavL29uXnzJn///bet7dKlS7a7QP4rBwcH/P39WbZsGSEhIbb2w4cPs3r16qfapoiIiBgSm54mbnqLCxcucP78eQCio6MB2L17N6tXryZv3rwEBwezfv16unXrhpubGz4+Pnz77bcULlyY/fv3A/+cWMifPz9ly5YlICDAdvHE19eXNWvWcO/evQR9q1evznvvvWe7s9tqtT5ywUUFg0UScnd358SJE5QpU4YBAwYwb948AgICbMv379+Pj49PguniihcvToUKFVi5ciVgjKi6dOkSU6ZMAYxExKZNm8iePbst8fFgYjTuPPTy8qJr165kzpz5sfHpnBUxjB49mpYtW9pqRT38+RYWFsaQIUO4fv06s2bNonbt2nzxxRd89tlngPEZffXqVV555RXA+Oy1Wq22xOHx48fZuXMnvXv3pnz58gDs2bOHGzdusGPHDgAKFCjAtWvX2L59u20bD4uriyHyX+idXkREUtwnn3xCaGgo33//PePGjePq1atUr16d8ePHM2LECF5++WVy5szJ0KFDbeu0bduWrFmz0rJlS7755htGjx5NtWrVKFGixFPHMWzYMMAoPjZ27FhGjhzJ
Sy+9RJkyZZ51F0VERDK0h6enuXXrFo6OjgQHB/Pqq6/Su3dvADJnzmwrCu7l5QUYSYnQ0FCKFSsGGNNYZc6cGT8/P44cOcLFixeBf54GqmLFiixZssSWNJkyZQpr1661za3/oLjpa8C42KKRGCL/LDAwEF9fX0JCQqhYsSLvv/8+M2bMYPPmzQAULFgQR0dH22gpgKxZs1KgQAFbwWB/f38+/PBDvvvuO/z9/alYsSL79u1jxIgRhIeHc+3atceeixqJIfJkypYti7u7OwcOHAAe/dwMDAxk5cqVTJo0iYYNG/LFF18wZswYZsyYQWhoKPnz5yc2NtZ2IwIYn5NxicP79++TJ08efv31V65du8acOXNwcHDA3d2d+fPnA8boyYCAANq0aaPpGyVJKakhIiIprlWrVnh7ezN+/HheeuklVq1ahbu7O4MHD2b8+PE8//zzbNu2zXZxA4w7wpYuXYqLiwv9+vVj7ty5jB49mqZNmz51HOXLl2f16tV4eHgwePBgZs+ezbBhw2jZsmVS7KaIiEiG9bjpaXx8fBgzZgwrVqxg/fr1WCwWHBwc2LNnD2XLluX+/ftkypSJggULsm3bNiD+Ikzx4sXZt28fp06dStCemOHDh7N27VqcnJwAyJs3b4LCow9KbPoaEXlU3Dl37tw5XF1d8fHxAYxaGJs3b+arr74C4PnnnydbtmwsWbLEtu7Zs2fZvXs3x48f5/Llyzg7O/P222+zbt06KlWqxHvvvcfKlSuJiIggV65ctpo4idFIDJEnU758ebJly2YbMfnwZ11QUBC+vr4ULVrUtrxJkyZ4eHiwatUqW22rGTNmJJi+MTg4mODgYOrWrUvXrl2ZPXs2+fPnZ8KECbRv354dO3bY3g88PDwoVapUoq8v8ixMVqXJREREREREJAldvHiRChUqcPr0aY4dO4a/vz8//fQT9erVA6B9+/aEhYUxfPhwqlWrxosvvkjFihX5+uuvAejatSv79+/n999/x8PDgzt37tC2bVt27tzJ4MGD6d69uz13TyRDmz59Ou+//z61atVi586dZMmShRdeeIHAwEBGjBjB+++/z/r163nllVdo3bo1VatWZdu2bVSoUIGJEycyYcIE2rVrZxtxEZekuHLlCi1atKBEiRLMnj1byQuRJPDee+8REhLCokWLyJ49O2AkKE0mE8OGDWP79u2MGzeO8uXL2xKXderUoXTp0kybNo2ZM2cycuRIypUrR69evTh48CDr16+nadOmdOnSBYB9+/bh7u5O4cKF7bafkvHoE0JERERERESSVGLT08ycOdM2PU2/fv1wcHDgq6++IioqCjc3twR3cH700UfcuHGDFi1asGjRIoYMGUK+fPkoWrQoISEhtvocIpLygoODKVu2LNWrV2f16tUcPXqUFStW0LNnTyZPnszy5ct5+eWXWbp0KRaLhalTp1KuXDmaN29OkSJFOHnyJGAkM06cOMFbb71Fw4YNKV26NNmyZaN///5KaIgkER8fH27cuGGbgspisdgSin5+fty8eZO//voLMEZSnDt3jtu3b5MvXz4AOnTowLRp03B0dOSdd95h1qxZ1KhRg2bNmgFGgsTPz08JDUlxjvYOQERERERERNKHuLs/E5ueZtasWURGRlK7dm3Kly/P559/TuvWrZk2bRpnz56ldevWtu34+PiwaNEixo4dS79+/fDx8eHnn3+mZcuWXL9+XRc8RewoODiYl19+mTFjxgDx01L16tWLXbt2MX/+fJo0aULjxo1p3Lixbb1ly5Zx/PhxKlasaGsrWrQohQoVomTJkowYMYLKlSun7M6IpHMVKlRg0aJF7Nq1ixo1agDg4OAAwAsvvEDFihX5/PPPKVKkCJUqVWLRokXcvXuXDh06AJApUyYaNGjA888/T/bs2R+ZQkpTSom9KKkhIiIiIiIiSSLu4oaLiwtLliyhTp06Caan2bZtG9OmTePdd9+lZs2avPnmm8yfP5+goCCee+45
wLhAarVa8fX1Zf78+QmmpgkODqZq1aqYzWZbAkVEUk7ceXfnzh0AYmNjbRdIc+bMyeLFi221bO7fv8+CBQvInDkzmzdvZsuWLbzzzjs0atTItj1HR0dGjRqV8jsikkGUKVOG3Llzs3//fiBhTZpMmTLx1VdfER4eTt++fTlz5gyurq4MHz4cb29vwPhct1qt5MiRwy7xizyOkhoiIiIiIiKSpB6cnmbEiBGULl0aDw8Phg8fzuTJk8mXLx/Nmzfn888/p3PnzoSFhZEnTx7AuIBiMpmIjIwkODiYkydPcuXKFebPn4+vry/9+vWz9RORlHX//n3u3r1LhQoVgPg7vuM4OTnZEpOZMmXi4sWL/Prrr+TPn5/BgwfbpqwRkZTh4eGBt7c3wcHBnDhxgrt37/Lnn3+yYsUKHB0dmTlzJosWLWLPnj24urrakhkP0uetpEYqFC4iIiIiIiJJql69epQtW5YJEyYA8Xd337x5k/bt25MlSxYWLFiAo6MjFosl0emkrFYrGzZs4KOPPsLR0ZFmzZrRoUMHvLy8UnhvROS/ijvnIyMjcXZ2tnc4IhnajBkz+OSTT7hz5w5Wq5USJUpQv3593nrrLSpXrqyRj5ImKakhIiIiIiIiScZqtdKgQQM8PT2ZMWNGgulpAKKiomzT08SxWCxYrdZH7vq+f/8+jo6Outgikso8LhkpIqnPkSNHWLlyJaVLl6Z+/fqPfAaLpEWafkpERERERESSzJNOT/NgouJxF0czZcqUfIGKyFNTQkMk7ShVqhSlSpWydxgiSUojNUREREREREREREREJE1Qal1ERERERESSnMVisXcIIiIiIpIOaaSGiIiIiIiIiIiIiIikCRqpISIiIiIiIiIiIiIiaYKSGiIiIiIiIiIiIiIikiYoqSEiIiIiIiIiIiIiImmCkhoiIiIiIiIiIiIiIpImKKkhIiIiIiIiIiIiIiJpgpIaIiIiIiIiIiIiIiKSJiipISIiIiIiIiIiIiIiaYKSGiIiIiIiIiIiIiIikiYoqSEiIiIiIiIiIiIiImmCkhoiIiIiIiIiIiIiIpIm/B/bm9ojwbolswAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "==== Percentage Difference (8-bit vs 4-bit) ====\n", + "\n", + "Avg Training Loss: 8-bit is inf% higher than 4-bit\n", + "Agg Model Loss: 8-bit is 0.52% higher than 4-bit\n", + "Local Model Loss: 8-bit is 1.13% lower than 4-bit\n" + ] + } + ], + "source": [ + "# Visualize aggregated metrics (memory vs. performance tradeoff)\n", + "plot_aggregated_metrics(flflow_4bit, flflow_8bit)" + ] + }, + { + "cell_type": "markdown", + "id": "9a4de940", + "metadata": {}, + "source": [ + "## Conclusion\n", + "\n", + "This notebook has demonstrated how to implement federated fine-tuning of Microsoft's Phi-4 model using OpenFL with both 4-bit and 8-bit quantization approaches. The visualization and analysis above help us understand the tradeoffs between these quantization methods:\n", + "\n", + "### Memory Usage and Performance Comparison\n", + "\n", + "- **Memory Footprint**: 4-bit quantization used approximately 2.6% less memory (55,770 MB vs 57,204 MB) compared to 8-bit quantization.\n", + "\n", + "- **Model Quality**: 4-bit quantization achieved better loss metrics overall:\n", + " - Training Loss: 0.1754 ± 0.0877 for 4-bit vs. significantly higher for 8-bit\n", + " - Evaluation Loss: 0.4618 ± 0.0725 for 4-bit vs. 0.4909 ± 0.0776 for 8-bit (6.3% higher)\n", + "\n", + "- **Performance Difference by Metric**:\n", + " - Average Training Loss: 8-bit significantly underperformed compared to 4-bit\n", + " - Aggregated Model Loss: 8-bit was 0.52% higher (worse) than 4-bit\n", + " - Local Model Loss: 8-bit was 1.13% lower (better) than 4-bit\n", + "\n", + "### Key Insights\n", + "\n", + "1. 
**Memory-Performance Tradeoff**: While 8-bit quantization required slightly more memory, the relative performance differences in evaluation metrics were more significant, suggesting 4-bit quantization offers a better memory-performance balance for this model and task.\n", + "\n", + "2. **Training Stability**: The 4-bit quantization approach demonstrated more stable and better training performance compared to 8-bit quantization.\n", + "\n", + "3. **Efficiency Considerations**: Despite the memory analysis suggesting 8-bit provides more efficient memory usage relative to loss in some metrics, the overall performance profile favors 4-bit quantization for practical federated learning deployments.\n", + "\n", + "By combining federated learning with appropriate quantization techniques, we can successfully fine-tune large language models while balancing computational resource constraints across federated devices. For this Phi-4 model, the 4-bit quantization approach appears to offer the better balance of memory efficiency and model performance." 
+ ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python (myenv)", + "language": "python", + "name": "myenv" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/openfl-tutorials/experimental/workflow/LLM/phi-4.ipynb b/openfl-tutorials/experimental/workflow/LLM/phi-4.ipynb deleted file mode 100644 index 0c6884f384..0000000000 --- a/openfl-tutorials/experimental/workflow/LLM/phi-4.ipynb +++ /dev/null @@ -1,705 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "a59f475d-d843-46bc-b75e-10984b687ed3", - "metadata": {}, - "source": [ - "# Federated Fine-Tuning of Phi-4 Using OpenFL" - ] - }, - { - "cell_type": "markdown", - "id": "20c74cb9-51a2-42e2-893f-d280e227e8bf", - "metadata": {}, - "source": [ - "\n", - "In this tutorial, we demonstrate how to fine-tune Microsoft's Phi-4 model in a federated learning workflow.\n", - "\n", - "We will fine-tune **Microsoft's [Phi4](https://huggingface.co/microsoft/phi-4)** model using a diverse dataset such as [Math_10k](https://github.com/AGI-Edgerunners/LLM-Adapters/tree/main), an open-source dataset containing mathematical question-answer pairs collected from various smaller math datasets." - ] - }, - { - "cell_type": "markdown", - "id": "d07c32d3-1a8d-4162-af45-bc3a10e0ae3f", - "metadata": {}, - "source": [ - "## The Workflow Interface" - ] - }, - { - "cell_type": "markdown", - "id": "e3d74610-e48d-4dd4-b622-eb910fbe91aa", - "metadata": {}, - "source": [ - "The workflow interface is an innovative approach to designing federated learning experiments with OpenFL. 
It was developed in response to discussions with researchers and users who had unique use cases that didn’t perfectly align with the traditional horizontal federated learning model. This interface enables more flexible compositions of experiments, allowing for greater customization and adaptability in complex, real-world scenarios" - ] - }, - { - "cell_type": "markdown", - "id": "413e1d95-fd76-4fe0-b8d0-4c625c2a8fd3", - "metadata": {}, - "source": [ - "## Installing OpenFL\n", - "To install OpenFL, follow the official documentation: \n", - "[OpenFL Installation Guide](https://openfl.readthedocs.io/en/latest/installation.html)" - ] - }, - { - "cell_type": "markdown", - "id": "53654c70", - "metadata": {}, - "source": [ - "After installation, activate experimental APIs using: \n", - "`fx experimental activate`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "05b2ad75-8c7b-499c-902e-dbd5b24361bc", - "metadata": {}, - "outputs": [], - "source": [ - "# Install dependencies \n", - "!pip install torch transformers peft datasets trl==0.12.2 -q" - ] - }, - { - "cell_type": "markdown", - "id": "440a9c39-ec42-45a5-80f6-9a9e0bc90d2f", - "metadata": {}, - "source": [ - "## Import libraries" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "be4690ae-0671-4d3a-8f21-620ab865a03e", - "metadata": {}, - "outputs": [], - "source": [ - "import hashlib\n", - "import os\n", - "\n", - "import numpy as np\n", - "import requests\n", - "import torch\n", - "import transformers\n", - "from datasets import load_dataset\n", - "from peft import LoraConfig, get_peft_model\n", - "from peft.utils import get_peft_model_state_dict, set_peft_model_state_dict\n", - "from transformers import AutoModelForCausalLM, AutoTokenizer, TrainingArguments\n", - "from transformers.trainer_callback import PrinterCallback\n", - "from trl import SFTTrainer\n", - "\n", - "from openfl.experimental.workflow.interface import Aggregator, Collaborator, FLSpec\n", - "from 
openfl.experimental.workflow.placement import aggregator, collaborator\n", - "from openfl.experimental.workflow.runtime import LocalRuntime" - ] - }, - { - "cell_type": "markdown", - "id": "08576aa0-f628-4ae6-8fc3-dd167d164784", - "metadata": {}, - "source": [ - "## Acquiring and preprocessing dataset" - ] - }, - { - "cell_type": "markdown", - "id": "7ba1d8b6-8a5b-41a2-8c77-c9a85e869cda", - "metadata": {}, - "source": [ - "We can download the dataset directly from the [LLM-Adapters repository](https://github.com/AGI-Edgerunners/LLM-Adapters)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d615d626-8727-4169-b2a6-3ba15c3cdb95", - "metadata": {}, - "outputs": [], - "source": [ - "def file_checksum(file_path, algorithm=\"sha256\"):\n", - " \"\"\"\n", - " Calculate the checksum of a file using the specified hashing algorithm.\n", - "\n", - " Parameters:\n", - " file_path (str): The path to the file for which the checksum is to be calculated.\n", - " algorithm (str): The hashing algorithm to use (default is 'sha256').\n", - "\n", - " Returns:\n", - " str: The calculated checksum of the file.\n", - " \"\"\"\n", - " hash_func = hashlib.new(algorithm)\n", - " with open(file_path, \"rb\") as f:\n", - " for chunk in iter(lambda: f.read(4096), b\"\"):\n", - " hash_func.update(chunk)\n", - " return hash_func.hexdigest()\n", - "\n", - "\n", - "if not os.path.exists(\"math_10k.json\"):\n", - " r = requests.get(\n", - " \"https://raw.githubusercontent.com/AGI-Edgerunners/LLM-Adapters/main/ft-training_set/math_10k.json\",\n", - " )\n", - " with open(\n", - " \"math_10k.json\",\n", - " \"wb\",\n", - " ) as f:\n", - " f.write(r.content)\n", - "\n", - " actual_checksum = file_checksum(\"math_10k.json\")\n", - " if (\n", - " actual_checksum\n", - " != \"0342d0d860ad8592b579329337c90e42eefd3d9f2898043140cbd120630418b8\"\n", - " ):\n", - " raise ValueError(\n", - " \"Checksum verification failed. 
The file may have been altered.\"\n", - " )\n", - "\n", - "raw_dataset = load_dataset(\"json\", data_files=\"math_10k.json\")" - ] - }, - { - "cell_type": "markdown", - "id": "3ab15ad6-db35-4a58-a2d5-54a6d3ccdc78", - "metadata": {}, - "source": [ - "## Initialize arguments and configurations" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "eada9809-468a-47c6-9b03-55aa887c9487", - "metadata": {}, - "outputs": [], - "source": [ - "training_config = {\n", - " \"bf16\": True,\n", - " \"use_cpu\": True,\n", - " \"do_eval\": False,\n", - " \"learning_rate\": 5.0e-06,\n", - " \"log_level\": \"info\",\n", - " \"logging_steps\": 20,\n", - " \"lr_scheduler_type\": \"cosine\",\n", - " \"num_train_epochs\": 1,\n", - " \"output_dir\": \"./checkpoint_dir\",\n", - " \"overwrite_output_dir\": True,\n", - " \"per_device_eval_batch_size\": 1,\n", - " \"per_device_train_batch_size\": 1,\n", - " \"save_steps\": 100,\n", - " \"save_total_limit\": 1,\n", - " \"seed\": 0,\n", - " \"gradient_checkpointing\": True,\n", - " \"gradient_checkpointing_kwargs\": {\"use_reentrant\": False},\n", - " \"warmup_ratio\": 0.2,\n", - "}\n", - "\n", - "peft_config = {\n", - " \"r\": 1,\n", - " \"lora_alpha\": 2,\n", - " \"lora_dropout\": 0.05,\n", - " \"bias\": \"none\",\n", - " \"task_type\": \"CAUSAL_LM\",\n", - " \"target_modules\": \"all-linear\",\n", - " \"modules_to_save\": None,\n", - "}\n", - "model_kwargs = dict(\n", - " use_cache=False,\n", - " trust_remote_code=True,\n", - " torch_dtype=torch.bfloat16,\n", - " device_map=None,\n", - ")\n", - "train_conf = TrainingArguments(**training_config)\n", - "peft_conf = LoraConfig(**peft_config)" - ] - }, - { - "cell_type": "markdown", - "id": "ffe93234-2a1a-4809-a431-efe2f35ce496", - "metadata": {}, - "source": [ - "## Load and initialize model" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8ab371f1-64c3-4225-82e7-fb3c5b05578c", - "metadata": {}, - "outputs": [], - "source": [ - "checkpoint_path = 
\"NyxKrage/Microsoft_Phi-4\"\n", - "model = AutoModelForCausalLM.from_pretrained(\n", - " checkpoint_path, return_dict=True, **model_kwargs\n", - ")\n", - "model = get_peft_model(model, peft_conf)\n", - "\n", - "tokenizer = AutoTokenizer.from_pretrained(checkpoint_path)\n", - "sequence_max_length = 512\n", - "val_set_size = 2000\n", - "tokenizer.pad_token_id = 0 # we want this to be different from the eos token\n", - "tokenizer.padding_side = \"left\" # Allow batched inference" - ] - }, - { - "cell_type": "markdown", - "id": "dd058fff-f6dd-4cc6-acaf-7e2fa2c1132d", - "metadata": {}, - "source": [ - "## Preprocess dataset" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4392ddab-10b7-41f6-a8e0-65ba298ea457", - "metadata": {}, - "outputs": [], - "source": [ - "def generate_prompt(data_point):\n", - " \"\"\"\n", - " Generate a prompt based on the given data point.\n", - "\n", - " Parameters:\n", - " data_point (dict): A dictionary containing the instruction, input, and output.\n", - "\n", - " Returns:\n", - " str: The generated prompt as a string.\n", - " \"\"\"\n", - " if data_point[\"input\"]:\n", - " return f\"\"\"Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request. \n", - "\n", - " ### Instruction:\n", - " {data_point[\"instruction\"]}\n", - " \n", - " ### Input:\n", - " {data_point[\"input\"]}\n", - " \n", - " ### Response:\n", - " {data_point[\"output\"]}\"\"\"\n", - " else:\n", - " return f\"\"\"Below is an instruction that describes a task. Write a response that appropriately completes the request. 
\n", - "\n", - " ### Instruction:\n", - " {data_point[\"instruction\"]}\n", - " \n", - " ### Response:\n", - " {data_point[\"output\"]}\"\"\"\n", - "\n", - "\n", - "def tokenize(prompt, add_eos_token=True):\n", - " \"\"\"\n", - " Tokenize the given prompt.\n", - "\n", - " Parameters:\n", - " prompt (str): The prompt to be tokenized.\n", - " add_eos_token (bool): Whether to add an end-of-sequence token (default is True).\n", - "\n", - " Returns:\n", - " dict: A dictionary containing the tokenized input IDs and attention mask.\n", - " \"\"\"\n", - " result = tokenizer(\n", - " prompt,\n", - " truncation=True,\n", - " max_length=sequence_max_length,\n", - " padding=False,\n", - " return_tensors=None,\n", - " )\n", - " if (\n", - " result[\"input_ids\"][-1] != tokenizer.eos_token_id\n", - " and len(result[\"input_ids\"]) < sequence_max_length\n", - " and add_eos_token\n", - " ):\n", - " result[\"input_ids\"].append(tokenizer.eos_token_id)\n", - " result[\"attention_mask\"].append(1)\n", - "\n", - " result[\"labels\"] = result[\"input_ids\"].copy()\n", - "\n", - " return result\n", - "\n", - "\n", - "def generate_and_tokenize_prompt(data_point):\n", - " \"\"\"\n", - " Generate and tokenize a prompt based on the given data point.\n", - "\n", - " Parameters:\n", - " data_point (dict): A dictionary containing the instruction, input, and output.\n", - "\n", - " Returns:\n", - " dict: A dictionary containing the tokenized input IDs, attention mask, and labels.\n", - " \"\"\"\n", - " full_prompt = generate_prompt(data_point)\n", - " tokenized_full_prompt = tokenize(full_prompt)\n", - " user_prompt = generate_prompt({**data_point, \"output\": \"\"})\n", - " tokenized_user_prompt = tokenize(user_prompt, add_eos_token=False)\n", - " user_prompt_len = len(tokenized_user_prompt[\"input_ids\"])\n", - "\n", - " tokenized_full_prompt[\"labels\"] = [-100] * user_prompt_len + tokenized_full_prompt[\n", - " \"labels\"\n", - " ][user_prompt_len:]\n", - " return tokenized_full_prompt\n", 
- "\n", - "\n", - "train_val = raw_dataset[\"train\"].train_test_split(\n", - " test_size=val_set_size, shuffle=True, seed=42\n", - ")\n", - "\n", - "processed_train_dataset = train_val[\"train\"].shuffle().map(generate_and_tokenize_prompt).select(range(3))\n", - "processed_test_dataset = train_val[\"test\"].shuffle().map(generate_and_tokenize_prompt).select(range(3))" - ] - }, - { - "cell_type": "markdown", - "id": "812cfcc8-33ec-4a2b-8a74-27bfc2a41d7b", - "metadata": {}, - "source": [ - "## Define Federated Averaging Method\n", - "The FedAvg method is used to average the models from all the collaborators after training." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6dc85c57-68b2-4514-9373-43e3d7c05c10", - "metadata": {}, - "outputs": [], - "source": [ - "def FedAvg(peft_params, model, weights=None):\n", - " \"\"\"\n", - " Perform Federated Averaging (FedAvg) on the model parameters.\n", - "\n", - " Parameters:\n", - " peft_params (list): A list of state dictionaries containing the model parameters from different clients.\n", - " model (torch.nn.Module): The model to which the averaged parameters will be applied.\n", - " weights (list, optional): A list of weights for averaging the parameters. If None, equal weights are used.\n", - "\n", - " Returns:\n", - " torch.nn.Module: The model with the averaged parameters applied.\n", - " \"\"\"\n", - " state_dicts = peft_params\n", - " state_dict = get_peft_model_state_dict(model)\n", - " for key in peft_params[0]:\n", - " dtype = state_dicts[0][key].dtype\n", - " state_dict[key] = torch.from_numpy(\n", - " np.average(\n", - " [state[key].to(torch.float).numpy() for state in state_dicts], axis=0, weights=weights\n", - " )\n", - " ).to(dtype)\n", - " set_peft_model_state_dict(model, state_dict)\n", - " return model" - ] - }, - { - "cell_type": "markdown", - "id": "810eb75e", - "metadata": {}, - "source": [ - "Now we come to the flow definition. 
The OpenFL Workflow Interface adopts the conventions set by Metaflow, that every workflow begins with `start` and concludes with the `end` task. The aggregator begins with an optionally passed in model and optimizer. The aggregator begins the flow with the `start` task, where the list of collaborators is extracted from the runtime (`self.collaborators = self.runtime.collaborators`) and is then used as the list of participants to run the task listed in `self.next`, `aggregated_model_validation`. The model, optimizer, and anything that is not explicitly excluded from the next function will be passed from the `start` function on the aggregator to the `aggregated_model_validation` task on the collaborator. Where the tasks run is determined by the placement decorator that precedes each task definition (`@aggregator` or `@collaborator`). Once each of the collaborators (defined in the runtime) complete the `aggregated_model_validation` task, they pass their current state onto the `train` task, from `train` to `local_model_validation`, and then finally to `join` at the aggregator. It is in `join` that an average is taken of the model weights, and the next round can begin.\n", - "\n", - "![Workflow Interface](../../../../docs/images/workflow_interface.png)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "58298e8e-ab9e-4377-966e-143823441697", - "metadata": {}, - "outputs": [], - "source": [ - "class FederatedFlow(FLSpec):\n", - " def __init__(self, model=None, optimizer=None, rounds=3, **kwargs):\n", - " \"\"\"\n", - " Initialize the class with the given model, optimizer, and number of rounds.\n", - "\n", - " Parameters:\n", - " model (torch.nn.Module, optional): The model to be used. 
If None, a ValueError is raised.\n", - " optimizer (torch.optim.Optimizer, optional): The optimizer to be used.\n", - " rounds (int, optional): The number of rounds for training or processing (default is 3).\n", - " **kwargs: Additional keyword arguments to be passed to the superclass initializer.\n", - "\n", - " Raises:\n", - " ValueError: If no model is provided.\n", - " \"\"\"\n", - " super().__init__(**kwargs)\n", - " if model is not None:\n", - " self.model = model\n", - " self.peft_params = get_peft_model_state_dict(self.model)\n", - " self.optimizer = optimizer\n", - " else:\n", - " raise ValueError(\"No model inputted\")\n", - "\n", - " self.rounds = rounds\n", - " \n", - "\n", - " @aggregator\n", - " def start(self):\n", - " \"\"\"\n", - " Initialize the model and set up the collaborators for federated learning.\n", - "\n", - " This method performs the initial setup for the model, including setting the\n", - " collaborators, initializing private variables, and starting the first round\n", - " of the federated learning process.\n", - " \"\"\"\n", - " print(f\"Performing initialization for model\")\n", - " self.collaborators = self.runtime.collaborators\n", - " self.current_round = 0\n", - " self.next(\n", - " self.aggregated_model_validation,\n", - " foreach=\"collaborators\",\n", - " )\n", - "\n", - " \n", - " @collaborator\n", - " def aggregated_model_validation(self):\n", - " \"\"\"\n", - " Perform aggregated model validation for a collaborator.\n", - "\n", - " This method loads the model, applies the PEFT configuration, and evaluates\n", - " the model using the provided training and evaluation datasets. 
The validation\n", - " score is then stored and the next step in the process is triggered.\n", - " \"\"\"\n", - " print(f\"Performing aggregated model validation for collaborator {self.input}\")\n", - " self.model = AutoModelForCausalLM.from_pretrained(\n", - " checkpoint_path, return_dict=True, **model_kwargs\n", - " )\n", - " self.model = get_peft_model(self.model, peft_conf)\n", - " set_peft_model_state_dict(self.model, self.peft_params)\n", - " trainer = SFTTrainer(\n", - " model=self.model,\n", - " args=train_conf,\n", - " peft_config=peft_conf,\n", - " train_dataset=self.train_dataset,\n", - " eval_dataset=self.eval_dataset,\n", - " max_seq_length=sequence_max_length,\n", - " dataset_text_field=\"text\",\n", - " tokenizer=tokenizer,\n", - " packing=True,\n", - " data_collator=transformers.DataCollatorForSeq2Seq(\n", - " tokenizer, pad_to_multiple_of=8, return_tensors=\"pt\", padding=True\n", - " ),\n", - " )\n", - "\n", - " trainer.remove_callback(PrinterCallback)\n", - " out = trainer.evaluate()\n", - " self.agg_validation_score = out[\"eval_loss\"]\n", - " print(f\"{self.input} value of {self.agg_validation_score}\")\n", - " self.next(self.train)\n", - "\n", - " @collaborator\n", - " def train(self):\n", - " \"\"\"\n", - " Train the model for a collaborator.\n", - "\n", - " This method trains the model using the provided training and evaluation datasets.\n", - " The training loss is stored, the model is saved, and the next step in the process\n", - " is triggered.\n", - " \"\"\"\n", - " trainer = SFTTrainer(\n", - " model=self.model,\n", - " args=train_conf,\n", - " peft_config=peft_conf,\n", - " train_dataset=self.train_dataset,\n", - " eval_dataset=self.eval_dataset,\n", - " max_seq_length=sequence_max_length,\n", - " dataset_text_field=\"text\",\n", - " tokenizer=tokenizer,\n", - " packing=True,\n", - " data_collator=transformers.DataCollatorForSeq2Seq(\n", - " tokenizer, pad_to_multiple_of=8, return_tensors=\"pt\", padding=True\n", - " ),\n", - " )\n", 
- "\n", - " out = trainer.train()\n", - " self.loss = out.training_loss\n", - " trainer.save_model()\n", - " self.training_completed = True\n", - " self.next(self.local_model_validation)\n", - "\n", - " @collaborator\n", - " def local_model_validation(self):\n", - " \"\"\"\n", - " Perform local model validation for a collaborator.\n", - "\n", - " This method evaluates the model using the provided training and evaluation datasets.\n", - " The validation score is stored, the PEFT parameters are updated, and the next step\n", - " in the process is triggered.\n", - " \"\"\"\n", - " trainer = SFTTrainer(\n", - " model=self.model,\n", - " args=train_conf,\n", - " peft_config=peft_conf,\n", - " train_dataset=processed_train_dataset,\n", - " eval_dataset=processed_test_dataset,\n", - " max_seq_length=sequence_max_length,\n", - " dataset_text_field=\"text\",\n", - " tokenizer=tokenizer,\n", - " packing=True,\n", - " data_collator=transformers.DataCollatorForSeq2Seq(\n", - " tokenizer, pad_to_multiple_of=8, return_tensors=\"pt\", padding=True\n", - " ),\n", - " )\n", - " out = trainer.evaluate()\n", - " self.local_validation_score = out[\"eval_loss\"]\n", - " self.peft_params = get_peft_model_state_dict(self.model)\n", - " print(f\"Doing local model validation for collaborator {self.input}\")\n", - " self.next(self.join, exclude=[\"training_completed\", \"model\"])\n", - "\n", - " @aggregator\n", - " def join(self, inputs):\n", - " \"\"\"\n", - " Aggregate the results from all collaborators and update the model.\n", - "\n", - " This method calculates the average loss, aggregated model accuracy, and local model\n", - " accuracy from all collaborators. 
The model parameters are updated using Federated\n", - " Averaging (FedAvg), and the next round of the process is triggered if applicable.\n", - " \"\"\"\n", - " self.average_loss = sum(input.loss for input in inputs) / len(inputs)\n", - " self.aggregated_model_accuracy = sum(\n", - " input.agg_validation_score for input in inputs\n", - " ) / len(inputs)\n", - " self.local_model_accuracy = sum(\n", - " input.local_validation_score for input in inputs\n", - " ) / len(inputs)\n", - " print(\n", - " f\"Average aggregated model validation values = {self.aggregated_model_accuracy}\"\n", - " )\n", - " print(f\"Average training loss = {self.average_loss}\")\n", - " print(f\"Average local model validation values = {self.local_model_accuracy}\")\n", - "\n", - " self.model = FedAvg([input.peft_params for input in inputs], self.model)\n", - " self.peft_params = get_peft_model_state_dict(self.model)\n", - "\n", - " self.model.save_pretrained(\"./aggregated/model\")\n", - " tokenizer.save_pretrained(\"./aggregated/tokenizer\")\n", - " self.current_round += 1\n", - " if self.current_round < self.rounds:\n", - " self.next(\n", - " self.aggregated_model_validation,\n", - " foreach=\"collaborators\",\n", - " exclude=[\"model\"],\n", - " )\n", - " else:\n", - " self.next(self.end)\n", - "\n", - " @aggregator\n", - " def end(self):\n", - " \"\"\"\n", - " End the federated learning process.\n", - "\n", - " This method marks the end of the federated learning process and performs any\n", - " necessary cleanup or finalization steps.\n", - " \"\"\"\n", - " print(f\"This is the end of the flow\")\n" - ] - }, - { - "cell_type": "markdown", - "id": "e120a656-f4a5-47a5-a3d4-62c5f3672bba", - "metadata": {}, - "source": [ - "You'll notice in the `FederatedFlow` definition above that there were certain attributes that the flow was not initialized with, namely the `train_dataset` and `eval_dataset` for each of the collaborators. 
These are **private_attributes** that are exposed only through the runtime. Each participant has its own set of private attributes: a dictionary where the key is the attribute name, and the value is the object that will be made accessible through that participant's task.\n", - "\n", - "Below, we segment shards of the Math_10k dataset for **two collaborators**: Portland and Seattle. Each has their own slice of the dataset that's accessible via the `train_dataset` or `eval_dataset` attribute. Note that the private attributes are flexible, and you can choose to pass in a completely different type of object to any of the collaborators or aggregator (with an arbitrary name). These private attributes will always be filtered out of the current state when transferring from collaborator to aggregator, or vice versa." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e5e108c6-5150-4931-9c01-6b64a913fa04", - "metadata": {}, - "outputs": [], - "source": [ - "# Setup participants\n", - "_aggregator = Aggregator()\n", - "_aggregator.private_attributes = {}\n", - "\n", - "# Setup collaborators with private attributes\n", - "collaborator_names = [\n", - " \"Portland\",\n", - " \"Seattle\",\n", - "]\n", - "_collaborators = [Collaborator(name=name) for name in collaborator_names]\n", - "\n", - "for idx, current_collaborator in enumerate(_collaborators):\n", - " # Set the private attributes of the Collaborator to include their specific training and testing data loaders\n", - " current_collaborator.private_attributes = {\n", - " \"train_dataset\": processed_train_dataset.shard(\n", - " num_shards=len(_collaborators), index=idx\n", - " ),\n", - " \"eval_dataset\": processed_test_dataset.shard(\n", - " num_shards=len(_collaborators), index=idx\n", - " ),\n", - " }\n", - "\n", - "local_runtime = LocalRuntime(\n", - " aggregator=_aggregator, collaborators=_collaborators, backend=\"single_process\"\n", - ")\n", - "print(f\"Local runtime collaborators = 
{local_runtime.collaborators}\")" - ] - }, - { - "cell_type": "markdown", - "id": "9cb61fc0", - "metadata": {}, - "source": [ - "## Run Experiment" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "38894111-41d9-4dd4-b1c8-eb7ec3cdd3e1", - "metadata": {}, - "outputs": [], - "source": [ - "flflow = FederatedFlow(model, rounds=2)\n", - "flflow.runtime = local_runtime\n", - "flflow.run()\n", - "\n", - "# Determine the final model accuracy:\n", - "print(f'\\nFinal aggregated model accuracy for {flflow.rounds} rounds of training: {flflow.aggregated_model_accuracy}')" - ] - }, - { - "cell_type": "markdown", - "id": "7bc8fe27", - "metadata": {}, - "source": [ - "## 🎉 Congratulations! 🎉\n", - "\n", - "Now that you've completed this notebook, check out our [other tutorials](https://github.com/securefederatedai/openfl/tree/develop/openfl-tutorials/experimental/)\n", - "\n", - "- Using the LocalRuntime Ray Backend for dedicated GPU access\n", - "- Vertical Federated Learning\n", - "- Model Watermarking\n", - "- Differential Privacy\n", - "- And More!" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.8" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/openfl/utilities/phi_utils.py b/openfl/utilities/phi_utils.py new file mode 100644 index 0000000000..e1c2415ed1 --- /dev/null +++ b/openfl/utilities/phi_utils.py @@ -0,0 +1,488 @@ +""" +Utility functions for Phi-4 model quantization and federated learning experiments. 
def get_gpu_memory_info():
    """Return current GPU memory usage in MB.

    Returns:
        dict: keys ``allocated``, ``reserved`` and ``max_allocated`` (all in
        MB). All three are 0 when CUDA is unavailable or the query fails.
    """
    zeros = {"allocated": 0, "reserved": 0, "max_allocated": 0}
    try:
        if not torch.cuda.is_available():
            return zeros
        mb = 1024 * 1024
        return {
            "allocated": torch.cuda.memory_allocated() / mb,
            "reserved": torch.cuda.memory_reserved() / mb,
            "max_allocated": torch.cuda.max_memory_allocated() / mb,
        }
    except Exception:
        # Best-effort probe: never let a memory query crash training.
        # (Original used a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit.)
        return zeros


class MemoryTracker:
    """Track GPU memory usage and loss metrics during training.

    Attributes:
        collaborator_name: Name of the collaborator being tracked.
        quant_type: Quantization label (e.g. ``"4bit"`` or ``"8bit"``).
        timestamps: Mapping of stage name -> memory snapshot dict.
        peak: Highest memory values observed via :meth:`update_peak`.
        training_loss: Last logged training loss (or None).
        eval_loss: Last logged evaluation loss (or None).
    """

    def __init__(self, collaborator_name, quant_type):
        self.collaborator_name = collaborator_name
        self.quant_type = quant_type
        self.timestamps = {}
        self.peak = {"allocated": 0, "reserved": 0, "max_allocated": 0}
        self.training_loss = None
        self.eval_loss = None

    def log(self, timestamp):
        """Snapshot current memory usage under the stage name ``timestamp``."""
        self.timestamps[timestamp] = get_gpu_memory_info()

    def log_loss(self, training_loss=None, eval_loss=None):
        """Record training and/or evaluation loss; None leaves a value unchanged."""
        if training_loss is not None:
            self.training_loss = training_loss
        if eval_loss is not None:
            self.eval_loss = eval_loss

    def update_peak(self):
        """Raise each stored peak to the current usage where it is higher."""
        current = get_gpu_memory_info()
        for key in self.peak:
            self.peak[key] = max(self.peak[key], current[key])

    def reset_peak(self):
        """Reset all peak memory values back to zero."""
        self.peak = {"allocated": 0, "reserved": 0, "max_allocated": 0}

    def report(self):
        """Print a human-readable memory usage and performance report."""
        print(f"\n==== Memory Usage Report for {self.collaborator_name} ({self.quant_type}) ====")
        print("Peak Memory Usage:")
        print(f"  Allocated: {self.peak['allocated']:.2f} MB")
        print(f"  Reserved: {self.peak['reserved']:.2f} MB")
        print(f"  Max Allocated: {self.peak['max_allocated']:.2f} MB")

        print("\nMemory Usage by Stage:")
        for timestamp, mem in self.timestamps.items():
            print(f"  {timestamp}:")
            print(f"    Allocated: {mem['allocated']:.2f} MB")
            print(f"    Reserved: {mem['reserved']:.2f} MB")
            print(f"    Max Allocated: {mem['max_allocated']:.2f} MB")

        print("\nPerformance Metrics:")
        if self.training_loss is not None:
            print(f"  Training Loss: {self.training_loss:.4f}")
        if self.eval_loss is not None:
            print(f"  Evaluation Loss: {self.eval_loss:.4f}")
        print("-" * 50)

    def get_stats(self):
        """Return all tracked statistics flattened into a single dict.

        Per-stage snapshots are flattened as ``"<stage>_<metric>"`` keys.
        """
        stats = {
            "peak_allocated": self.peak["allocated"],
            "peak_reserved": self.peak["reserved"],
            "peak_max_allocated": self.peak["max_allocated"],
            "quant_type": self.quant_type,
            "training_loss": self.training_loss,
            "eval_loss": self.eval_loss,
        }
        for timestamp, mem in self.timestamps.items():
            stats[f"{timestamp}_allocated"] = mem["allocated"]
            stats[f"{timestamp}_reserved"] = mem["reserved"]
            stats[f"{timestamp}_max_allocated"] = mem["max_allocated"]
        return stats
def plot_memory_metrics(flow_4bit, flow_8bit):  # NOQA: C901
    """Plot and compare memory metrics between 4-bit and 8-bit quantization.

    Args:
        flow_4bit: Completed flow carrying ``all_memory_stats`` for the 4-bit run.
        flow_8bit: Completed flow carrying ``all_memory_stats`` for the 8-bit run.

    Draws a 2x2 figure (peak memory, training loss, eval loss, and a
    memory-vs-loss bubble chart) and prints a textual summary comparison.
    """
    try:
        fig, axs = plt.subplots(2, 2, figsize=(16, 12))
        fig.suptitle("4-bit vs 8-bit Quantization Comparison", fontsize=16)

        # Default palettes for the two demo collaborators. The .get()
        # fallbacks below keep the plot working (instead of raising
        # KeyError) for any other collaborator name.
        colors_4bit = {"Portland": "blue", "Seattle": "green"}
        colors_8bit = {"Portland": "darkblue", "Seattle": "darkgreen"}
        markers_4bit = {"Portland": "o", "Seattle": "s"}
        markers_8bit = {"Portland": "^", "Seattle": "D"}

        def style_for(quant_type, collab):
            """Return (color, marker) for a series, with safe fallbacks."""
            if quant_type == "4-bit":
                return colors_4bit.get(collab, "gray"), markers_4bit.get(collab, "o")
            return colors_8bit.get(collab, "black"), markers_8bit.get(collab, "^")

        # Flatten the per-flow nested stats into tidy rows for pandas.
        memory_data = []
        for quant, flow in [("4-bit", flow_4bit), ("8-bit", flow_8bit)]:
            for collab, rounds_data in flow.all_memory_stats.items():
                for round_name, metrics in rounds_data.items():
                    memory_data.append(
                        {
                            "Collaborator": collab,
                            "Round": int(round_name.split("_")[1]),
                            "Quantization": quant,
                            "Peak Memory (MB)": metrics.get("peak_max_allocated", 0),
                            "Training Loss": metrics.get("training_loss", 0),
                            "Eval Loss": metrics.get("eval_loss", 0),
                        }
                    )
        df = pd.DataFrame(memory_data)

        def plot_lines(ax, ycol, title, ylabel):
            """Draw one line per (collaborator, quantization) pair on ``ax``."""
            ax.set_title(title)
            for quant_type in ["4-bit", "8-bit"]:
                for collab in df["Collaborator"].unique():
                    subset = df[
                        (df["Quantization"] == quant_type) & (df["Collaborator"] == collab)
                    ]
                    color, marker = style_for(quant_type, collab)
                    ax.plot(
                        subset["Round"],
                        subset[ycol],
                        marker=marker,
                        linestyle="-",
                        label=f"{collab} ({quant_type})",
                        color=color,
                    )
            ax.set_xlabel("Round")
            ax.set_ylabel(ylabel)
            ax.legend()
            ax.grid(True, alpha=0.3)

        # Plots 1-3 share the same structure; only the y-column differs.
        plot_lines(axs[0, 0], "Peak Memory (MB)", "Peak Memory Usage by Round", "Memory (MB)")
        axs[0, 0].yaxis.set_major_formatter(EngFormatter(unit="B"))
        plot_lines(axs[0, 1], "Training Loss", "Training Loss by Round", "Loss")
        plot_lines(axs[1, 0], "Eval Loss", "Evaluation Loss by Round", "Loss")

        # Plot 4: bubble chart of memory vs eval loss; bubble size grows with round.
        axs[1, 1].set_title("Memory Usage vs. Evaluation Loss")
        for quant_type in ["4-bit", "8-bit"]:
            for collab in df["Collaborator"].unique():
                subset = df[
                    (df["Quantization"] == quant_type) & (df["Collaborator"] == collab)
                ]
                color, marker = style_for(quant_type, collab)
                sizes = [100 * (r + 1) for r in subset["Round"]]
                axs[1, 1].scatter(
                    subset["Peak Memory (MB)"],
                    subset["Eval Loss"],
                    s=sizes,
                    alpha=0.7,
                    label=f"{collab} ({quant_type})",
                    color=color,
                    marker=marker,
                )
                # Annotate each point with its round number.
                for _, row in subset.iterrows():
                    axs[1, 1].annotate(
                        f"R{int(row['Round'])}",
                        (row["Peak Memory (MB)"], row["Eval Loss"]),
                        xytext=(5, 5),
                        textcoords="offset points",
                    )
        axs[1, 1].set_xlabel("Peak Memory (MB)")
        axs[1, 1].set_ylabel("Evaluation Loss")
        axs[1, 1].legend()
        axs[1, 1].grid(True, alpha=0.3)
        axs[1, 1].xaxis.set_major_formatter(EngFormatter(unit="B"))

        plt.tight_layout()
        plt.subplots_adjust(top=0.92)
        plt.show()

        # Print summary comparison (row 0 = 4-bit, row 1 = 8-bit after groupby sort).
        print("\n==== Performance Summary ====\n")
        summary = (
            df.groupby("Quantization")
            .agg({"Peak Memory (MB)": "mean", "Training Loss": "mean", "Eval Loss": "mean"})
            .reset_index()
        )

        mem_diff_pct = (
            (summary.loc[1, "Peak Memory (MB)"] - summary.loc[0, "Peak Memory (MB)"])
            / summary.loc[0, "Peak Memory (MB)"]
            * 100
        )
        eval_diff_pct = (
            (summary.loc[1, "Eval Loss"] - summary.loc[0, "Eval Loss"])
            / summary.loc[0, "Eval Loss"]
            * 100
        )

        print("Memory Usage Comparison:")
        print(f"  4-bit Avg: {summary.loc[0, 'Peak Memory (MB)']:.2f} MB")
        print(f"  8-bit Avg: {summary.loc[1, 'Peak Memory (MB)']:.2f} MB")
        print(
            f"  Difference: {abs(mem_diff_pct):.1f}% {'more' if mem_diff_pct > 0 else 'less'} memory with 8-bit"
        )

        print("\nEvaluation Loss Comparison:")
        print(f"  4-bit Avg: {summary.loc[0, 'Eval Loss']:.4f}")
        print(f"  8-bit Avg: {summary.loc[1, 'Eval Loss']:.4f}")
        print(
            f"  Difference: {abs(eval_diff_pct):.1f}% {'higher' if eval_diff_pct > 0 else 'lower'} loss with 8-bit"
        )

        # Loss saved per MB of extra memory. Guard against identical memory
        # footprints, which previously raised ZeroDivisionError.
        mem_delta = summary.loc[0, "Peak Memory (MB)"] - summary.loc[1, "Peak Memory (MB)"]
        loss_delta = summary.loc[0, "Eval Loss"] - summary.loc[1, "Eval Loss"]
        loss_efficiency = loss_delta / mem_delta if mem_delta else 0

        if loss_efficiency > 0:
            efficiency_msg = "8-bit provides more efficient memory usage relative to loss"
        else:
            efficiency_msg = "4-bit provides more efficient memory usage relative to loss"
        print(f"\nEfficiency Analysis: {efficiency_msg}")
    except ImportError:
        print(
            "Plotting requires matplotlib and pandas. Install with: pip install matplotlib pandas"
        )
    except Exception as e:
        print(f"Error plotting metrics: {str(e)}")
def plot_loss_metrics(flow_4bit, flow_8bit):  # NOQA: C901
    """Plot training and evaluation loss metrics comparing 4-bit and 8-bit quantization.

    Args:
        flow_4bit: Completed flow carrying ``all_memory_stats`` for the 4-bit run.
        flow_8bit: Completed flow for the 8-bit run, or None to plot 4-bit only
            (grouping then falls back to per-collaborator lines).
    """

    def tensor_to_float(val):
        """Convert a (possibly GPU) tensor to a plain float; None passes through."""
        if val is None:
            return None
        if isinstance(val, torch.Tensor):
            return val.detach().cpu().float().numpy().item()
        return val

    def collect_rows(flow, default_quant):
        """Flatten one flow's nested stats into tidy loss rows.

        Replaces the original's copy-pasted 4-bit/8-bit loops with a single
        shared implementation.
        """
        rows = []
        for collab, rounds_data in flow.all_memory_stats.items():
            for round_name, stats in rounds_data.items():
                round_num = int(round_name.split("_")[1]) if "_" in round_name else 0
                training_loss = tensor_to_float(stats.get("training_loss"))
                eval_loss = tensor_to_float(stats.get("eval_loss"))
                # Skip rounds with no recorded losses at all.
                if training_loss is not None or eval_loss is not None:
                    rows.append(
                        {
                            "Collaborator": collab,
                            "Round": round_name,
                            "Round Number": round_num,
                            "Training Loss": training_loss,
                            "Eval Loss": eval_loss,
                            "Quantization": stats.get("quant_type", default_quant),
                        }
                    )
        return rows

    loss_data = collect_rows(flow_4bit, "4bit")
    if flow_8bit is not None:
        loss_data += collect_rows(flow_8bit, "8bit")
    loss_df = pd.DataFrame(loss_data)

    # Two stacked panels: training loss on top, evaluation loss below.
    fig, axes = plt.subplots(2, 1, figsize=(15, 12), gridspec_kw={"height_ratios": [1, 1]})

    # With a single flow there is only one quantization level, so group by
    # collaborator instead.
    group_var = "Quantization" if flow_8bit else "Collaborator"

    def draw_panel(ax, ycol, title):
        """Render one loss-vs-round panel with shared styling."""
        sns.lineplot(
            x="Round Number",
            y=ycol,
            hue=group_var,
            data=loss_df,
            marker="o",
            sort=True,
            linewidth=3,
            markersize=10,
            ax=ax,
        )
        ax.set_title(title, fontsize=14, fontweight="bold")
        ax.set_xlabel("Round", fontsize=12)
        ax.set_ylabel("Loss", fontsize=12)
        ax.legend(title=group_var, bbox_to_anchor=(1.05, 1), loc="upper left")

    draw_panel(axes[0], "Training Loss", "Training Loss Across Rounds")
    draw_panel(axes[1], "Eval Loss", "Evaluation Loss Across Rounds")

    plt.tight_layout()
    plt.show()

    # Print summary statistics when both quantization modes are present.
    if flow_8bit:
        print("\n==== Loss Comparison: 4-bit vs 8-bit ====\n")
        summary = loss_df.groupby("Quantization").agg(
            {"Training Loss": ["mean", "std"], "Eval Loss": ["mean", "std"]}
        )
        print(
            f"Training Loss (4-bit): {summary.loc['4bit', ('Training Loss', 'mean')]:.4f} ± {summary.loc['4bit', ('Training Loss', 'std')]:.4f}"
        )
        print(
            f"Training Loss (8-bit): {summary.loc['8bit', ('Training Loss', 'mean')]:.4f} ± {summary.loc['8bit', ('Training Loss', 'std')]:.4f}"
        )
        print(
            f"\nEval Loss (4-bit): {summary.loc['4bit', ('Eval Loss', 'mean')]:.4f} ± {summary.loc['4bit', ('Eval Loss', 'std')]:.4f}"
        )
        print(
            f"Eval Loss (8-bit): {summary.loc['8bit', ('Eval Loss', 'mean')]:.4f} ± {summary.loc['8bit', ('Eval Loss', 'std')]:.4f}"
        )
def plot_aggregated_metrics(flow_4bit, flow_8bit):
    """Plot aggregated metrics comparing 4-bit and 8-bit quantization"""
    # One row, two panels: loss-per-round curve and a final-metrics bar chart.
    fig, axes = plt.subplots(1, 2, figsize=(16, 6))

    def tensor_to_float(val):
        """Convert a (possibly GPU) tensor to a plain float; None passes through."""
        if val is None:
            return None
        if isinstance(val, torch.Tensor):
            return val.detach().cpu().float().numpy().item()
        return val

    def as_floats(history):
        """Convert every entry of a loss history to a plain float."""
        return [tensor_to_float(entry) for entry in history]

    # Move any tensor values to CPU before plotting.
    loss_history_4bit = as_floats(flow_4bit.average_loss_history)
    loss_history_8bit = as_floats(flow_8bit.average_loss_history)
    agg_model_loss_4bit = as_floats(flow_4bit.agg_model_loss_history)
    agg_model_loss_8bit = as_floats(flow_8bit.agg_model_loss_history)
    local_model_loss_4bit = as_floats(flow_4bit.local_model_loss_history)
    local_model_loss_8bit = as_floats(flow_8bit.local_model_loss_history)

    rounds = list(range(len(loss_history_4bit)))

    # Left panel: average training loss per round for both quantization modes.
    axes[0].plot(rounds, loss_history_4bit, "bo-", linewidth=2, markersize=8, label="4-bit")
    axes[0].plot(rounds, loss_history_8bit, "ro-", linewidth=2, markersize=8, label="8-bit")
    axes[0].set_title("Average Training Loss by Round", fontsize=14, fontweight="bold")
    axes[0].set_xlabel("Round", fontsize=12)
    axes[0].set_ylabel("Loss", fontsize=12)
    axes[0].grid(True, alpha=0.3)
    axes[0].legend(fontsize=10)

    # Right panel: grouped bars showing the final value of each metric.
    metrics = ["Avg Training Loss", "Agg Model Loss", "Local Model Loss"]
    values_4bit = [loss_history_4bit[-1], agg_model_loss_4bit[-1], local_model_loss_4bit[-1]]
    values_8bit = [loss_history_8bit[-1], agg_model_loss_8bit[-1], local_model_loss_8bit[-1]]

    positions = np.arange(len(metrics))
    width = 0.35

    bars_left = axes[1].bar(
        positions - width / 2, values_4bit, width, label="4-bit", color="blue", alpha=0.7
    )
    bars_right = axes[1].bar(
        positions + width / 2, values_8bit, width, label="8-bit", color="red", alpha=0.7
    )

    # Label each bar with its numeric value.
    for bar_group in (bars_left, bars_right):
        for bar in bar_group:
            height = bar.get_height()
            axes[1].annotate(
                f"{height:.4f}",
                xy=(bar.get_x() + bar.get_width() / 2, height),
                xytext=(0, 3),
                textcoords="offset points",
                ha="center",
                va="bottom",
                fontsize=9,
            )

    axes[1].set_title("Final Metrics Comparison", fontsize=14, fontweight="bold")
    axes[1].set_ylabel("Loss", fontsize=12)
    axes[1].set_xticks(positions)
    axes[1].set_xticklabels(metrics, rotation=15)
    axes[1].legend(loc="upper right", fontsize=10)
    axes[1].grid(True, alpha=0.3)

    plt.tight_layout()
    plt.show()

    # Relative change of each final metric, 8-bit vs 4-bit.
    print("\n==== Percentage Difference (8-bit vs 4-bit) ====\n")
    for metric, value_4bit, value_8bit in zip(metrics, values_4bit, values_8bit):
        pct_diff = ((value_8bit - value_4bit) / value_4bit) * 100
        direction = "higher" if pct_diff > 0 else "lower"
        print(f"{metric}: 8-bit is {abs(pct_diff):.2f}% {direction} than 4-bit")