diff --git "a/examples/PrithviWxC_rollout.ipynb" "b/examples/PrithviWxC_rollout.ipynb"
new file mode 100644
--- /dev/null
+++ "b/examples/PrithviWxC_rollout.ipynb"
@@ -0,0 +1,3670 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# PrithviWxC Rollout Inference\n",
+    "If you haven't already, take a look at the example for the PrithviWxC core\n",
+    "model, as we will skip over the points covered there.\n",
+    "\n",
+    "Here we will introduce the PrithviWxC model that was further trained for\n",
+    "autoregressive rollout, a common strategy to increase the accuracy and\n",
+    "stability of models applied to forecasting-type tasks."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import random\n",
+    "from pathlib import Path\n",
+    "\n",
+    "import matplotlib.pyplot as plt\n",
+    "import numpy as np\n",
+    "import torch\n",
+    "from huggingface_hub import hf_hub_download, snapshot_download\n",
+    "\n",
+    "# Set backend etc.\n",
+    "torch.jit.enable_onednn_fusion(True)\n",
+    "if torch.cuda.is_available():\n",
+    "    torch.backends.cudnn.benchmark = True\n",
+    "    torch.backends.cudnn.deterministic = True\n",
+    "\n",
+    "# Set seeds\n",
+    "random.seed(42)\n",
+    "if torch.cuda.is_available():\n",
+    "    torch.cuda.manual_seed(42)\n",
+    "torch.manual_seed(42)\n",
+    "np.random.seed(42)\n",
+    "\n",
+    "# Set device\n",
+    "if torch.cuda.is_available():\n",
+    "    device = torch.device(\"cuda\")\n",
+    "else:\n",
+    "    device = torch.device(\"cpu\")\n",
+    "\n",
+    "# Set variables\n",
+    "surface_vars = [\n",
+    "    \"EFLUX\",\n",
+    "    \"GWETROOT\",\n",
+    "    \"HFLUX\",\n",
+    "    \"LAI\",\n",
+    "    \"LWGAB\",\n",
+    "    \"LWGEM\",\n",
+    "    \"LWTUP\",\n",
+    "    \"PS\",\n",
+    "    \"QV2M\",\n",
+    "    \"SLP\",\n",
+    "    \"SWGNT\",\n",
+    "    \"SWTNT\",\n",
+    "    \"T2M\",\n",
+    "    \"TQI\",\n",
+    "    \"TQL\",\n",
+    "    \"TQV\",\n",
+    "    \"TS\",\n",
+    "    \"U10M\",\n",
+    "    \"V10M\",\n",
+    "    \"Z0M\",\n",
+    "]\n",
+    "static_surface_vars = [\"FRACI\", \"FRLAND\", \"FROCEAN\", \"PHIS\"]\n",
+    "vertical_vars = [\"CLOUD\", \"H\", \"OMEGA\", \"PL\", \"QI\", \"QL\", \"QV\", \"T\", \"U\", \"V\"]\n",
+    "levels = [\n",
+    "    34.0,\n",
+    "    39.0,\n",
+    "    41.0,\n",
+    "    43.0,\n",
+    "    44.0,\n",
+    "    45.0,\n",
+    "    48.0,\n",
+    "    51.0,\n",
+    "    53.0,\n",
+    "    56.0,\n",
+    "    63.0,\n",
+    "    68.0,\n",
+    "    71.0,\n",
+    "    72.0,\n",
+    "]\n",
+    "padding = {\"level\": [0, 0], \"lat\": [0, -1], \"lon\": [0, 0]}"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Lead time\n",
+    "When performing autoregressive rollout, the intermediate steps require the\n",
+    "static data at those times and---if using `residual=climate`---the\n",
+    "intermediate climatology. We provide a dataloader that extends the MERRA-2\n",
+    "loader of the core model, adding in these additional terms. Further, it\n",
+    "returns target data for the intermediate steps, should those be required\n",
+    "for loss terms.\n",
+    "\n",
+    "The `lead_time` flag still sets the target time for the model; however, it\n",
+    "is now a single value and must be a positive integer multiple of\n",
+    "`-input_time`.\n",
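+    "\n",
+    "For example, with `input_time = -3`, setting `lead_time = 12` requests a\n",
+    "12-hour forecast made of four 3-hour model steps. A minimal sketch of this\n",
+    "bookkeeping (our illustration, not part of the dataloader API):\n",
+    "\n",
+    "```python\n",
+    "input_time = -3  # hours between the two input states\n",
+    "lead_time = 12   # total forecast horizon in hours\n",
+    "nsteps, rem = divmod(lead_time, -input_time)\n",
+    "assert rem == 0, \"lead_time must be a multiple of -input_time\"\n",
+    "print(nsteps)  # 4 model steps of 3 hours each\n",
+    "```"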
" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "lead_time = 3 # This variable can be change to change the task\n", + "input_time = -3 # This variable can be change to change the task" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Data file\n", + "MERRA-2 data is available from 1980 to the present day,\n", + "at 3-hour temporal resolution. The dataloader we have provided\n", + "expects the surface data and vertical data to be saved in\n", + "separate files, and when provided with the directories, will\n", + "search for the relevant data that falls within the provided time range.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "159bec6eee1846d680fe284324094487", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Fetching 1 files: 0%| | 0/1 [00:00 dict[str, Tensor]:\n", + " \"\"\"Prepressing function for MERRA2 Dataset\n", + "\n", + " Args:\n", + " batch (dict): List of training samples, each sample should be a\n", + " dictionary with the following keys::\n", + "\n", + " 'sur_static': Numpy array of shape (3, lat, lon). For each pixel (lat, lon), the first dimension indexes sin(lat), cos(lon), sin(lon).\n", + " 'sur_vals': Torch tensor of shape (parameter, time, lat, lon).\n", + " 'sur_tars': Torch tensor of shape (parameter, time, lat, lon).\n", + " 'ulv_vals': Torch tensor of shape (parameter, level, time, lat, lon).\n", + " 'ulv_tars': Torch tensor of shape (parameter, level, time, lat, lon).\n", + " 'sur_climate': Torch tensor of shape (parameter, lat, lon)\n", + " 'ulv_climate': Torch tensor of shape (parameter, level, lat, lon)\n", + " 'lead_time': Integer.\n", + " 'input_time': Integer.\n", + "\n", + " padding: Dictionary with keys 'level', 'lat', 'lon', each of dim 2.\n", + "\n", + " Returns:\n", + " Dictionary with the following keys::\n", + "\n", + " 'x': [batch, time, parameter, lat, lon]\n", + " 'y': [batch, parameter, lat, lon]\n", + " 'static': [batch, parameter, lat, lon]\n", + " 'lead_time': [batch]\n", + " 'input_time': [batch]\n", + " 'climate (Optional)': [batch, parameter, lat, lon]\n", + "\n", + " Note:\n", + " Here, for x and y, 'parameter' is [surface parameter, upper level,\n", + " parameter x level]. 
+    "        parameter x level]. Similarly, for the static information we have\n",
+    "        [sin(lat), cos(lon), sin(lon), cos(doy), sin(doy), cos(hod),\n",
+    "        sin(hod), ...].\n",
+    "    \"\"\"  # noqa: E501\n",
+    "    b0 = batch[0]\n",
+    "    nbatch = len(batch)\n",
+    "    data_keys = set(b0.keys())\n",
+    "\n",
+    "    essential_keys = {\n",
+    "        \"sur_static\",\n",
+    "        \"sur_vals\",\n",
+    "        \"sur_tars\",\n",
+    "        \"ulv_vals\",\n",
+    "        \"ulv_tars\",\n",
+    "        \"input_time\",\n",
+    "        \"lead_time\",\n",
+    "    }\n",
+    "\n",
+    "    climate_keys = {\n",
+    "        \"sur_climate\",\n",
+    "        \"ulv_climate\",\n",
+    "    }\n",
+    "\n",
+    "    all_keys = essential_keys | climate_keys\n",
+    "\n",
+    "    if not essential_keys.issubset(data_keys):\n",
+    "        raise ValueError(\"Missing essential keys.\")\n",
+    "\n",
+    "    if not data_keys.issubset(all_keys):\n",
+    "        raise ValueError(\"Unexpected keys in batch.\")\n",
+    "\n",
+    "    # Bring all tensors from the batch into a single tensor\n",
+    "    upl_x = torch.empty((nbatch, *b0[\"ulv_vals\"].shape))\n",
+    "    upl_y = torch.empty((nbatch, *b0[\"ulv_tars\"].shape))\n",
+    "\n",
+    "    sur_x = torch.empty((nbatch, *b0[\"sur_vals\"].shape))\n",
+    "    sur_y = torch.empty((nbatch, *b0[\"sur_tars\"].shape))\n",
+    "\n",
+    "    sur_sta = torch.empty((nbatch, *b0[\"sur_static\"].shape))\n",
+    "\n",
+    "    lead_time = torch.empty((nbatch,), dtype=torch.float32)\n",
+    "    input_time = torch.empty((nbatch,), dtype=torch.float32)\n",
+    "\n",
+    "    for i, rec in enumerate(batch):\n",
+    "        sur_x[i] = rec[\"sur_vals\"]\n",
+    "        sur_y[i] = rec[\"sur_tars\"]\n",
+    "\n",
+    "        upl_x[i] = rec[\"ulv_vals\"]\n",
+    "        upl_y[i] = rec[\"ulv_tars\"]\n",
+    "\n",
+    "        sur_sta[i] = rec[\"sur_static\"]\n",
+    "\n",
+    "        lead_time[i] = rec[\"lead_time\"]\n",
+    "        input_time[i] = rec[\"input_time\"]\n",
+    "\n",
+    "    return_value = {\n",
+    "        \"lead_time\": lead_time,\n",
+    "        \"input_time\": input_time,\n",
+    "    }\n",
+    "\n",
+    "    # Reshape (batch, parameter, level, time, lat, lon) ->\n",
+    "    # (batch, time, parameter, level, lat, lon)\n",
+    "    upl_x = upl_x.permute((0, 3, 1, 2, 4, 5))\n",
+    "    upl_y = upl_y.permute((0, 3, 1, 2, 4, 5))\n",
+    "    # Reshape (batch, parameter, time, lat, lon) ->\n",
+    "    # (batch, time, parameter, lat, lon)\n",
+    "    sur_x = sur_x.permute((0, 2, 1, 3, 4))\n",
+    "    sur_y = sur_y.permute((0, 2, 1, 3, 4))\n",
+    "\n",
+    "    # Pad\n",
+    "    padding_2d = (*padding[\"lon\"], *padding[\"lat\"])\n",
+    "\n",
+    "    def pad2d(x):\n",
+    "        return torch.nn.functional.pad(x, padding_2d, mode=\"constant\", value=0)\n",
+    "\n",
+    "    padding_3d = (*padding[\"lon\"], *padding[\"lat\"], *padding[\"level\"])\n",
+    "\n",
+    "    def pad3d(x):\n",
+    "        return torch.nn.functional.pad(x, padding_3d, mode=\"constant\", value=0)\n",
+    "\n",
+    "    sur_x = pad2d(sur_x).contiguous()\n",
+    "    upl_x = pad3d(upl_x).contiguous()\n",
+    "    sur_y = pad2d(sur_y).contiguous()\n",
+    "    upl_y = pad3d(upl_y).contiguous()\n",
+    "    return_value[\"static\"] = pad2d(sur_sta).contiguous()\n",
+    "\n",
+    "    # Remove time for targets\n",
+    "    upl_y = torch.squeeze(upl_y, 1)\n",
+    "    sur_y = torch.squeeze(sur_y, 1)\n",
+    "\n",
+    "    # We stack along the combined parameter x level dimension\n",
+    "    return_value[\"x\"] = torch.cat(\n",
+    "        (sur_x, upl_x.view(*upl_x.shape[:2], -1, *upl_x.shape[4:])), dim=2\n",
+    "    )\n",
+    "    return_value[\"y\"] = torch.cat(\n",
+    "        (sur_y, upl_y.view(upl_y.shape[0], -1, *upl_y.shape[3:])), dim=1\n",
+    "    )\n",
+    "\n",
+    "    if climate_keys.issubset(data_keys):\n",
+    "        sur_climate = torch.empty((nbatch, *b0[\"sur_climate\"].shape))\n",
+    "        ulv_climate = torch.empty((nbatch, *b0[\"ulv_climate\"].shape))\n",
+    "        for i, rec in enumerate(batch):\n",
+    "            sur_climate[i] = rec[\"sur_climate\"]\n",
+    "            ulv_climate[i] = rec[\"ulv_climate\"]\n",
+    "        sur_climate = pad2d(sur_climate)\n",
+    "        ulv_climate = pad3d(ulv_climate)\n",
+    "\n",
+    "        return_value[\"climate\"] = torch.cat(\n",
+    "            (\n",
+    "                sur_climate,\n",
+    "                ulv_climate.view(nbatch, -1, *ulv_climate.shape[3:]),\n",
+    "            ),\n",
+    "            dim=1,\n",
+    "        )\n",
+    "\n",
+    "    return return_value\n",
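+    "\n",
+    "\n",
+    "# Usage sketch (ours, not from the original notebook): `preproc` collates a\n",
+    "# list of dataset samples, so it could be wired into a DataLoader, e.g.\n",
+    "#\n",
+    "#     from functools import partial\n",
+    "#     from torch.utils.data import DataLoader\n",
+    "#     loader = DataLoader(dataset, batch_size=1,\n",
+    "#                         collate_fn=partial(preproc, padding=padding))\n",
+    "#\n",
+    "# Each batch then arrives as the padded dictionary described above.\n",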
+    "\n",
+    "\n",
+    "def input_scalers(\n",
+    "    surf_vars: list[str],\n",
+    "    vert_vars: list[str],\n",
+    "    levels: list[float],\n",
+    "    surf_path: str | Path,\n",
+    "    vert_path: str | Path,\n",
+    ") -> tuple[Tensor, Tensor]:\n",
+    "    \"\"\"Reads the input scalers\n",
+    "\n",
+    "    Args:\n",
+    "        surf_vars: surface variables to be used.\n",
+    "        vert_vars: vertical variables to be used.\n",
+    "        levels: MERRA2 levels to use.\n",
+    "        surf_path: path to surface scalers file.\n",
+    "        vert_path: path to vertical level scalers file.\n",
+    "\n",
+    "    Returns:\n",
+    "        mu (Tensor): mean values\n",
+    "        sig (Tensor): standard deviation values\n",
+    "    \"\"\"\n",
+    "    with h5py.File(Path(surf_path), \"r\", libver=\"latest\") as surf_file:\n",
+    "        stats = [x.decode().lower() for x in surf_file[\"statistic\"][()]]\n",
+    "        mu_idx = stats.index(\"mu\")\n",
+    "        sig_idx = stats.index(\"sigma\")\n",
+    "\n",
+    "        s_mu = torch.tensor([surf_file[k][()][mu_idx] for k in surf_vars])\n",
+    "        s_sig = torch.tensor([surf_file[k][()][sig_idx] for k in surf_vars])\n",
+    "\n",
+    "    with h5py.File(Path(vert_path), \"r\", libver=\"latest\") as vert_file:\n",
+    "        stats = [x.decode().lower() for x in vert_file[\"statistic\"][()]]\n",
+    "        mu_idx = stats.index(\"mu\")\n",
+    "        sig_idx = stats.index(\"sigma\")\n",
+    "\n",
+    "        lvl = vert_file[\"lev\"][()]\n",
+    "        l_idx = [np.where(lvl == v)[0].item() for v in levels]\n",
+    "\n",
+    "        v_mu = np.array([vert_file[k][()][mu_idx, l_idx] for k in vert_vars])\n",
+    "        v_sig = np.array([vert_file[k][()][sig_idx, l_idx] for k in vert_vars])\n",
+    "\n",
+    "    v_mu = torch.from_numpy(v_mu).view(-1)\n",
+    "    v_sig = torch.from_numpy(v_sig).view(-1)\n",
+    "\n",
+    "    mu = torch.cat((s_mu, v_mu), dim=0).to(torch.float32)\n",
+    "    sig = torch.cat((s_sig, v_sig), dim=0).to(torch.float32).clamp(1e-4, 1e4)\n",
+    "    return mu, sig\n",
+    "\n",
+    "\n",
+    "def static_input_scalers(\n",
+    "    scalar_path: str | Path, stat_vars: list[str], unscaled_params: int = 7\n",
+    ") -> tuple[Tensor, Tensor]:\n",
+    "    scalar_path = Path(scalar_path)\n",
+    "\n",
+    "    with h5py.File(scalar_path, \"r\", libver=\"latest\") as scaler_file:\n",
+    "        stats = [x.decode().lower() for x in scaler_file[\"statistic\"][()]]\n",
+    "        mu_idx = stats.index(\"mu\")\n",
+    "        sig_idx = stats.index(\"sigma\")\n",
+    "\n",
+    "        mu = torch.tensor([scaler_file[k][()][mu_idx] for k in stat_vars])\n",
+    "        sig = torch.tensor([scaler_file[k][()][sig_idx] for k in stat_vars])\n",
+    "\n",
+    "    z = torch.zeros(unscaled_params, dtype=mu.dtype, device=mu.device)\n",
+    "    o = torch.ones(unscaled_params, dtype=sig.dtype, device=sig.device)\n",
+    "    mu = torch.cat((z, mu), dim=0).to(torch.float32)\n",
+    "    sig = torch.cat((o, sig), dim=0).to(torch.float32)\n",
+    "\n",
+    "    return mu, sig.clamp(1e-4, 1e4)\n",
+    "\n",
+    "\n",
+    "def output_scalers(\n",
+    "    surf_vars: list[str],\n",
+    "    vert_vars: list[str],\n",
+    "    levels: list[float],\n",
+    "    surf_path: str | Path,\n",
+    "    vert_path: str | Path,\n",
+    ") -> Tensor:\n",
+    "    surf_path = Path(surf_path)\n",
+    "    vert_path = Path(vert_path)\n",
+    "\n",
+    "    with h5py.File(surf_path, \"r\", libver=\"latest\") as surf_file:\n",
+    "        svars = torch.tensor([surf_file[k][()] for k in surf_vars])\n",
+    "\n",
+    "    with h5py.File(vert_path, \"r\", libver=\"latest\") as vert_file:\n",
+    "        lvl = vert_file[\"lev\"][()]\n",
+    "        l_idx = [np.where(lvl == v)[0].item() for v in levels]\n",
+    "        vvars = np.array([vert_file[k][()][l_idx] for k in vert_vars])\n",
+    "        vvars = torch.from_numpy(vvars).view(-1)\n",
+    "\n",
+    "    var = torch.cat((svars, vvars), dim=0).to(torch.float32).clamp(1e-7, 1e7)\n",
+    "\n",
+    "    return var\n",
+    "\n",
+    "\n",
+    "class SampleSpec:\n",
+    "    \"\"\"\n",
+    "    A data class to collect the information used to define a sample.\n",
+    "    \"\"\"\n",
+    "\n",
+    "    def __init__(\n",
+    "        self,\n",
+    "        inputs: tuple[pd.Timestamp, pd.Timestamp],\n",
+    "        lead_time: int,\n",
+    "        target: pd.Timestamp | list[pd.Timestamp],\n",
+    "    ):\n",
+    "        \"\"\"\n",
+    "        Args:\n",
+    "            inputs: Tuple of timestamps. In ascending order.\n",
+    "            lead_time: Lead time. In hours.\n",
+    "            target: Timestamp of the target. Can be before or after the\n",
+    "                inputs.\n",
+    "        \"\"\"\n",
+    "        if not inputs[0] < inputs[1]:\n",
+    "            raise ValueError(\n",
+    "                \"Timestamps in `inputs` should be in strictly ascending order.\"\n",
+    "            )\n",
+    "\n",
+    "        self.inputs = inputs\n",
+    "        self.input_time = (inputs[1] - inputs[0]).total_seconds() / 3600\n",
+    "        self.lead_time = lead_time\n",
+    "        self.target = target\n",
+    "\n",
+    "        self.times = [*inputs, target]\n",
+    "        self.stat_times = [inputs[-1]]\n",
+    "\n",
+    "    @property\n",
+    "    def climatology_info(self) -> tuple[int, int]:\n",
+    "        \"\"\"Get the required climatology info.\n",
+    "\n",
+    "        :return: information required to obtain climatology data. Essentially\n",
+    "            this is the day of the year and hour of the day of the target\n",
+    "            timestamp, with the former restricted to the interval [1, 365].\n",
+    "        :rtype: tuple\n",
+    "        \"\"\"\n",
+    "        return (min(self.target.dayofyear, 365), self.target.hour)\n",
+    "\n",
+    "    @property\n",
+    "    def year(self) -> int:\n",
+    "        return self.inputs[1].year\n",
+    "\n",
+    "    @property\n",
+    "    def dayofyear(self) -> int:\n",
+    "        return self.inputs[1].dayofyear\n",
+    "\n",
+    "    @property\n",
+    "    def hourofday(self) -> int:\n",
+    "        return self.inputs[1].hour\n",
+    "\n",
+    "    def _info_str(self) -> str:\n",
+    "        iso_8601 = \"%Y-%m-%dT%H:%M:%S\"\n",
+    "\n",
+    "        return (\n",
+    "            f\"Issue time: {self.inputs[1].strftime(iso_8601)}\\n\"\n",
+    "            f\"Lead time: {self.lead_time} hours ahead\\n\"\n",
+    "            f\"Input delta: {self.input_time} hours\\n\"\n",
+    "            f\"Target time: {self.target.strftime(iso_8601)}\"\n",
+    "        )\n",
+    "\n",
+    "    @classmethod\n",
+    "    def get(cls, timestamp: pd.Timestamp, dt: int, lead_time: int):\n",
+    "        \"\"\"Given a timestamp and lead time, generates a SampleSpec object\n",
+    "        describing the sample further.\n",
+    "\n",
+    "        Args:\n",
+    "            timestamp: Timestamp of the sample, i.e. the larger of the two\n",
+    "                input timestamps.\n",
+    "            dt: Time between input samples, in hours.\n",
+    "            lead_time: Lead time. In hours.\n",
+    "\n",
+    "        Returns:\n",
+    "            SampleSpec\n",
+    "        \"\"\"  # noqa: E501\n",
+    "        assert dt > 0, \"dt should be positive\"\n",
+    "        lt = pd.to_timedelta(lead_time, unit=\"h\")\n",
+    "        dt = pd.to_timedelta(dt, unit=\"h\")\n",
+    "\n",
+    "        if lead_time >= 0:\n",
+    "            timestamp_target = timestamp + lt\n",
+    "        else:\n",
+    "            timestamp_target = timestamp - dt + lt\n",
+    "\n",
+    "        spec = cls(\n",
+    "            inputs=(timestamp - dt, timestamp),\n",
+    "            lead_time=lead_time,\n",
+    "            target=timestamp_target,\n",
+    "        )\n",
+    "\n",
+    "        return spec\n",
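+    "\n",
+    "    # Worked example (ours): SampleSpec.get(pd.Timestamp(\"2020-01-01T06\"),\n",
+    "    # dt=3, lead_time=6) yields inputs (03:00, 06:00) on 2020-01-01 and a\n",
+    "    # target of 2020-01-01T12:00, since for non-negative lead times the\n",
+    "    # target is measured from the later input timestamp.\n",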
+    "\n",
+    "    def __repr__(self) -> str:\n",
+    "        return self._info_str()\n",
+    "\n",
+    "    def __str__(self) -> str:\n",
+    "        return self._info_str()\n",
+    "\n",
+    "\n",
+    "class Merra2Dataset(Dataset):\n",
+    "    \"\"\"MERRA2 dataset. The dataset unifies surface and vertical data as well\n",
+    "    as optional climatology.\n",
+    "\n",
+    "    Samples come in the form of a dictionary. Not all keys support all\n",
+    "    variables, yet the general ordering of dimensions is\n",
+    "    parameter, level, time, lat, lon\n",
+    "\n",
+    "    Note:\n",
+    "        Data is assumed to be in NetCDF files containing daily data at\n",
+    "        3-hourly intervals. These follow the naming patterns\n",
+    "        MERRA2_sfc_YYYYMMDD.nc and MERRA_pres_YYYYMMDD.nc and can be located\n",
+    "        in two different locations. Optional climatology data comes from\n",
+    "        files climate_surface_doyDOY_hourHOD.nc and\n",
+    "        climate_vertical_doyDOY_hourHOD.nc.\n",
+    "\n",
+    "    Note:\n",
+    "        `_get_valid_timestamps` assembles a set of all timestamps for which\n",
+    "        there is data (at 3-hourly resolution). The result is stored in\n",
+    "        `_valid_timestamps`. `_get_valid_climate_timestamps` does the same\n",
+    "        with climatology data and stores it in `_valid_climate_timestamps`.\n",
+    "\n",
+    "        Based on this information, `samples` generates a list of valid\n",
+    "        samples, stored in `samples`. Here the format is::\n",
+    "\n",
+    "            [\n",
+    "                [\n",
+    "                    (timestamp 1, lead time A),\n",
+    "                    (timestamp 1, lead time B),\n",
+    "                    (timestamp 1, lead time C),\n",
+    "                ],\n",
+    "                [\n",
+    "                    (timestamp 2, lead time D),\n",
+    "                    (timestamp 2, lead time E),\n",
+    "                ]\n",
+    "            ]\n",
+    "\n",
+    "        That is, the outer list iterates over timestamps (init times), the\n",
+    "        inner over lead times. Only valid entries are stored.\n",
+    "    \"\"\"\n",
+    "\n",
+    "    valid_vertical_vars = [\n",
+    "        \"CLOUD\",\n",
+    "        \"H\",\n",
+    "        \"OMEGA\",\n",
+    "        \"PL\",\n",
+    "        \"QI\",\n",
+    "        \"QL\",\n",
+    "        \"QV\",\n",
+    "        \"T\",\n",
+    "        \"U\",\n",
+    "        \"V\",\n",
+    "    ]\n",
+    "    valid_surface_vars = [\n",
+    "        \"EFLUX\",\n",
+    "        \"GWETROOT\",\n",
+    "        \"HFLUX\",\n",
+    "        \"LAI\",\n",
+    "        \"LWGAB\",\n",
+    "        \"LWGEM\",\n",
+    "        \"LWTUP\",\n",
+    "        \"PRECTOT\",\n",
+    "        \"PS\",\n",
+    "        \"QV2M\",\n",
+    "        \"SLP\",\n",
+    "        \"SWGNT\",\n",
+    "        \"SWTNT\",\n",
+    "        \"T2M\",\n",
+    "        \"TQI\",\n",
+    "        \"TQL\",\n",
+    "        \"TQV\",\n",
+    "        \"TS\",\n",
+    "        \"U10M\",\n",
+    "        \"V10M\",\n",
+    "        \"Z0M\",\n",
+    "    ]\n",
+    "    valid_static_surface_vars = [\"FRACI\", \"FRLAND\", \"FROCEAN\", \"PHIS\"]\n",
+    "\n",
+    "    valid_levels = [\n",
+    "        34.0,\n",
+    "        39.0,\n",
+    "        41.0,\n",
+    "        43.0,\n",
+    "        44.0,\n",
+    "        45.0,\n",
+    "        48.0,\n",
+    "        51.0,\n",
+    "        53.0,\n",
+    "        56.0,\n",
+    "        63.0,\n",
+    "        68.0,\n",
+    "        71.0,\n",
+    "        72.0,\n",
+    "    ]\n",
+    "\n",
+    "    timedelta_input = pd.to_timedelta(3, unit=\"h\")\n",
+    "\n",
+    "    def __init__(\n",
+    "        self,\n",
+    "        time_range: tuple[str | pd.Timestamp, str | pd.Timestamp],\n",
+    "        lead_times: list[int],\n",
+    "        input_times: list[int],\n",
+    "        data_path_surface: str | Path,\n",
+    "        data_path_vertical: str | Path,\n",
+    "        climatology_path_surface: str | Path | None = None,\n",
+    "        climatology_path_vertical: str | Path | None = None,\n",
+    "        surface_vars: list[str] | None = None,\n",
+    "        static_surface_vars: list[str] | None = None,\n",
+    "        vertical_vars: list[str] | None = None,\n",
+    "        levels: list[float] | None = None,\n",
+    "        roll_longitudes: int = 0,\n",
+    "        positional_encoding: str = \"absolute\",\n",
+    "        rtype: type = np.float32,\n",
+    "        dtype: torch.dtype = torch.float32,\n",
+    "    ) -> None:\n",
+    "        \"\"\"\n",
+    "        Args:\n",
+    "            data_path_surface: Location of surface data.\n",
+    "            data_path_vertical: Location of vertical data.\n",
+    "            climatology_path_surface: Location of (optional) surface\n",
+    "                climatology.\n",
+    "            climatology_path_vertical: Location of (optional) vertical\n",
+    "                climatology.\n",
+    "            surface_vars: Surface variables.\n",
+    "            static_surface_vars: Static surface variables.\n",
+    "            vertical_vars: Vertical variables.\n",
+    "            levels: Levels.\n",
+    "            time_range: Used to subset data.\n",
+    "            lead_times: Lead times for generalized forecasting.\n",
+    "            roll_longitudes: Set to a non-zero value to roll the data by a\n",
+    "                random amount along the longitude dimension.\n",
+    "            positional_encoding: possible values are\n",
+    "                ['absolute' (default), 'fourier'].\n",
+    "                'absolute' returns lat/lon encoded in 3 dimensions using\n",
+    "                sine and cosine.\n",
+    "                'fourier' returns raw lat/lon to be encoded by the model.\n",
+    "            rtype: numpy data type used during read\n",
+    "            dtype: torch data type of data output\n",
+    "        \"\"\"\n",
+    "\n",
+    "        self.time_range = (\n",
+    "            pd.to_datetime(time_range[0]),\n",
+    "            pd.to_datetime(time_range[1]),\n",
+    "        )\n",
+    "        self.lead_times = lead_times\n",
+    "        self.input_times = input_times\n",
+    "        self._roll_longitudes = list(range(roll_longitudes + 1))\n",
+    "\n",
+    "        self._uvars = vertical_vars or self.valid_vertical_vars\n",
+    "        self._level = levels or self.valid_levels\n",
+    "        self._svars = surface_vars or self.valid_surface_vars\n",
+    "        self._sstat = static_surface_vars or self.valid_static_surface_vars\n",
+    "        self._nuvars = len(self._uvars)\n",
+    "        self._nlevel = len(self._level)\n",
+    "        self._nsvars = len(self._svars)\n",
+    "        self._nsstat = len(self._sstat)\n",
+    "\n",
+    "        self.rtype = rtype\n",
+    "        self.dtype = dtype\n",
+    "\n",
+    "        self.positional_encoding = positional_encoding\n",
+    "\n",
+    "        self._data_path_surface = Path(data_path_surface)\n",
+    "        self._data_path_vertical = Path(data_path_vertical)\n",
+    "\n",
+    "        self.dir_exists(self._data_path_surface)\n",
+    "        self.dir_exists(self._data_path_vertical)\n",
+    "\n",
+    "        self._get_coordinates()\n",
+    "\n",
+    "        # Guard against Path(None), which would raise a TypeError\n",
+    "        self._climatology_path_surface = (\n",
+    "            Path(climatology_path_surface)\n",
+    "            if climatology_path_surface is not None\n",
+    "            else None\n",
+    "        )\n",
+    "        self._climatology_path_vertical = (\n",
+    "            Path(climatology_path_vertical)\n",
+    "            if climatology_path_vertical is not None\n",
+    "            else None\n",
+    "        )\n",
+    "        self._require_clim = (\n",
+    "            self._climatology_path_surface is not None\n",
+    "            and self._climatology_path_vertical is not None\n",
+    "        )\n",
+    "\n",
+    "        if self._require_clim:\n",
+    "            self.dir_exists(self._climatology_path_surface)\n",
+    "            self.dir_exists(self._climatology_path_vertical)\n",
+    "        elif (\n",
+    "            climatology_path_surface is None\n",
+    "            and climatology_path_vertical is None\n",
+    "        ):\n",
+    "            self._climatology_path_surface = None\n",
+    "            self._climatology_path_vertical = None\n",
+    "        else:\n",
+    "            raise ValueError(\n",
+    "                \"Either both or neither of \"\n",
+    "                \"`climatology_path_surface` and \"\n",
+    "                \"`climatology_path_vertical` should be None.\"\n",
+    "            )\n",
+    "\n",
+    "        if not set(self._svars).issubset(set(self.valid_surface_vars)):\n",
+    "            raise ValueError(\"Invalid surface variable.\")\n",
+    "\n",
+    "        if not set(self._sstat).issubset(set(self.valid_static_surface_vars)):\n",
+    "            raise ValueError(\"Invalid static surface variable.\")\n",
+    "\n",
+    "        if not set(self._uvars).issubset(set(self.valid_vertical_vars)):\n",
+    "            raise ValueError(\"Invalid vertical variable.\")\n",
+    "\n",
+    "        if not set(self._level).issubset(set(self.valid_levels)):\n",
+    "            raise ValueError(\"Invalid level.\")\n",
+    "\n",
+    "    @staticmethod\n",
+    "    def dir_exists(path: Path) -> None:\n",
+    "        if not path.is_dir():\n",
+    "            raise ValueError(f\"Directory {path} does not exist.\")\n",
+    "\n",
+    "    @property\n",
+    "    def upper_shape(self) -> tuple:\n",
+    "        \"\"\"Returns the vertical variables shape\n",
+    "\n",
+    "        Returns:\n",
+    "            tuple: vertical variable shape in the following order::\n",
+    "\n",
+    "                [VAR, LEV, TIME, LAT, LON]\n",
+    "        \"\"\"\n",
+    "        return self._nuvars, self._nlevel, 2, 361, 576\n",
+    "\n",
+    "    @property\n",
+    "    def surface_shape(self) -> tuple:\n",
+    "        \"\"\"Returns the surface variables shape\n",
+    "\n",
+    "        Returns:\n",
+    "            tuple: surface shape in the following order::\n",
+    "\n",
+    "                [VAR, TIME, LAT, LON]\n",
+    "        \"\"\"\n",
+    "        return self._nsvars, 2, 361, 576\n",
+    "\n",
+    "    def data_file_surface(self, timestamp: pd.Timestamp) -> Path:\n",
+    "        \"\"\"Build the surface data file name based on timestamp\n",
+    "\n",
+    "        Args:\n",
+    "            timestamp: a timestamp\n",
+    "\n",
+    "        Returns:\n",
+    "            Path: constructed path\n",
+    "        \"\"\"\n",
+    "        pattern = \"MERRA2_sfc_%Y%m%d.nc\"\n",
+    "        data_file = self._data_path_surface / timestamp.strftime(pattern)\n",
+    "        return data_file\n",
+    "\n",
+    "    def data_file_vertical(self, timestamp: pd.Timestamp) -> Path:\n",
+    "        \"\"\"Build the vertical data file name based on timestamp\n",
+    "\n",
+    "        Args:\n",
+    "            timestamp: a timestamp\n",
+    "\n",
+    "        Returns:\n",
+    "            Path: constructed path\n",
+    "        \"\"\"\n",
+    "        pattern = \"MERRA_pres_%Y%m%d.nc\"\n",
+    "        data_file = self._data_path_vertical / timestamp.strftime(pattern)\n",
+    "        return data_file\n",
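+    "\n",
+    "    # Example (ours): for 1 February at 06:00 UTC, dayofyear is 32, so the\n",
+    "    # methods below resolve to climate_surface_doy032_hour06.nc and\n",
+    "    # climate_vertical_doy032_hour06.nc in the respective directories.\n",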
+    "\n",
+    "    def data_file_surface_climate(\n",
+    "        self,\n",
+    "        timestamp: pd.Timestamp | None = None,\n",
+    "        dayofyear: int | None = None,\n",
+    "        hourofday: int | None = None,\n",
+    "    ) -> Path:\n",
+    "        \"\"\"Returns the path to a climatology file based either on a\n",
+    "        timestamp or the dayofyear / hourofday combination.\n",
+    "\n",
+    "        Args:\n",
+    "            timestamp: A timestamp.\n",
+    "            dayofyear: Day of the year. 1 to 366.\n",
+    "            hourofday: Hour of the day. 0 to 23.\n",
+    "\n",
+    "        Returns:\n",
+    "            Path: Path to climatology file.\n",
+    "        \"\"\"\n",
+    "        if timestamp is not None and (\n",
+    "            (dayofyear is not None) or (hourofday is not None)\n",
+    "        ):\n",
+    "            raise ValueError(\n",
+    "                \"Provide either timestamp or both dayofyear and hourofday.\"\n",
+    "            )\n",
+    "\n",
+    "        if timestamp is not None:\n",
+    "            dayofyear = min(timestamp.dayofyear, 365)\n",
+    "            hourofday = timestamp.hour\n",
+    "\n",
+    "        file_name = f\"climate_surface_doy{dayofyear:03}_hour{hourofday:02}.nc\"\n",
+    "        data_file = self._climatology_path_surface / file_name\n",
+    "        return data_file\n",
+    "\n",
+    "    def data_file_vertical_climate(\n",
+    "        self,\n",
+    "        timestamp: pd.Timestamp | None = None,\n",
+    "        dayofyear: int | None = None,\n",
+    "        hourofday: int | None = None,\n",
+    "    ) -> Path:\n",
+    "        \"\"\"Returns the path to a climatology file based either on a\n",
+    "        timestamp or the dayofyear / hourofday combination.\n",
+    "\n",
+    "        Args:\n",
+    "            timestamp: A timestamp.\n",
+    "            dayofyear: Day of the year. 1 to 366.\n",
+    "            hourofday: Hour of the day. 0 to 23.\n",
+    "\n",
+    "        Returns:\n",
+    "            Path: Path to climatology file.\n",
+    "        \"\"\"\n",
+    "        if timestamp is not None and (\n",
+    "            (dayofyear is not None) or (hourofday is not None)\n",
+    "        ):\n",
+    "            raise ValueError(\n",
+    "                \"Provide either timestamp or both dayofyear and hourofday.\"\n",
+    "            )\n",
+    "\n",
+    "        if timestamp is not None:\n",
+    "            dayofyear = min(timestamp.dayofyear, 365)\n",
+    "            hourofday = timestamp.hour\n",
+    "\n",
+    "        file_name = f\"climate_vertical_doy{dayofyear:03}_hour{hourofday:02}.nc\"\n",
+    "        data_file = self._climatology_path_vertical / file_name\n",
+    "        return data_file\n",
+    "\n",
+    "    def _get_coordinates(self) -> None:\n",
+    "        \"\"\"Obtains the coordinates (latitudes and longitudes) from a single\n",
+    "        data file.\n",
+    "        \"\"\"\n",
+    "        timestamp = next(iter(self.valid_timestamps))\n",
+    "\n",
+    "        file = self.data_file_surface(timestamp)\n",
+    "        with h5py.File(file, \"r\", libver=\"latest\") as handle:\n",
+    "            self.lats = lats = handle[\"lat\"][()].astype(self.rtype)\n",
+    "            self.lons = lons = handle[\"lon\"][()].astype(self.rtype)\n",
+    "\n",
+    "        deg_to_rad = np.pi / 180\n",
+    "        self._embed_lat = np.sin(lats * deg_to_rad).reshape(-1, 1)\n",
+    "\n",
+    "        self._embed_lon = np.empty((2, 1, len(lons)), dtype=self.rtype)\n",
+    "        self._embed_lon[0, 0] = np.cos(lons * deg_to_rad)\n",
+    "        self._embed_lon[1, 0] = np.sin(lons * deg_to_rad)\n",
+    "\n",
+    "    @ft.cached_property\n",
+    "    def lats(self) -> np.ndarray:\n",
+    "        timestamp = next(iter(self.valid_timestamps))\n",
+    "\n",
+    "        file = self.data_file_surface(timestamp)\n",
+    "        with h5py.File(file, \"r\", libver=\"latest\") as handle:\n",
+    "            return handle[\"lat\"][()].astype(self.rtype)\n",
+    "\n",
+    "    @ft.cached_property\n",
+    "    def lons(self) -> np.ndarray:\n",
+    "        timestamp = next(iter(self.valid_timestamps))\n",
+    "\n",
+    "        file = self.data_file_surface(timestamp)\n",
+    "        with h5py.File(file, \"r\", libver=\"latest\") as handle:\n",
+    "            return handle[\"lon\"][()].astype(self.rtype)\n",
+    "\n",
+    "    @ft.cached_property\n",
+    "    def position_signal(self) -> np.ndarray:\n",
+    "        \"\"\"Generates the \"position signal\" that is part of the static\n",
+    "        features.\n",
+    "\n",
+    "        Returns:\n",
+    "            Tensor: Torch tensor of dimension (parameter, lat, lon)\n",
+    "                containing sin(lat), cos(lon), sin(lon).\n",
+    "        \"\"\"\n",
+    "\n",
+    "        latitudes, longitudes = np.meshgrid(\n",
+    "            self.lats, self.lons, indexing=\"ij\"\n",
+    "        )\n",
+    "\n",
+    "        if self.positional_encoding == \"absolute\":\n",
+    "            latitudes = latitudes / 360 * 2.0 * np.pi\n",
+    "            longitudes = longitudes / 360 * 2.0 * np.pi\n",
+    "            sur_static = np.stack(\n",
+    "                [np.sin(latitudes), np.cos(longitudes), np.sin(longitudes)],\n",
+    "                axis=0,\n",
+    "            )\n",
+    "        else:\n",
+    "            sur_static = np.stack([latitudes, longitudes], axis=0)\n",
+    "\n",
+    "        sur_static = sur_static.astype(self.rtype)\n",
+    "\n",
+    "        return sur_static\n",
+    "\n",
+    "    @ft.cached_property\n",
+    "    def valid_timestamps(self) -> set[pd.Timestamp]:\n",
+    "        \"\"\"Generates the set of valid timestamps based on available files.\n",
+    "        Only timestamps for which both surface and vertical information is\n",
+    "        available are considered valid.\n",
+    "\n",
+    "        Returns:\n",
+    "            set: set of timestamps\n",
+    "        \"\"\"\n",
+    "\n",
+    "        s_glob = self._data_path_surface.glob(\"MERRA2_sfc_????????.nc\")\n",
+    "        s_files = [os.path.basename(f) for f in s_glob]\n",
+    "        # Note: the vertical files are searched in their own directory.\n",
+    "        v_glob = self._data_path_vertical.glob(\"MERRA_pres_????????.nc\")\n",
+    "        v_files = [os.path.basename(f) for f in v_glob]\n",
+    "\n",
+    "        s_re = re.compile(r\"MERRA2_sfc_(\\d{8}).nc\\Z\")\n",
+    "        v_re = re.compile(r\"MERRA_pres_(\\d{8}).nc\\Z\")\n",
+    "        fmt = \"%Y%m%d\"\n",
+    "\n",
+    "        s_times = {\n",
+    "            (datetime.strptime(m[1], fmt))\n",
+    "            for f in s_files\n",
+    "            if (m := s_re.match(f))\n",
+    "        }\n",
+    "        v_times = {\n",
+    "            (datetime.strptime(m[1], fmt))\n",
+    "            for f in v_files\n",
+    "            if (m := v_re.match(f))\n",
+    "        }\n",
+    "\n",
+    "        times = s_times.intersection(v_times)\n",
+    "\n",
+    "        # Each file contains a day at 3 hour intervals\n",
+    "        times = {\n",
+    "            t + timedelta(hours=i) for i in range(0, 24, 3) for t in times\n",
+    "        }\n",
+    "\n",
+    "        start_time, end_time = self.time_range\n",
+    "        times = {pd.Timestamp(t) for t in times if start_time <= t <= end_time}\n",
+    "\n",
+    "        return times\n",
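+    "\n",
+    "    # Example (ours): a single file pair MERRA2_sfc_20200102.nc /\n",
+    "    # MERRA_pres_20200102.nc contributes the eight timestamps 2020-01-02\n",
+    "    # 00:00, 03:00, ..., 21:00, provided they fall within time_range.\n",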
+    "\n",
+    "    @ft.cached_property\n",
+    "    def valid_climate_timestamps(self) -> set[tuple[int, int]]:\n",
+    "        \"\"\"Generates the set of \"timestamps\" (dayofyear, hourofday) for\n",
+    "        which climatology data is present. Only instances for which both\n",
+    "        surface and vertical data is available are considered valid.\n",
+    "\n",
+    "        Returns:\n",
+    "            set: Set of tuples describing valid climatology instances.\n",
+    "        \"\"\"\n",
+    "        if not self._require_clim:\n",
+    "            return set()\n",
+    "\n",
+    "        s_glob = self._climatology_path_surface.glob(\n",
+    "            \"climate_surface_doy???_hour??.nc\"\n",
+    "        )\n",
+    "        s_files = [os.path.basename(f) for f in s_glob]\n",
+    "\n",
+    "        v_glob = self._climatology_path_vertical.glob(\n",
+    "            \"climate_vertical_doy???_hour??.nc\"\n",
+    "        )\n",
+    "        v_files = [os.path.basename(f) for f in v_glob]\n",
+    "\n",
+    "        s_re = re.compile(r\"climate_surface_doy(\\d{3})_hour(\\d{2}).nc\\Z\")\n",
+    "        v_re = re.compile(r\"climate_vertical_doy(\\d{3})_hour(\\d{2}).nc\\Z\")\n",
+    "\n",
+    "        s_times = {\n",
+    "            (int(m[1]), int(m[2])) for f in s_files if (m := s_re.match(f))\n",
+    "        }\n",
+    "        v_times = {\n",
+    "            (int(m[1]), int(m[2])) for f in v_files if (m := v_re.match(f))\n",
+    "        }\n",
+    "\n",
+    "        times = s_times.intersection(v_times)\n",
+    "\n",
+    "        return times\n",
+    "\n",
+    "    def _data_available(self, spec: SampleSpec) -> bool:\n",
+    "        \"\"\"Checks whether data is available for a given SampleSpec object.\n",
+    "        Does so using the internal sets of available data constructed\n",
+    "        previously, not by checking the file system.\n",
+    "\n",
+    "        Args:\n",
+    "            spec: SampleSpec object as returned by SampleSpec.get\n",
+    "\n",
+    "        Returns:\n",
+    "            bool: True if the data is available.\n",
+    "        \"\"\"\n",
+    "        valid = set(spec.times).issubset(self.valid_timestamps)\n",
+    "\n",
+    "        if self._require_clim:\n",
+    "            sci = spec.climatology_info\n",
+    "            ci = set(sci) if isinstance(sci, list) else set([sci])  # noqa: C405\n",
+    "            valid &= ci.issubset(self.valid_climate_timestamps)\n",
+    "\n",
+    "        return valid\n",
+    "\n",
+    "    @ft.cached_property\n",
+    "    def samples(self) -> list[tuple[pd.Timestamp, int, int]]:\n",
+    "        \"\"\"Generates list of all valid samples.\n",
+    "\n",
+    "        Returns:\n",
+    "            list: List of tuples (timestamp, input time, lead time).\n",
+    "        \"\"\"\n",
+    "        valid_samples = []\n",
+    "        dts = [(it, lt) for it in self.input_times for lt in self.lead_times]\n",
+    "\n",
+    "        for timestamp in sorted(self.valid_timestamps):\n",
+    "            timestamp_samples = []\n",
+    "            for it, lt in dts:\n",
+    "                spec = SampleSpec.get(timestamp, -it, lt)\n",
+    "\n",
+    "                if self._data_available(spec):\n",
+    "                    timestamp_samples.append((timestamp, it, lt))\n",
+    "\n",
+    "            if timestamp_samples:\n",
+    "                valid_samples.append(timestamp_samples)\n",
+    "\n",
+    "        return valid_samples\n",
+    "\n",
+    "    def _to_torch(\n",
+    "        self,\n",
+    "        data: dict[str, Tensor | list[Tensor]],\n",
+    "        dtype: torch.dtype = torch.float32,\n",
+    "    ) -> dict[str, Tensor | list[Tensor]]:\n",
+    "        out = {}\n",
+    "        for k, v in data.items():\n",
+    "            if isinstance(v, list):\n",
+    "                out[k] = [torch.from_numpy(x).to(dtype) for x in v]\n",
+    "            else:\n",
+    "                out[k] = torch.from_numpy(v).to(dtype)\n",
+    "\n",
+    "        return out\n",
+    "\n",
+    "    def _lat_roll(\n",
+    "        self, data: dict[str, Tensor | list[Tensor]], n: int\n",
+    "    ) -> dict[str, Tensor | list[Tensor]]:\n",
+    "        # Rolls along the last (longitude) dimension.\n",
+    "        out = {}\n",
+    "        for k, v in data.items():\n",
+    "            if isinstance(v, list):\n",
+    "                out[k] = [torch.roll(x, shifts=n, dims=-1) for x in v]\n",
+    "            else:\n",
+    "                out[k] = torch.roll(v, shifts=n, dims=-1)\n",
+    "\n",
+    "        return out\n",
handle[\"lat\"]\n", + " lons_surf = handle[\"lon\"]\n", + "\n", + " nll = (len(lats_surf), len(lons_surf))\n", + "\n", + " npos = len(self.position_signal)\n", + " ntime = 4\n", + "\n", + " nstat = npos + ntime + self._nsstat\n", + " data = np.empty((nstat, *nll), dtype=self.rtype)\n", + "\n", + " for i, key in enumerate(self._sstat, start=npos + ntime):\n", + " data[i] = handle[key][()].astype(dtype=self.rtype)\n", + "\n", + " # [possition signal], cos(doy), sin(doy), cos(hod), sin(hod)\n", + " data[0:npos] = self.position_signal\n", + " data[npos + 0] = np.cos(2 * np.pi * doy / 366)\n", + " data[npos + 1] = np.sin(2 * np.pi * doy / 366)\n", + " data[npos + 2] = np.cos(2 * np.pi * hod / 24)\n", + " data[npos + 3] = np.sin(2 * np.pi * hod / 24)\n", + "\n", + " return data\n", + "\n", + " def _read_surface(\n", + " self, tidx: int, nll: tuple[int, int], handle: h5py.File\n", + " ) -> np.ndarray:\n", + " data = np.empty((self._nsvars, *nll), dtype=self.rtype)\n", + "\n", + " for i, key in enumerate(self._svars):\n", + " data[i] = handle[key][tidx][()].astype(dtype=self.rtype)\n", + "\n", + " return data\n", + "\n", + " def _read_levels(\n", + " self, tidx: int, nll: tuple[int, int], handle: h5py.File\n", + " ) -> np.ndarray:\n", + " lvls = handle[\"lev\"][()]\n", + " lidx = self._level_idxs(lvls)\n", + "\n", + " data = np.empty((self._nuvars, self._nlevel, *nll), dtype=self.rtype)\n", + "\n", + " for i, key in enumerate(self._uvars):\n", + " data[i] = handle[key][tidx, lidx][()].astype(dtype=self.rtype)\n", + "\n", + " return np.ascontiguousarray(np.flip(data, axis=1))\n", + "\n", + " def _level_idxs(self, lvls):\n", + " lidx = [np.argwhere(lvls == int(lvl)).item() for lvl in self._level]\n", + " return sorted(lidx)\n", + "\n", + " @staticmethod\n", + " def _date_to_tidx(date: datetime | pd.Timestamp, handle: h5py.File) -> int:\n", + " if isinstance(date, pd.Timestamp):\n", + " date = date.to_pydatetime()\n", + "\n", + " time = handle[\"time\"]\n", + "\n", + " t0 = time.attrs[\"begin_time\"][()].item()\n", + " d0 = f\"{time.attrs['begin_date'][()].item()}\"\n", + "\n", + " offset = datetime.strptime(d0, \"%Y%m%d\")\n", + "\n", + " times = [offset + timedelta(minutes=int(t + t0)) for t in time[()]]\n", + " return times.index(date)\n", + "\n", + " def _read_data(\n", + " self, file_pair: tuple[str, str], date: datetime\n", + " ) -> dict[str, np.ndarray]:\n", + " s_file, v_file = file_pair\n", + "\n", + " with h5py.File(s_file, \"r\", libver=\"latest\") as shandle:\n", + " lats_surf = shandle[\"lat\"]\n", + " lons_surf = shandle[\"lon\"]\n", + "\n", + " nll = (len(lats_surf), len(lons_surf))\n", + "\n", + " tidx = self._date_to_tidx(date, shandle)\n", + "\n", + " sdata = self._read_surface(tidx, nll, shandle)\n", + "\n", + " with h5py.File(v_file, \"r\", libver=\"latest\") as vhandle:\n", + " lats_vert = vhandle[\"lat\"]\n", + " lons_vert = vhandle[\"lon\"]\n", + "\n", + " nll = (len(lats_vert), len(lons_vert))\n", + "\n", + " tidx = self._date_to_tidx(date, vhandle)\n", + "\n", + " vdata = self._read_levels(tidx, nll, vhandle)\n", + "\n", + " data = {\"vert\": vdata, \"surf\": sdata}\n", + "\n", + " return data\n", + "\n", + " def _read_climate(\n", + " self, file_pair: tuple[str, str]\n", + " ) -> dict[str, np.ndarray]:\n", + " s_file, v_file = file_pair\n", + "\n", + " with h5py.File(s_file, \"r\", libver=\"latest\") as shandle:\n", + " lats_surf = shandle[\"lat\"]\n", + " lons_surf = shandle[\"lon\"]\n", + "\n", + " nll = (len(lats_surf), len(lons_surf))\n", + "\n", + " sdata = 
+    "\n",
+    "    def _read_data(\n",
+    "        self, file_pair: tuple[str, str], date: datetime\n",
+    "    ) -> dict[str, np.ndarray]:\n",
+    "        s_file, v_file = file_pair\n",
+    "\n",
+    "        with h5py.File(s_file, \"r\", libver=\"latest\") as shandle:\n",
+    "            lats_surf = shandle[\"lat\"]\n",
+    "            lons_surf = shandle[\"lon\"]\n",
+    "\n",
+    "            nll = (len(lats_surf), len(lons_surf))\n",
+    "\n",
+    "            tidx = self._date_to_tidx(date, shandle)\n",
+    "\n",
+    "            sdata = self._read_surface(tidx, nll, shandle)\n",
+    "\n",
+    "        with h5py.File(v_file, \"r\", libver=\"latest\") as vhandle:\n",
+    "            lats_vert = vhandle[\"lat\"]\n",
+    "            lons_vert = vhandle[\"lon\"]\n",
+    "\n",
+    "            nll = (len(lats_vert), len(lons_vert))\n",
+    "\n",
+    "            tidx = self._date_to_tidx(date, vhandle)\n",
+    "\n",
+    "            vdata = self._read_levels(tidx, nll, vhandle)\n",
+    "\n",
+    "        data = {\"vert\": vdata, \"surf\": sdata}\n",
+    "\n",
+    "        return data\n",
+    "\n",
+    "    def _read_climate(\n",
+    "        self, file_pair: tuple[str, str]\n",
+    "    ) -> dict[str, np.ndarray]:\n",
+    "        s_file, v_file = file_pair\n",
+    "\n",
+    "        with h5py.File(s_file, \"r\", libver=\"latest\") as shandle:\n",
+    "            lats_surf = shandle[\"lat\"]\n",
+    "            lons_surf = shandle[\"lon\"]\n",
+    "\n",
+    "            nll = (len(lats_surf), len(lons_surf))\n",
+    "\n",
+    "            sdata = np.empty((self._nsvars, *nll), dtype=self.rtype)\n",
+    "\n",
+    "            for i, key in enumerate(self._svars):\n",
+    "                sdata[i] = shandle[key][()].astype(dtype=self.rtype)\n",
+    "\n",
+    "        with h5py.File(v_file, \"r\", libver=\"latest\") as vhandle:\n",
+    "            lats_vert = vhandle[\"lat\"]\n",
+    "            lons_vert = vhandle[\"lon\"]\n",
+    "\n",
+    "            nll = (len(lats_vert), len(lons_vert))\n",
+    "\n",
+    "            lvls = vhandle[\"lev\"][()]\n",
+    "            lidx = self._level_idxs(lvls)\n",
+    "\n",
+    "            vdata = np.empty(\n",
+    "                (self._nuvars, self._nlevel, *nll), dtype=self.rtype\n",
+    "            )\n",
+    "\n",
+    "            for i, key in enumerate(self._uvars):\n",
+    "                vdata[i] = vhandle[key][lidx][()].astype(dtype=self.rtype)\n",
+    "\n",
+    "        data = {\n",
+    "            \"vert\": np.ascontiguousarray(np.flip(vdata, axis=1)),\n",
+    "            \"surf\": sdata,\n",
+    "        }\n",
+    "\n",
+    "        return data\n",
+    "\n",
+    "    def get_data_from_sample_spec(\n",
+    "        self, spec: SampleSpec\n",
+    "    ) -> dict[str, Tensor | int | float]:\n",
+    "        \"\"\"Loads and assembles sample data given a SampleSpec object.\n",
+    "\n",
+    "        Args:\n",
+    "            spec (SampleSpec): Full details regarding the data to be loaded\n",
+    "\n",
+    "        Returns:\n",
+    "            dict: Dictionary with the following keys::\n",
+    "\n",
+    "            'sur_static': Torch tensor of shape [parameter, lat, lon]. For\n",
+    "                each pixel (lat, lon), the first 7 dimensions index sin(lat),\n",
+    "                cos(lon), sin(lon), cos(doy), sin(doy), cos(hod), sin(hod).\n",
+    "                Where doy is the day of the year [1, 366] and hod the hour of\n",
+    "                the day [0, 23].\n",
+    "            'sur_vals': Torch tensor of shape [parameter, time, lat, lon].\n",
+    "            'sur_tars': Torch tensor of shape [parameter, time, lat, lon].\n",
+    "            'ulv_vals': Torch tensor of shape [parameter, level, time, lat, lon].\n",
+    "            'ulv_tars': Torch tensor of shape [parameter, level, time, lat, lon].\n",
+    "            'sur_climate': Torch tensor of shape [parameter, lat, lon].\n",
+    "            'ulv_climate': Torch tensor of shape [parameter, level, lat, lon].\n",
+    "            'lead_time': Float.\n",
+    "            'input_time': Float.\n",
+    "        \"\"\"  # noqa: E501\n",
+    "\n",
+    "        # We assemble the unique timestamps for which we need data.\n",
+    "        vals_required = {*spec.times}\n",
+    "        stat_required = {*spec.stat_times}\n",
+    "\n",
+    "        # We assemble the unique data files from which we need value data\n",
+    "        vals_file_map = defaultdict(list)\n",
+    "        for t in vals_required:\n",
+    "            data_files = (\n",
+    "                self.data_file_surface(t),\n",
+    "                self.data_file_vertical(t),\n",
+    "            )\n",
+    "            vals_file_map[data_files].append(t)\n",
+    "\n",
+    "        # We assemble the unique data files from which we need static data\n",
+    "        stat_file_map = defaultdict(list)\n",
+    "        for t in stat_required:\n",
+    "            data_files = (\n",
+    "                self.data_file_surface(t),\n",
+    "                self.data_file_vertical(t),\n",
+    "            )\n",
+    "            stat_file_map[data_files].append(t)\n",
+    "\n",
+    "        # Load the value data\n",
+    "        data = {}\n",
+    "        for data_files, times in vals_file_map.items():\n",
+    "            for time in times:\n",
+    "                data[time] = self._read_data(data_files, time)\n",
+    "\n",
+    "        # Combine times\n",
+    "        sample_data = {}\n",
+    "\n",
+    "        input_upl = np.stack([data[t][\"vert\"] for t in spec.inputs], axis=2)\n",
+    "        sample_data[\"ulv_vals\"] = input_upl\n",
+    "\n",
+    "        target_upl = data[spec.target][\"vert\"]\n",
+    "        sample_data[\"ulv_tars\"] = target_upl[:, :, None]\n",
+    "\n",
+    "        input_sur = np.stack([data[t][\"surf\"] for t in spec.inputs], axis=1)\n",
+    "        sample_data[\"sur_vals\"] = input_sur\n",
+    "\n",
+    "        # Re-insert the singleton time dimension for the target\n",
+    "        target_sur = data[spec.target][\"surf\"]\n",
+    "        sample_data[\"sur_tars\"] = target_sur[:, None]\n",
"\n", + " # Load the static data\n", + " data_files, times = stat_file_map.popitem()\n", + " time = times[0].dayofyear, times[0].hour\n", + " sample_data[\"sur_static\"] = self._read_static_data(\n", + " data_files[0], *time\n", + " )\n", + "\n", + " # If required load the surface data\n", + " if self._require_clim:\n", + " ci_year, ci_hour = spec.climatology_info\n", + "\n", + " surf_file = self.data_file_surface_climate(\n", + " dayofyear=ci_year,\n", + " hourofday=ci_hour,\n", + " )\n", + "\n", + " vert_file = self.data_file_vertical_climate(\n", + " dayofyear=ci_year,\n", + " hourofday=ci_hour,\n", + " )\n", + "\n", + " clim_data = self._read_climate((surf_file, vert_file))\n", + "\n", + " sample_data[\"sur_climate\"] = clim_data[\"surf\"]\n", + " sample_data[\"ulv_climate\"] = clim_data[\"vert\"]\n", + "\n", + " # Move the data from numpy to torch\n", + " sample_data = self._to_torch(sample_data, dtype=self.dtype)\n", + "\n", + " # Optionally roll\n", + " if len(self._roll_longitudes) > 0:\n", + " roll_by = random.choice(self._roll_longitudes)\n", + " sample_data = self._lat_roll(sample_data, roll_by)\n", + "\n", + " # Now that we have rolled, we can add the static data\n", + " sample_data[\"lead_time\"] = spec.lead_time\n", + " sample_data[\"input_time\"] = spec.input_time\n", + "\n", + " return sample_data\n", + "\n", + " def get_data(\n", + " self, timestamp: pd.Timestamp, input_time: int, lead_time: int\n", + " ) -> dict[str, Tensor | int]:\n", + " \"\"\"\n", + " Loads data based on timestamp and lead time.\n", + " Args:\n", + " timestamp: Timestamp.\n", + " input_time: time between input samples.\n", + " lead_time: lead time.\n", + " Returns:\n", + " Dictionary with keys 'sur_static', 'sur_vals', 'sur_tars',\n", + " 'ulv_vals', 'ulv_tars', 'sur_climate', 'ulv_climate',\n", + " 'lead_time'.\n", + " \"\"\"\n", + " spec = SampleSpec.get(timestamp, -input_time, lead_time)\n", + " sample_data = self.get_data_from_sample_spec(spec)\n", + " return sample_data\n", + "\n", + " def __getitem__(self, idx: int) -> dict[str, Tensor | int]:\n", + " \"\"\"\n", + " Loads data based on sample index and random choice of sample.\n", + " Args:\n", + " idx: Sample index.\n", + " Returns:\n", + " Dictionary with keys 'sur_static', 'sur_vals', 'sur_tars',\n", + " 'ulv_vals', 'ulv_tars', 'sur_climate', 'ulv_climate',\n", + " 'lead_time', 'input_time'.\n", + " \"\"\"\n", + " sample_set = self.samples[idx]\n", + " timestamp, input_time, lead_time, *nsteps = random.choice(sample_set)\n", + " sample_data = self.get_data(timestamp, input_time, lead_time)\n", + " return sample_data\n", + "\n", + " def __len__(self):\n", + " return len(self.samples)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "import functools as ft\n", + "import random\n", + "from collections import defaultdict\n", + "from copy import deepcopy\n", + "from pathlib import Path\n", + "\n", + "import numpy as np\n", + "import pandas as pd\n", + "import torch\n", + "from torch import Tensor\n", + "\n", + "# from PrithviWxC.dataloaders.merra2 import Merra2Dataset, SampleSpec\n", + "\n", + "\n", + "def preproc(\n", + " batch: list[dict[str, int | float | Tensor]], padding: dict[tuple[int]]\n", + ") -> dict[str, Tensor]:\n", + " \"\"\"Prepressing function for MERRA2 Dataset\n", + "\n", + " Args:\n", + " batch (dict): List of training samples, each sample should be a\n", + " dictionary with the following keys::\n", + "\n", + " 'sur_static': Numpy array of shape (3, lat, lon). 
+    "                (lat, lon), the first dimension indexes sin(lat),\n",
+    "                cos(lon), sin(lon).\n",
+    "            'sur_vals': Torch tensor of shape (parameter, time, lat, lon).\n",
+    "            'sur_tars': Torch tensor of shape (parameter, time, lat, lon).\n",
+    "            'ulv_vals': Torch tensor of shape (parameter, level, time, lat, lon).\n",
+    "            'ulv_tars': Torch tensor of shape (parameter, level, time, lat, lon).\n",
+    "            'sur_climate': Torch tensor of shape (nsteps, parameter, lat, lon).\n",
+    "            'ulv_climate': Torch tensor of shape (nsteps, parameter, level, lat, lon).\n",
+    "            'lead_time': Integer.\n",
+    "            'input_time': Integer.\n",
+    "\n",
+    "        padding: Dictionary with keys 'level', 'lat', 'lon', each of dim 2.\n",
+    "\n",
+    "    Returns:\n",
+    "        Dictionary with the following keys::\n",
+    "\n",
+    "            'x': [batch, time, parameter, lat, lon]\n",
+    "            'ys': [batch, nsteps, parameter, lat, lon]\n",
+    "            'statics': [batch, nsteps, parameter, lat, lon]\n",
+    "            'lead_time': [batch]\n",
+    "            'input_time': [batch]\n",
+    "            'climates (Optional)': [batch, nsteps, parameter, lat, lon]\n",
+    "\n",
+    "    Note:\n",
+    "        Here, for x and ys, 'parameter' is [surface parameter, upper-level\n",
+    "        parameter x level]. Similarly, for the static information we have\n",
+    "        [sin(lat), cos(lon), sin(lon), cos(doy), sin(doy), cos(hod),\n",
+    "        sin(hod), ...].\n",
+    "    \"\"\"  # noqa: E501\n",
+    "\n",
+    "    b0 = batch[0]\n",
+    "    nbatch = len(batch)\n",
+    "    data_keys = set(b0.keys())\n",
+    "\n",
+    "    essential_keys = {\n",
+    "        \"sur_static\",\n",
+    "        \"sur_vals\",\n",
+    "        \"sur_tars\",\n",
+    "        \"ulv_vals\",\n",
+    "        \"ulv_tars\",\n",
+    "        \"input_time\",\n",
+    "        \"lead_time\",\n",
+    "    }\n",
+    "\n",
+    "    climate_keys = {\n",
+    "        \"sur_climate\",\n",
+    "        \"ulv_climate\",\n",
+    "    }\n",
+    "\n",
+    "    all_keys = essential_keys | climate_keys\n",
+    "\n",
+    "    if not essential_keys.issubset(data_keys):\n",
+    "        raise ValueError(\"Missing essential keys.\")\n",
+    "\n",
+    "    if not data_keys.issubset(all_keys):\n",
+    "        raise ValueError(\"Unexpected keys in batch.\")\n",
+    "\n",
+    "    # Bring all tensors from the batch into a single tensor\n",
+    "    upl_x = torch.empty((nbatch, *b0[\"ulv_vals\"].shape))\n",
+    "    upl_y = torch.empty((nbatch, *b0[\"ulv_tars\"].shape))\n",
+    "\n",
+    "    sur_x = torch.empty((nbatch, *b0[\"sur_vals\"].shape))\n",
+    "    sur_y = torch.empty((nbatch, *b0[\"sur_tars\"].shape))\n",
+    "\n",
+    "    sur_sta = torch.empty((nbatch, *b0[\"sur_static\"].shape))\n",
+    "\n",
+    "    lead_time = torch.empty(\n",
+    "        (nbatch, *b0[\"lead_time\"].shape),\n",
+    "        dtype=torch.float32,\n",
+    "    )\n",
+    "    input_time = torch.empty((nbatch,), dtype=torch.float32)\n",
+    "\n",
+    "    for i, rec in enumerate(batch):\n",
+    "        sur_x[i] = torch.Tensor(rec[\"sur_vals\"])\n",
+    "        sur_y[i] = torch.Tensor(rec[\"sur_tars\"])\n",
+    "\n",
+    "        upl_x[i] = torch.Tensor(rec[\"ulv_vals\"])\n",
+    "        upl_y[i] = torch.Tensor(rec[\"ulv_tars\"])\n",
+    "\n",
+    "        sur_sta[i] = torch.Tensor(rec[\"sur_static\"])\n",
+    "\n",
+    "        lead_time[i] = rec[\"lead_time\"]\n",
+    "        input_time[i] = rec[\"input_time\"]\n",
+    "\n",
+    "    return_value = {\n",
+    "        \"lead_time\": lead_time,\n",
+    "        \"input_time\": input_time,\n",
+    "        \"target_time\": torch.sum(lead_time).reshape(-1),\n",
+    "    }\n",
+    "\n",
+    "    # Reshape (batch, parameter, level, time, lat, lon)\n",
+    "    # -> (batch, time, parameter, level, lat, lon)\n",
+    "    upl_x = upl_x.permute((0, 3, 1, 2, 4, 5))\n",
+    "    upl_y = upl_y.permute((0, 3, 1, 2, 4, 5))\n",
+    "\n",
+    "    # Reshape (batch, parameter, time, lat, lon)\n",
+    "    # -> (batch, time, parameter, lat, lon)\n",
+    "    sur_x = sur_x.permute((0, 2, 1, 3, 4))\n",
+    "    sur_y = sur_y.permute((0, 2, 1, 3, 4))\n",
+    "\n",
+    "    # Pad\n",
+    "    padding_2d = (*padding[\"lon\"], *padding[\"lat\"])\n",
+    "\n",
+    "    def pad2d(x):\n",
+    "        return torch.nn.functional.pad(x, padding_2d, mode=\"constant\", value=0)\n",
+    "\n",
+    "    padding_3d = (*padding[\"lon\"], *padding[\"lat\"], *padding[\"level\"])\n",
+    "\n",
+    "    def pad3d(x):\n",
+    "        return torch.nn.functional.pad(x, padding_3d, mode=\"constant\", value=0)\n",
+    "\n",
+    "    sur_x = pad2d(sur_x).contiguous()\n",
+    "    upl_x = pad3d(upl_x).contiguous()\n",
+    "    sur_y = pad2d(sur_y).contiguous()\n",
+    "    upl_y = pad3d(upl_y).contiguous()\n",
+    "    return_value[\"statics\"] = pad2d(sur_sta).contiguous()\n",
+    "\n",
+    "    # We stack along the combined parameter x level dimension\n",
+    "    return_value[\"x\"] = torch.cat(\n",
+    "        (sur_x, upl_x.view(*upl_x.shape[:2], -1, *upl_x.shape[4:])), dim=2\n",
+    "    )\n",
+    "    return_value[\"ys\"] = torch.cat(\n",
+    "        (sur_y, upl_y.view(*upl_y.shape[:2], -1, *upl_y.shape[4:])), dim=2\n",
+    "    )\n",
+    "\n",
+    "    if climate_keys.issubset(data_keys):\n",
+    "        sur_climate = torch.empty((nbatch, *b0[\"sur_climate\"].shape))\n",
+    "        ulv_climate = torch.empty((nbatch, *b0[\"ulv_climate\"].shape))\n",
+    "        for i, rec in enumerate(batch):\n",
+    "            sur_climate[i] = rec[\"sur_climate\"]\n",
+    "            ulv_climate[i] = rec[\"ulv_climate\"]\n",
+    "        sur_climate = pad2d(sur_climate)\n",
+    "        ulv_climate = pad3d(ulv_climate)\n",
+    "\n",
+    "        ulv_climate = ulv_climate.view(\n",
+    "            *ulv_climate.shape[:2], -1, *ulv_climate.shape[4:]\n",
+    "        )\n",
+    "        return_value[\"climates\"] = torch.cat((sur_climate, ulv_climate), dim=2)\n",
+    "\n",
+    "    return return_value\n",
+    "\n",
+    "\n",
+    "class RolloutSpec(SampleSpec):\n",
+    "    \"\"\"\n",
+    "    A data class to collect the information used to define a rollout sample.\n",
+    "    \"\"\"\n",
+    "\n",
+    "    def __init__(\n",
+    "        self,\n",
+    "        inputs: tuple[pd.Timestamp, pd.Timestamp],\n",
+    "        lead_time: int,\n",
+    "        target: pd.Timestamp,\n",
+    "    ):\n",
+    "        \"\"\"\n",
+    "        Args:\n",
+    "            inputs: Tuple of timestamps. In ascending order.\n",
+    "            lead_time: Lead time. In hours.\n",
+    "            target: Timestamp of the target.\n",
+    "                Must be after the inputs, as rollout only runs forwards.\n",
+    "        \"\"\"\n",
+    "        super().__init__(inputs, lead_time, target)\n",
+    "\n",
+    "        self.dt = dt = pd.Timedelta(lead_time, unit=\"h\")\n",
+    "        self.inters = list(pd.date_range(inputs[-1], target, freq=dt))\n",
+    "\n",
+    "        self._ctimes = deepcopy(self.inters)\n",
+    "        self.stat_times = deepcopy(self.inters)\n",
+    "\n",
+    "        self.stat_times.pop(-1)\n",
+    "        self._ctimes.pop(0)\n",
+    "        self.inters.pop(0)\n",
+    "        self.inters.pop(-1)\n",
+    "\n",
+    "        self.times = [*inputs, *self.inters, target]\n",
+    "        self.targets = self.times[2:]\n",
+    "        self.nsteps = len(self.times) - 2\n",
+    "\n",
+    "    @property\n",
+    "    def climatology_info(self) -> list[tuple[int, int]]:\n",
+    "        \"\"\"Returns information required to obtain climatology data.\n",
+    "\n",
+    "        Returns:\n",
+    "            list: list containing the required climatology info.\n",
+    "        \"\"\"\n",
+    "        return [(min(t.dayofyear, 365), t.hour) for t in self._ctimes]\n",
+    "\n",
+    "    def _info_str(self) -> str:\n",
+    "        iso_8601 = \"%Y-%m-%dT%H:%M:%S\"\n",
+    "\n",
+    "        inter_str = \"\\n\".join(t.strftime(iso_8601) for t in self.inters)\n",
+    "\n",
+    "        return (\n",
+    "            f\"Issue time: {self.inputs[1].strftime(iso_8601)}\\n\"\n",
+    "            f\"Lead time: {self.lead_time} hours ahead\\n\"\n",
+    "            f\"Target time: {self.target.strftime(iso_8601)}\\n\"\n",
+    "            f\"Intermediate times: {inter_str}\"\n",
+    "        )\n",
+    "\n",
+    "    @classmethod\n",
+    "    def get(cls, timestamp: pd.Timestamp, lead_time: int, nsteps: int):\n",
+    "        \"\"\"Given a timestamp and lead time, generates a RolloutSpec object\n",
+    "        describing the sample further.\n",
+    "\n",
+    "        Args:\n",
+    "            timestamp: Timestamp (issue time) of the sample.\n",
+    "            lead_time: Lead time. In hours.\n",
+    "            nsteps: Number of rollout steps.\n",
+    "\n",
+    "        Returns:\n",
+    "            RolloutSpec object.\n",
+    "        \"\"\"\n",
+    "        if lead_time > 0:\n",
+    "            dt = pd.to_timedelta(lead_time, unit=\"h\")\n",
+    "            timestamp_target = timestamp + nsteps * dt\n",
+    "        else:\n",
+    "            raise ValueError(\"Rollout is only forwards\")\n",
+    "\n",
+    "        spec = cls(\n",
+    "            inputs=(timestamp - dt, timestamp),\n",
+    "            lead_time=lead_time,\n",
+    "            target=timestamp_target,\n",
+    "        )\n",
+    "\n",
+    "        return spec\n",
+    "\n",
+    "    def __repr__(self) -> str:\n",
+    "        return self._info_str()\n",
+    "\n",
+    "    def __str__(self) -> str:\n",
+    "        return self._info_str()\n",
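+    "\n",
+    "\n",
+    "# Worked example (ours): RolloutSpec.get(ts, lead_time=3, nsteps=4) yields\n",
+    "# inputs (ts - 3h, ts), intermediate times ts + 3h, ts + 6h and ts + 9h,\n",
+    "# and a final target of ts + 12h, i.e. four model steps of 3 hours each.\n",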
For some reason\n", + " the sign of input_time is the opposite to that in Merra2Dataset\n", + " \"\"\"\n", + "\n", + " input_time_len = 2\n", + "\n", + " def __init__(\n", + " self,\n", + " time_range: tuple[str | pd.Timestamp, str | pd.Timestamp],\n", + " input_time: int | float | pd.Timedelta,\n", + " lead_time: int | float,\n", + " data_path_surface: str | Path,\n", + " data_path_vertical: str | Path,\n", + " climatology_path_surface: str | Path | None,\n", + " climatology_path_vertical: str | Path | None,\n", + " surface_vars: list[str],\n", + " static_surface_vars: list[str],\n", + " vertical_vars: list[str],\n", + " levels: list[float],\n", + " roll_longitudes: int = 0,\n", + " positional_encoding: str = \"absolute\",\n", + " ):\n", + " \"\"\"\n", + " Args:\n", + " time_range: time range to consider when building dataset\n", + " input_time: requested time between inputs\n", + " lead_time: requested time to predict\n", + " data_path_surface: path of surface data directory\n", + " data_path_vertical: path of vertical data directory\n", + " climatology_path_surface: path of surface climatology data\n", + " directory\n", + " climatology_path_vertical: path of vertical climatology data\n", + " directory\n", + " surface_vars: surface variables to return\n", + " static_surface_vars: static surface variables to return\n", + " vertical_vars: vertical variables to return\n", + " levels: MERA2 vertical levels to consider\n", + " roll_longitudes: Whether and now uch to randomly roll latitudes by.\n", + " Defaults to 0.\n", + " positional_encoding: The type of possitional encodeing to use.\n", + " Defaults to \"absolute\".\n", + "\n", + " Raises:\n", + " ValueError: If lead time is not integer multiple of input time\n", + " \"\"\"\n", + "\n", + " self._target_lead = lead_time\n", + "\n", + " if isinstance(input_time, int) or isinstance(input_time, float):\n", + " self.timedelta_input = pd.to_timedelta(-input_time, unit=\"h\")\n", + " else:\n", + " self.timedelta_input = -input_time\n", + "\n", + " lead_times = [self.timedelta_input / pd.to_timedelta(1, unit=\"h\")]\n", + "\n", + " super().__init__(\n", + " time_range,\n", + " lead_times,\n", + " [input_time],\n", + " data_path_surface,\n", + " data_path_vertical,\n", + " climatology_path_surface,\n", + " climatology_path_vertical,\n", + " surface_vars,\n", + " static_surface_vars,\n", + " vertical_vars,\n", + " levels,\n", + " roll_longitudes,\n", + " positional_encoding,\n", + " )\n", + "\n", + " nstep_float = (\n", + " pd.to_timedelta(self._target_lead, unit=\"h\") / self.timedelta_input\n", + " )\n", + "\n", + " if abs(nstep_float % 1) > 1e-5:\n", + " raise ValueError(\"Leadtime not multiple of input time\")\n", + "\n", + " self.nsteps = round(nstep_float)\n", + "\n", + " @ft.cached_property\n", + " def samples(self) -> list[tuple[pd.Timestamp, int, int]]:\n", + " \"\"\"Generates list of all valid samlpes.\n", + "\n", + " Returns:\n", + " List of tuples (timestamp, input time, lead time).\n", + " \"\"\"\n", + " valid_samples = []\n", + "\n", + " for timestamp in sorted(self.valid_timestamps):\n", + " timestamp_samples = []\n", + " for lt in self.lead_times:\n", + " spec = RolloutSpec.get(timestamp, lt, self.nsteps)\n", + "\n", + " if self._data_available(spec):\n", + " timestamp_samples.append(\n", + " (timestamp, self.input_times[0], lt, self.nsteps)\n", + " )\n", + "\n", + " if timestamp_samples:\n", + " valid_samples.append(timestamp_samples)\n", + "\n", + " return valid_samples\n", + "\n", + " def get_data_from_rollout_spec(\n", + " self, 
spec: RolloutSpec\n", + " ) -> dict[str, Tensor | int | float]:\n", + " \"\"\"Loads and assembles sample data given a RolloutSpec object.\n", + "\n", + " Args:\n", + " spec (RolloutSpec): Full details regarding the data to be loaded\n", + " Returns:\n", + " dict: Dictionary with keys 'sur_static', 'sur_vals', 'sur_tars',\n", + " 'ulv_vals', 'ulv_tars', 'sur_climate', 'ulv_climate', 'lead_time',\n", + " 'input_time'. For each, the value is as follows::\n", + "\n", + " {\n", + " 'sur_static': Torch tensor of shape [parameter, lat, lon]. For\n", + " each pixel (lat, lon), the first 7 dimensions index sin(lat),\n", + " cos(lon), sin(lon), cos(doy), sin(doy), cos(hod), sin(hod).\n", + " Where doy is the day of the year [1, 366] and hod the hour of\n", + " the day [0, 23].\n", + " 'sur_vals': Torch tensor of shape [parameter, time, lat, lon].\n", + " 'sur_tars': Torch tensor of shape [parameter, time, lat, lon].\n", + " 'ulv_vals': Torch tensor of shape\n", + " [parameter, level, time, lat, lon].\n", + " 'ulv_tars': Torch tensor of shape\n", + " [nsteps, parameter, level, time, lat, lon].\n", + " 'sur_climate': Torch tensor of shape\n", + " [nsteps, parameter, lat, lon].\n", + " 'ulv_climate': Torch tensor of shape\n", + " [nsteps, parameter, level, lat, lon].\n", + " 'lead_time': Float.\n", + " 'input_time': Float.\n", + " }\n", + "\n", + " \"\"\"\n", + "\n", + " # We assemble the unique timestamps for which we need data.\n", + " vals_required = {*spec.times}\n", + " stat_required = {*spec.stat_times}\n", + "\n", + " # We assemble the unique data files from which we need value data\n", + " vals_file_map = defaultdict(list)\n", + " for t in vals_required:\n", + " data_files = (\n", + " self.data_file_surface(t),\n", + " self.data_file_vertical(t),\n", + " )\n", + " vals_file_map[data_files].append(t)\n", + "\n", + " # We assemble the unique data files from which we need static data\n", + " stat_file_map = defaultdict(list)\n", + " for t in stat_required:\n", + " data_files = (\n", + " self.data_file_surface(t),\n", + " self.data_file_vertical(t),\n", + " )\n", + " stat_file_map[data_files].append(t)\n", + "\n", + " # Load the value data\n", + " data = {}\n", + " for data_files, times in vals_file_map.items():\n", + " for time in times:\n", + " data[time] = self._read_data(data_files, time)\n", + "\n", + " # Load the static data\n", + " stat = {}\n", + " for data_files, times in stat_file_map.items():\n", + " for time in times:\n", + " hod, doy = time.hour, time.dayofyear\n", + " stat[time] = self._read_static_data(data_files[0], hod, doy)\n", + "\n", + " # Combine times\n", + " sample_data = {}\n", + "\n", + " input_upl = np.stack([data[t][\"vert\"] for t in spec.inputs], axis=2)\n", + " sample_data[\"ulv_vals\"] = input_upl\n", + "\n", + " target_upl = np.stack([data[t][\"vert\"] for t in spec.targets], axis=2)\n", + " sample_data[\"ulv_tars\"] = target_upl\n", + "\n", + " input_sur = np.stack([data[t][\"surf\"] for t in spec.inputs], axis=1)\n", + " sample_data[\"sur_vals\"] = input_sur\n", + "\n", + " target_sur = np.stack([data[t][\"surf\"] for t in spec.targets], axis=1)\n", + " sample_data[\"sur_tars\"] = target_sur\n", + "\n", + " # Load the static data\n", + " static = np.stack([stat[t] for t in spec.stat_times], axis=0)\n", + " sample_data[\"sur_static\"] = static\n", + "\n", + " # If required, load the climate data\n", + " if self._require_clim:\n", + " clim_data = {}\n", + " for ci in spec.climatology_info:\n", + " ci_year, ci_hour = ci\n", + "\n", + " surf_file = 
self.data_file_surface_climate(\n", + " dayofyear=ci_year,\n", + " hourofday=ci_hour,\n", + " )\n", + "\n", + " vert_file = self.data_file_vertical_climate(\n", + " dayofyear=ci_year,\n", + " hourofday=ci_hour,\n", + " )\n", + "\n", + " clim_data[ci] = self._read_climate((surf_file, vert_file))\n", + "\n", + " clim_surf = [clim_data[ci][\"surf\"] for ci in spec.climatology_info]\n", + " sample_data[\"sur_climate\"] = np.stack(clim_surf, axis=0)\n", + "\n", + " clim_vert = [clim_data[ci][\"vert\"] for ci in spec.climatology_info]\n", + " sample_data[\"ulv_climate\"] = np.stack(clim_vert, axis=0)\n", + "\n", + " # Move the data from numpy to torch\n", + " sample_data = self._to_torch(sample_data, dtype=self.dtype)\n", + "\n", + " # Optionally roll\n", + " if len(self._roll_longitudes) > 0:\n", + " roll_by = random.choice(self._roll_longitudes)\n", + " sample_data = self._lat_roll(sample_data, roll_by)\n", + "\n", + " # Now that we have rolled, we can add the lead time and input time\n", + " lt = torch.tensor([spec.lead_time] * self.nsteps).to(self.dtype)\n", + " sample_data[\"lead_time\"] = lt\n", + " sample_data[\"input_time\"] = spec.input_time\n", + "\n", + " return sample_data\n", + "\n", + " def get_data(\n", + " self, timestamp: pd.Timestamp, *args, **kwargs\n", + " ) -> dict[str, Tensor | int]:\n", + " \"\"\"Loads data based on timestamp and lead time.\n", + "\n", + " Args:\n", + " timestamp: Timestamp.\n", + " Returns:\n", + " Dictionary with keys 'sur_static', 'sur_vals', 'sur_tars',\n", + " 'ulv_vals', 'ulv_tars', 'sur_climate', 'ulv_climate',\n", + " 'lead_time', 'input_time'\n", + " \"\"\"\n", + " rollout_spec = RolloutSpec.get(\n", + " timestamp, self.lead_times[0], self.nsteps\n", + " )\n", + " sample_data = self.get_data_from_rollout_spec(rollout_spec)\n", + " return sample_data\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "# from PrithviWxC.dataloaders.merra2_rollout import Merra2RolloutDataset\n", + "\n", + "dataset = Merra2RolloutDataset(\n", + " time_range=time_range,\n", + " lead_time=lead_time,\n", + " input_time=input_time,\n", + " data_path_surface=surf_dir,\n", + " data_path_vertical=vert_dir,\n", + " climatology_path_surface=surf_clim_dir,\n", + " climatology_path_vertical=vert_clim_dir,\n", + " surface_vars=surface_vars,\n", + " static_surface_vars=static_surface_vars,\n", + " vertical_vars=vertical_vars,\n", + " levels=levels,\n", + " positional_encoding=positional_encoding,\n", + ")\n", + "assert len(dataset) > 0, \"There doesn't seem to be any valid data.\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Model\n", + "### Scalers and other hyperparameters\n", + "Again, this setup is similar to before.\n",
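+ "\n",
+ "As a rough sketch of what these scalers do (the exact handling lives inside the\n",
+ "model's forward pass further down), the `musigma` files standardize the inputs\n",
+ "per parameter, while the anomaly variances rescale the outputs. Something along\n",
+ "these lines, where the epsilon is a small stabilizing constant read from the\n",
+ "config (`1e-5` here is just a placeholder):\n",
+ "\n",
+ "```python\n",
+ "# Minimal sketch, assuming x has shape [batch, time, parameter, lat, lon] and\n",
+ "# in_mu / in_sig hold one entry per parameter, as loaded below.\n",
+ "x_scaled = (x - in_mu.view(1, 1, -1, 1, 1)) / (in_sig.view(1, 1, -1, 1, 1) + 1e-5)\n",
+ "```"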
+ ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "# from PrithviWxC.dataloaders.merra2 import (\n", + "# input_scalers,\n", + "# output_scalers,\n", + "# static_input_scalers,\n", + "# )\n", + "\n", + "surf_in_scal_path = Path(\"./climatology/musigma_surface.nc\")\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.2300m.v1\",\n", + " filename=f\"climatology/{surf_in_scal_path.name}\",\n", + " local_dir=\".\",\n", + ")\n", + "\n", + "vert_in_scal_path = Path(\"./climatology/musigma_vertical.nc\")\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.2300m.v1\",\n", + " filename=f\"climatology/{vert_in_scal_path.name}\",\n", + " local_dir=\".\",\n", + ")\n", + "\n", + "surf_out_scal_path = Path(\"./climatology/anomaly_variance_surface.nc\")\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.2300m.v1\",\n", + " filename=f\"climatology/{surf_out_scal_path.name}\",\n", + " local_dir=\".\",\n", + ")\n", + "\n", + "vert_out_scal_path = Path(\"./climatology/anomaly_variance_vertical.nc\")\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.2300m.v1\",\n", + " filename=f\"climatology/{vert_out_scal_path.name}\",\n", + " local_dir=\".\",\n", + ")\n", + "\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.rollout.2300m.v1\",\n", + " filename=\"config.yaml\",\n", + " local_dir=\".\",\n", + ")\n", + "\n", + "in_mu, in_sig = input_scalers(\n", + " surface_vars,\n", + " vertical_vars,\n", + " levels,\n", + " surf_in_scal_path,\n", + " vert_in_scal_path,\n", + ")\n", + "\n", + "output_sig = output_scalers(\n", + " surface_vars,\n", + " vertical_vars,\n", + " levels,\n", + " surf_out_scal_path,\n", + " vert_out_scal_path,\n", + ")\n", + "\n", + "static_mu, static_sig = static_input_scalers(\n", + " surf_in_scal_path,\n", + " static_surface_vars,\n", + ")\n", + "\n", + "residual = \"none\"\n", + "masking_mode = \"local\"\n", + "decoder_shifting = True\n", + "masking_ratio = 0.99" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Model init\n", + "We can now build the model and load the pretrained weights. Note that you\n", + "should use the rollout version of the weights.\n",
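+ "\n",
+ "Before building the model, it can be worth a quick sanity check that the\n",
+ "downloaded checkpoint is the one you expect. A hedged sketch (the\n",
+ "`'model_state'` key matches what the loading cell below checks for; other key\n",
+ "names are not guaranteed):\n",
+ "\n",
+ "```python\n",
+ "import torch\n",
+ "\n",
+ "sd = torch.load(\n",
+ "    \"./weights/prithvi.wxc.rollout.2300m.v1.pt\",\n",
+ "    map_location=\"cpu\",\n",
+ "    weights_only=False,\n",
+ ")\n",
+ "sd = sd.get(\"model_state\", sd)  # some checkpoints nest the weights\n",
+ "print(f\"{len(sd)} tensors, e.g. {next(iter(sd))}\")\n",
+ "```"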
+ ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'weights\\\\prithvi.wxc.rollout.2300m.v1.pt'" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "weights_path = Path(\"./weights/prithvi.wxc.rollout.2300m.v1.pt\")\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.rollout.2300m.v1\",\n", + " filename=weights_path.name,\n", + " local_dir=\"./weights\",\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "from functools import cached_property\n", + "from importlib.metadata import version\n", + "\n", + "from torch import Tensor\n", + "from torch.utils.checkpoint import checkpoint\n", + "\n", + "if version(\"torch\") > \"2.3.0\":\n", + " from torch.nn.attention import SDPBackend, sdpa_kernel\n", + "import numpy as np\n", + "import torch\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "\n", + "\n", + "# DropPath code is straight from timm\n", + "# (https://huggingface.co/spaces/Roll20/pet_score/blame/main/lib/timm/models/layers/drop.py)\n", + "def drop_path(\n", + " x: Tensor,\n", + " drop_prob: float = 0.0,\n", + " training: bool = False,\n", + " scale_by_keep: bool = True,\n", + ") -> Tensor:\n", + " \"\"\"Drop paths (Stochastic Depth) per sample (when applied in main path of\n", + " residual blocks). Taken from timm.\n", + "\n", + " Args:\n", + " x (Tensor): Input tensor.\n", + " drop_prob (float): Probability of dropping `x`, defaults to 0.\n", + " training (bool): Whether the model is in training or eval mode,\n", + " defaults to False.\n", + " scale_by_keep (bool): Whether the output should be scaled by\n", + " (`1 - drop_prob`), defaults to True.\n", + " Returns:\n", + " Tensor: Tensor whose paths may have been randomly dropped with\n", + " probability `drop_prob`\n", + " \"\"\"\n", + " if drop_prob == 0.0 or not training:\n", + " return x\n", + " keep_prob = 1 - drop_prob\n", + " shape = (x.shape[0],) + (1,) * (x.ndim - 1)\n", + " random_tensor = x.new_empty(shape).bernoulli_(keep_prob)\n", + " if keep_prob > 0.0 and scale_by_keep:\n", + " random_tensor.div_(keep_prob)\n", + " return x * random_tensor\n", + "\n", + "\n", + "class DropPath(nn.Module):\n", + " \"\"\"\n", + " Drop paths (Stochastic Depth) per sample (when applied in main path of\n", + " residual blocks).\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self, drop_prob: float | None = None, scale_by_keep: bool = True\n", + " ) -> None:\n", + " super(DropPath, self).__init__()\n", + " self.drop_prob = drop_prob\n", + " self.scale_by_keep = scale_by_keep\n", + "\n", + " def forward(self, x: Tensor) -> Tensor:\n", + " \"\"\"Runs drop path on input tensor\n", + "\n", + " Args:\n", + " x: input\n", + "\n", + " Returns:\n", + " tensor: output after drop_path\n", + " \"\"\"\n", + " return drop_path(x, self.drop_prob, self.training, self.scale_by_keep)\n", + "\n", + "\n", + "class Mlp(nn.Module):\n", + " \"\"\"\n", + " Multi layer perceptron.\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self, features: int, hidden_features: int, dropout: float = 0.0\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " features: Input/output dimension.\n", + " hidden_features: Hidden dimension.\n", + " dropout: Dropout.\n", + " \"\"\"\n", + " super().__init__()\n", + " self.net = nn.Sequential(\n", + " nn.Linear(features, hidden_features),\n", + " nn.GELU(),\n", + " nn.Dropout(dropout),\n", + " nn.Linear(hidden_features, features),\n", + " nn.Dropout(dropout),\n", + " )\n", + "\n", + " def forward(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Args:\n", + " x (Tensor): Tensor of shape [..., channel]\n", + " Returns:\n", + " Tensor: Tensor of same shape as x.\n", + " \"\"\"\n", + " return self.net(x)\n", + "\n", + "\n", + "class LayerNormPassThrough(nn.LayerNorm):\n", + " \"\"\"Normalising layer that allows the attention mask to be passed through\"\"\"\n", + "\n", + " def __init__(self, *args, **kwargs):\n", + " super().__init__(*args, **kwargs)\n", + "\n", + " def forward(\n", + " self, d: tuple[Tensor, Tensor | None]\n", + " ) -> tuple[Tensor, Tensor | None]:\n", + " \"\"\"Forward function\n", + "\n", + " Args:\n", + " d (tuple): tuple of the data tensor and the attention mask\n", + " Returns:\n", + " output (Tensor): normalised output data\n", + " attn_mask (Tensor): the attention mask that was passed in\n", + " \"\"\"\n", + " input, attn_mask = d\n", + " output = F.layer_norm(\n", + " input, self.normalized_shape, self.weight, self.bias, self.eps\n", + " )\n", + " return output, attn_mask\n", + "\n", + "\n", + "class MultiheadAttention(nn.Module):\n", + " \"\"\"Multihead attention layer for inputs of shape\n", + " [..., sequence, features].\n", + " \"\"\"\n", + "\n", + " def __init__(self, features: int, n_heads: int, dropout: float) -> None:\n", + " \"\"\"\n", + " Args:\n", + " features: Number of features for inputs to the layer.\n", + " n_heads: Number of attention heads. Should be a factor of features.\n", + " (I.e. the layer uses features // n_heads.)\n", + " dropout: Dropout.\n", + " \"\"\" # noqa: E501\n", + " super().__init__()\n", + "\n", + " if (features % n_heads) != 0:\n", + " raise ValueError(\n", + " f\"Features '{features}' is not divisible by heads '{n_heads}'.\"\n", + " )\n", + "\n", + " self.features = features\n", + " self.n_heads = n_heads\n", + " self.dropout = dropout\n", + "\n", + " self.qkv_layer = torch.nn.Linear(features, features * 3, bias=False)\n", + " self.w_layer = torch.nn.Linear(features, features, bias=False)\n", + "\n", + " def forward(self, d: tuple[Tensor, Tensor | None]) -> Tensor:\n", + " \"\"\"\n", + " Args:\n", + " d (tuple): tuple containing Tensor of shape [..., sequence, features] and the attention mask\n", + " Returns:\n", + " Tensor: Tensor of shape [..., sequence, features]\n", + " \"\"\" # noqa: E501\n", + " x, attn_mask = d\n", + "\n", + " if not x.shape[-1] == self.features:\n", + " raise ValueError(\n", + " f\"Expecting tensor with last dimension size {self.features}.\"\n", + " )\n", + "\n", + " passenger_dims = x.shape[:-2]\n", + " B = passenger_dims.numel()\n", + " S = x.shape[-2]\n", + " C = x.shape[-1]\n", + " x = x.reshape(B, S, C)\n", + "\n", + " # x [B, S, C]\n", + " # q, k, v [B, H, S, C/H]\n", + " q, k, v = (\n", + " self.qkv_layer(x)\n", + " .view(B, S, self.n_heads, 3 * (C // self.n_heads))\n", + " .transpose(1, 2)\n", + " .chunk(chunks=3, dim=3)\n", + " )\n", + "\n", + " # Let us enforce either flash (A100+) or memory efficient attention.\n", + " if version(\"torch\") > \"2.3.0\":\n", + " with sdpa_kernel(\n", + " [SDPBackend.FLASH_ATTENTION, SDPBackend.EFFICIENT_ATTENTION]\n", + " ):\n", + " # x [B, H, S, C//H]\n", + " x = F.scaled_dot_product_attention(\n", + " q, k, v, attn_mask=attn_mask, dropout_p=self.dropout\n", + " )\n", + " else:\n", + " with torch.backends.cuda.sdp_kernel(\n", + " enable_flash=True, enable_math=False, enable_mem_efficient=True\n", + " ):\n", + " # x [B, H, S, C//H]\n", + " x = 
F.scaled_dot_product_attention(\n", + " q, k, v, attn_mask=attn_mask, dropout_p=self.dropout\n", + " )\n", + "\n", + " # x [B, S, C]\n", + " x = x.transpose(1, 2).view(B, S, C)\n", + "\n", + " # x [B, S, C]\n", + " x = self.w_layer(x)\n", + "\n", + " # Back to input shape\n", + " x = x.view(*passenger_dims, S, self.features)\n", + " return x\n", + "\n", + "\n", + "class Transformer(nn.Module):\n", + " \"\"\"\n", + " Transformer for inputs of shape [..., S, features].\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " features: int,\n", + " mlp_multiplier: int,\n", + " n_heads: int,\n", + " dropout: float,\n", + " drop_path: float,\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " features: Number of features for inputs to the layer.\n", + " mlp_multiplier: Model uses features*mlp_multiplier hidden units.\n", + " n_heads: Number of attention heads. Should be a factor of features.\n", + " (I.e. the layer uses features // n_heads.)\n", + " dropout: Dropout.\n", + " drop_path: DropPath.\n", + " \"\"\"\n", + " super().__init__()\n", + "\n", + " self.features = features\n", + " self.mlp_multiplier = mlp_multiplier\n", + " self.n_heads = n_heads\n", + " self.dropout = dropout\n", + " self.drop_path = (\n", + " DropPath(drop_path) if drop_path > 0.0 else nn.Identity()\n", + " )\n", + "\n", + " self.attention = nn.Sequential(\n", + " LayerNormPassThrough(features),\n", + " MultiheadAttention(features, n_heads, dropout),\n", + " )\n", + "\n", + " self.ff = nn.Sequential(\n", + " nn.LayerNorm(features),\n", + " Mlp(\n", + " features=features,\n", + " hidden_features=features * mlp_multiplier,\n", + " dropout=dropout,\n", + " ),\n", + " )\n", + "\n", + " def forward(self, d: tuple[Tensor, Tensor | None]) -> Tensor:\n", + " \"\"\"\n", + " Args:\n", + " d (tuple): tuple of a tensor of shape [..., sequence, features]\n", + " and the attention mask\n", + " Returns:\n", + " Tensor: Tensor of shape [..., sequence, features]\n", + " \"\"\"\n", + " x, attn_mask = d\n", + " if not x.shape[-1] == self.features:\n", + " raise ValueError(\n", + " f\"Expecting tensor with last dimension size {self.features}.\"\n", + " )\n", + "\n", + " attention_x = self.attention(d)\n", + "\n", + " x = x + self.drop_path(attention_x)\n", + " x = x + self.drop_path(self.ff(x))\n", + "\n", + " return x\n", + "\n", + "\n", + "class _Shift(nn.Module):\n", + " \"\"\"Private base class for the shifter. This allows some behaviour to be\n", + " easily handled when the shifter isn't used.\n", + " \"\"\"\n", + "\n", + " def __init__(self):\n", + " super().__init__()\n", + "\n", + " self._shifted = False\n", + "\n", + " @torch.no_grad()\n", + " def reset(self) -> None:\n", + " \"\"\"\n", + " Resets the bool tracking whether the data is shifted\n", + " \"\"\"\n", + " self._shifted: bool = False\n", + "\n", + " def forward(self, data: Tensor) -> tuple[Tensor, dict[bool, None]]:\n", + " return data, {True: None, False: None}\n", + "\n", + "\n", + "class SWINShift(_Shift):\n", + " \"\"\"\n", + " Handles the shifting of patches similar to how SWIN works. However, if we\n", + " shift the latitudes then the poles will wrap, and potentially that might be\n", + " problematic. 
The position tokens should handle it, but masking is safer.\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " mu_shape: tuple[int, int],\n", + " global_shape: tuple[int, int],\n", + " local_shape: tuple[int, int],\n", + " patch_shape: tuple[int, int],\n", + " n_context_tokens: int = 2,\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " mu_shape: the shape of the masking units\n", + " global_shape: number of global patches in lat and lon\n", + " local_shape: size of the local patches\n", + " patch_shape: patch size\n", + " n_context_tokens: number of additional context tokens at start of\n", + " _each_ local sequence\n", + " \"\"\"\n", + " super().__init__()\n", + "\n", + " self._mu_shape = ms = mu_shape\n", + " self._g_shape = gs = global_shape\n", + " self._l_shape = ls = local_shape\n", + " self._p_shape = ps = patch_shape\n", + " self._lat_patch = (gs[0], ls[0], gs[1], ls[1])\n", + " self._n_context_tokens = n_context_tokens\n", + "\n", + " self._g_shift_to = tuple(\n", + " int(0.5 * x / p) for x, p in zip(ms, ps, strict=False)\n", + " )\n", + " self._g_shift_from = tuple(\n", + " -int(0.5 * x / p) for x, p in zip(ms, ps, strict=False)\n", + " )\n", + "\n", + " # Define the attention masks for the shifted MaxViT.\n", + " nglobal = global_shape[0] * global_shape[1]\n", + " nlocal = (\n", + " local_shape[0] * local_shape[1] + self._n_context_tokens\n", + " ) # the context tokens account for the lead time\n", + "\n", + " lm = torch.ones((nglobal, 1, nlocal, nlocal), dtype=bool)\n", + " mwidth = int(0.5 * local_shape[1]) * local_shape[0]\n", + " lm[\n", + " : gs[1],\n", + " :,\n", + " self._n_context_tokens : mwidth + self._n_context_tokens,\n", + " self._n_context_tokens : mwidth + self._n_context_tokens,\n", + " ] = False\n", + " self.register_buffer(\"local_mask\", lm)\n", + "\n", + " gm = torch.ones((nlocal, 1, nglobal, nglobal), dtype=bool)\n", + " gm[: int(0.5 * ls[1]) * ls[0], :, : gs[1], : gs[1]] = False\n", + " self.register_buffer(\"global_mask\", gm)\n", + "\n", + " def _to_grid_global(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Shuffle and reshape the data from the global/local setting back to the\n", + " lat/lon grid setting\n", + " Args:\n", + " x: the data tensor to be shuffled.\n", + " Returns:\n", + " x: data in the lat/lon grid setting\n", + " \"\"\"\n", + " nbatch, *other = x.shape\n", + "\n", + " y1 = x.view(nbatch, *self._g_shape, *self._l_shape, -1)\n", + " y2 = y1.permute(0, 5, 1, 3, 2, 4).contiguous()\n", + "\n", + " s = y2.shape\n", + " return y2.view((nbatch, -1, s[2] * s[3], s[4] * s[5]))\n", + "\n", + " def _to_grid_local(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Shuffle and reshape the data from the local/global setting to the\n", + " lat/lon grid setting\n", + " Args:\n", + " x: the data tensor to be shuffled.\n", + " Returns:\n", + " x: data in the lat/lon setting.\n", + " \"\"\"\n", + " x = x.transpose(2, 1).contiguous()\n", + " return self._to_grid_global(x)\n", + "\n", + " def _from_grid_global(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Shuffle and reshape the data from the lat/lon grid to the global/local\n", + " setting\n", + " Args:\n", + " x: the data tensor to be shuffled.\n", + " Returns:\n", + " x: data in the global/local setting\n", + " \"\"\"\n", + " nbatch, *other = x.shape\n", + "\n", + " z1 = x.view(nbatch, -1, *self._lat_patch)\n", + " z2 = z1.permute(0, 2, 4, 3, 5, 1).contiguous()\n", + "\n", + " s = z2.shape\n", + " return z2.view(nbatch, s[1] * s[2], s[3] * s[4], -1)\n", + "\n", + " def _from_grid_local(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Shuffle and reshape the data from the lat/lon grid to the local/global\n", + " setting\n", + " Args:\n", + " x: the data tensor to be shuffled.\n", + " Returns:\n", + " x: data in the local/global setting\n", + " \"\"\"\n", + " x = self._from_grid_global(x)\n", + " return x.transpose(2, 1).contiguous()\n", + "\n", + " def _shift(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Shifts data in the gridded lat/lon setting by half the mask unit shape\n", + " Args:\n", + " x: data to be shifted\n", + " Returns:\n", + " x: either the shifted or unshifted data\n", + " \"\"\"\n", + " shift = self._g_shift_from if self._shifted else self._g_shift_to\n", + " x_shifted = torch.roll(x, shift, (-2, -1))\n", + "\n", + " self._shifted = not self._shifted\n", + " return x_shifted\n", + "\n", + " def _sep_lt(self, x: Tensor) -> tuple[Tensor, Tensor]:\n", + " \"\"\"\n", + " Separate off the leadtime from the local patches\n", + " Args:\n", + " x: data to have leadtime removed from\n", + " Returns:\n", + " lt: leadtime\n", + " x: data without the lead time in the local patch\n", + " \"\"\"\n", + " lt_it = x[:, : self._n_context_tokens, :, :]\n", + " x_stripped = x[:, self._n_context_tokens :, :, :]\n", + "\n", + " return lt_it, x_stripped\n", + "\n", + " def forward(self, data: Tensor) -> tuple[Tensor, dict[bool, Tensor | None]]:\n", + " \"\"\"Shift or unshift the data depending on whether the data is\n", + " already shifted, as tracked by self._shifted.\n", + "\n", + " Args:\n", + " data: data to be shifted\n", + " Returns:\n", + " tuple: the (un)shifted data and the matching attention masks\n", + " \"\"\"\n", + " lt, x = self._sep_lt(data)\n", + "\n", + " x_grid = self._to_grid_local(x)\n", + " x_shifted = self._shift(x_grid)\n", + " x_patched = self._from_grid_local(x_shifted)\n", + "\n", + " # Mask has to be repeated based on batch size\n", + " n_batch = x_grid.shape[0]\n", + " local_rep = [n_batch] + [1] * (self.local_mask.ndim - 1)\n", + " global_rep = [n_batch] + [1] * (self.global_mask.ndim - 1)\n", + "\n", + " if self._shifted:\n", + " attn_mask = {\n", + " True: self.local_mask.repeat(local_rep),\n", + " False: self.global_mask.repeat(global_rep),\n", + " }\n", + " else:\n", + " attn_mask = {True: None, False: None}\n", + "\n", + " return torch.cat((lt, x_patched), axis=1), attn_mask\n", + "\n", + "\n", + "class LocalGlobalLocalBlock(nn.Module):\n", + " \"\"\"\n", + " Applies alternating block and grid attention. Given a parameter n_blocks,\n", + " the entire module contains 2*n_blocks+1 transformer blocks. The first,\n", + " third, ..., last apply local (block) attention. The second, fourth, ...\n", + " global (grid) attention.\n", + "\n", + " This is heavily inspired by\n", + " Tu et al. \"MaxViT: Multi-Axis Vision Transformer\"\n", + " (https://arxiv.org/abs/2204.01697).\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " features: int,\n", + " mlp_multiplier: int,\n", + " n_heads: int,\n", + " dropout: float,\n", + " n_blocks: int,\n", + " drop_path: float,\n", + " shifter: nn.Module | None = None,\n", + " checkpoint: list[int] | None = None,\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " features: Number of features for inputs to the layer.\n", + " mlp_multiplier: Model uses features*mlp_multiplier hidden units.\n", + " n_heads: Number of attention heads. Should be a factor of features.\n", + " (I.e. 
the layer uses features // n_heads.)\n", + " dropout: Dropout.\n", + " drop_path: DropPath.\n", + " n_blocks: Number of local-global transformer pairs.\n", + " \"\"\"\n", + " super().__init__()\n", + "\n", + " self.features = features\n", + " self.mlp_multiplier = mlp_multiplier\n", + " self.n_heads = n_heads\n", + " self.dropout = dropout\n", + " self.drop_path = drop_path\n", + " self.n_blocks = n_blocks\n", + " self._checkpoint = checkpoint or []\n", + "\n", + " if not all(0 <= c < 2 * n_blocks + 1 for c in self._checkpoint):\n", + " raise ValueError(\n", + " \"Checkpoints should be 0 <= i < 2*n_blocks+1. \"\n", + " f\"{self._checkpoint=}.\"\n", + " )\n", + "\n", + " self.transformers = nn.ModuleList(\n", + " [\n", + " Transformer(\n", + " features=features,\n", + " mlp_multiplier=mlp_multiplier,\n", + " n_heads=n_heads,\n", + " dropout=dropout,\n", + " drop_path=drop_path,\n", + " )\n", + " for _ in range(2 * n_blocks + 1)\n", + " ]\n", + " )\n", + "\n", + " self.evaluator = [\n", + " self._checkpoint_wrapper\n", + " if i in self._checkpoint\n", + " else lambda m, x: m(x)\n", + " for i, _ in enumerate(self.transformers)\n", + " ]\n", + "\n", + " self.shifter = shifter or _Shift()\n", + "\n", + " @staticmethod\n", + " def _checkpoint_wrapper(\n", + " model: nn.Module, data: tuple[Tensor, Tensor | None]\n", + " ) -> Tensor:\n", + " return checkpoint(model, data, use_reentrant=False)\n", + "\n", + " def forward(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Args:\n", + " x: Tensor of shape::\n", + "\n", + " [batch, global_sequence, local_sequence, features]\n", + "\n", + " Returns:\n", + " Tensor: Tensor of shape::\n", + "\n", + " [batch, global_sequence, local_sequence, features]\n", + " \"\"\"\n", + " if x.shape[-1] != self.features:\n", + " raise ValueError(\n", + " f\"Expecting tensor with last dimension size {self.features}.\"\n", + " )\n", + " if x.ndim != 4:\n", + " raise ValueError(\n", + " f\"Expecting tensor with exactly four dimensions. 
{x.shape=}.\"\n", + " )\n", + "\n", + " self.shifter.reset()\n", + " local: bool = True\n", + " attn_mask = {True: None, False: None}\n", + "\n", + " transformer_iter = zip(self.evaluator, self.transformers, strict=False)\n", + "\n", + " # First local block\n", + " evaluator, transformer = next(transformer_iter)\n", + " x = evaluator(transformer, (x, attn_mask[local]))\n", + "\n", + " for evaluator, transformer in transformer_iter:\n", + " local = not local\n", + " # We are making exactly 2*n_blocks transposes.\n", + " # So the output has the same shape as input.\n", + " x = x.transpose(1, 2)\n", + "\n", + " x = evaluator(transformer, (x, attn_mask[local]))\n", + "\n", + " if not local:\n", + " x, attn_mask = self.shifter(x)\n", + "\n", + " return x\n", + "\n", + "\n", + "class PatchEmbed(nn.Module):\n", + " \"\"\"\n", + " Patch embedding via 2D convolution.\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self, patch_size: int | tuple[int, ...], channels: int, embed_dim: int\n", + " ):\n", + " super().__init__()\n", + "\n", + " self.patch_size = patch_size\n", + " self.channels = channels\n", + " self.embed_dim = embed_dim\n", + "\n", + " self.proj = nn.Conv2d(\n", + " channels,\n", + " embed_dim,\n", + " kernel_size=patch_size,\n", + " stride=patch_size,\n", + " bias=True,\n", + " )\n", + "\n", + " def forward(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Args:\n", + " x: Tensor of shape [batch, channels, lat, lon].\n", + " Returns:\n", + " Tensor: Tensor with shape\n", + " [batch, embed_dim, lat//patch_size, lon//patch_size]\n", + " \"\"\"\n", + "\n", + " H, W = x.shape[-2:]\n", + "\n", + " if W % self.patch_size[1] != 0:\n", + " raise ValueError(\n", + " f\"Cannot do patch embedding for tensor of shape {x.size()}\"\n", + " \" with patch size {self.patch_size}. (Dimensions are BSCHW.)\"\n", + " )\n", + " if H % self.patch_size[0] != 0:\n", + " raise ValueError(\n", + " f\"Cannot do patch embedding for tensor of shape {x.size()}\"\n", + " f\" with patch size {self.patch_size}. 
(Dimensions are BSCHW.)\"\n", + " )\n", + "\n", + " x = self.proj(x)\n", + "\n", + " return x\n", + "\n", + "\n", + "class PrithviWxCEncoderDecoder(nn.Module):\n", + " \"\"\"\n", + " Hiera-MaxViT encoder/decoder code.\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " embed_dim: int,\n", + " n_blocks: int,\n", + " mlp_multiplier: float,\n", + " n_heads: int,\n", + " dropout: float,\n", + " drop_path: float,\n", + " shifter: nn.Module | None = None,\n", + " transformer_cp: list[int] | None = None,\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " embed_dim: Embedding dimension\n", + " n_blocks: Number of local-global transformer pairs.\n", + " mlp_multiplier: MLP multiplier for hidden features in feed forward\n", + " networks.\n", + " n_heads: Number of attention heads.\n", + " dropout: Dropout.\n", + " drop_path: DropPath.\n", + " \"\"\"\n", + " super().__init__()\n", + "\n", + " self.embed_dim = embed_dim\n", + " self.n_blocks = n_blocks\n", + " self.mlp_multiplier = mlp_multiplier\n", + " self.n_heads = n_heads\n", + " self.dropout = dropout\n", + " self._transformer_cp = transformer_cp\n", + "\n", + " self.lgl_block = LocalGlobalLocalBlock(\n", + " features=embed_dim,\n", + " mlp_multiplier=mlp_multiplier,\n", + " n_heads=n_heads,\n", + " dropout=dropout,\n", + " drop_path=drop_path,\n", + " n_blocks=n_blocks,\n", + " shifter=shifter,\n", + " checkpoint=transformer_cp,\n", + " )\n", + "\n", + " def forward(self, x: torch.Tensor) -> torch.Tensor:\n", + " \"\"\"\n", + " Args:\n", + " x: Tensor of shape\n", + " [batch, global sequence, local sequence, embed_dim]\n", + " Returns:\n", + " Tensor of shape\n", + " [batch, mask_unit_sequence, local_sequence, embed_dim].\n", + " Identical in shape to the input x.\n", + " \"\"\"\n", + "\n", + " x = self.lgl_block(x)\n", + "\n", + " return x\n", + "\n", + "\n", + "class PrithviWxC(nn.Module):\n", + " \"\"\"Encoder-decoder fusing Hiera with MaxViT. See\n", + " - Ryali et al. \"Hiera: A Hierarchical Vision Transformer without the\n", + " Bells-and-Whistles\" (https://arxiv.org/abs/2306.00989)\n", + " - Tu et al. \"MaxViT: Multi-Axis Vision Transformer\"\n", + " (https://arxiv.org/abs/2204.01697)\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " in_channels: int,\n", + " input_size_time: int,\n", + " in_channels_static: int,\n", + " input_scalers_mu: Tensor,\n", + " input_scalers_sigma: Tensor,\n", + " input_scalers_epsilon: float,\n", + " static_input_scalers_mu: Tensor,\n", + " static_input_scalers_sigma: Tensor,\n", + " static_input_scalers_epsilon: float,\n", + " output_scalers: Tensor,\n", + " n_lats_px: int,\n", + " n_lons_px: int,\n", + " patch_size_px: tuple[int],\n", + " mask_unit_size_px: tuple[int],\n", + " mask_ratio_inputs: float,\n", + " embed_dim: int,\n", + " n_blocks_encoder: int,\n", + " n_blocks_decoder: int,\n", + " mlp_multiplier: float,\n", + " n_heads: int,\n", + " dropout: float,\n", + " drop_path: float,\n", + " parameter_dropout: float,\n", + " residual: str,\n", + " masking_mode: str,\n", + " positional_encoding: str,\n", + " decoder_shifting: bool = False,\n", + " checkpoint_encoder: list[int] | None = None,\n", + " checkpoint_decoder: list[int] | None = None,\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " in_channels: Number of input channels.\n", + " input_size_time: Number of timestamps in input.\n", + " in_channels_static: Number of input channels for static data.\n", + " input_scalers_mu: Tensor of size (in_channels,). 
Used to rescale\n", + " input.\n", + " input_scalers_sigma: Tensor of size (in_channels,). Used to rescale\n", + " input.\n", + " input_scalers_epsilon: Float. Used to rescale input.\n", + " static_input_scalers_mu: Tensor of size (in_channels_static). Used\n", + " to rescale static inputs.\n", + " static_input_scalers_sigma: Tensor of size (in_channels_static).\n", + " Used to rescale static inputs.\n", + " static_input_scalers_epsilon: Float. Used to rescale static inputs.\n", + " output_scalers: Tensor of shape (in_channels,). Used to rescale\n", + " output.\n", + " n_lats_px: Total latitudes in data. In pixels.\n", + " n_lons_px: Total longitudes in data. In pixels.\n", + " patch_size_px: Patch size for tokenization. In pixels lat/lon.\n", + " mask_unit_size_px: Size of each mask unit. In pixels lat/lon.\n", + " mask_ratio_inputs: Masking ratio for inputs. 0 to 1.\n", + " embed_dim: Embedding dimension\n", + " n_blocks_encoder: Number of local-global transformer pairs in\n", + " encoder.\n", + " n_blocks_decoder: Number of local-global transformer pairs in\n", + " decoder.\n", + " mlp_multiplier: MLP multiplier for hidden features in feed forward\n", + " networks.\n", + " n_heads: Number of attention heads.\n", + " dropout: Dropout.\n", + " drop_path: DropPath.\n", + " parameter_dropout: Dropout applied to parameters.\n", + " residual: Indicates whether and how model should work as residual\n", + " model. Accepted values are 'climate', 'temporal' and 'none'\n", + " positional_encoding: possible values are\n", + " ['absolute' (default), 'fourier'].\n", + " 'absolute' lat lon encoded in 3 dimensions using sine and\n", + " cosine\n", + " 'fourier' lat/lon to be encoded using various frequencies\n", + " masking_mode: String ['local', 'global', 'both'] that controls the\n", + " type of masking used.\n", + " checkpoint_encoder: List of integers controlling if gradient\n", + " checkpointing is used on encoder.\n", + " Format: [] for no gradient checkpointing. 
[3, 7] for\n", + " checkpointing after 4th and 8th layer etc.\n", + " checkpoint_decoder: List of integers controlling if gradient\n", + " checkpointing is used on decoder.\n", + " Format: See `checkpoint_encoder`.\n", + " decoder_shifting: Whether to use swin shifting in the decoder.\n", + " \"\"\"\n", + " super().__init__()\n", + "\n", + " self.in_channels = in_channels\n", + " self.input_size_time = input_size_time\n", + " self.in_channels_static = in_channels_static\n", + " self.n_lats_px = n_lats_px\n", + " self.n_lons_px = n_lons_px\n", + " self.patch_size_px = patch_size_px\n", + " self.mask_unit_size_px = mask_unit_size_px\n", + " self.mask_ratio_inputs = mask_ratio_inputs\n", + " self.embed_dim = embed_dim\n", + " self.n_blocks_encoder = n_blocks_encoder\n", + " self.n_blocks_decoder = n_blocks_decoder\n", + " self.mlp_multiplier = mlp_multiplier\n", + " self.n_heads = n_heads\n", + " self.dropout = dropout\n", + " self.drop_path = drop_path\n", + " self.residual = residual\n", + " self._decoder_shift = decoder_shifting\n", + " self.positional_encoding = positional_encoding\n", + " self._checkpoint_encoder = checkpoint_encoder\n", + " self._checkpoint_decoder = checkpoint_decoder\n", + "\n", + " assert self.n_lats_px % self.mask_unit_size_px[0] == 0\n", + " assert self.n_lons_px % self.mask_unit_size_px[1] == 0\n", + " assert self.mask_unit_size_px[0] % self.patch_size_px[0] == 0\n", + " assert self.mask_unit_size_px[1] % self.patch_size_px[1] == 0\n", + "\n", + " if self.patch_size_px[0] != self.patch_size_px[1]:\n", + " raise NotImplementedError(\n", + " \"Current pixel shuffle requires symmetric patches.\"\n", + " )\n", + "\n", + " self.local_shape_mu = (\n", + " self.mask_unit_size_px[0] // self.patch_size_px[0],\n", + " self.mask_unit_size_px[1] // self.patch_size_px[1],\n", + " )\n", + " self.global_shape_mu = (\n", + " self.n_lats_px // self.mask_unit_size_px[0],\n", + " self.n_lons_px // self.mask_unit_size_px[1],\n", + " )\n", + "\n", + " assert input_scalers_mu.shape == (in_channels,)\n", + " assert input_scalers_sigma.shape == (in_channels,)\n", + " assert output_scalers.shape == (in_channels,)\n", + "\n", + " if self.positional_encoding != \"fourier\":\n", + " assert static_input_scalers_mu.shape == (in_channels_static,)\n", + " assert static_input_scalers_sigma.shape == (in_channels_static,)\n", + "\n", + " # Input shape [batch, time, parameter, lat, lon]\n", + " self.input_scalers_epsilon = input_scalers_epsilon\n", + " self.register_buffer(\n", + " \"input_scalers_mu\", input_scalers_mu.reshape(1, 1, -1, 1, 1)\n", + " )\n", + " self.register_buffer(\n", + " \"input_scalers_sigma\", input_scalers_sigma.reshape(1, 1, -1, 1, 1)\n", + " )\n", + "\n", + " # Static inputs shape [batch, parameter, lat, lon]\n", + " self.static_input_scalers_epsilon = static_input_scalers_epsilon\n", + " self.register_buffer(\n", + " \"static_input_scalers_mu\",\n", + " static_input_scalers_mu.reshape(1, -1, 1, 1),\n", + " )\n", + " self.register_buffer(\n", + " \"static_input_scalers_sigma\",\n", + " static_input_scalers_sigma.reshape(1, -1, 1, 1),\n", + " )\n", + "\n", + " # Output shape [batch, parameter, lat, lon]\n", + " self.register_buffer(\n", + " \"output_scalers\", output_scalers.reshape(1, -1, 1, 1)\n", + " )\n", + "\n", + " self.parameter_dropout = nn.Dropout2d(p=parameter_dropout)\n", + "\n", + " self.patch_embedding = PatchEmbed(\n", + " patch_size=patch_size_px,\n", + " channels=in_channels * 
input_size_time,\n", + " embed_dim=embed_dim,\n", + " )\n", + "\n", + " if self.residual == \"climate\":\n", + " self.patch_embedding_static = PatchEmbed(\n", + " patch_size=patch_size_px,\n", + " channels=in_channels + in_channels_static,\n", + " embed_dim=embed_dim,\n", + " )\n", + " else:\n", + " self.patch_embedding_static = PatchEmbed(\n", + " patch_size=patch_size_px,\n", + " channels=in_channels_static,\n", + " embed_dim=embed_dim,\n", + " )\n", + "\n", + " self.input_time_embedding = nn.Linear(1, embed_dim // 4, bias=True)\n", + " self.lead_time_embedding = nn.Linear(1, embed_dim // 4, bias=True)\n", + "\n", + " self.mask_token = nn.Parameter(torch.randn(1, 1, 1, self.embed_dim))\n", + " self._nglobal_mu = np.prod(self.global_shape_mu)\n", + " self._global_idx = torch.arange(self._nglobal_mu)\n", + "\n", + " self._nlocal_mu = np.prod(self.local_shape_mu)\n", + " self._local_idx = torch.arange(self._nlocal_mu)\n", + "\n", + " self.encoder = PrithviWxCEncoderDecoder(\n", + " embed_dim=embed_dim,\n", + " n_blocks=n_blocks_encoder,\n", + " mlp_multiplier=mlp_multiplier,\n", + " n_heads=n_heads,\n", + " dropout=dropout,\n", + " drop_path=drop_path,\n", + " transformer_cp=checkpoint_encoder,\n", + " )\n", + "\n", + " if n_blocks_decoder != 0:\n", + " if self._decoder_shift:\n", + " self.decoder_shifter = d_shifter = SWINShift(\n", + " self.mask_unit_size_px,\n", + " self.global_shape_mu,\n", + " self.local_shape_mu,\n", + " self.patch_size_px,\n", + " n_context_tokens=0,\n", + " )\n", + " else:\n", + " self.decoder_shifter = d_shifter = None\n", + "\n", + " self.decoder = PrithviWxCEncoderDecoder(\n", + " embed_dim=embed_dim,\n", + " n_blocks=n_blocks_decoder,\n", + " mlp_multiplier=mlp_multiplier,\n", + " n_heads=n_heads,\n", + " dropout=dropout,\n", + " drop_path=0.0,\n", + " shifter=d_shifter,\n", + " transformer_cp=checkpoint_decoder,\n", + " )\n", + "\n", + " self.unembed = nn.Linear(\n", + " self.embed_dim,\n", + " self.in_channels\n", + " * self.patch_size_px[0]\n", + " * self.patch_size_px[1],\n", + " bias=True,\n", + " )\n", + "\n", + " self.masking_mode = masking_mode.lower()\n", + " match self.masking_mode:\n", + " case \"local\":\n", + " self.generate_mask = self._gen_mask_local\n", + " case \"global\":\n", + " self.generate_mask = self._gen_mask_global\n", + " case \"both\":\n", + " self._mask_both_local: bool = True\n", + " self.generate_mask = self._gen_mask_both\n", + " case _:\n", + " raise ValueError(\n", + " f\"Masking mode '{masking_mode}' not supported\"\n", + " )\n", + "\n", + " def swap_masking(self) -> None:\n", + " self._mask_both_local = not self._mask_both_local\n", + "\n", + " @cached_property\n", + " def n_masked_global(self):\n", + " return int(self.mask_ratio_inputs * np.prod(self.global_shape_mu))\n", + "\n", + " @cached_property\n", + " def n_masked_local(self):\n", + " return int(self.mask_ratio_inputs * np.prod(self.local_shape_mu))\n", + "\n", + " @staticmethod\n", + " def _shuffle_along_axis(a, axis):\n", + " idx = torch.argsort(input=torch.rand(*a.shape), dim=axis)\n", + " return torch.gather(a, dim=axis, index=idx)\n", + "\n", + " def _gen_mask_local(self, sizes: tuple[int]) -> tuple[Tensor]:\n", + " \"\"\"\n", + " Args:\n", + " batch_size: Number of elements in batch\n", + " Returns:\n", + " Tuple of torch tensors. 
[indices masked, indices unmasked].\n", + " Each of these is a tensor of shape (batch, global sequence)\n", + " \"\"\"\n", + " # Identify which indices (values) should be masked\n", + "\n", + " maskable_indices = self._local_idx.view(1, -1).expand(*sizes[:2], -1)\n", + "\n", + " maskable_indices = self._shuffle_along_axis(maskable_indices, 2)\n", + "\n", + " indices_masked = maskable_indices[:, :, : self.n_masked_local]\n", + " indices_unmasked = maskable_indices[:, :, self.n_masked_local :]\n", + "\n", + " return indices_masked, indices_unmasked\n", + "\n", + " def _gen_mask_global(self, sizes: tuple[int]) -> tuple[Tensor]:\n", + " \"\"\"\n", + " Args:\n", + " batch_size: Number of elements in batch\n", + " Returns:\n", + " Tuple of torch tensors. [indices masked, indices unmasked].\n", + " Each of these is a tensor of shape (batch, global sequence)\n", + " \"\"\"\n", + " # Identify which indices (values) should be masked\n", + "\n", + " maskable_indices = self._global_idx.view(1, -1).expand(*sizes[:1], -1)\n", + "\n", + " maskable_indices = self._shuffle_along_axis(maskable_indices, 1)\n", + "\n", + " indices_masked = maskable_indices[:, : self.n_masked_global]\n", + " indices_unmasked = maskable_indices[:, self.n_masked_global :]\n", + "\n", + " return indices_masked, indices_unmasked\n", + "\n", + " def _gen_mask_both(self, sizes: tuple[int]) -> tuple[Tensor]:\n", + " if self._mask_both_local:\n", + " return self._gen_mask_local(sizes)\n", + " else:\n", + " return self._gen_mask_global(sizes)\n", + "\n", + " @staticmethod\n", + " def reconstruct_batch(\n", + " idx_masked: Tensor,\n", + " idx_unmasked: Tensor,\n", + " data_masked: Tensor,\n", + " data_unmasked: Tensor,\n", + " ) -> tuple[Tensor, Tensor]:\n", + " \"\"\"Reconstructs a tensor along the mask unit dimension. Batched\n", + " version.\n", + "\n", + " Args:\n", + " idx_masked: Tensor of shape `batch, mask unit sequence`.\n", + " idx_unmasked: Tensor of shape `batch, mask unit sequence`.\n", + " data_masked: Tensor of shape `batch, mask unit sequence, ...`.\n", + " Should have same size along mask unit sequence dimension as\n", + " idx_masked. Dimensions beyond the first two, marked here as ...\n", + " will typically be `local_sequence, channel` or\n", + " `channel, lat, lon`. These dimensions should agree with\n", + " data_unmasked.\n", + " data_unmasked: Tensor of shape `batch, mask unit sequence, ...`.\n", + " Should have same size along mask unit sequence dimension as\n", + " idx_unmasked. Dimensions beyond the first two, marked here as\n", + " ... will typically be `local_sequence, channel` or `channel,\n", + " lat, lon`. These dimensions should agree with data_masked.\n", + " Returns:\n", + " tuple: Tensor of same shape as inputs data_masked and\n", + " data_unmasked. I.e. `batch, mask unit sequence, ...`. 
Also the\n", + " index used to compose the total data from the masked and the\n", + " unmasked part.\n", + " \"\"\"\n", + " dim: int = idx_masked.ndim\n", + "\n", + " idx_total = torch.argsort(\n", + " torch.cat([idx_masked, idx_unmasked], dim=-1), dim=-1\n", + " )\n", + " idx_total = idx_total.view(\n", + " *idx_total.shape, *[1] * (data_unmasked.ndim - dim)\n", + " )\n", + " idx_total = idx_total.expand(\n", + " *idx_total.shape[:dim], *data_unmasked.shape[dim:]\n", + " )\n", + "\n", + " data = torch.cat([data_masked, data_unmasked], dim=dim - 1)\n", + " data = torch.gather(data, dim=dim - 1, index=idx_total)\n", + "\n", + " return data, idx_total\n", + "\n", + " def fourier_pos_encoding(self, x_static: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Args:\n", + " x_static: B x C x H x W. First two channels are lat and lon.\n", + " Returns:\n", + " Tensor: Tensor of shape B x E x H/P x W/P where E is the\n", + " embedding dimension.\n", + " \"\"\"\n", + "\n", + " # B x C x H x W -> B x 1 x H/P x W/P\n", + " latitudes_patch = F.avg_pool2d(\n", + " x_static[:, [0]],\n", + " kernel_size=self.patch_size_px,\n", + " stride=self.patch_size_px,\n", + " )\n", + " longitudes_patch = F.avg_pool2d(\n", + " x_static[:, [1]],\n", + " kernel_size=self.patch_size_px,\n", + " stride=self.patch_size_px,\n", + " )\n", + "\n", + " modes = (\n", + " torch.arange(self.embed_dim // 4, device=x_static.device).view(\n", + " 1, -1, 1, 1\n", + " )\n", + " + 1.0\n", + " )\n", + " pos_encoding = torch.cat(\n", + " (\n", + " torch.sin(latitudes_patch * modes),\n", + " torch.sin(longitudes_patch * modes),\n", + " torch.cos(latitudes_patch * modes),\n", + " torch.cos(longitudes_patch * modes),\n", + " ),\n", + " axis=1,\n", + " )\n", + "\n", + " return pos_encoding # B x E x H/P x W/P\n", + "\n", + " def time_encoding(self, input_time, lead_time):\n", + " \"\"\"\n", + " Args:\n", + " input_time: Tensor of shape [batch].\n", + " lead_time: Tensor of shape [batch].\n", + " Returns:\n", + " Tensor: Tensor of shape [batch, embed_dim, 1, 1]\n", + " \"\"\"\n", + " input_time = self.input_time_embedding(input_time.view(-1, 1, 1, 1))\n", + " lead_time = self.lead_time_embedding(lead_time.view(-1, 1, 1, 1))\n", + "\n", + " time_encoding = torch.cat(\n", + " (\n", + " torch.cos(input_time),\n", + " torch.cos(lead_time),\n", + " torch.sin(input_time),\n", + " torch.sin(lead_time),\n", + " ),\n", + " axis=3,\n", + " )\n", + " return time_encoding\n", + "\n", + " def to_patching(self, x: Tensor) -> Tensor:\n", + " \"\"\"Transform data from lat/lon space to two axis patching\n", + "\n", + " Args:\n", + " x: Tensor in lat/lon space (N, C, Nlat//P_0, Nlon//P_1)\n", + "\n", + " Returns:\n", + " Tensor in patch space (N, G, L, C)\n", + " \"\"\"\n", + " n_batch = x.shape[0]\n", + "\n", + " x = x.view(\n", + " n_batch,\n", + " -1,\n", + " self.global_shape_mu[0],\n", + " self.local_shape_mu[0],\n", + " self.global_shape_mu[1],\n", + " self.local_shape_mu[1],\n", + " )\n", + " x = x.permute(0, 2, 4, 3, 5, 1).contiguous()\n", + "\n", + " s = x.shape\n", + " return x.view(n_batch, s[1] * s[2], s[3] * s[4], -1)\n", + "\n", + " def from_patching(self, x: Tensor) -> Tensor:\n", + " \"\"\"Transform data from two axis patching to lat/lon space\n", + "\n", + " Args:\n", + " x: Tensor in patch space with shape (N, G, L, C*P_0*P_1)\n", + "\n", + " Returns:\n", + " Tensor: Tensor in lat/lon space\n", + " (N, C*P_0*P_1, Nlat//P_0, Nlon // P_1)\n", + " \"\"\"\n", + " n_batch = x.shape[0]\n", + "\n", + " x = x.view(\n", + " n_batch,\n", + " self.global_shape_mu[0],\n", + " 
self.global_shape_mu[1],\n", + " self.local_shape_mu[0],\n", + " self.local_shape_mu[1],\n", + " -1,\n", + " )\n", + " x = x.permute(0, 5, 1, 3, 2, 4).contiguous()\n", + "\n", + " s = x.shape\n", + " return x.view(n_batch, -1, s[2] * s[3], s[4] * s[5])\n", + "\n", + " def forward(self, batch: dict[str, torch.Tensor]) -> torch.Tensor:\n", + " \"\"\"\n", + " Args:\n", + " batch: Dictionary with the following keys::\n", + "\n", + " 'x': Tensor of shape [batch, time, parameter, lat, lon]\n", + " 'y': Tensor of shape [batch, parameter, lat, lon]\n", + " 'static': Tensor of shape [batch, channel_static, lat, lon]\n", + " 'climate': Optional tensor of shape [batch, parameter, lat, lon]\n", + " 'input_time': Tensor of shape [batch]. Or none.\n", + " 'lead_time': Tensor of shape [batch]. Or none.\n", + "\n", + " Returns:\n", + " Tensor: Tensor of shape [batch, parameter, lat, lon].\n", + " \"\"\" # noqa: E501\n", + " x_rescaled = (batch[\"x\"] - self.input_scalers_mu) / (\n", + " self.input_scalers_sigma + self.input_scalers_epsilon\n", + " )\n", + " batch_size = x_rescaled.shape[0]\n", + "\n", + " if self.positional_encoding == \"fourier\":\n", + " x_static_pos = self.fourier_pos_encoding(batch[\"static\"])\n", + " x_static = (\n", + " batch[\"static\"][:, 2:] - self.static_input_scalers_mu[:, 3:]\n", + " ) / (\n", + " self.static_input_scalers_sigma[:, 3:]\n", + " + self.static_input_scalers_epsilon\n", + " )\n", + " else:\n", + " x_static = (batch[\"static\"] - self.static_input_scalers_mu) / (\n", + " self.static_input_scalers_sigma\n", + " + self.static_input_scalers_epsilon\n", + " )\n", + "\n", + " if self.residual == \"temporal\":\n", + " # We create a residual of same shape as y\n", + " index = torch.where(\n", + " batch[\"lead_time\"] > 0, batch[\"x\"].shape[1] - 1, 0\n", + " )\n", + " index = index.view(-1, 1, 1, 1, 1)\n", + " index = index.expand(batch_size, 1, *batch[\"x\"].shape[2:])\n", + " x_hat = torch.gather(batch[\"x\"], dim=1, index=index)\n", + " x_hat = x_hat.squeeze(1)\n", + " elif self.residual == \"climate\":\n", + " climate_scaled = (\n", + " batch[\"climate\"] - self.input_scalers_mu.view(1, -1, 1, 1)\n", + " ) / (\n", + " self.input_scalers_sigma.view(1, -1, 1, 1)\n", + " + self.input_scalers_epsilon\n", + " )\n", + "\n", + " # [batch, time, parameter, lat, lon]\n", + " # -> [batch, time x parameter, lat, lon]\n", + " x_rescaled = x_rescaled.flatten(1, 2)\n", + " # Parameter dropout\n", + " x_rescaled = self.parameter_dropout(x_rescaled)\n", + "\n", + " x_embedded = self.patch_embedding(x_rescaled)\n", + "\n", + " if self.residual == \"climate\":\n", + " static_embedded = self.patch_embedding_static(\n", + " torch.cat((x_static, climate_scaled), dim=1)\n", + " )\n", + " else:\n", + " static_embedded = self.patch_embedding_static(x_static)\n", + "\n", + " if self.positional_encoding == \"fourier\":\n", + " static_embedded += x_static_pos\n", + "\n", + " x_embedded = self.to_patching(x_embedded)\n", + " static_embedded = self.to_patching(static_embedded)\n", + "\n", + " time_encoding = self.time_encoding(\n", + " batch[\"input_time\"], batch[\"lead_time\"]\n", + " )\n", + "\n", + " tokens = x_embedded + static_embedded + time_encoding\n", + "\n", + " # Now we generate masks based on masking_mode\n", + " indices_masked, indices_unmasked = self.generate_mask(\n", + " (batch_size, self._nglobal_mu)\n", + " )\n", + " indices_masked = indices_masked.to(device=tokens.device)\n", + " indices_unmasked = indices_unmasked.to(device=tokens.device)\n", + " maskdim: int = 
indices_masked.ndim\n", + "\n", + " # Unmasking\n", + " unmask_view = (*indices_unmasked.shape, *[1] * (tokens.ndim - maskdim))\n", + " unmasked = torch.gather(\n", + " tokens,\n", + " dim=maskdim - 1,\n", + " index=indices_unmasked.view(*unmask_view).expand(\n", + " *indices_unmasked.shape, *tokens.shape[maskdim:]\n", + " ),\n", + " )\n", + "\n", + " # Encoder\n", + " x_encoded = self.encoder(unmasked)\n", + "\n", + " # Generate and position encode the mask tokens\n", + " # [1, 1, 1, embed_dim]\n", + " # -> [batch, global_seq_masked, local seq, embed_dim]\n", + " mask_view = (*indices_masked.shape, *[1] * (tokens.ndim - maskdim))\n", + " masking = self.mask_token.repeat(*static_embedded.shape[:3], 1)\n", + " masked = masking + static_embedded\n", + " masked = torch.gather(\n", + " masked,\n", + " dim=maskdim - 1,\n", + " index=indices_masked.view(*mask_view).expand(\n", + " *indices_masked.shape, *tokens.shape[maskdim:]\n", + " ),\n", + " )\n", + "\n", + " recon, _ = self.reconstruct_batch(\n", + " indices_masked, indices_unmasked, masked, x_encoded\n", + " )\n", + "\n", + " x_decoded = self.decoder(recon)\n", + "\n", + " # Output: [batch, global sequence, local sequence,\n", + " # in_channels * patch_size[0] * patch_size[1]]\n", + " x_unembed = self.unembed(x_decoded)\n", + "\n", + " # Reshape to [batch, global_lat, global_lon, local_lat, local_lon,\n", + " # in_channels * patch_size[0] * patch_size[1]]\n", + " x_out = self.from_patching(x_unembed)\n", + "\n", + " # Pixel shuffle to [batch, in_channels, lat, lon]\n", + " x_out = F.pixel_shuffle(x_out, self.patch_size_px[0])\n", + "\n", + " if self.residual == \"temporal\":\n", + " x_out = self.output_scalers * x_out + x_hat\n", + " elif self.residual == \"climate\":\n", + " x_out = self.output_scalers * x_out + batch[\"climate\"]\n", + " elif self.residual == \"none\":\n", + " x_out = (\n", + " self.output_scalers * x_out\n", + " + self.input_scalers_mu.reshape(1, -1, 1, 1)\n", + " )\n", + "\n", + " return x_out\n" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "import yaml\n", + "\n", + "# from PrithviWxC.model import PrithviWxC\n", + "\n", + "with open(\"./config.yaml\", \"r\") as f:\n", + " config = yaml.safe_load(f)\n", + "\n", + "model = PrithviWxC(\n", + " in_channels=config[\"params\"][\"in_channels\"],\n", + " input_size_time=config[\"params\"][\"input_size_time\"],\n", + " in_channels_static=config[\"params\"][\"in_channels_static\"],\n", + " input_scalers_mu=in_mu,\n", + " input_scalers_sigma=in_sig,\n", + " input_scalers_epsilon=config[\"params\"][\"input_scalers_epsilon\"],\n", + " static_input_scalers_mu=static_mu,\n", + " static_input_scalers_sigma=static_sig,\n", + " static_input_scalers_epsilon=config[\"params\"][\n", + " \"static_input_scalers_epsilon\"\n", + " ],\n", + " output_scalers=output_sig**0.5,\n", + " n_lats_px=config[\"params\"][\"n_lats_px\"],\n", + " n_lons_px=config[\"params\"][\"n_lons_px\"],\n", + " patch_size_px=config[\"params\"][\"patch_size_px\"],\n", + " mask_unit_size_px=config[\"params\"][\"mask_unit_size_px\"],\n", + " mask_ratio_inputs=masking_ratio,\n", + " embed_dim=config[\"params\"][\"embed_dim\"],\n", + " n_blocks_encoder=config[\"params\"][\"n_blocks_encoder\"],\n", + " n_blocks_decoder=config[\"params\"][\"n_blocks_decoder\"],\n", + " mlp_multiplier=config[\"params\"][\"mlp_multiplier\"],\n", + " n_heads=config[\"params\"][\"n_heads\"],\n", + " dropout=config[\"params\"][\"dropout\"],\n", + " drop_path=config[\"params\"][\"drop_path\"],\n", + " parameter_dropout=config[\"params\"][\"parameter_dropout\"],\n", + " residual=residual,\n", + " masking_mode=masking_mode,\n", + " decoder_shifting=decoder_shifting,\n", + " positional_encoding=positional_encoding,\n", + " checkpoint_encoder=[],\n", + " checkpoint_decoder=[],\n", + ")\n", + "\n", + "\n", + "state_dict = torch.load(weights_path, weights_only=False)\n", + "if \"model_state\" in state_dict:\n", + " state_dict = state_dict[\"model_state\"]\n", + "model.load_state_dict(state_dict, strict=True)\n", + "\n", + "if (hasattr(model, \"device\") and model.device != device) or not hasattr(\n", + " model, \"device\"\n", + "):\n", + " model = model.to(device)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Rollout\n", + "We are now ready to perform the rollout. Again, the data has to be run through a\n", + "preprocessor. However, this time we use a preprocessor that can handle the\n", + "additional intermediate data. Also, rather than calling the model directly, we\n", + "have a convenient wrapper function that performs the iteration. This also\n", + "simplifies the model loading when using a sharded checkpoint. If you perform\n", + "training steps through this function, you should use an aggressive number of\n", + "activation checkpoints, as the memory consumption becomes quite high.\n",
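+ "\n",
+ "At its core, the wrapper is a simple feedback loop: each prediction becomes the\n",
+ "newest input frame for the next step. A stripped-down sketch of that idea (the\n",
+ "real `rollout_iter` below additionally swaps in the per-step static and\n",
+ "climatology data and disables input masking after the first step):\n",
+ "\n",
+ "```python\n",
+ "# Conceptual sketch only; nsteps would come from dataset.nsteps.\n",
+ "x_prev, x_curr = batch[\"x\"][:, 0], batch[\"x\"][:, 1]\n",
+ "for _ in range(nsteps):\n",
+ "    out = model({**batch, \"x\": torch.stack((x_prev, x_curr), dim=1)})\n",
+ "    x_prev, x_curr = x_curr, out\n",
+ "```"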
+ { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "from torch import Tensor, nn\n", + "\n", + "\n", + "def rollout_iter(\n", + " nsteps: int,\n", + " model: nn.Module,\n", + " batch: dict[str, Tensor | int | float],\n", + ") -> Tensor:\n", + " \"\"\"A helper function for performing autoregressive rollout.\n", + "\n", + " Args:\n", + " nsteps (int): The number of rollout steps to take.\n", + " model (nn.Module): A model.\n", + " batch (dict): A data dictionary common to the Prithvi models.\n", + "\n", + " Raises:\n", + " ValueError: If the number of steps isn't positive.\n", + "\n", + " Returns:\n", + " Tensor: The output of the model after nsteps autoregressive iterations.\n", + " \"\"\"\n", + " if nsteps < 1:\n", + " raise ValueError(\"'nsteps' should be a positive int.\")\n", + "\n", + " xlast = batch[\"x\"][:, 1]\n", + " batch[\"lead_time\"] = batch[\"lead_time\"][..., 0]\n", + "\n", + " # Save the masking ratio to be restored later\n", + " mask_ratio_tmp = model.mask_ratio_inputs\n", + "\n", + " for step in range(nsteps):\n", + " # After the first step, turn off masking\n", + " if step > 0:\n", + " model.mask_ratio_inputs = 0.0\n", + "\n", + " # Select the static data, climatology and target for this step\n", + " batch[\"static\"] = batch[\"statics\"][:, step]\n", + " batch[\"climate\"] = batch[\"climates\"][:, step]\n", + " batch[\"y\"] = batch[\"ys\"][:, step]\n", + "\n", + " out = model(batch)\n", + "\n", + " # The latest output becomes the newest of the two input time steps\n", + " batch[\"x\"] = torch.cat((xlast[:, None], out[:, None]), dim=1)\n", + " xlast = out\n", + "\n", + " # Restore the masking ratio\n", + " model.mask_ratio_inputs = mask_ratio_tmp\n", + "\n", + " return xlast\n" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "# from PrithviWxC.dataloaders.merra2_rollout import preproc\n", + "# from PrithviWxC.rollout import rollout_iter\n", + "\n", + "data = next(iter(dataset))\n", + "batch = preproc([data], padding)\n", + "\n", + "for k, v in batch.items():\n", + " if isinstance(v, torch.Tensor):\n", + " batch[k] = v.to(device)\n", + "\n", + "rng_state_1 = torch.get_rng_state()\n", + "with torch.no_grad():\n", + " model.eval()\n", + " out = rollout_iter(dataset.nsteps, model, batch)" + ] + },
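+ { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Optional sanity check (not part of the original flow): after\n", + "# dataset.nsteps autoregressive iterations, the rollout output should\n", + "# have the same [batch, parameter, lat, lon] shape as the final-step\n", + "# target that rollout_iter left in batch[\"y\"].\n", + "assert out.shape == batch[\"y\"].shape\n", + "print(dataset.nsteps, out.shape)" + ] + },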
"iVBORw0KGgoAAAANSUhEUgAAAisAAAEjCAYAAADzFUHYAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAADvlUlEQVR4nOz9e6wtyVkeDj/d1b3W2rdzzsycmTljezw2BJIvQKIE+GxDiDF8GFsoUYCgECSEFS5BtlHAEBJuis1tQrgICYWbiCagCOE/fiQBEQUwCUQIHPxZ/MEl8NnENsae8VzP2Wefs9daffv+6H6r36p+q7q6V6+199lnP9LWXqu7urq6V3fVU+/7vG9FVVVVuMQlLnGJS1ziEpc4p4jPugGXuMQlLnGJS1ziEj5ckpVLXOISl7jEJS5xrnFJVi5xiUtc4hKXuMS5xiVZucQlLnGJS1ziEucal2TlEpe4xCUucYlLnGtckpVLXOISl7jEJS5xrnFJVi5xiUtc4hKXuMS5RnLWDdgUZVni4x//OI6OjhBF0Vk35xKXuMQlLnGJSwSgqircvn0bL3vZyxDHftvJPU9WPv7xj+Pxxx8/62Zc4hKXuMQlLnGJEfjoRz+KV7ziFd4y9zxZOTo6AgB89v/nO5Eki87+StXWlqiYNlHvtur1nW/IuXh5aqsEu86h57GPler07Yvy3SdQjssKZRz1bhtaPi7ba3HVZaNKIn0PqqQ9RtrG9/HtlYpQqqYNRXN+xdolbNsElYpQsp4jzs395cS9Spnya223RwU62/k22h4V3eOk7/b2Ko4QlRP3G1adVfOc0Db7u72t6nmu+srEmXk9Mbtf0vPhe54k2HUkp7t7v0P6rU377bhA510bCn5uVz8dFZW333bV6zpG6udD9tll+to09L7m+RLve88P6nHch3uerJDrJ0kWSFKTrBg/SLyll2aXqp+h5wopL5VptrmIhD2A2oOn97xxWz7KKyBlu4oKZfObxc1DXw58YWP2sriO5ecJOc5uV6miwXV4kbq3iWQl8pCVmM7fdqx8mw+hpKZDVqwmTklWKhUBs+nqCz6vQBrG1kH1uIjJLhElVYfQEXw/eyjPtcslu5yMpP7BcpLxIG27tx6vhRN0bt2etEs0OmUC63URHul6Q/a56va1YQxCJBz3PFnxYROrR2cA3iJsUkADeej5+Sx8F+3m56Bzu85rb+flCXq/5/dyERkfqbGJBZWN8srPp4R28G1xUQ2ug7fPJjq+a3ARRnt7jEic/bpmfzYxkcrZs0ibxMS5TEyk7a6yfYiKClHht6xUyj7G3G5bTILOG2DF4DgPRITOf1bn7gP9/pIlzt62DYyxVmwL22rH1NcYOobu4t5eaLKyKaSBtteqYO3jkAZt33FDXSS8vEQGfO4GCdKgTSh7zh+yXddvEQF+TCyVF77zz/w6aTsRDF6Gu2Ak2PdNui7XvbSfCV/b7e0uy00f+szSoWbrPpJCcJEPaftQokLmceoAbfdM88lxrP+7sU9wy/ABf+jA7yrPtxufe9xXdhkfMYuKtm5+fKctPa40CfzcrnrHwPes7ILADIXvfQCmGbQ3dcNPiVC32S5I4CVZ8WCshWLooL0p7IHNPk/fd1edQ/fblg9pv72vry2b3LNt1t1Xl4+4uMCJFf233U98P6HYwJLWR2Bsa02fi2gKF1BLUHxExTrG4baxyYdk+bDrnMoyYQ/uru/2Z9e2vjK27kYmHxY5C7Q4Seext0tkKpS4Tq1zkjDFgMqff8kqGXKOvjJTDfy8nlANo2v/ebBI3ZdkJdRV4rJESNYVwq5cR4DbXeFzOZwl+gjQtiGJFvm+zjaycAkDHJUPITyS26zvmSKcxT1zEZShFhYJtsVkSFtsq4FEKvq28Tp2hcq6f9xa6CODkquu44qzj4H8OxkDqvUcd9rrsDZJ19S3bUpwd5HL+rJJgMBZYhdkoO/enPd7d1+SFR+hkAYfadYc6voZgjGiUrJWuEhJSF27IjS2ZWXMvepz3XiPFTrdIcLHUD3AtnQD2yAuhhA30Lw/lJyEbLPbZJQX3CZS9M5QSDNjfj98CCnTd17AtLbY9UnfbetM3zH29pgdC4S5raTvY4THQwfD0OdsjNvIbsumUUISoRwbLRR6njHoex/PgwXFhfuSrExh/g91pYREpEh6Bp+Y1C5rfx4iRrVdD7YQVcJYK8CQwdY14Itk0irrIh28g7U/R0WJSsWDSIarjSHtti0stoZGuseu50UfEzgYcOtGnxC3z6riqj8EIXVWqktIiFDYFgtCCJGwSQInPyX6SVAxsRVhKOkaKh52aVOm0J+ERk6NDhe2rClToJNCYcOJAH9/tkFSpqjb5QYKDV0+ayJzX5IVwD3g+oSp0nE+0uKzVvB9fVYN2u8iLLyMTT4ILjGqjT6iQvtC3RhjEZpPQrKUSGWd9RS1VFjdWaM4mDWdf39M4mDhZUAOFUkgzQmMau5zvDblzVEcoSq6biafG0tqFz1DURJpQqOKtvOSCMDg2XKg5cI1AIeQJ9ty4RvQJd3IFO6MEOGsVE46bpOoJrsOeES3Q0S0odqhMQOc/TxxkmITll1oXS4K6F3tIx6u/aGExbZS8e++/C59uC9/6rED6y71KFNhLJFwuVtCtT7bcJN16nJZShoCEq8swrbKmjZYsUzN92TZTOGSuLNPo9lXJTEqpdrPCWkhmv22sNMTiRVnhT4uKkrz/mVN25ft6BEV5khSzlOUc9VcY4FyrhCtw0gbLxc37caqbqct2iYSU7EwZKnDcXVqoVaaTciCbTXoG+B3qV0B3CRgqJjWt813vo4VylGXL5rIBVm0bBGPAHfjponHXIOjXUZyA9n17zr55zbhsqxIE44+wuJKbGfX60OlIt2/heDCkJVKRc6bOxahwtlNdBRTwWUt2YRghR471TlC76FNTGwXCpGRKjEtJB2SYoMRkSgv6/9ATVjYvnKedsgJtYW3MSpKTZjsc3PCEeUlynlqbK/ylgi5QHXG1hvP22FbiVwme3KD6e05G8jIRajLwrC42B1bmbTbOvoTRySJDS6ELT0Wj84gHwNR2e6rPEaykCy/IWb3qPSfxwc6LqSOiD1CruN4mVCy4dJBbErkJB2OdL4QAuPTamjLX0OMyG1kjwd21mU+yEqDMG2/CODXEWrhGLq9D5tGGV0YsuLCUGY8dODVg9aIQTcEtouoLzz4vCM40d0AkWqVRLXeRDWDbtIM9MxaAdQWiKgovKSFLCZEHqqDLinRbSy69ag7NXEgKw6ArnXGrme1RkxlGnKi27gy64hyq7cuCmA+q4WTeXudrIBBeLykO+4+y7qNeYVkXTZlVP1eFVWj92gGi6TrMvIRjI6I0zGwlTMq361XcuMMtQhERa1BkawwXJvirNczqPdxGD2Y90TwAMMJUYg2pU+gu4lGolIYNHMOxdgEg2OwidviPMFnbZLGSEnX4iMWu7BAXXiyQtjlQzdE1zIWPv1KyLGEbZCeqdxlNMsn60lo/drSUpT6e11PoS0mlVJ6EJdcOHy7O/lbrNsW5aVhLS
HrDICagORlTTSoTM5CF/IC0XIFAw3hApGTogCUao83rjdHtErq/QDUsh0hiLjpdqC11kj3wIUqiVCB7lOkSQoNGoZlxUdULPeYqxyJXisFFGlbpmSp9/t0KH2khe+nz/Yxrtm/ZDGS6g4VEUttdYUgu0KV48JNaLjFSWqn63y+9vuIjHQfXdYRVz28n05X5jalrXNRx6ICIDgrbohLxG7LWWIb0UAhZUItINsU4V4ossJ9chIT3PUDt23X0BjCcl5yrkgwBkzW0VSxQqQqTQziZddaYFs5SONhu1tskNajiiNRbBoX7iy1EqK8RLQUppOcZOR5S0I6+wpUeY4oMV/NKnf3vLp1VIYdGxHJoboTBfkJiBGj1s5E60q0spTMakJEpWNq7yEqxn8lH0PuGyIs2rKigEpovGuxwoj+N39xIQ/YZhu710LtcEUASUTDlwBOEs+GulzsNvDL6SVGqmvAcSV1cxE9F7kJGTxDSIkLRD6kPp1ckrwe7qbU58+7JKbv3L79myxkONRyMcU5OXyTd5dLTDpWOse2CMuFIit98eLbuIneJDsDLAwhESIS+lKz9+Vu6TsupLx9jhACNXSl4SqONGlBQzBsgiLdq3KeapJC2hASxJapQjGPm3Jce9Ks/dP8l8S7ZE2ptzWupR7rCQC2r9vrECGpsgxVliFKU/3dBSoj1QegJi50TqXq9jHSQpam+pqAKiGrVGNpyk1XZ6UiFPOuPsyOwBEjj5SnvEVQjL9IrsM4v7DYIbl2ACCqGGlZ19tCLCUUxjxWpBuSdXYopkp936mDew9T8/6EWGVigYSFuIFCrd48n4r+DLPPNwjLmvol8zycsEgT261b3nc0NoWe3+UiGusK2hYuFFmRsM0EPUA/yxwK34A9Ftu2pviS0UkrEdvWChsul0+U2zqW2BSTNiJb24JSLuqHoDhItQWFSIokAoWKEKsIJZqZ24oRlALiOTrwWU8scKsJJyYdkmJbV5KkJjVJ6wKqy/U/7BGg4zTojlZKNddVk7kqbu8TR5zXYkYppLnPNSOVJaJSzEwrClATlZBIGdf5tNUFAPTsvCYsXKfis3CEhgy7jh16zJDzDM2xUuy11+MLlwZqoiaJbvuOkyAlHxySiJAguXacCyRKbjSrzCbEZEz7zxpjCYWPqLhcpFJG5o5Gqqcb5dgqWXnVq16Fj3zkI53tb33rW/Hv//2/x1ve8hb8/M//vLHvNa95Dd773vcOPlep/H5lXwrqbRCaMYKjvnBhl5VlSjIi5VnhhEGy4ri2ueoOAY9Kce2zNSlte+0oIJkc+V7cNuNn1HG3VUlkakGgrBwptX4lakKho45VJW+d7kXRkoU8R5Sm/SSFbyeXj02GmOvHdWxUNCQnL2qNzAKNjqURLCeRtqbY90WCLZyVLCk2QQFqksItKUBrTemQDtFS0fdcte0ngqJDr6uua6mzLaCXdGlWqp5j+9xD9vZRcLh6JFeVS7sjtYV/jgu/xceXfNDrarF0KNJ2+hwVlWkN6rg+3M/vGNIS63u1u4Rp2558T4nQ7Mqh2CpZed/73oeCdaJ//Md/jC/6oi/CV3zFV+htb3rTm/DUU0/p77OZYM8dgCE3RLqZfZk8hz4koboZIiquSAwaOIbqYEKJjE0kfKsJu6wlQyKXerPj2jMgYV2fctY+vkMI2xCrVakAzGNURQXMgUol3f0wTdBGNstVCbUqoU5zRHlZRwlxdxERl6JAlChNLgzCkiSasJRZjjhNUGb19xgNyeGNSpRsyeH6FQ6luuJbwY1ja1V4+ndCFUdOkuIjKDY50fWJlhiXL739bM7+K/25q22x308gXgHlXDyFeC6pTXa9IeizSrV1u+uw90VVd1+njKXpceWnscmJtF2fo+y+C20ZUyTL4RPFqpW/76sJU+vCpeSJ9CwX8xiRw81uk3JgmCbHp++QkqPx7776OrqbDUmKVK8d1iyd96yz1wJbJisPP/yw8f3f/tt/i0/+5E/G61//er1tPp/jxo0b22yGF50H1LHsvN5vdJ792Tu7ZlS320ic/VvbRiV4C7DySCv30kvPz2u7qUIy45Yq6lqHGmsFr08iN3y7WpV1/hK2iKB9P3zLC/CyovvHgpG/QUXi7y35eLNmoItzoJgp5PsxknkMtSoRL2orTLzKWp3Lat1aT1RtaYmSxHQPAUBeExUABmHpwLamJI4LbdxHlbA/KitUiKBWpf79YkQoGsJSzCBG9LjICYUgG5YVQYviHrAFi6PDyhIVJmHyuU3q8saVIzvqnxD0u1/6hJtkfeqSnD7iE2p1IaLCRcZqDcRZ9ziXSDkEsphY2wuD6pBISmT0LXL/xSc9ksYMaImTWpU6FUBn8rNq+4diHoOWotjUGrApXP3NUDFrSOhyKIZGjfnacy6Twq3Xa/yn//Sf8I53vANR1N6c3/7t38YjjzyCa9eu4fWvfz1+4Ad+AI888oizntVqhdWqDfM8Pj4ObsOUseChacbH+jWnSlEcUr+zTFLrNsBDna0yLo0NJyh2nXRcqSJdX6gAWLt+6J5Shlcrpwq9O5WKgVls1FNpUsTP0/WphiaIstXzJPrTZuskQqUUsiMFtU4Qr0okpwrxqhbmxk1oM4CauDTkgYt1OXkhq0ucBL6+5Oax0RCVapGAwpjLudK6nq6lrL6mYgaUqewamkIsa6PfzWOWI8Ii7Ze31//rwbrfauM7d0tC/IO9r42+baEWGxIUx2sgWZqiYpfGyP48xv00VIPDXTjUPp2ojU2WXDDcr4IQ3jqbvG5XWaGcNUJ7wZKxCWGRrBb8e2+23okjSje1kJwledsZWfkv/+W/4ObNm3jLW96it735zW/GV3zFV+CJJ57Ahz70IXzP93wPvuALvgDvf//7MZ/Ldtgnn3wS73rXuzrb4wKIY/OB6Pww4uAzlgy0L9QUCeF8L+WYOl1Lv/P99ELzBfzibKSdMWuIAeqOwn6m7QRttI2TC/7dvg7eVn494nVaCdzidVkTFk/zySQthTy6UCrWsTb+ctpWzGnQamZrZHlIANW0T8WRvt+UhbbCDJErn0pAmzphz5ykKFV/bywq5eFcZ+O17xmRTZ5TpZjXn8s0Miwo/D/lRJFEq5JYlu93wW2F8Lt02vor8bN9jtCBVhKY9pEI137aHkoMJDcPWU3attQERa3Ne1Q2BERyv+j6HOto+drndfWs/CSELCE8X1Ef7OUm6nq40D4W6zGTJULMDq2XnNgC+oIKQqzm9lVJY4PLTW9vk47rc6fbbjQfvP0oWYYGDDdRVVXTUjcHvviLvxiz2Qy/+qu/6izz9NNP44knnsAv/dIv4cu+7MvEMpJl5fHHH8drv+T7kKQLo6xPibzp6pVjrTS2/qOPiLhITGios3P1YkoHv/TfCFf2Uxf5sLf17fPpdOwOyB5UpfO4QqLtY0tmbXF1Er7Mjn3r33CyYs9G1LpCerfSepY4KxAvC4gh0DyqqCgMF49BTHgSOZugEBIFzOe12yeJURzMWzEty6tSzGMUM9MSle1HoJDkwnbnsMyyJdeiWLcnJIqlz4oy9
rghGKM3MY8fX38IaTE0IoyscDePWssWFLX2n6+7DEP93xXt4W5jXU96t+qQFNsaYidWrJQSCYlRv4PU8DW74jsrY50vez2vMZCIfadtjuUu+kiQK5DCtRr70LQTfceFIIRQDbHe5NkS7/2178GtW7dw5coVb9mdWFY+8pGP4D3veQ9++Zd/2VvusccewxNPPIEPfOADzjLz+dxpdfGBZof04vjCrbYNY9DvISyuxG/OATZQhGtnhXWWMzqFOOhh7bTJ8XK3hMRav0bn9XAQpcYiQdoPsg5QfTzVPdVRImUz4RLJ7XrBv1KwuOhOg3Wy9rahMAfqCKWqzEXkeOeZxEAeA7mqrSxJwTQtrdVF35FEtdtd+hQOtr5RmSovWavdPi1RMVw6Pa6ekJDjoW4XHynh6+XY24bBY+UMIDJjXUD8+PpcvacyiAp394SEk0vtdVkruUbCl9zNcIfm9TYueJUi+HzrZrnKcORXF7ovACzLir2MRUOG+hY11SRHOG+1qIdNIj5Up91+vkZZpRTKhYKdJdu8Rv/E1MbU2cdDLCtniZ2QlaeeegqPPPIIvuRLvsRb7oUXXsBHP/pRPPbYY4PPwUOXXUvZ039OWKR6emFoOIYruocwT9J2+OCL5OFJvYwyZYwobzPBShaaTtKzFdUfGy9qfU0KVdLtgOr6XbOgCF0VjFTGtMxEBQxLRKTKJqFZXLeHdTBRswhhpEptwqRrU3mJeBV3Bm27tXFR6W0dobHHx80jZnjnVMwqALWIL9+PkdyNWWdeC3Dr9hZu0gK0BIU6SinSB2gJTJLUGhWdbr8ln1xjVJOTboZaCv2sVNfdI5GUvogZXm93m6NsAPkYu6jglJAiaVzaGE5c+nQjtlWFtnGiQsea56s/29blTju127V13cR5m2CtcARrtpqT1i1qu3vqd8B09Ya4fQBoK6BhgWSokghVXpMQla+6FeRlnVuo6SN856XEie3JzR8hWlnmKQmWRTNKqD+ULeJ9kFIjcIIk9eOuRUyVZaUxRMqsvO1CKpU7YlVs88Qh3VsnK2VZ4qmnnsLXfM3XIGG+9JOTE7zzne/El3/5l+Oxxx7Dhz/8YXznd34nrl+/ji/90i8dfJ64AJB2o3UkbHoD+QzCJ6ANzVTYlxXQFfpGKDzWlHrtEMFdsacQz4TF+XISiRYAYot0UJnS+E+fqyJ2mHBliwwtPqjr0OZgz+y5eQHLBRDlMWJkrenYWotHIy91OXKzaCQxqiRG3AhMqyRC3LhF+lbalsAX9jPbzZ/NCGUK5IsIcVYh3280LKsKyWmJ5E6BqKwQr2qSFSNDhcaSYpOWIEtK895ZnVsVtyJaTlTy/ViTEnugI6Jiu3uGEJS+MqPJBq+HCIDLElNEcnl2fnMF4836DJ91wxbn2sfZ+Ve0FSQCoOrfIlNAzLLOGtYlBwHifZd9Hk5UfMJyu+/zZX8O0YLoBUStQTffqy9CpUpbVfU52We+ermuweMy6lwTlXW5oXhuI74NMLevVu136x11Ja8c6voyvrOJpD4P69NqmFYdqZ/lZGRTy43PTa6/n5ekcADwnve8B3/5l3+Jf/bP/pmxXSmFP/qjP8Iv/MIv4ObNm3jsscfwhje8Ae9+97txdHQ0+DylMlnh1HHhdpZESb0+xTmlVU+96zhsoBb3rS1Ups16PEndI/KXyzB5MnNrhO4LBZBZNpyRA9Bm2hKp1+VVIu2cs0pae5d2A83NeuzFB+NVBtwpNXkp56mOjAFa0te3oGJcNKHO2iLUnK9AZ2GWqKyM56dMgHyPiEvZnhdpTViSuF53yMhWK3SeHDw02b42sqrMY02yyqQlJUAtpjVyosQtUaktLzCuU9+bHpIymqCEalP6yjmICtAO9FERMeFqZAzmNoGQcsDYYcA+wrIJGeLWrdJqI9ec9K2LJLerm2iNnpMYEaI1n7BI7qNWGM9F/XV5U5NW/zc1IWK+n1QBKYTon66wtpynwBxut08ojKUzGDkhTVmiuskb6d1jLqga7fV18i7Ru00W6zkzZTXvr74O4Vr4hI0mkFi1lm9aSgNofhNL92evDQbAIC8hfaAPdiLWIdg6WXnjG98IScO7t7eHX//1X9/KOTu5U4IyUHatFpyYpBT6tyJLQ9QJrfO5azr5UnqsKXTertnWbCdPJmd3FjS7GRIeTGVts61IQixrhvEi8XassqasbK406tSumJqE1H+1JUXSvvBsssZLxIR0tpsnyiu9DkptwSiAOTrkRfvAG38z9pJel5zLslLX05yT6QzyRZuMKs6qJoLDdI9xBX5E/vi8WedHXyfdAybAXaR6LSROvHRdzbNRFVXtzixq+02l6uglg6hYCwtyolJfm0U+Alw7IkGZUCjrhUBQoiJCvI7a36cRr5J7xU5TT6B7UhO4lqR23T3Dm+lzA42BHSEkXdOQc0gpHGwRbV2naxLTrk0F0Dts6uQAIDkNb5RhQZ13z6VF7FY7DPCIPF8Gaf3Zat9ibhAV2zJtiOl5niVOhFxuXYJtQTbaVmrCQuck4uKD1v8VrL/l2YLt8uxziK5vkwn9hV4byCYpY24UzSziVamZZVxUOvTKNpsFZ4xddX2uAIUVtpYan5tJu2ya9pDZVZv5mpkMBRIn69Jwb+iwQZZXQ0z3r2IUB7avufFhJ63YmdfrU7ZTnQAMQR8/jpOQeFk0Ky270uZrOoYob2cwlDqeL1gIoEPe4lms70W8MjuuqCAfedyGQFM48ox+906ThDa2n0u9Bg693PQXATqUvmrvTyF3qFIyt2qRGh0YEZV8T6G0onzo/SABLScnXJNC7SeXTz0w95OTdh93BTgKjbGYcMIhHe+yVlC4cNlaTOJ1BLWqiQmPmOlYI6QkVhmAZUvoAO4ma57nyLyfgGx9koiCFP1ji3Cl7LT0WWUmQY6zCvHSbxGuLdURijkAFn5slElqchbnNWGJG+JSJBEi1UzkmvvlmlDJrhg2YVyaYnmb3NTbhd+5sVaoW3dZOcUG71aX0jnaXiFdf2622++da6kLNoFzIcoLYLkCSAdDYvlmAdKWUNFkxBIOUx0SmgVL23LWwyyQnRjQlhigEO+1EZHJ3eU2abUyCPM6SCN3rtxAuwJpViT0rW1imuojqHUFtTJN9O15LL2ItNCbYG2RrB5mReYxtApwcrclSbLbQZjFcD+xipEf1BeY3s7rvB5WhxsVALLu8vF0fFSUjb6EJVUS7rX2xSqWw8Uj+NLbG8LCiYy5QGHMInzkCKH2WuoZi7YoNESFd5R0L43v85qwEHGx/eLSterw5EaAyK0qPGRZk8cCndWBzVTndZhnKwCPtBiug04n0oqewYLlir0E+YFCvhcj249QLKImqZvp7qnb3P1vD7KVqkSXRwgGWVJC6g519XDSUkSGFSVe1wQgXjd/zF0iWVBou0TO4qw9VjWPK8/gq101M7qvpquQwK060n6AJXkT2kyWEsMFpLUsdb+mVhXr20zSUiZtLhayLvO1fcjNSVYVEt9yLR8llORNH+qyljQacqSQQOaTWI7p8lkjgC7xIBdrnvv1YVzs
3hAOkYgZRKj0LnDqyi5t1MctqVLZ5nrtclFeiPeno0e0tDBVbgZSVEmEqpQjnDoJPAkZtC7QabUScGHISnJaIC7L2v+OzVTIdQKsehCigQSAERFimz8NK4cHRDx8MfJxUaGy1PRkPSHLTiezqxhuaIYJ2uHKkniWi7/qDoCsFN7LMs7j3N+QHv6ZJ31rj2/bzU2mXMRbt9VslHFthTkrCOkoqYOtdSIKMYrmxW7bmNwuaqKyl0DNhOeAfPqWVaJUQGGmAUKcAclpM9M1NBC1pYNmo9QWKXKKXFR2CDIdm+8rLK/FWF+JkC+YhcQmKlHb1vpP1l4EWUlCsSuXD4GRFnLvkDUlRNNhWy180Tqa2BB5KVEnTowBnNbb8j3TYqVJhkWe+GrUut2NtaTOq+ISwHatISZJMaPXiHR0c7G05bllDjBd5VNmW/WJT31CVGNfHzEZArY+Vwj6SUZDIPK8TTngjOZrLTQUwEC/QtlknwYs9xdB1LUU+n9fO6kO/qtzy3WtE2q+W1bz+hyVuB2AXjolFBeGrADNQH+30DNnHYLJOGS9im57jKSUJxNoiQrVnAbXVhnfKuhbH53O9gm/zy5n9UkuEnuNnqKZ8fNr5P8JJXNlADD0JukJicu6RAVgYlaW4TFeZbWPk393JFQiYmO7fiSdS1S0PWFlvZy6o6H1OxwCMh5Szc2VJLgly0I5r+tXq1JbV9pzy2v96POUleFeqo9po5fidYn9j2dtrhYVIWqegYI/U3Gk18UBrNDRjHQqLbmWoi/kLL2t0r/YS5DvqyZxW6TDSymBW7GoB8Z2Rk918PoqvU12T7Rr1gwSw/KoHIrAsSNxePmpYbmC4qy2qCR3W5dPVNbXJEUBEfpyp0jgVhV9flZHegewc9UArVWECEmcAbjDLHCcNJX22kbyuXz7fHlTjOthVsM6M21rVSFBOHePG8daRNoF2wUbCmdZTlio33JZXSC4hADZ+qFFtdRnMYa3cOcB6+Z/IctNYSZ2BAzNiw1OXspmvbGqSdGAuUxcuNDevBYhQIEft+inCaQttFNVcKIiiaWHZAu+MGRlfaiAWHVnDajFg0QyihmAubxirA2XdYYnkBuSJMxe9C50YTwOHqasO4C5Wda2+OgwQsXV+GZmRyIBtF6NBJ3siMX4m51EV2Br5wOw94dst9H1o8Yo9ppHOe26h3jqeHM7mgUVqa3catBqabi+hpOeaE/VGWgbLUveaEJIC0JJ1MqZOSjVdda6NbWu3Y7JXdc9MQkm3U+yphTzGNmRwvooQr6IkB3IWWQ1WZpxi0m/lWRIBAmVrWK0xIMTEGmbva8HIa6nyNayFBFQRFDLCOq0Jirc6sBJRVy03+3fjGtA2va4SUxUdsO8XdoSTlD4NiInoWnwyVLS0ZfQNTRRPIUyJ16h/VhcoNbHNX0Kz9PDNX1a+C+6fn0PVbdf4vBaVVz9BxGWRnRqpy7g4HeBr81Vl01kDQtguojyHJjPTEEvuyYAbc6XJW+nPBxzYlI1VhZtcS4KxMvuvcqv1GbceFU0EUce0sOObQMgzHPW98MWQ3evawiqJEIVtIhIjYtDVq7GKGYte4vKOrKCzJ6VirDerzt0DmM2U9YzBZrdSgTAJhiheVN4ojBeH6yydmdDA6qBWTeLpGEtYvlTeFtrLU7jbklpVkYPKQm4mgRq7CWVsjNKcM2KXCm0JTdUH+yXiC+811p0KkPEVZePOjNcO4yO/PSaDDaEhbuvOPL9WOdI0edoEqmVaRNV0xAVbe1gg185A5YP1OHKs+NKD05qVXXXsdJJ6GqiQtaU5bUYy4ciZIeN4JGJX8Vw2oEROBUixBmJUOne1xlY+cDLSVixV9adkKpGWUs2TZ1vZIhtiIrWqbCxxpcUkkc9EbiOJEhUTZYa6/7YAlkiKXyxQdrX1iV36jbJ0AJfgbAMhT0pktzeRo4TZRKXKokQrSWrIO+fSmubKeLvDPZWP8K/u9YEAtAhJdp10bPfACcqnJwUBTCf1eSlWRIjWq1RJQtRd8NRC+JVfTyVYW2oACOZnY0oLzsW75rItEsY6KhN28JktUdK9in12b7+WloSxQhFT8zcTnka4IZqcGHIShVHxgJrsBJvAeZA0Tle1cfke9RJRNZgVt/g9C5/Od0zGQ4uvOQr8QJds/8knYxFpuh77RZTOrIJOVDFqrtYYCcvAFh9/Q+X+FLxDmb0uhzm2h72CsExaveMmJvB0eyoCds1xLGFlUugEfwSoaXIGkO4TdYURorqRf/MAbBzTQrIF9CDP+kFFOvkpbDPUtXr9WQHNVHJ9+tBuqM3sW/1UCKgKpSqQpRFjY6iCe+1qolyCmeuUKUtSRmyYvCU6/sYbSsBdRpDrVoC0GdVLfa60YRRzo4d+I66QoNtS4upXRruevJhqBXFB54yAfC7dvQx1kKqgElc6Hsn94eVEiGkD+ptSyMy1boNsrrYcCwq2inD0aycHi2zmoz0gaw9ttWnsZwgV80CpN3w62qRMGtI22+pO2v3fRKSYgLu+2pqVEzIQQ5+cyxZuYtZhCwKHwsuDFnJF4Bis9eoaFe6xZ4prAMcyWkUEBdRbXUo6oGGoyU9bJCCSTS0Gl610SL1eR0L3zVuKq6o552AKkxRm8v15F5JtdLHk56iVHFtZaFZjJ4w1P5GPosnU2NfBxFSBnBbU3wLkxH4CsF8IcLuMS17152oJQxs3TXt9mYugXivvj9RUelssuRyiYvaOqVWJaImyoanpadVifM9f8SN/Tlf1CnTqROnJIe2+Z9cXuU8RnYQo1iQRcUkB8EkJdT9AqBIKxSL1kpB5yNyojOxcoIinKsakAAtUpVRPhLaa9enSdE6hlpGtUVlbVkqGBGQwrNtMhPn0HXYAlRn28taWFsVrWWNu3eojF2nGL7MtGhDRIm6/dTvWdE9PvQl/PLt1++fwyLELeCdfczKYszSreUy5DZZrmgpXFeXVa0FpeNaZtZk8UwMPKQ5UXVnvFo3FhYFCGvZSXqUiIVp25FAFGpdJcoI545OCkTLvJPPxe6Li4O6DerOyrCw0ATQDrKoQ9vbPl0MYU7atY5cizcC7W/N+2Wg7reK/iFD48KQlUqZ5EIyd0clM8FD7hS4osMWssWFOYOmWTnQJNGifY2IlndQpJuRQIMm1ecLteY6C9fii9J2w7LD9BuK5XshUS6amBhuYrRhW1+MdSoEPYvIygPIDZXhES92inj6LYokEkPJSR8UimLeEhmqW0fXWLlKiJBqa0raDQ2WQoI759xrrXtABbVqdDYklG7WcSr2EpxeT5AdxLUL6RAo9iqUqWcAk3QjHogWDqsOqZbQOywRjk3K24QG6xhRyaxABQz3D3fD0fcqqu9jG/bbTQ7H09QTuMZFt6cR7Opw4rImorwcDzXWon7B6qMJlZ2rQn+vDNLF+yxJ2EuZZ42ggwLO98NlpbUhrS0mbe8cB/dSIVWsOt9rclOnJmijHbtaOd0HJUInzzUazXdn1BAT4+o7bkchNsRCE5852hXTAeDOXdM6o1RdV6L82ac5AclbYtFZnR1wW4a
a49v8VbF2K9lEhLtwygXAA89dBIRQzuTr0Pm45vwc3ZXoQ3BhyEqRQk4UwiG8vBzkS6Ysj6S09wlpeWItu+7aOlO7owrUHWCybEMHeQdRkC+7eb6LeWuh6bazqgNZ8/ZYvk+3jVl6pLp0aneUxqJVUd4kdeLnZC8zEQe+XRJe9fk/+8BXKvWt1ROSMXHMolqVipDtA9RN0f3WuhTLSkJuyHLWJSr+89T/Cx3KSu1sfp9VqWfUReN+Wl+piYqdSVZjS1qRDnGw/UHBJ/P8FnadvrKoLStRESFaxoiKWkQb5ybBcB6rgGJeW6ba668JY1REOozYBdtt6xMkDxIrF+3zQ31RJ4+FZZUYExU0BL73jJOTvjVnpARx0nEuC5IzX5OQQdvQtEhCfxYh5KozYp99odCdLNJ6bSAmzOWi3aLQUUTVcoUoSWoCc7DfuabO5JCRDk5URMKWU0IGs076rEXSbDIYF7FO6lbOYqyP6rooQapimdyNDOh2wEizn/R9dcbulkgX6/tQYFvOgIgl3OrrpIqGlFBnQ1aXmBEV29zrWj25o0koTPM/ZQXVWpjMzG9Ax0ZFPSunzoUyi/JrollQVFR6Vm9EQAnzW05YYjTrvyT14Ft5cr6Y5IALSU2m3llKgOcm4QIwjyVF9ofGmhRJFhN3Xf4XgKIgklPT5UbHFkxYyX/jfI/XwWboaWSQkyruEhWfVQWAEbWTN89JHX0UgVg2hc8XRIYiqtPq1LdAVAySMpagGBVW7QX01cfL2ljHiNYxkjtxmzNlbb5fcqh2QzAT+dr175fLlg4XfOHPQ8A7dNvFq83qlEyxp30uCyzfb7vF7fQMNnzZbyUdi/GOsSzV9nHGMT0uL52wsjOJ6Ypy23PIi7P6YEzEpP2d8koLcEWhLg95pnqJqDhySJGrxmh7s0QITwKpJ4ZNdCelo2iTarZ1UXbv1QMJuGwhKuqkqGpNHoLWMl3LFtrfzu6XeeoNWhQ1p0SUszYzti7Tk/7fuG3hRc83XIODr3PJ9+qOrdUH8HoiFDOWw6BnAOTnJt3C+kq33PIBoJzRQ0FJnVoiAgBl0SYKs316ujNxGN25KNA0fUfGdipXKgD8gVuVjZupQhuKXBlr6ehtgM5lwmF2IN0Owe4sXIsfFnuJYT70gVtQJCuKj8DoCClqH2lRWHJBPeDpgQJ6u61L6csM22l7ZH7O92srS3YArb9Qa0AtI6R3qjrB22G45aa9UGlQ9syWNxC8Rkyfwusx3DVDSA8RFjqmirRFRS1bTQpZQXj0Ff+v2xF432hlY9vSQXVEReM0tSJ5fLloXCsiS/ymra99Ds0MuuaA7ruuxrmrdXK6BmsNMsAkLaLJXkWym1lZRGfVvSpj2RILHQLD20kTICtDNwf1PdJ6YW39sZe0uEJyXW5x0dWk25906gRg6lMoKRzlWlHK1Nawc5YLmvg1facVfUMdNUUDtffTdOmQtaSYR51JcT32SZ6CCqqo72HBRLJcs6dWlRb+2ykbpP5qiPzq4pCVWH5RXaSFd2Lxun6RVUNWKCJIrSMjrTzPd2Dk4Ygj4zzZAXS2UKBNPJUv2igDHlYqregaVRHUaf05WdaZTttZW2Ut9WW6jPgqvjzaiEJrvfeRGDPvNDhztxZv5ItcUdu4ol/yZUt5UmyUcyUmcqPrcy8W2N2u9SS0cnaTZ0Kt6/aUKtIELcrb8O56yYP65WsjWey3K9IqH9/zp/87OFNnMIVJPPNGd7G+UncCxdwiGgNylwyyonR2Wqb6gXWFEqBeEW5UIVKoo5VmFeJ1pJ9FntdE1xe5DTQ84R1H7RqCfj9tC0arb2EkRnAZ2RYNrmfpg72aPCA8Kx73kCRkpUSZcd5+t9Eu+dCWcYVJE7h1RrLitIuUUrlhbll6R4nASG6jrovIFH5WcaTzTXELsHEEIwR2BJMU6UJ5ToCudUjDWigVPEooV4iUavvZokC0hBFJRJYQu02k6+nmluLWpK7LbXVNdVwynevSbtQ2ceXqmhl1y60l9VgaGe+eK6AAqN/FMjwp8MUhK8V+hWoh+EodnZ4mB0nd8UdFu2SONOEj8zLQilvjousGKmf1zJh84HT+ctE85L6cFlKYZxEhXsZQp/VCa+q0PS/QWmXQrJZbW2iszou7LNgDI2XDnN0203HbBMcOtZb0PDWhiY3vGsxC4zLzkj6FJ3Izo7ZkV1d9rY6oKx6x03TUZFXSHbOjbrpO+6WmF9lI5Be3z4Gdwr5tqzXbG2AdEcOSrXwmLjJCREG31mHZ6CMUsapQeshEPNAiI9XltMZYqFQFMLdbfghx0cCogBFubN9zm4zQu1tvq2A7AAZZtFxtH0BahtU7LmJoU4jrrjksoraryGc5tXO5SLoIAB2xLkXxddrCUjbUbltr4iksWxHlVZMlvCV/dFxpLaPhvmbVioEVyyeTFKiQtKJZ2r7MgEWKSiltZabFSGuLiPOWaSJCYwJfliFf1BMePqG2wSfJhYqwfKh15ZAgHRAIyJFcn6tPKgdYVy8MWXHBPYvsdj7avCvVE7UDEN1fniOBm4arJoyzTuFdATPBzaE7w57Zb5PnolxEKJYxFAuHjvOavJDlRopWANqZGSdVlQKqwnQ/0ZpIOrzbwXrt0G0yKRu+b+H8hCivDL97H+yEej5IuU/s7ZWKAGl1aZY7IspldxIHJRDM9oHywGXp6W2yE85nw+Ni6CMq7YbxREX6vBPwNnMTSWNdoXe6nLXRUXptntPYeMcB/q7K961SVRNN1EYGkXCXjufZZwk8LFkiJHZuldCwZQk+N3fckOm27LDfi1tKugu6ysfYfY9rEiG5ae3Em3xRRH0NuZkltz5nS2D4tratlbFNvz5kWUoi8zNvl+M7WXcqRIYAu19n01yHcU1x066sdv8szWPqdc5q/cf6KEaxMJfw4P2BpNMiaz+R7z73sR1RRlmx+XIdshW5//ka644FLhBZCfXftx2NLKpry7mJBC+jGoFQHVHQ1hsVUU1UHGuhkOWEW358xCUCUB4UKA+g80cQuYjXQHobiGOTGHAiAtSdYqVMl0WlWl1MVJCvPzKONxYyY0sVEGOvmLWF1lUyktGxWZSRittlMm3K6iileTwo1E1ydXV88vtRPUis2uUY+Ln5//QuUKpW0MzXiKo1MrUfPDuIdI6eTdGxvvi0gEICNn0tA6J3fETFJiZTEBVuUamKCFxwG+ouilSFalaimJV1fda7VjXbSq0/M983Y/kBlnHWtshyoiNNUiS4fjOfINYmVFOCkwAARvgy0J2cSO6cvsgiskD6hLpDPttZxEU3Neo115wRmzCjVDSRUcyKYrm6YR1jZ8Q2+q1Z3N3GIBEYvoQHuaVKpLV1ZQGtcSnnKcqFwvpIYfmg0vmb6uuguuT/9me9TXKFVu3zTAtlJqftmmKUzdlnIfadUy5X1WNkIC4MWZFg38zWrDv8WAlRUf/w+VHb6bX+7crr8hlyHrtcNCtRIIZacpJWE6cE5kzOmNQ1nU9c1FYVW89DJlF21iakuhmkLV2NnbvBICyF3NFQx8EjAnzZL2kFaj
B3lGTx8WlxbJEs317MgfiuexYH1KF6URIZWWU51LpCepdm6C3RgwIguBfMNkjt3ZyoiIP9QKIyBSGJlWlWKPnimI07qSoiVHmsJwgRWf1YkrmOK4gJULSYF9D3w14fqCoBsJBkgJ53f/slvYoksjWu0Rrkh7p7OtoYIfOrVJbK83Zw+PKpAJY4HzKhsUmIa5FGVxuoXN/kQyyj2n6o46pW6IiGgfYd5kSDw74fEiEy1+dyT65c6IaYW4pDQ9/XJnWrkhj5UYrVtQTLB2Odv6m+Hj85cWnkXEQ5KhorfdaSllIYH+r//N47L7tTdlNcGLJSxVXvjZnixrUWkbq+7GpldHhVXAFcOzNS5OhDpSpgr0Ch4nrdE+YmqVT90AH1WGl3pvw1cUU46H0s8iUqq05kUtGQJENgiEYIut+af9Wq0tEDlYpq8gFGBnqut7aySPeh6VADiIqEUtVhoX1LHPRFIgH1dSan0Hl1uvuHmTyDMFFHMCTqh8gHJx18e8jxNmEpeIbaxjIS1CbLLUSkJiILC7OyVGn9njbZKUzXj00OaR2hdWvit11A3muUxKVC4jhCX2gx0FhEVa1H05YdMftr294++JK88dQGNqToIaCfgPByheXO5hORMoHON0Vt4e2oV3zuupdKVB1Lju1e4tcwNO8S0HUpAXX/xPPgSJYUW5jbkht7aYGasJRzhewoqd0+s0inv6jPbZISl2vFR1ToOz3X6Umtf6zbau7nddnrjkkIcgc1BD54BXdcILIyFXyzID5bKwIIyRSDia/TrkgLs461a4cIix1K6UOtrWnOR4TDLsMtBt0atDiVVrq2c0ToCARhdieHbFove14hsRTtugO39EfG4nNGlJRwntx9j/rWPeEkJi7qDlYta586idFs60qIe9GG+FIPDTkeoFFxWVQkQhJKUuxjOGGp21CHzFdFNGAdVg/4QoYMlapqMtnr4q3fIb54of3frLfdN2R9Lz4g8LXGALKEmLNc41iPPoKOs9tF9bbl3ITF3eauBaOvnGsf5YvSScKMJVNakmFc/5zXy9sC7ZJ2hVUTeARlH3GpkghqVbuk7ezZUmg01+Tp7SwbLwl7pQVSqzhCtRd5woploiJF2kjHcBBRidftmGdPPNdXuhIHF0JJivQ5BBeWrAy9EfaxtjhOi2UnICZB2oIAaPP3rM6gUKVVs9pvBBU1D/kpdA6Z+tzmvbFneJRYr7I6Zs3CYW63O0siD2Xamtp5iGTf7AdAG07cQOoY6vVB2gRFfP0jPlPjsK1MtP5Pu7+/47YXcOOIikrrgrIDFo7eEJa2DS1h4QOKqKOSnuMhRGUi149NSsaQFAmkW6mtImhcQI05fIB2hdxCEdeIWa4gQOi07XINaCCIA5NWSQLbTpmAW+YiOvRc+TUy/e4iiUzZKQ86bbLeJVf+Jl+iOKkevV2wDoBNavhSFgBQsOgb475nAFDVy4nkZptciFclSqaJ60x8VN1PcJJCfY9uA7OqhExs+Fps/Dcr5nWWV4rApIlnXxACRefYoflBYlZmLYyK9t7ybkdayb2tM7DPmgAXjqxMdaN00i/Xmiv0Iwni2VHns5JnDQGJDAmFilGpGOntqE7hvu52cvlh/Z9n/OTrnOj7mEJHGpE4F4DOMURmaXIVAV1Xk5FpN4mAVXudPgGe3dH43DGkJzHcWEznYr9kFMljQyIszogAoVOO8zoz7vy4tq7ki1ag5iIsLvRaU5qBtnK5TbZEVPrKlEXcsZ5wlEWMYh3rnjVKyvrcikKi4/p9SJrfdMDCh4MhWGDIBWTPMl1kIVQUy7Pb+jLddsWp/efi+VToMw3uZqr+ymv9sSP9+Haev4ngIi52Ob7fniDwAbkON45ql11WiW00CX67vSA3+LpNT+BzY1GCM6ArDLbbbbiT7Ogkz6Kqdd2VaIEpGquKHcnUaW/Zur3s69fbrFc31OVcJkCsgMiKQFo9QLmcakISQlKAAePviLHuwpAV7soYDD67CrmJvMwIgawNJ0kJTUleRUycWBOXsoiQ76NOloVuB2fnAKmFr47qY2ZtEjQw3WTUlV5XyaineaGLeRtRY5u+CS4xoD17c83mKPuvmQMlaqwflZEvxhe5MNRErgkZ6vuULIGyqCdTLsIidT7Gsyw9Hw69RduQcLcPIBMVn9uHiIivjKRvydeqE/nTOXdUd85VHgNV5b0ejSajLQDRjRRl/DlotjXaFPs5VadRJz2B/dk4NVlhGtdrrwbK6qdcBGSIOwloJwv2/1CIqfMd0ToScRlyHpe+xbA0LKJmTbX6O+UJAYCqhJGMk0CaMbUE4rxto02qyPXD28Gj/egaY0bsXJqXEugECnBrcKmaxJFWjhjK5MtBbnTeh/D/QHclb5dGhUMW8rf/teU8q10/lCtMrmsDoiJNuAJxYchKMMa6cQaQkqCw5776osBOmsrMC0SNLTBKKuCwQjGLUd1RQBMpxAdFCrnudNQNw+apkgn0LNqzQlqEUSIttN84l2IJ7HJ5mXA7CqFtg61BME3F1BHxFNHFLNKdYKVo0ck2P4xNWgA3cSlmbZpxc22nph1skcO4AFC0nQBlMDYXnux5VjykhCcdNCA8M6EkxWdF4fuGuIE4abEFtSJYr6sJiIL7XWCp97WwFqbVIjIsJ+b/zv4+XuQaaC39CSFIN2a/I2jr6WTANc5vknFjDyMrfKC3YZ/DHpz73gt9vh7tB6+HrD51Xd0JA2m+KD0Ct3LFBRCtK1D/UjUaMU5eikVrZekkraSUCjO5rXwCRO8q9UdmRKRnAsqIi7SsANCKdDlhoXsf363za/FIRjtzLH+OK9V1A9Fx5ne7LW36iyIFsiOyBNtuxA3cPRMFAlxcssJvUJ+rZuDNHCM0CiYnPfDXw0I8owqYlShUhXptiVgTlLqNXXNiicYkWDSZCxvmTj7lgnUWACMtlosopm0eH7rWtlAnYLlkXNaNvogDF/GoY/rlzqnPZ6/rU03mSNQRCT6zNyc0tmLfCBXnIekNcTEy0/L/6D5b2gUUQFDGunmm0qdwkOvHdh9VOsMnu+YiAtZxnVzRigDSKy47BLW6qBDSbLoM2/1Gh+9wwdguIqmMvi7PM2trwKTnJOR42QrD3EACGTLIg22hYPUFhyQj6pAODj7gSuG2kbaGtt/1X2OpJcsJT4Kp1lW9NAqqJry3dYPV7iSAExs04lq+inqcm9o6CZLOB2iJCc+4Xbe/6xYn64rkNuLkUK3rdXjqKKComQzWFseS+uHYnPjYWWXNeyxNdur7UqlIr/he7LmTJAIBBGXCUGUbW5LC1HjnO9+JKIqMvxs3buj9VVXhne98J172spdhb28Pn//5n48/+ZM/mb4hW7yB5w0R1y80AsUqrVDsVU7RKf3nf2UzK0nvdDse8u2WCsYKw5SKuZjVyYQo3E53Us0fPwf5g2klaPoz2xiJf759tJ+ToA5hcpyHrpFfrw/UZn0tzbUVs1qwlh20WSD5PR4Kg9g0vzMRFf6d/mLrjyNWpf5zbZf2q7iEirvkhbbTPqkc1admJZJZIZKgKLEICb92a
7thTbHr2UIae7M9/n1jft+xGJIw0Y4g8bWVv+chdRrvu/VHJIIyr7rOTVFY8bpJEV+Y/Yd93vp/S5J4WnleLt9rFt1j11MmteunoknIPNLb6DPvj8rGKmvfEyIpfB+VLWYRSrbGGScqtKQIfZYstcbvkZohzPyeAcHzXeve1VaUMgHy/a42xTjGxRYavdm2x9mtW1Y+7dM+De95z3v0d8UWWPp3/+7f4cd+7MfwH//jf8Snfuqn4vu///vxRV/0RfjzP/9zHB0dDTtR6M3a8IaGuHeCxLI9T5Zr1VoXaDDi0RX18aiFi2mF/JA6g0h0A1GkUBU3Zvems0jvyBYCoJk1sFmYHfrNrTHamsOWtietjG3dcIn4pMRVrvK0rc0y2+zns0sh0kFra6TZY153Zvm+e22OMo1QLBrCxjNjVkRq6u8hOQsIeubPn4Wo3RaqORlTxkVOQspL5aRzcl2LspanoMRxANrp4zruWFPa8MseMaIo2gxzA/nEtPY+20risrrQAqp2+0KeC3PW3n6OjevuWjh9kSI8JNpn3dHPMXOx6vMwwT2VsZOM2Z9prTIakG1LLp2jrqd2x8ZN4sraBdSeTy0lUWj72eUC4i5qSsUgLRXAc82QRYkL+m1NXJtkD4ZAPypowdS27rpc7aZutXdE+Jr62aLNdB8la1X9uRK3c+SHVacs0ENQxuA8a1aSJDGsKYSqqvDjP/7j+K7v+i582Zd9GQDg53/+5/Hoo4/iF3/xF/HP//k/F+tbrVZYrVp/xvHxsXziKSJ0AkSxEpkYsmqtr+7eugRIpKWa1297VcYo0zpHTF4CyUncSXRVNL7QGM0MqOlAkmW78BUXZOnj2HdplVi9DlF7ZZqw8GN5lkpet4Sh23m9LlNv3RZpZl93SMUiqq0kvs5b1QI125xtJ1Yyjxn4vFpEZQpiQsSiKGMvGdkGJCtOS2BiPShXd5sb5iAqQH0vJY1K/dmvl/FF9YREAkmDgSt6Re8bYd/254MyNSCGuysgDLb+343y67ZBmP3rAdz9jqk1RKJfu1pJZMqstdY1KCu6UVpnjKKJbELF93fCu63jaXV77q4Gui5rSnJnXG/OBLhNkju+BpSeELGUC3aGbiJAALRFmvrnqLSfeeh1e4xrCiAqkpA2JGu2F1uwsmzVDQQAH/jAB/Cyl70Mr371q/GVX/mV+L//9/8CAD70oQ/hmWeewRvf+EZddj6f4/Wvfz1+7/d+z1nfk08+iatXr+q/xx9/fCvtnjJNMIB6cOkJJTVcOIGQTPxi3fs5qqtZ/RDNSlSLEtkDBdbXSmNgBdrZi7GCsELHHcQtLZIvnKw03FVUzForTe1Trv+4e0gyI0t/+SJCvqgtHFSvO/xQrpfaa5tvyaVDptzsIML6SqTdW7YpW1/fnh3y15AK9vNLA2b/INojSmWQXDkuomK7b2ibhESV+s+1vw8uN5ILZRHXlpU8RrlShoA2BKbFIvw4G5KFhp9Dsjra7lHpr5x1n1fjvYnlbfleExafds/P22G6Frrvuf0nXl8cyX+e46T3jEDr21DbKW0C/Ul5V7h+BWAuJ6sdxrWz/sW+Xrou+k/9T+faWZ9gbrfICbtugLmOmDmA+hKqi/oZcjm56i+TxqXMf+vYnDzabb6I2Kpl5TWveQ1+4Rd+AZ/6qZ+KT3ziE/j+7/9+fM7nfA7+5E/+BM888wwA4NFHHzWOefTRR/GRj3zEWed3fMd34B3veIf+fnx8XBOWgT6zsWTEaUUZ4jAU6pFAa6fQ5yGQypcAcJS1a7HM6gyKBYB4bcfZmDMd3W6h05YsLVIoNJUpFkBZNn5pYYbUfHNeWxVHurMGUOd6KWtRcJxVxsySu57qY1ufNrmhJPAONjuIjBVOk2X7uZjVBMVOxkQL59mhsXFOM8VIz2hCn8WoiGqJneUS6iMjIQghGiHl++rJG2uJ3a6irLdLYc60blAEmPmNGFyWBtPqYVpcbHEzHwztOmwiEJJXxXRZym2iOl13jec+0vUK5MYGfx+NlWgcpIsTBNv1QvX5QOfjgl9XOSOLKbMWUOI7Xs62HgCtJbZUQDVr20zXR1Ybu9+idhmZgVMWuddzbWIOJtK7MEsRWWO4ldZevqBE1VlGwCXwL2aRjiTU7WbnG2tVGSyinShilp9/yDi8VbLy5je/WX/+jM/4DLzuda/DJ3/yJ+Pnf/7n8drXvhYAEEVWp1NVnW0c8/kc8/ncuX8b6M2DMoCohFpOYj0QhZZ3Lxhn11kCqHLo1OZV3D78oqWEdeLapdHD6KPGlGuHicZspshfOFckg11/vmfNMJROXYLsAEhOo1oUnJmJsDiMZFme2SRgmqtJ/AfUbdcWFocKv/5fm4MrmPe2c+8CbJzSi82fj20RE+nY3JHwbSxUXGrCkq/rm2PkY5HAXT1WmLJbV2ITFvo9KAV6vR6Q8Vsl0Gn3Xe4cKmsPIvpzJW/3RQEBbpemS0uio4tKfm3tfvuXty0tvFws1K/PIxABOp5PFIh0lCwTONC+V1EBoNFhFIu63TGFKsf1u4600bNY1ySRS/6+Sq5pWf9ikhhql7Soq1qbJKRStXXXnKxVLDKy1eKZ5MQdmVhaIdnrKxHyQ/Yq2H1HZD6XIa6fYJJwRi4fCTsNXT44OMBnfMZn4AMf+AD+0T/6RwCAZ555Bo899pgu8+yzz3asLVMgePYaUo6Tk0lCjmuMXeXWlZjLlUE0Jka7ttalUGxYT5rOQLsu0Clrf5by1/GXn0ITjfBna7YEyLM/sqRIVhz+omaKxGemIE6Gq8OAjloA2ugE41xCYq9uPZW2svBZYpsMrrX2GFBVJ0mhKLBtEKt+10ooQUlUibyIjfL2901g18NJD7mI6Hlen6aocuE3crhzbDcPX3TUBvfTmySyQjmLjAFY643YQMmfdU1WWW/aeUYjmeBHBVgSsm577Da27TTL2FYUWmLDZxFxD2T1f5so8fdTKi+FJFOuJm5x4RMNHf3DLCVRCqhMbguF7OprdOiI+Gfe74SiOxGLGmtOM4lsRLXcfWRacSur3V0RMl2PhopMrd4MWF2rrciGm6h5ZkyxPv3fjKj0JqPEgP0Dz92HnZKV1WqF//N//g8+7/M+D69+9atx48YN/OZv/ib+zt/5OwCA9XqN3/md38EP/dAPDa57qEkJCLdytAf0C25DMdytM3yw8Fpbovp+ResYcRYZM0uec4KLbWMmEOOr0RZ7RrVO8FkRzdzsDtkWB0rgHbT08pMvnJeVjnfZBnziWf7Z1fnRoAeYL38VA1BtHpUqbm6W9Cy4SEpUGdE//DcOIRRjykjH+OoZYnUhciShTiBXwUUqObg1JV5HeoG2SrWdeu2a6+ZZ4d9DBzQiKdzwU+xV+hzcBUizXSJIfL8N3gaX5dIgS5aloa7fJAhA153iS/fvapMmJc12KTkiB7eAFs3vEFWAOu2S/6iEYTEoGmuKWpskSPcdzXsesWO4pUWePJhlbUjuIFdfxImKVMaVY8pefNLVl5BOLt+TSW6hzOPHrIY8yN2zJYnFkHFzq2Tl277t2/AP/sE/
wCtf+Uo8++yz+P7v/34cHx/ja77maxBFEb75m78ZP/iDP4hP+ZRPwad8yqfgB3/wB7G/v4+v+qqv2vjco9bZCXTrbIOYjE285TpOdgO1mUQjVdWLfTVhzRF7aWyXjwReJl4zN1Izg4wCvGMlbPN9W489G4rXwGwtdODLNocJgdwzXF1PpmXFQv4kE3ulGtMzmDZFmR28HcYd50DFZ9isrBiqF/j8GEQlqjqRP/RfxabodYwVJBkY/ZOXbkIylMg4y89yAAmKKuoKa4WBXp1GyK6W7YroZKFS9RIUUWNNjJaxQco5KkUDaaMVW3eKADBntQBQ7JV6HbFqUQJFhHgZtxYPZl2rz0uz70gvGgprALLPJ6JA1y1ApL/57kvqJv2KpWqtFxJ4/fy9A6B1XOWsDdvXfUXVHs/dOHHRvnO0HzCtKXbfoAkSu/aYUi6gOxD7BNK8DCcedq6lGEDJiLOd7p4mYELNRvSRHJHVau2qWS3m17mZIr/FZAhB0dubiVN9Iaw9vLzPAjMRIRkykm6VrPzVX/0V/uk//ad4/vnn8fDDD+O1r30t3vve9+KJJ54AAHz7t387Tk9P8da3vhUvvfQSXvOa1+A3fuM3hudYAfojaUL1Jdb+vjTlZRFNRkbMtOTdz0Pgy2VRu4hUu0KtdHwzO9UdStNpUwp546VmxxlmSeZGolmVPXulj3ZWXLs+Ap85UVSE7hRZZ1gmpg5Ht+u07SB7k72xDpSiD+xOOM5bkkKrk1YKzjT5Q9yRUXNRrZC26ohpOVEJsppMFJKcxKWXsDiP87iBpDIUJVrR/XBEAxFR6Sw82uRsiVRVJ5xLSlTzCNVKIVq6298um8FcP4DuMbk2oFI1QTEEuwDKhrTUDaxJakXXoEvVbieg69Jpz4OOGFtbPddR+xIp8/3Sz3xjoSC3EEXnAV3Czgde22Ij36f2czuwmseQ3ocIVzFvtxHpkQbcluiZ2zhp4SREysvCLUw+XZhL22NvoyraaKTm+LJtg+s64kJeE4nyTEXN75IdANlh1yqoy6/d1sF2e9eya8BFSkb2VxyDUncM0XtWVTW8NecIx8fHdQjzT/8bxHuL3vJjVzZ2raEyhkxMlb68T6dQOAaTsojrSIs8NmaaAPRqs+QTBerP6tTME0AmU0p7vb5iJkDTM6cKhgCMz7Kos5E6aYJPpV/MaMGt5n6s6nYSoaABhYgXncN+weO12Qbb5VPModfL4INFvI60eb/j6zVmQn4CLK0qHGJNIfjCiTmmIiqA37LiPS7ARURlijJGvk6Q301al4lAVmhBwjKtgMMcqskrVKwUcKrq32JeIFIVklnRhkPfThELhMW1+KcNGkTKg0KTok4ZR1JHyr4b32kfNmOAt6KWioVgkSsiJHdi67j23QLad1itWguoIQIW5io+4kD7OVmj9kqr8xLonnbeBTuqy3i/3O2htc5aN1u/JdjnBrL7GVdkGREUO08Mr4+7x6iPtMH7izhrSRhZU4ikaN0bI+FxVvc9rmjCYPeOaFkMn0wB7ufbLOzeX54u8dFvfBdu3bqFK1eueKu5MGsDRfHwHCWEIfoRKT15aNk+TJ2Iiyf54qjbZfYAtuuHm7m5f5R3DmVRZ7clywsRBLViMxOrM9QdBjNf+2ZvrjvSzjhbQmLPukiDQ9dU55OpjPPRcXb7aFaTH1ZGR6G1JHHTgThmJXrNHkBbSFyIkqpzo+y1c2wRbZ/bZ0picpYo1kLPOyuBefujVVWEKqqQ7uWmhmeWY4U5cFoT86iJk6+tUwAeKpDdTYGTBGoZGTNWDrd1r6qtJ0RUpPWZEodldt5MduYFcDtFcidu65xV4AZy7lI0oplUhRzoEBYSA3MYuV6s6DXftfLtLjJSqQrVghG2dT0JovchyqJ6zZnmPUqOleE6pjrIkgVYriNmYWnbUGfhlvQcBH5PbHeVXYbCpwHocHGbZJCL2D6nyyoDtJadqEQrNiZ3D+/30tqa0pKU9lormvxYiQ5psgRYa4oBbheOZxJl3JeBpKXdILwDvvPE4WPvhSErIcnRxtctR9uMxdSkRBqs7CgLDiIvVRHVAtu1Pbup/xsm1jl0rgDbEqJnNhUQkQWjGX9pdVCbkFSqLcvPSfAJ5aRjSNgYs/PzENR8v1u3S6EP1Nda7FUmIfEQE6PtqkJkuW98qHPpmDMV7vKxNSmAIIINeKZSltQmaxgobePfM18KYOF8Q60sqyxBvk46RES0OBkK1lp/Eu3nmO1Z+cYhk/N0P0MGIDpVtcZCVUhm7YM3P1qh3M+QnSYoGtJghy4DXWsbUGeCrmZlLxn1XWOkKuDaGvlRhOhWiuSESAudq6oHurQCmnNVzXFVESGalchVhXhpvccW+a8ioErk69DnEXQ8hruLi8X32hNEtNAkRV9l5BeN2v1oCQBZbuu8I2R5jXRuIn7ukllH7X6EXMt0fS7tjnk/u1ZU+/3XT7ZqrSRENqTy0ndJ7MyXEegcY0WLGUQF0Pc2bu5tOWMW3b5xz+HamTLARBfpqbPTFw5ow4UhKz5sqv+Q6grBUFLiIh2jBJOefBjULj5Yh5pPqwhaYkYzDaStdcWoz3ppbfBwUDquz0cuQeyUonYmZl5PbV6W3jt+rO44mEvH5c6xBytyNxBCnhlbEG2TFN8zIBEVTkwk2Pv5d3sfJy8SmXERJR+JKVZKk2XqiCtAJISGC6jZ5suyW5QxyiJGmuZQcYn0ao7TZA7crQlSYs2yY1VifrhGuZcju5tCPZ9CnZqzWG61A9rZrmvFa31sYOddAigPChTMElgpsvxFKFCi2mN1Ro0oviEtenmNoutaadtsfzctJZXwnGsNUBHpgRIA1DETdNG1MNcxfedWnNYV1Fo5ubuHk6VWhGy2nefBoYmQ7QbzkQm7b/FZZgBmIaYV6B15dKT6ObkirYqdLdxsS6uBou+dhIAH7ATsfegDJyj6rg5MZGpDTqMw0EsxYGy7MGTFl8WTl+nbF0JoNrGMDCEeY8qGho0mswLZ3frnp46BD/hGtA+3gEStNoXMsOUMHVKiO/ekW6cNaZ+vA7FBsy7p3bNzmmhCZeXJqJiQzfYR1+2xZiOR+Z2/pNwiEgoiJvz3s7UoLlLQR0w2hURshlpfgJq8JKrEKkM92Bax2dk2ehRtSSCQ+yMpke7lSFNrcRZ+jkJe12jvYIVsViBfKyxvLhDPi87Kz7EqoeYFikViDKydWS5vrwtVVLufmvaTZobDfGYaS0nRPrOVqhqRd1PuVAF77EFvHt5I1VaXEjEiZUYzGU0SrCZ8XydCRLEoqsYyVe0VjT4oRnrc1cvQo1LMu9FANqTkfaFLUdgWoqho+yYeSyFZbPs0Lragl65Hb6M+QyJBwsRJ17FXW2vt9sjXZP0ezA1U8W0BGEJQhiYtlfdtxwV9YchKFEBWQmDXsYusoL7jx6ZBp0HPF31Rqaru/IoIaES2rWiLz2jRWTG4LWPOZqqms6TU2S6zs0bR7VScMw8hmEut2OzN6tNoBlb7doX6lB4G6tneQak7aA4emUPfgTY6p/5sHjO3BtQhSdmM74F
Wk9SnRA5EViqkcYGsL0wKXfdRCOwIIk0iqSOmmSWYiLYZ6BeHa9EdJmGV1d1a13W2Rp7GKPZirE9TrG/PkOznhluIzknkpJw1Wov93BAVVjmlYo3AiUHUEC51JzYITzlTKNOqHuwby0jBxInVzRlmL8VI7taDZb7fOHwaCw7lRMKpAvZZe6PWEhEBLNJHHkwk6wqa4/izrxPqkeWouX7VlC/WMfIyRXor1s2ohehdS6YNbtm03R+Si4aLbgFZf2JcF5EFdmoelahB9VO5xOxfqgio5nJ/xLVvPPkgTXyorqioSUq+34S381RXpaNunvZAsjRKOhG78xugHZk6snXIOFzdj5YVwrZXi93EJTPomOY6xkZduJJtUVuKsqxDOauoDucEUBXKMqO2FpfOu0AzGZZdkUch6DBJ1tHYnQ7QEoZQ148r+7qkszHO4TENa1HjrNS6AJuYAO1L7YrMkUSvUwhde106ExAVXk9IfURoXC6jEDITqUoPqTosGTBnk/MC88N1h/wRJIuidql1PRXaWjVPc9y5s0Ayy81FHBdr3N2PUZA24KDQVhhCHUlnXcupglpGDmtBHVodryOg0cRI2i+gdZEUe2VtMTyNkT+Q6/tRqQpRFdXvroESmNVEqXIMgjbsQdHM5VEJ54B2N6GK9HucH1add1uytErvOBfQSsJSKWrGuAaL6Ngi3YgRl44OSRIaM9cNt7Ty42xIwmPex5UzJtI37rFcHy9nRFxRZBvQ6Z8q1japz/Kh7c96ixrlQ+Fdzf1+FNiquOrcCJ/wVEorHopNU5EPGcDGDHZEcHzZQVVcm9Sz06R+uPdzlM2MkKPPdcPNoyjqQadSdUdom7TbY9shSiIXY6B9ww7VPy9HbaDPZSNe5KZ6263D/wN+ggLUv9sm7hlu4aDPPhLh7RAccIW2bwKfFoaIC9fnkFBU51EBzKgHVYlRUPZ7x7dzF5zPPTpfrJFlSYcI7V9Z4i4A3E5rjYRFUpMZkBURsFJAkwGaz7BdLk9uUeDvDU9aSNovY4mIdQzsFaAIMzVv3Vd0L0ugtoqsY+160gSC6YJsNw8fFA39TdVcX3N+ukagJl5qRRYg2V1K1iaeNdiYUKAlBNyKBbRWHt7WKGsjX7jYlhOB1gVGMNfs4ZMnXodkLS5nNWE00Q2zJgs0t1CRiFhfK12LTQatCZwo1OcbWNQZz2INQHQJTWkB4RhrELDf46Fj54UhK4kqoUJyTWya6bMnKsPY15i9xxCOMWZ2+7xS+0z3UK5zTsSqQrFXAA1Z4bMU38zC9tu6OgcXuPtoE0gdR/85m5kbIypjrCdAl1QagtWRlg9+XBoXzlB0wzIwUCdDoDr5goIhbQtxGdmIVdmQlPp7pNASFi4anJXOmaHfberWtNjYW8gpaueHa6wAYKWa98Nsv5qVdR4XMHcWg6SVALpuj3qQBqJ5m98ozgGcNuG+qkKcRShVo+9JSu22MvV1cb00waw08vZEjauX8rpoWDN3ntcHAMpVvSp728YIyUsJ0tttuoJ8Ub9v62uMZFjX6wMtT1AtmmtYx/VChlmEaGm51li9pJWL7wI1gYiQHXUtHC0R6PYLvN+xCYMOkU5bnQhlHm6JV3vNlWUdqO+DGYrN26RJCCcjyowgBGDmXyIz0LyAYm5qXyoNn/5yDOGYan0wXt+QnvG+IyuTnMsK2/SRkbMSRbrcSNJgu1rW5gg1L1AcKCTHSs90bNEtz1tCL7Vt+o7XbZgeHcvR1il18l1XkQsSKfERFVHA1hAVNSsNkbYrEif09wyxgozFNlydNuGxLTRDyYwEukc6Gq2xqiCqdK6ZesYdAbMS6X6GvcV6lG5Lsrws14nR9nmae0lPuRdjncdQquqQVsxylPsRKiTAMtaDnhSNU6kK+ZVSR9TQs2fm8Wi3kQ6kPpasAHFjSTAHKp6VGrNaSxLPS62roVD4KCkBVd/rKm+XG7DXnALqQTJmeWyihMikQqUig5SoUyBVDVmIW+tBqSpEWcRIAgu5JXJC96kJYY+zCMlJ1IniM9/pSKdQ4OuVJXeBYh7BttK0hKHrzrEnU8bilnYmbf3dUcbqd8i6DDS/M4XdC641fWW+SdY6Bmal7qcIEhmhd3TKBU5DMKau6H60rOwC0kC1bTISUk9WKKclpi8t+mKW60yhABAdZciB2uzKwhbrwk0HkJkJtNrERM2M67A1a5uZFmH81yZSBlMvEwYXGbLLGB0gmZpZB5A2g1ef3iREQ7Jt7ZSNWdOmdaH0Z8J6hHXObr9EZoAuKbNdVxIOF2ssGutAXsS4c2fRWC9KbeWbszJTYTHLkRcxVo3rp0/UfnSwxK0ixpwRJiJBSTMQZLMC2WmC6nYKHNFSwRHil9LaTbqoEF1b1+7RPEZ5qtpIG7o9zJJUAVgdNFaQxv1iiFZPFcr9lqDYhKVk7qOI8tgUUS3sbQbKKClR2avYVFG9wnXUEhbAHECLgxLp7fY31TlISBtiaYSquLn+klkfLHdIvIyRnEQ6w26xR9GHkRbsdvVl7fd4zUOf2/6n6362w6DbPokmTWVatQTSaqeOxJHEr1wczidgqrbMVGgi3KglAToSblWp8tqqFifdCZULkiU4BFNbT6bEhSErSTQsVHQTTEZELF2ChCGRGfTZlwdDIi7zlNxBce0O2s9R3U512KJhVi4iIDN9wbZgtgQ6oYOuaJz6+O5+250kwSXedar37UtvTN9EVGhwHJK3ZCpxq42ZKoJJBicnNlFxbePg5+Flx5Ac23UFmM8w3UdNrmOF5MrdlgQI956e2byItRszayJ+aLC289SURYRkVmjLDNWbBEYUUfmrTdsIRHgAEuuucXSwxJ3DmWGNy6/GWg9Dlp7VciZn5CXyQlYQ1QpeSygjv0mURchuNoIXyyJYFrG2qEQsgyxQE5cKaAkLf68t9wNHrKpaVFtFjaWkSdTGLKfzl2odS34YNaJ7tjyAqmori6qAk6SugxGU5JT9ZqqN4CPikak6KkpHZTG3DEBtqN95Cgu23/V28UgO0y1EuhdntmAbnAh1JlzsvtO25nx2uLoEEjFXeYzoVCFqlpAgMfi20l8Yx+1oLI1Clv1ucHHIiiqQbNm1wsE74k0GrL5jpf19BEYSNRJc1qFElbh9Z6FXZC4pQyUz0QMwxI/cCtKJ8CHx6qwSOooWrS7GtHpQNJEtXpPRNRl3jpPEaCzSZNGILPtISYhuBOgnCCEYWscsLrAeoSFxncdHXFz3QQJ/hu1nl9/vvTTrPK95WROU0+VMr2fV0V9YIPdGqSqRAI0RuHO3khRefnV/abZbxaaVTpXIsgR5M4Ab7W/CnTVh0YQCqPYKlCrWaxhFTZqBKq3dOXlDTKpZ2UbiAKbA1rg5jKAk3SzLmkw1EUexKlGsUiSfmKFSFZaPlNrtS+LZchZBnQIHH6/dv2UaoVQR8kNagqPOQJvcrcmInRWWoDIAmeCCURHKJOospdG1ylr9DGluSKDL+gFneDDa58dIuqYqq+8TCJ4d9wyAHGDVrOwsoUGwiTY/nta6Iuuey+orTUI3JRzblioA0E
tghODCkJVdwSYPQ4iKpAXogzQY+M7pGwxcYt1UFUhVgXxRm+RphmcIvKJ2gC9VVTP+0vQJ2+IzF7gvt9PhdDoqYc0LWxNgHROV6K7nY/nA43mBvYMVFrMce2k3dXvI77prV48PM9bemdX2MQSmU7/DteRaysEFyW2UlzGWa7krurOcYX2a6kU3UUSGKJEvIGiTmLKIapLRYynrE7Encdlxp4YOBLxcmubIkqTN0cKvo4hMC6GqBchR0rgS7HeqGSyrIqpzoAC1JgXoCmy5u0LIv1EVEYq8yeFSRO3CkIsSmUqAWYni5Usks6K1HK1r11d0h0TGwPIBIFkCalmHwaqsK9Q3UvbHMFZvp338cSXragwz3JtbVOpra48h17PhSrFX42b3hUfmREmTuqAy+zJD+G1DzEbZHJdUQNIuByNpTMiSQlasqoh0xBfpThaz/NxoI6eyJg8V518YspLGBZItmeQlcL/92AiMoecJgU8r4Mo8StsWaY7FtRPkZYw7y5mxfgsp0OsXqwQO6xcrWsedlWtdmhSbdGgtPO9M7bJSB2HPejiKqJP9tG5TG5qZzEpcvXJXhxePISa+39FpqWgsHzaZ4CBiYZelzxLx8BEV1zb7fFPBdV98zzA9a3ePrVXTq6glIPQHGHlBzJw4MCIs+OBKEBPqjczI21dWKk/iVgn6Xamimqg0A1yexwAj+ADq55zqYcmHJK0JgW9Ts1IPjJRtl9b0sZc3iJISs71Mu7SSuKwnNrMZVvMZVgcK82cSqNMmQihtrCQwraySZVR6NGlbYa0Q7XIL0+Wr01ivmcPbL4UM29+1FTaPuxM1dCdVWihbue+3lP6APtuuy5bIxIhnhdbPAbVVTppQhWBbbuopkMYFogHtuzBkxQUposFXjpfpIxYhxGNTd8C6UN7z9FlefJYWXcYS6O6lme6Qbt9ZWEsRtMeVRYwiiVEq1YQdti9+FaMW3AkzQvGz9N1C0CyHkRMOStW+t1hjkebYT2tV3ibWEd9v6yIIPuJg75c+9x0/FH31ucjMEE0N0H2/slIhKxROGqJCugjDOsKtKJZJXrTeGava1eCWFd8s07XPXhdpLE6zFHePF0YiPAPcrWDN1JP9HEWzr2TkTQ+yXHMSWeGuVl2kcSF9GlSFonEjYY+dg1xLql7n6mCxNsjXXprhynyJ48UCt+8ssAKw/6F2OMkb3unKOQOY5MNXxta9UdZZSdcWFWiWKKALrnQ4N4UMd+7/rERk3aeo59FuyYmpQfGFEtPE04jgApDMSiPXjyuL9SbEI2RyPdTqPwWqIROALbZjp1Bxv8A25IfoKzOFFmFK9JnhXdEaRhnHbDNVBZap/IjQC0fagKjpEDrriwDemQ3f3geeQCykrP6clNg7WOFwscZ+utb3zOXa2PQ3HmrdMNrAfh9uVRlzTl/doXDVuy5VR9MSSmCyUuH2cm6uvrxSJjkB/MQW7DfmCz3x85wmKIsYuRVZZOfAkd6JrFCjcyRJWK7r5ItVzlLsS1YQ6xrIFcCXBMjX9QrR+n6pdgjWqzGrCkD9Od3j+ZTaaJL2P4nraxJTrGMgKRFVtTtib7HGlXmtyeEBASoucWW+rEnLwQIvFtew/5ftvTRzmZj3Q7KSSMJ4KsdXb6dlP+wkkDpkmawpTMzMQ4YNDV5zz8X8JrZ7hz1nREx4nip+X10JShNVYoX6uT84WDpF5QQtSJ8gwtDu9+zIwbVn37YQ34+aldmOBbb2QDLVjNceUIZGZ/RZkkKSeZGlJY0LHC7WeOn2vl7B1lVv22D5hZLyJtiJmkJC+oKXNWfpqImoHDUdboigFOgXrPp+87HPwxCiM/TZ4+Wmdv/QveOdnQQiKqfLmSa8VR5rEaIEKW+J+BxY7pBI1YLR02KO0ztzxKpCMsu1oNpnTbl1d4H1aYpkVuDoYOkfUALeqTG5aaoiQr5WSGZmcsL5wRKnqsT69qzVieUx7BTstEQAjxaS1j5TcQmk0Lk5iLyURVSHkKe5OFDymfqV+RLJq17Es/tXkH5srhc/jeWce62GxRMRqMuSt4ut3UPCfdulU8XsOWqCBOKkm1ywWMdOFw5PWOjbBqB13aju2nTuBKT9zyBHCFGhicIQgqHfWanPOWeTcuACkRUXxkZI2HWM2TfFuYyZtvAA+QiMT+/ichUZwsOyJi1HB0ss1wkeOToBANzNZjhZzpAhQcnPb5nnddhe4/e2B5MOUdlwyXIbZMJezGq3jxHiK5BNyaox9e87JTZp2zZEuEb9DtKSFQqrLNGDYUkp3WlNJgs8s22IRc0+FoiAqol0yWsXx/o0Rd4QWAl5WS90CNTWmZfWB53cL3w5Bf7+cJcq4TRL27QAReUcJOtGRx3SQYQFqN/pvIiRpjnK/bi2sADGu9NaVsxZvj3ZkKJQiLxkSJDMmpXAWR8ipVooyhh7aVbrKh4BXtrbR/6X+1Cn5jo9un2WgFbSoAgePW1FsSc+Onqwserq+7qOoY4yOYtrUxcXt5o3QtbK8cX+XAnZABiRO4Cpd/K5JfuW1vC6nj1auRCcRV8X3Y9uoDQu3T/wjv38Q9E3e/fu77G8hGh2nMSFtqe1GZvq2k/XWGYJirKehWFemCvRNugMLhEnJ+jsC7Ga8I5CCgHM18roeNI01x1pn9Vi6O8aUn6mzMRm6yIxtknfx4DqGH28cC0hVqU+kiO5hvJ1olPVd0iqbZ5nmW11+YHg7hYayE/vzLFazoIsLVURoQRweqe10EhJBHkeGI6MkTO6rs618u3sOiWRJp0zUSXuAMjvJvV9auokl0dZxFqjBbRh4ByklbDXTItVKd4XO4TfxkP7d7CfrnF8sMRLLx0gem6O5CTqrNtjLzaoXUZMM0yuH54t2zw26kT9VXwhUhZRw2ELW1vCUgr7u9v4dv5dynrtIiY+/Yl0jyWr71SwRfz02d4Xcu6pJz0cF4asuDCFZYXq2Rb6zPOhA0lf5lKfKZcgvUTkDuI4Wqx0nVTH+jRFdTcxwi/rD2YnzJckJ0hr8nDRmZ2Yay/NRD0OCSIpoilRNYmdNX9jYZOOqerp+75p/cDmBGaoK8zlyuTPIw2odtSFy7o2lKTwiA4t+m5cQ2TOL1E/s+vTFKcsAuN0OevUxVE/tzEyJMgcQRpSxAd31bSVy25Qm6SI5CGtI/heUvvIThOjPhqE8yLG/v6646LyrXGWqBKPXT0G0Ioudd8iJR20noHZvMBemuGBvbu4/dACt+4usDqZIX5xVhMXzk2ZWJZbWIq9ElUMvYCiXkOocfFEx0krtCfRMj1LzKJik4i8iLXliCY2tjA2meUdcsLXY7Jhk1bAJCl9BM8HMcnjhBZV177ObzqhdYaTovsyGmgWu0NQz7Mp30bwjDVAf9CnHwD8Al2+72i+7KjJF2mOXMV6RpnMCmRN1sVqVno6YiIk6GyjnAIuMsJffPtF3kOmr7VoxJHLrLYIzeJiMsIRglk87lzrcvpXUrLuDDp+oMvINUEoyhgnFhEIRYfUBMCIKqKBjdxKjdiVkBVRuwI5r8NDkqSBi57jeZojWdSJFtO0X
tKiWFthsZa7xxfqSgMuCcTp2T/NUuAIeAn7YjsA5rppjjEy8DoyBh+m63pQSdtJSujsGgAOkzVm8xwPzk+xPlS4dXWBj+1dw/rZPSR34s76POWsMvMvUW46VaFMgWqvQLKfY75YoyhjrABUp0qX4fe1UuZaTvJ6aHGHpPjS2OuJU9rePyJy2tJlWVFCLCShGDKGnffxbmz7Lg5ZUTlS1S9is83um5bbBL5Bo09U6SvrE+ma5/e7jaR9t1cL3WEezZdY5imyWCFXcUt0mN9YylkAdEP6ABjROlKb91BPY/tmB+tSYY2608jLtl1jCcQuEdrGTUjNlOQl1Gp5mqV49vahTpNPCFsnpes27CMv0grDfJ/hVmJumXYlaFkDQpCsghyUG2PvWqZ1CM82lhbpDZN0EK4Vc2eKWQnT9v0hNw/PxiyJfxdpriOdJNcXhfUfJmvMVI51kTjzA3ldAk0fOlM5DtMVrs6XeP7aAbJCYZkluHNngeKFOdSSwtUZWcuYRukwx8HVUzxydIKr8yVmcYGPH13Bx5+9Vv82zTGUkXe2l2mLb1GaC1rmRayFxDa4AJn3g5IViiw0kobJJiljCMq2dJK7nLCFIBrQngtDVkIh/Vh8gKBBYBc/6hitQohba4ieoD73iJBWFv2xLhSWeS3006GokUlYuOtHCu1bzHIczZc4TB3hA/zcIVoR6qQLNThTog/z5llZbcECMgQuUjOGxNCA5NoHuJ/PvmftNEtbQXaj3RgKOTqj/j/E4tIr0J3IBQXUg2I6L3B13qbhT+MCz94+xGo565Adrq2QonaoThvk3pypAserBbJYiYRfFfXgvUhqwv/S6X6nLl22sUTSbxpqUZG0WHzS9+D8Dh6c32nqTHCSzXHr4QVevLtfL6lwp17zKJkVuo/IsgRHB0s8sHcXN/Zv4yhZ6ncwvVG35W42wws3D5tjc8N9bN832+3t0qS4IC25QAnb+kjKttw3RrlzRkamxFZ73CeffBK//Mu/jD/7sz/D3t4ePudzPgc/9EM/hL/+1/+6LvOWt7wFP//zP28c95rXvAbvfe97B50rjXLM4uEdoQ0aBNZlglmcd/4PRejgMURrEDqzHUpa6nP6c42QO2hdKE0sjBfpyl28VBwYgwjPSWDPQsmEupdmOEzXo0SrLhymtck4K5X3HsxH/K5jjunDFARIekZDnsGNtTLCs3a8WuClO3sdawrgXsRNQilYNloLiHxMaO2kYzFEr4FRaa5riFWJVZZokkz35up8CRWXeBpXxHvCjwdkK+fdbKbJBNVNv91hsjaEkbT9xdW+ro/e6wf27gIww48Niw1rt/7PiGvf80L7xX6tTHCULHGULPHg/A5efeUF/YySFYe3wa7jOF9gpnK8/PAWAOBjJ1dxq1nkj9LSp6qoXWRoyUouCGft1cQBefE/l8uMrFc2QRmbFLIPQ9/Tc29Njs6JZeV3fud38La3vQ2f/dmfjTzP8V3f9V144xvfiD/90z/FwcGBLvemN70JTz31lP4+m43za08J+pHt/2PrkdA3iISY64eKIIcQF6kel1qciEsaF1geJLh7a69OgJV0k1DRLEb7ex2uJ9+L6bMs8OOuzpc4yWaYxcVWCMaUkNo3JYHZVA8zJOLoNEtxezk3TO7cYmBn8eToRl5U3RBTReuooBtVA7f7USqnMYCo9EHFZT1Y7pkD97pUOFyskbMkb651kaRBk9yakluGXC76e0wWjbtegsH32c+KPVFblwkO09paNI9zrMrEKCvWbz13s+Y4qoP/RwJd58zatyoT3M4XuD47Mep7Jj4ylgIgN9btYm7oSXLVrkHlExjzbS5XGWGRZE6CMoV2xNcHHiX17zC0jwjpB/vqPAsL81bP9N//+383vj/11FN45JFH8P73vx9//+//fb19Pp/jxo0bG51rHodZVlZl4vyxQm68fewmP9bQQcRrrh9obeEYWp5Iy4urPTw4P9XbSJT3yNEJngVw99YegHaQIusKgWcVXSRZrwjW7gyl38KwjDWdd902c1VcG7sgMmOelU2eVRubuI6MQbCHtJxkMxyvFqLbIgT2c9IHSVMC9BOWDlHxtqkyXDfmvm5oLOVDsd+tw2SN9Vzh9mqh1yQiDYQ0YHLQ/qxUUEXpHQzt37p+DwR3d5yLzwWRFLsuGiB953LtnzOS4sMVNgjb5a/PTnAlWXb2HS1WnRw3C8sdlMSljmqk3DGJpfcB0BHLSkJZOzX9YRLgvt7Aeum7x5w4TNWPhdaz6fmiAcfv1PF+61ZttnvwwQeN7b/927+NRx55BNeuXcPrX/96/MAP/AAeeeQRsY7VaoXVaqW/Hx8fD2qD7+Zuwx0Q8nJKnYizbIDOZagAcmxY2kzVfu3DdKXbRm165OgET5cxViczLXCkAYhCOnO2WNdhuu50qC647rlru9TB+spvC1M8KyF1DSUy9j0fS57XjbttmaciUZGsKCF5LCT3IYfOn2JlM9X7Hdt9sAkQTwbmIi12uxNV4oW7B1qoahMD0nlRVJ0Em8QkcYmsUFgkmZUwcpgVsu89m8e5+IzRdiILPjIdQk58ddhtmMe5JjL0fVUmhlWXEwhb8OpKfqm/szWFOEE5FCKi1oCxfRtExGW9CO3/ztqS7Pptx07wd0ZWqqrCO97xDvy9v/f38Omf/ul6+5vf/GZ8xVd8BZ544gl86EMfwvd8z/fgC77gC/D+978f8/m8U8+TTz6Jd73rXZ3tZObnN+I8iCGHWmJs4sJnOJ2yA4lLW+9ma8RowZzlw6bOeKZynMRzZEcKz9AideiKBldZgkS1upeQDjRkW9/+kBnIWTw7U51zU+vfEItfh7AUCqdZqqMuJEhaDH/iLtnSEqsKJUi/0k9IOvl/ODoL2flJjZQXCJCv7SSf6XdlHudAusRhusRJtsBzpwfGIMqzxdLKzXyNIhpkizLWOU9cupCZg3D0wXbRcHACEfK8SgOoq/yQSQhtO84XWmDM17bhoOeQXDY8vUFfxI6Un+kkm+EwXRti4bb89JaNoZOz84Kgdg9wlUVVVU2b49yBt73tbfi1X/s1/O7v/i5e8YpXOMs9/fTTeOKJJ/BLv/RL+LIv+7LOfsmy8vjjj+Of/68vx/ww7ZQfizEmtaGDwtDyoTPesUnAQmD7toFux0jXdZIt8JHjB/DS7f1OZllyC13dXyJVBV515cVBneuUL+o2X/qpyE7oTHVIfaGQ3AH8WSRR5LpUeOHuAZZZ0onA8K30yiEtEWEnVyMYC+911gWyBbksxb2dQTYge7Jt2fHl5+AJwpImxP+Vhy+JJJIIC2AmNQRaYsLdGxTNA0ALYiX3qctlGkqIV2WCKyzyhp4/30xZ6jPtfdx9w600He2Koz2LOEMaFcgqhWWZ4rn1IW7nC6yLBCf5rBMtaUc69kXo2GJijpNsrrVCh+lKW2xv54tgkuKyfoRqREJxFkRmTB+1OsnwM3///8GtW7dw5coVb9mdTBu/6Zu+Cb/yK7+C//W//peXqADAY489hieeeAIf+MAHxP3z+Vy0uEyNsS6hoab8IeVDZ7xT
pG8fAtvNojue+QnQPH+37i6MQYeHL2+y9Ll9Tt452la2szCTTqVxGjrY7Ar8+TrN0lYL0Ogw+sDXqxmS4ZNbNEoVGaHNOkJIWt6BEZMqj8QVd8eEKkvXkLBrWwsDOVlZXlztaYLmWgFdgo7ciwvjd7AHWpsQhGAe5zjOF5qwXLHe8UXcTd0rPevSNbveQ+l4ft5FnOFQNeJeZFiWqW4fRQiti6RDWEKzV/tcOXR/NTnkk7ORlo+Q32OKOnaFPsJJOJduoKqq8E3f9E34z//5P+O3f/u38epXv7r3mBdeeAEf/ehH8dhjjw06V/0SmL7ks+jQhw4mYwazIRqDPl9qcH4XRz19D+ZhuqzXEkoT5HHcLH7YDmghq45KcHW+vGPm++0Z3VlhKsHspvWMIT32c0bPzrpUOMlmuJvVUXxcMErJs/rIiBRCGgpyBxF4hBDl+BGTuKnCiDLyaVFsuHKhEPLCTLpG4cM8TwhQ/w4Pzk/x4mpPb3PlBUrjQnRd8Mgg/p5KroQhLhu+nZOTZZkibVLQZixH/iKuCQSdx7bKSC563ib7GLt8Gpl9BbWJH8cndERcAHdkpSQ8dvWJh+mqE6Fkfw7BVO7sTc/pwzbHTtMNFN6urY7mb3vb2/CLv/iL+K//9b/i6OgIzzzzDADg6tWr2Nvbw8nJCd75znfiy7/8y/HYY4/hwx/+ML7zO78T169fx5d+6ZdufP4pRYiuOkP8r9sWTkrkxad1MY7dIG4/xFKxKut09yQipFDWsoiBtDFxNzPDoUIzF/nwdQZ9FpfQGegQUjAkymwqvcrU7ZPA9Sq260YiLGNBx0quoNZSx8SwoKy33bV2iJDwtV6y00STHfsYLqwNaSdPGsYjSdaFqu+VcKsPG+sKkTpREBqwvoz9LnPSzt+VUNLu239V3cWtoiZgnMi4Pve9q8f5wijHj7uanIptSKMCS6TijH5VJljFiUE+7EjBk2xuWqEtoiKFgfdNkOjcU0yIhrrEp5yMhdZxYUKXf+qnfgoA8Pmf//nG9qeeegpvectboJTCH/3RH+EXfuEXcPPmTTz22GN4wxvegHe/+904OjraZtMmm12HdPY+P+/U5wJat4xdfmyeDV/IcIiIig9k8zTX7oHT5Ux/fjEuO+HFUsdGZIM6lsN0afjW+8Bf7ON8YZiaXT75vuvjcBGqUPCOb5OOYAhZ7jtXn/txFhc6iydZVwBocajPHcTdRa7Q3Zy5DgF5VVwTbUp7LoSlqCKuMVFxrZ3KWa4TIih1OUaujXpaN5StVUkEK9FMFTjJZzhMTavf1eRUP78fufMgeAAs1+9I68v4V79tyf+mz9FxvtAjhY88cBBRWZapOIjSZyrnIlQAtOsHABZRS4CeL46MuiSycJwvcBsLYz9lzzWjJhMx34zd3pDv23A3T+Ey2gZ2abHZuhvIh729Pfz6r//6JOciN9BZuX42NeMPmQlLs+cx7qOhGPLCEq4kSzy8dwfPoZkppsDt5RyLWY47yxlWTRr2vIwxUwVu7B17Zy+A2bEMISp2fQ9byaUInCxMYeId+2xIFp9tktw+q4xowaPOvVQ4RSq69Raz3CAsnXDcgPwinNRIIlwbZaHQt94OuVTmaY6DxRp5EWOVJcjXSSfKx67DJim6nVZ+DptY8PeXD9j0LH7kTpvWYV2ozrVKRMUlsLX7Cum5csEefO1nYhFnuOXx4JLLJlUFFrHCrXxPLNdHZlZlgpNigTQqMLd0MuR2omuzdTS+iQMJZPk5ybrj6mNdbZwSQ+u0r3lZpqKeaAjIlbcNdK7vvLiBzgJTWTHGnHeqmTAA5ywj9PgpEGI16ds2j3PMD57HQ/MTfOTOg5jFBa7v1SF/J3szvHRarwuyXCd44e4BHpzfDf4Nh4ZmSi8xf7m5v/1Dd64DAB6an4wmRISxLkGpjm0RFpcVywdzaYo6x4Wkt3BlKAZkIa5NXrjlxWWlsd0jsZC3xKcxscvZeV6G1GMTNi7wtO8pPX9EWJ4+vWqIQyVSZqfBnxKhz7lrQOO6EiIXWaEM8r+IM01ebKIiPXO38j1cT2syx99hsrhIRIpfzyo2XeLUb1yxMsAOcYtt8j5u0pf0EZFNiYpUxzbJyxBcOLIiYSo9QOh5OKbQxkw1UG6KUPOmvf1qcqrN3eR6OckWmKkCV+ZLZKXS63icZHWkl2QF4u4fAINIhOsFlGZj8zjHQ/OTlmx5iORQTGl52dZxklndB/qtDtO1TnRGoCytHK5VbF0WFomkhGhgXKRCyomSF7HhbrLrt5cHkNaR0VYVpi+xQ4vXpTuZ2iLO8NjeLW1dodwhetVwFqpsw3Zh2M/tWDeC67hllRriWgmr5h1LowJofj77fQvtU7JKYd6sur6IMiyrFNfUXazKWrNypJaYx5k+J0BWnQxXkiWeWx/q+smCQt9tHVtfe+jeHmn31MLYvw0h/xQkZFvn3hWZuS/ICuEsrC6bWlz66uaY8jxDZhmhINICAB9dPoB1ebXOGpkBWVwvG3+SzXR0Q+gKry74XjJpH81wqa2+Y8/DbGPs87wp2fGdLyQMnSc546HOHNwak5ddIiNZWHiZUtmWFveCdbaLyd5uiMId5+uDJNAk0sKte48vXgJguoP4IoPOBfIcUUChGEJUCIdqqV00HERiaPs8zoCyJg+3i0WnnhAsyxSHaqk1K0RYrqq7uF0sMI+zel+MTpsWcYbHFy/hVr7X0bMMcffyMr6+Zcz9P0sysin6LDFT9Zf3FVk5K/ge3l0SjG3WP/Tcjy9e0h3Icb7AC6tDfPzOFdxeLXA0r2csOrV1oTqd9bqYbh2MMbBDOTfBrix/0nk3dTGaWiIlWjyyUhkJzvoWh7NBQl3AsbhfYGp/IweKMq0qfJutp8mLUhMW6fp4eboug2AIxPq59aGhmbIJCycrEjShZ6G3khWwDxu5JKIMq6j77BskRZ8nw0khR/yE4pq623HZAs0kI8pwTd3Fskoba0sj3IXpNno+O9JWlrZtw/o12tZnWeKYmozYBBHotoeS50lh5tuEayK4aT9535OVbVonxpyfw2b8u2zbWLMxh+8FpReJrC0Pz07wsr2b+PjpNe2CAepr/sidBzuZKCnV9XmA1IGOwRRWuKHERxJgjrG4+CLNeCiutCaLlLWVgwgO4LbEcDeOjSFh0z5rydDwa8kK4ntnOGE5TNaakNiWxpnKdeScnaytD6HkxOWm6pZrXS+cnKzK1IjcWVatHsw+jy0et59DV5tvNqHThhUlynBV3XVe1/X0tuEG4u3oAy/DBb+u935KgiKRk5AyfJurjl2TmLH95H1PVmwMIQ982zaIRKg/d5cYqxEBui8LERYqv5hleGR2u6Pwf3h2gv/vS6/E8WqBK/Nl04EnRqfDc0oM7cCnwhSkZSpX5VCS69I39A0Ydt08nTkRTD7ISwLcPisLJzQ+sS6Bu3QIPhJi6064FWiZJUb6fNcxALDfLGxHi9/Zlg+gfU7pGQ2ZcUruH5cmZZMcH6EEpT5PCsSNdQUp7EideZxpgnJSLPQ1+iwVff3dzWIfyzLFskrxfHa
kU+8DLRmy28wJE9Xx8vlNPJ+Z1hUf7HbY9e/CarJNuNx424JxvwbcuwtDVuZxhoUnmnEKn9mQF43jLMKph2JsBxfyovpePr6PvyREVNKoQKoK/L2H/gIfvPswnjm9gnWp8OJqX2cFBZqZqKqV/6sy0Sb20BdfMqG69vVhSOcV8lwO+W2kZ22TZzNEu7QqEyMduQ2btISEHktI46JDdmhbFiuvqwgwiY7LosNX7k3jQutrJKGwvaovX6GXsp1yUsE/P7c+xHG+wNXkFEdqqde6ITw0ryOD6H4epkun4JvD1UcNITEhEw2gJSz02SYsZHEJecY5aXO9PyfFAieNmJX6BrKiLOIM19QdLKsZUEILcG28Sj2HZ/JruKru4vn8yNs2ux2dDLoNEeKTrl0TDcC0bk2BMdewC+vM+R9FJ4IrbNXevw3h5HkjMtskJjamennTqMD/6+AZvGJxUyd0A7oRQpu09aywzWevDyHPgmRd6VpUciCfQQJf6TYUNqEhQuAS8HLrjCYtlhVGckHZ9dtp//fSzH1Ol9iVrR1jWz+O1FKLTNdlUgtH02U9wKL9/Yls+yLSfBbdMVbZMe8MHyQlwiKdw0VKhp7fSPWvhbdrLNQay2qGRdSm2OPfbyQ3a1IDiIQllKwN2T8Gtlutr8xZYhe6mPuGrEg4DwOabb71zYynCIMegk3uz5CXlz/grgGbZg9HaokjBVxPT/RxyzLFXy2v6cXi6HrHWlVo2y5mSXZ68qnEusB0RDgkUoQWc1s71rUh2LoPspTwgX+IBSY0rwtPfe9rl70qr7HNeh2krLL1Z/PecJcPYdVkmOW/kf2+UWSQ9EyEWHSHwPWu970DFIXzfH6ENCq0iJZrOuz3a0i/0nd+sqCQG4hICScqAPT3RWRG9FDKflfbfOfnrqcQYsHhIxkS6aP+z3Xc1NaVsdjEIt2H+5qsSOizwNhlppoNh3Q+u9Cs7JrAuYhKN/+JO+oAcT0TlYRzQ84/ZN+2MGVCpm1EGUl1EkGcxQVOmnT7FAnjIi8uIsIJgG+/j8Tw0GhA1sS4Fle03VikF1mXCrN5/Z+i09zlW2sId2dmlUJWqc5zKj3b234PN3XfcvBBe8hxU2BVpkDziD2TXzNEyuQGuhab7qBlleBmcYBlmRqaORu+65BcXjoCKc70Nt8xoXUPOV4iTWdFZKbWwlySlQCcBwvMtrHpNQ7toHwP7pi2XE1ODcvUJh1mn+98l9i1aDfUtWDnnKDvt9OFTuxHsF1A3C3ENSI2QqJvbB1LbpEYSVdCoPWMAJl02N+N1Y3t/82ijqQtIdj3/la+h+fXh1gXCdalwsOzk07ukW3m8Ql9pkPfHxqkKedKm1U2HT1A2uf2DbZza+J4q9jHraLefrPYx43kFpZRgmvxaYdU0fcxbh+gzfXiwy7dNJI1xv58ltaXTcnLJVnZEK6X/zwkDQvFeRiUp1jTgmOMG0dap4S2hSzedi9hKgse/WYUtXUlWeK5+BAn2aLXJQS4CQlZLnxal6xUomvHXp/HV78kCLZzo9h6qMNk3dTRZowFYKw1w92Ry0aIuixTPNcQFcJxvjAW0iT4CMvYDKlTExVdbzNoE1FZRBkWKtOrMm+CvsF+Vaa4iX392UbrHupaxxdRpgW7HKHXT9fHiYBLZHwetCWSpeesyUsRDUgrsMW27BRpVHofsl2b9LfhKpoSUxMUV0KiTdozRLRFKb19qbNdx1EkxqpM8LHlNbx8cRNAO7hQnb7wXjL3S20fei8knMfnSRJIkpXlufgQt/MF1kWCE8wM0sGjg2yNCHexADWpOM3SjvuHVkw2RLiq8IpmjXp94lh7UcDGasKtKHXZ7qKEPPKG3D+3i4VOfmjXZy9GuioTLGZdokKicjrPlWTZa3WbUhviw7JKg6wMY847djDVkUhRqsW0hJvFAQCIZCpEoxOyL0RsvGtI7XGRq/OIC0NW+jDFgDEWvk6DLArbGIDO0mIyxb0eU8fV5BTLMsWz6yM8Mrsd1AnzHBDXZycG8ZCSpa0sIa8eRJq3SSItFwUhzxSlN//o8gHcxgKzRufRBzufCFlmuLsGNrmB311kr6vTe37HMg59qxpz0DayNHA3z7oR1gL1OljS8R+8+7CxyJ5Pzyb1G7t+7ymEmQ/QY4jLJud3EYNbxX4T1lx/X5YpPrK+ricoQ+5VCPkg0nbeiMpFwH1DVoDNBg4+cE4ZpsXN6Jfoh0vszN0+NLsmwuICEVhy8/C6eWI616Bkz4qJtCziTHxepoIkwnUJw6ckwkOf0SvJErfzBWZxgcN0rcW3QL8bhv5zVxLXjNB2TkL0Nv7dkXDO1p+QtaNtA0U3NUsJNAsFSqntJYubHeX1f+9e199fXB1gXSq9wjhZXCTrjO8cEnZlTbHhIirzOMMcbar9qWFkq427lgEiJctmWYBn8qtOojJUUCtB62CYpek8WllccLm0zoPF5b4iK5vAlcr4rKw19xtCBlwj70JcZ8QFalEj5bgA6pkuDxWfx7kmJ/wPQEdP4EuuxY/zuYWmhmuAchHhvns5FXG+mpziOFlgFderbB+ma6+ORSIuh+kKAJxJ53h9EskJEc7qz5YGxShnZaIl2CRiYXXw88YVNItzvLg66Ahzn18famsef8Zcz5l9jrG/1bYsfnTN9mBnr1OzDVxTd3ET+52B9Vaxr9cM8kX/uEDXQQTErt9FRsa6xs4DfGJdF7ZNaC7JyiXOPTb1ydvi2EWcaUuIbWZflQn+/PajeGzvltfE79PFcLcQt7KcF5fQLqx4NDjxkPKTbNGxltg6EBvkblnFibH+EJWnqBrCulTGWjpU/0mTsI5bVGyQ9cRe58jebpMW6Tmh37oVWLZ5aHSbmqgpIitSfZKlZhNs8xmkayUryhzhlouxIN3JVXXX2U9klepkH7bhaxuREYmoAG2uGcmKeR4sErvCti0xl2RlQ4Ssfrlr7HqVzW1j0+R0dB/4/bianOLZ9RFu5wvcRj1g8NWDAVlES59tsiJ9P84XmrDY57fb6Nt/L0CaOdNSCYs401FCPOOwz5JhE4Z5nBtWLrrfx/lC18dJCv2GJ7kp7p2pdt9hsjbaILVDap/kmiExrT0Dpcyqn7T3PK4kS+PZeWF1aCzaKVlMuLuC6ucWglDskihzorZL98cizoASxhIAIRhyb1wRNMsqxbJIO6TlXnH/TA3puqXnoRoQ0XZJVi5x32Ie53qQI7JCpnqfRoXrU6S8JNzFZBOWi4q+wZNCm4/zRRsV4xGO2iHAklstqxSuJEv81fKacexM5ZipHC+u9kVhr09oK6W13xTLqiYX19M6hf7HVtewKhO8bO+meG0cnKhcwo1lldZEhWEeZ70WlRDYbi1fNllqx73q/tkmZN3PJVnZGc7jbPg8tmkofNlsh6DvXtTmfaVn6ADw2N4towzXoXA9gS2wBdDqElhysOO81m3YkMS49zp8C0Iu4gxI0BGVuta+of2uAZvC1a8kyw4BqqNuwgZ4l0WFzi9FhNFnr9jV0ixQZ/3I7DaeXR91Vgd3aeD4dY+xqgC7Wz7CxjbdIF
wMaqwCzc4Zoiv03RuXFsWOfuL5VLiw1lfP/Qbpfg15Pi7Jyn2A85KXYyxCFhrbBIfpCusywTN3j/ApV55z6lLsZFyrMmmiXdoB68H5HdFN4KrrSrK8cAnnOKR1YXjG2z6Lgqs+HfkVFXhkdrtzz1dlgufiQ8NFBMiCWgkuQbWU9E3/tnFXp0E4VEvtEoICbsV7hquHXw9dJ33mA922omruJfQlM7O1ExJh6RP8+vQXXLsiiW2lOi5hgu7NpRtoYvhmi2cJKfzuvBKTKVcWnsLqQtE/J9m8taTsd9PNSwOVa7FJ/ieRE8kiQxaBUMKyzYXCdokxv5vveiURNQA8tz4E0L/AIiC7gFzLD9jPiLTMwwpdVwElUCOXUFYp3VZplj4lzsq6MjW4kNenEwLQCWe2NVUSbKKSVUqTTZfI1tVOX3bb+xFdchcFH3tJVkbivAwUU7lL7lXY6/gMuR+rMsGt1QIPzu8AqNOl+1KZ2wOULZjkCBnk+LaplxsIxb0q7u0beOtBrH/W5nIBuQgrX4SQPyv0e5PQltp4O1vgetrN9aP1D5D1D5RQjuPSqmJC+v2JDIaWtyERFY5FlGGFfrJyHtbiOY/YJMX/JVkREOLj3CZcC+n1WSXOq1UFcLetz++/Sf2++7EqE7ywOoSKS5xkCzw0Pxl0PpvU+K7B1rxI++2BjmOboc/3GkkhuPIeEezrkkKkZw73jg+Sa+92vsCKuYXsfD99s2q+j/QQfMBdVukkROUiWFU4bKsF1wjpvpOig3qWoJF+Hyl5XKhw9tIdJGMT8nYuyMpP/uRP4od/+Ifx9NNP49M+7dPw4z/+4/i8z/u8M2nLrjvvUIKxLSIyRbTDNtswBZGRznecL/Diag839m/j+uyks98HKXkXJyKS8JYfK20D2gXvXIPuvUostgHpXtjb+H2WwqRDs8LSfltoS9toAUMumOWDXFYprWVZlWnHPWRbAq6xnCFD3A73G/h9IYEtIEcGueAKseVYlvUijURUQjUznKTSb3w/RgnRfdh04cQzH6ne/e5345u/+Zvxkz/5k/jcz/1c/MzP/Aze/OY340//9E/xyle+cmft2OVAcFYE5TwQk6HYtM3STPg4X+Ajdx7EKw9f6kRkDLX0hGS23eT4eyFL8lm5sELhi/YZApdljIMv00Dv73PrQy2kXpapzqTsqwOoE56d99/+PMLlBrLRscawz3Tfj9RSlwO6SwnY55TIyP1MVAhTEO4zH71+7Md+DF/7tV+Lr/u6rwMA/PiP/zh+/dd/HT/1Uz+FJ598slN+tVphtVrp78fHxxudf5udwVm4Ze4VQjK0nWMtLPZ5iKgcJmtvfo9ttkk63lfXUMIytQ4l5Dl2RWydNZEJyTgc+jvaljPbgkZiXimZ4MeW17Ca1eUXceaMGqKEYlMTlYvmApIgudtCCQIRCtKk8EgsifzYRIV+N05MJOHvRSYskjCcL7+wKc50ZFuv13j/+9+Pf/2v/7Wx/Y1vfCN+7/d+TzzmySefxLve9a7J2nCvCgyB3RGT80CAQi0XLmEk7budL/CJkyPcePhjk7qYXLNu3zkksiK5goCwwWZbz7AtXg6J7LIXhdw1YeEraUvuG9rnCy/ncEX/2HXSfqnsh+88hAfnd3AlWYoCWmo3pZAfi/uBmLiwKlMslEkk+giCKx9Op1yTIfdWsd8lRWW7aCivaxFluNYstniz2O9kuOWCYMnCswtMdT7pvvFEeoR7UmD7/PPPoygKPProo8b2Rx99FM8884x4zHd8x3fgHe94h/5+fHyMxx9/fOO2bIO0TLnirYS+iJVtnOssicvQc0vl10WCRw9vDyYqIeX77lGoiJMIC2EoUTmPxHuKqLWx76hNUFxlxoIvDSC5FanuVx28wKwyqbFiMLeoAP5rvJ/JSAhcEUH2DH9lvWfaukJWEqv/tokIBxESEvNSuWWVGp+Bej8XAN8q9nFV3XW6pLaNXVt7xkZKnf2UGUAUmbHWVVV1thHm8znm8/nkbaAH2dcRjBkEQjvmqUjNNgjMECHiWVthXOJVjtDMpiEaBX5ePiBKFh47EytFG0jtt89LeURcz+cuLIT2szw2dw4fCELfj6EDNC1WCfhz50xBgAHKmJt3rCz6/EKWXht80Oi73svQWBmujLNkbVmozLBcEWlcWO6eheqSBTsZHOVfGQKJRD2a1nmeOq5UKyLsImJIpNSZjizXr1+HUqpjRXn22Wc71pYQbOIbCznOjtKYUvzo6rSnIDFSBMO2IA0E21hvJRT24LEuFQ7TlVOvIpn0fQRIivzh7oW+TKx2W6VzcrEmzQTP2wKaQ62I2gweSFjGXNvV5BS38r3Ocz9EDD1WWzUkLT9QuwiG9F2XJKWL0IHPmUIfltaqCSN3Pdc8Udwz+VVnnfSMG+TD4SblLqFr3PKGrospBOdZ3DuPMwyhenF/ke1hNpvhMz/zM/Gbv/mbxvbf/M3fxOd8zucMqsv2jW07tp2nx6Y/vn0qTOnrt331ts9+qvr5901FrEOP6ct3cpLNvInAQtpLpv0xpGtZprhdLGqTfzOQDj2e6sgqZfydFXRHu0E2YfqbGhRSLD3zvgSA/Pi+92UW5zhi7h/72eDHPDK73cnbcRmePA30GkEYlg9lWaW4WezXvwU9i8x9w38vCsGdx7UWhUgI/dmgMlTPgh3HEfrsG9YfxzmNegcIjEOjqM4KZ+4Gesc73oGv/uqvxmd91mfhda97HX72Z38Wf/mXf4lv/MZvHF2nvTrmrsAJy9TaF8J5Tvx21i4gF4iE3M4XOEzXBmmbqs2uuvjvJaXgD4FNouzQWD7onkXkzZTPpKuusdfFj3Pd7zHWE34M6VT6CKyUqXgRZRsLai8hg2tX6J5LFizKfbOs3Gn77UH/RnJL12f0z0XXdWS7PWmb3Ua+AGJWKXxs9QAA4Hp6YkQlkUhXo2eV5/NoVRmDMx9d/sk/+Sd44YUX8L3f+714+umn8emf/un4b//tv+GJJ56YpP4pVMhjsK38GNsQ7W7TNSQNtGPr2fS4ddFfx5Dz+NxBvmu23T19egqX5kIMpU38A/sYbYskNtw1NhHotmvvyGs28X2SFc1leZF+F58biJdPVRGcZOwSw8BDmEm8GnqcAY/fwSYA5A7iwthllWryYv/nxMMW1VJ7XzF70TjHX60fZJZ7U5zdR1j4OaX23wuE5szJCgC89a1vxVvf+tatn8flGtpWRzG1WJfQ11mPGVS2pSuZ0s005JxS+2cqx7pUuJ0vgGSYMG4oQu5f30y/795Jg6Vdp/2sjH3uzpNFb0wo9CLOsIr9eVIA2ZVJ/31h8VIZTlLsc9h9ka9vGkpmLslPi7lllSaxLbdijJEMaB1KbFpFgo/nVh9uKSnbffZzTmLeVZniT+++DEBt1bNJDdXpapcdWi0RrzHXtG2cC7Jy1jgLZf15z+8ydajyWbmIuPViFudYl/t4cVX/PXHwonM2HFLvGPSdI2R230dmhlhaQuFbKPIsiMwYwmKLbH3ou8eh223CMtRVN2YgvSQqLSg8eVmkYvK2sapNntLft
3giBycBdnm9D+4wZ01uVAbMgdvNelF/tX6w3h5nnegkfk4fcbHLnUf9yiVZYbA7hl289OeZtGxKMM6KoEjRO6sywbpMcJis8Rc3H8IDe3cHRYVso12bwM4bIukljvPFRjqWbZAQfs9D7ofLPWO3L/Qa+bMwE4iq5CaSSA6//8f5wsivIj1/hgvoMk/K1mHnU5kzYk2D8SLOdJSNS/DapwXpS7PvIibX1F1DuyJBt1EgVov4Fp6YPa/PTTl6pARzY3DerCrAJVnxYpfmVFcHtklul7My3Z+10LYv3DiJSxTl+EC4voijTQiQHWYuna/v/Pz/GPg6775trvYQht4bF0mQ2rKpBWnt+O0k94/9jD23PsQrFjeN343IIq/Dt9bMJbYHOwFcKEKTs9kJ5ejzgi2rwN+Xa+oOltUMN7GPZVlHIwHMBdSELvP6KJR5Ea2Nc98sDppru9sSsaaOMaHO5/WZvCQrPTgrgS5hE6HuWYsitwVJFGnPgCXQwPHY4TE+9NKD2EszzA9y5wAUcn7XOULKjoVEiELvwS7QFxGzyb3wHd9HWGjflWSprSshglpJREvbiJDYYmkpTPqR2e2BV9viUocyDFK/bQtvbxXQxNG1thUA7YrhVgoiF0RIDKFscyyRCyk/ys3ioBMVZJxfsIjYJIXqsSOS9GSVRSYNISHnlbBckpV7AGdFWMYMfJsMlGPPNdSacJiukKgSx6sFVnvJJO4gF4Gy99tlh9YdAlsjMQZDn5khv9023It0nT7XEF8vaB7nOMkWWMV+wuKyDK3KpOP+Oc4Xnd+KvptuovBZ/pQLwd2PkMKU7e8rCAsgFrJugxMSbb0gXUxT940mIy0/xn6fbvaEq1PuFzCDe33OlrAQUaGoI21RaUj7NXU3yJ1ln/c86lWAS7ISjLPuLM6CsOyCqOzaCkCDyAN7d/H0rSt4ce9Az7THuiikbX0WHt+AOwauWf0Q18g2CcpUdfWJivn+vuupo8PM8/K0+UPacpwvOtuuJEtcT0+QVQq38j1jkcq+KJSz7m/OE6jf4/q+Pt3PFPePD/DcSnGtWcfHKCtY9W4W+6OSvfHvvA0fXl/vDT/mCycuK9amAblYzqNVBbgkK4PAZzi7nu1sKsA9by6hs3ZVPDg/xUuzfXz0+Boe27vVf4CATSKmfDqUIfXx2Tyv6zwTlU3cVX3hwyFWqFVZr769LhJNWCgHz0z5CeNz60Ojftf5yJIyjzPMkeFWvmfs74vy4RGKU/cz9/K6QuchEIGvngx0LXghJMVO9OnqnzuC2bglIx9eX++UNxZOnEDHdZ5wSVYCIM2Cdv2ibyvJ3L2ITa0S8zgH0iUePbyND37iYXzg+GHc2L8NNGnZvcc5tm+DfElWGjv6R5rRbxIBFNqmTY+b4n75xLs+S9lxvsBJ5l8MVXIN2VYYV/1X2HNE/YS9EOWlwDYcvO/bRj9o9+0hmg0euqyPCwhsWMQZbiQ3sYhyLKsEy2oGqO5K6/4ooTUWUe5sp+FiYuHVdG38+72ES7ISiLOeheyaqIQOJttwFU3hTgohL4fJGkcHS/zl8w8C1wHsQyeLG+qSmZKw+Cw2fNtxvtBuC8KVZDm5RWWXepRN4MpHA8gh3utS1QtbJl3hooQ6T4//+uZxjqvJKZZlilWZ4Mjx2m6iDdjUqnvWfRlhaJ/Gy9vHbhIObk9Gh0T/cIS8S7XYdoZFVD+P1+LGpaRMQvFMcRXPZO7FEYHaHSUtoGic7x7MVOvCmS5keIkwXBSLSkgirqkGu5C6ZirH41duAgBeOt3Hx06u4unT+o8sFnadPox15UjoIyoSrvRYhmxM7RbcFlGh3zKU6Epl+fZ1mWBd1u/ULJYHOZdrbWb9xnY5V74VWiCPFp2kBfT6IGW6XURZcAr5ewmbEI5d9ZGbik8XcYZrMYUgm9aRdkHEvBP5Yy5gWFtl7Aili477yrLC9Sau/efNnzvFS3getCrbnHHb1gXpvNIgPotzHKbAyx+8iY888xCWBwmevV1rEg4XV/DY4TEe27s1SvhqW1qk/CkuSO4fO0QbgDHLp+1Tu3+2ZQkbe1xIaLqvLG2bxYUmLASuV+ERVfxYO9KKn+Ph2Ynz/vNyizhDVinM0f9b8X6ILCo8Xfy9Cqlf27Svs4W4Q9A3NhCG3PdrjFCSa+aaRTJbotJEs1VJ6x6y0FnAEI0Wsbi3n4VQ3FdkZYio7aJgW8LJKV0DQ+pymeHt7S7yYp9vFud4+eEt4AbwkWceAgBURYS7t/bw7LNX8IEr1/HEgy/hxt6xMWt2wTfASSGxfdEtvmvg1ziUUPmei7N4BqZAn5ZJW1aKBLO4MKwqRFRmFknpI5n025ImBWjv7SOz27hdLLCIsw6RHDKgapISZUBcp1c/Uttd22oqnIVVOOScrvsfQlr6rBkkgtUuHgA31R5uFge4oWoxf5u8LW/qbNtzs9zT9ThXIY9ybYFZRJkOYb7IuK/IiguhrPpehK063zXOSsOw1jPo/oidWZzj4b07OH5ggVvH+8hX1NlFuHu8wP9v+SjWjyq88vClQa4WO9dJqBaFW1b4sXbiMftad4nzRlRCz62fCyHqZxZvnp8GqN85GgxprRdn2cAEXNQ3LaLMq4W5HzBFlEufFcYXWt6nN7qm7mpSAtTE4oa6jRvqtrGt/Vw0balAiVV8eVhMl1DWuIXSSS0skpfhrMfHC0NWVmWKaGOdwPljpvbLtOlqzVMRllBB6XnIqOpzExEO0yU++doLeG6+xN1shpdu72N9e4ZIVSiLCC+d7uPB+ak46wa6Yk66btsaI2kppNk/tZuvYWMTmpNsgZnKO9aAPmzyDGxLeL0pJKuVC9yKElKvC3aIOH9XKRcIWUCWaBOD1ZaWrt5A6n84SQHqgXLKbMi8rfSZYG87q4jE0KUexhAY6XoJQ1dkJg0JaVKMfazuZaWwiAosrXuZVRGWlcINdYJr8Sk+nD+Em2jS8NOz00QCLSvzuaxJrPv5sfeFiLR9CfWmio5dlVFw2QtDVu4HbNpRnAftioRtEpo1G/gJMzZ75uc9TJc4TJc4yRZ4aP8OPra4iuOX9hGpCi+9dIC/AHByOMOD87u4PjvRx7nCcl1uA99xWgDqyPvBiUxfThAJfc/ANoTBm2IsQZJ+c/45ZND3/V7zONdkhN5NexCQBrs5y5UB1AnH7M7+arPQHceySnFSdIXfPtiDsasPCdGQbJOoTNE3DcltIkGytoRaFW6kt3BD3TK0J7pdzeeFDltv/9uEZREVSKMKywp4VfICbsZ3cbPc16SFW2TqulPcLPcbEtslIC7CtenEfCiRkzCPM1QDiPclWbnEKJyVtSQkfDQUrnDn+fwEh2mta/ioKvHic0cAgJdeOsCd5QyfWBzhbz+cd1xCti4FcKd8522w20GDKREWu62uQbgPmw4IXOTrwtTEc8q6uIVNcs25rGbSNpuo3Mr3DH2S/bvzjp2vRSO5gOx1beZxponKrXzP0Mf4sEuyMRRnMXEKtcaEZMjluKbu4m+kzzZunBpHyLWlBDAtK0Dt8smqqLO9Pn+kjyEr
yzPRGstqhmvxabOPu5HWANxuo215DDapl4jVpWXlAuA8dSw2piYqUwhsh8DWs0iYxzkenN/B4UMr/FER4/jFA1RFhNVJrdL/3x97Ak88+FLHygKYA5/kX7dn6/Ygaotn7bbbVhcpcsXG1PlUhtSxSabfseDPSS2o7Scfrm0cdtRVVinj3tLChnzNoc66M1W7sF4IyJpiZ8Htw3lIJMldX/a2MQiJBguJuONt2ZSwXFN3cBQDQIR5FGNV1ZnYFhGQVjVpISJDRASAQW66KPT+NMqxiG6a7e5YZGpr3U3sT0ZObPfgJqHlU+DCkxXJHxvin3VhGz/YeTevboKz1qv4YGtZbPcN/T9arHAc1e4gAFifptg7WOF4tcC6UJjFuV5fiDBEVyBFoBBhWVtkRh+j8o2igXaJXYY2u0D6Hp9+yUVUeGZSTkJJPOsfQLuakxCtwLX0LlACJ9VCW20AeCPSdk1MhvQtu9ZJ9UWGcUiEqlu37Pa4pu7iM2Y3zfNEPH1ZCaAlJURAFlFLWpaVj7SYxwHQFhlOWK6pO1hWs94FEkNBz5IvEV/btsJbRhozV03OIeA+dANlVQwV6I/dJMZ/bIcQSobuR2xbhMvdKQSySkhWFpu0LJIM6V6OfF3/dlURIcsSnGYpFomkSXDnO+EiZzvaxz4/t6LYbeSWGJfrgTCVVWVTjUcI+o7ZpH5aC4iO7bseKacFB7+v5M7jVi7DmuCJIPGJFQ8bV9NHlw/o33ibM9yzmtycRWi7/fvbVhbbmsAJC4Un/430WYucdMGJiW8/kZb6u5vAEHFZVvRcUXK5HH9j/nHcLA7wEWHdIBtSFuAx41Of1cVXZ1aF56W9MGTlvOM8z3q2iamJyFSaFduV4nMNzVSBNM2RnbbnLYsYy3UC7NXH8oEqZFB3aSSkQZTaysnL2iJUPoREgQ35nbYZCbYN4jpTuUH6pJWVucViSHI9TlKoDh4dxEOTbw2Y9bbRHxlOsDDaOMQkHzpJ2nV/MdVvPNaayY/lx7ncQlfVXb2uzw11Gw+rAlfjFLBC05eV+bsQkSHXUB+GEJtFVOBafNrmZYkoGqnWtTyTXxWteENE1hw+C8ouxrdLsnIBcV6ICseuhZmh4LNtwHQN8fYkqu5syBUUqxKJKjFTBY4aoW0fUeEWFR86glsiVEWdJp6Smc3ifqsKP+9ZYCxhAfrz0XCEElj79+b1hgx29uzTtqRQh94R1HpyqUhhpfR7klmfBL19g8QYC+42n49diq2HWOXs41yE5aq6i8/f/wCOYtO9s4hSzKPmfa74b93ed05cuJbFBU5UbNIiuYooL8u1+NSKPkobt1CKT5Tt2kH3ulX/wpCVVZkCO+iUz9OS22extsvYZGCbdjRDIbl+QsqSrgEwrThEVqrC7ESIOPTl3ADMZG/0XxqY7QG+taa0nQ0nKn2++BBsiygOrXdMO/qIimlVoTWB3BmFeRvs951+S2kGLrkMhmYV5WHN/Perf+cMc2Q4KRbe2XGfhmDKfuMsxfab1i25FGlbGhX464un8RmzT+BqLN9rTlJCYFtZTLdP97sPiyjCsqqMRHIcyyqpBbfFfm+4O3+WuX6n85zE7TMukeIxz9UqzOAE4AKRlV1h0xc9NDFbyHLjUyCUoGzT8rHraCBv/WzmrXO0FApJXELNShS3U2BWoixiJKrEg/M7XjOyPVj0ma3t/W2emLqew3Sl9/PQ6fNoTQPOR1JA2w1EsO+/Lz8Oh+tec6IwdbbPq8lpcJSHbwY9xXOyraixTesb4waSnoHr6Qles/8X+JT0BFfjOvpvWRWaZMyjGMuqMKwoEhaR6riFCNxSUrt0ZILSWnMGjOpo0/G/avY8bhb7+Kv1g97y9nPhSsS3RGpmaN5hv7O1VZc//OEP42u/9mvx6le/Gnt7e/jkT/5k/Jt/82+wXpurSUZR1Pn76Z/+6W0168xxXgeVS7hRlPVrMl+sUTVuoLKIsJdmzlV2aXVdWm3XFmPaIO2DlHcFqC04j+3dwssXN/HyxU08bIVLn2ec50gljlC90aagZ8MF/cyw3CpTCPQ3jchxPZuhx0ufN22L1KYh7XSVuxafdoSzLv2Ji5AMQV9EUJ+I10arX7mLa+quFmp3zjvimViWKW57rHvbwtamPH/2Z3+GsizxMz/zM/hrf+2v4Y//+I/x9V//9bhz5w5+5Ed+xCj71FNP4U1vepP+fvXqVbu6XqzKBHO4Z0dDhFihM6xtYluk5jyEkZ4XtC4Bs7Ph4tt1kSBj7pdWs1LhZQfHYr0hYlbA3YFTuDLhZXs3vQPppsso3Ku/7RCdinlc65aTorH4Ppfg0s4bsioTXE1OO24YrlcJ0ZvQqsp07CpKdb2hmKrvmNqCsu2+RyItNny5cwhX1V0cxTmArmWEa0/GkhQiJlLOFW5h6SNLbdnaFbSsFJZV0smiu4jWuJHcwgeLhX6eQyMEfZbRIYJ8Xh8/flX2h20TttZLvelNbzIIyCd90ifhz//8z/FTP/VTHbJy7do13LhxY+Nzhr4Ym7LusbB/eJ+QcBOi5NJCjKljG+GnY8tOCa7/4J+BlryQoBUA8jLGajnTmpWDgyVmKsdxvtDWlaGhvYA5C7QTvl2fnQSt9DzOV3xvEhRg+65AV44VG0RSJIzRrGg9DAtzvtroV57Pj8Rjzotbh9c1pN/Y9XPo0q1IE1Tu9rG3rarS2B7iFuIko9WbQG8z2uWwpHCrjulKUpqctP/bZ2MRZ/hr80/gg6tHxSzazjafE8IKbNENJOHWrVt48MGu7+ztb387rl+/js/+7M/GT//0T6Ms3f651WqF4+Nj4+9ewRBzpWTq9Jk87f1jTbZ2HecJu1pd2CYyp1mK23cWtV4FwOxojaNFrR0Zm/eDd5Lrsl6UkJOjo2QZRFSGYhNT/ibnnBJDlhjoQ597w0UGpA7/o8sHsNTJruzzjCMVNOBIocrngai4+p5tntse/KZyTxGWVWpYPVZViVVV4lZZYFlVmiTQdvpbVkXnz9xf6T9X5M88ioNdPnRcm56fLCpphyQvojUWcYZXzF48V0EiQ7CzXusv/uIv8BM/8RP40R/9UWP7933f9+ELv/ALsbe3h9/6rd/Ct37rt+L555/Hd3/3d4v1PPnkk3jXu97lPdcUlgUXzoP//byRiLHYlrC2LxLItqT0lUniUieEA+qwZX0e61TiDM0xs9AWFUZSgNqqMiVRmep5GWNtm7rsrggr4I78O87r6Aq6F9Tu2/kCD89ONLE4aczuQ893UiywipjGCWknrf9YTD1THnLMJn1yiIsnpN4QN9CqTPHh/Bo+JX0pKDJHHydYYgjc9VMvVBju/hgKO1SeVmmuV2xOkUZFUPZlAu/TJFdOH2w3kF1HKKKqGnbX3vnOd/aShfe97334rM/6LP394x//OF7/+tfj9a9/PX7u537Oe+yP/uiP4nu/93tx69Ytcf9qtcJq1UZEHB8f4/HHH8c//19fjvnhxRCv2g/D1ARpW2Rum+6fsfqEbj3DRGEn2Qy3Vws898IRqrsJMC+Q7uV4+YM38eorLxhaklA
tFP9/ki2wLpWRP+XB+R08PDsZ9Ltvk8Duwq3Xd9wmJEXSrBymKyM0/YhFVtlE0c46bFsReNtevriJq8kp0qjA7YErJE+FbTwL29abSMdsa8LJYb9j8zjHy+c38bf3PoJPSV/yHhtCZGxSwi02QFerYrucJBeUXTfVyV1B9feUfU6wrGZYlik+sr6O28ViYy3KFFidZPiZv///4NatW7hy5Yq37OBWvf3tb8dXfuVXesu86lWv0p8//vGP4w1veANe97rX4Wd/9md763/ta1+L4+NjfOITn8Cjjz7a2T+fzzGfz4c2+55CyCxi6nPsso5tEZVNIRGZooyRlzHKlUJylOGhaydIVYGH9+6I+hMbdpZae5CjMGmtk+khU+f1Wdiknm0SFQntvU50qLo9e6TPQFebIg2i6yLBYdqSnF1ESpz17zZVfWc5aErE6PnsEMvFTJOAvkUIJSsJ6VJs0GrLzvZYhEQiKK66pRWcDcISEJF4njG4ddevX8f169eDyn7sYx/DG97wBnzmZ34mnnrqKcRxvy/uD//wD7FYLHDt2rVB7VqXCtjhzd4kf4RP1HWecNYD49BBaqxVJcTaEqkKD107wRNXXnLm7eAItYjZWWnr9ozXDPUNqlNiCr1DH4Za1Oi36fuNTrI5ZnHRyakz1MRNhIeIim8Q2HXOmfNibZvyWN/zQFqmkDJSm+i3P84X+PD6Om6oW7gWr8TygElSbGJju3364Mu1sgmk7Mndtah2ZzWzsfboU21s7Wn++Mc/js///M/HK1/5SvzIj/wInnvuOb2PIn9+9Vd/Fc888wxe97rXYW9vD//zf/5PfNd3fRe+4Ru+4dxbT6Z8WTepaxsuorOua2qSMhbroiYxt4738eDDt3E0X2JdKrxs7yYAtw9W0qVw4kBWFTuz6rpUOEzWuB6QQ2WIr3kbmGr27MMY15/O9hvwTNg6ISBsAmG7goiokEvQtazCWZP/Xda1aZ1jrGkhx9hlXAuYfmz1AG7O9xrRajuRWFaqY8HglhKbtNj7OWwSYxMWl67F3s6tNXw15larkjVuoDbf0zzOtfbKhfNmadlaa37jN34DH/zgB/HBD34Qr3jFK4x9JJNJ0xQ/+ZM/iXe84x0oyxKf9EmfhO/93u/F2972tm01K3jG6xITbqIj2IYF5bw9UMD2Zk4uhFg6hlpViKjczWYoiwgP7N3Fjf3ben+fC2iI7ohnpeXaCV/9LvQRqF3oAMYIcTmmFFO7MIuLliA2959bV1ykxb6mdZHgJJvjMF2NjgyTcJZu2l2fj9bjGuvusxclHXpuoEtaauvKw7i2qBcF5CSg/l8IZEPpsiGQrC59wlt+DH22yRBva11nTVD63EAh+3xw9TFTjXtbe5rf8pa34C1veYu3jJ2LZROsygTVgA59bDnfsX0DwaZuo7PCtgS5hFGzqAEd1Cbun7yMMdvLcJjWmZftRQ5XZYLb+QJHjugdKkezGH7/7LbTej8+S40LkjVnV37pqc6zC5LSV6ftJrCJl605sl14vIz9eZvYlCCOPecYSL/zVAJ6XxlfX8HPT+tG3Sr224G+kupsUs5b+iQiM9I+wkJnJO7qYmwCY5MRyZLTPT//nuBmcaC/S2H0fb/lkOdriEXxXLiBzjO29VKfV23AeWvDJuSkb7uPgPRZUWwUZYy8iDFP206ujt5pB7WTbKEFmtKgxsty1w93+8xULpIgCefhWejDlJE7/efaTMi6LpXxm5xkrWmcE0f+e9rtrK0zy04o81TYZHIVgl0I2I01tzYgmmPeb77fFrLbRJPj+ewQz+RXsYgzLCJzmRjKaWJDIjXLatY53q6DkgDKdaa6TDd/StYpx9uyrGa4WewbCxreyvdEF1AIYTlLnP+ebwuQBhL63ofQhFQ+3+gQhHYkUybK2hSbdn5jOrMxg5aLpADtekCnyxmODpZsIcGlkQ7/xdUeHpyfNm0wZ2drYYCjevj/PuvQUFHymGeh77gx70rvOQN/500JiQ+zuMBJPqvFts3ARQRU+v3I5UgD3ywuDKLiwrY6+l3mnemceyixHKgj2gbs+s2VzNvM1QDwseU1vHx+Fa+aPY9lNQuqn5fjbpclUsMds2hW1yYS06bGd7+D3MrDE8C1+837e7M4CMqgPPTZtPs5e9sQZNV9aFnJqmRUNNBQdflov+qWO5VN69/0oRuLTc35m3RuM1V4CUtWKhwdLLFcJ+IMjNp+ks8wY7M2Guic1qBSYV0oHKZrne+D0NdxjBERhqLvuBAiNLV7ZtuDFwCDqBBxJB0K0I004uDhyoRNfPVTvn/bEp4Phe83DA3ZH1IfEUmpnM+SIllb/u/pw7iR3Oqsp0OrD0ugRSjpPwA8nx3hOF/gOG8tsw/PTnA9PcFVdRc30ltYRGuDtMiuJxlkRanrMInVsmyWb1B1wsE+Ya0E13O5y/HifDzNlzhzXGSSMtT9w5HEpbay1HXVJOQkn+njQ+qxMVPDF0E7y1m01IbzQExcx/gGJW99Pdek8+Iw1x0wzvq1S+vULjDl72eXCf09bctlXzlxEsLOdyVZ4maxj0UlEBOHUeBmsY/n8yN88O7DotuQvp9kC3wI1zFTOf7fVz+kSREAncANaDMbc1eSi9QQUSGidLPY166iVZni2bW5xtQkkyPhGeQpBKbC+XnSJwC/aVP4R6V6ed20z/5+ic2wyWy6t5MKcP0QMlaWaxsINuHwzeh426gNh8m62d61WIztJM4aki5g0/qmPM416Nm/3SwuDC0LfSc3oPu8YVbY80b6to1NnwtjvS7HOxwyAfC5gXQ9zXNwmC5ri0qVAqW1MGCU6ZT1HLeKffyfOzfw4qoVtPaRrXWp8Ae3Xo2/ffRXeDRtM7frFb9L4Jq6U29jVhNuieEkhaB1KqomKh9bXROtKttIFRGaQiAr7kM30LpIjEdnGyTFtX2bg0boi20mFTM7hrGzzG1iCmHk4GM8QlobWamQqgKnWYp5muM0S4376CMlHEbiMaZZsX8T78A24PkSffA7HrymMukPOqf127oGrr4ZtbRPSthnf+62Zzsi0l1hG23wWTjp9xpjpeT1bALdBm2dSXAr3wMAY62mrFL4WP4AjvOFjgrjgvuTvKtv6bufL6728X/iG0DDcT62uoZHZrdxPbkNxBA1M5K7p92X6qifVZni+ezQcEHR9XXaKVhDNlnCZMqx58KQFR+mfvFcxGAb5wqFb8ZwHjq/TbCRpYV1YLZGRSIpQE1UCLfvLLC3WCOz1u8Z2taZyvWfncRsys5hTLnQes7iOR87APWRl1GkxWHS3nUE07bqmhJDf7fQ8q53FgBUHD5L97WBnpUXV/s4yeZ4cH7HWC/q+fUhTrL5Ru5lAn8unzmt18Y5yRZaC1UvinkXN7EvRgx1tDRCTpXbxQLPrQ87gvG2vcMn31NNNrMB9Vw4smJ3NLuYIZzXDuNewyRugwk6PcJpliI7TbC3WOM0S7FIssEzBf08huSDmXgRxk2P21Y9nXo3nBGHnmOMTsisY7iFa1PXx1lOhKR7tovfihDyjtrllaUxs0GEhspKsK
/xmdMreAZXOvtta1CfYD/0XLXVNsHz60OsygSftPc8llWGRZQZot5F5CYqz+dHeh9ZVLrn3k3U5VS4MGRlXSpUF8iaQDiv1zGmEw51nfRh09l2n0WFtCrLdQI1K5GoEntp3Un0+p8dbaO1aHQ57qobkCumc74dPB+2dcretk0MHbBs8AFJvA7r/g0Vco4pN+Xvu+3fYWz9m/5uPrhm42lc9J6X7+8rq+LSe/32vil+i3WhjHpeXO3jdr7A3zx8GlDAski1hYVHJPHIo5NigWfXR8ZaR2cVNi71l2PPc2HISgh8ors+caRdntc51Y9xL2Ebyv+hL7tvJjO2sySikpcxsizBfLFGEpdI4yJ4Zm6f+7SMMZt3n61tRU90jplwQBtS1zYHLKAdtNK46AxgafNO8jZIxMUlkqbPYzCV23IKbPs32BS+33BsXUOROgjqWd67k2ymrT/P3D3Cyxc3kUYFskphFaXIKoU0KnCraFf4ptXB53GOK8nSWIcMmMhyPbAOPrYC6LjC3anwurjwZCWEhISGu9nlXd+nxFD/Z0h53kG7TJpj6pii7KYYTVLYb7jM2tciUSVSh97BxrpQKMrYqCuNC+ylmfgcTj2LpzZMiV102NIgQwNIyAAkleGDIMFn+g/FpHqTCX6rsxpQs1I5B3naP7S+4LLNfUtVYXweCxfRPUvQ70rP7IfuXMdqL9GLZQLQ0Uh2+DEJfwHgdmNV2WaEZd+xhjHAyvzMF4nsw4UiK30ho/catiVSk8pt61xj6pZgd8p80AntsEM6xLxJsV+UMcqi/kxwzcYJM1UAqsCeY77AQ5dnypxx2GX6sMk93dYAFzrghBARvs83IGXWfaABjMragypdOz0/Ia4hCb5nYUq32XmxjPgI4dbO2XPP+H4XgaFnwX6G+DPSqXfgc7xtFGWM504P8OJqDzNV4DBZG1mT12VirE329OlVp7v6LD0B9kRtFhf3p2UlK+Ogi5kqvI3XR4vc8cFoStimbFcHxveFCs3s+u8VFGWMNYTcKFt48XinSH5s6TmYqcLpQrD1MmtAE5Y+nJXrZcp7KQ08fYPRkPJ5GSOJS13GV9bWNkjEpQ804+1zQ9orwmzy+0jWIr79PGHobzsEObuHSVzq7/z3522QtoW20WexmeK+D7EgArVLeV0oPHd6YPThL6p9AKYoesjkvTd6bsLf8zBZs2gg/yrTHBeGrNgISRw05gfoO2bbZnhfZzdEPNa3n89Ibb9y34zCZ1addAAcamruMRlnhdKWlLKIEau6M1hmSd0hpubA5po1S6SV7jdvMw1kQ57JMYPdtgncpsfmExEsqR57AEuVqY0IEWW64DuO3hNXDp9NIL2bo+uyLA+7hE067G1D6xj7HCUe1+A27ott/QuF/UzxiekmVvVNyoXWZfRxhuwiPMPthSIruwyr4wPVSeZe5Oo8Wy3sTtveZneEOlpmi/7obcDuGFyuAwCGCyhWJbIsQZYBsSprIrOo75Uz7LFUIkkBuvehb3ZuH9+5ri1YPnz3yd7vwlTkA4DhhhuKRJWdAdFHXKaGq+4pBsDTLPUOskMwtj2T/s4D6uLPRKI2vwdDzk2WnE3uPbf4jLEu8neQTwQP07Xh4nlxtTeobttFau8bo/cS3aR8wjbg+i8MWVkXarAbaCyRsF0oPHZ/auxqoPfqBwbOAiQy4Nq3K/jOyyOACGURATB/zxUS5EWMxSzXocySVUQiKPYgSQh5ZqaYPW9SVurMB8+ANyAdmxzrrXfDQaevbsCctQ8dkO2B2L4P0n0JGbzta56SdLhAbaX2TfWbTlVPKOmhe7WpFWeTZ8/oS6ys2nbSyTFjkuuYoXVRv+V3Kd2nlpVtEwffeX04S8vCVORgSrN/COzOgM9oXOZ+flxfefs4wN/xkaVlMcv1MUUZ4zQgC66o1xjhTjtPM+ApBokp6ghJBGaDD0zLLJlkds7Br0u6xtDrlsq5QrHtY/g1dUhPwL2yydKuyMWmlrSx2PT6hpId+/Ogc7HfnfoIFZc4yWc4bLavS9Wr5/O5El37QkXFvH/z5bca9N4GlzznKBqB7aYkpY9YkM+QBilp0Nm2MHHXsAkAfZbKbLsNrvPY20NnQNL+sohRFRGgWvHXfLFGokp93dptwvJD2C+4bbGx71lQWG6gpSMEfCDbtHPelrXDNjeHvM9jZoJ8hk8Ds2TNGIO+exMqgieUjvpoe7xBm33Pgu86tvX7b4JdEY5Nzm1blsac0yUoXgNYx/X3k2zW279IkyEeRRdyjEReslLV2b6bVBpTGQ4uDFkB/AzRtX+Kc0wlcts2NjZNTzArCD7XRAOHXSfBZ2KnQaBYN4PKzLxfNmEhSNYU6T5JGhoeUtlpt+dej+mg+2b7Y8mML7zcV65vvz0ouwZvH2LVEoOijLESYiaLMh7lm9+UfPSRERdiVaIsYkMMzo91kRifxWQqEuK6J9uMQtw0h86Qax/bN425365z8QlZFivsp2vcWi16rbo+uIIQXGOUNFEbMnnPB4ydF4asZKWsWRkbMeJCyI84xhrSF5Y5JULNwH2YYoa+aRumrJvEtRxRY12xtydxN1kc0CUpnHS5ZkT8OF4utN1D9ofArkMawF0DTSmQPg4aYMdAOm5sXb56pmxfSLm+42r9VI2YWftC22wTFxchm5I80DmHRDPScaGwydgm7R9KdMa8Z1MRHKmuvIxxvFrU+4Zei9C39aUasCdbJPjeb9J4kPchK/0i4uJ+FNjmhUK1A5fJFPkibPjEVlMQla0JFCcWTW5CflzCPdcsUuo4KAqoLCKgihAldZlYlboz464gDols0Pk6/v8Jxakh92uKQchl7ZAwdt8m4AM6YA7q2zzvGLRtlchX1Nk2ZL+J8b8D3z/WzbSNe+6rcxN32Jh3ZBcEB5D7Km8/t+H77jre0Mr0TK7uZrOgdhTVAGtWcMlzjryKUe1QVCvlBXDlCgjRCgx5kM+jv5jDFwJnl+Hl+HXZdfQluBt6X0XLyjpBWUSo8hiIzMFOmo268oVwkgLsRiPiuz8hg8Z0Foohg+n2MLQdsapGt90mRvz8tE+qe8j5KqssWfzs7XyfWX+o1UdqZ/uc032SrnnXGOsOdB07lOyEEBxyg23iovIRE1cZV7mx53QhiUvDykyWFMoE7j12YPsuDFnZNfrCOTeJAtj0mKEYQgzG1LtJuU0Elj64Oo8qr8W1UeLujDexjAx9QceSlLE6CLNsd+DyDcwSpMF0l4hU5W1DpCrjmoa0VyYFJqR9dA4f4eiD7xiJ2LhdSZHzOAlFU6ZsykcjSMsmRMd1r2sSFabxcdc97L0OITd2ArcQ9BGb0P5k6rHDJwrm7u8h0W7FEJ1QcMkReNWrXoWPfOQjxrZ/9a/+Ff7tv/23+vtf/uVf4m1vexv+x//4H9jb28NXfdVX4Ud+5Ecwm7kTrUnIixjVBoMCr+esMbQNQwV+ffXcD3DNdqoiQlREgIpQNW7ZfK2QNI/jct2GurpcTLaFiJ/H9dsOsb6MsaLIeo/hg2TIMc5Brwo8H7dq0THRuAGubwCuighlYNmhd
U9+/Jj7J5ynCD1vFTnve9W4SitPXIBNxkLI3Vj0uQHrMv1kZtg5N7fOSJDebxeBmdp6EYJQa7WvbWPatXXLyvd+7/fi67/+6/X3w8ND/bkoCnzJl3wJHn74Yfzu7/4uXnjhBXzN13wNqqrCT/zETww6D5GVqZMOTYVN2jOFhWIT2C8lfyFt3/Z50gYAbZt426j9/H6VRYxiHdcdahF5rSs81NWHvllVnyVrjBjRP3M8AwuIZ4C1BzHvMaED9QhU+QR1h5CpLV7D1s7TV5e0P6pEIuN6xlzWGV4+xAoVOSxGfWhddZtbVkLqGENoQtd5s7GNcVCapA09D5U/N5YVADg6OsKNGzfEfb/xG7+BP/3TP8VHP/pRvOxlLwMA/OiP/ije8pa34Ad+4Adw5cqVwefbJH59TD0hmSY3waYExPXybBIGOoWLYVegNkmRE3QP8nVShylXEXCqAFUhav7qct3O1Jc0K1SMyqM0hrpzXPsmJSQTDnqcmNjt6GtXn6uhbxB0kqKAc4cgUtXG98qlSZH28f2+faHntH8T3/FB56N74bonPZaf0POGlqHrs++pZOmRXGQut9VYy8rYftJVd6h2pg+h2poh41vfRG0Itk5WfuiHfgjf933fh8cffxxf8RVfgX/5L/+ldvH8/u//Pj790z9dExUA+OIv/mKsViu8//3vxxve8IZOfavVCqvVSn8/Pj4GABRlBLCbvct0zmPPNYSI8AF2ClIwTLtgnrvvhZQsMbZ1w0Yoeeqroy9RFm8/rydSFcpVjAhApSqEDD0hv7vrd5NIVCh8OohgcPfKFkhJ3/6oiFANHFQ3bctU1qOoqacKmO2PReh9HHNsX/khYt6+84nEJ+R5c1mqfMf6XFYIex6KhsRwIhdqpQnRzfTX4e5b+/pOH87aOu86T1GGP6tbJSv/4l/8C/zdv/t38cADD+AP/uAP8B3f8R340Ic+hJ/7uZ8DADzzzDN49NFHjWMeeOABzGYzPPPMM2KdTz75JN71rnc5zxmamCoEIaxwSA6KMRiak6Hv+CnaMrYNU4jbQusIKWe/5N3ZbFx3gGz72Iyjrnswxn8vDg4DCUdbx/ZdE5FrxntOIofGYmj7idxMcd2Vqpz1+PYNLVfZVpfAtg8hcqMJzZhjAsg5dwuGaHIIfe9yiLB4jK5mKkuNPRH1uf+HYgp9z2Cy8s53vtNLFgDgfe97Hz7rsz4L3/It36K3/a2/9bfwwAMP4B//43+MH/qhH8JDDz0EAIgiYZZYVeJ2APiO7/gOvOMd79Dfj4+P8fjjjzvbMlSFPSb6ZGo2Ombg3sStc7+ic6+iCtVs2hfSt9/VuQXPjAM79F1G5AQPxEPaNFWYLJ2z0SV16h1zn84ohNd3n0N/g5ByY4mVfZzPitZnmeEum6G6l+7JNnSFRvaEJrwNQ3U0Uoj4lCLh4Zo3c5LnslJvC4PJytvf/nZ85Vd+pbfMq171KnH7a1/7WgDABz/4QTz00EO4ceMG/vf//t9GmZdeeglZlnUsLoT5fI75fN7ZXhXxxomMxpAOn1tkl+6a8Vk3x3VGUv6ITXJVbAJXKCbto5fe5Zu2YXQ6VQRpBWYfdGI5x7nsNlJnPNZi0qv5OEsLxhTnnrr9VN8u2sZ+e/F3GNqGc5DfBEBYu/uuvUEIkQl2L1p19bm0fKHtwRafKMyKNCTMm/oIX6i2fNx2BL6uc4yVE9D3asDxg8nK9evXcf369aGHAQD+8A//EADw2GOPAQBe97rX4Qd+4Afw9NNP622/8Ru/gfl8js/8zM8cdQ5C6E0cygyl8mN+sClwlgm4Nk1yNSVC8lzYZaRjaqEk6wRYJ9Tksu0cI2loXMI8iaRIn3vN1FLbGw1ILzHx7Xd1ptwiMbbuMeU4yBoS0o4pzjcEvvZsi2jdC3C11bpfQ1xLfaRnqNZnrPbHntC4C47L4dOHULrBJ2ljo566dfpdUT5Xkr1tSFu2pln5/d//fbz3ve/FG97wBly9ehXve9/78C3f8i34h//wH+KVr3wlAOCNb3wj/ubf/Jv46q/+avzwD/8wXnzxRXzbt30bvv7rv35UJNAYDE07vamGJKxN91CHdNEQVajy2IgGqoqoswozYWiocG+HVYV3pnbH7ezIx5IIThBc+/vqn+pZ9rVjBKIScGX65vsix6hgHDuGAEoYaK04lwi5F1KZgOvqIzVT6XTs8uL2UBfQCEGwv0FR3UcFEineB7mSPIZanNt6hrm7p8LWyMp8Pse73/1uvOtd78JqtcITTzyBr//6r8e3f/u36zJKKfzar/0a3vrWt+JzP/dzjaRwQ1EWMdATCSIeM/Qcg8pP01GPFqlNVDc/R0jWSl+oaGiZsdkxQ+Cru95eNnkimnvTdDiUh2VoFld+XnnH8OdkIwtKKHZxjh3AJhwSEali87uPzAzCWGtQX31UZ58OZxcYSoalNgZaYTplB17vUJdoaNTaUP1KfdDI96cnJHxIdFaIxRnYLOvwVIiqqjr7VmyA4+NjXL16Fa/6D9+DeH8xef1TEA5JIObLbTBIYEnq9pEZPneFTVKLh9Tt0n2EpFuXwIlK53eKqiAi1gtHpzPEmtLBrgjGkAGnB5L1wiYPEkLKbBOTkJldwraUhR4DTE9MtzH47XBADQ2759jG5Ms7qesZE/r0PVIZIJy4+MZO7ZK6u8SHv/b7cOvWrV5vyn2zNtC2XCuhAxOlp9bfrayZzhC50AySgQmYzgrbjEjxifA2yVcBoCYmQibbUdcj/EYbiWOHtmHK32DoDHUgqQgpf5ZEZVvwXdPG5GjM778BAfW2dwPrSFCdLtjka+S5h+QJIoyxYo+t15mocKCGRpr8TTGW9gmIJVwYslIW0eAXS2Klo5JrSWuZOM7nRrjbY5BbaFcpvvsQOdqzSzLFzz3leQfc49HkZKzuxHeuDQd7aTC6iATChZBrDSEYofdMcmeNbUOoZie0LWPrAbBda1/fuUbqZ4BhLqVtEhsCbw8/W7AYuPcEsuWaoy9b9CbWpQtDVkIwZuZtFnYMdiPM+boaxwMGDGPXfefelv4jGK6XxL6nEplxlZEIx1CX2FgX2sDjXBE8vTjHuhAajPj/exl2ZtrJ6hW0MX0C3m0hVJdD5e5LQjq1zghWP7+FvlgS2/cl5hs8Jgh9XmhWY5cMoDovGWx3iaqMpnE1hDLPyu16IAxKkCWp1IsoOL9o3wswiqk7HrCtEZ+QBez49xACFHIum/wMfAaAe8udM+VgQ3WNqXNoNtRN6hgCqc6xA4x9Xza5X311jynXV8em7bSJ2lbhE+mOER/7nq0N+sCt5z1yjCf2Mzx0TOi4ljacrA3FhSErqKLwQSa0yoGWERFbMN8DGJyrgD+oYxXufdv74LIU+RaYC1mMbiic5sgJErABjt9iyD2b4MUerBHxnHOUmHDCDvk8pOXf9sz4fkBQGPiAerh1Klgfw7dt8jsO0cfsCrxNgnVo6Bhhb+v0wSE6yTGrdjtwccjKhhjMMncVdRFaf8+LEZyTIwBjO+sx5Gdbi9FtWs95spQA05KTTcreDwjp9EPW7dlk
raChx/rK2+9zHzGbwiJm1DfSejM41DxEr2Jj24RGOh8RqU0JldSGgVZKH2kBHJPHLekkLxRZmTriZCvrnGwLfW3Ykv81BBdhJnpuQoaxXWJyic1hD/Zih78BUfHBRZR81tS+dYaGJlK7UOAWCn4PttWn0Tns/1PXDwy6BpeeK0Qi4NPMDBmzLwxZqQL0Hb7ZjrfMGcyUO9j0ZXEx7C2/gCF6A9fsbRdCtFEIJSaBeS22JVg8bwNMpSotMJ06kiLUjXUWz1yngxdIy1R199U52io6hfuVWT58At5ziaFWmfMwQeuzzIS+gwGupCGrbOtjBvZPF4asREU0WNBKP1YE9jKetXsn9Lw+8jEmhG/LGLNK7BgLjk9UNmrwdh0TUldPmalICh9kzxtBsbGJEFfSXYUQDMmtcd7v00WE9Nv3RR31bT+3pEd6vqQcL9JkcQo3UI9+RWxPSF1SPei+Tz7XYsh2CReGrGiMcd1wonOvdmL8wR6oZzHqkI5xPewB2OWg4PO5BmNHZHWSaBDuO95Yg1P/r9RG1fScY1qdEH2fWsx7EdyW9xpc70Podk5aJJJjb5tsOYUh6JtwSgTD9Wz3ER9fnfx4X3uk8/W1qRmHttHvXxyyUkSTzHanQGiippB04oMw1ArAH3D74e178ENZeQiGvji+43wY29aBx7k6S2f5LT+TUdElIZEjY7JrO2GbZGYXuLSoXGzw0PC+9Z6GTBhcJIjX4yszOYa6pXzHbzpu2mPBkLbct5aVCTqiMQ/wts4ztC2DYT+wQwb/KTv9gRqP0W2Y2CoyJMvntgZJ2yLCv3Pi0UdChpzvXicsfRgTYbOpNUYyo3P3V0iETog2R2qrq+5Nrus8WKh8bqexdYXkrOkL0e5bUsG1VtbksC2VPckKnW0Y0rdt8ExcHLIy4IbZP8pY0nGeMjna2UQ3xlhX0pBjbYLkIk995ssxbRqIoYm11DLqdNiuQZ4TgLGkwj5uKnJyCTdsN1yokNdXj6tue59PeyMdG6rdsqN/fNogH8HaJvr0EecVm0xQXROkUILhI1C29cnXto09AhtYhC4OWQmA60c5T6RjCmwla6SPgIToPFxuI9+xoa6o0PoaSGus8H2hLhw9kyoiREX7v1JN5Eunc+9aPPi+ewW70LdcNExtaQghD6H7zhNcpA/wh2VzSJaoUCG2L7w7lHiOTaIYVP9Aa9HUGYqHrAk1VRJAwoUhK1G5HdKxqSkU6Gf+U3Vkm5o9q+Dk/jgf4dwjtCQh24NeSHbueB1BrYAy4G2SicrZm8zHYAzBusgEp48QuH7nsRaJ7bkWxwmZXccRfCTEB7o/odc79L4MFarb/Xqo5asvEk3KaTOVW27XCCVQQ8anC0NWpkLI7CQkudK9+JBJC9NJDDlUq3Huwgk3hP1scJJSzIcdK+0PGbTuheeoD/eD7oXQN4BvXn9br31fde6Lc3Kvh4avbhLtZRJp06XF67Tfp3jdbu+K0sN/yyFkbMg5xqRzuCi4r8iK9ENPIZILMU32DUYhL9IuYIvEXNYal6tkCEHpW/V1E7IzpSrfnHm1A0Ocu60pY56n0FndReiA7ifCsgl81quoiBCv6+cwXgNqXW8vZkC+D5Qz6nPO/702xeB+kmLfE35t9F7G6667slJAMY+0m7Y5QpejyUelgCqKUM5k0hLSJrMP705y4jX0EjradWxdbt1O/l2yyvW37aLgwpCVqByQFI4fF2ACdLHboebbTc2o28agGUCAS2XTiKcpV3wFxt9P3hHF60h3htyaInWguzLTt+eUk+L5IkNc8B23CYZGKN0vnbE7lFyaCHUH5bgwn3e1qvcDEcqkO/DZcFlhfMRgDDjZN54FNnC35bqEg5eJm/+lEghL0X3/qxhQp1RfZNwT+z9va9A1sfbz69LnYdXEeff6+lC3NTLaWSZAOesX8IeiT7i9qWZH7JvKS4HtTnCviNZC4XpA+9T325jt99W56UA6VtBaKQA566AKa98ZYmhUSWhdZ2rpm8Aq4KvjPFgdbDJMzxa3DkQFUM54OUBljLgU7bXE6+41VapraSyNwTrS5Yz/UUt0hlhpbAukREJixzvYCte79wfoHmeTFX4ML1sqgIp1rjPu1iWJ4aVt/FrqpSTkum3w6/T1R4ZlKG2IygyIIkCdtr9bK+znbfNPSIbC1S/39T3OcPsB574kK5fwwvUQ2gzc5eYKUde7jh0idgtBCEFpr8fczkOL6W92qza51+ZinCviMha7XDMnFK7oIymnDH0OTYLn2tf3+/U9S673QBrsyP1Arhwa8OwBvbTaRAQlzir9vVRALITBtwNn1NlWssGQBsRyVlsO7YEvhLDQ9ZAVg66Jt8lHVoxyZTeizoaC3B77+uPMvH6+D2jJzNB3VyJGEug+x+weAN1rNNoWmyQyXjf9zbr7u9Vlok77iXAa2wSrUt2mfh1O6ESyb9tQXJKV+whTziJDH0j7wXbN9M+DlapLltD6sFlnEa+BZFnvT+/UhCU7qDuRkIigexGbEhaJnPJ9HK5Iij7Swj8Pcavwc/J6bBLkq1cC1yVUkWnCbyNcoAd2lTXPl0BCABdZadsd563lwEb7XFbailLM6JrqtpUASjZzD9FM1MfDuJaoaK8jzizLQw8JIcLFr5tfowt0TU409dp1VaolUnHWbGMEISQHUlRW+hh+XRJZkurxEVLd3qx1LavGcmZbgbh1iJ+X/7dqt35fMJdhmFZnl7igXev5hWvWt60Hw67b5QPe5oM5tfVDAu9QXLOsfs2QWRd12vG627ZKNTPGZmChDiSy/NNj4BqUd4G+mVUoYXERVx690m7vzu66lq2uRWDMs9Ix4Vf0e9mEqb8u29pG9RFi9nzYAxJdT1SYJCXOKl2eBqpaeyITEYOsBFgqKhUBeYUyAdQ6aga2RgTKBm5+nClIpWurRaJkEaLz21ahqKgHcMPV5SEhcQ5ESXvNrnL6Wgh5W4aIGdWhv0upGYR7VqqWIBj/PdC/les36LiO5GsS20jtao4jgkUks24zq4OTzMzthhLP0fR55SxqLGvuCcau+6dLsnIGGDsg+46XBmufv9WeOe4S2zinNLseC01OKtmUXs7qTqmY1SQlLoAqa83v5WwYATwPuolQDLWAGVoFD8+J8i7Ja4mE+TvQIBTq2gl1CXFLQpRb+4S2oUeLwfULfPDutLWsrDIVu0ZhQDee9eFWh3r2HOmZeRVHKGamJoLaw3Uy9bZWh6JOWwujfX1EUCRiElv31t6vAt0qsK6dBmxlHaeKlqTZkCyhdLx936iNru02pOvsRdNG3i46H1md2meoaqIeI+OaJQITqjOKVd2XET+rokj3g5WKUEVg33dribkkKzvEFCQlRCgYOvjZA/x0LqJ+H7I9swYkv+rZ6CRs83/FrCX0opZsFioJ/ID+34vql7bbn+8dMtP9TPdMHKgd200/fJcM2ESAHye5Hfh+oGtu5+cs0m59NrF3EZLOfmHwtgc3iZhI5QiRNejGnUGbWaESn8CyHujouskFJLkOKP8IuXq4wFdbGJkFRbIMSdcobXddp/M
6mmu0SUp7nRFQVOJvLh2jyYhw/rqdrSut3SYjVMfSOXfnnPU+7hJsnzubKPjvR3suOl7eli+6FnhXlFUxi1Ds1TonHqE0JbZGVn77t38bb3jDG8R9f/AHf4DP/uzPBgBEUffH+amf+il84zd+4+hzn/eZqs+M3ecjD51NTmlp6IPvOsxt3A3Q/GfPddW59rBZvOu3lnQRNOBJHXJHo+CxBsSFaf7G2qwjEkRtEkJ+m/P8LEsDtW2Rov3aZZaZx1F+EKBLJjj5sF0kdZRJ1T8DB3XokVV38z1pv5fCcwHQgMzDRNtnM0RLYrsJ7AGuj4D4yuo2WcSk811FplWlISpEtqUQYLKIRUU3TFoiKj7LkH3t0nXY183LVEmEKK+6BIwdU0qDfXOMtrCwNohalwBrFSc/al0/WyFWLhvc7UOamkpFHReW3WbbrWj294EkzxI907YqjpCc1r9twSxr9PuSq4iE2OUMLGpsexPMrZGVz/mcz8HTTz9tbPue7/kevOc978FnfdZnGdufeuopvOlNb9Lfr169uq1mOWe09n6OKQYLyYphzxr9YqhNzx91rt1eM2NXJE8iKvQ9KgDkZtkh+gGrxo5FRJenczP3g+F2EPIm8MGWCyF1RFA87LkKxXkj35Jrx75HdnmfnkFpTZA5gPDvNjnhAx4f4IbOxjlooLP3UZ1xUenPVRIZ5X2ugT4iEtpmEnJyQacLdA29wlPpPNZvaIdJA26Xj4+oSCRFImW0vyPCXVf6Pxe08nuhJDKDWu9hn5t/91mgXOXJcqHbO+LZE60mLKBX1LA0miO7DCfkfbAJDoe2kGWm6DjfA1YHQLFHOV667p9tToy3RlZmsxlu3Lihv2dZhl/5lV/B29/+9o415dq1a0bZTSDN9HzlhtQJhA8afceEWkg2gW1N6NYf6f98oJZIk+kWcUdyEFxroABtm3jbOh2hxfp91ijDOhLLJn5eJy8juWG4iZ+3kQ+wAFCm9Qts+/rHvLzS81KpyjC/nyVhkSxiPouafR+le0jf1YrIMhERdzZAaaDj27pRJo66BNFkrGQlolRHVcag0j7i4IoU6W2fE3GnLn5+ybJgiC/zejCMEbXvYzNwKTiicUqTnHONDZXjLh9JbBoxoif9bhxt/b57E+sybR8R12ROsth4agJaMhMXlXgPebsBNJaVqN3mccXZkAgSJxqG1cSRicQgMVxc7CjftSpa+60QaaSyBYUnopP6o232UTvTrPzKr/wKnn/+ebzlLW/p7Hv729+Or/u6r8OrX/1qfO3Xfi2+4Ru+AXEsP16r1Qqr1Up/Pz4+1p+37e6wzxEqUB0zYJnbuyG1ru+SdSD0XPosuby/HUQt83LULdcmEnO3wYZEUuz2GoOg434VM3m7ca4AAuk6T5m2gkQiPrbGgh839gX2/e47tYAV3WdJIr8GIbEIis+FEzrjFtvoICr2QBcy643y8A6EylZJN3KpU5bfq8DZt4T6XPV1VQKxivIKMfwDZdsmCp1uRZo+PQ931dXHm24d25oyhqj4fr9u++X9EiEkAmOQA+t7XIQ9e/w4XzkX6Qm14PTB5cbi+qC+NAo2QTH6sbjuR4u91sXjIym7wM7Iyn/4D/8BX/zFX4zHH3/c2P593/d9+MIv/ELs7e3ht37rt/Ct3/qteP755/Hd3/3dYj1PPvkk3vWud3W2RwWAtFt+m5jaAiLVy33GLguJJHoa2k7XzF0TFCuGvyuGNT+7zikN5pLrwNXGTRDiZqP7QOcqFVDNrOMty0zH6lBYURQB7f7/t3f2sVEUbxx/7s72WpBWoNrrAb+Cf/gSixCIQkkUA4IQXjQSAv5jTXwJxqpo/8GAAavEdzQRCVEJ0T8UTASjEYyigBpQsTaxookYyjuFQIBWlL7cPb8/rrs3M/vM7Oze3kuP+SRN73ZnZ2af29357jPPzHj1xvi1hcpDJeYvek9knhS24bLcxpEeXqBEuhG8xC8kI/IGweHK1/CmWPEOzrwyW9NBHDWUKiuccb5ifqmy0g1lKJG0BYvYFRJOILClW2/h1lt3+q2cjcFJ/WdneZUh/naqkUsqoSKbDI39/XTEHWsTbntE8EIJv5Xo4dDujnM5jhIz1vUsS5uqP/t7OL0sFCpPCjcaqb/rKH0tAAD0dysSz37HHC4F0A0dQkS9X6ifVatWkWKBZd++fVxcyrFjx6C2thY+/vhjWLBggfLY119/HZqbm+HChQvkfsqzMmrUKBj9wmoIl5V5OJPsorsIl9UosHMysFAjH2Rv/LSb2b2ubhci+9ZloSNadMuXpRdvVFVeqnMQ62q9OTjKVizGKBs9IquXeIOTAb0eXrK8dD96fbBQXTtsMKWdjvB6yWIY2IBUtmvAwu9IELd4D50GUFWGJTLSAkHyBp8I5k0FIxFHftY2R9r+OjkDZ9MXrP22zKRJRkLcd7FLgM9LM3bDZUg1m0YWV+T2W3n1Qsm8FpR9rLLdYn90yiKDfvuxuopELwuV3k98UbocvgtPljcbTG4dl/oOXMB1ooz3rLgt6kh5fXWe/clLl+DQiuVw4cIFqKioUKb17FlpbGyExYsXK9OMHj2a+75x40YYPnw4zJ8/3zX/yZMnQ2dnJ5w6dQqqq6sd+6PRKESjUeLI3OE27FbVTePIi+j7p4ZFsth9ykIZ7I1n34wuDZclkJTehmTaVWzX0frg0hUjNniqelDluiE+bGTDiNkyWM8HK0y8rtLsxf0vbsMIcKFzbiLGi+hTdeGJwsT6Y6d5t9KJ84Kopj53Ex/iUE8ujgKcb5fsWyv7mX3ohxPoeLul7oFU+ekYB3sb4W2xBAH7nRIsoqDwK16o40KJBGAkYv+3t/cl++sjdGkQXhb27T+cQDvGIpVPv1ggBIzWSBi7XInYkHTnqcSHn247PziuYR+ixW3UFQDfFScOJc/WuQHIBYtjlFG/R8YeIZVITYaXGiEUAmRiuhLl6TYiFx5wGZ7FSlVVFVRVVWmnR0TYuHEj3H///VBS4t5P09raCmVlZXDVVVd5qleq/1V84+BXjvWyWq26rPR/2TDNVL78f3Yfld5Rr3Dqj+3iYS+YBHvxCEFx9Hh9shS6cAIuWNClm8ca0ijLh9zu6yK38nJOHQ2QFiChJN9g2zcf8eKsEi2yqbGpoYReBKzMQ+XH/UqJHWsxOWr4KZlHhL++UufHCwGxu8xK5xAmwoykqmGe4j7OIyAIE7ariHLJ0+I9/eOGEknpyB+uToKAkYkXVlxk6nmxjlcJFh6+48eGsXtEDOxMpO2o8mRxgaSqOvfRvwMAfc+7CRTRzlLhyHSTpb6L3erOOB9W4Fn18+tl8UK2hIrOC6EFNeEci/XywlEOAMTIJPblRxygQeH0vOvbPOsxK99++y20t7fDgw8+6Nj3+eefQ0dHB9TX10N5eTns3LkTli9fDo888ohn7wnVhaK79ozuNOwAQAat6rzJU/9lOOIhhEZRbIDZ/BLCd/Yiptxz4g0qW9uCOUJZd7bx1xUfXm60VL58X3nyipQnx3orSIKzq4cVfa7eJkLAhIl9Mk3jnGnSnU
y662RCxy0myBLDXD2E388x94xdRn9ft30t9jd8QhlhCNmCxd7W5y5Y0nVIj5KgvC4AtGhh96fO1TkiR2y82LxZ3LwvqTRJTlQE5Xlhj7WFENk1xdaJiflI8OcJAPYQYOpICl2HYyiRBEw6PVhcGq1AZ9mxenFAqm4Z5XGSEVZe0QlwFpF1AYkvRLJuHp28qS4gsfuHDbBlv4cSlhdGXo49jYFk5l4MES9poihSkHWxsmHDBpgyZQrceOONjn0lJSWwbt06ePrppyGZTMK1114Lzc3N8Nhjj3kuJ9QH/qY3VuXp8nyRPcjZ/Ww+bp4WqjtG/M697aqrBwDskDw6T8fF0z+PA1V/Kj2Fm/jwMnkSK0jE31e8+fgy+vdJBItXvB7n98FiHUvNEml1uYkxODrdjuxDB8Apaq1yxWPsY6kyIumuovTDHR11kAkTa7vV3+4G5XVRNX6U10QmzqnRNWncG17rrZ8VMmzjKotDsdNqiBnRy8LXJR1ro/a6+MfviKogg43dCGK0jR+houN98hqnIj4/VM8TKh9xNBA1kSErVKxhyuzIRj/xb2L7Yj+nCPOoRqyKZF2sfPjhh9J9s2bN4iaDywSVSztoqP473eBPN/c/VZZX2EbMjvKXpBUf0VTsi1vQrCywjixPQ1CSUfHEjI7sfqteYbAmupMHAaePda+LnZbxZknrTczj4pUwIbIsnCuyMvUjfnNH/RTXEiUWpN14XNcQs52YfyP1XRjpwwhPVrBQ9XEMk1c93KV7+vOkNmpcjxiOSLounX4kPtbGvfvILoMQIX68MXTXCUr3BwEVHCyrO9elpRBfKijPlnpZASK9sE1nkj3RYyOOWPPjUZHBr+sT4jyMjrTEM5FceZkYnq4adIAR4LqUdYSF28t2JhTN2kBegyMzKkszFiWosqi8OU+JxJWvarRkIsZr0G4oAY43Y9Ftz0J5SNJ5pW5InZtSNd15qp4hZkEyPp1Owy7alA1qlgmWIK5B9dtT5vmLiHOisNtlyIJudY8HUF8HAHwMDCVk0r9H+lpze6smfzYhYFcKNUS5f7IeOpjdravGiUNo+GjI3fIE4LuVghrVJOatg5fzowRKep83oeIXr14bKkjcd9kanhMxnShS2G4ddVlOoQLg/M4tlRJybguaohErft37vsqSeFZ0hmx5FTWy9CrXOdtougkXVTcTRNi3Y1q4sPUBcG+kVA2U7puDF9jzS09nrf6tqMBbKmYlVyj7iQkvl9j1R9WZ8gqGiW3cMYp4JpV3TfSKeemupcSbX68V221EBYKqZi5VNjbk+QheFSKYl9tPeGJS24O94FgxkVEcjdDlpYNf8eW1HBYxkNZ3PkTwru7EhRmXLVmTKtzn79ko6xGwxY3VvgmnpxIv2RQpFkUjVnThho4S8Sai6FDFl3gtTzetCq/BqBRuHiFxlEeYEGJijIIF21D6XdwLQH4TZjIXgQXboOtC3dQy/DamfupCLQ0QucTvo45zGxbuvctICNokGnBqm6/F33zeA3x3iH654nBpEXIItDifBxHMK+ZB1cutoc5lPAiL0tPhUZT4FSOymCWv4kR3nSUVVFeQVoyPbCZaScyb2B0kEyzsMy6USA1JDhNLTACkzz9RmvYcYw8xMzfhbbHzCHkTLKrRmDIuO7HCIuti8SJKVB4Vt+BZP8iGy6rQibcIElWXkOpNIIgA6fRNYE08xTcc1mdVZLtoJlXMjhW0y3qzdIJrqbl0qHgmjABAr7yLiVp3J73PbWSX/jXkJipUXXtu+BYfHiaTyzbShtNl+LS9TzIaye18dBt6OwA3gK4lv2TiIZFO+MbNFeOev2qdpiCQxa/QQ+Kd8wvZ+9iFDRkPt7UyM1emZJ2gkE6AbST9XBRf4sLgfBaGgIhL8ShUrHIBvHWdF41YUa14a6Ez2iaogFc3EVTI+Kmn+8gpb14W2RsDFWgmigNnXZgbmG3AhYnuLEQhQ2k8RzdgUn7jiWseWcJCXt90GTrYYkcIcLXKpo/x9nRRCckgBIm4wjKLKgbKPj5HIkU16sNNaKi6UWWel6DWksErInkTcnw9eA9SYOfnQai4bRPhR30FF0SrM/U+K1oA6HmeZPeLuE4QvcIzk0dvapYMcmSrJO5FHOIMwHhjJKby22VUNGJFx2sQhBDJVrSzjudFp35kfIKHuU9kDZ3uG7vbTKaWCNHxolBp2JvOQryhAdzEi7OOlviwvWSMkHGIFyYt163I2DmiOTV9qiz67nW7pt3iRlJpghuZJc1H57rsF5lu+VlryVCeimw0tuybsOytmJ0uPRdxChgJk11FGeeb5dlTdcqnPucTrzPYkismEwG01HXiNlqIEkWquYio+47yvMjWCQIAexCClQ4A+mezFerGiJlUWuY4xTwtbFpyYMrl2A0UPQcQ+Tf12SEoGNGdFNQfl054W9YRJrK4Fi9BkVTZFDpp3Mpye+uWvaW7NYRuDR+bNuLxHFjCwiyKlnhRjQ6yjyXeStL14/+zXUkA/E3F9v2y89KwUPN5UC5ot5FeOshEoddj3Mr2E1+iEydCiYRId9rg0mH3AQoHt1V3vZZFrfLLYl03sm4JWVeRY4I3j6JGb8Ze/UZVJ31QiCLO+q4bo6I7S61OOpWQltnD7RrSiXdxEzH29Pks9v3FpwMQ0jJlRoAefUnG0fQvxxLulbdt9mdhpedEIU0KlysGnU7CFSV6Ny47mZi4kJP1WSZqWLeXvc2ju95tG4CG21zy5i4LqlR1EciCI7MZb2DhdaIjGZxwAv/ChZp9Vn2ORNxEQrbgFzrSBYHfeJEgJ+/z+sbuRRDkatRFpngdoso2iDLRy6XXmC/EDcfKxC71pbw8OkOFZSsg+4EVJEEORVaWKYpHDxPFUSPOdBY+dPPGiKtpe4UUKOIcSOyzkIiFEeNgRMRnkbPbni+vr1f/jIpGrER6ECKKBaTZVSnTb/bCG/kVVGQ534+oXrHUe71V4iGYgFOvjYheHn5H+uhGvbPlqkQL2VXE3BCUF0eMhxHTyDw/XuI2vFwLgYkWX7FG3gSJn64E0XMyUMSHV/x0c3ANWg7WpnGb64XCj0AIdH4TRV6FIF5knhiVV1HnHrIXoWTQuXfcmn9VvBD/7OO7bjEScq4ZpIOiztir/ywoGrESSiCEk/L1PawfwaEu2e4J4odIRkRjihcPPdsgV4ZQTx104wAoZAJLdbxKfHjpS3VDDKQU+17J+nosy+2GotabCR653bNBJl00XgRIEB6UyxF2BtS8xo1oeHCKlUzEYD7s5taVaKETL0Oi0S1llcnGbQVJ+PL0rCQhbHlGmB+BmiNBtcqoiI5uD1Lby+ZbUJYvBADK5oUI5M1WcaysDhT2MvXEeWZvZdLcPWy8XGP5gLNFjuySC/urumLcRqAEMdOoql6G7BOEqGDjidJTIKjzVC2CqBUDwwT6cp9Z74yGBydIIkK+4ncAur3SacNs8e5hnqCiESuhPoQQET9AGVjX6Cq8/Eg6/ZU2wgNdtz7sOVHnp9rO1jGTC19WB9n5e32z12lIct0oyOqew9UfMiKbDbRuO
bIgTz/DW11jMFTxFlkcluq2PZfIGk7dAFQrrW6+meKlXrr1cJ0Izopf1CxXlc5PHrrihk2r+k1kYkomwryOjnLb5ieNyIAXK9gfp9IL3YCQwc3iIT4k1Id2WdxDWJYHsz3Uh8qyvE4KFRgBr1itzNvtu24+hYBsOfRMrkUfeBHZjrS5squXcpi0XmzpKr7EAMAciTVpmbLgaB+jdHTLVpJJBGc2J53UzFs5AsvnPtdykqhVP9cXwqTksyyNW1o/6b2kkeDlxbevrzt1jCLe1CKEOqkKmGPHjsGoUaPyXQ2DwWAwGAw+OHr0KIwcOVKZZsCLlWQyCSdOnIAhQ4ZAKJTbNyRdOjs7YdSoUXD06FGoqKjId3UKAmMTJ8YmToxNnBibODE2cTIQbIKI0NXVBfF4HMJhdef5gO8GCofDroqsUKioqCjYiyZfGJs4MTZxYmzixNjEibGJk0K3SWVlpVa6gRIHaDAYDAaD4TLFiBWDwWAwGAwFjRErOSAajcLKlSshGo3muyoFg7GJE2MTJ8YmToxNnBibOCk2mwz4AFuDwWAwGAzFjfGsGAwGg8FgKGiMWDEYDAaDwVDQGLFiMBgMBoOhoDFixWAwGAwGQ0FjxIrBYDAYDIaCxoiVgFm9ejVMmTIFBg0aBFdddRWZJhQKOf7Wr1/PpWlra4OpU6dCeXk5jBgxApqbm7UWeypEdGxy5MgRmDdvHgwePBiqqqrgiSeegJ6eHi5NMdlEZPTo0Y5rYtmyZVwaHRsVG+vWrYMxY8ZAWVkZTJw4Eb7//vt8VyknrFq1ynE9xGIxez8iwqpVqyAej0N5eTnccccdsH///jzWOHi+++47mDdvHsTjcQiFQvDpp59y+3Vs0N3dDY8//jhUVVXB4MGDYf78+XDs2LEcnkWwuNnkgQcecFw3kydP5tIMVJsYsRIwPT09sHDhQnj00UeV6TZu3AgnT560/xoaGux9nZ2dMGPGDIjH47Bv3z5466234LXXXoM1a9Zku/pZwc0miUQC5syZAxcvXoQffvgBNm3aBJ988gk0NTXZaYrNJhTNzc3cNbFixQp7n46Nio3NmzfD0qVLYfny5dDa2gq33XYbzJ49G44cOZLvquWEm266ibse2tra7H2vvPIKrFmzBtauXQv79u2DWCwGM2bMgK6urjzWOFguXrwI48aNg7Vr15L7dWywdOlS2Lp1K2zatAl++OEH+Oeff2Du3LmQSCRydRqB4mYTAIBZs2Zx1822bdu4/QPWJmjIChs3bsTKykpyHwDg1q1bpceuW7cOKysr8dKlS/a2F198EePxOCaTyYBrmjtkNtm2bRuGw2E8fvy4ve2jjz7CaDSKFy5cQMTitYlFbW0tvvHGG9L9OjYqNm699VZcsmQJt+2GG27AZcuW5alGuWPlypU4btw4cl8ymcRYLIYvvfSSve3SpUtYWVmJ69evz1ENc4v4zNSxwfnz57GkpAQ3bdpkpzl+/DiGw2H88ssvc1b3bEG1Iw0NDXj33XdLjxnINjGelTzR2NgIVVVVcMstt8D69eshmUza+/bu3QtTp07lZh6866674MSJE3Do0KE81Da77N27F+rq6iAej9vb7rrrLuju7oaWlhY7TbHb5OWXX4bhw4fD+PHjYfXq1VwXj46Niomenh5oaWmBmTNncttnzpwJe/bsyVOtcsuBAwcgHo/DmDFjYPHixXDw4EEAAGhvb4eOjg7ONtFoFKZOnXrZ2EbHBi0tLdDb28ulicfjUFdXV9R22rVrF1xzzTVw3XXXwcMPPwynT5+29w1kmwz4VZcHIs8//zxMnz4dysvL4ZtvvoGmpiY4c+aM7fbv6OiA0aNHc8dUV1fb+8aMGZPrKmeVjo4O+/wshg4dCqWlpdDR0WGnKWabPPnkkzBhwgQYOnQo/Pzzz/DMM89Ae3s7vPfeewCgZ6Ni4syZM5BIJBznXF1dXZTnKzJp0iT44IMP4LrrroNTp07BCy+8AFOmTIH9+/fb50/Z5vDhw/mobs7RsUFHRweUlpbC0KFDHWmK9RqaPXs2LFy4EGpra6G9vR2effZZmDZtGrS0tEA0Gh3QNjGeFQ2oYDfx75dfftHOb8WKFVBfXw/jx4+HpqYmaG5uhldffZVLEwqFuO/YH0gqbs8XQduEOi9E5LYXuk1EvNjoqaeegqlTp8LNN98MDz30EKxfvx42bNgAZ8+etfPTsVGxQf3mxXy+FrNnz4YFCxbA2LFj4c4774QvvvgCAADef/99O83lahsWPzYoZjstWrQI5syZA3V1dTBv3jzYvn07/PXXX/b1I2Mg2MR4VjRobGyExYsXK9OIb/1emDx5MnR2dsKpU6eguroaYrGYQ+VarjzxTSJfBGmTWCwGP/30E7ft3Llz0Nvba5/vQLCJSCY2siL4//77bxg+fLiWjYqJqqoqiEQi5G9ejOfrxuDBg2Hs2LFw4MABuOeeewAg5Tmoqamx01xOtrFGRqlsEIvFoKenB86dO8d5Ek6fPg1TpkzJbYXzRE1NDdTW1sKBAwcAYGDbxHhWNKiqqoIbbrhB+VdWVuY7/9bWVigrK7OH9dbX18N3333HxSx89dVXEI/HMxJFQRKkTerr6+H333+HkydP2tu++uoriEajMHHiRDtNodtEJBMbtba2AgDYD2IdGxUTpaWlMHHiRPj666+57V9//XXBP1SzQXd3N/z5559QU1MDY8aMgVgsxtmmp6cHdu/efdnYRscGEydOhJKSEi7NyZMn4ffff79s7HT27Fk4evSo/RwZ0DbJW2hvkXL48GFsbW3F5557Dq+88kpsbW3F1tZW7OrqQkTEzz77DN955x1sa2vDv//+G999912sqKjAJ554ws7j/PnzWF1djffddx+2tbXhli1bsKKiAl977bV8nVZGuNmkr68P6+rqcPr06fjrr7/ijh07cOTIkdjY2GjnUWw2YdmzZw+uWbMGW1tb8eDBg7h582aMx+M4f/58O42OjYqNTZs2YUlJCW7YsAH/+OMPXLp0KQ4ePBgPHTqU76plnaamJty1axcePHgQf/zxR5w7dy4OGTLEPveXXnoJKysrccuWLdjW1ob33Xcf1tTUYGdnZ55rHhxdXV32swIA7Hvk8OHDiKhngyVLluDIkSNxx44d+Ouvv+K0adNw3Lhx2NfXl6/TygiVTbq6urCpqQn37NmD7e3tuHPnTqyvr8cRI0YUhU2MWAmYhoYGBADH386dOxERcfv27Th+/Hi88sorcdCgQVhXV4dvvvkm9vb2cvn89ttveNttt2E0GsVYLIarVq0asEN03WyCmBI0c+bMwfLychw2bBg2NjZyw5QRi8smLC0tLThp0iSsrKzEsrIyvP7663HlypV48eJFLp2OjYqNt99+G2tra7G0tBQnTJiAu3fvzneVcsKiRYuwpqYGS0pKMB6P47333ov79++39yeTSVy5ciXGYjGMRqN4++23Y1tbWx5rHDw7d+4knxsNDQ2IqGeD//77DxsbG3HYsGFYXl6Oc+fOxSNHjuThbIJBZZN///0XZ86ciVdffTWWlJTg//73
P2xoaHCc70C1SQixSKYANRgMBoPBUJSYmBWDwWAwGAwFjRErBoPBYDAYChojVgwGg8FgMBQ0RqwYDAaDwWAoaIxYMRgMBoPBUNAYsWIwGAwGg6GgMWLFYDAYDAZDQWPEisFgMBgMhoLGiBWDwWAwGAwFjRErBoPBYDAYChojVgwGg8FgMBQ0/wd8wQvbVeqjqwAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "t2m = out[0, 12].cpu().numpy()\n", + "\n", + "lat = np.linspace(-90, 90, out.shape[-2])\n", + "lon = np.linspace(-180, 180, out.shape[-1])\n", + "X, Y = np.meshgrid(lon, lat)\n", + "\n", + "plt.contourf(X, Y, t2m, 100)\n", + "plt.gca().set_aspect(\"equal\")\n", + "plt.show()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "base", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +}