diff --git a/Wav2Lip-master/.gitignore b/Wav2Lip-master/.gitignore new file mode 100644 index 00000000..56f02807 --- /dev/null +++ b/Wav2Lip-master/.gitignore @@ -0,0 +1,16 @@ +*.pkl +*.jpg +*.mp4 +*.pth +*.pyc +__pycache__ +*.h5 +*.avi +*.wav +filelists/*.txt +evaluation/test_filelists/lr*.txt +*.pyc +*.mkv +*.gif +*.webm +*.mp3 diff --git a/Wav2Lip-master/README.md b/Wav2Lip-master/README.md new file mode 100644 index 00000000..0de3bc53 --- /dev/null +++ b/Wav2Lip-master/README.md @@ -0,0 +1,122 @@ +# 项目名称 +Wav2lip-master +## 1. 项目简介 +这个项目是基于Wav2lip-master项目(https://github.com/Rudrabha/Wav2lip/)的复现项目,在原项目的基础上通过更加简单的操作来实现从生成视频到评测完毕这一流程。 + +## 2. 系统要求 +- 操作系统:Windows / Linux +- Python 版本:3.8 + +## 3. 环境配置 + +### 3.1 安装 Python +请确保系统上安装了 Python 3.8。可以从 [Python 官网](https://www.python.org/downloads/) 下载并安装。 + +### 3.2 创建虚拟环境 +建议使用虚拟环境来管理项目依赖。使用conda创建虚拟环境。 + +```bash +# 使用 conda +conda create --name myenv python=3.8 +conda activate myenv +``` +在Windows下如果要使用gpu运行,请输入命令: +```BASH +conda install pytorch torchvision torchaudio pytorch-cuda=11.8 -c pytorch -c nvidia +``` +### 3.3 安装依赖 +在项目根目录下,使用 pip 或 conda 安装项目所需的依赖。 +```BASH +# 使用 pip 安装 +pip install -r requirements.txt +``` +### 3.4 docker镜像安装 +我已经将运行此项目docker打包并上传至该网址,读者可以自行下载使用: +```BASH +https://share.weiyun.com/sI9Hlbbb +``` +```bash +docker run ... --shm-size=3g --gpus all --name=xxxx -t -d name +#docker运行 +docker exec -it yuyinshibie /bin/bash +#进入docker +conda activate xuni +#进入进行工作的虚拟环境 +``` +### 3.5 模型安装 +我把模型都放在一起,方便在windows下的读者能够直接下载模型安装到对应位置。 +链接: https://pan.baidu.com/s/1xIi0HJKMs7_V8mt5JVSsgw 提取码: kgmg +```BASH +syncnet_v2.model和example.avi需要放置到Wav2Lip-master\evaluation\syncnet_python-master\data下,无data文件夹请自行创建 +sfd_face.pth请放置在Wav2Lip-master\evaluation\syncnet_python-master\detectors\s3fd\weights下 +s3fd-619a316812.pth请放置在Wav2Lip-master\face_detection\detection\sfd下 +wav2lip.pth和wav2lip_gan.pth文件请放在Wav2Lip-master\checkpoints下 +``` +## 4. 
项目结构 +这里只显示操作涉及的文件夹和文件,没有提到的文件或文件夹不代表不需要 +```bash +Wav2Lip-master/ +│ +├── checkpoints/ # wav2lip模型存放地 +│ ├── .pth #预训练的模型文件 +│ ├── README +│ +├── evaluation/ #评测文件夹 +│ ├── inputframes/ # 由输入视频提取的图像 +│ ├── outputframes/ # 由输出视频提取的图像 +│ ├── scores_LSE/ # 评测文件,请保证该文件下的py文件复制到syncnet_python-master中 +│ ├── syncnet_python-master/ # LSE-C LSE-D评估请在该文件夹下进行 +│ ├── pytorch-fid-master/ #这里存放pytorch-fid项目 +│ ├── .../ +│ +├── inputmp4/ # 可以将作为人脸输入的jpg,mp4放在这个文件夹下 +│ ├── ... # +│ +├── inputwav/ # 可以将作为音频输入的wav放在这个文件夹下 +│ ├── ... +│ +├── face_detection/ # 请将下载到的s3fd-619a316812.pth文件放置在face_detection\detection\sfd下 +│ ├── ... +│ +├── results/ # 生成的输出视频会出现在这个文件夹下 +│ ├── ... +├── requirements.txt # 依赖项 +├── README.md # 项目说明 +``` +## 5. 运行项目 +确保inputmp4文件夹中和inputwav中存在格式正确的文件 +```bash +运行下列命令: +python inference.py --checkpoint_path checkpoints\wav2lip_gan.pth --face inputmp4\video1.mp4 --audio inputwav\test1.wav#生成视频 +``` + +## 6. 测试 +### 6.1 LSE-C、LSE-D指数测试方法 +```BASH +cd evaluation/synsyncnet_python-master +python run_pipeline.py --videofile path to\result_voice.mp4 --reference wav2lip --data_dir tmp_dir#将path to\result_voice.mp4改为你要检测的生成视频路径 +python calculate_scores_real_videos.py --videofile path to\result_voice.mp4 --reference wav2lip --data_dir tmp_dir >> all_scores.txt +#将path to\result_voice.mp4改为你要检测的生成视频路径,结果会生成在all_scores.txt +#注意:在结束测试后,如果要进行下一次测试前请先删除产生的tmp_dir文件夹 +``` +### 6.2 FID指数测试方法 +```BASH +python get_frames.py path/to/your/video.mp4 --output_folder evaluation\outputframes(inputframes) --frame_interval 1 --max_threads 4#用该命令将生成视频(输入视频)提取为图像存放在指定文件夹中 +python -m pytorch_fid evaluation\inputframes evaluation\outputframes#生成结果 +#注意:在结束测试后,如果要进行下一次测试前请先删除产生的inputframes和outputframes文件夹中的图片 +``` +## 7. 常见问题 + +问题 1 FID指数测试无法运行 +检查被比对的文件夹中存放数据是否正确 +在python -m pytorch_fid evaluation\inputframes evaluation\outputframes后加--num-workers 0 --device cuda:0 + +问题 2 linux下运行LSE-C测试在最后一步被KILLED +尝试缩短生成视频的长度,如果无法实现,请尝试在windows环境下运行 + + +## 8. 
联系信息 + +姓名:majunchen +邮箱:2406722613@qq.com +GitHub: GitHub 链接 diff --git "a/Wav2Lip-master/Wav2lip\346\250\241\345\236\213\345\256\236\347\216\260\346\225\260\345\255\227\344\272\272\345\224\207\350\257\255\345\220\214\346\255\245\345\256\236\351\252\214\346\212\245\345\221\212.docx" "b/Wav2Lip-master/Wav2lip\346\250\241\345\236\213\345\256\236\347\216\260\346\225\260\345\255\227\344\272\272\345\224\207\350\257\255\345\220\214\346\255\245\345\256\236\351\252\214\346\212\245\345\221\212.docx" new file mode 100644 index 00000000..c018a9cd Binary files /dev/null and "b/Wav2Lip-master/Wav2lip\346\250\241\345\236\213\345\256\236\347\216\260\346\225\260\345\255\227\344\272\272\345\224\207\350\257\255\345\220\214\346\255\245\345\256\236\351\252\214\346\212\245\345\221\212.docx" differ diff --git a/Wav2Lip-master/audio.py b/Wav2Lip-master/audio.py new file mode 100644 index 00000000..32b20c44 --- /dev/null +++ b/Wav2Lip-master/audio.py @@ -0,0 +1,136 @@ +import librosa +import librosa.filters +import numpy as np +# import tensorflow as tf +from scipy import signal +from scipy.io import wavfile +from hparams import hparams as hp + +def load_wav(path, sr): + return librosa.core.load(path, sr=sr)[0] + +def save_wav(wav, path, sr): + wav *= 32767 / max(0.01, np.max(np.abs(wav))) + #proposed by @dsmiller + wavfile.write(path, sr, wav.astype(np.int16)) + +def save_wavenet_wav(wav, path, sr): + librosa.output.write_wav(path, wav, sr=sr) + +def preemphasis(wav, k, preemphasize=True): + if preemphasize: + return signal.lfilter([1, -k], [1], wav) + return wav + +def inv_preemphasis(wav, k, inv_preemphasize=True): + if inv_preemphasize: + return signal.lfilter([1], [1, -k], wav) + return wav + +def get_hop_size(): + hop_size = hp.hop_size + if hop_size is None: + assert hp.frame_shift_ms is not None + hop_size = int(hp.frame_shift_ms / 1000 * hp.sample_rate) + return hop_size + +def linearspectrogram(wav): + D = _stft(preemphasis(wav, hp.preemphasis, hp.preemphasize)) + S = 
_amp_to_db(np.abs(D)) - hp.ref_level_db + + if hp.signal_normalization: + return _normalize(S) + return S + +def melspectrogram(wav): + D = _stft(preemphasis(wav, hp.preemphasis, hp.preemphasize)) + S = _amp_to_db(_linear_to_mel(np.abs(D))) - hp.ref_level_db + + if hp.signal_normalization: + return _normalize(S) + return S + +def _lws_processor(): + import lws + return lws.lws(hp.n_fft, get_hop_size(), fftsize=hp.win_size, mode="speech") + +def _stft(y): + if hp.use_lws: + return _lws_processor(hp).stft(y).T + else: + return librosa.stft(y=y, n_fft=hp.n_fft, hop_length=get_hop_size(), win_length=hp.win_size) + +########################################################## +#Those are only correct when using lws!!! (This was messing with Wavenet quality for a long time!) +def num_frames(length, fsize, fshift): + """Compute number of time frames of spectrogram + """ + pad = (fsize - fshift) + if length % fshift == 0: + M = (length + pad * 2 - fsize) // fshift + 1 + else: + M = (length + pad * 2 - fsize) // fshift + 2 + return M + + +def pad_lr(x, fsize, fshift): + """Compute left and right padding + """ + M = num_frames(len(x), fsize, fshift) + pad = (fsize - fshift) + T = len(x) + 2 * pad + r = (M - 1) * fshift + fsize - T + return pad, pad + r +########################################################## +#Librosa correct padding +def librosa_pad_lr(x, fsize, fshift): + return 0, (x.shape[0] // fshift + 1) * fshift - x.shape[0] + +# Conversions +_mel_basis = None + +def _linear_to_mel(spectogram): + global _mel_basis + if _mel_basis is None: + _mel_basis = _build_mel_basis() + return np.dot(_mel_basis, spectogram) + +def _build_mel_basis(): + assert hp.fmax <= hp.sample_rate // 2 + return librosa.filters.mel(hp.sample_rate, hp.n_fft, n_mels=hp.num_mels, + fmin=hp.fmin, fmax=hp.fmax) + +def _amp_to_db(x): + min_level = np.exp(hp.min_level_db / 20 * np.log(10)) + return 20 * np.log10(np.maximum(min_level, x)) + +def _db_to_amp(x): + return np.power(10.0, (x) * 0.05) + 
+def _normalize(S): + if hp.allow_clipping_in_normalization: + if hp.symmetric_mels: + return np.clip((2 * hp.max_abs_value) * ((S - hp.min_level_db) / (-hp.min_level_db)) - hp.max_abs_value, + -hp.max_abs_value, hp.max_abs_value) + else: + return np.clip(hp.max_abs_value * ((S - hp.min_level_db) / (-hp.min_level_db)), 0, hp.max_abs_value) + + assert S.max() <= 0 and S.min() - hp.min_level_db >= 0 + if hp.symmetric_mels: + return (2 * hp.max_abs_value) * ((S - hp.min_level_db) / (-hp.min_level_db)) - hp.max_abs_value + else: + return hp.max_abs_value * ((S - hp.min_level_db) / (-hp.min_level_db)) + +def _denormalize(D): + if hp.allow_clipping_in_normalization: + if hp.symmetric_mels: + return (((np.clip(D, -hp.max_abs_value, + hp.max_abs_value) + hp.max_abs_value) * -hp.min_level_db / (2 * hp.max_abs_value)) + + hp.min_level_db) + else: + return ((np.clip(D, 0, hp.max_abs_value) * -hp.min_level_db / hp.max_abs_value) + hp.min_level_db) + + if hp.symmetric_mels: + return (((D + hp.max_abs_value) * -hp.min_level_db / (2 * hp.max_abs_value)) + hp.min_level_db) + else: + return ((D * -hp.min_level_db / hp.max_abs_value) + hp.min_level_db) diff --git a/Wav2Lip-master/checkpoints/README.md b/Wav2Lip-master/checkpoints/README.md new file mode 100644 index 00000000..8580a0de --- /dev/null +++ b/Wav2Lip-master/checkpoints/README.md @@ -0,0 +1,2 @@ +Place all your checkpoints (.pth files) here. 
+[REDACTED: a committed `sk-...` API key was removed from this line — revoke/rotate that credential immediately and never commit secrets] \ No newline at end of file diff --git a/Wav2Lip-master/color_syncnet_train.py b/Wav2Lip-master/color_syncnet_train.py new file mode 100644 index 00000000..afa00544 --- /dev/null +++ b/Wav2Lip-master/color_syncnet_train.py @@ -0,0 +1,279 @@ +from os.path import dirname, join, basename, isfile +from tqdm import tqdm + +from models import SyncNet_color as SyncNet +import audio + +import torch +from torch import nn +from torch import optim +import torch.backends.cudnn as cudnn +from torch.utils import data as data_utils +import numpy as np + +from glob import glob + +import os, random, cv2, argparse +from hparams import hparams, get_image_list + +parser = argparse.ArgumentParser(description='Code to train the expert lip-sync discriminator') + +parser.add_argument("--data_root", help="Root folder of the preprocessed LRS2 dataset", required=True) + +parser.add_argument('--checkpoint_dir', help='Save checkpoints to this directory', required=True, type=str) +parser.add_argument('--checkpoint_path', help='Resumed from this checkpoint', default=None, type=str) + +args = parser.parse_args() + + +global_step = 0 +global_epoch = 0 +use_cuda = torch.cuda.is_available() +print('use_cuda: {}'.format(use_cuda)) + +syncnet_T = 5 +syncnet_mel_step_size = 16 + +class Dataset(object): + def __init__(self, split): + self.all_videos = get_image_list(args.data_root, split) + + def get_frame_id(self, frame): + return int(basename(frame).split('.')[0]) + + def get_window(self, start_frame): + start_id = self.get_frame_id(start_frame) + vidname = dirname(start_frame) + + window_fnames = [] + for frame_id in range(start_id, start_id + syncnet_T): + frame = join(vidname, '{}.jpg'.format(frame_id)) + if not isfile(frame): + return None + window_fnames.append(frame) + return window_fnames + + def crop_audio_window(self, spec, start_frame): + # num_frames = (T x hop_size * fps) / sample_rate + start_frame_num =
self.get_frame_id(start_frame) + start_idx = int(80. * (start_frame_num / float(hparams.fps))) + + end_idx = start_idx + syncnet_mel_step_size + + return spec[start_idx : end_idx, :] + + + def __len__(self): + return len(self.all_videos) + + def __getitem__(self, idx): + while 1: + idx = random.randint(0, len(self.all_videos) - 1) + vidname = self.all_videos[idx] + + img_names = list(glob(join(vidname, '*.jpg'))) + if len(img_names) <= 3 * syncnet_T: + continue + img_name = random.choice(img_names) + wrong_img_name = random.choice(img_names) + while wrong_img_name == img_name: + wrong_img_name = random.choice(img_names) + + if random.choice([True, False]): + y = torch.ones(1).float() + chosen = img_name + else: + y = torch.zeros(1).float() + chosen = wrong_img_name + + window_fnames = self.get_window(chosen) + if window_fnames is None: + continue + + window = [] + all_read = True + for fname in window_fnames: + img = cv2.imread(fname) + if img is None: + all_read = False + break + try: + img = cv2.resize(img, (hparams.img_size, hparams.img_size)) + except Exception as e: + all_read = False + break + + window.append(img) + + if not all_read: continue + + try: + wavpath = join(vidname, "audio.wav") + wav = audio.load_wav(wavpath, hparams.sample_rate) + + orig_mel = audio.melspectrogram(wav).T + except Exception as e: + continue + + mel = self.crop_audio_window(orig_mel.copy(), img_name) + + if (mel.shape[0] != syncnet_mel_step_size): + continue + + # H x W x 3 * T + x = np.concatenate(window, axis=2) / 255. 
+ x = x.transpose(2, 0, 1) + x = x[:, x.shape[1]//2:] + + x = torch.FloatTensor(x) + mel = torch.FloatTensor(mel.T).unsqueeze(0) + + return x, mel, y + +logloss = nn.BCELoss() +def cosine_loss(a, v, y): + d = nn.functional.cosine_similarity(a, v) + loss = logloss(d.unsqueeze(1), y) + + return loss + +def train(device, model, train_data_loader, test_data_loader, optimizer, + checkpoint_dir=None, checkpoint_interval=None, nepochs=None): + + global global_step, global_epoch + resumed_step = global_step + + while global_epoch < nepochs: + running_loss = 0. + prog_bar = tqdm(enumerate(train_data_loader)) + for step, (x, mel, y) in prog_bar: + model.train() + optimizer.zero_grad() + + # Transform data to CUDA device + x = x.to(device) + + mel = mel.to(device) + + a, v = model(mel, x) + y = y.to(device) + + loss = cosine_loss(a, v, y) + loss.backward() + optimizer.step() + + global_step += 1 + cur_session_steps = global_step - resumed_step + running_loss += loss.item() + + if global_step == 1 or global_step % checkpoint_interval == 0: + save_checkpoint( + model, optimizer, global_step, checkpoint_dir, global_epoch) + + if global_step % hparams.syncnet_eval_interval == 0: + with torch.no_grad(): + eval_model(test_data_loader, global_step, device, model, checkpoint_dir) + + prog_bar.set_description('Loss: {}'.format(running_loss / (step + 1))) + + global_epoch += 1 + +def eval_model(test_data_loader, global_step, device, model, checkpoint_dir): + eval_steps = 1400 + print('Evaluating for {} steps'.format(eval_steps)) + losses = [] + while 1: + for step, (x, mel, y) in enumerate(test_data_loader): + + model.eval() + + # Transform data to CUDA device + x = x.to(device) + + mel = mel.to(device) + + a, v = model(mel, x) + y = y.to(device) + + loss = cosine_loss(a, v, y) + losses.append(loss.item()) + + if step > eval_steps: break + + averaged_loss = sum(losses) / len(losses) + print(averaged_loss) + + return + +def save_checkpoint(model, optimizer, step, checkpoint_dir, epoch): 
+ + checkpoint_path = join( + checkpoint_dir, "checkpoint_step{:09d}.pth".format(global_step)) + optimizer_state = optimizer.state_dict() if hparams.save_optimizer_state else None + torch.save({ + "state_dict": model.state_dict(), + "optimizer": optimizer_state, + "global_step": step, + "global_epoch": epoch, + }, checkpoint_path) + print("Saved checkpoint:", checkpoint_path) + +def _load(checkpoint_path): + if use_cuda: + checkpoint = torch.load(checkpoint_path) + else: + checkpoint = torch.load(checkpoint_path, + map_location=lambda storage, loc: storage) + return checkpoint + +def load_checkpoint(path, model, optimizer, reset_optimizer=False): + global global_step + global global_epoch + + print("Load checkpoint from: {}".format(path)) + checkpoint = _load(path) + model.load_state_dict(checkpoint["state_dict"]) + if not reset_optimizer: + optimizer_state = checkpoint["optimizer"] + if optimizer_state is not None: + print("Load optimizer state from {}".format(path)) + optimizer.load_state_dict(checkpoint["optimizer"]) + global_step = checkpoint["global_step"] + global_epoch = checkpoint["global_epoch"] + + return model + +if __name__ == "__main__": + checkpoint_dir = args.checkpoint_dir + checkpoint_path = args.checkpoint_path + + if not os.path.exists(checkpoint_dir): os.mkdir(checkpoint_dir) + + # Dataset and Dataloader setup + train_dataset = Dataset('train') + test_dataset = Dataset('val') + + train_data_loader = data_utils.DataLoader( + train_dataset, batch_size=hparams.syncnet_batch_size, shuffle=True, + num_workers=hparams.num_workers) + + test_data_loader = data_utils.DataLoader( + test_dataset, batch_size=hparams.syncnet_batch_size, + num_workers=8) + + device = torch.device("cuda" if use_cuda else "cpu") + + # Model + model = SyncNet().to(device) + print('total trainable params {}'.format(sum(p.numel() for p in model.parameters() if p.requires_grad))) + + optimizer = optim.Adam([p for p in model.parameters() if p.requires_grad], + lr=hparams.syncnet_lr) 
+ + if checkpoint_path is not None: + load_checkpoint(checkpoint_path, model, optimizer, reset_optimizer=False) + + train(device, model, train_data_loader, test_data_loader, optimizer, + checkpoint_dir=checkpoint_dir, + checkpoint_interval=hparams.syncnet_checkpoint_interval, + nepochs=hparams.nepochs) diff --git a/Wav2Lip-master/evaluation/README.md b/Wav2Lip-master/evaluation/README.md new file mode 100644 index 00000000..affebbc0 --- /dev/null +++ b/Wav2Lip-master/evaluation/README.md @@ -0,0 +1,63 @@ +# Novel Evaluation Framework, new filelists, and using the LSE-D and LSE-C metric. + +Our paper also proposes a novel evaluation framework (Section 4). To evaluate on LRS2, LRS3, and LRW, the filelists are present in the `test_filelists` folder. Please use `gen_videos_from_filelist.py` script to generate the videos. After that, you can calculate the LSE-D and LSE-C scores using the instructions below. Please see [this thread](https://github.com/Rudrabha/Wav2Lip/issues/22#issuecomment-712825380) on how to calculate the FID scores. + +The videos of the ReSyncED benchmark for real-world evaluation will be released soon. + +### Steps to set-up the evaluation repository for LSE-D and LSE-C metric: +We use the pre-trained syncnet model available in this [repository](https://github.com/joonson/syncnet_python). + +* Clone the SyncNet repository. +``` +git clone https://github.com/joonson/syncnet_python.git +``` +* Follow the procedure given in the above linked [repository](https://github.com/joonson/syncnet_python) to download the pretrained models and set up the dependencies. + * **Note: Please install a separate virtual environment for the evaluation scripts. The versions used by Wav2Lip and the publicly released code of SyncNet is different and can cause version mis-match issues. 
To avoid this, we suggest the users to install a separate virtual environment for the evaluation scripts** +``` +cd syncnet_python +pip install -r requirements.txt +sh download_model.sh +``` +* The above step should ensure that all the dependencies required by the repository is installed and the pre-trained models are downloaded. + +### Running the evaluation scripts: +* Copy our evaluation scripts given in this folder to the cloned repository. +``` + cd Wav2Lip/evaluation/scores_LSE/ + cp *.py syncnet_python/ + cp *.sh syncnet_python/ +``` +**Note: We will release the test filelists for LRW, LRS2 and LRS3 shortly once we receive permission from the dataset creators. We will also release the Real World Dataset we have collected shortly.** + +* Our evaluation technique does not require ground-truth of any sorts. Given lip-synced videos we can directly calculate the scores from only the generated videos. Please store the generated videos (from our test sets or your own generated videos) in the following folder structure. +``` +video data root (Folder containing all videos) +├── All .mp4 files +``` +* Change the folder back to the cloned repository. +``` +cd syncnet_python +``` +* To run evaluation on the LRW, LRS2 and LRS3 test files, please run the following command: +``` +python calculate_scores_LRS.py --data_root /path/to/video/data/root --tmp_dir tmp_dir/ +``` + +* To run evaluation on the ReSynced dataset or your own generated videos, please run the following command: +``` +sh calculate_scores_real_videos.sh /path/to/video/data/root +``` +* The generated scores will be present in the all_scores.txt generated in the ```syncnet_python/``` folder + +# Evaluation of image quality using FID metric. +We use the [pytorch-fid](https://github.com/mseitzer/pytorch-fid) repository for calculating the FID metrics. We dump all the frames in both ground-truth and generated videos and calculate the FID score. 
+ + +# Opening issues related to evaluation scripts +* Please open the issues with the "Evaluation" label if you face any issues in the evaluation scripts. + +# Acknowledgements +Our evaluation pipeline in based on two existing repositories. LSE metrics are based on the [syncnet_python](https://github.com/joonson/syncnet_python) repository and the FID score is based on [pytorch-fid](https://github.com/mseitzer/pytorch-fid) repository. We thank the authors of both the repositories for releasing their wonderful code. + + + diff --git a/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/README.md b/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/README.md new file mode 100644 index 00000000..7112998b --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/README.md @@ -0,0 +1,16 @@ +BEGAN fork from + +https://github.com/carpedm20/BEGAN-tensorflow + +with batched FID evaluation + +Needs fid.py from TTUR root directory. Please copy it here. + +Precalculated real world / training data statistics can be downloaded +from here. Be sure to use the batched versions. + +http://bioinf.jku.at/research/ttur/ttur.html + +see sh/run.sh for options + +Fixed random seeds are removed. 
diff --git a/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/config.py b/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/config.py new file mode 100644 index 00000000..4760775b --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/config.py @@ -0,0 +1,80 @@ +#-*- coding: utf-8 -*- +import argparse + +def str2bool(v): + return v.lower() in ('true', '1') + +arg_lists = [] +parser = argparse.ArgumentParser() + +def add_argument_group(name): + arg = parser.add_argument_group(name) + arg_lists.append(arg) + return arg + +# Network +net_arg = add_argument_group('Network') +net_arg.add_argument('--input_scale_size', type=int, default=64, + help='input image will be resized with the given value as width and height') +net_arg.add_argument('--conv_hidden_num', type=int, default=128, + choices=[64, 128],help='n in the paper') +net_arg.add_argument('--z_num', type=int, default=64, choices=[64, 128]) + +# Data +data_arg = add_argument_group('Data') +data_arg.add_argument('--dataset', type=str, default='CelebA') +data_arg.add_argument('--split', type=str, default='train') +data_arg.add_argument('--batch_size', type=int, default=16) +data_arg.add_argument('--grayscale', type=str2bool, default=False) +data_arg.add_argument('--num_worker', type=int, default=4) + +# Training / test parameters +train_arg = add_argument_group('Training') +train_arg.add_argument('--is_train', type=str2bool, default=True) +train_arg.add_argument('--optimizer', type=str, default='adam') +train_arg.add_argument('--max_step', type=int, default=500000) +train_arg.add_argument('--lr_update_step', type=int, default=100000, choices=[100000, 75000]) +train_arg.add_argument('--d_lr', type=float, default=0.00008) +train_arg.add_argument('--g_lr', type=float, default=0.00008) +train_arg.add_argument('--beta1', type=float, default=0.5) +train_arg.add_argument('--beta2', type=float, default=0.999) +train_arg.add_argument('--gamma', type=float, default=0.5) 
+train_arg.add_argument('--lambda_k', type=float, default=0.001) +train_arg.add_argument('--use_gpu', type=str2bool, default=True) + +train_arg.add_argument('--update_k', type=str2bool, default=True) +train_arg.add_argument('--k_constant', type=float, default=0.06) + +# FID +fid_arg = add_argument_group('FID') +fid_arg.add_argument('--train_stats_file', type=str, default='train_stats.npz') +fid_arg.add_argument('--eval_num_samples', type=int, default=10000) +fid_arg.add_argument('--eval_batch_size', type=int, default=100) +fid_arg.add_argument('--eval_step', type=int, default=1000) + + +# Misc +misc_arg = add_argument_group('Misc') +misc_arg.add_argument('--load_checkpoint', type=str2bool, default=False) +misc_arg.add_argument('--checkpoint_name', type=str, default='') +misc_arg.add_argument('--start_step', type=int, default=0) +misc_arg.add_argument('--log_step', type=int, default=500) +misc_arg.add_argument('--save_step', type=int, default=5000) +misc_arg.add_argument('--num_log_samples', type=int, default=3) +misc_arg.add_argument('--log_level', type=str, default='INFO', choices=['INFO', 'DEBUG', 'WARN']) +misc_arg.add_argument('--log_dir', type=str, default='logs') +misc_arg.add_argument('--data_dir', type=str, default='data') +misc_arg.add_argument('--test_data_path', type=str, default=None, + help='directory with images which will be used in test sample generation') +misc_arg.add_argument('--sample_per_image', type=int, default=64, + help='# of sample per image during test sample generation') +misc_arg.add_argument('--random_seed', type=int, default=123) + +def get_config(): + config, unparsed = parser.parse_known_args() + if config.use_gpu: + data_format = 'NCHW' + else: + data_format = 'NHWC' + setattr(config, 'data_format', data_format) + return config, unparsed diff --git a/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/data/README.md b/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/data/README.md new file mode 100644 index 
00000000..89c8a992 --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/data/README.md @@ -0,0 +1 @@ +Data folder, e.g. celebA_cropped or lsun_cropped directories are located here if not specified otherwise. diff --git a/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/data_loader.py b/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/data_loader.py new file mode 100644 index 00000000..9f3f59d7 --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/data_loader.py @@ -0,0 +1,61 @@ +import os +from PIL import Image +from glob import glob +import tensorflow as tf + +def get_loader(root, batch_size, scale_size, data_format, split=None, is_grayscale=False, seed=None): + dataset_name = os.path.basename(root) + if dataset_name in ['CelebA'] and split: + root = os.path.join(root, 'splits', split) + + if dataset_name == "lsun_cropped": + #print("scan files... ") + paths = [] + for i in range(304): + print("\rscan directories %d" % (i + 1), end="", flush=True) + paths += glob(os.path.join(root, str(i), "*.jpg")) + print() + print("%d files found" % len(paths)) + else: + print("scan files... 
", end="", flush=True) + paths = glob(os.path.join(root, "*.jpg")) + print(" %d files found" % len(paths)) + + tf_decode = tf.image.decode_jpeg + + with Image.open(paths[0]) as img: + w, h = img.size + shape = [h, w, 3] + + filename_queue = tf.train.string_input_producer(list(paths), shuffle=True, seed=seed) + reader = tf.WholeFileReader() + filename, data = reader.read(filename_queue) + image = tf_decode(data, channels=3) + + if is_grayscale: + image = tf.image.rgb_to_grayscale(image) + image.set_shape(shape) + + min_after_dequeue = 5000 + capacity = min_after_dequeue + 3 * batch_size + + queue = tf.train.shuffle_batch( + [image], batch_size=batch_size, + num_threads=4, capacity=capacity, + min_after_dequeue=min_after_dequeue, name='synthetic_inputs') + + if dataset_name in ['CelebA']: + queue = tf.image.crop_to_bounding_box(queue, 50, 25, 128, 128) + queue = tf.image.resize_nearest_neighbor(queue, [scale_size, scale_size]) + else: + pass + #queue = tf.image.resize_nearest_neighbor(queue, [scale_size, scale_size]) + + if data_format == 'NCHW': + queue = tf.transpose(queue, [0, 3, 1, 2]) + elif data_format == 'NHWC': + pass + else: + raise Exception("[!] Unkown data_format: {}".format(data_format)) + + return tf.to_float(queue) diff --git a/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/logs/README.md b/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/logs/README.md new file mode 100644 index 00000000..74d1068c --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/logs/README.md @@ -0,0 +1 @@ +Tensorboard logfiles, samples, checkpoints will stored in autmatically generated subdirectories here. 
diff --git a/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/main_fid.py b/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/main_fid.py new file mode 100644 index 00000000..aec66131 --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/main_fid.py @@ -0,0 +1,43 @@ +import numpy as np +import tensorflow as tf + +from trainer_fid_batched import Trainer +from config import get_config +from data_loader import get_loader +from utils import prepare_dirs_and_logger, save_config + +def main(config): + prepare_dirs_and_logger(config) + + #rng = np.random.RandomState(config.random_seed) + #tf.set_random_seed(config.random_seed) + + if config.is_train: + data_path = config.data_path + batch_size = config.batch_size + do_shuffle = True + else: + setattr(config, 'batch_size', 64) + if config.test_data_path is None: + data_path = config.data_path + else: + data_path = config.test_data_path + batch_size = config.sample_per_image + do_shuffle = False + + data_loader = get_loader( + data_path, config.batch_size, config.input_scale_size, + config.data_format, config.split) + trainer = Trainer(config, data_loader) + + if config.is_train: + save_config(config) + trainer.train() + else: + if not config.load_path: + raise Exception("[!] 
You should specify `load_path` to load a pretrained model") + trainer.test() + +if __name__ == "__main__": + config, unparsed = get_config() + main(config) diff --git a/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/models.py b/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/models.py new file mode 100644 index 00000000..6d9c42dc --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/models.py @@ -0,0 +1,91 @@ +import numpy as np +import tensorflow as tf +slim = tf.contrib.slim + +def GeneratorCNN(z, hidden_num, output_num, repeat_num, data_format, reuse): + with tf.variable_scope("G", reuse=reuse) as vs: + num_output = int(np.prod([8, 8, hidden_num])) + x = slim.fully_connected(z, num_output, activation_fn=None) + x = reshape(x, 8, 8, hidden_num, data_format) + + for idx in range(repeat_num): + x = slim.conv2d(x, hidden_num, 3, 1, activation_fn=tf.nn.elu, data_format=data_format) + x = slim.conv2d(x, hidden_num, 3, 1, activation_fn=tf.nn.elu, data_format=data_format) + if idx < repeat_num - 1: + x = upscale(x, 2, data_format) + + out = slim.conv2d(x, 3, 3, 1, activation_fn=None, data_format=data_format) + + variables = tf.contrib.framework.get_variables(vs) + return out, variables + +def DiscriminatorCNN(x, input_channel, z_num, repeat_num, hidden_num, data_format): + with tf.variable_scope("D") as vs: + # Encoder + x = slim.conv2d(x, hidden_num, 3, 1, activation_fn=tf.nn.elu, data_format=data_format) + + prev_channel_num = hidden_num + for idx in range(repeat_num): + channel_num = hidden_num * (idx + 1) + x = slim.conv2d(x, channel_num, 3, 1, activation_fn=tf.nn.elu, data_format=data_format) + x = slim.conv2d(x, channel_num, 3, 1, activation_fn=tf.nn.elu, data_format=data_format) + if idx < repeat_num - 1: + x = slim.conv2d(x, channel_num, 3, 2, activation_fn=tf.nn.elu, data_format=data_format) + #x = tf.contrib.layers.max_pool2d(x, [2, 2], [2, 2], padding='VALID') + + x = tf.reshape(x, [-1, np.prod([8, 8, channel_num])]) + z 
= x = slim.fully_connected(x, z_num, activation_fn=None) + + # Decoder + num_output = int(np.prod([8, 8, hidden_num])) + x = slim.fully_connected(x, num_output, activation_fn=None) + x = reshape(x, 8, 8, hidden_num, data_format) + + for idx in range(repeat_num): + x = slim.conv2d(x, hidden_num, 3, 1, activation_fn=tf.nn.elu, data_format=data_format) + x = slim.conv2d(x, hidden_num, 3, 1, activation_fn=tf.nn.elu, data_format=data_format) + if idx < repeat_num - 1: + x = upscale(x, 2, data_format) + + out = slim.conv2d(x, input_channel, 3, 1, activation_fn=None, data_format=data_format) + + variables = tf.contrib.framework.get_variables(vs) + return out, z, variables + +def int_shape(tensor): + shape = tensor.get_shape().as_list() + return [num if num is not None else -1 for num in shape] + +def get_conv_shape(tensor, data_format): + shape = int_shape(tensor) + # always return [N, H, W, C] + if data_format == 'NCHW': + return [shape[0], shape[2], shape[3], shape[1]] + elif data_format == 'NHWC': + return shape + +def nchw_to_nhwc(x): + return tf.transpose(x, [0, 2, 3, 1]) + +def nhwc_to_nchw(x): + return tf.transpose(x, [0, 3, 1, 2]) + +def reshape(x, h, w, c, data_format): + if data_format == 'NCHW': + x = tf.reshape(x, [-1, c, h, w]) + else: + x = tf.reshape(x, [-1, h, w, c]) + return x + +def resize_nearest_neighbor(x, new_size, data_format): + if data_format == 'NCHW': + x = nchw_to_nhwc(x) + x = tf.image.resize_nearest_neighbor(x, new_size) + x = nhwc_to_nchw(x) + else: + x = tf.image.resize_nearest_neighbor(x, new_size) + return x + +def upscale(x, scale, data_format): + _, h, w, _ = get_conv_shape(x, data_format) + return resize_nearest_neighbor(x, (h*scale, w*scale), data_format) diff --git a/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/sh/run.sh b/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/sh/run.sh new file mode 100644 index 00000000..9b8b2352 --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/BEGAN_FID_batched/sh/run.sh @@ 
#!/bin/sh
# Launch BEGAN training with batched FID evaluation (main_fid.py).

# --- optimiser / equilibrium hyper-parameters ---
lr_d=0.00006
lr_g=0.00004
update_k=True
k_constant=0.08

# --- dataset and precalculated real-data FID statistics ---
dataset=celebA_cropped
#dataset=lsun_cropped
train_stats_file="stats/fid_stats_celeba.npz"

# --- FID evaluation schedule ---
eval_num_samples=50000
eval_batch_size=200
eval_step=1000

# --- BEGAN k-update parameters ---
lambda_k=0.001
gamma=0.5

python3 main_fid.py \
--dataset $dataset \
--train_stats_file $train_stats_file \
--eval_num_samples $eval_num_samples \
--eval_batch_size $eval_batch_size \
--eval_step $eval_step \
--input_height 64 \
--output_height 64 \
--is_crop False \
--is_train True \
--batch_size 16 \
--log_dir "logs" \
--d_lr $lr_d \
--g_lr $lr_g \
--lr_update_step 100000 \
--lambda_k $lambda_k \
--update_k $update_k \
--k_constant $k_constant \
--gamma $gamma \
--max_step 500000 \
--load_checkpoint False \
--checkpoint_name "" \
--start_step 0
def next(loader):
    """Pull the next image batch out of a torch-style loader as a numpy array.

    NOTE(review): shadows the builtin `next`; kept for caller compatibility.
    """
    return loader.next()[0].data.numpy()


def to_nhwc(image, data_format):
    """Convert `image` to NHWC layout if it is currently NCHW."""
    if data_format == 'NCHW':
        new_image = nchw_to_nhwc(image)
    else:
        new_image = image
    return new_image


def to_nchw_numpy(image):
    """Transpose a numpy NHWC batch (1 or 3 channels) to NCHW; otherwise pass through."""
    if image.shape[3] in [1, 3]:
        new_image = image.transpose([0, 3, 1, 2])
    else:
        new_image = image
    return new_image


def norm_img(image, data_format=None):
    """Map pixel values in [0, 255] to [-1, 1], optionally converting layout."""
    image = image/127.5 - 1.
    if data_format:
        image = to_nhwc(image, data_format)
    return image


def denorm_img(norm, data_format):
    """Inverse of norm_img: map [-1, 1] back to a clipped [0, 255] image tensor."""
    return tf.clip_by_value(to_nhwc((norm + 1)*127.5, data_format), 0, 255)


def slerp(val, low, high):
    """Spherical interpolation between two latent vectors.

    Code from https://github.com/soumith/dcgan.torch/issues/14
    """
    omega = np.arccos(np.clip(np.dot(low/np.linalg.norm(low), high/np.linalg.norm(high)), -1, 1))
    so = np.sin(omega)
    if so == 0:
        return (1.0-val) * low + val * high  # L'Hopital's rule/LERP
    return np.sin((1.0-val)*omega) / so * low + np.sin(val*omega) / so * high


class Trainer(object):
    """BEGAN trainer with periodic batched FID evaluation (TensorFlow 1.x)."""

    def __init__(self, config, data_loader):
        """Copy hyper-parameters off `config`, build the graph, open a session."""
        self.config = config
        self.data_loader = data_loader
        self.dataset = config.dataset

        self.train_stats_file = config.train_stats_file

        self.beta1 = config.beta1
        self.beta2 = config.beta2
        self.optimizer = config.optimizer
        self.batch_size = config.batch_size

        self.step = tf.Variable(0, name='step', trainable=False)

        self.g_lr = tf.Variable(config.g_lr, name='g_lr')
        self.d_lr = tf.Variable(config.d_lr, name='d_lr')

        # Halve the learning rates each time these ops are run.
        self.g_lr_update = tf.assign(self.g_lr, self.g_lr * 0.5, name='g_lr_update')
        self.d_lr_update = tf.assign(self.d_lr, self.d_lr * 0.5, name='d_lr_update')

        self.gamma = config.gamma
        self.lambda_k = config.lambda_k

        self.z_num = config.z_num
        self.conv_hidden_num = config.conv_hidden_num
        self.input_scale_size = config.input_scale_size

        self.model_dir = config.model_dir

        self.load_checkpoint = config.load_checkpoint
        self.checkpoint_name = config.checkpoint_name

        self.use_gpu = config.use_gpu
        self.data_format = config.data_format

        _, height, width, self.channel = \
            get_conv_shape(self.data_loader, self.data_format)
        # Number of conv stages follows the image resolution (8x8 base).
        self.repeat_num = int(np.log2(height)) - 2

        self.start_step = config.start_step
        self.log_step = config.log_step
        self.max_step = config.max_step
        self.save_step = config.save_step
        self.lr_update_step = config.lr_update_step

        # FID / k-update settings
        self.update_k = config.update_k
        self.k_constant = config.k_constant

        self.eval_num_samples = config.eval_num_samples
        self.eval_batch_size = config.eval_batch_size
        self.eval_step = config.eval_step

        self.output_height = config.input_scale_size
        self.output_width = self.output_height

        self.is_train = config.is_train
        self.build_model()

        self.saver = tf.train.Saver()
        self.summary_writer = tf.summary.FileWriter(self.model_dir)

        sv = tf.train.Supervisor(logdir=self.model_dir,
                                 is_chief=True,
                                 saver=self.saver,
                                 summary_op=None,
                                 summary_writer=self.summary_writer,
                                 save_model_secs=3600,
                                 global_step=self.step,
                                 ready_for_local_init_op=None)

        gpu_options = tf.GPUOptions(allow_growth=True)
        sess_config = tf.ConfigProto(allow_soft_placement=True,
                                     gpu_options=gpu_options)

        self.sess = sv.prepare_or_wait_for_session(config=sess_config)

        # HACK: dirty way to bypass the Supervisor's graph finalization so
        # that build_test_model() can still add ops below.
        g = tf.get_default_graph()
        g._finalized = False

        if not self.is_train:
            self.build_test_model()

    def train(self):
        """Main optimization loop: BEGAN updates plus periodic FID evaluation."""
        print("load train stats..", end="")
        # Precalculated real-data inception statistics (mu, sigma).
        f = np.load(self.train_stats_file)
        mu_trn, sigma_trn = f['mu'][:], f['sigma'][:]
        f.close()
        print("ok")

        z_fixed = np.random.uniform(-1, 1, size=(self.batch_size, self.z_num))

        x_fixed = self.get_image_from_loader()
        save_image(x_fixed, '{}/x_fixed.png'.format(self.model_dir))

        measure_history = deque([0]*self.lr_update_step, self.lr_update_step)

        # Load the inception inference graph used by fid.get_activations().
        fid.create_inception_graph("inception-2015-12-05/classify_image_graph_def.pb")

        if self.load_checkpoint:
            if self.load(self.model_dir):
                print(" [*] Load SUCCESS")
            else:
                print(" [!] Load failed...")

        # Preallocated buffer for inception activations (eval_num_samples x 2048).
        inception_activations = np.ones([self.eval_num_samples, 2048])

        for step in trange(self.start_step, self.max_step):
            # One discriminator + one generator update.
            self.sess.run([self.d_optim, self.g_optim])

            fetch_dict = {"measure": self.measure}
            if self.update_k:
                fetch_dict.update({"k_update": self.k_update})
            if step % self.log_step == 0:
                fetch_dict.update({
                    "summary": self.summary_op,
                    "g_loss": self.g_loss,
                    "d_loss": self.d_loss,
                    "k_t": self.k_t,
                })

            result = self.sess.run(fetch_dict)

            measure = result['measure']
            measure_history.append(measure)

            if step % self.log_step == 0:
                self.summary_writer.add_summary(result['summary'], step)
                self.summary_writer.flush()

                g_loss = result['g_loss']
                d_loss = result['d_loss']
                k_t = result['k_t']

                print("[{}/{}] Loss_D: {:.6f} Loss_G: {:.6f} measure: {:.4f}, k_t: {:.4f}". \
                        format(step, self.max_step, d_loss, g_loss, measure, k_t))

            if step % (self.log_step * 10) == 0:
                x_fake = self.generate(z_fixed, self.model_dir, idx=step)
                self.autoencode(x_fixed, self.model_dir, idx=step, x_fake=x_fake)

            if step % self.lr_update_step == self.lr_update_step - 1:
                self.sess.run([self.g_lr_update, self.d_lr_update])

            # --- periodic FID evaluation ---
            if step % self.eval_step == 0:
                eval_batches_num = self.eval_num_samples // self.eval_batch_size

                for eval_batch in range(eval_batches_num):
                    print("\rFID batch %d/%d" % (eval_batch + 1, eval_batches_num), end="", flush=True)
                    sample_z_eval = np.random.uniform(-1, 1, size=(self.eval_batch_size, self.z_num))
                    samples_eval = self.generate(sample_z_eval, self.model_dir, save=False)
                    activations_batch = fid.get_activations(samples_eval,
                                                            self.sess,
                                                            batch_size=self.eval_batch_size,
                                                            verbose=False)
                    frm = eval_batch * self.eval_batch_size
                    to = frm + self.eval_batch_size
                    inception_activations[frm:to, :] = activations_batch
                print()

                print("FID:", end=" ", flush=True)
                try:
                    mu_eval = np.mean(inception_activations, axis=0)
                    sigma_eval = np.cov(inception_activations, rowvar=False)
                    FID = fid.calculate_frechet_distance(mu_eval, sigma_eval, mu_trn, sigma_trn)
                except Exception as e:
                    # Numerical failure (e.g. singular covariance): report a
                    # sentinel value instead of aborting training.
                    print(e)
                    FID = 500
                print(FID)

                self.sess.run(tf.assign(self.fid, FID))
                summary_str = self.sess.run(self.fid_sum)
                self.summary_writer.add_summary(summary_str, step)

    def build_model(self):
        """Assemble the BEGAN graph: G, autoencoder-D, losses, k-update, summaries."""
        self.x = self.data_loader
        x = norm_img(self.x)

        self.z = tf.random_uniform(
            (tf.shape(x)[0], self.z_num), minval=-1.0, maxval=1.0)

        if self.update_k:
            self.k_t = tf.Variable(0.0, trainable=False, name='k_t')
        else:
            self.k_t = tf.constant(self.k_constant, name="k_t")

        G, self.G_var = GeneratorCNN(
            self.z, self.conv_hidden_num, self.channel,
            self.repeat_num, self.data_format, reuse=False)

        d_out, self.D_z, self.D_var = DiscriminatorCNN(
            tf.concat([G, x], 0), self.channel, self.z_num, self.repeat_num,
            self.conv_hidden_num, self.data_format)
        AE_G, AE_x = tf.split(d_out, 2, 0)

        self.G = denorm_img(G, self.data_format)
        self.AE_G, self.AE_x = denorm_img(AE_G, self.data_format), denorm_img(AE_x, self.data_format)

        if self.optimizer == 'adam':
            optimizer = tf.train.AdamOptimizer
        else:
            # BUG FIX: the original formatted this message with the undefined
            # name `config` (NameError); use the value stored on self instead.
            raise Exception("[!] Caution! Paper didn't use {} opimizer other than Adam".format(self.optimizer))

        g_optimizer, d_optimizer = optimizer(self.g_lr), optimizer(self.d_lr)

        # BEGAN losses: D autoencodes real well while autoencoding fake badly.
        self.d_loss_real = tf.reduce_mean(tf.abs(AE_x - x))
        self.d_loss_fake = tf.reduce_mean(tf.abs(AE_G - G))

        self.d_loss = self.d_loss_real - self.k_t * self.d_loss_fake
        self.g_loss = tf.reduce_mean(tf.abs(AE_G - G))

        self.d_optim = d_optimizer.minimize(self.d_loss, var_list=self.D_var)
        self.g_optim = g_optimizer.minimize(self.g_loss, global_step=self.step, var_list=self.G_var)

        # Equilibrium term and the convergence measure from the BEGAN paper.
        self.balance = self.gamma * self.d_loss_real - self.g_loss
        self.measure = self.d_loss_real + tf.abs(self.balance)

        if self.update_k:
            self.k_update = tf.assign(self.k_t,
                tf.clip_by_value(self.k_t + self.lambda_k * self.balance, 0, 1))

        self.summary_op = tf.summary.merge([
            tf.summary.image("G", self.G),
            tf.summary.image("AE_G", self.AE_G),
            tf.summary.image("AE_x", self.AE_x),

            tf.summary.scalar("loss/d_loss", self.d_loss),
            tf.summary.scalar("loss/d_loss_real", self.d_loss_real),
            tf.summary.scalar("loss/d_loss_fake", self.d_loss_fake),
            tf.summary.scalar("loss/g_loss", self.g_loss),
            tf.summary.scalar("misc/measure", self.measure),
            tf.summary.scalar("misc/k_t", self.k_t),
            tf.summary.scalar("misc/d_lr", self.d_lr),
            tf.summary.scalar("misc/g_lr", self.g_lr),
            tf.summary.scalar("misc/balance", self.balance),
        ])

        # Auxiliary ops for JPEG encoding and FID logging.
        self.image_enc_data = tf.placeholder(tf.uint8, [self.output_height, self.output_width, 3])
        self.encode_jpeg = tf.image.encode_jpeg(self.image_enc_data)

        self.fid = tf.Variable(0.0, trainable=False)
        self.fid_sum = tf.summary.scalar("FID", self.fid)

    def build_test_model(self):
        """Extra ops for latent-space interpolation at test time."""
        with tf.variable_scope("test") as vs:
            z_optimizer = tf.train.AdamOptimizer(0.0001)

            self.z_r = tf.get_variable("z_r", [self.batch_size, self.z_num], tf.float32)
            self.z_r_update = tf.assign(self.z_r, self.z)

        G_z_r, _ = GeneratorCNN(
            self.z_r, self.conv_hidden_num, self.channel, self.repeat_num, self.data_format, reuse=True)

        with tf.variable_scope("test") as vs:
            self.z_r_loss = tf.reduce_mean(tf.abs(self.x - G_z_r))
            self.z_r_optim = z_optimizer.minimize(self.z_r_loss, var_list=[self.z_r])

        test_variables = tf.contrib.framework.get_variables(vs)
        self.sess.run(tf.variables_initializer(test_variables))

    def generate(self, inputs, root_path=None, path=None, idx=None, save=True):
        """Run the generator on latent `inputs`; optionally save a sample grid."""
        x = self.sess.run(self.G, {self.z: inputs})
        if path is None and save:
            path = os.path.join(root_path, '{}_G.png'.format(idx))
            save_image(x, path)
            print("[*] Samples saved: {}".format(path))
        return x

    def autoencode(self, inputs, path, idx=None, x_fake=None):
        """Save discriminator reconstructions of real (and optionally fake) images."""
        items = {
            'real': inputs,
            'fake': x_fake,
        }
        for key, img in items.items():
            if img is None:
                continue
            if img.shape[3] in [1, 3]:
                img = img.transpose([0, 3, 1, 2])

            x_path = os.path.join(path, '{}_D_{}.png'.format(idx, key))
            x = self.sess.run(self.AE_x, {self.x: img})
            save_image(x, x_path)
            print("[*] Samples saved: {}".format(x_path))

    def encode(self, inputs):
        """Encode an image batch to latent codes via the discriminator encoder."""
        if inputs.shape[3] in [1, 3]:
            inputs = inputs.transpose([0, 3, 1, 2])
        return self.sess.run(self.D_z, {self.x: inputs})

    def decode(self, z):
        """Decode latent codes through the discriminator decoder."""
        return self.sess.run(self.AE_x, {self.D_z: z})

    def interpolate_G(self, real_batch, step=0, root_path='.', train_epoch=0):
        """Fit latents to real images, then save slerp interpolations through G."""
        batch_size = len(real_batch)
        half_batch_size = int(batch_size/2)

        self.sess.run(self.z_r_update)
        tf_real_batch = to_nchw_numpy(real_batch)
        for i in trange(train_epoch):
            z_r_loss, _ = self.sess.run([self.z_r_loss, self.z_r_optim], {self.x: tf_real_batch})
        z = self.sess.run(self.z_r)

        z1, z2 = z[:half_batch_size], z[half_batch_size:]

        generated = []
        for idx, ratio in enumerate(np.linspace(0, 1, 10)):
            z = np.stack([slerp(ratio, r1, r2) for r1, r2 in zip(z1, z2)])
            z_decode = self.generate(z, save=False)
            generated.append(z_decode)

        generated = np.stack(generated).transpose([1, 0, 2, 3, 4])
        for idx, img in enumerate(generated):
            save_image(img, os.path.join(root_path, 'test{}_interp_G_{}.png'.format(step, idx)), nrow=10)

        all_img_num = np.prod(generated.shape[:2])
        batch_generated = np.reshape(generated, [all_img_num] + list(generated.shape[2:]))
        save_image(batch_generated, os.path.join(root_path, 'test{}_interp_G.png'.format(step)), nrow=10)

    def interpolate_D(self, real1_batch, real2_batch, step=0, root_path="."):
        """Interpolate between the D-encodings of two real batches and save."""
        real1_encode = self.encode(real1_batch)
        real2_encode = self.encode(real2_batch)

        decodes = []
        for idx, ratio in enumerate(np.linspace(0, 1, 10)):
            z = np.stack([slerp(ratio, r1, r2) for r1, r2 in zip(real1_encode, real2_encode)])
            z_decode = self.decode(z)
            decodes.append(z_decode)

        decodes = np.stack(decodes).transpose([1, 0, 2, 3, 4])
        for idx, img in enumerate(decodes):
            img = np.concatenate([[real1_batch[idx]], img, [real2_batch[idx]]], 0)
            save_image(img, os.path.join(root_path, 'test{}_interp_D_{}.png'.format(step, idx)), nrow=10 + 2)

    def test(self):
        """Generate reconstruction / interpolation / sample grids for inspection."""
        root_path = "./"  # self.model_dir

        all_G_z = None
        for step in range(3):
            real1_batch = self.get_image_from_loader()
            real2_batch = self.get_image_from_loader()

            save_image(real1_batch, os.path.join(root_path, 'test{}_real1.png'.format(step)))
            save_image(real2_batch, os.path.join(root_path, 'test{}_real2.png'.format(step)))

            self.autoencode(
                real1_batch, self.model_dir, idx=os.path.join(root_path, "test{}_real1".format(step)))
            self.autoencode(
                real2_batch, self.model_dir, idx=os.path.join(root_path, "test{}_real2".format(step)))

            self.interpolate_G(real1_batch, step, root_path)
            #self.interpolate_D(real1_batch, real2_batch, step, root_path)

            z_fixed = np.random.uniform(-1, 1, size=(self.batch_size, self.z_num))
            G_z = self.generate(z_fixed, path=os.path.join(root_path, "test{}_G_z.png".format(step)))

            if all_G_z is None:
                all_G_z = G_z
            else:
                all_G_z = np.concatenate([all_G_z, G_z])
            save_image(all_G_z, '{}/G_z{}.png'.format(root_path, step))

        save_image(all_G_z, '{}/all_G_z.png'.format(root_path), nrow=16)

    def get_image_from_loader(self):
        """Evaluate one batch from the input pipeline, returned as NHWC numpy."""
        x = self.data_loader.eval(session=self.sess)
        if self.data_format == 'NCHW':
            x = x.transpose([0, 2, 3, 1])
        return x

    def load(self, checkpoint_dir):
        """Restore the latest checkpoint from `checkpoint_dir`; return success flag."""
        print(" [*] Reading checkpoints from %s..." % checkpoint_dir)

        ckpt = tf.train.get_checkpoint_state(checkpoint_dir)
        if ckpt and ckpt.model_checkpoint_path:
            ckpt_name = os.path.basename(ckpt.model_checkpoint_path)
            self.saver.restore(self.sess, os.path.join(checkpoint_dir, ckpt_name))
            print(" [*] Success to read {}".format(ckpt_name))
            return True
        else:
            print(" [*] Failed to find a checkpoint")
            return False
def rank(array):
    """Return the number of dimensions of `array`."""
    return len(array.shape)


def make_grid(tensor, nrow=8, padding=2,
              normalize=False, scale_each=False):
    """Tile a batch of HxWx3 uint8 images into a single grid image.

    Code based on https://github.com/pytorch/vision/blob/master/torchvision/utils.py
    (`normalize` and `scale_each` are accepted for API compatibility but unused.)
    """
    nmaps = tensor.shape[0]
    xmaps = min(nrow, nmaps)
    ymaps = int(math.ceil(float(nmaps) / xmaps))
    cell_h = int(tensor.shape[1] + padding)
    cell_w = int(tensor.shape[2] + padding)
    grid = np.zeros([cell_h * ymaps + 1 + padding // 2,
                     cell_w * xmaps + 1 + padding // 2, 3], dtype=np.uint8)
    # Fill cells left-to-right, top-to-bottom; trailing cells stay black.
    for k in range(min(nmaps, xmaps * ymaps)):
        row, col = divmod(k, xmaps)
        top = row * cell_h + 1 + padding // 2
        left = col * cell_w + 1 + padding // 2
        grid[top:top + cell_h - padding, left:left + cell_w - padding] = tensor[k]
    return grid


def save_image(tensor, filename, nrow=8, padding=2,
               normalize=False, scale_each=False):
    """Render `tensor` as an image grid and write it to `filename`."""
    grid = make_grid(tensor, nrow=nrow, padding=padding,
                     normalize=normalize, scale_each=scale_each)
    Image.fromarray(grid).save(filename)
Download the precalculated statistics (see above) and save them into the "stats" folder. +- Modify the incept_path in file run.sh +- Run the command: bash run.sh +- Checkpoint, sample and Tensorboard log directories will be automatically created in logs. + +## FID evaluation: parameters fid_n_samples and fid_sample_batchsize +The evaluation of the FID needs the comparison between precalculated statistics of real world data vs statistics of generated data. +The calculation of the latter is a tradeoff between number of samples (the more the better) and available hardware. Two parameters +in run.sh are concerned with this calculation: fid_n_samples and fid_sample_batchsize. The first parameter specifies the number of +generated samples on which the statistics are calculated. Since this number should be high, it is very likely that it is not possible +to generate this amount of samples at once. Thus the generation process is batched with batches of size fid_sample_batchsize. diff --git a/Wav2Lip-master/evaluation/TTUR-master/DCGAN_FID_batched/logs/README.md b/Wav2Lip-master/evaluation/TTUR-master/DCGAN_FID_batched/logs/README.md new file mode 100644 index 00000000..7328fd0f --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/DCGAN_FID_batched/logs/README.md @@ -0,0 +1 @@ +Tensorboard logfiles, samples, checkpoints will be stored in automatically generated subdirectories here. 
# DCGAN-with-FID training entry point (TensorFlow 1.x).
# Relies on module-level imports at the top of this file:
# os, numpy as np, tensorflow as tf, fid, DCGAN (model), pp (utils).

flags = tf.app.flags
flags.DEFINE_integer("epoch", 25, "Epoch to train [25]")

flags.DEFINE_float("learning_rate_d", 0.0002, "Discriminator learning rate of for adam [0.002]")
flags.DEFINE_float("learning_rate_g", 0.0002, "Generator learning rate of for adam [0.0002]")
flags.DEFINE_float("lr_decay_rate_d", 1.0, "Discriminator learning rate decay [1.0]")
flags.DEFINE_float("lr_decay_rate_g", 1.0, "Generator learning rate decay [1.0]")

flags.DEFINE_float("beta1", 0.5, "Momentum term of adam [0.5]")
flags.DEFINE_integer("train_size", np.inf, "The size of train images [np.inf]")
flags.DEFINE_integer("batch_size", 64, "The size of batch images [64]")
flags.DEFINE_integer("input_height", 108, "The size of image to use (will be center cropped). [108]")
flags.DEFINE_integer("input_width", None, "The size of image to use (will be center cropped). If None, same value as input_height [None]")
flags.DEFINE_integer("output_height", 64, "The size of the output images to produce [64]")
flags.DEFINE_integer("output_width", None, "The size of the output images to produce. If None, same value as output_height [None]")
flags.DEFINE_integer("c_dim", 3, "Dimension of image color. [3]")
flags.DEFINE_string("dataset", "celebA", "The name of dataset [celebA, mnist, lsun]")
flags.DEFINE_string("input_fname_pattern", "*.jpg", "Glob pattern of filename of input images [*]")
flags.DEFINE_string("checkpoint_dir", "checkpoint", "Directory name to save the checkpoints [checkpoint]")
flags.DEFINE_string("checkpoint_name", None, "Directory name to load a checkpoint from [None]")
flags.DEFINE_boolean("load_checkpoint", False, "Load checkpoint [False]")
flags.DEFINE_integer("counter_start", 0, "counter to start with [0]")
flags.DEFINE_string("sample_dir", "samples", "Directory name to save the image samples [samples]")
flags.DEFINE_string("log_dir", "logs", "Directory name for summary logs [logs]")
flags.DEFINE_boolean("is_train", False, "True for training, False for testing [False]")
flags.DEFINE_boolean("is_crop", False, "True for training, False for testing [False]")
flags.DEFINE_boolean("visualize", False, "True for visualizing, False for nothing [False]")

# added parameters for batched fid
flags.DEFINE_string("stats_path", None, "Path to pretrained statistics")
flags.DEFINE_string("data_path", None, "Path to input data")
flags.DEFINE_string("incept_path", None, "Path to inception net.")
flags.DEFINE_integer("fid_n_samples", 10000, "Total number of samples generated to calculate the FID statistics. Will be adjusted if not a multiple of fid_sample_batchsize [10000]")
flags.DEFINE_integer("fid_sample_batchsize", 5000, "Batchsize of batches that constitute all generated samples to calculate the FID statistics [5000]")
flags.DEFINE_integer("fid_batch_size", 100, "Batchsize used for FID calculation [500]")
flags.DEFINE_boolean("fid_verbose", True, "Report current state of FID calculation [True]")
flags.DEFINE_integer("fid_eval_steps", 1000, "Evaluate FID after this number of minibatches")


FLAGS = flags.FLAGS


def _ensure_dir(path, label):
    """Create `path` if it does not exist yet (idempotent)."""
    if not os.path.exists(path):
        print("*** create %s dir %s" % (label, path))
        os.makedirs(path)


def main(_):
    """Build the DCGAN from FLAGS, then train or run test mode."""
    pp.pprint(flags.FLAGS.__flags)

    # Create output directories once. (The original additionally repeated
    # the checkpoint_dir/sample_dir creation further down — removed.)
    _ensure_dir(FLAGS.log_dir, "log")
    _ensure_dir(FLAGS.sample_dir, "sample")
    _ensure_dir(FLAGS.checkpoint_dir, "checkpoint")

    # Persist the effective flag values next to the logs.
    with open("%s/flags.txt" % FLAGS.log_dir, "w") as flags_file:
        for k, v in flags.FLAGS.__flags.items():
            print('{}, {}'.format(k, v), file=flags_file)

    if FLAGS.input_width is None:
        FLAGS.input_width = FLAGS.input_height
    if FLAGS.output_width is None:
        FLAGS.output_width = FLAGS.output_height

    run_config = tf.ConfigProto()
    run_config.gpu_options.allow_growth = True

    # Load the inception inference graph used for FID evaluation.
    fid.create_inception_graph(FLAGS.incept_path)

    with tf.Session(config=run_config) as sess:
        if FLAGS.dataset == 'mnist':
            # BUG FIX: the original passed batch_size_m=FLAGS.batch_size_m
            # here, but no such flag is defined and DCGAN.__init__ accepts
            # no such argument — the call crashed before DCGAN could report
            # that the conditional MNIST path is unsupported.
            dcgan = DCGAN(
                sess,
                input_width=FLAGS.input_width,
                input_height=FLAGS.input_height,
                output_width=FLAGS.output_width,
                output_height=FLAGS.output_height,
                batch_size=FLAGS.batch_size,
                y_dim=10,
                c_dim=1,
                dataset_name=FLAGS.dataset,
                input_fname_pattern=FLAGS.input_fname_pattern,
                is_crop=FLAGS.is_crop,
                checkpoint_dir=FLAGS.checkpoint_dir,
                sample_dir=FLAGS.sample_dir,
                log_dir=FLAGS.log_dir,
                stats_path=FLAGS.stats_path,
                data_path=FLAGS.data_path,
                fid_n_samples=FLAGS.fid_n_samples,
                fid_sample_batchsize=FLAGS.fid_sample_batchsize,
                fid_batch_size=FLAGS.fid_batch_size,
                fid_verbose=FLAGS.fid_verbose,
                beta1=FLAGS.beta1)
        else:
            dcgan = DCGAN(
                sess,
                input_width=FLAGS.input_width,
                input_height=FLAGS.input_height,
                output_width=FLAGS.output_width,
                output_height=FLAGS.output_height,
                batch_size=FLAGS.batch_size,
                c_dim=FLAGS.c_dim,
                dataset_name=FLAGS.dataset,
                input_fname_pattern=FLAGS.input_fname_pattern,
                is_crop=FLAGS.is_crop,
                load_checkpoint=FLAGS.load_checkpoint,
                counter_start=FLAGS.counter_start,
                checkpoint_dir=FLAGS.checkpoint_dir,
                sample_dir=FLAGS.sample_dir,
                log_dir=FLAGS.log_dir,
                stats_path=FLAGS.stats_path,
                data_path=FLAGS.data_path,
                fid_n_samples=FLAGS.fid_n_samples,
                fid_sample_batchsize=FLAGS.fid_sample_batchsize,
                fid_batch_size=FLAGS.fid_batch_size,
                fid_verbose=FLAGS.fid_verbose,
                beta1=FLAGS.beta1)

        if FLAGS.is_train:
            dcgan.train(FLAGS)
        else:
            if not dcgan.load(FLAGS.checkpoint_dir):
                raise Exception("[!] Train a model first, then run test mode")


if __name__ == '__main__':
    tf.app.run()
[64] + gfc_dim: (optional) Dimension of gen units for for fully connected layer. [1024] + dfc_dim: (optional) Dimension of discrim units for fully connected layer. [1024] + c_dim: (optional) Dimension of image color. For grayscale input, set to 1. [3] + """ + + self.sess = sess + self.is_crop = is_crop + self.is_grayscale = (c_dim == 1) + + self.batch_size = batch_size + self.sample_num = sample_num + + self.input_height = input_height + self.input_width = input_width + self.output_height = output_height + self.output_width = output_width + + self.y_dim = y_dim + self.z_dim = z_dim + + self.gf_dim = gf_dim + self.df_dim = df_dim + + self.gfc_dim = gfc_dim + self.dfc_dim = dfc_dim + + self.c_dim = c_dim + + # Batch normalization : deals with poor initialization helps gradient flow + self.d_bn1 = batch_norm(name='d_bn1') + self.d_bn2 = batch_norm(name='d_bn2') + self.d_bn3 = batch_norm(name='d_bn3') + + self.g_bn0 = batch_norm(name='g_bn0') + self.g_bn1 = batch_norm(name='g_bn1') + self.g_bn2 = batch_norm(name='g_bn2') + self.g_bn3 = batch_norm(name='g_bn3') + + self.dataset_name = dataset_name + self.input_fname_pattern = input_fname_pattern + self.load_checkpoint = load_checkpoint + self.checkpoint_dir = checkpoint_dir + self.counter_start = counter_start + self.log_dir = log_dir + self.stats_path = stats_path + self.data_path = data_path + self.fid_n_samples=fid_n_samples + self.fid_sample_batchsize=fid_sample_batchsize + self.fid_batch_size = fid_batch_size + self.fid_verbose = fid_verbose + + self.beta1 = beta1 + + print("build model.. 
", end="", flush=True) + self.build_model() + print("ok") + + # Model + def build_model(self): + + # Learning rate + self.learning_rate_d = tf.Variable(0.0, trainable=False) + self.learning_rate_g = tf.Variable(0.0, trainable=False) + + # Placeholders + + if self.is_crop: + image_dims = [self.output_height, self.output_width, self.c_dim] + else: + image_dims = [self.input_height, self.input_width, self.c_dim] + + self.inputs = tf.placeholder( + tf.float32, [self.batch_size] + image_dims, name='real_images') + self.sample_inputs = tf.placeholder( + tf.float32, [self.sample_num] + image_dims, name='sample_inputs') + + self.z = tf.placeholder( + tf.float32, [None, self.z_dim], name='z') + self.z_sum = tf.summary.histogram("z", self.z) + + self.z_fid = tf.placeholder( + tf.float32, [None, self.z_dim], name='z_fid') + + self.fid = tf.Variable(0.0, trainable=False) + + # Inputs + inputs = self.inputs + sample_inputs = self.sample_inputs + + # Discriminator and generator + if self.y_dim: + print() + print("Conditional GAN for MNIST not supported.") + raise SystemExit() + + else: + self.G = self.generator(self.z, batch_size=self.batch_size) + self.D_real, self.D_logits_real = self.discriminator(inputs) + + self.sampler_fid = self.sampler_func(self.z_fid, self.fid_sample_batchsize) + self.sampler = self.sampler_func(self.z, self.batch_size) + self.D_fake, self.D_logits_fake = self.discriminator(self.G, reuse=True) + + # Summaries + self.d_real_sum = tf.summary.histogram("d_real", self.D_real) + self.d_fake_sum = tf.summary.histogram("d_fake", self.D_fake) + self.G_sum = tf.summary.image("G", self.G) + + def sigmoid_cross_entropy_with_logits(x, y): + try: + return tf.nn.sigmoid_cross_entropy_with_logits(logits=x, labels=y) + except: + return tf.nn.sigmoid_cross_entropy_with_logits(logits=x, targets=y) + + # Discriminator Loss Real + self.d_loss_real = tf.reduce_mean( + sigmoid_cross_entropy_with_logits(self.D_logits_real, tf.ones_like(self.D_real))) + # Discriminator Loss 
Fake + self.d_loss_fake = tf.reduce_mean( + sigmoid_cross_entropy_with_logits(self.D_logits_fake, tf.zeros_like(self.D_fake))) + # Generator Loss + self.g_loss = tf.reduce_mean( + sigmoid_cross_entropy_with_logits(self.D_logits_fake, tf.ones_like(self.D_fake))) + + self.d_loss_real_sum = tf.summary.scalar("d_loss_real", self.d_loss_real) + self.d_loss_fake_sum = tf.summary.scalar("d_loss_fake", self.d_loss_fake) + + # Discriminator Loss Combined + self.d_loss = self.d_loss_real + self.d_loss_fake + + self.g_loss_sum = tf.summary.scalar("g_loss", self.g_loss) + self.d_loss_sum = tf.summary.scalar("d_loss", self.d_loss) + + self.lrate_sum_d = tf.summary.scalar('learning rate d', self.learning_rate_d) + self.lrate_sum_g = tf.summary.scalar('learning rate g', self.learning_rate_g) + + self.fid_sum = tf.summary.scalar("FID", self.fid) + + # Variables + t_vars = tf.trainable_variables() + + self.d_vars = [var for var in t_vars if 'd_' in var.name] + self.g_vars = [var for var in t_vars if 'g_' in var.name] + + # Train optimizers + opt_d = tf.train.AdamOptimizer(self.learning_rate_d, beta1=self.beta1) + opt_g = tf.train.AdamOptimizer(self.learning_rate_g, beta1=self.beta1) + + # Discriminator + grads_and_vars = opt_d.compute_gradients(self.d_loss, var_list=self.d_vars) + grads = [] + self.d_optim = opt_d.apply_gradients(grads_and_vars) + + # Gradient summaries discriminator + sum_grad_d = [] + for i, (grad, vars_) in enumerate(grads_and_vars): + grad_l2 = tf.sqrt(tf.reduce_sum(tf.square(grad))) + sum_grad_d.append(tf.summary.scalar("grad_l2_d_%d_%s" % (i, vars_.name), grad_l2)) + + # Generator + grads_and_vars = opt_g.compute_gradients(self.g_loss, var_list=self.g_vars) + self.g_optim = opt_g.apply_gradients(grads_and_vars) + + # Gradient summaries generator + sum_grad_g = [] + for i, (grad, vars_) in enumerate(grads_and_vars): + grad_l2 = tf.sqrt(tf.reduce_sum(tf.square(grad))) + sum_grad_g.append(tf.summary.scalar("grad_l2_g_%d_%s" % (i, vars_.name), grad_l2)) + + # 
Init: + tf.global_variables_initializer().run() + + # Summaries + self.g_sum = tf.summary.merge([self.z_sum, self.d_fake_sum, + self.G_sum, self.d_loss_fake_sum, self.g_loss_sum, self.lrate_sum_g] + sum_grad_g) + self.d_sum = tf.summary.merge( + [self.z_sum, self.d_real_sum, self.d_loss_real_sum, self.d_loss_sum, self.lrate_sum_d] + sum_grad_d) + self.writer = tf.summary.FileWriter(self.log_dir, self.sess.graph) + + + # Checkpoint saver + self.saver = tf.train.Saver() + + # check if fid_sample_batchsize is a multiple of fid_n_samples + if not (self.fid_n_samples % self.fid_sample_batchsize == 0): + new_bs = self.fid_n_samples // self.fid_sample_batchsize + n_old = self.fid_n_samples + self.fid_n_samples = new_bs * self.fid_sample_batchsize + print("""!WARNING: fid_sample_batchsize is not a multiple of fid_n_samples. + Number of generated sample will be adjusted form %d to %d """ % (n_old, self.fid_n_samples)) + + # Train model + def train(self, config): + """Train DCGAN""" + + print("load train stats.. 
", end="", flush=True) + # load precalculated training set statistics + f = np.load(self.stats_path) + mu_real, sigma_real = f['mu'][:], f['sigma'][:] + f.close() + print("ok") + + if config.dataset == 'mnist': + print("scan files", end=" ", flush=True) + data_X, data_y = self.load_mnist() + else: + if (config.dataset == "celebA") or (config.dataset == "cifar10"): + print("scan files", end=" ", flush=True) + data = glob(os.path.join(self.data_path, self.input_fname_pattern)) + else: + if config.dataset == "lsun": + print("scan files") + data = [] + for i in range(304): + print("\r%d" % i, end="", flush=True) + data += glob(os.path.join(self.data_path, str(i), self.input_fname_pattern)) + else: + print("Please specify dataset in run.sh [mnist, celebA, lsun, cifar10]") + raise SystemExit() + + print() + print("%d images found" % len(data)) + + # Z sample + #sample_z = np.random.normal(0, 1.0, size=(self.sample_num , self.z_dim)) + sample_z = np.random.uniform(-1.0, 1.0, size=(self.sample_num , self.z_dim)) + + # Input samples + sample_files = data[0:self.sample_num] + sample = [ + get_image(sample_file, + input_height=self.input_height, + input_width=self.input_width, + resize_height=self.output_height, + resize_width=self.output_width, + is_crop=self.is_crop, + is_grayscale=self.is_grayscale) for sample_file in sample_files] + if (self.is_grayscale): + sample_inputs = np.array(sample).astype(np.float32)[:, :, :, None] + else: + sample_inputs = np.array(sample).astype(np.float32) + + if self.load_checkpoint: + if self.load(self.checkpoint_dir): + print(" [*] Load SUCCESS") + else: + print(" [!] 
Load failed...") + + # Batch preparing + batch_nums = min(len(data), config.train_size) // config.batch_size + data_idx = list(range(len(data))) + + counter = self.counter_start + + start_time = time.time() + + # Loop over epochs + for epoch in range(config.epoch): + + # Assign learning rates for d and g + lrate = config.learning_rate_d # * (config.lr_decay_rate_d ** epoch) + self.sess.run(tf.assign(self.learning_rate_d, lrate)) + lrate = config.learning_rate_g # * (config.lr_decay_rate_g ** epoch) + self.sess.run(tf.assign(self.learning_rate_g, lrate)) + + # Shuffle the data indices + np.random.shuffle(data_idx) + + # Loop over batches + for batch_idx in range(batch_nums): + + # Prepare batch + idx = data_idx[batch_idx * config.batch_size:(batch_idx + 1) * config.batch_size] + batch = [ + get_image(data[i], + input_height=self.input_height, + input_width=self.input_width, + resize_height=self.output_height, + resize_width=self.output_width, + is_crop=self.is_crop, + is_grayscale=self.is_grayscale) for i in idx] + if (self.is_grayscale): + batch_images = np.array(batch).astype(np.float32)[:, :, :, None] + else: + batch_images = np.array(batch).astype(np.float32) + + #batch_z = np.random.normal(0, 1.0, size=(config.batch_size , self.z_dim)).astype(np.float32) + batch_z = np.random.uniform(-1.0, 1.0, size=(config.batch_size , self.z_dim)).astype(np.float32) + + # Update D network + _, summary_str = self.sess.run([self.d_optim, self.d_sum], + feed_dict={self.inputs: batch_images, + self.z: batch_z}) + if np.mod(counter, 20) == 0: + self.writer.add_summary(summary_str, counter) + + # Update G network + _, summary_str = self.sess.run([self.g_optim, self.g_sum], + feed_dict={self.z: batch_z}) + if np.mod(counter, 20) == 0: + self.writer.add_summary(summary_str, counter) + + errD_fake = self.d_loss_fake.eval({ self.z: batch_z }) + errD_real = self.d_loss_real.eval({ self.inputs: batch_images }) + errG = self.g_loss.eval({self.z: batch_z}) + + # Print + if np.mod(counter, 
100) == 0: + print("Epoch: [%2d] [%4d/%4d] time: %4.4f, d_loss: %.8f, g_loss: %.8f" \ + % (epoch, batch_idx, batch_nums, time.time() - start_time, errD_fake+errD_real, errG)) + + # Save generated samples and FID + if np.mod(counter, config.fid_eval_steps) == 0: + + # Save + try: + samples, d_loss, g_loss = self.sess.run( + [self.sampler, self.d_loss, self.g_loss], + feed_dict={self.z: sample_z, + self.inputs: sample_inputs}) + save_images(samples, [8, 8], '{}/train_{:02d}_{:04d}.png'.format(config.sample_dir, epoch, batch_idx)) + print("[Sample] d_loss: %.8f, g_loss: %.8f" % (d_loss, g_loss)) + except Exception as e: + print(e) + print("sample image error!") + + # FID + print("samples for incept", end="", flush=True) + + samples = np.zeros((self.fid_n_samples, self.output_height, self.output_width, 3)) + n_batches = self.fid_n_samples // self.fid_sample_batchsize + lo = 0 + for btch in range(n_batches): + print("\rsamples for incept %d/%d" % (btch + 1, n_batches), end=" ", flush=True) + #sample_z_fid = np.random.normal(0, 1.0, size=(self.fid_sample_batchsize, self.z_dim)) + sample_z_fid = np.random.uniform(-1.0, 1.0, size=(self.fid_sample_batchsize, self.z_dim)) + samples[lo:(lo+self.fid_sample_batchsize)] = self.sess.run( self.sampler_fid, + feed_dict={self.z_fid: sample_z_fid}) + lo += self.fid_sample_batchsize + + samples = (samples + 1.) 
* 127.5 + print("ok") + + mu_gen, sigma_gen = fid.calculate_activation_statistics( samples, + self.sess, + batch_size=self.fid_batch_size, + verbose=self.fid_verbose) + + print("calculate FID:", end=" ", flush=True) + try: + FID = fid.calculate_frechet_distance(mu_gen, sigma_gen, mu_real, sigma_real) + except Exception as e: + print(e) + FID=500 + + print(FID) + + # Update event log with FID + self.sess.run(tf.assign(self.fid, FID)) + summary_str = self.sess.run(self.fid_sum) + self.writer.add_summary(summary_str, counter) + + # Save checkpoint + if (counter != 0) and (np.mod(counter, 2000) == 0): + self.save(config.checkpoint_dir, counter) + + counter += 1 + + # Discriminator + def discriminator(self, image, y=None, reuse=False): + with tf.variable_scope("discriminator") as scope: + if reuse: + scope.reuse_variables() + h0 = lrelu(conv2d(image, self.df_dim, name='d_h0_conv')) + h1 = lrelu(self.d_bn1(conv2d(h0, self.df_dim*2, name='d_h1_conv'))) + h2 = lrelu(self.d_bn2(conv2d(h1, self.df_dim*4, name='d_h2_conv'))) + h3 = lrelu(self.d_bn3(conv2d(h2, self.df_dim*8, name='d_h3_conv'))) + h4 = linear(tf.reshape(h3, [self.batch_size, -1]), 1, 'd_h3_lin') + return tf.nn.sigmoid(h4), h4 + + + # Generator + def generator(self, z, y=None, batch_size=None): + with tf.variable_scope("generator") as scope: + + if not self.y_dim: + s_h, s_w = self.output_height, self.output_width + s_h2, s_w2 = conv_out_size_same(s_h, 2), conv_out_size_same(s_w, 2) + s_h4, s_w4 = conv_out_size_same(s_h2, 2), conv_out_size_same(s_w2, 2) + s_h8, s_w8 = conv_out_size_same(s_h4, 2), conv_out_size_same(s_w4, 2) + s_h16, s_w16 = conv_out_size_same(s_h8, 2), conv_out_size_same(s_w8, 2) + + # Project `z` and reshape + self.z_, self.h0_w, self.h0_b = linear( + z, self.gf_dim*8*s_h16*s_w16, 'g_h0_lin', with_w=True) + self.h0 = tf.reshape( + self.z_, [-1, s_h16, s_w16, self.gf_dim * 8]) + h0 = tf.nn.relu(self.g_bn0(self.h0)) + + # Deconv + self.h1, self.h1_w, self.h1_b = deconv2d( + h0, [batch_size, s_h8, 
s_w8, self.gf_dim*4], name='g_h1', with_w=True) + h1 = tf.nn.relu(self.g_bn1(self.h1)) + h2, self.h2_w, self.h2_b = deconv2d( + h1, [batch_size, s_h4, s_w4, self.gf_dim*2], name='g_h2', with_w=True) + h2 = tf.nn.relu(self.g_bn2(h2)) + h3, self.h3_w, self.h3_b = deconv2d( + h2, [batch_size, s_h2, s_w2, self.gf_dim*1], name='g_h3', with_w=True) + h3 = tf.nn.relu(self.g_bn3(h3)) + h4, self.h4_w, self.h4_b = deconv2d( + h3, [batch_size, s_h, s_w, self.c_dim], name='g_h4', with_w=True) + + return tf.nn.tanh(h4) + + + # Sampler + def sampler_func(self, z, batch_size, y=None): + with tf.variable_scope("generator") as scope: + scope.reuse_variables() + + if not self.y_dim: + s_h, s_w = self.output_height, self.output_width + s_h2, s_w2 = conv_out_size_same(s_h, 2), conv_out_size_same(s_w, 2) + s_h4, s_w4 = conv_out_size_same(s_h2, 2), conv_out_size_same(s_w2, 2) + s_h8, s_w8 = conv_out_size_same(s_h4, 2), conv_out_size_same(s_w4, 2) + s_h16, s_w16 = conv_out_size_same(s_h8, 2), conv_out_size_same(s_w8, 2) + + # Project `z` and reshape + h0 = tf.reshape( + linear(z, self.gf_dim*8*s_h16*s_w16, 'g_h0_lin'), + [-1, s_h16, s_w16, self.gf_dim * 8]) + h0 = tf.nn.relu(self.g_bn0(h0, train=False)) + + # Deconv + h1 = deconv2d(h0, [batch_size, s_h8, s_w8, self.gf_dim*4], name='g_h1') + h1 = tf.nn.relu(self.g_bn1(h1, train=False)) + h2 = deconv2d(h1, [batch_size, s_h4, s_w4, self.gf_dim*2], name='g_h2') + h2 = tf.nn.relu(self.g_bn2(h2, train=False)) + h3 = deconv2d(h2, [batch_size, s_h2, s_w2, self.gf_dim*1], name='g_h3') + h3 = tf.nn.relu(self.g_bn3(h3, train=False)) + h4 = deconv2d(h3, [batch_size, s_h, s_w, self.c_dim], name='g_h4') + + return tf.nn.tanh(h4) + + + @property + def model_dir(self): + return "{}_{}_{}_{}".format( + self.dataset_name, self.batch_size, + self.output_height, self.output_width) + + # Save checkpoint + def save(self, checkpoint_dir, step): + model_name = "DCGAN.model" + checkpoint_dir = os.path.join(checkpoint_dir, self.model_dir) + + if not 
os.path.exists(checkpoint_dir): + os.makedirs(checkpoint_dir) + + self.saver.save(self.sess, + os.path.join(checkpoint_dir, model_name), + global_step=step) + + # Load checkpoint + def load(self, checkpoint_dir): + print(" [*] Reading checkpoints...") + checkpoint_dir = os.path.join(checkpoint_dir, self.model_dir) + + ckpt = tf.train.get_checkpoint_state(checkpoint_dir) + if ckpt and ckpt.model_checkpoint_path: + ckpt_name = os.path.basename(ckpt.model_checkpoint_path) + self.saver.restore(self.sess, os.path.join(checkpoint_dir, ckpt_name)) + print(" [*] Success to read {}".format(ckpt_name)) + return True + else: + print(" [*] Failed to find a checkpoint") + return False + diff --git a/Wav2Lip-master/evaluation/TTUR-master/DCGAN_FID_batched/ops.py b/Wav2Lip-master/evaluation/TTUR-master/DCGAN_FID_batched/ops.py new file mode 100644 index 00000000..26342040 --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/DCGAN_FID_batched/ops.py @@ -0,0 +1,124 @@ +import math +import numpy as np +import tensorflow as tf + +from tensorflow.python.framework import ops + +from utils import * + +try: + image_summary = tf.image_summary + scalar_summary = tf.scalar_summary + histogram_summary = tf.histogram_summary + merge_summary = tf.merge_summary + SummaryWriter = tf.train.SummaryWriter +except: + image_summary = tf.summary.image + scalar_summary = tf.summary.scalar + histogram_summary = tf.summary.histogram + merge_summary = tf.summary.merge + SummaryWriter = tf.summary.FileWriter + +if "concat_v2" in dir(tf): + def concat(tensors, axis, *args, **kwargs): + return tf.concat_v2(tensors, axis, *args, **kwargs) +else: + def concat(tensors, axis, *args, **kwargs): + return tf.concat(tensors, axis, *args, **kwargs) + +class batch_norm(object): + def __init__(self, scale=True, epsilon=1e-5, momentum = 0.9, name="batch_norm"): + with tf.variable_scope(name): + self.epsilon = epsilon + self.momentum = momentum + self.name = name + self.scale = scale + + def __call__(self, x, 
train=True): + return tf.contrib.layers.batch_norm(x, + decay=self.momentum, + updates_collections=None, + epsilon=self.epsilon, + scale=self.scale, + is_training=train, + scope=self.name) + +def conv_cond_concat(x, y): + """Concatenate conditioning vector on feature map axis.""" + x_shapes = x.get_shape() + y_shapes = y.get_shape() + return concat([ + x, y*tf.ones([x_shapes[0], x_shapes[1], x_shapes[2], y_shapes[3]])], 3) + +def conv2d(input_, output_dim, + k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02, + name="conv2d"): + # ELU init stddev + #n = k_h * k_w * (input_.get_shape().as_list()[-1] + output_dim) / 2.0 + #n = k_h * k_w * tf.sqrt(tf.cast(input_.get_shape().as_list()[-1] * output_dim, tf.float32)) + n = k_h * k_w * input_.get_shape().as_list()[-1] + #stddev = tf.sqrt(1.55052/n) + with tf.variable_scope(name): + w = tf.get_variable('w', [k_h, k_w, input_.get_shape()[-1], output_dim], + initializer=tf.truncated_normal_initializer(stddev=stddev)) + conv = tf.nn.conv2d(input_, w, strides=[1, d_h, d_w, 1], padding='SAME') + + biases = tf.get_variable('biases', [output_dim], initializer=tf.constant_initializer(0.0)) + conv = tf.reshape(tf.nn.bias_add(conv, biases), conv.get_shape()) + + return conv + +def deconv2d(input_, output_shape, + k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02, + name="deconv2d", with_w=False): + # ELU init stddev + #n = k_h * k_w * (input_.get_shape().as_list()[-1] + output_shape[-1]) / 2.0 + #n = k_h * k_w * tf.sqrt(tf.cast(input_.get_shape().as_list()[-1] * output_shape[-1], tf.float32)) + n = k_h * k_w *input_.get_shape().as_list()[-1] + #stddev = tf.sqrt(1.55052/n) + with tf.variable_scope(name): + # filter : [height, width, output_channels, in_channels] + w = tf.get_variable('w', [k_h, k_w, output_shape[-1], input_.get_shape()[-1]], + initializer=tf.random_normal_initializer(stddev=stddev)) + + try: + deconv = tf.nn.conv2d_transpose(input_, w, output_shape=output_shape, + strides=[1, d_h, d_w, 1]) + + # Support for verisons of TensorFlow before 
0.7.0 + except AttributeError: + deconv = tf.nn.deconv2d(input_, w, output_shape=output_shape, + strides=[1, d_h, d_w, 1]) + + biases = tf.get_variable('biases', [output_shape[-1]], initializer=tf.constant_initializer(0.0)) + deconv = tf.reshape(tf.nn.bias_add(deconv, biases), deconv.get_shape()) + + if with_w: + return deconv, w, biases + else: + return deconv + +def lrelu(x, leak=0.2, name="lrelu"): + return tf.maximum(x, leak*x) + +def elu(x, name="elu"): + return(tf.nn.elu(x)) + +# Scaled ELU +def selu(x, name="selu"): + alpha = 1.6732632423543772848170429916717 + scale = 1.0507009873554804934193349852946 + return scale*tf.where(x>=0.0, x, alpha*tf.nn.elu(x)) + +def linear(input_, output_size, scope=None, stddev=0.02, bias_start=0.0, with_w=False): + shape = input_.get_shape().as_list() + + with tf.variable_scope(scope or "Linear"): + matrix = tf.get_variable("Matrix", [shape[1], output_size], tf.float32, + tf.random_normal_initializer(stddev=stddev)) + bias = tf.get_variable("bias", [output_size], + initializer=tf.constant_initializer(bias_start)) + if with_w: + return tf.matmul(input_, matrix) + bias, matrix, bias + else: + return tf.matmul(input_, matrix) + bias diff --git a/Wav2Lip-master/evaluation/TTUR-master/DCGAN_FID_batched/run.sh b/Wav2Lip-master/evaluation/TTUR-master/DCGAN_FID_batched/run.sh new file mode 100644 index 00000000..70cfe47e --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/DCGAN_FID_batched/run.sh @@ -0,0 +1,78 @@ +#!/bin/bash + +# celebA, lsun, imagenet, cifar10.. +dataset="cifar10" + +lr_d=$1 +lr_g=$2 + +counter_start=0 +load_checkpoint=false +if ! 
$load_checkpoint; then + dwt=`date "+%m%d_%H%M%S"` + run_id=${dwt}_${lr_d}_${lr_g} +else + run_id="MMdd_hhmmss_lrd_lrg" +fi + +incept_path="inception-2015-12-05/classify_image_graph_def.pb" + +case $dataset in + celebA) + data_path="data/celebA_cropped" + stats_path="stats/fid_stats_celeba.npz" + input_height=64 + output_height=64 + input_fname_pattern="*.jpg" + epochs=81 + ;; + lsun) + data_path="data/lsun_cropped" + stats_path="stats/fid_stats_lsun.npz" + input_height=64 + output_height=64 + input_fname_pattern="*.jpg" + epochs=9 + ;; + imagenet) + data_path="data/imagenet" + stats_path="stats/fid_stats_imagenet.npz" + input_height=64 + output_height=64 + input_fname_pattern="*.jpg" + epochs=5 + ;; + cifar10) + data_path="data/cifar10_train" + stats_path="stats/fid_stats_cifar10.npz" + input_fname_pattern="*.png" + input_height=32 + output_height=32 + epochs=500 + ;; +esac + +python3 main.py \ +--dataset=$dataset \ +--input_height=$input_height \ +--output_height=$output_height \ +--input_fname_pattern=$input_fname_pattern \ +--is_crop False \ +--is_train=True \ +--batch_size=64 \ +--checkpoint_dir="logs/${run_id}/checkpoints" \ +--log_dir="logs/${run_id}/logs" \ +--sample_dir="logs/${run_id}/samples" \ +--fid_n_samples 50000 \ +--fid_sample_batchsize 1000 \ +--fid_batch_size 100 \ +--fid_eval_steps 5000 \ +--learning_rate_d $lr_d \ +--learning_rate_g $lr_g \ +--beta1 0.5 \ +--epoch $epochs \ +--load_checkpoint $load_checkpoint \ +--counter_start $counter_start \ +--incept_path $incept_path \ +--data_path $data_path \ +--stats_path $stats_path \ diff --git a/Wav2Lip-master/evaluation/TTUR-master/DCGAN_FID_batched/stats/README.md b/Wav2Lip-master/evaluation/TTUR-master/DCGAN_FID_batched/stats/README.md new file mode 100644 index 00000000..6765f6d4 --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/DCGAN_FID_batched/stats/README.md @@ -0,0 +1 @@ +Folder for precalculated FID statistics diff --git 
a/Wav2Lip-master/evaluation/TTUR-master/DCGAN_FID_batched/utils.py b/Wav2Lip-master/evaluation/TTUR-master/DCGAN_FID_batched/utils.py new file mode 100644 index 00000000..152b530b --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/DCGAN_FID_batched/utils.py @@ -0,0 +1,241 @@ +""" +Some codes from https://github.com/Newmu/dcgan_code +""" +from __future__ import division +import math +import json +import random +import pprint +import scipy.misc +import numpy as np +from time import gmtime, strftime +from six.moves import xrange + +pp = pprint.PrettyPrinter() + +get_stddev = lambda x, k_h, k_w: 1/math.sqrt(k_w*k_h*x.get_shape()[-1]) + +def get_image(image_path, input_height, input_width, + resize_height=64, resize_width=64, + is_crop=True, is_grayscale=False): + image = imread(image_path, is_grayscale) + return transform(image, input_height, input_width, + resize_height, resize_width, is_crop) + +def save_images(images, size, image_path): + return imsave(inverse_transform(images), size, image_path) + +def imread(path, is_grayscale = False): + if (is_grayscale): + return scipy.misc.imread(path, flatten = True).astype(np.float) + else: + return scipy.misc.imread(path).astype(np.float) + +def merge_images(images, size): + return inverse_transform(images) + +def merge(images, size): + h, w = images.shape[1], images.shape[2] + img = np.zeros((h * size[0], w * size[1], 3)) + for idx, image in enumerate(images): + i = idx % size[1] + j = idx // size[1] + img[j*h:j*h+h, i*w:i*w+w, :] = image + return img + +def imsave(images, size, path): + return scipy.misc.imsave(path, merge(images, size)) + +def center_crop(x, crop_h, crop_w, + resize_h=64, resize_w=64): + if crop_w is None: + crop_w = crop_h + h, w = x.shape[:2] + j = int(round((h - crop_h)/2.)) + i = int(round((w - crop_w)/2.)) + return scipy.misc.imresize( + x[j:j+crop_h, i:i+crop_w], [resize_h, resize_w]) + +def transform(image, input_height, input_width, + resize_height=64, resize_width=64, is_crop=True): + if 
is_crop: + cropped_image = center_crop( + image, input_height, input_width, + resize_height, resize_width) + else: + if (input_height != resize_height) or (input_width != resize_width): + cropped_image = scipy.misc.imresize(image, [resize_height, resize_width]) + else: + cropped_image = image + return np.array(cropped_image) / 127.5 - 1. + +def inverse_transform(images): + return (images+1.)/2. + +def to_json(output_path, *layers): + with open(output_path, "w") as layer_f: + lines = "" + for w, b, bn in layers: + layer_idx = w.name.split('/')[0].split('h')[1] + + B = b.eval() + + if "lin/" in w.name: + W = w.eval() + depth = W.shape[1] + else: + W = np.rollaxis(w.eval(), 2, 0) + depth = W.shape[0] + + biases = {"sy": 1, "sx": 1, "depth": depth, "w": ['%.2f' % elem for elem in list(B)]} + if bn != None: + gamma = bn.gamma.eval() + beta = bn.beta.eval() + + gamma = {"sy": 1, "sx": 1, "depth": depth, "w": ['%.2f' % elem for elem in list(gamma)]} + beta = {"sy": 1, "sx": 1, "depth": depth, "w": ['%.2f' % elem for elem in list(beta)]} + else: + gamma = {"sy": 1, "sx": 1, "depth": 0, "w": []} + beta = {"sy": 1, "sx": 1, "depth": 0, "w": []} + + if "lin/" in w.name: + fs = [] + for w in W.T: + fs.append({"sy": 1, "sx": 1, "depth": W.shape[0], "w": ['%.2f' % elem for elem in list(w)]}) + + lines += """ + var layer_%s = { + "layer_type": "fc", + "sy": 1, "sx": 1, + "out_sx": 1, "out_sy": 1, + "stride": 1, "pad": 0, + "out_depth": %s, "in_depth": %s, + "biases": %s, + "gamma": %s, + "beta": %s, + "filters": %s + };""" % (layer_idx.split('_')[0], W.shape[1], W.shape[0], biases, gamma, beta, fs) + else: + fs = [] + for w_ in W: + fs.append({"sy": 5, "sx": 5, "depth": W.shape[3], "w": ['%.2f' % elem for elem in list(w_.flatten())]}) + + lines += """ + var layer_%s = { + "layer_type": "deconv", + "sy": 5, "sx": 5, + "out_sx": %s, "out_sy": %s, + "stride": 2, "pad": 1, + "out_depth": %s, "in_depth": %s, + "biases": %s, + "gamma": %s, + "beta": %s, + "filters": %s + };""" % 
(layer_idx, 2**(int(layer_idx)+2), 2**(int(layer_idx)+2), + W.shape[0], W.shape[3], biases, gamma, beta, fs) + layer_f.write(" ".join(lines.replace("'","").split())) + +def make_gif(images, fname, duration=2, true_image=False): + import moviepy.editor as mpy + + def make_frame(t): + try: + x = images[int(len(images)/duration*t)] + except: + x = images[-1] + + if true_image: + return x.astype(np.uint8) + else: + return ((x+1)/2*255).astype(np.uint8) + + clip = mpy.VideoClip(make_frame, duration=duration) + clip.write_gif(fname, fps = len(images) / duration) + +def visualize(sess, dcgan, config, option): + image_frame_dim = int(math.ceil(config.batch_size**.5)) + if option == 0: + z_sample = np.random.uniform(-0.5, 0.5, size=(config.batch_size, dcgan.z_dim)) + samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample}) + save_images(samples, [image_frame_dim, image_frame_dim], '%s/test_%s.png' % (config.sample_dir, strftime("%Y-%m-%d %H:%M:%S", gmtime()))) + elif option == 1: + values = np.arange(0, 1, 1./config.batch_size) + for idx in xrange(100): + print(" [*] %d" % idx) + z_sample = np.zeros([config.batch_size, dcgan.z_dim]) + for kdx, z in enumerate(z_sample): + z[idx] = values[kdx] + + if config.dataset == "mnist": + y = np.random.choice(10, config.batch_size) + y_one_hot = np.zeros((config.batch_size, 10)) + y_one_hot[np.arange(config.batch_size), y] = 1 + + samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample, dcgan.y: y_one_hot}) + else: + samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample}) + + save_images(samples, [image_frame_dim, image_frame_dim], '%s/test_arange_%s.png' % (config.sample_dir, idx)) + elif option == 2: + values = np.arange(0, 1, 1./config.batch_size) + for idx in [random.randint(0, 99) for _ in xrange(100)]: + print(" [*] %d" % idx) + z = np.random.uniform(-0.2, 0.2, size=(dcgan.z_dim)) + z_sample = np.tile(z, (config.batch_size, 1)) + #z_sample = np.zeros([config.batch_size, dcgan.z_dim]) + for kdx, z in 
enumerate(z_sample): + z[idx] = values[kdx] + + if config.dataset == "mnist": + y = np.random.choice(10, config.batch_size) + y_one_hot = np.zeros((config.batch_size, 10)) + y_one_hot[np.arange(config.batch_size), y] = 1 + + samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample, dcgan.y: y_one_hot}) + else: + samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample}) + + try: + make_gif(samples, '%s/test_gif_%s.gif' % (config.sample_dir, idx)) + except: + save_images(samples, [image_frame_dim, image_frame_dim], '%s/test_%s.png' % (config.sample_dir, strftime("%Y-%m-%d %H:%M:%S", gmtime()))) + elif option == 3: + values = np.arange(0, 1, 1./config.batch_size) + for idx in xrange(100): + print(" [*] %d" % idx) + z_sample = np.zeros([config.batch_size, dcgan.z_dim]) + for kdx, z in enumerate(z_sample): + z[idx] = values[kdx] + + if config.dataset == "mnist": + y = np.random.choice(10, config.batch_size) + y_one_hot = np.zeros((config.batch_size, 10)) + y_one_hot[np.arange(config.batch_size), y] = 1 + + samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample, dcgan.y: y_one_hot}) + else: + samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample}) + + make_gif(samples, '%s/test_gif_%s.gif' % (config.sample_dir, idx)) + elif option == 4: + image_set = [] + values = np.arange(0, 1, 1./config.batch_size) + + for idx in xrange(100): + print(" [*] %d" % idx) + z_sample = np.zeros([config.batch_size, dcgan.z_dim]) + for kdx, z in enumerate(z_sample): z[idx] = values[kdx] + + if config.dataset == "mnist": + y = np.random.choice(10, config.batch_size) + y_one_hot = np.zeros((config.batch_size, 10)) + y_one_hot[np.arange(config.batch_size), y] = 1 + image_set.append(sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample, dcgan.y: y_one_hot})) + else: + image_set.append(sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample})) + + make_gif(image_set[-1], '%s/test_gif_%s.gif' % (config.sample_dir, idx)) + + new_image_set = [merge(np.array([images[idx] for 
images in image_set]), [10, 10]) \ + for idx in range(64) + range(63, -1, -1)] + make_gif(new_image_set, '%s/test_gif_merged.gif' % config.sample_dir, duration=8) diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/README.md b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/README.md new file mode 100644 index 00000000..31759b20 --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/README.md @@ -0,0 +1,87 @@ +# Comparison of FID and Inception Score + +This experiments should highlight a crucial difference between the FID and the Inception Score (IS). +The purpose of a generative model is to learn a real world distribution. Thus a good performance measure +should, roughly speaking, somehow capture how far off the model distribution is. The experiments show, +that in this sense the FID is a more useful measure. + +## Methodology +While the idea of the IS is to capture 1) how real the structures in the generated images are and +2) how much variability the generated samples have, there is no connection of the score to the +real world distribution. Clearly the assumptions of the IS are met best on the dataset it is trained +on, namely the ImageNet data set. It is however questionable, if the assumptions carry over to another image +datasets. As an example consider the celebA dataset. It consists of about 200k face images of celebrities. +While assumption 1) still holds it is not so clear why there should be a high variability across samples. + +But the main point is: an evaluation method should indicate how well the real world +distribution has been learned. This implies: disturbed images should lead to a +lower score or a higher distance respectively. Thus for the experiments we produce +disturbed images of the celebA dataset with increasing disturbance levels +to evaluate the FID and IS on them. +The IS is transformed to an distance as described in the TTUR paper. 
This is done to +make comparison between the two methods easier. We refer to the transformed +IS as the IND - the inception distance. + +## Experiments +1. Gaussian noise: We constructed a matrix N with Gaussian noise scaled to [0, 255]. The +noisy image is computed as (1 − α)X + αN for α ∈ {0, 0.25, 0.5, 0.75}. The larger α is, +the larger is the noise added to the image, the larger is the disturbance of the image. + +|FID|IND| +|-|-| +| || + +2. Gaussian blur: The image is convolved with a Gaussian kernel with standard deviation +α ∈ {0, 1, 2, 4}. The larger α is, the larger is the disturbance of the image, that is, +the more the image is smoothed. + +|FID | IND| +|-|-| +| || + + +3. Black rectangles: To an image five black rectangles are are added at randomly chosen +locations. The rectangles cover parts of the image.The size of the rectangles is +α imagesize with α ∈ {0, 0.25, 0.5, 0.75}. The larger α is, the larger is the disturbance +of the image, that is, the more of the image is covered by black rectangles. + +|FID|IND| +|-|-| +|| | + + +4. Swirl: Parts of the image are transformed as a spiral, that is, as a swirl (whirlpool +effect). Consider the coordinate (x, y) in the noisy (swirled) image for which we want to +find the color. Toward this end we need the reverse mapping for the swirl transformation +which gives the location which is mapped to (x, y). The disturbance level is given by the +amount of swirl α ∈ {0, 1, 2, 4}. The larger α is, the larger is the disturbance of the +image via the amount of swirl. + +|FID|IND| +|-|-| +| | | + + +5. Salt and pepper noise: Some pixels of the image are set to black or white, where black is +chosen with 50% probability (same for white). Pixels are randomly chosen for being flipped +to white or black, where the ratio of pixel flipped to white or black is given by the noise +level α ∈ {0, 0.1, 0.2, 0.3}. 
The larger α is, the larger is the noise added to the image via +flipping pixels to white or black, the larger is the disturbance level. + +|FID|IND| +|-|-| +| | | + + +6. ImageNet contamination: From each of the 1,000 ImageNet classes, 5 images are randomly +chosen, which gives 5,000 ImageNet images. The images are ensured to be RGB and to +have a minimal size of 256x256. A percentage of α ∈ {0, 0.25, 0.5, 0.75} of the CelebA +images has been replaced by ImageNet images. α = 0 means all images are from CelebA, +α = 0.25 means that 75% of the images are from CelebA and 25% from ImageNet etc. +The larger α is, the larger is the disturbance of the CelebA dataset by contaminating it by +ImageNet images. The larger the disturbance level is, the more the dataset deviates from the +reference real world dataset. + +|FID|IND| +|-|-| +| | | diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/blur_FID.pdf b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/blur_FID.pdf new file mode 100644 index 00000000..8e0a6259 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/blur_FID.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/blur_IND.pdf b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/blur_IND.pdf new file mode 100644 index 00000000..41fd973a Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/blur_IND.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/gnoise_FID.pdf b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/gnoise_FID.pdf new file mode 100644 index 00000000..2f30c35d Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/gnoise_FID.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/gnoise_IND.pdf 
b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/gnoise_IND.pdf new file mode 100644 index 00000000..32b6462e Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/gnoise_IND.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/mixed_FID.pdf b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/mixed_FID.pdf new file mode 100644 index 00000000..d292c3fa Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/mixed_FID.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/mixed_IND.pdf b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/mixed_IND.pdf new file mode 100644 index 00000000..05ab7cc3 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/mixed_IND.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/blur_FID.png b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/blur_FID.png new file mode 100644 index 00000000..ca5c4376 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/blur_FID.png differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/blur_IND.png b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/blur_IND.png new file mode 100644 index 00000000..83c431eb Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/blur_IND.png differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/gnoise_FID.png b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/gnoise_FID.png new file mode 100644 index 00000000..59b9c5a8 Binary files /dev/null and 
b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/gnoise_FID.png differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/gnoise_IND.png b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/gnoise_IND.png new file mode 100644 index 00000000..fdabd3be Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/gnoise_IND.png differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/mixed_FID.png b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/mixed_FID.png new file mode 100644 index 00000000..1db52268 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/mixed_FID.png differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/mixed_IND.png b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/mixed_IND.png new file mode 100644 index 00000000..e9018292 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/mixed_IND.png differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/rect_FID.png b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/rect_FID.png new file mode 100644 index 00000000..7ad48a64 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/rect_FID.png differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/rect_IND.png b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/rect_IND.png new file mode 100644 index 00000000..02ac8b94 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/rect_IND.png differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/sp_FID.png 
b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/sp_FID.png new file mode 100644 index 00000000..938943ec Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/sp_FID.png differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/sp_IND.png b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/sp_IND.png new file mode 100644 index 00000000..bf3d125a Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/sp_IND.png differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/swirl_FID.png b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/swirl_FID.png new file mode 100644 index 00000000..a4f8268a Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/swirl_FID.png differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/swirl_IND.png b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/swirl_IND.png new file mode 100644 index 00000000..4ae1aa20 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/png/swirl_IND.png differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/rect_FID.pdf b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/rect_FID.pdf new file mode 100644 index 00000000..63f26d92 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/rect_FID.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/rect_IND.pdf b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/rect_IND.pdf new file mode 100644 index 00000000..d88d6cd8 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/rect_IND.pdf 
differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/sp_FID.pdf b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/sp_FID.pdf new file mode 100644 index 00000000..776e4357 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/sp_FID.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/sp_IND.pdf b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/sp_IND.pdf new file mode 100644 index 00000000..c402bc01 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/sp_IND.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/swirl_FID.pdf b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/swirl_FID.pdf new file mode 100644 index 00000000..ec783ecf Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/swirl_FID.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/swirl_IND.pdf b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/swirl_IND.pdf new file mode 100644 index 00000000..46c459eb Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/swirl_IND.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/table_FID_vs_Inc.pdf b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/table_FID_vs_Inc.pdf new file mode 100644 index 00000000..1b36c2ef Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/FID_vs_Inception_Score/figures/table_FID_vs_Inc.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/FIDvsINC/create_plots.ipynb b/Wav2Lip-master/evaluation/TTUR-master/FIDvsINC/create_plots.ipynb new file mode 100644 index 00000000..816292dd --- /dev/null +++ 
b/Wav2Lip-master/evaluation/TTUR-master/FIDvsINC/create_plots.ipynb @@ -0,0 +1,343 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "ename": "ModuleNotFoundError", + "evalue": "No module named 'tensorflow'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0;32mimport\u001b[0m \u001b[0mfidutils\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mvis\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mfiy\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m0.83\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;31m# transform INC to IND\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/run/user/1000/gvfs/sftp:host=hawk/publicwork/ramsauer/GANs/FIDvsINC/fidutils.py\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mmath\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mrandom\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 5\u001b[0;31m \u001b[0;32mimport\u001b[0m \u001b[0mtensorflow\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 6\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mnumpy\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mscipy\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstats\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mst\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + 
"\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'tensorflow'" + ] + } + ], + "source": [ + "import fidutils as vis\n", + "\n", + "fiy=0.83\n", + "\n", + "# transform INC to IND\n", + "class fd:\n", + " def __init__(self):\n", + " self.ul=0\n", + " def __call__(self,x):\n", + " return ul-x\n", + " def set_upper_limit(ul):\n", + " self.ul=ul" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "gn_001: {'alpha': 0.0}\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAAEPCAYAAACp/QjLAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAIABJREFUeJzsvXd8XdWZNbz2rbrqvVuWLFlyr7Ix\nxWDTW4AAyYSEGRKSyUwmMwmZlkx5Zyb83pk3mZAvU4BkSCCEdJIwISQQMMW44N6rZMvqXbpq9+r2\ne74/nqOzDliAAFuWYK9/dLRP2+3c/TzrKVsZhgENDQ0NDQ07HBe6AhoaGhoaMw+uC12Bdwqfz9cT\nDoeLLnQ93i/wer2IRCIXuhrvG+j+PLfQ/Xl+kZKS0hsKhYonO6dmG62klDJmW51nMpRS0P157qD7\n89xC9+f5hdm/arJzmlbS0NDQ0DgLs45WOgv+fQCAQCBgFcViMQCAw+m0ypI8RCIRN//GrDKHuUwm\nYiKl9PaMWecCY1EAQEoqH6IcUT4vOqH2sjsdcJvvkEU53ZXB540MyjnErbLRIdYlAXlPWuEbGwso\nJc91uz1WmUd5zWdQ/T5xvNE6Hh7rBgBUzKX2WFxec/bDAaBxLwCg29ltFY0hBQCQn83nRxKpbE9w\nBACQPco6JdzSPz5vAQDgZFOndS46Jn3iKmEfeiLs20El5RnxcZ735gAA/G2tAIDytKWs35iUjYyn\nW2W+8LB1PJoj41JYxvFRI9KPY/lyXUp8nnWuJCljcbwjYZU1th6yjpPDXQCASHGVVXbZinJMBv/v\nvgsAeGEx27oukgcA8A7nWWXZwZB1HJ8j79+m2IbarTJ/okUyFnk5zda5NmcQAODwZfO9v2Zb+66W\nMQg0dFhlVYaMmT8xR/7mp1nnwn3y3vS0XKusy+O3jgvH5X3dPWzTknHpK3+l/J9RxMlb3C9tK+3g\nHHiilN9rRaP0Y//YMats3bqPYzLsPPYqAGDU+apVFvpOJgAgsZ5tqG3h/GwqlfnW6W+zygaXyBh2\nPiVtXFV9yjpXvlzqWd52g1V2Ks5nn6iUeZ4PzsGiNpkfYz1JqV/xtda51JxdAIAlwwN8XhO/lapU\n+R1ouGnUKvP/YD8AoLJmgxRUD1nnHC/LmHluW2KVHX2O47MidScAILco0yo7knoJAODeOy/CVKE1\nBw0NDQ2NszCtmoNS6jEANwPoMwxjia38LwB8HkACwO8Mw/jbqT5zxz5ZsYMhSl6R
8bAcuCj5RWGT\nesNh8y8ld0Cki1hcNIcUF6WwFI9IDS2HGqyycIySTzQu74lT+EfMFKocDuniklxK7X2dIumGo5SM\nU1yUIkvmVAAAzhymJBU1n+eESJBO5bPOhUytqbWl3Sprbeax1ydtWn/5aqusPmUStQTA/lN7AAA9\naeR5jTGRho6FKZ2oTJ6PtEjDg4rSUIZL6pTok7JE/hzrnA8yVs1tlOTiQY5FX0Ikt4Jxalt9DrnH\nFzW1AEUpa8Qv9RrJOWOVZcX4vtJikQ5f3UttKBYWbSueK/2YmmBfDwVEi+k52cSy45T6Qj6Rqa64\niZ9PW00BJsOJMhnX3CNs657EaQCAp2u/VeYvDVrHc/aLVnNwjJJ7MCxlyZ9IP526PGmdWz+YBQA4\n4mu1yppLw9bxwO9Fq8n3UjoOmFMvfYF8P9kHqAUd8Ml8N55hn6Rc3msdhzpvAgDEch+xyn5dWA8A\nyHlN+iS88ffWOc/zFwMARtI5ZtEGHj93RDSaLy8rtcrasA+TYbRFpOKOQ4NWWf4S+bZD7fzen1vM\nuTrwe2lb6SfrrLLkKZmfty6Wfvrd/Dutc4sd0taf7OX3fvFitr/259cAAPaU/LdV1p0vczYlLGOR\nNcT5nHSLlvL9vTutslX+v2Nd75Z+zr2/3ypbOLoWALCzXuZ96yvUbDMC8u0t2vuAVdZ8hPXL8Mlv\nzfN311plnxvswzvFdGsOjwO43l6glNoI4FYAyw3DWAzggUnu09DQ0NCYRkzr4mAYxhYA/jcUfw7A\n1wzDiJjXvPMlTkNDQ0PjnGImGKRrAaxXSv0rgDCAvzYMY89kFyqlPvvGsl375dJEgjTHeNBU0Z1U\nM4NRm0obkXI7NeN0Cs2QiAulkJ9H766CAjEC9vRzXQvFaCwMRk1aycZSTdi6PW55h3LSoNU3Om6W\n8fqok+9LjcnxSJhr91jAfEdY+KXgGGmDwQFRaYeGabSKxknxZDqFkvDb3MXHY5O7B76yQ6iXnDzS\nKIEhoah6EjRw5SZZt3bTuOkI5FtlnmyhPeaMSvuNEVJozsVyXc9rlAOiqey8UIqMVc8Yxy+WIdSD\ns1sokmQtHQYiA1LXxAhpqJ48cnwpIVH1Y+PHWeeY1MHZLH02ZrB+g8PSnng/Da69+ey8wtS5AIDg\nAPtouJP9bcfm534KAJizYIFV1tkj9JezscQq83TYDPIDR6W+eaRB+lJknjfO2QAAKGqmAbzHEFqi\n5wWOaeZtnG89HQcBAI7REavsZN5KAEBGn8yjspwW61zuAen3pRfx5+GlXra1slCe1xZYyIaelGdn\ntYnjQOpPFvHUeA8AIDBMqjbj0r3WcWqVXPvLalJv1+yksdeO4WcXSx2zDltlXQvl/iVbT1tlTx8g\nhfoHi6V8y49yrLKSAfmWey6X7/iKFxk69fh+05h9FanJ+w9WWsf/WSy/B/N8862y9Dzpq8Zd8jtS\nlUoK87URmWPhOlK5j+08Yh1/4ucy9/2rSJU1RGR+d778C2lvyl3WuVC2zJUTW9ZZZX2JH1vHmR1C\nR55qJ1X4vZ+3AAC+Prmdf1LMBIO0C0AugHUA/gbAk0qpSf1uDcN4ZLJyDQ0NDY1zi5mgOXQAeMqM\nbNutlEoCyAfQ/9a3CRZUi/HW5XFbZf5BWamHRijpB2yukvGYHKemUNJN8YgbZDIhEmCaj8bA9DSR\nJOqqKMkNBbqs4/GwSJXhCKXVaFikOKdz4nmUaksqquW+cRq1kzFKnoN+kWK7e2hk7+s1pY+QrOeB\nAN8VGRepzePhMxwuHntNA2N2Fo2myrBZz21YVGW6g+bbNLGwSFzpLZSkhzyUjOakyj2ZI5RU4lky\ntQr8IoX2F1NLm69E8hq7jO60Gado/O1NXSbvTWMf+wbl2WquSO1pbkrGQ3Olrb2jrHNtOzWB7jyR\nDk8l2Ce9p0WaHOsWrcLt5fwZVaYGlkbX2Iyu
MuvYnS/SdrCUBtRgoAWTYZlPpFpjmAbk3qRoEWk5\ndBrotDlPzL9IpN6hET4/OyFzdVVM6hmMU5LPSPkYAKCm5rdW2XCQcl9B0dUAgHz3j6yycpdI6Y29\nMhdb51OLq82R9j23uNIqW3yEmvcrDpGYC/uPWmUqKpLuwZi42FaNU747kyHzuErxm/L9ht9j+hp5\ntjd1hVU2uJDvs6P7StGS44c+ZZXt88rcSa3jM6+96r+s4+p/k3mr/uBXVtk2dwsAoOa4zJPtruXW\nuYuulPo2jPw/q+zPyv+vdfw/TY8DANZGT1plp4auAgD48+S76DLoalycJb8bw1Vsv/cgtSTvKvm+\nYtvZ5h3DMi5DqVcAAJIfoYF/2ZG/BABEPXz/qIda2Y/nixPEspO7rbL1mdSkpoqZoDn8GsBGAFBK\n1QLwABh4yzs0NDQ0NM4rptuV9acANgDIV0p1APhnAI8BeEwpdRRAFMA9Oj+GhoaGxoXFtC4OhmHc\n9San7n63z5xXLqpa3GaQLsoWo1A4StoonqCaO0HNdHbSgNvSJGq1f1BUSo8tornY9JPPKaDaurCi\n0jpOMXsxGiV1AEOujSXkXf2jvLelUwx0Q72kvXq6qFKOmn7M/gANiJGIqMYT0d/RKA24ETNiNauA\n1IfLSwolFJDz0SBprCyvzRpuQ12d0D+xEGmbYLGpYBaxPgjyXQnTcHwsyv4Mt4px7XivGJKVn37a\nMONGqnykmrIXrLKO5ziFpnNGGOUarpQ6DGdJu9wnaFwcH2mR9g21WGVbB9k/sW7x/x/vYP2706TO\nOZAye7R6xC90T2biMqssq4TRvaNj8m7HKA301fWk1OyYkyc+5+0uUmRrbpT5WdLJCNdn20jrOA5L\nu4v8pP7CJtVzJiT1GK+qtM5t8wmFceson5eMk5JaXSft7z9OA+rSQqlDfooYlVtGSfG15EmfpL2y\nwyo7epAxEq4q8Z/f3cO4ioI8mb/uUunHlib+tISqxXie3vSKVbZnEd932Q75bhZVc84trCKFacc6\nr9Arvb2MQL7+jMyx77dyvlTso4//qX+T34H8XzDKvbzrIrMNPwcAVBeyfb1zZe6mevibMdj7vHX8\n+Y+K8XrLUIVV1lAq/bjox/Kc0AZbHE5Q6NzKnz9nlfnGOd/3vyBzOuwjkeO8UeZD6ymhlTyv8rel\n4ZDUq3IR+/jDH2KEwJw/ln584j9SrLKKFZPTyG+FmUAraWhoaGjMMOjFQUNDQ0PjLMwEb6X3BJ9b\n6JEESCu504QSCsW8VlljE9X2ptPiKbB3D70tujrkfHBU1NlolN4jaT55TnUtU2CsXk2/9cpSoZ3S\n00l1eD2i4ja1yLu2bKFK29Qt7woFqXqGQnxfPC4eKXZFMB4TuiscEYooYWOwJo6dXqqRbjeHdtz0\n2mo36SwAMBz0DLHDGREVXBk2SissZVnp9LZo7ub5fc2SZuBoI5P9BbqEYmo8IzRDbjbbWhCUc1l1\npP2W+KiiL/VI32bXkNrzJqQ9+xulLa/tY+K1453ivzCUIE3n9dvSqSihtGI+xoYYfnnekEPGwtVD\nbyXDIzJTfgr7KyWFNJq/R7yf+sD0EuGhybcYidWZ7WonRZG2TTyuGjPpq58VYt3a88SXP7zrp1bZ\nb/1CCS7OFypLHaF3V2ZI5uy+fM4/n2uDdVxxpXwjXdeuscpW7xTvIveY9NO4arHOdR46IPXIsiXb\nI2OF0Ubx9DqRW2mVdR2RvljglXYsidFnv3uzUEhp1/Kb2fgr9seJQaFdQrtZlrHARkPasAf3AwCO\nLqanTm6OzLu0r5LmO/EQ4yRWbJe5+mLKz6yy8a1Cvaz6tNBBv4uyvilPy/OWlJCGU3fwt6R7UDyD\nbu08YJXFnZUAgHmfkP+H+3hvSo/M8y0tlVZZIEpvvxuzZCw7+/7IKlvQK2X1ZeIxd3Ae39W3Ufom\nmcU4jL1P
sO2Dt/0vACDzZVKlncuYRHCq0JqDhoaGhsZZmPWag2HGLLhBA2ssJlLBlh2UZJ95lkmv\nRoZEkhjwUzIyEiK5K/NvIkqDWCAk0rp/P6NyOzp5b2mBSGxpqfSLd5kaTVu7GBCPd1AKDZt1ddqi\nou0R3vGYSF8xm/YSiZuJ7BJSr6Rdc4AYdoP2zH8e9ofTrFfPEKXTodDkcoEj1dRq4pVWWXGaXPvC\nTvrlv/jiJut4OCr9M9jHSgW6pS7uNHmnI8QI4+NmtLRrL69PDNrSYxdKXxnbGEHsM6Pdu3pE4jra\nRsnRGxJjcF8F+78klcbusBlP4DnA2IdggTxvfFiMgdlu9sdgVDTEjASflxPg2KsC0ylgJ6W1yCpq\nbXak9kskbGYfJdl4qmger7zI+5vHOd8i834pbe20JT9MlTnWYMi8Kw5Qi2tMl/qUDbGO8+o4347/\nSrTW0isojf93l3wjiWGpX+MRJgHMaBU1Ie0yRvGe6mY0blX/TwAA69czNmDviLSpyyf9MDeH13sS\nMmbDeyi9xj78gnWc9rIZRX8b4zR6jt2LyVA89AwA4OUhaoZ/EpR5cvBFaprDuYxJUi4xOjsOUfPf\nul6u7fZLPMHNfrZ/z4ekP0e8NBqH9lHSj0KMxIMZlNwX9Ytx/vEXbgcALMzk/NzoehEAsDnOOf7J\ndaz/VtNPpGkOYx/y08WwfcR8bfm32Z/O1aIp11zB9u4Ocrz3Vptai4tz6vN5NtVvitCag4aGhobG\nWdCLg4aGhobGWZj1tJLHI2qsEzQoHm8UGuEHTzxplfX0kdbwesW4ND7Osold4ZxJUf1cDpvPsVue\n7XSyu4ZHGDMwPjpmPsOW6M+MPYiY9FTARnslzDp7XHxezJa1LxqVusSjpBUmnq2cE/WiyuhOSF3G\n++mL78hlqozCcqFdErbYwr5+1t+OZFioh7RSUnK7Tkp/PfqjX1plQz3cf8BIMfdGCDGwvTdHDPJZ\n3WYqk1QadNO8Qmk4w7y+oY/PqxowVf5Utifml3aH4uYueh5SPkP5YuwuDrP/B6KkWDwN5p4SXvrR\nO0ISd+AMmmNRwLiQ0jShCJy9pIoMW2LASp/4yA/kMXahbWTyuJEBQ3bgKlhNWvMnZ+TaYBdTIhhn\n+KzmPknQljOfcSMqJu9UYekn7yWMMViwSai7gkIa9Y+c4a5vIbekU6j+Fft4V4bQFnNWSZaaY9uZ\nFG75zZJYL9DOspEUUqCZebKfwW9OkAa5vkzG8rUhSRcS6SLtmlYuYxWLczxHgnfwfRUvAQC6dl1s\nle29hv1hx8JKGYfyINNF7Nkgbd30Uzo4lD/zoHW8P+1WuXfF76yyHUFxCkg9IUb/zWsuYX03y94I\nz15Fp4dVHVnW8a5Kob+qT9niKgal/YX9Uu/81F3WuT/PkHZ95g85P5uOkRJqGpK+Wn8Tf4+e+XUl\nAOB2v/yGdd/8CevcZUGZP09v/l+rbN26y63jSIE4DJzqrrfKwo+ZzhMfY3zI20FrDhoaGhoaZ2HW\naw4Tie0StqRqm18Tt6/jp20RnLmUXCMRWaGjNqNz0tQYkoapOdiS2DnNvajtyf2cDpsmMHGvg9KV\n0yNGNpeZl9trcB0ej8n14TDfb88XMnGlyyaMek1J2TB3tEskadC65mKRKlPcNHDuOsrEX+Pmtl8x\ng8PdcILpje0wckWSjMVoQN25VSTcxmYaKEvn0uAXGhbJ1UhS8kmLSft7s0SrKLXtQZySFKnJYTMg\npwzTVXDY1IQiQRqVM5widQ5P7JgXZl7GrCExSA4r9knClmhRuUViHkmjq2dKmkieqVFzj28vpeC6\nNbILly9EybCvga6Og2miVSUT3L3vwHbuGmZHXouk544V0BiackYk0sYIJcnkFRy7xb8x3Yltrrce\nc9fALZeKRtR7ilqNt1gkxIWpTIN9NJtj/Zkmed5Ttn3B5xdLf+f8QCKJb1
zNncTmRKTfd2WwbMFh\n1q8hQ8avJIOaWMR0nigqkrF7OcIZvXGjuOZmt3I8Hxx8xjqudchzkg08v8TUdhhTLNgbkbYuAw2s\nr3znIan3ia1WmaOo0jo+4zKN7hWU1lOOy9wryv13AMDmfkZvl4/IPFnyM/5mnF7Gub1kUOZMXid3\nFhgLiNa2+KTU79gazof1P5Gx2H0DE/+lDLN/uv9YEkccbqQmHb10GwDghSPXAQBu8thSoJfKOH6q\nnVHw/6eL9y5pF9fhnCM0Uh/6uNT1UnwJU4XWHDQ0NDQ0zoJeHDQ0NDQ0zsKsp5Wipvo6EiClcOhk\nCwAgaMvdlR217bUQnYgVsPnZT4QZW1u4kVaa2CfB8ToqyX6v/E3aDL6GSSc5nKL++2zGbCgzCjti\nq+Drtjcyzip0QJ4TNumFedU0Ft77KTG4+dw0mqU/TTX5uVfFz93p5fmBAVIadjiHRfX2p5BGOdYu\nFNSgi7TavA72Z0yJkdCVoO/92KhcO99MxpdI5bnsUlHpB518Rmoq2xoyI0rDHhpkx83ob5e570Us\nmxRENCRUhjfOZ7jTOD5hc4CcA6RpsgIyHqNlIh8V5jGm4nM3iprvLiKt9NqTv7aOn3hG6Mr0PBqp\nnTW2bfZsOLNA6KqanTQWN/ULLRDvpdo/OrTYOk6vECoqM3SFVfZCRCij+gaZGwNxGmzrk9JP/s6V\nVtnaDdypcFup6Rd/mv190ozg91ZKXQzbzoKn2qQvAq4Wq6w8lzvPedLFuJ5ZbKNRO+WeFR5pU6iU\niQg3FAtNt3wtDdLFR++xjlueEtqyrp5ZDF48OPl2Lq4tEh+RnXPCKlu5T2jDbYrPj8/n70F5u7TD\nVcid6y4tlTHcvv37AIAPH+F82n6FmRHhJdJwvcUXsQ6vCkUau3ODVXayQSgj35dlLNqOkiZdc4fM\nzxZbcs2Lx262jgub5LssPsRvJPUmqevIgBjbO4qftc6tqpfvvXkeI84XpZEyu+W78o38Zj6preaX\nTJr4i5gytOagoaGhoXEWZr3m4DIN0iMBSkrdpptm3GaAjdvcTCfcQuM299GkWWaYxuUJI7Qdidjk\naW9jprE58Tph3PG6P3aLc5rpyupzn230BoBQaPx19ZR3i5TmMJ+zbBElM7dPNBCHg66Kq1fx/NCQ\nSI5NbdQGevvYX3a4sqXvQp226Owec6c5Rc1jPEapMWganZNDlPZKElI2EJB25bl5fWBM6lFik02G\nW225iZaY7W+mNBQbF20jzdzhLmBL512SZkprthTauU5KbmlucW02yvm8kF+kQtOjFfNvZwppZ7lI\n0Tl9HLS8NTQIrukWrahhnJLgma7JXS/LskW66zOYhynaLn3b56d2kx2n62nXYnlXaPF2q2zdaXMf\n8EOiVdSt5s5ez5l9d1WVbY/sEMfK5TXdqVPZ3/WnJLo37DFz9mRRqg6Uylh99MDVVtnPhmmwX36l\njMH4GMdg8xxpy5Wn5N6hpexPb6o4LzQHKcnOWXDGOj52idS/7RQl/5xaM5qdm5kBAFam/AMA4MGm\nh62yq9fK/G4rpKZ19GGm4M4zd4tsP7HeKssqFk2sJCkaQ8fag9a52l9J3rHoUhp8+87QbTT1D2Wn\nQv+rNIAX1ohrbvb3ZZzv6KRDyIn50r6baunAUK6oqX37f8VBofyjHJ+ePTI+QXNv8+uz+X14D+wD\nACzM5+/Hqpf523T4WtGavtBEbfX/zaNzzlShNQcNDQ0NjbOgFwcNDQ0NjbMw62mliWjigUHSJAPD\nQo0k7VHJCVJIE5HCsUmoppTXGYbNd5h00ut2L1W8cIIciNuM1BPGaad5nWFLrDdxp51KctsS5TnM\nFOEhW7K6eFjqP7dcfK/XrmHSs1BUaB9FmxoqK0grfOKODQCALduoOje2iw/7Edr15D1jQr10tNDH\n/VSH2Z9jNqot05ZuPCwqsjOPie3aBm
Q80kwj+LiH59wj4tfeFSH1U5xNymG4Q4ybyTRGcWcHhVYZ\nisi70nPY/wPDIuOkg7SaK9VG0yXl2mSYbQoWy9Sfb/rsr0snbeQeFnqsO4tjVgv6vC+7Uyiul15i\nf55pF+qGJQLncengH43w+S3DkpTt0kpGce+Pcqz9eUJJZOxkSvmt+ULNrF0m0bapisbs9cukn053\n8HkrAqTYtpfIsTNGWkP1SBt7DaFOKm2Tp7hfxqwri1TY6ixSTEarODs4nLznKrP+zjyJ1L01h+8a\nTpEkjXldtt0DN5J2Weox409uoVF+6Ck5z/SOgra5EnlcsGCtVbbpSelPhy3jwR2L6OxxZlxiInoK\nSAM995LMz1VmtHubLSNCyqVy3e7jNELfdiOjjQ+6ha65K+M1q2xf41MAgJHLJUFe5yh/Wt0Z8mz/\nYdKayXQau50LJFp7k40auihVvpesNrknNX2jdS5QK99CywgTYd7ADOHozpQkgl+bw1iSyzP/Ee8U\n06o5KKUeU0r1mftFv/HcXymlDKVU/mT3amhoaGhMH6Zbc3gcwIMAnrAXKqXmALgWQNsk97wlhkMi\nwZ5u4yo6sVeyR1EziBpclSdK4zbJ3brSvCcBGoxC5uY78TfpLmdyMjdG0SISlkTCdxmm26zHFlEd\nskVrw3QZTdj2efYmRCpcf5EY+hbV0ljmVGZ6bNs7kk62PSNHDFdXX0bp9Yq4uLb9+vcvvq7WzUNy\nX6u5LzMADA+LwbcINED6A4yORZ5Ifsl2tkGViVtixJRI8wcZDR3Nl/7K8PH68TFGEGNIjJUqm3KC\nPyrvK/SJG6HfQa3DyJD+TI9S0zAMm1YIcz/hTEZ4V5mC2/KLxBA7f36ldS4zZm72U0R3yJ5hSuU5\nptax4mbuU7y6W8bliW3caxgADhdLn21o4NRu6jI3gFrMKOfImausY0/jj6QNydutsmS2jHH5fpGc\noyV0K84v3CD3RdmGbSH23Xwz73O4iLJgb6VoRTVdYjCPeWn8PtMrkmzJUW4OdHoJVUxnmmgFVyy2\nRRDvlojeguVirI2t5jdRmSV1r1nC8W5z0HU4VipzpaqaRuwTHzZdtV96FHZsHzU16Fc5vkPlIsm3\nDlAbyc3mtzCiRJ/7/CF+M/9l5tk6XCUW72vPtFjndgyKYbv8H21j/jINuq018luTe5IR96c94gDy\nmYCU/UU2tYQqJY4C9ZfYcnvVsP2rnhDNZ3kp58PcTnle+1WiiVWkcrOw9nG5Pm2Y38zO3Ous40WL\n5ZtznuD8qV86uQPKW2FaNQfDMLYA8E9y6lsA/havzyKhoaGhoXGBcMEN0kqpWwF0GoZx6G0v1tDQ\n0NCYFlxQg7RSKhXA30Mopalc/9k3lk0YlZuaW6yySEAMkw47zZKkUpI0KR+nk2ujMg3HE0XRGFXg\nqKnQJGxLqWGLXlbJs+MfXme8BuCydfWEcVq5SQ1M7PBmR8JGe5UVipq6bLHsXJVmi+CORc32JG11\nsiX6m3h2Xh5VXXeS77YjxyWK3fHjbFOWXyiC5DzbMyOMZnX2iMGzJ5PvL/ebcQlRUWfHs0j5OAwx\neI510GgacpKGcBVIuvFCN/epHjJEVW4dl3uzM2xb4fmFWnS4SHuF3Oz/jLDQVGNjpAHcuUKNLJsn\nlERxBsdnIo13t20nNp+HDg1neqUuudVMi56fOXnEeU2P9MNPfTTgV2dK0r/4fvZXTiV3QUsbkTHe\nXcM6XTQk+02PpAuV02xLz96YJX7yq/OXWWVG/0vWccApYx0JMA4luFWoo2CKOSeW0aJZni+018uX\ncee4pWQFkZEnlNXJg6QqlhXL81zF0mcLWyutcwWp0vaOLLZ3UTfH6lCdzIN8F2mhQhf71o7LqyU+\n4RfbGbOzMNdMk13FrAFtT3KunqqXHd2+V8s4kAUjMn9OnpFo731+xnm0lMp8+Zd/ptH4lTgdPC55\nVV
KW/6qQ38AllVsAAJ8bkL6py2DcS9/TYkxuW7zFKit/jXOrwSE/f5XPcA6lXC7n3VulH1z/hynf\nGzIqAQCLGv/BKpu/bZt1HD08WKOIAAAgAElEQVS1AwAwp5D7xP/2Wfmer+SGcm+LC605VAOoAnBI\nKdUCoBzAfqVU8WQXG4bxyDTWTUNDQ+MDiwuqORiGcQSAtdybC0S9YRgDb3qThoaGhsZ5x7QuDkqp\nnwLYACBfKdUB4J8Nw3j0re96a4wHRd1va2H6gWhcaBSXl7qwA6QhDDPmweWkmmuY/koxc4+FmIM0\nz8TeDPbceXGD90ZsDMcEksbrAybcSVuqDjM2w2XbCc5OQiWSZmLAGBW74rmiTM0plbXUY/Mxn6DE\nUmzpOCb2mACAhOmB5VI2KslxdnoQABh1Cc0QOM1M+mEziZ3qYS1LPPT8CJRI31e1kwpwpJs74UH8\ntSN+ehclo0JdudLZJ2VuHrcPSz37XPTuCEalrzwxk57pIqURGBcaKNvm/WUo0hadCXmey0tqJ7dW\n2llXJB5HXRn0FBk1Kam6MPs/VMgYiuouqYsjwbJ4clJlF5lDJuV1iH4Y7YVCn3S7uEF9LMKdzRzZ\nMv4VgSarTJllvr0iN6XeRops8aAc5w0yLmGouNo6zhqTMSg6TO+bkyUy/i3mPhtLOpmeYb+ZbmPd\nIqag2DPG72HNXplnrR7SKisXynwoXiIUji+P869zm8zVgblWEXJLWvjPkOzCFuzkmFVlTb7fyOED\nMsaRilVWWYtT9tcIt7A/5y1l3foKZB6t8rNPDvmkb+uTQr20tXEntYvm/gAA8InFjCVaYND7PmoI\njXdpMWmsH/dLHEpdurSrd4jjPX6T0KNzo6SuunuYSM/ZK99X/FZ6MG0xnc3uuE7GYLifzHvuC1KX\nUM5DVpn/bn77r4SFerw2yxZHdGhyGvmtMK2Lg2EYd73N+cppqoqGhoaGxltg1kdIhwIihfsHR2yl\nIvn50igNOuxZ8UwDcqaPq23NPJEqjjeIxDISCNquN1UDJ2MflJvPhuIKbb3PFGInIq8Tcdt+0GbC\nP3tiPefrDMxy3jCokhTli0EsJ1O0IRWj5J/mEYOey6Z+RG1GcmXI8+K2ncfU5IoDPH4xHPeMUTPo\nzZK+qLYZsQdyaGDODJjtL6RkWlIrKZ4Pd4nEmdHFqRb0S1udQ4yaRhaleo+vEgDgiDLOICvNTHNt\nRlWH0illRgekfYMelnmc1CwyIJKrY4yaSImZZtwoEUN45ilbhHCutM2Zyj5MaaQWEc8XqdDRS4M6\nnJMbpBtLTS0oyPbtbhYpc2OmLTmeLdp2j6ltlIPaRIZHpMreWjGWVh7jvd21YvzMLbLFtmxl4sVd\nZoR7ytU8P3e3tDvN3I3vWBHjGOaPigboDzPO4fIEv6/2cpncFUdssTuG1G+e2cfpAfbd/HkiyR5T\nbGOgm/NrQ55onoGozWEi0/49ExWN8u6RYpsmelAM8b3rGAG/L0ktoTYu0vdOH6Pwy5JSp2Ez0Hz+\nMhqfG0YldfaKq/m9//G/MY7gl+vFsXJz5HGrrLBENJD8r0u/d9x7i3Vu3rDsXX3yTmoTeY99xDou\nNrWDoycYVX5lurQ/Yf6O9GSQYHEmpX4qnU4EL++m8fmmkzJ/YkU02s+teOfm5QttkNbQ0NDQmIGY\n/ZpDSMTlgG2znwmk+SgNJsM8H4iLxLq0jhGud95yEwDgOz+R1Lz7DpJjTDXzHpUWkeft7edGNKGY\nSBj2zYCUknV3ooMdtq5OmjYCe8pwj/vs/alDNu0lI93k3M3HRG179E7kaDJsEeE+N9seNzWHhE26\njWFySbd3QJ4bNZhSuTwiUmBeFm04LSHWPegUDv2yBdxs5ubrrgQAPPSkRKA2jTE6OdN0I8zZwP4c\n38/2R+KmBGjbvMgRl4Y7R+Wc22BdQi7TlTeL2kLcTSlU9Zp2lBRK
x6mZQoBnhYU3dueyr0cCohV6\nbeOTaYtIjsREK3LmkIePpU8W2wl0Zos2sz1JqXahuTf0yrkco6NO+hiWd4vUWHoFI9rvuVKk/2d+\nIq63B3oo6RadkcjiplZKnnMTdGV1r5Vx8TWyjhVRac+vIdqPD0zvHCwTDXBBF10/eyOUojsy5F7f\nBs6RXDMiuqxPootPOKlF9pSKTWAxOJ6Hi6jZnOqSeeDO4/j9NovvsyNvrYzlga8ftspG/1g0p5r/\nZht8N9IttvGkzOl1SdrEln9GtKL+Rebe3P7HrHPRI5Ljaa5xqVW26XZmzZp7RnI11XSw/T+/Rsa5\n+cPSN5c9s886N3K79P+Ge05aZTvr+PvRmSJzcVkHbTzj10nE/Gbzm72+lzYtb/DT0u55jVbZ6iG6\nuvakiGYRtLnqFx+e2O/6jzBVaM1BQ0NDQ+Ms6MVBQ0NDQ+MszHpaaaDfNNwkaLydcBG179yWjJMC\nqCgT49mHTOoDABbMEze45UtEZdy7nzRIabG4+d179x9YZQ2nqNLtPyqGrtFRGmkjEaGxJtJuh6J0\ns5yoVcxWv6jNrdVtus56MmjwrKqU+k3QT04HKQmXuca7lS1S2xYt7TLZLmUrg/P1rrYTON0t0bEp\nPqqxjkQLAGDclhbLN2ozlpcLrXHbjTTCVdWKqrxygRjHTu7nll75S8SQ9rHln7TKWucwAeCZNjFQ\nNg4yojcaEUPnuEPU91iQDgGuCqFTxoZIJfh6bNHcpodj0RANvAsWiEHUVSjzQvlJyRSXmK6Fg5wz\naXEmsvMkZWyD4zarfnTyiN5Bc27MTa+zymKbxDD7Wg3fmem07ZJmZme+p4zUX1OfGEQX5UqU7a6x\nV61z6QGhuTZ8hr6ix/2sT+UrQmF4EqTxjDvk+KpDQjeMDTIF9oC5x3mng8ksC0/SzXL+dTIPfnOM\n77sjTaiTlg/J/BksZErxRQ2SZryvnxHXxemkrNJGZC7mje+xyuZ0y1h9C6+Ht0bm/5/dyfTeP/8X\nmQtnFpKW2t/D8apzCQV1219dY5VtGjaj/qukn4qy6JY6t1z6emQjw63c/5fzqfBKMYbHWm0py00X\n+Da/tOX7G2gM3tgr7+/fSJqqZ9Fe6zjxM6lDzWc5Hwo65fenIy51edXLb+9ThZJ8cXwX3Y8jtWzv\nnjyh0b54mu68u6/jLnlThdYcNDQ0NDTOwqzXHCYCxrw+SpJu98QezDQoOm0G2Ntvk9X44rVLrbIJ\nR9LiElnFnbYgs+J8MfCsWUYj2mVrKAle2SvGrZFhSi4joyIxD/SLBBGxaQ6NTSLJvbKDRrWobTMg\nmOmmS4tyraLaGjG2pZrG11CM7XE4J9KD04hn30jIiMXNd1AStueGsiPulX70p9Fg5k43+zNObSJe\nQin9o1d+EQBQXUX3Qa8hWk/pHNNwXGhzU8wXieeSdSzbkMvNZLoHpB1HuikteUZFWm/vlT4O9NBA\nPJqUsqd3ss7xBKd2dsCsQzHHb0m5aYgOyvgoxesTHWKcdhbQ/TRhN/AXSd92HbfJVsnJDfxza0y3\n6hdYt5x6kfLCuZTssnYwQGrjSnGUKLflxknPEYPj7jwpqxx72TqXkivje/MtTHldspv7JcfWVMo7\nTrE9gBg/Gz76MQBA5hkGnR1sEKm8v4lOHB15zH01MC71XxSmplo9R7SItKD0XWh8gXWutVfmVKst\nLTh6+ew1ZirvhM0BI8UzeVDh0z+U+076qdlX3ChjU7aCDiZL9/2Sz7pcNIbGAAPn0PJ7AMBVi0Si\nfvIVuoUOD2yWZyzkPt0pX6Zbb6hbtDzfsb/n85rF2aHoIvlbgg9bp8p9ouW1JqmJhcvoyltzuYzV\n6cEHrbL+LX8DAPBk/gIAcM+HrrfO/es+MY7/UR7zax3ZWWkdf36ZaNyP1nB8ctwMEp4qtOagoaGh\noXEW9OKgoaGhoXEWZj2t5PWJ
ip+ZYYuO9Yt6Grb5ZtfNJUVz9ZWiIqb7bFHLpi+/MWEuNhixXF0h\nBr9cWy4gj63nUuaLcS8Ro+994g0puz2Kz9u8QyiZbXupGgejVKmdHqGJogk+LxI2o3bNaN80D2m0\nZIq8K2KjjZSNVnJ5RQZwjpP6iCcmSQgFYI7pgz8vSONtm0OujYTo172onMbFldeJIS0tadudLdWM\ncB2QenhtBveaYrnX57ZFubrY1tpS8VvPyK+0yjKDJvWQkHt8xYx2fmWrUCLPdzEXUXejbT/pYTO3\njYf3jMSEJqmISzv7bRHVwxUyV4pCNKo6QBpkvE/6szCbZZkdk++0Vdkp9IFzhDRcLEfaF27knA1W\n23Ydu1py+vTbknaVHZZ3pTYLvZRyinl/hq8Qn/pNbTS43ng5ac8jTRJjcfVF7JNnh+T+JT6Zg64F\nNPDmlMl8eja42SobK+C9fWlCx666jnPwxQVCU30yLn2d5r/EOpcsE+NrgYc0YvsiOls0jpt5wzpo\n4B69Y/K4kU9cKo4NLx8vt8r2mnmnXLtesco6mSoJ962V+RnrIBXnXSf983SjUHtNo6TVcq6Q34cq\nW1S7yiVFE8mTb3DLOlI112wTQ3xFtdCjrq3cOa6xTPqppZNR32taeXxy2dMAgFt6SGMVL5P3pYbk\nWwm42P+fWiEU2CshpgUv9rH+m8wMA8Mn+Xu1JGLSolPaHEGgNQcNDQ0NjbMw6zWHtBRpQnYWJRGn\nEoOMx8O17+br6EZWUy0G0WA/V94JV9gRv6zo9qyndXWyUvtsG54kbBHXE0Ll6BClR4+ZKyktTZ4T\nteWmGR7pNN/JsvwcPtxrSvpDA5SeTjVLROzS+SJlJeK8N2Eau2OwRezavFon6pLio6SeSLxes5mA\nzyUSWU4W3eROtJgaRxaff8m1jIaeO08kSdVN98Rkwuw/M/ul0U+p/aJykaQqbLmYegN8dp+ZFTQQ\nsRkos00pKCzG0K4eajEpQ2JULmhi1HBaIQ2aTjOv1akY+7P1kEiM6dfKdZlx5nnKcovm4Omke2O4\nmHXJgTgteByUzHpzuJ+yHYc9YoSsK6QDwLbTMh7eJXRdvKeSmpqvSpwPjEG2p3VIDNFbsiRj6Eux\nHda5/8/4BADgdtt+UaMH6BY6NN90zz5Ko/h4n2gTBavMSHHb5k1nquW9R4KVVllmlG0tiMn87dhP\nA/iSbtGejeukH3x+ajYdQzKfkxdRk89sZQbZqgXy7oE4DfDZT00utx4Zknn0Wgd/uha/Kn2bMcoO\nKLucmm37xEZeC22Zb/fK/YXjInEHSmms3gBpS3Mn6zvUwG+7Iiku8Pe20KFk/xrJ7+Q0HVEq59vc\nvvtFs8sap9E7ks2Nf274X9FiHs3m9/mFYjF6h+bK71qig27yiyeYih7+Bo2NVlrHdSniglu+mvOr\nbd8SvFNozUFDQ0ND4yzoxUFDQ0ND4yzMelqpoEAogMJcqoww4xsqykgLrFpEH/fQqKix8RhVtXBY\nqJP+HjFC+lw0IKdniuEwnKAaFwmRZkhEzWuj5HLS0oQm8kzkxrb1tMsr70pPpYH44noaEIvyhbZ4\n8smtVtmR40KjrF8nydi8Dr7fMJP8xd7EyByJRM4qs280ZIdaKG3xNNqM22dEVV44jwbUDWX0y3f0\nCY2XSOH7o61CW3R1ioprY82g8oW6aiq3qd4nKaekDkodHKM04rrc0j8pfrnOkUpj6JYiec5QJsdn\n3Wr62dcUCZ3yw18dsspOtMv5dR4ZMxVl3Z1j8t5BF8czo7fSOo755DnjKTSKJkdJkdkxHJTI4sok\nDZSHzDaU5ZPWPOYipVI/KIbO3DYacLMzhJq4ONuMQ8m2OSRkyfH2bPZX6BhpvKIMs/N7bLEDPumz\nNJMO6zhGWszXYsZmKFJ3a9P5vNyVEhvwywMvWGVhk6b8ftNlAICr5jLiuzZX6j60+2Kr7FQ153
vu\ns0LZlC7+mVXW10NayA7/PEne93d+1veTS6UfNwYYt5RVQYeMki5JQjg2ejOfb9K02yHfWLeLvw9D\nEXlH3hh/Hy5bwPHddfw7AIDld15mleW3mfPI/H7G95GmbB+X+J9wA5PxlZQxZXdlnfx23ZHPxHvO\n398h994usSELD/C3Ym+xODnUXcr2LnyKFGSvGQOUMcLkocHlE22px1ShNQcNDQ0NjbOgFwcNDQ0N\njbMw+2mlHLHmZ9niHFzmkldTyT1li/PIawz1m/7rtvQaibiog2XFQlOl0lkJytwngVcDQZv3hjsh\nL8zMYb76hLl7XDRq7gftIW1RXGwm2qpm/vm6aobTX7pG/KyPHqJaf7xRjtv6RT1cWmt7l7mfhDNB\nVTph24tgYsc5e6K/RGLyfPmLwkIPbMkgJRePS0qAVeX0XU9x0pspOCDUUQC23ddSxUOkdr6o1L99\niT7hyi3eLs5u0hwj/eyflHSpQ/5c0jbDp0Std5v7KmTbPI8Wp0j75mbRe2ZFLcd7RYWo/yf3kHY6\n2C/J7461i5fKyjR6Ew1HE+a7SDVEbT7vo+ZmyEYbqYPUCtuudjbcWyWU2w9s6ScynVKPuS1M9xC4\nmp4qY2YqmEgN95/YtyPLrFMlACCnlGPZ6xfqKtbFOlxb2mYdnzQ9fOLzfmqVFTSK9175aaEy2ocZ\nBxS6SLxq8qP0na8+aUv0uEg81VafJi22d1za97mN4jk2sWMhAAReE3rsoC3ZXkY/YxpCudLPbXmU\nVcfS+Gw7Kjtkn4NHN5KiWfSM1O3iFWxzW+HHrWNvQr6d6Bm2wb9Oxrj6uW8AAApj/2Sdu26RPOc3\nO231uYztyc0Vb6FjB2w7BZaIB5N3m3iRZZSwfdn5ZpLE5fymDBvNaJwWeqjkx9zzo/+2J6SsX6iw\n6DU/ss6tbRfPoz27mY7ktWIm+luelHk1Pp91Lmu6CO8U06o5KKUeU0r1KaWO2sq+oZQ6qZQ6rJT6\nX6VU9ls9Q0NDQ0Pj/GO6NYfHATwI4Alb2SYAf2cYRlwp9XUAfwfgy1N9oMfMR11SzBTFOeaOZVVz\nqDlkealZxBKy4idAo7PD3Id57VpZlZ97njsr+f0i/fmHeH0oSANRqmmI9XqYDG4iiiBmJmRz2/ac\ndpsxFC7F6+cUUXOoLBUp7kM3c3ew7z7xDABg135JFT6/kucm9IVggJKj05aS2+2U96Tn2PZdthnj\n7YhmibReGaVPdmGWSLBOH42SqcWcOmZ2cmS52B53ivR9UbnULi2bUlY8IFLmUDeN6oNJpkcu9lQC\nAMJZfN94tRxnh+WekbxK61zY3JO6qoiSV5WHKaYXLhMZ6PIN9GX/2S/ESH3moGiRNbfTwO4zpeCo\ns8UqCwXYd1mpUhfnfBpN+9vp827HUJNIcYG5TASXt0TmWkE1Y3Puu55z68UG0SjH06iJJZbIO5Mj\nIn06AkyBXlYkUc55mWzzlhPUHHt8MpdXxjmmLetk/Ob5pQ3z6hjncLpftInCNs5J9wpqalmDUpei\nO2kQbfmtGHYdu6UdR7zUFK8olvbU9NhiL6KMuUhZIt9GdYx93Fk8+fw8mHEAAHDpd2i8feIq0aqS\nzzG24hP1jCP4/nZJWpe3mpL09aYm9/OV8l1te4SSfsFpmS+L/nGjVdYI7qx32Cnai6+NDgCdYdFk\nluVIUryMU7da51am7QcAfHELI6C/8Mfsn/6AxO6c+gzHTPWJdn20V34qP5Jzu3XObabiLrn/gFW2\n6nGmXH/GJ/OqvJvJF5uXTmRjYLzX22FaNQfDMLYA8L+h7AXDMCZ6ZSeA8rNu1NDQ0NCYVsw0g/S9\nAJ57s5NKqc9OY100NDQ0PrCYMQZppdQ/QGy+P36zawzDeEQp9T/2ssxMUaHmlFPhyMsVNbO00LY7\nV5zroMOkkxK2tdHrE6NZWanQQBVzaKBrbpUUCAsW8XmxqC
1/vxm34LAlvkuaBulQSDiXXA93auo1\nd68L2fLXp/poLBvyC9WxsI4G24oqad++I0Lx1C+lyp+XbhrM46yTfWe8ib0dymw58sNRu3mdcMZF\n3c6oIUWUekjU/QW1fGe6j8Zdh9M0lho0iMdi0hcLlguVsWAuDdhdB4Uaq1jKNmdm08A+FBY6xTvK\n96mQlLWZqUIys9jWaKcYHDsHSSupNLbvxBEZ51Ur+b7GBhmP504KDbC4g5RDcVLa3ttMKqzARoN0\n5sj4FORxvMOJyffHyKgSem3NEe46+HRI9hJI2BIhNv6E92ctflaeaTNiVyRl7vVXioH/+nTW97VU\nOb4sk0bzwnQaIPtdQou2rqTB/vqYUBK/65c+ub2GcQOpjWL8b0iQurrYxRiBwJ/JvUUP0Gi8vkri\nb37w2iYAwOeyuJ/Br1YJhbI2yjFOcx60jncMC/W2JI91KN5BismOsuJKAIDrdtK0t0p3wVjH3fGe\nreCuje5U+Zm7tPBhq6zlkFBDxiXSJ/UnmATz+HKZO+27OCZ/0mJz5rhRUn+M3UsDePdxidtIGOL8\ncO/FjIs4+py0O38O55DxG/5ezbtTnuM/xXQXjgzpi5XmLdsWMm5EnWgBANzxPOt3ooVJItsulm+/\nxEPqavFec64yN+PbYkYsDkqpTwK4GcBVhmFMnvRHQ0NDQ2PacMEXB6XU9QD+FsAVhmFM7g/4FvAH\nZUXffYBucnOKRZKqraH0PxKm62bSXH6cLkq6434xhRgOWWHXrKXxcvseefYltsRm2QxehX9INIGR\nIKXp4IQLqxm9HLKl3z58RJy1yitpkEx4mEivuVskskwf9/xdsVwMh1t2SPrjgTFKianp0g678BqN\nUfJPmOmpMUyjY8hWVzuGcqTew4cZTVxhpirPy6Fk09xHKTwxLBJwSQEl3cFxOe9zyJAuW8D+PHxM\nXCQXtduM9HN470hI3DrTQU0g2iWSUbxYJP7CU9TidnWIZFxTYZO+bUkCx0fFWFgQ5vsm9r3u2iX9\nOdJHV8DsfGlvZjrHpz1OqdedKXPJdYb7iPeGOVZ2dA7KtdvKOR5la0XK/FCMknwm2J4tfTIXs+J8\nZp45aUNhGcPOBXSL3H1MxMsP2VypWwtsEe7Zkqp6tJHnt5gp0I1xYXFfO0wfkNDPxXheUEzpfaTE\n5lr6Y5FIq/MYhb6jU4zN8YikrD6wlgb2lZVy78Ahasr589j2SL8YarvDm62yrd7Jd9brbxXHhUSO\nzXi70WxLJqPM07qZvr12WPoncIoOBFtioskud0m/713C34ecEXn3H2SSWRi98irW/ag4H3ijjJCe\nMyR9HGiSex+5kXPR65P6rein+2ruPXx24KgY9r2F/Ca3F7QAAHbvFMP06mPU3PYulm/hV0eoWR+Z\nQ4eK9Vny7JE+uro2lEtd78TUMd2urD8FsANAnVKqQyn1aYj3UgaATUqpg0qp70xnnTQ0NDQ0zsa0\nag6GYdw1SfGj01kHDQ0NDY23xwWnld4rTjeJ2rV1OyMmb9go6mVuNqmcaJhqttNMqudJIc1gmPsz\nKIeo70uWMHr15e1CsbSb9AUA5C21G6eFEglHaS4ZGJHn9fuF/jke5ubiITNp3+pVi2x1siXNs/Za\noKq7sE4MWA2nRV1ubqYBqtKkSEJRGsHCcdbFYUZ4h8ap1ttyBL4Ojn1Cr/xkL5N2Xb9SYio8uaSS\n3AFGzwaqzH7sodG9OFX6JD4qdNmKi5hPfuvRFmmDbae1yjijxY1mMb6NZzIeMjgmbeg1fcsPp9NA\nl2omUly4hH7ug7aIZ2fYTEyYT6riItOIfmpQ6tzSxQ4pXiTq+mAvqRTlp9/9mCFjpdKoeEejk0ec\nb85+HgBw4Ds0wN69Tuq54wbSfNeMsz3LXxXjdddCzplDVwqlt/Ck8JntmTfw+uT3AQDHS0hdhY2T\n1nHrVkm2dvF60kSvnh
YDf3pAYgAamukHkrOhBQCwsnaNVZYxwrqWtch3oD7K/lk7VyibSFyui22n\nMbSrRuocjpEGabYlpsNVYsT2Pcn6peUxxsWOxR6hiI4+R153/eUy1m2LWVbQw7iZ2Dox7rY+zu/3\nlp9Jf+/6qVCDVXv4/TWMyHz+St5+q+zWXv5UXpyQOfjSAGlP55jENczPlnmy+9uPWOfWFUq8wUUF\n/D56/Jzvox75LWlYbUvL8LTEXTQcF+N3rZsG88vXy1wYXMP5Uf4zfu+7Ter1KwtJI24eMa32+DSm\nipnmyqqhoaGhMQMw6zWHY4fFtTPFtmfyiqUi8XhdlCQ86dQcJnZGczopacVN18sJG3VxEVfdpOmi\n2tTIlXpOHg1uoyMizaSl0l1zuF80h5dfFEOd07YH8ZVXSjRjXi4liXjElqdlohIO1jknTepaXSYa\nRFcrYwmj9eb1NoO5C5SSPR5Tirbv/vYm6b23HhKNIcPg1Jh3kUi6+Xk2ycZNY2SeuQdwag2N5NGA\naDHjSZFQfaM2rcPcqa/piG2fbls0dNJturKGmdtoOCh9//IvRNKL5VNLuv06yflUZktj3WELtSyI\nmVHVMeYvQqpoJRXz5B0DRzr5/spKAEBKOufPaBY1B1eqHCdsEb1qfPL+zO8xo/UXcqxzTIk85qU7\n4zNtdADYsF7akfDYdofrlXemDIrmWFfClNfPm84HOY10VVZl1GRqq+W43E/tJbVW3nFqzy4AQNqh\nvda5gntE0r0lhZLx9jH27Z4ikXqvaqMmOpCU72XdoNR59zJGQ3uaRQpeM5eaRm+EEb3ul2Suji2k\nU8K61Wbbv47XYWybaKIL1/LdO4IyF+c/y/kX6+Jc7wnKxskp6/n+sd9Ie+tHROveei+j0S95RVx1\nC4PUNBJezvenmu8GAGQ7HrTK5t4kGtP2h+T7ia5hdobTdSKDl/+SH+iO/YzIvtjco/3WFzgHX2wX\nh4PRP5M5ktrMsWiPSFnpUX4DySV0k62NiyPOzi4yJy3O6/BOoTUHDQ0NDY2zMOs1h7EhERFvu+V6\nq2zdanGN87q5UhuKUslEUFg4QunKNS5d4VRml0R4b2G+uDQO9lCS6O6km6PHfE/MyXtaTomkGzb3\nta2qouRYYG5MFA/x/U7bSCR9IulPZHQFAAWRigrNoL+TfnK63Z0iQcwpswUOpdDVz+kUycShKAsk\nbM+2Y3BMNJhrbyXffPNqkUi9BiWRZB6fP+KT9wfD1H6ccalvrks0hrFx9l2mQzSs8QHmpjkxanNz\nNPMYufPJGw9vkf50xCgdAEUAACAASURBVEWCrk2lK2emV94/HKP0njducz3NMF37RjkHfG6pa36P\ndPyOELXInj7ZOCXPlgIy0xbgmBMUyaw7ykEri9IV2Y6+3eKO6sm+3CqbXyntPxrgPsqLy6gJqDOi\nRfXW01U2a4/w0pmLpN7RJtp8aiul7xqGmcX3T4qo9fSbbtptKZVWWfY24dydgyLp5vwptZjKLPk+\nNh2je2TStlnTnIwWAEBLE12bYWqNLSVis+o7w/mxoFA0g8MhfgPlK5+3jsfmS9s9c2iDO/AUtRw7\nDl0qwX3X99EttEZJW/21bMNiF3NddSthF9yb2YjsL8nGQ9+NPAYAWH+I/bV9s2Ts9bk5ZovWkBXo\nKJB568y4ySpL+c9fAQBiBSLBr8v4I+vcyGmZ+7tqOT7XLOV8az8o9riBZtZvg6sFAJDZKX9Hn6L9\nY9nd/woAiN/4Q6sssWWDdeyKy+9HZ9I23zv/1zyaepIJrTloaGhoaJwFvThoaGhoaJwFvThoaGho\naJyFWW9zWLdG+LrcUnpqBMbEcyBs89iJ22wOSTN9U9y2M1rSMJPnmUnXnKCf/DVXSuh5Wys5w5IC\ncpAlpmdTTy+9JbIyxBPnxhuEt5xTRhtFRro822XzlnJ57EMRN8/TQ8Rp7jaXny5lY2N0
xzlySHza\ny/OXWWUeH9s+sbWDx03vIHgmTxS3cIN4b6QVk0/u7Jb+Sk+nXSASoedHPCZ2hbQe2/4YZv9EItIn\nyVyeu/xjGwAAXe303V5g89wpqpa6dfdRdokvEk7/0jXiKVNdS++mPKf0rX+QHmF5DnpvhMxxznXR\nt9xpptJIKTFTLxzg2L56SObKx4vpoZKaQk464JbzuV7aNQYckweOFK+U+bk+SU+37jyp2/Iepuxw\nBJl0rjUmfZvlr7bKTnrF1uIdFHvHWBrTkdTUiheWo4MeQgHb3g45EYmbONBLG4/HnLPXlcs+AVkg\nh79im4z9Sx/hnPQ0si7JiMSnBK6h91faUXne4ALh14O/49gNpUtiueo6preoOEx+/Uxc+iYjxtiH\nwhLaJ+yY1yjzILiY3kDRLJlHawboUXYqxLHGaonBKKjfbBUdeOU2AMD1lZ8CAKR7eO6GWyRW98Um\neg+VjTFuZMQnO+F9qInf+4+T4n3lqpe28ssGmofk9+MjVfSobOxh3632yj2uu9gn2x8Xm0hirSQT\nzB7n3gwdEUmLMbjpaqssrYIxQ6Ez0o/lWxirdesym9FoitCag4aGhobGWZj1msPcSjNacIyeLT1d\nIlUrm+t51KBkN+EBrWzeOzCjXnPTRDJLJHkuZ8IHv5zStNcueZtCek4OtYNLL5PYAF+qKXEmbJ4D\nKU7zlXyGL40ru7Xnc5SScIqpTTgLpZ5zKuitkzBTdSubtuPzUkoJhUSiitlSaofGJ4/oXW5K+P5W\nepsEisUzJtnMsg4HZaN0M5ZhIJ3Pj26XqVVSJX2WlkHNodgrks1glk2+8tCPuwIiFXorbGnDfSIJ\nxsukLLWVkmWoSiTwwjxKy2VODn53WI7HE/TmKoV4sCzLlLJ9HmqCmR4Z+0SAYzaWbouRMB2XCt3U\n3iK2vrVjbaHMp0OZTCcd3S5SYHSuzQ99sMU6zkrKs/q66a1UHZD+yxyTNh4qZ1xEZZv45fcuoO98\nIMZP2x+Q+bFgBSOUR9tkLH0+affAHGqCvzS10+tOM/Hf/mJqaiVKjhcb1HxOponmUzcmUno4lxH2\nmQ7xtDJizCrQdRX3xy44IhL4cBm1/6HhyfszPk/6rKKJO8EdKJB2He7ttsqWzGF9t/b+DgCQ7am3\nyjxmioBto7JL3t1jTPF9QImGVVLB+dRt+z34E6fsN90y5++ssg150n8Hb5D5dOej7M/qZvE02pvG\n9uXbPIkaDHnPDd9l8tAdt8iv1KKwpMobrX/SOvebOTIut/yO2kRNmN/X4M3i9VWwgx6Nu+vkG6BP\n59tDaw4aGhoaGmdBLw4aGhoaGmdh1tNKWemiPiZsQUrxcXMvBVu6CA/OVlPdbtueByaVE0mKuh23\nZabLS5duyvJxf4iYjSaKK3m2x8d3ZEHUOJ9ZNj7C+qV7TEOmm93v9lEtHBoWGiie5POcKVJXn5J6\nXbNxJRsSk+t86bZkZS5SUh6fqOJJ24YP6em2VBg2pGeIiutzUiXtGBWaweFiG1xhRojFfHJtsZeB\nXINVQjUZZpqOET9Vfih599WFrIPfz+OmNGmH02FzIsiX84leoRDyskm5eB3SjwnbFgDuXFtfdIgh\nPcNDGiThrwQAhHKFXrvsJiYGzDEpt4JCUnfDXs4Vr1cozDGQRswLTB4EFzK76dIE63O8WAyooyO2\nFCW/p4F19B4xfi4doME7Ui+UxO4DZhqNCI23pdlCZVzq4B4Ju1s3W8eVK6XP1GnOiaqL5TmdR6SP\n3WmcfwtbxdgdyKEBG/00qAebZHe/H6a2WGU1bjFEL6uVeqUXsj/j2yVwLT2NgW9VrzGR37ZqMb62\nH+H8+lKN1PVevB6xZpmfvxjgLmyOZmlfVQmdBp66ngb2lK3yfXScYtBhq1vOX2Mmbdycy13v1pp9\nPcYqYjx4o3XcYc79Ran8Bp5NyFyoMfdf2Lmexveg
ubtk9guvWGWL76KBf2i+zOVnXiXtmTsm58dD\n8m3l/vIe61x+jdBUh1JJaw438LepqP+bAIDvLeX8rTwt+1Fcfy2mDK05aGhoaGicBTXbduVUSumd\nRM8hlFLQ/XnuoPvz3EL35/mF2b+T+rVrzUFDQ0ND4yzoxUFDQ0ND4yxM9x7Sjyml+pRSR21luUqp\nTUqpU+bfyXdq19DQ0NCYNkyrzUEpdTmAAIAnDMNYYpb9OwC/YRhfU0p9BUCOYRhffrNn+Hy+nnA4\nXPRm5zXeGbxeLyKRyNtfqDEl6P48t9D9eX6RkpLSGwqFiic7N+0GaaVUJYDf2haHBgAbDMPoVkqV\nANhsGEbdWzwCAFBfX2/s3bv37S7T0NDQmBZ861vfwpe+9KULXY23hVJqn2EY9W933UywORQZhjHh\nBN8D4E21AqXUZ5VSe5VSe/v7+9/sMg0NDY1pRUdHB7761a+is7Pz7S+eJZgJi4MF00f1TVUZwzAe\nMQyj3jCM+oKCgje7TENDQ2Na8fDDD2NkZAQPP/zwha7KOcNMiJDuVUqV2Gilvre9Q0NDQ+MC4v77\n78ezzz6LlBTJHjChMTz55JPYvl1SaofDYdx44434p3/6pwtWz/eCmbA4/AbAPQC+Zv59+sJWR0ND\nQ+Ot8YUvfAHPPPMMXn311deVnz59GqdPSxbj+vp6fOELX7gQ1TsnmG5X1p8C2AGgTinVoZT6NGRR\nuEYpdQrA1eb/GhoaGjMW2dnZ+O1zv0flgqWTnq+vr8emTZuQnZ096fnZgGnVHAzDuOtNTl01nfXQ\n0NDQeLdIJA08fbAT33yhEYnr/gGu3r9CfIiG6Nra2lm/MAAzg1bS0NDQmPEwDAObG/rx9d+fxMme\nMQDAvGwnAs4YxlwuVFVVobm5GaOjowgGg7N+cZhR3koaGhoaMxEH2obwsUd24lOP78HJnjGUZfvw\nzY8sx0WhvXApA/fddx+OHDmC++67D4lEAg899NCFrvJ7htYcNDQ0NN4ETf0BPPB8A5472gMAyE51\n48831uDudXOR4nai4flMbNq0CcuXy7bA3/jGN3D33Xfjueeeu5DVPieYdSm7J6AjpDU0NM4X+kbD\n+I+XTuHne9qRSBpIcTvw6cuq8CdXVCMzxf32D5jBmGqEtNYcNDQ0NEyMhmP4n1eb8Oi2ZoRjSTgd\nCnetrcB9V89HUWbKha7etEIvDhoaGh94ROIJ/HBHKx565TSGxmUr3usXF+Ovr6tDTWH6Ba7dhYFe\nHDQ0ND6wsLuldg7LvtVrq3LxlRsWYFXFB3v3AL04aGhofOBgGAY2N/bj68/RLXVBcQa+fP0CbKgr\ngFKT7pz5gYJeHDQ0ND5QONg+jK89dwI7z/gBAGXZPvzlNbW4bWUZnA69KExALw4aGhofCLydW6rG\n66EXBw0Njfc1JnNLvfdScUvN8s1ut9TziXe8OCilKsANeXoNw2g7t1XS0NDQeO8YDcfwyKtn8Oi2\nZoRiiQ+0W+q7wZQWB6WUE8DfA/gc3rBTm1KqB8DDAL5mGEbinNdQQ0ND4x1Au6WeG7zt4qDEbP9b\nANcA+AWATQA6ACgAZQCuA/BVAJcAuOm81VRDQ0PjLaDdUs8tpqI5fALAlQCuNwzjxUnOP6aUugbA\nb5VSHzcM4yfntIYaGhoab4HJ3FLrijLwlRu0W+p7wVQWh7sAPP4mCwMAwDCMTUqpxyELiV4cNDQ0\npgXaLfX8YSqLwwoAj07huucB/Pd7q46GhobG2+NMfwAPvNCAZ49ot9TzhaksDnkAuqdwXY95rYaG\nhsZ5Qd9oGP/50in8TLulnndMZXHwAJiKF1ICwLseHaXUlwB8BoAB4AiATxmGEX63z9PQ0Hj/4I1u\nqQ4F3LV2Dr54VS2Ks7Rb6vnAVOMc/kIp9XbaQ8m7rYRSqgzAFwAsMgwjpJR6EsDHADz+bp+poaEx\n+xGJJ/CjnW14
8OVT2i11mjGVxaENwGVTfN57CYhzAfAppWIAUgF0vYdnaWhozGIkkwaePtSJB563\nuaVW5uIrN2q31OnC2y4OhmFUnu9KGIbRqZR6ALK4hAC8YBjGC2+8Tin1WQCfBYCKiorzXS0NDY1p\nxoRb6r//vgEnukcBiFvql2+ow8a6Qu2WOo2YEbmVlFI5AG4FUAVgGMAvlFJ3G4bxI/t1hmE8AuAR\nQLYJnfaKamhonDe80S21NCsFf3ltHT6s3VIvCKYSIf2ORPR3mWvpagDNhmH0m+98ChJx/aO3vEtD\nQ2PWQ7ulzkxMRXNogXgQTQXGFJ/5RrQBWKeUSoXQSlcB2PsunqOhoTFLoN1SZzam8kP+obc5nwrg\nTwFsBBB7N5UwDGOXUuqXAPYDiAM4AJM+0tDQeH9hLBzDI1vO4HtbtVvqTMZUDNK/m6xcKZUO4M8B\nfAlAJoBvA/j6u62IYRj/DOCf3+39GhoaMxuTuaVet7gIf3PdAu2WOgPxbvZzyAFwH2Rh8AD4HwAP\nGIbRc47rpqGh8T7AhFvqN19oRMcQ3VK/fMMCrJ6r3VJnKqa8OCilCgH8NYRCSgB4EMB/GIYxeJ7q\npqGhMYthGAZebezH17Vb6qzEVLyVygF8GcC9AIIAvgbgQcMwRs9z3TQ0NGYptFvq7MdUNIfTkJxJ\nmwF8B7JAXPZmq75hGM+eq8ppaGjMLkzmlvr5DTX4w4u1W+psw1QT7wHijbQBsgPcm8EAoGeAhsYH\nDNot9f2HqSwOVee9FhoaGrMS2i31/YupuLK2TkdFNDQ0Zg/e3C21DjWFGRe4dhrnAlMxSPdj6hHS\nMAyj8D3VSENDY8ZCu6V+cDAVWukhvIPFQUND4/2HN3NL/dvr63DlAu2W+n7EVGilf5mGemhoaMxQ\nHGofxteeO4kdZySkSbulfjAwI1J2a2hozDw0DwTxwPMN+N0R2QQyyyfZUrVb6gcDenHQ0PiA4lvf\n+ha+9KUvnVX+RrdUr8uBey+rwp9qt9QPFPTioKHxAURHRwe++tWv4qMf/SjKysoATO6W+rE1c3Df\n1dot9YMIvThoaHwA8fDDD2NkZAQPP/ww/umr9+PHO9vw4Cun4Q9GAWi3VA29OGhofCBw//3349ln\nn0VKimgAnZ2dAIDHfvgTPPjT3yIST8KIx1Cz+jI88eA3tFuqBpRhzE4v1fr6emPvXr1ZnIbGVDA8\nPIxrrrkGb/XN1C5ejp1bX0FOjl4Y3s9QSu0zDKP+7a5zTEdlNDQ0Liyys7OxadMm1NdP/puwur4e\nu7Zt1guDhgW9OGhofECQnZ2N+7/zM7hzy15XXltbixc3bUJ2dvYFqpnGTMSMWRyUUtlKqV8qpU4q\npU4opS6+0HXS0Hg/YfvpAXz+B68hER6HcjpRUzMfLpcLo6OjCAaDF7p6GjMMM2ZxAPCfAH5vGMYC\nAMsBnLjA9dHQeN/gtdMD+PQP9mBw92/gdQF/ed99OHr0CO677z4kEgk89NBDF7qKGjMMM2JxUEpl\nAbgcwKMAYBhG1DCM4QtbKw2N9wdeOz2Ae3+wB+FYEiuqS/Daq6/ggQcegNfrxTe+8Q1s2rQJmZmZ\nF7qaGjMMM8JbSSm1AsAjAI5DtIZ9AL5oGEbwDdd9FsBnAaCiomJ1a6vOJq6h8VZ4rWkA9z4uC8PH\n1szBv314KRw6H9IHGrPNW8kFYBWAbxuGsRKyFelX3niRYRiPGIZRbxhGfUFBwXTXUUNjVmFH06C1\nMHy0vlwvDBrvCDNlcegA0GEYxi7z/19CFgsNDY13gZ1nuDB8ZHU5vnb7Mr0waLwjzIjFwTCMHgDt\nSqk6s+gqCMWkoaHxDrHrzCA+9f09CMUSuHN1Ob5+h14YNN45ZlL6jL8A8GOllAfAGQCfusD10dCY\nddjd7MenHpeF4Y5VemHQePeYMYuDYRgHAbytkURDQ2Ny7Gnx45Pf343xaAK3ry
rDv9+5TG/Go/Gu\nMSNoJQ0NjfeGvS1+fPIxc2FYWYZv3LlcLwwa7wl6cdDQmOXY2+LHPY/tRjCawIdXluEbH9ELg8Z7\nh14cNDRmMfa1cmG4bUUpHtALg8Y5gl4cNDRmKfa1DuGex/YgGE3g1hWl+OZHV+iFQeOcQS8OGhqz\nEPvbhnDPY7sRiMTxoeWl+KbWGDTOMfTioKExy3CgbQj3PMqF4VsfXQ6XU3/KGucWekZpaMwiHGgb\nwh89uhtjkThuXlaiFwaN8wY9qzQ0ZgkOtg9bC8NNS0vwH3+wQi8MGucNemZpaMwCHGofxh8+ugtj\nkThuXFqM//iYXhg0zi/07NLQmOE43DGMux/dhbFwHDcsKcZ/fmwl3Hph0DjP0DNMQ2MG40jHCO7+\nHheG/7pLLwwa0wM9yzQ0ZiiOdo7gE9/bidFwHNctLtILg8a0Qs80DY0ZCFkYdmE0HMe1i4rw33et\n0guDxrRCzzYNjRmGiYVhJBTDNYuK8ODHV8Hj0p+qxvRCzzgNjRmEY10juPtRLgwP6YVB4wJBzzoN\njRmC412j+MT3dmF4PIarFxbqhUHjgkLPPA2NGQBZGHZieDyGqxYU4qFP6IVB48JCzz4NjQuME92y\nMAyNx3DlgkI8fPcqeF3OC10tjQ84ZtTioJRyKqUOKKV+e6HroqExHTjZI1TSxMLwbb0waMwQzKjF\nAcAXAZy40JXQ0JgONPSM4ePf3QV/MIqNdQV6YdCYUZgxi4NSqhzATQC+d6HroqFxviELw074g/9/\ne/ceZkdd33H8/c1mk839utySmBAhkRpbkCUXoZJSUEQqQrVk5SIEk1r1Afv4lMo/NPoUrdWHeGlB\nNxcQhUQtUblaI0Ktyi4kFNEQCCGEZEOSvSR7vyW73/7xmw2Ts2c3u8nZzDlnP6/nmWfPzPzOzPcM\nYT5nfjNnpoOL5xRz7/XnKxgkq2RNOADfBG4HunprYGbLzWyTmW2qrq4+eZWJZNC2/SEYaqNg+N4N\n51NUqGCQ7JIV4WBmVwJV7r65r3buXubuJe5eUlxcfJKqE8mc12LB8H4Fg2SxrAgH4ELgI2a2E1gP\nXGJmP0y2JJHMem1/I6Wryqlp6uAvz55KmYJBslhWhIO73+Hu0919FrAE+LW7X59wWSIZs72qkdJV\nFUeCYdWNJQoGyWpZEQ4i+Wx7VRNLyiqoaWrnorMUDJIbhiddQCp3fwZ4JuEyRDJie1VT1JXUzoVn\nTVEwSM7QkYPIIHm9OgRDdWM773vnFFbfeAGjRigYJDcoHEQGwY7qJkrLQjAsmj2FNZ9UMEhuUTiI\nZNgbNc2UriqnqrGdhbMns+amEgWD5ByFg0gGvVHTzJKyZ9nfEIJh7U0XMHpE1p3aEzkmhYNIhuys\naaa0rJz9De3MP1PBILlN4SCSAW/Whq6kfQ1tzJ81mfsUDJLjFA4iJ+jN2maWlJWztz4KhpsvYMxI\nBYPkNoWDyAnYVdtCaRQMF8yapGCQvKFwEDlOu2pbWFL2LG/Vt1EycxL33TxfwSB5Q+Egchx2H2ih\ndFU5b9W3cf7MSdy/dD5jFQySRxQOIgO0+0ALS8rK2VPXynvfMZH7b75AwSB5R+EgMgCpwfD9pfMZ\nV1SYdFkiGadwEOmnyoOhK2lPXSvnKRgkzykcRPphT10rpavKqTzYyrkzFAyS/xQOIsewp66VJWXP\nsvtAK38xYyIP3DKf8QoGyXMKB5E+vFXXSmlZeQiG6RN4YKmCQYYGhYNIL/bWt7KkrJxdB1r48+kT\neOCWBUwYpWCQoUHhIJJGPBjeM20CP1iqYJChJSvCwcxmmNnTZvaymW0xs9uSrkmGrn31bZSWlfNm\nbQiGH96ygAmjFQwytGTLL3cOA19w9xfMbByw2cw2uvvLSRcmQ8u++jaWlD3LztoW5k0br2CQISsr\njhzcfa+7vxC9bgS2AtOSrUqGmv0NbZSuKm
dnbQvvPkPBIENbVoRDnJnNAs4DKpKtRIaS/Q2hK+mN\nmmb+7PTxPPipBUwcPSLpskQSk1XhYGZjgYeBz7t7Q5r5y81sk5ltqq6uPvkFSl6qioJhh4JB5Iis\nCQczKyQEw4PuviFdG3cvc/cSdy8pLi4+uQVKXqpqaGPJqhAM50TBMGmMgkEkK8LBzAxYA2x197uT\nrkeGhqrGcI5hR3Uz7zptnIJBJCYrwgG4ELgBuMTMXoyGK5IuSvJXdWM7n1hVwetRMDy0bCGTFQwi\nR2TFpazu/lvAkq5DhobqxnZKV5WzvarpyBGDgkHkaNly5CByUoQjhhAMc08NwTBl7MikyxLJOgoH\nGTJqmtq5bnU5r1U1MefUsTy4TMEg0huFgwwJtU3tXLeqgm37mzj7lLE8tGwhUxUMIr1SOEjeq20K\nJ59f3d+oYBDpJ4WD5LXapnauWx2C4awoGIrHKRhEjkXhIHnrQHMH162u4JV9jbyzeAwPLVugYBDp\nJ4WD5KWDsWCYXTyGdcsWcsq4oqTLEskZCgfJOwebO/jE6gq27m1gdvEY1i9byCnjFQwiA6FwkLzS\nfcSwdW8Ds6cqGESOl8JB8kZdSwfXr6ng5b0NnDl1DOuWKxhEjpfCQfJCdzBseSsKhmULOVXBIHLc\nFA6S8+pbDnH9mgr+tKeBWVNGs27ZQk6boGAQOREKB8lp8WCYOWU065YrGEQyQeEgOau+9RA3rK3g\nj3vqmTllNOuXL+T0CaOSLkskLygcJCfVtx7ixjUVvFRZzzsmh64kBYNI5igcJKesXLmShrZD3Lj2\nOf5QWc+MyaNYt3whZ0xUMIhkUlY87EekPyorK1mxYgVPtpzJtsZCZkwexfrli5imYBDJOB05DLKV\nK1cmXULe+Oa3v0NDQwMVj69j+qRRrFu2UMEgMkjM3ZOu4biUlJT4pk2bki6jT5WVlcybN48tW7Yw\nbdq0pMtJnLvT2eV0dHbRcTgaOo/+e6izi/Zo3trvfIPnfvMUBYUjcHd2Ve6hubqSkVOmce7cMykq\nLKCtrY0rrriCO++8M+mPJ5ITzGyzu5ccq13WdCuZ2eXAt4ACYLW7/1vCJZ2we+65h/r6eu655x7u\nuuuuk7ruzi4/sgNu7+zkUOfb42Fn3EnH4aN31Ic6u9sfPZ5uJ37U/GiHnq59fGff0dnFQL6LdA0/\nn/3VP6Nj32tHTW+v3UPF7/cAUFJSwq233prJTSciZMmRg5kVANuAy4BK4Hmg1N1f7u092Xjk8OUv\nf5knnniCoqIiOrucXbsr2bVzB9NnnsmUU07DHdrb2znvor/i6qW3pd3Jpu6Ae9vJHrUjPtxFR6fT\ncbjzSPuu5P+zplUwzCgsMEYUDGPE8AJGDh8WxocPC0PBMAoLwuuRw4fh7c089rXPUrWj5z+FkpIS\nNm7cyMSJExP4JCK5qb9HDtkSDouAFe7+wWj8DgB3/2pv78nGcKirq+Oyyy6jr7pGnHYWp177rwwr\nGjuotZgR7YDDTrb7dfeOd0Rs2ojYtMLYtJFp2hcOH8bIXtp3r6vHOmLjBcNswJ+lrq6OBQsWsG3b\ntiPT5syZQ0VFhYJBZIByLRw+Blzu7p+Kxm8AFrj751LaLQeWR6NzgVdPaqH9UwDMAUanmddCOELq\nPKkV5b5C4BxCN2gnYRsfBrYChxKsK9dNBWqSLiKP5Mr2nOnuxcdqlDXnHPrD3cuAsqTrGAgz29Sf\nlJb+0fbMHG3LzMq37Zktl7LuAWbExqdH00REJAHZEg7PA2eb2ZlmNgJYAjyScE0iIkNWVnQrufth\nM/sc8N+E/uS17r4l4bIyJae6wXKAtmfmaFtmVl5tz6w4IS0iItklW7qVREQkiygcRESkB4XDIDGz\ny83sVTPbbmZfTLqeXGZma82sysz+lHQt+cDMZpjZ02b2spltMbPbkq4pl5lZkZk9Z2Z/iLbnl5Ku\nKRN0zm
EQHM/tQKR3ZvZ+oAl4wN3nJV1PrjOz04HT3f0FMxsHbAY+qn+fx8fMDBjj7k1mVgj8FrjN\n3csTLu2E6MhhcMwHtrv7DnfvANYDVyVcU85y998AB5KuI1+4+153fyF63Uj4pbluG3ycPGiKRguj\nIee/dSscBsc0YHdsvBL9zydZyMxmAecBFclWktvMrMDMXgSqgI3unvPbU+EgMkSZ2VjgYeDz7t6Q\ndD25zN073f1cwt0d5ptZznd/KhwGh24HIlkt6ht/GHjQ3TckXU++cPc64Gng8qRrOVEKh8Gh24FI\n1opOoK4Btrr73UnXk+vMrNjMJkavRxEuRHkl2apOnMJhELj7YaD7diBbgR/n0e1ATjozWwc8C8w1\ns0ozuyXpmnLchcANwCVm9mI0XJF0UTnsdOBpM3uJ8MVwo7s/lnBNJ0yXsoqISA86chARkR4UDiIi\n0oPCQUREelA4iIhIDwoHERHpQeEgJ4WZzTMzN7PFsWkePQGwv8u4Pf7+DNW1OKoj53/RamYrzKwm\nwfXvNLNvJLV+XxPDfQAABfBJREFUySyFgyRpEfCTAbS/HVg8OKWISFxWPENahqYkb2kc/Up4ZFLr\nF8l2OnKQQWFmnzGz3WbWbGaPEn5FmtrmqG4lM7vIzP7XzBqi4UUz+3g0bycwBfiX6H0edQnNil5f\nmbLs+81sU2x8hZnVROt4HmgDPh57yxlm9lhU7y4z+3TK8haZ2SNmtjdq86KZXZfS5qaolveY2cao\n3Stmdk2az3519ICYVjOrNbMnzGxmbP48M3vczBqj4Sdmdlq/Nv7R65lsZmVmtt/M2szs92a2IDb/\nGTPrcfRmZl+PtoNF40Vm9u/Rf9P26ME2+lV1HlM4SMaZ2VXAfwKPAdcAfwTWHuM946P2O4C/BT4G\n/ACYGDW5Gqgn3BNoUTS8MMDSRgPfB1YTboz2XGzeGuClqN4ngHtTAmcm8DvgFuBvCDetu8/MStOs\n5yHCvbSuBl4D1pvZ9NhnvQHYALwO/B1wM+HhUMXR/LOidRUB1wM3Ae8GHu3eWfeHmY0EfgVcCvwT\n8FGgGvhVLGh+BFxhZmNi77Oorh/727dQ+K+ojq9En/954BEzO7e/9UiOcXcNGjI6EHa6T6ZMW0V4\nAMri2DQHPhe9LonGx/Wx3BpgRcq0WdH7rkyZfj+wKTa+Imp3VUq7xdH0spTpG4HyXuowQpfs94Bf\nx6bfFC1raWzaFOAw8OlofBjhDr0b+vicPwBeBUbEpp0NdAIf7uN9K4Ca2PgtQAdwdmzacEIofT0a\nL47qWxJrsyj6HCXR+F9H4xenrO83wE9i4zuBbyT9709DZgYdOUhGmdlw4L3Az1NmHeu20K8THgX6\nkJld1X2Xywxz4Mle5v00ZXwDcL6FR75iZpPM7Ntm9iZwKBqWA3PSLOuXR1boXkt4AEz3kcNc4Azg\nvj7qvDSqp8vMhkfb9A3Czrekj/elW85m4I3YcgD+p3s57l4N/Bq4Nva+a4HX3X1TbDn7gN91Lyda\n1lMDrEdyiMJBMm0qUEDYIcaljh/F3Q8SbnVcCPwYqI763GdnsLaDHh7bmk66eocTPg+EI5Frga8D\nHwAuIHSVFaVZVl3KeEes3ZTo794+6pwK/DNvh1D3MJujnxNyLFOBhWmWc3PKctYDHzKz8WY2jHAu\n5kcpyzktzXJWDLAeySG6WkkyrYbQ/XFKyvTU8R48XL10uYV74l8K3E3ov1/Yx9vaor8jUqZPSreK\nPpaTrt7DQI2ZFQFXAp919+92N4h2pANVG/3tcYI+5gDhyGF1mnkD+R3DAWAT8A9p5rXHXv8UuJfw\nnPM3CUc28XA4QOgK++gA1i05TuEgGeXuh83s/wg7mu/GZvW4YqePZbQSTr7OA+6IzYp/A+9WRfgW\ne073BAuPv3wfYUfXX1dzdJfT1cBmd++MljeM2A7VzMYBH2HgD5J/lbCj
/STwaC9tniKcgN7sUWf+\ncXqKcJSzy917PXJz94Nm9kvCkdGbhIcAvZSynC8ATe6e8w+xkf5ROMhg+AqwwczuJXwrvZhjPDbR\nzD4MLAV+BuwCpgF/T+gP7/YK8GEz+wXh/MSr7t5oZj8H/jE6H1BH2JG1DrDmD5nZXYT++GsIXVxX\nAbh7fXT5651m1gB0AV8kXD01fiArcfcuM7sdeNDMHgTWEQLmEmBd1M+/gnBS/3EzW0s4WpgW1XS/\nuz/Tz9U9AHwaeCb65fIOQrfWfGCfu6+Mtf0RoZusHviPlOVsJDy4aqOZfQ3YEn3uc4Eid78DyT9J\nnxHXkJ8D4Ul4lUAL4dLQD9D31UpzCZdL7iZ8Q68kHHlMjrU/HygHmuPLAk4lnABvIHzzXU76q5Vq\n0tS5OFrWBwlHDi3Ruj+T0u4swjfoZkJ43Z66TN6+Wmlsynt3knIVDyGANhO6xWqBx4GZsfnvirbH\nAULQbSdcHTW9j23e4zMCE4BvRdu1I/psG4ALU9qNiz67A3PTLHsk8KWojg7CCepfELt6Kt3n1JC7\ng54EJyIiPehqJRER6UHhICIiPSgcRESkB4WDiIj0oHAQEZEeFA4iItKDwkFERHpQOIiISA//D4XY\nGRSrx7gzAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAY0AAAEPCAYAAAC+35gCAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAIABJREFUeJzsvXd4XNd5Jv6e6YPeO0CABMHewSJR\nlKheLcmS7NiWYrl7HSexk03WaZvi3ya7XifrzcaWHTkukqtkFUuyJUtUoShSLGLvBAmid2DQBtNn\n7u+P7+C+VzYSwbFEUPR5n4cPL86dufe0O/f73q8py7JgYGBgYGAwG7jmugMGBgYGBu8eeOa6A28X\ngsFgfywWK5/rflwq8Pv9iMfjc92NSwZmPt9emPl8ZxEIBAai0WjFTOfUpUJPKaWsS2UsFwOUUjDz\n+fbBzOfbCzOf7yz0/KqZzhl6ysDAwMBg1rhk6KlfQegAACAcDttNyWQSAOByu+22DA+RTqf0/0m7\nzaVfq+mkSDUD/ZP2ufBkAgAQyOJFlCvB6yWm1WdOswtefQ95ied4cnm98RE5h5TdNjHKvqQh98ku\n++XBAkrJdb1en93mU359Darxp0622Mdjk30AgLp51EIrahp/9eIA0LIfANDn7rObJhEAAJQU8Prx\ndBbHMzUOACiYYJ/SXpmfoL8UAHC6tcc+l5iUOfFUcg59cc7tiJL23FSE5/2FAIBQZwcAoCZ7Bfs3\nKW3jkRy7LRgbs48nCmVdyqq5Pmpc5nGyRD4XSM23z1VmZC1OdqfttpaOI/ZxZqwXABCvaLDbrlhd\ng5kQ+vk3AQAvLONYN8WLAQD+sWK7rWAqah+nauX+OxXH0PSa7J9EuaxFcWGbfa7TPQUAcAULeN+f\ncqyD18kahM90220NlqxZKF0r/5dk2+dig3LfnOwiu63XF7KPyyJyv75+jml5ROYqVC9/55Zz81YM\nydiqurkHHq7i81rXIvM4NHnCbtu06UOYCXtOvAoAmHC/ardFv5EHAEhv4Ria2rk/W6tkv/WEOu22\nkeWyhj1PyBjXLjhrn6tZJf2s6bzZbjub4rVP1cs+LwH3YHmn7I/J/oz0r+IG+1xW4V4AwPKxYV6v\nlc9KQ5b8Dpy5dcJuCz10EABQ37hVGhaM2udc
L8ua+e5cbrcdf47rszprDwCgqDzPbjuWdTkA4GP3\nbMRsYTQNAwMDA4NZ44JrGkqpdgCTANIAUpZlNSuligA8AqAeQDuA91uWNaqUUgD+GcAtACIAPmJZ\n1sHZ3Gf3AXnDT0UpqcUjMTnwUFJMwCElx2L6f0r6gEgjyZRoGgEPpbaAT6SM9iNn7LZYkpJSIiX3\nSVFZQFILYS6XTH1lEaX8wR6RjGMJStIBD6XOyto6AMD5o5S8Evp6bojE6VZB+1xUa1kd7V12W0cb\nj/1BGdOWK9fZbc2BGdQYAAfPvgEA6M8mj2xNivR0IkZpRuXxfLxdBj6lKD3leqRP6UFpS5fU2ueC\nkLVq66Tkl5riWgymRdIrjVA7G3TJd4IJrTUoSmXjIenXeOF5uy0/yftVVYg0+ep+ak/JmGhnqSKZ\nx6w053o0LFpP/+lWtp2klBgNigx21a18rDobSzETTlXLuhYd41jfSJ8DAPh6ucVDVVP2ce1B0YIO\nT1LSn4pJW+aHMk9nr8zY57aM5AMAjgU77La2qph9PPwL0YJK/JSmw3rr5SyW56fgELWmQ0HZ79Yz\nnJPAlQP2cbTnVgBAsuhBu+2nZc0AgMLXZU5iV//CPud7/jIAwHgO1yxxhsfPHRMN6Asrq+y2ThzA\nTJhoFym6+8iI3VayXJ7taBef9+eWca8O/0LGVvWRRXZb5qzszzuWyTz9fOE99rllLhnrD/fzeb9s\nGcff9Mj1AIA3Kv/FbusrkT0biMla5I9yP2e8otV8Z/8eu21t6M/Z1/tknou+OGS3LZnYAADY0yz7\nvuMVasK5YXn2lu7/R7ut7Rj7lxuU35rn72uy2z4zMohfF3OlaVxtWdZqy7Ka9d9/BuAly7IWAnhJ\n/w0ANwNYqP99CsDXL3hPDQwMDAxsXCz01B0AHtLHDwG409H+sCXYA6BAKVU5Fx00MDAwMJgbQ7gF\n4AWllAXgXy3LehBAuWVZ0xxBP4DpeItqAF2O73brtj5HG5RSn/rlm+w9KHRKOk26JDKlVX031dWp\nhEM1jku7k+Jxu4WuSKeEmigpphdaaakYH/uHqPJGkzRSTiU0PeVgu6Zt7D6v3EO5aUgbnIjoNn4+\n4eb9spJyPB7ju34yrO8RE55qapL0w8iwqMajYzSWJVKkivLcQm2EHO7ukeTMboyv7JYpLywmHRMe\nlaXpT9OwVpRh37q0UdUVLrHbfAVCn9ROyPitcVJx7mXyuf7XqTInsjh50YCsVf8k1y+ZKxSGu0+o\nlkwTHRXiw9LX9DjprP5icoWBqFAGychJ9jkpfXC3yZxNWuzfyJiMJzVEQ+9ACSevLGseAGBqmHM0\n1sP5dmL7cz8CANQuXmy39fQLjeZuoVzk63Y4Agwfl/4Wk04ZDMg+b6ndCgAob6Phvd8SeqP/Ba5p\n3p3cb/3dhwEArolxu+108RoAQO6g7KPqwnb7XNEhmfcVG/mz8dIAx1pfJtfrDC/hQE/LtfM7xWEh\n64dLeSrSDwAIj5Hyzd283z7OapDPPraAFN71e2hkdmLs2WXSx/yjdlvvEvn+8tfO2W1PHSIV+zvL\npH3H9wvttspheZb7r5Tn+KoXGfr13YPaiH4tKc4vHq63j/+5Qn4P5gcX2m05xTJXLXvld6Qhiz9d\nr4/LHostIiX87T3H7ON7H5G9H1pLyu1MXPZ3z8s/kfEGPmifixbIXjm1Y5PdNpj+gX2c1y205tku\nUo7/9kg7AOBLM/sXzIi5eGlcYVlWj1KqDMA2pdRp50nLsiz9Qpk1LMt6UCn1r29rLw0MDAwMfgUX\n/KVhWVaP/n9QKfUkgA0ABpRSlZZl9Wn6aVrU7AFQ6/h6jW57SyxeIEZjj89rt4VG5M0+Ok7NIOxw\n6Uwl5TgrQMk44BN3zUxaJMbsII2QOdkieSxqoOQ3Gu61jyMxkUJjcUq3iZi8D93u6etRCq6sWyDf\ni9CYnklS
Uh0JidTb10/j/uCAllaiIuGHw7xXPCJSns/Ha7g8PPZrw2ZBPo21ynJY7R1Y2qDdVksc\nmltMJLScdkreoz5KUrVZ8p28cUo2qXzZcqUhkVqHKqjVLVQiqU1eQbff3LM0Og9krZT7ZnOOgyNy\nbTVPpPxsLyXp0Xky1oEJ9rmpi5pDX7FIk2fTnJOBcyJ9TvaJFuL1c/9MKK2xZdOFN7e32j72loh0\nPlVFw+1UuB0zYWVQpGBrjIbrgYxoHdmFVK57HE4bCzeKlDw6zusXpGWvrk1KP6dSlPxzAx8AADQ2\n/sxuG5uiJlhafh0AoMT7fbutxiNSfcuA7MWOhdT6mgplfM8tq7fblh2jpv6KSyTssqHjdptKiGR8\nOCmuwA0Ras7nc2UfNyg+U8Gn+TzmrJdr+7NW220jS3g/J/qukZ+F1JGP2m0H/LJ3shbxmjdc+//s\n4wX/IPtW/c7jdttObzsAoPGk7JNdnlX2uY3XSH/PjP9Pu+33av6Hffyvrd8FAGxIUA4+O3otACBU\nLM9Fr0WX6Ip8+d0Ya+D4/YepVfnXyvOV3MUx7x6TdRnNugoAkHkfHQtWHvtjAEDCx/tP+KjF/WCh\nOF+sPL3PbtuSR81rtrigNg2lVLZSKnf6GMANAI4DeBrA/fpj9wN4Sh8/DeDDSrAJwLiDxjIwMDAw\nuMC40JpGOYAnxZMWHgA/tCzrF0qpNwA8qpT6OIAOAO/Xn38W4m57DuJy+9FfvaSBgYGBwYXCBX1p\nWJZ1HsCqGdpHAFw7Q7sF4LP/mXvNrxGVL+UwhJcXiDEqliD9lEpTXZ6meHp6aDhubxX1PDQiqqnP\nEcFdof38C0up/i6pq7ePA3p2EwlSELDks8m03Gtogt9t7xHD4OgA6bP+XqqmE9oPOxSm4TIeFxV7\nOto9kaDhOK4jdPNLSaF4/KRiomE5n5giHZbvd1jhHVi0SGikZJT0z1SFVlTL2R9M8V5pbbA+keB8\nxjrEqHdyQAzYKkQ/c+i4l4YgKauCxWvt41q30H3uOKN6Y/XSh7F8GZf3FI2akfF2Gd9ou9322gjn\nJ9kn8QuRbva/L1v6XAhpc0bnx0NCG+Wlr7Db8ivJlk5Myr1dE3QMWNBMas6J2mLxme/ykGpbf4vs\nz8oeRvQ+20l6yHVUxl0eIoUY05TR+aj0I9JQb5/bGRQq5I4JXi+TIrW1bpGMf+gkDbcryqQPJQEx\nZrdPkCpsL5Y5yX5lt912/DBjPDwN4v+/r59xIaXFsn+9VTKP7a38yYkuEKN9TusrdtsbS3m/K3bL\nc7N0AffckgZSoU5s8gtNMzDAiOubzsse+04H90vdAcYonP0H+R0o+Qmj+mt6N+oxPAIAWFDG8Q3M\nk72b5eNvxsjA8/bxZ98vRvMdo3V225kqmcelP5DrRLc64oimhBauf+Q5uy0Y4X4/+ILs6ViQhJD7\nFtkPHWeFnvK9yt+WM0ekX/VLOcfvfc9N9nHtJ2UeH/6/AbutbvXMdPR/hIvF5dbAwMDA4F0A89Iw\nMDAwMJg1LtmEhUGv0CxpkJ7yZgu1FE367baWVqr/refEc2H/G/T+6O2W81MTohYnEvRmyQ7KdRY0\nMRXIunX0u6+vEvoqJ4eUid8nqnJru9xrxw6qxq19cq/oFFXYaJT3S6XEQ8apUKaSQpvF4kI1pR1M\n2PSx20911Ovlkke0F1mXpsUAwHLRU8UJd1xUeWU5qLGYtOXn0PujrY/nD7RJuoXjLUySGO4Vqqrl\nvNAVRQUca+mUnMtfRPpweZCq/gqfzG1BIylCf1rGc7BFxvL6ASasO9kjnkSjadJ9/pAjrYwSaiwZ\nZGyLFZLrjbpkLTz99J6yfCJjlQQ4X4EA6bhQv3hjDYJpNmKjM5d4SS7S4+oi1ZG9UzzAWvIYa5Af\nZd+6iiUWIbb3R3bbz0JCLS4rEUpMHaO3WV5U9uyBEu6/oGerfVx3jTwjvT
est9vW7RFvJ++kzFNE\ntdvneo4ckn7kO5IUkvnCRIt4np0qqrfbeo/JXCz2yziWJxlz0LddqKjsG/jMXP045+PUiNA30X1s\ny13soDMdeANfBAAcX0bPoaJC2XfZf0e68NTXGOexepfs1RcDP7bbIq8JhbP240Ir/TzB/gaekust\nrySdp+7mb0nfiHgq3dFzyG5LuesBAPPvlb/HBvndQL/s8x3t9XZbOEE/n1vyZS17Bj9sty0ekLbm\navHgOzyf9xq8WuYmk0+H0/0Pc+wjdz4JAMh7mZRrz0omX5wtjKZhYGBgYDBrXLKahqVjLrygYTeZ\nFClix25Kvs88y2Rh46MieQyHKElZaZH0lf4/naAhLhwV6T50kFHI3T38blWpSHjZWfTr92gNqLNL\nDJcnuym1xnRf3Y4ocGdEeyop0lrSoe3EUzoBYFr6lXFqGhCD8pQzY6KP8+HW/eofpTQ7Gp1ZjnBl\naS0oVW+3VWTLZ1/Yw7iCF1/cZh+PJWR+RgbZqXCf9MWbLfd0RRlRfVJHh3v28/PpEUca8jKZK2sn\nI6aDOrq/t18ktOOdlDT9UTFCD9Zx/iuzaGSP6XgI3yHGbkyVyvUiY2KELPByPkYSolHmpnm9wjDX\nXpVqZ4Q9lO7ia6nlOZE1JJG/eYOUfFNZoqm88iK/3xbhfovPf0zG2uNIGpkle+yMJfuuIkytryVH\n+lM9yj7OX8T9dvJx0XKrrqL0/i+98oykx6R/LceYPDG3Q9SK7CsYtXy2j9HHDUM/BABs2UJfl/3j\nMqbeoMzDvEJ+3peWNRt7g9Ju8r0v2MfZL+usAXcyzqT/xMcwEypGnwEAvDxKTfLTU7JPDr9IzXSs\niDFVyiPGbtcRMgWvbZHP9oUkHuK2EMf/xntkPsf9NFZHD1AzSECM0yO5lPSXDolTwHdfuAsAsCSP\n+/Nqz4sAgO0p7vGPbGL/X9P+Ka21jN0oyRGD+jF925qvcz7d60SzbryK4903xfXev0BrOR7uqc8W\nO1TFWcJoGgYGBgYGs4Z5aRgYGBgYzBqXLD3l84k67AYNmSdbhI546OFH7bb+QdIjfr8YtSIRtk1X\n8XNnRIX0uBw+0165ttvNaRwbZ8xDZGJSX8ORIFHHTsQ1zRV20Gdp3Wefh9dLOrIdJhLSl1SC9MT0\ntZV7ul9UPb1p6UtkiLEEriKmDCmrEfom7ai1PDjE/juRiQmFkV1Fam/vaZmvb33/MbtttJ/1H6yA\nrk0RZWqLgUJxBMjv0yldsmhIzvYLNeKO8fNnBnm9hmFNHWRxPMmQjDua0lUPfaSORkvEyF4R4/wP\nJ0jV+M7omh5+xgG4ohI34Z7Sa1HKuJaqbKEa3AOknCxHQsX6oPj4Dxcz9qJzfOa4l2FLKqaVriM9\n+sPz8tmpXqaGsM7zWm2DktiucCHjXlRS7qliMk/+yxkjsXibUIClZXQmOHaeVfqiXkkrseBxzvHe\nXKE/atdKDYcTu5hMb9VtkpAw3MW28QCp1LxiqSfx9CnSKTdVy1q+PippU+K9pG+za2Stkimu5/jU\n3bxf3UsAgN69l9lt+6/nfDixpF7WoWaKaTPe2Cpj3fYjOlbUPPNV+/hg9h3y3dU/t9t2T4kzQtYp\ncTbYvv5y9ne71KZ49lo6W6ztzreP99YLjbbgrCMuZETGXzYk/S7J2muf+/1cGdcnfpf7s/UEqaXW\nUZmrLbfy9+iZn9YDAO4KyW9Y32332ueumJL989T2J+22TZuutI/jpeKocLav2W6LfVs7bXyA8S1v\nBaNpGBgYGBjMGpespjGdEDDtSEa3/XVxTzt5zhGxWkRJNx6XN3rCYezOaA0jY2lNw5H8z61rjTuT\nIrpdDs1h+rsuSmNunxj3PDr/ud/iezuSlM/HYry/M93v9Cc9DuHVryVrS1cgTGdoSLv+MpFCA14a\nVvceZ8K0iC7TlrS4Dc6cYhppJ6wikT
yTSRpu97wmEnFLGw2jVfNoaIyOiaRrZSgpZSdl/AP5ooVU\nOWpMBzIiZbkchuvAGF0ax7TmFJ+iMTvXLVLq2HSFwxirnOWPiiF0THFO0o4ElcorEvZ4Nl1SA9ki\nqWYldA13P6XmReulalowSkly8AxdMkeyRQvLpFlt8dAuVnlzorhd0qAnS2mEDZwXCbYlTskzcxXX\nbtnT2u3Z4SLs01Ued2wWDWrgLLUgf4VIlEuymG78eAHX+hOtcr0nHHXfF1bIfBc+JJHTt6xj5bfa\nuMz73ly2LT7K/p3JlfWrzKXmFtdOG+XlsnYvx7mjr75aXIgLOrieXx15xj5ucsl1Mmd4frnWjhhD\nLdgfl7GuBA27r3zja9LvU6/Zba7yevv4vEcb++so3QdOyt4rL/rfAIDtQ4xWrxmXfbL8x/zNOLeS\ne3v5iOyZ4p437LbJsGh5y05L/06s537Y8kNZi303M2FiYIzz0/dJSXt+tIWad2LzTgDAC8duBADc\n6nOkmq+SdfxoF6P+/3svv7u8S1ycC4/ROH7kQ9LXzfgjzBZG0zAwMDAwmDXMS8PAwMDAYNa4ZOmp\nhFaDx8OkJo6cbgcATDlynhUkHLUuEtOxDo44gemwarvkHump6ToVrjdRUs7vyv8Zh6HZ0rSUyy00\nQtBhRIfSUedxRwfJbIFkFRtdkOvENE0xfwGNlB/7qBj6gl4a63Keorr93Kvip+/28/zwMKkRJ9xj\nosKHAqRjTnQJlTXiIT03v5vzmVRinPSkGTswOSGfXaiTGKazeK6gSqiBETevkZXFsUZ1BG3MR0Nw\nREe7e3TdkWQBqYxEVCgRf4rX8GZzfWJ6gdzDpHvyw7IeE9UiT5UVMybkM7cIXeAtJz31+qM/tY8f\nfkZoz5xiGsfdjY6yiA6cXyy0V+MeGqlbh4ReSA2QPpgYXWYf59QJpZUXvcpueyEu1FPzGdkbwyka\nipszMk+hnjV224atrCy5s0r79Z/jfJ/WGQv89dIXy1EJ8mynzEXY02631RSxUqAvR4z6eRUOOrZH\nvrPaJ2OKVjGB49YKoftWbaAhvOL4/fZx+xNCfy5qZtaGFw+TfnTCs0PiOwoKT9ltaw4I/bhT8fqp\nhfw9qOmScXjKWGlwc5Ws4a5d3wEAvPcY99Ouq3QGiJdI5w1UbGQfXhWqNXnPVrvt9BmhnoJfkLXo\nPE66df3dsj/bHUlJL5u8zT4ua5XnsuIIn5GsW6Wv48Ni5O+ueNY+t7ZZnve2+YywX5pN6u32b8oz\n8vRCUmRtL2m6+XOYNYymYWBgYGAwa1yymoZHG8LHw5Ss+rQ7acph+E053GGn3VdTDjfXjG6ztFF7\n2vjtRDo5c3rhpDZyp98kvLve9J/T0p2tXW6D3l81tgNANBp5Uz/l3iLVufR1Vi6lJOcNisbictGl\nct1anh8dFUmztZPaw8Ag58sJT4HMXbTHEY3erysDKmoqkSSlzClt7M6MUjqsTEvbcFjGVezl58OT\n0o9Khywz1uHI3bRcj7+N0lMyItpJtq5IGHakTa/M1tKdI1V5kZuSXrZXXLCtGl4vGhIpUnveYuFd\nTNXtrhGpu3CQi1a8nobI9X2iRZ2JUHI83zuzi2h1gUiDgxbzVCW6ZG4HQ9SGClJ0ke1dJveKLttl\nt206p+u8HxEtZNE6VmJ7Ts/dtQ2OGuhRrpXHr92+szjfzWclmjnm0zmN8imFh6tkrd5/6Dq77cdj\ndBRYdY2sQWSSa7C9VsZyzVn57ugKzqc/S5wm2qYo+dYuPm8fn7hc+t95lppCYZOO3mfxOQDAmsBf\nAgC+2vqA3XbdBtnfnWXUzI4/wFTnxbq6Z9epLXZbfoVobpUZ0TC6Nxy2zzU9LnnZEitoaB48T/fW\nrN+VypKhV2l4L2sUF+KC78g6391DR5RTC2V8tzbRcaJGUbP7+pPiGFHzfq5P/xuyPlO6dv1NBXw+\n/I
cOAACWlPD3Y+3L/G06eoNoWX/YSu32f86nU9BsYTQNAwMDA4NZw7w0DAwMDAxmjUuWnpqOnh4e\nId0yPCYUS8YZhZ0mFTUdGZ2cgbIKvMkgre+haSnLYeiG4genSYaUwzg+bRR3689ZjoSE0990UlJe\nR4JBl07FHnUk+UvFpP/zasR3fMN6JouLJoQ+UrTlob6O9MS9d28FAOzYSRW8pUt88I/Rnij3mRQK\np7udPvpnu/V8TjoouzxHWveYqNruYiYE7ByW9cjWxveIj+e84+KX3xsnhVRRQOpirFuMqplsRq0X\nTAk9MxqXe+UUcv6Hx0QmygHpOU+Wg+7LyGczMY5pqkIeiYU65mBTDukn75jQbH35XLMm0Gd/5T1C\nlb30EufzfJdQQGwRuE/KBH9/nNdvH5NkdpvrGbV+MMG1DhULtZG7h6n7XysRimfDSokuzlI0om9Z\nKfN0rpvXWx0mVberUo7dSdIjql/GOGAJBVPv2DwVQ7Jmvfmk1Nblk6qyOsTJwuXmd67V/XcXS2Ty\nHYW811hAklsW9zqqPV5N+maFT8fP3E5ngNEn5DzTYgo650mkdeniDXbbtkdlPl2ODA93L6WTyfmI\nxHT0l5JOeu4l2Z9rdXR/pyMDRGCzfG7fSRq/77yF0dWHvUL7fDD3dbvtQMsTAIDxKyWxYM8Ef3K9\nuXLt0FHSo5kcGtndiyU6fZuDYtqYJc9Lfqd8JyvnavtcuEmehfZxJhC9mZnY0ZcnyRf/Vy1jYa7M\n+yv8ujCahoGBgYHBrHHJahpjUZF4z3XyrTtdC9unqEkkLL7Fp1tTDknf/qT+Tho0VEV1UaTUvzON\n7sxM7paidaRtCYb3srR7r88RQR51RKdDu7amHXW8/WmRIrdsFAPj0iYa6dxKpyF33CPj5thzC8Vg\ndt0VlHavSokL3k9/8eKbet02Kt/r0HW3AWBsTAzN5aDhMxRmNDCKRVLMdHEMqlrcJ+Nagi0ZYfR3\nokTmKzfIz0cmGTGNUTGSqgLmEgol5H5lQXF3DLmopVi5Mp85CWomluXQIqHrRecxor1BC3qrNooB\neOHCevtcXlIXYSqn22b/GKX4Qq2lrL6NdajX9cm6PLyTtaQB4GiFzNnWMzQkt/bqwlzLGNUdP3+t\nfexr+b6MIXOX3ZYpkDWuOSiSdqKS7s8lZVvlewmOYWeUc7dQ59eOlVN2HKgXLaqxVwz1ST+N7ucH\nRPKtPM6iTeeWUyV1Z4sWcdUyR8T0PolgLl0lRuLkOj4T9fnS98blXO9OF12ck1WyVxoW0Hh+6r3a\npfylb8GJXRNa436V6ztaI5J/xzC1l6ICPgvjSvS/zx7hM/P/dB6yow1iab/hfLt9bveIGNRr/sqx\n5i/TkNzRKL81RaeZYeCcTxxPPhGWtj8ooFbRoMRBoflyR+6zRo5/7cOiKa2q4n6Y1yPX67pWNLe6\nLBZx64rI57PH+MzsKbrRPl66TJ459ynun+YVMzu+/EcwmoaBgYGBwaxhXhoGBgYGBrPGJUtPTRuz\nW9va7bZ4WAyiLiddk6FRM6OpI7eb71KlDdbTTYkkVemEDrJIO169liNaW2V+NX7jTUZzAB7HEkwb\nxZWXFMN0RT4n0g76rLpM1N2Vy6TSWLYjYj2Z0OPJOPrkSJA4fe3iYqrM3gzv7UShR+ifkyc5pvyQ\nUA2Z+Y5rxhm96+4XQ2t/Hu9fE9JxFQlRiyP5pI5clhhaJ7tprI26SWd4SiWte5mXdchHLVG5OyLy\n3YJcR+nCkFCULg/ps6iX858bE7prcpJ0grdIKJaV84XaqMjl+kynS+9zVM4L+uhIcX5A+lK0gOnn\nS/JmjrBv7Jd5+FGQjgML8iRZYuog56uwnlXrssdljfc1sk8bR6We+HiOUEJtjjT4Lfni57+uZKXd\nZg29ZB+H3bLW8TDjaKZeEwpqKqD3xEpaUmtKhD57+QpW+ltBdhG5
xUJ9nT5MymNlhVzPUyFztqSj\n3j5XmiVj787neJf2ca2OLJJ9UOIhvVTm4dw6ceUCia/4yS7GHC0p0unIG5glofNR7tWzzVKB79+a\nGMeyeFz2z+nzEt1+IMQ4lfYq2S9/+zc0Vr+SomPJ5a9KavjHy/gMXF6/AwDwmWGZm0W5jNsZfEqM\n2J3LdthtNa9zb51xSbry+mcm3AaYAAAgAElEQVS4hwJXynnvazIPnv/O1PpncusBAEtb/tJuW7hz\np32cOLsbAFBbttpu+9mz8jxfwwKAbwmjaRgYGBgYzBrmpWFgYGBgMGtcsvRUZEpog852pmFIpISO\n8fipU7tAOsPSMRseN9VlS/tPJXWNi6SLdNF0bQxnzsGUxe/GHUzJNDLWmwM+vBlHyhIdW+JxVO5z\nklnpjE6omOS7vmKeeErUVokK7nP4yE9TawFHWpLpGh8AkNYeYR7loKRcv5omBQAmPEJXhM+xkkFM\nJ/9T/exlpY+eKOFKmfuGLlIKrhxduRDibx4P0dspkxAKzJPDOan28rhrTPo56KG3yVRC5sqX1DRP\nL6mRcETopAKHN5qlSH/0pOV6Hj8poqImGeeicvGA6s2l58qEprYWxTj/0TLGgCzolb640mxLZRze\nXw7kjWrq7AjriXSVCQ3T56m125JxVqJzFcj614Vb7Tal24L7JWVG1p2k2paNyHHxCOMqRisW2Mf5\nk7IG5UfpDXS6Uta/Xdc5Wd7DNBUHddqRTUuZiuONST4P6/fLPuvwkZ5Zs0T2Q8VyoYKCxdx/PTtl\nrw7Ps5tQVNnOP0alat5UD9esIX/mei9HD8kax+vW2m3tbqlvEmvnfM5fwb4Nlso+WhvinBwJytw2\nZ4TC6exk5buN8x4CANy7jLFQiy3GzCQsoQM3V5AO+8GQxNEsypFxDYxyvSO3Cs06L0EKrK+fCQjd\nA/J8pe6gR9UO7fx2942yBmNDrLhX9IL0JVr4NbstdB+f/VdiQmHekO+IgzoyMx39H8FoGgYGBgYG\ns8Ylq2lEwyK1h0bGHa0iKQazKT26nNkEteE6L8i3c+N8kUJOnhEJZzw85fi8ViXcjN1QXl4bim90\n+35a6J2ONE+nHPW+daJEZ0JC95sM23LesqjClJeIIa4wT7QnlaSmkO0TQ6LHoa4kHMZ5Zcn1Uo5K\ncWpmRQO+kBis+yepSQzky1wscBjPhwtp2M4L6/GXUZKtbJJU2kd7RULN7eUWnArJWN2jjBJHPrUA\nX7AeAOBKME4iP1unE9dR5NEcSqWJYRnfiI9tPjc1kVyIpOuapOZSqdO5W5VigM8764iILpKxubM4\nh4EWyl2pEpEiXQM05MM9syG8pUprTVMc3742kUqvznMkFXREF7+htZMaUPvI9YkUOtAkRtr6E/xu\nX5MYXYvKHbE5rzFh5V4d0R+4jufn7ZNxZ+vqiSfKGYexcEI0xlCMcRpXpvl8ddXI5q475og9sqR/\n8/Uc54Q5dwvni+R7QnGM4T7ur63FoqmGEw5HjTzn80zUtci9xyscmuthcQAY2MSI/wMZahVNKZHW\n9wSZdaA6I30a04H1C1fS6H1mQlKUr76Oz/sn/4FxEI9tkZiJ7fHv2m1llaKxlHxJ5r37Y7fb5+aP\nSW3y0/dQ+yj+9vvs4wqtTRw/xSj6a3Jk/Gn9O9Kfy3gVd0b6p3LovPDyPhq9bz0t+ydZTmeBeXW/\nvt5gNA0DAwMDg1nj0tU0oiJehx1FmKaRHaT0mInxfDglEu6KRYzovef2WwEA3/ihpEA+cJgcZpbO\nC1VVTh55YIgFgqJJkUicRZqUkvf09MS7HEuQ0TYIZ2p2n/dX649HHdpObo7m9PVlEo4azNM5rCxH\nBHzQy7GntKaRdkjDScwsGQ8My3UTFlNX18RFaizOp42oPcq+T7mFo79iMYsA3XbjNQCArz0qEbet\nk4zGztPujoVbOZ+Rgxx/PKUl
RkdRKVdKBu6ekHNei32JerTLcT61i5SXUqsa0HaaAKXprDwh2PNj\nwkt7izjX42HRIv2O9clzRGDHk6JFuQvJ8ydzKEU60VMg2s+uDKXgJbr295p5XKPjbvpC1vSJlFl1\nFSP4779GtIVnfiguwof6KRmXn5dI6tYOSqrz0nS59W6QdQm2sI91CRnPTyHaUhBMoz1VLRrj4l66\nqA7EKXV358p3g1u5R4p0BHj1oERTn3JT6+yvEpvDMnA9j5ZTEzrbK/vAW8z1+1k+7+dE8QZZy0Nf\nOmq3TXxSNK3Gf+EYgrfQfbfltOzpTRna3FZ9QrSooaW69nro2/a5xDHJgTXP2my3bbuLWcXmnZdc\nVo3dHP8j18s6t71X5uaKZw7Y58bvkvnfev9pu23PIv5+9ARkL67spg0pcqNkCNiun9mbBmgz8099\nXMY9v8VuWzdKl9z+gGgiU46Qgoqj0/XMP4zZwmgaBgYGBgazhnlpGBgYGBjMGpcsPTU8pA1GaRqN\np11ZnZX2MilSCXXVYrR7j6ZQAGDxfHHXW7VcVM/9B0mnVFWIO+LH7vsdu+3MWaqGB4+LgW1igsbh\neFzosOn05tEE3UGne5V09C/hcL/1ahdfXy4NrQ310r9pGsvtIrXh0TKBVzki0x3R4R7NmilHG9xv\ndgmexrk+iQYOBKkOu9LtAIAISG8EJxxG+hqhR+68hca/hiZRudcsFqPc6YMswVayXAx4H1j1Ebut\no5aJE893imG0ZYQRzIm4GFgjLqEBklN0RPDUCS0zOUpKItjviF7XnpjlozQsL14shlhPmewLFSK1\nU1GpXSBHuGeyU0wA6MvI2k5FHN4EiZkjmEf03piXs8huS24Tg/DrjbxnnttR1U5nwb6/mhRi66AY\nYpcWSVTx3slX7XM5YaHLtn6CPq0nQ+xP/StChfjSpAOtu+X42iNCW0yOMNX4sK5h3+NiEtCy03QH\nXXij7IOnT/B+d2cLBdP+Htk/I2VM3b70jKRzHxxihHlFDqmv7HHZi8WRN+y22j5Zq6/gzfA3yv7/\nvXuYRv2Rv5W9cH4J6a2D/VyvRR6hsu78r9fbbdvGdJaDBpmn8ny6z86rkbkev3rYbvP+D+6nsmvE\nCJ/scKSG1676nSEZy3e20gh99YDcf+hq0l39S/fbx+kfSx8aP8X9UNojvz/dKenLq34+ex8tk6SV\nkb10k443cbxvFAsd97lzdDvedyOrGs4WRtMwMDAwMJg1LllNYzoQzh+k5On1TtfYpiHT7TD83nWn\nvL0v27DCbpt2eK2olLe+2xE8V1EihqX1K2m8u2I9JcdrBsSoNj5GSWd8QiTs4SGROOIOTaOlVSS/\nV3bTmJdwFGmCTutdVV5kNzU1ipEvSxt9o0mOx+WeTsNO46GzwJOVTOl7UHJ25s5yIuWXeQxl01Dn\nzdHzmaL2kaqkVP/+az4HAFjQQDdHvyVaUlWtNliXOdwpS0RCunwT27YWschP37CM41gfpSvfhEj3\nXQMyx+F+GqYnMtL21B72OZXmli8I6z5UcP2W12gD+JSsj1L8fLpbjOLuUrrJpp2OBeUyt70nHbJY\nZmbHgnmN2v37BfatsFmkwlgRJcH83Qz8unqNOGjUOHIH5RSKoXNfsbTVT75snwsUyfredjtTi1fu\nYz3s5Pp6ucdZjgcQo+uZ938AAJB3nsF0h8+IFD/USueR7mLmBhuOSP+XxqjZLqgVrSN7SuYuGlls\nn+sYkD3V4Ui/jgFee71OmZ52OH4EfDMHSz71Pfne6RCZgLpbZG2qV9OxZcWBx3itK0XDaAkzIBDt\nvwAAXLtUJPBHX6H76tjwdrnGEtZhD3yB7sfRPtEKgyf+gtdrEyeL8o3yfyXea5+qCYpW2JGh5har\npstx45WyVudGvmq3De34UwCAL+8nAID733OTfe7vD4hR/sPFzD92bE+9ffzZlaKhf6uR61PoZf
Dz\nbDEnmoZSyq2UOqSU+pn+u0EptVcpdU4p9YhSyqfb/frvc/p8/Vz018DAwMBAMFf01OcAOAuKfgnA\nVyzLagQwCuDjuv3jAEZ1+1f05wwMDAwM5ggXnJ5SStUAuBXA3wP4YyUJkq4B8CH9kYcA/C2ArwO4\nQx8DwGMAvqqUUtYv5xefAf6gUAV5uY5o4JCouTGHb/mieaR6rrtGVM2coCNKW8ciWNNmaosR2gvq\nxNBY5MiV5HPMaGChGBXTScYOpH+p6z7F623fLdTOzv1UsacSVM3dPqGbEmleLx7TUco6ujnbRzou\nE5B7xR30k3LQUx6/yAzuCCmUVHqGhFkAanUMwfwpGo07XfLZeJR+6UtraNRcc6MY8LIzjmp6WTqi\nd1j64XcY+hsr5LtBryOq18OxNlWJ331uSb3dljelKYy0fCdYwejuV14TauX5XuZq6mtx1Asf07l/\nfPzOeFLolrqUjHPIEUE+Vid7pTxKY64LpFMigzKfZQVsy+ueuTJafY/QEO5x0nnJQhlfrIV7dmqB\no0rcdZLzaMiR1Kz6qNwrq01oqsBZ5kUau0piArZ10tB7y5WkT4+1SozIdRs5J8+OyveXB2UPehbT\nsFxYLfvp2antdttkKb87mC207tobuQdfXCx010dSMtfZocvtc5lqMfqW+khHdi2lk0dLROdV66Zh\nfeLumeNe7t0sDhUvn6yx2/brvFyeva/YbT1MJYXPb5D9mewmpeffJPPzVItQhK0TpOcKr5LfhwZH\nFL8qItUTL5ZncMcmUj7X7xQHgLoFQrN6XmOlv5Zqmaf2Hka5r+/g8emVTwEAbu8nHVaxUu6XFZVn\nJezh/H90tVBpr0SZfr0iyP5v0xkVxk7z92p5XNOrTGH1lpgLTeP/AvhvgJ0psBjAmGVZ0yPpBjBd\nL7IaQBcA6PPj+vNvglLqU+9khw0MDAwMBBdU01BK3QZg0LKsA0qprW/XdS3LelAp9a/OtuyADK0g\nn5KLW4khyOfju/K2G+nu1rhADLFTQ3xTT7vsjodEAnBmkV20SN7sQUchmrQjwnxaCJ0YpbTp07mk\nsrPlOglH7p6x8R59T7aVFPLifq0ZjA5T2jrbJhHAKxaKVJZO8btpbWRPwhGh7PC+ne5LIEjJPp2e\nWYkLekSCK8ynO9+pdq2h5PP6l9/A6O9580XyVH10o8yk9fzpbKLWEKX8jTUiedU5clUNhHntQZ1l\nNRx3GEYLtKwREyNsbz+1nsCoGLNLWxklnV1GQ6pb5/06m+R8dhwRCTPnBvlcXop5sPK9omn4euiG\nGatgXwohzhI+FyW5gULWy3biqE+Mn4vK6Hiw85ysh3855aL766nZBRvE6cEa4Xg6RsUAviNfMrC+\nlNxtn/s/1r0AgLscdbwmDtF9dXShdiM/TmN8ZFC0j9K1OjLeUVTr/AK577GperstL8GxliZl/3Yf\npOF9eZ9o29aNMg/BEDWh7lHZz5mN1PzzOpiRt2Gx3Hs4RcN/wRMzy7nHRmUfvd7Nn7Rlr8rc5k5w\nAqqvpCbcNV1gbYkjk/B++X5ZRCT0cBWN5FshY2nrYX9Hz/DZrsuIq/7H2unIcnC95L9yaweY+oUO\n9/Qh0QTzIzS2xwtYkOnmJ0Xr+VYBn88/rBBje3Se/K6lu+nOv2ya2ejnb9DkRL19vCggrsI167i/\nOg8sx6+LC01PbQZwu1LqFgABAHkA/hlAgVLKo7WJGgDT5v8eALUAupW4seQDGPnVyxoYGBgYXAhc\nUHrKsqw/tyyrxrKsegAfAPCyZVn3AngFwD36Y/cDeEofP63/hj7/8mzsGQYGBgYG7wwuljiNLwD4\nsVLqfwA4BGA63++3AHxPKXUOQAjyopkVSkuFSigrouoJHZ9RV016Ye1S+uhHJ0QdTiWp8sViQsEM\n9YvxM+ih4TonTwyWsTTVwXiUdEU6oT+bICeUnS10k286B7
ljBTx+uVdOFg3TlzXTcFleIvTHo4++\nZrcdOyl0zJZNksTO7+L9LZ0cMfnvGLfj8fivtDkLQDmhlshYfC0Oo/p5UbmXzKfhdms14wpcg0IH\npgO8f6JD6I/eHlGVHewbVIlQYK01DhX+NOWarBHpg2uCxmOPV+YnEJLPubJohN1RLtcZzeP6bFrH\nOIHGcqFlvvf4EbvtVJec3+STNVMJ9t09Kfcd8XA9cwfq7eNkUK4TCdAYm5kg1ebE2JREUtdnaBg9\nosdQXUJ69ISH1EzziBhYizppOC7IFYrjsgIdR1PgcITIl+NdBZyv6AnSgeW5evL7HbEPQZmzbE2r\ndZ8gvRZs17ElihTghhxer2iNxDY8dugFuy2m6c7vtF4BALh2HiPcm4qk76P7LrPbzi7gfi96Vqif\nqmU/ttsG+0kvORGaL0kP/zzE/n5khczj1WHGXeXX0RGksleSN05O3Mbra7p3F+QZ6/Pw92E0Lvco\nnuTvwxWLub57T34DALDqnivstpJOvY/08xM5QLqzKyLxS7EzTGJYWc3U6PWL5Lfr7hImLHT/4m75\n7l0S27LkEH8r9leIc8WizRzvkidIZQ7oGKbccSZdnVo1PZZmzBZz9tKwLGs7gO36+DyADTN8Jgbg\nfb/cbmBgYGAwNzBpRAwMDAwMZo2LhZ5621FaKN4F+Y44DY9+RTbWs2ZwRTH5kdEh7X/vSDOSTola\nWV0hdFcWnaegdJ0KfhqYcniTeNNyw7xC1gtI62p/iYSu9+0j/VFRoROULWD+/0ULmFZg83rxEz9+\nhPTAyRY57hwSNXNFk+Neup6HO02VPO2oBTFdIdCZIDGdnrlewdKY0Aw7ckntpVISn7m2hr73ATe9\nq6aGhYIKw1EtL0s8VpoWimr+s5fo06684v/g7iNdMj7E+QnkSB9K5pH+GTsr9IBX17UocHhCLQvI\n+Obl05tndRPXe3Wd0Ain3yB9dXhIkgae6BKvmTXZ9G4aS6T1vUhZJBw++xO62LXVSQoiq85RhdCB\njzUIdfeQIw1Hnlv6Ma+daS/C19FzZlKnxIk3Mi72wO583ad6AEBhFddyICQUWLKXfbihqtM+Pq09\njlLzf2S3lbaIN2HNOaFEusYYxxTdKF4+JQn6/i847UiQuVQ859adI722PyLj+8zV4r8yXWESAMKv\nC8122JGkMHeIMRnRIpnnzmLKtpPZvLYT9d1SZ+JbV5PqWfqM9O2y1RxzZ9mH7GN/Wp6dxHmOIbRJ\n1njBc18GAJQl/9o+d+NSuc7Texz9uYLjKSoS76UThxyVHSvFo8q/U7zacis5voISnVxyFZ8py0FX\nWueEZqr8AWuuDN35sLQNCaWWuP779rkNXeIJ9cY+pmV5vYJ+Q6sysq8iC9nn6taN+HVhNA0DAwMD\ng1njktU0fDrvd2UFU0EX6gpzDbXUNPL91ESSaZEQ0qCx26XrbG/YIG/x555nJaxQSKTF0Cg/H52i\nYSpLG4D9PibRm3b9SupEdl5HTXGvjgHxKH6+tpyaRn2VSH3vuY3V3L758DMAgL0HJSX7wnqem9Yv\npsKUNN2O1Odet9wnp9BRV9vhBOBEIl+k+/oEfcrL8kXidQdpDM2q4JbSWeCR7+F4vAGZ+/Ia6V12\nAaWyVFik0tE+GvNHMkxDXeGrBwDE8nm/yAI5LojJd8aL6+1zMV1zvKGcklqDj6m8l6wUmenKrfTF\n//FPxDh+/rBonY130bAf1FJzwt1ut0XDnLv8LOmLeyGNtUNd9Nl3YrRVpL7wPCbQK14ue610AWOL\nPn8T99aLZ0QDjWRTc0svl3tmxkVadYWZar66XKK6i/M45h2nqGn2B2Uvr0lxTds3yfrND8kY5i9i\nnMa5IdE+yjq5J72rqdnlj0hfyu+hIbb9Z2JQdu2TcRzzU7O8qkLG09jviB1JMGYksFyejQVJznFP\nxcz783DuIQDA5m/QaP
zwtaKFZZ5jbMi9zYyD+M4uSfZXvI6S901a83tkjTxXOx+kZlB6TvbL0r+6\n2m5rASshHnWLthPspONBT0w0n5WFkkww9+wd9rk12QcBAJ/bwYjvP/wk52coLLFHZz/BNVODoo0f\nHxCN432Fd9nnvDrleeUXD9lta79LU/EzQdlXNX1MWtm2Yjr7BOPV3gpG0zAwMDAwmDXMS8PAwMDA\nYNa4ZOmpvDxRxWpraDQtLhJ1tarMUU0txfemS9NSace71B8UY111ldBJdbU0DLZ1SCqIxUt5vWTC\nUT9Bx124HAkDM9oQHo0Kd1PkY2WtAV1tMOqoH5AVpJFuNCSUyZJFNBTXNcj4DhwTqqh5BamD4hxt\nqE+xT85KhtO1NaodNQpiCadZn3CnRG3PbSTVlHVEaIPFTbxnTpBGZZdbG2ktGuKTSZmLxauEElk8\nj4bz3sNCsdWt4JjzCmjYH40JLeOf4P1UVNo6dcqUvHyONdEjhs6eEdJTKpvjO3VM1nntGt6v5Yys\nx3OnhU5Y1k3qoiIjYx9oI6VW6qBTegplfUqLud6x9Mz1SXIbhKZbf4xVIp+KSi2HtCOBZMsP+f38\nZc/KNR3G87qM7L2henEsuCmH/X09S46vyKOxviyHhs8hj9CrHWvoKHBTUqiNnw/JnNzVyLiHrBZx\nOjiTJgV2mYcxDuHfk++W/yON1VsaJH7oode3AQA+k896Eo+vFSpmQ4JrnO0+bB/vHhMKb3kx+1Cx\nm1SVE9UV9QAAz12ke++Q6YK1idUMn61jlU1vlvz8bS57wG5rPyIUk3W5zEnzKSYPPblK9k7XXq7J\np9sdTiS3SAqUyY/R8N53UuJO0pY4XXzsMsZ1HH9Oxl1Syz1kPc3fq/n3yHVCZ5n2w5Urc7FGf2Xn\nEsa9qFPtAIC7n2f/TrUzuWbnZfLsV/pIgS3br/cqc1q+JYymYWBgYGAwa1yymkZoSiSAfYfozldb\nIZJXUyO1hfEYXUwz2krt9lAyjoTE0Ge55I28fgONprvekGtf7kgIV8BgXYRGRXMYn6L0PTXtaquj\ntaOONOdHj0mkZk09DaFpHxMQtvWJBJcXZE3n1avEYLljt6SZHp6kVJmVI+NwCruJJDWFtE4DjjEa\nO6OOvjoxWij9HjvK6Ok6nRK+uJCSUNsgpfb0mEjMlaWUjEcicj7oEuP8ysWcz6MnxJVzaZfDOaCW\n3x2PivtpDqg5JHpFkkpViIZQdpZa395ukaQb6xzSuiO5YmRCjJSlMd5vuq55716Zz/FBuiwWlMh4\n83K4Pl0pSsnePNlLnvOsEz8Q41o50TMin91Zw/Wo3iBS6XuSlPzzwPHsGJS9mJ/iNYv1po3GZA17\nFtN9c98JEUff43D57ih1RPQXSErwiRae36FTzVuR5wAArx/9gn0u+ogY7UsrKO2PVzpcYH8gEuyC\nYkbd7+4RI3cqLqnBD22gYX9NvXx3+Ag165L5HHt8SAzEfbHtdttr/pkrIQ51iMNEutBhNL5ajyWP\nUfXZfUyT3zQm8xM+S8eFHUnRfFd5ZN73L+fvQ+G43Pt38ihrT1xzLft+XJwe/AlGhNeOyhyHW+W7\nD97CvegPSv9WD9HNtuh+Xjt8XBwK/GV8JneVtgMA9u0Rg/i6E9T09i+TZ+HxY9TEj9XSkWNLvlx7\nfJAuuWdqpK/3YPYwmoaBgYGBwaxhXhoGBgYGBrPGJUtPnWsV9e21XYwQvflqUVOLCkgJJWJU1906\nGaEvQLrC0vUxlEtogOXLGa378i6haro0DQIAxSucRnGhVmIJJuYdHpfrDYWERjoZY1H5qE52uG7t\nUkefHMkG7VoXVJmXLBLD2Zlzona3tdHwVa+plmiCxrdYin1x6Yj2aIT0gCO34pvgOiA0zQ/3M9nZ\nTWskJsRXRErKG2a0cLhBz2M/jf0VWTInqQmh3VZvZD7/1463yxgclfHqU4yOt9rE6BfJ
YxLKqUkZ\nw4D2jT+aQ8Nglk5AuWQ5/fRHHBHe7phO6FhCymOjNt6fHZE+t/dyQiqWito/MkBKRoUYNzBpyVqp\nbMpiicTMEfbbC54HABz6Bg2/922Sfu6+mXTh9RGOZ9WrYjTvXcI9c+QaoQaXnBZetCvvZn4+8x0A\nwMlKUmAx67R93PGaJKm7bAvpplfPiWNBTlhiGM60/cA+V7i1HQCwpmm93ZY7zr5Wt8tzoN7P+dkw\nT6ifeEo+l9xFI2xvo/Q5liSd0uZI6IdrxXgefJT9yy5mjI4Ty3xCNR1/jvzwlitlrTuXsa20n3E/\nyU1iVO74Lp/f238s8733R0IxNrzB5+/MuOznPys+aLfdMcCf0MvSsgdfGiZ96p6UuIyFBbJP9n39\nQfvcpjKJl9hYyuejP8T9PuGT35Iz6xxpKJ6SuJEzJ8Xo3uSlof7KLbIXRtZzf9T8mM/7Pk3h/tkS\n0pHbx7W3gF1h+61hNA0DAwMDg1njktU0ThwVF9SAoyb26hUiIfk9lDx8OdQ0pivZud2UzFLaRXTa\nNl5Rzrd0RrvStrbwzV5bTEPfxLhIP9lZdCsdGxJN4+UXxUDodtSYvuYaid4sLqLkkYo78thMd8LF\nPhdmS18XVIvG0dvBCN1Es/68w1DvAaVqn09L3c5qff9OGvXXjoiGkWtxy8zfKJJxSbFDEvLSCFqs\nazxnNdI4nwiL1hPJiEQbnHBoKbqyYusxRx12R/R3xqtdbmPM/TQ2JXP/8k9EMkyWUKu660bJiVXt\nSBfe7SgxXZrUUeRJ5ndClmgxdfPlHsPHeuxTmfp6AEAgh/tnIp+ahidLjtOOCGYVmXk+S/p1doIl\nXOtCLcEn/XS7fKaTjgdbt8g40j5HNb8BuWdgRDTNRZVMLf68dnoobKFLtaqm5tO0QI5rQtR2sprk\nHmff2AsAyD6y3z5Xer9IxrcHKEnvmuTcvlEuUvK1ndRchzPyvGwakT7vW8nob1+bSM3r51EzGYgz\ngtn7kuzVySV0hti0To/9S3gTJneK5rpkA++9e0r24sJnuf+Svdzr/VNSGDuwhfeffFrG2zwuWvpr\nH2P0/eWviEtx2RQ1k7Sf+/2JtvsAAAWur9pt824VDWvX1+T5SaxnNopzi0Rmr3mMD+jug4xAv8wl\ne+eOF7gHX+wSR4eJ35M9ktXGteiKS1vVcT4DmeV0521KiQPQnl4yLe3uG/HrwmgaBgYGBgazxiWr\naUyOikh55+032W2b1okLn9/LN7ulKMVMB7vF4pTGPBGZIrfSUxXnd8tKxPVypJ+SR18P3TF9+j5J\nN7/TflYk45iuW9zQQEmzVBeMSkV5f7djhTJB0QymM+QCgIJIUWU6mPF0iJxxX49IHLXVjoCoAF0S\n3W6RZFyKskPacW0nRovgDMcAACAASURBVCZF47nhDvLZt60TCdZvUXLJFPP640G5/1SM2pI7Jf0t\n8oiGMRnh3OW5RCOLDDN3z6kJhzumzvPkLSEvPbZD5tOVEom7KYsup3l+uf9YktJ+ccThIpurXRAn\nuAeCXulrSb9M/O4otc7+QSloU+yo65XnCNwsnBJJri/BRatO0GXaicF94jbrK7jSbltYL+M/Hmad\n7GXV1BzUedG6Bprp0pv/hvDeeUul34lW2pSa6mXuzowxK/Kny6klDWl38s5Avd1WsFM4ffeISMaF\n/4VaT32+PB/bTtCNM+MoolWb2w4AaG+lCza0ltleKTaxwfPcH4vLRJM4GuUzULPmeft4cqGM3VdL\nG9+hJ6gVOXFkswQt3jRI99VGJWMNNXEMyzzMBdanhI3wbucgCv5ICkJ9M/5tAMCWI5yvXdslA3LQ\nyzVbup4sQnep7Ft37q12W+CfHwcAJEtF4t+U+2H73Pg52ft7m7g+16/gfus6LPa+4Tb2b6unHQCQ\n1yP/TzxB+8rK+/4eAJC65Xt2W3rHVvvYk5Lfj56M
Y7/3PKmPPoXZwmgaBgYGBgazhnlpGBgYGBjM\nGualYWBgYGAwa1yyNo1N64UPLKqi50h4UjwZYg4PopTDppGxxLMi5ahkl7F00kGdrM4N+vlff42E\n4Hd2kJOsLCXHWak9rfoH6L2RnyueQbfcLLxobTVtILk5cm2Pw3vL43MuUUqfp8eKW1cHLMmRtslJ\nugcdOyI++TUlK+02X5Bjny6t4fPSWwm+mRPsLdkq3iTZFeSre/pkvnJyaHeIx+mJkkqK3SK731Gf\nRM9PPC5zkiniuSs/sBUA0NtF3/PFDk+i8gXSt75ByjqppWIz2LxePHcWNNHbqtgtcxsaoYdasYve\nJFG9zkUe+sa7dUqRQKVOQXGIa/vqEdkrH6qgx0xWgJx32Cvni/y0mwy7Zg58qVgj+3NLhp53fcXS\nt1X9TF3immKyvo6kzG1+aIHddtovthz/iNhTJrOZlqWxSbzCXN30WAo7amsUxiXu49AAbUg+vWdv\nrJE6DfmgjWD1Tln7l97HPelrYV8ycYmvCV9Pb7Ts43K9kcXC30/9nGs3miMJ+RYsYpqPuqPk78+n\nZG5yk4zdKKuk/cOJ+S2yD6aW0TspkS/7aP0wPdzORrnWWCcxJKXN2+2mQ6/cCQC4qf6jAIAcH8/d\nfPsHAQAvttKbqXqScS/jQalc+J5WPu8/yIg3mKdZxsonG2gbld+P9zXQw7Oln3O3zi/f8XyQc7Lr\nu2JzSW+QJIwFEdbG6I5LepCRbdfZbdl1jHmKnpd5rNnBWLM7VjqMUrOE0TQMDAwMDGaNS1bTmFev\noyMn6WnT3ytSuHK4zicsSoLTHtzK4U0EHeVblC2SXDrDc4XTMQQ1lL79TkldC/WFhdQmNl8hsQ3B\nLC2hph2eDAG3viWvEcymJGDX9E5Qcg5o7cNdJv2sraP3UFqnRFcO7Sjop1QTjYoElnSkLo9GZo5g\nXqU1glAHvV/CFeKpk2ljW7eLslSOjsUYzuH1E7tky1U2yJxl51LTqPCLJDSS75DHfPRDr4NIkf46\nR3r2oEiOqWppy+qgJBptEIm9rJjSdbWbi98Xk+NImt5lVRCPmpV50nbAR80xzydrnw5zzSZzHDEe\n2pGqzEttL+6YWyc2lMl+OpLHtN2JXSI1JuY5/OhH2u3j/Ixca7CP3lMLwjJ/eZMyxiM1jOuo75S4\ngoHF9P0PJ/nIh8KyPxavZkT2RKesZTAo4x6upeb4mNZmbzzHhIkHK6jZVSo5XmZRUzqdLZrSokmR\n6mNFzCiQ5xLPLyvJLAq917L+eekxkdjHqskWjI7NPJ+p+TJnda2s3HeoVMZ1dKDPbltey/6+NvBz\nAECBr9lu8+mUCDsnpKrhfZNMpX5IiUZWWcf91Of4Pfi0W+qJt9f+ud22tVjm7/DNsp/u+Rbnc0Gb\neD7tz+b4ShyeTWcsuc/N32TS1d23y6/U0pikGJxoftQ+93StrMvtP6f20Rjj8zVym3ihle6mh+W+\nRfIM0Mf0rWE0DQMDAwODWcO8NAwMDAwMZo1Llp7KzxE1NO0IvkpFdC0LR9oMH35V3fV6HTUnNCUU\nz4jannJk9CvOkenLD7I+R9JBN6WUXNsX5D3yIepgULdFxtm/HJ82oHq5LN4g1cvRMaGTUhlezx2Q\nvgaV9Ov6q9dwIEn5XDDHkeTNQ2rLFxSVPuMouJGT40gJ4kBOrqjKQTdV2+4JoStcHo7BE2PkWzIo\nn63wM0BtpEEoK0unKxkPkTqAkntfV8Y+hEI8bs2WcbhdDueFEjmfHhAqoriA1I3fJfOYdpRg8BY5\n5qJbDPi5PtIp6VA9ACBaJDTdFbcyoWKhpu5Ky0gBjvm5V/x+oUInQTqyODxzcF9UT9PmNPtzskIM\ntxPjjlQtv6Bhd+J+MbquGKahPd4s1Ma+QzqdSJxG46oCoUQ2u1ijYl/Hdvu4fo3MmTrHPdFwmVyn\n55jMsTeb+29J
hxjZw4U0nGOIhvypVqnG+L2sdrut0SsG8JVN0q+cMs5napcE5OVkM6Cv4XUmQNy5\nQIy+Xce4v/6oUfr6MbwZyTbZnz8ZZtU8V5uMr6GSzgpP3ETDfuA1eT66zzKYssMr56/XyS63F7FK\n4QY915PsIiJTt9jH3XrvL83iM/BsWvZCo65/sWcLjf5TuhpowQuv2G3LPkjHgtGFspefeZX0adGk\nnI9E5dkqeux++1xJo9BdR7JIj46d4W9T+dA/AQD+bQX3b/05qQdy0w2YNYymYWBgYGAwayjLst76\nU+8CKKWsS2UsFwOUUjDz+fbBzOfbCzOf7yz0/M7of280DQMDAwODWcO8NAwMDAwMZg3z0jAwMDAw\nmDUuGe+pQCAwoJQqf+tPGswGfr8fSs2cUsTg14eZz7cXZj7fWQQCjkpbv4RLxhA+jebmZmv//v1v\n/UEDAwMDAxtKqQOWZTW/1ecMPWVgYGBgMGuYl4aBgYGBwaxxQV8aSqmAUmqfUuqIUuqEUurvdHuD\nUmqvUuqcUuoRpZRPt/v13+f0+foL2V8DAwMDgzfjQmsacQDXWJa1CsBqADcppTYB+BKAr1iW1Qhg\nFMDH9ec/DmBUt39Ff87AwMDAYI5wQV8almC6IopX/7MAXAPgMd3+EIA79fEd+m/o89cq4zJhYGBg\nMGe44DYNpZRbKXUYwCCAbQBaAYxZljVdlKEbwHS5smoAXQCgz48DKMYvQSn1KaXUfqXU/qGhoXd6\nCAYGBga/tbjgLw3LstKWZa0GUANgA4DFb/GV2VzzQcuymi3Lai4tLX3rLxgYGBgY/KcwZ95TlmWN\nAXgFwGUACpRS04GGNQCmi/D2AFKuTZ/PBzACAwMDA4M5wYX2nipVShXo4yCA6wGcgrw87tEfux/A\nU/r4af039PmXTSpbAwMDg7nDhdY0KgG8opQ6CuANANssy/oZgC8A+GOl1DmIzeJb+vPfAlCs2/8Y\nwJ9d4P4aGBgY/Eb4yle+MtddeFth0ogYGBgYvEPo7u7G8uXLceLECVRXV7/1F+YQJo2IgYGBwRzj\ngQcewPj4OB544IG57srbhksmy62BgYHBXOOLX/winn32WQQCAQBAT4/49Dz66KPYtWsXACAWi+GW\nW27BX//1X89ZP38TGHrKwMDA4G3C2NgYrr/+evxHv0HNzc3Ytm0bCgoKLmDP3hqGnjIwMDC4wCgo\nKMC2bdvQ3Dzzb+/F+sL4dWBeGgYGBgZvIyxvFup/9x/gKXyz4bupqeld/8IAzEvDwMDA4G1Dy8Ak\n7vjaLuw+3QskI3B7PFi4cCE8Hg8mJiYwNTU11138jWFeGgYGBgZvA1440Y/3fm0XOkMRBM5uQ57f\njT/6/Odx7NgxfP7zn0c6ncbXvva1ue7mbwzjPWVgYGDwG8CyLPzLy+fwf7a1AABuX1WFkuwluOMf\n/xSrVq0CAHz5y1/Gfffdh+eee24uu/q2YNYvDZ2S/HoAmwCU6+YBALsBvGjSexgYGPy2IZJI4U9+\ncgTPHuuHUsB/u3Ex/stV86HUml/57KpVq+yXyLsZs3ppKJmBHwNoBJAGMAxAQVJ+eAC0KKU+YFnW\n4XeqowYGBgYXE7pCEXzy4f043T+JXL8H//zB1bhmcflbf/Fdjre0aSilygE8DyAG4BYAuZZlVVmW\nVQkgF8CtABIAnldKlb2TnTUwMDC4GLDn/Aju+NounO6fxPySbDz52c2/FS8MYHaG8D8AEAWwxbKs\n5y3Lik+fsCwrblnWcwCu1J/5/XemmwYGBgYXB763pwP3/dtehKYSuKqpFE9+djMay3LmulsXDLN5\nadwA4AHLsib+vQ/o2hhfB3DT29UxAwMDg4sJiVQGf/HkMfz3nx5HKmPh01fOx7c/sh75Qe9cd+2C\nYjY2jUYAB2fxuQOQFOcGBgYGlxSGw3H83vcPYl97CH6PC1+6eyXuXHNxZ619pz
Cbl0Y+pDb3W2ES\nQN5v1h0DAwODiwvHe8bx6e8dQM9YFBV5ATz44XVYWfPujur+TTCbl4YCMFt3WvUb9MXAwMDgosIz\nR3rxp48dQSyZwdq6AnzjvnUoywvMdbfmFLON03heKZV6m65lYGBgcFEjk7HwT9vO4GuvtAIA3t9c\ng//vzuXwe9xz3LO5x2x+6P/uHe+FgYGBwUWCyVgSf/TIYbx4ahBul8Jf3boEH7m8HhLfbPCWLw3L\nssxLw8DA4LcCbcNT+OTD+3FuMIz8oBcP3LsWmxtL5rpbFxUMpWRgYGAAYEfLEH7/hwcxEUuhqTwH\n3/xwM+YVZ891ty46vOVLQyn1v3+dC1qW9d/+890xMDAwuLCwLAvf2tmGf3j2FDIWcP3Scnzld1Yj\nx29k6pkwm1l5P2bvPWUBMC8NAwODdwViyTT+4sljeOKg1PL+w2sX4vPXLoTLZewX/x5mY9OovwD9\nMDAwMLigGJiI4VPfO4AjXWMIet34p/evwi0rKue6Wxc9ZpOw8AWl1KJfartGKWXIPgMDg3clDnWO\n4j3/shNHusZQXRDE45+53LwwZonZ0FPXQaLCAQBKKTeAbQDWY3bpRQwMDAwuGjx2oBt/8cQxJNIZ\nbGwowgP3rkVxjn+uu/WuwX/W0mMIPwMDg3cVUukM/udzp/GtnW0AgN/dNA9//Z6l8LpN1etfB8Y9\nwMDA4JLHeCSJ3//RQbx2dhgel8IX71iOD22sm+tuvSsx25fGTN5TpryrgYHBRY+zA5P45MP70T4S\nQXG2D1+/bx02NBTNdbfetfhNck+9NFM+KsuyTPU+AwODiwIvnhzA5x85jHA8hWVVeXjww82oLgjO\ndbfe1TC5pwwMDC45WJaFB7a34h9fOAPLAm5bWYkv37MKQZ9JOPibwuSeMjAwuKQQSaTwp48dxc+P\n9kEp4E9vXITf27rAJBx8m2AM4QYGBpcMesai+ORD+3GybwI5fg/++QOrce2S8rnu1iUF89IwMDC4\nJLCvLYTPfP8ARqYSqC/Owr/d34zGsty57tYlB/PSMDAweNfjB3s78DdPnUAqY2HLwhJ89YNrkZ/l\nnetuXZIwLw0DA4N3LZLpDP7umRP4/p5OAMAntzTgCzcthscE7L1juKAzq5SqVUq9opQ6qZQ6oZT6\nnG4vUkptU0qd1f8X6nallPp/SqlzSqmjSqm1F7K/BgYGFy9GwnHc+2978f09nfB5XPin963CX966\n1Lww3mFc6NlNAfivlmUtBbAJwGeVUksB/BmAlyzLWgjgJf03ANwMYKH+9ykAX7/A/TUwMLgIcbJ3\nArd/dRf2tYVQluvHo5++DHevq5nrbv1W4ILSU5Zl9QHo08eTSqlTAKoB3AFgq/7YQwC2A/iCbn/Y\nsiwLwB6lVIFSqlJfx8DA4LcQPz/ahz/5yRFEk2msri3Av/7uOpTnBea6W781mDObhlKqHsAaAHsB\nlDteBP0Apn3kqgF0Ob7Wrdve9NJQSn0Koomgrs7kkzEwuBSRyVj4yost+JeXzwEA7l5bg79/73IE\nvCZg70JiTl4aSqkcAI8D+LxlWRPOoBvLsiyl1K+V18qyrAcBPAgAzc3NJieWgcElhslYEn/0yBG8\neGoALgX85a1L8bHN9SZgbw5wwV8aSikv5IXxA8uyntDNA9O0k1KqEsCgbu8BUOv4eo1uMzAw+C1B\nx8gUPvHQfpwdDCM/6MVXP7QGWxaWznW3fmtxob2nFIBvAThlWdb/cZx6GsD9+vh+AE852j+svag2\nARg39gwDg98e7Dw7jNu/ugtnB8NoLMvBU5/dbF4Yc4wLrWlsBvC7AI4ppQ7rtr8A8L8APKqU+jiA\nDgDv1+eeBXALgHMAIgA+emG7a2BgMBewLAvf3tWOv//5SWQs4LolZfjK76xGbsAE7M01LrT31E78\n+1X/rp3h8xaAz76jnTIwMLioEE+l8ZdPHs
djB7oBAL9/dSP++PomuFzGfnExwESEGxgYXDQYnIjh\n098/gEOdYwh63fjy+1bitpVVc90tAwfMS8PAwOCiwJGuMXzqe/sxMBFHdUEQD354HZZV5c91twx+\nCealYWBgMOd48lA3vvD4MSRSGWxoKMLX712L4hz/XHfLYAaYl4aBgcGcIZ2x8KVfnMaDO84DAO7d\nWIe/ec8y+Dwmf9TFCvPSMDAwmBOMR5L4gx8fwo6WIXhcCn97+zLct2neXHfL4C1gXhoGBgYXHOcG\nw/jkw/vRNjyFomwfvn7vWmycXzzX3TKYBcxLw8DA4ILi5dMD+NyPDmMynsKSyjx888PrUFOYNdfd\nMpglzEvDwMDggsCyLHz91VZ8+fkzsCzg1hWV+PL7ViLLZ36G3k0wq2VgYPCOI5pI4wuPH8XTR3oB\nAH9yQxM+e3WjSTj4LoR5aRgY/P/t3Xt4VdWZx/Hvm4QQLrlwvyQhwSJ4QYIaUdAZmVFbq7aIo1Rb\nBFunjm19Wlqn16e1YqvTGeap7YxitY4lKBV1tFUrtJNatVMDamC4qIhSyeWEcAmQhISEJCdr/tg7\n4eQQwkGSc0l+n+fZT7L3Xmefd2/Cec9ea+21pE/tqm3itsdLebuqnmGpyfzsxnO54qxxJ36hxCUl\nDRHpM6VlB7j9iQ3UNLSQN2oov1xUyNRx6bEOS06BkoaI9InVb1bwg+ffpjXouGTKaB747LlkDU2N\ndVhyipQ0RKRXtQbb+dHv3mXlunIAvnDxZL531RmkJOuBvf5ASUNEes2Bxha+smoj6z7cT2pyEvfO\nn84NhbknfqEkDCUNEekV26rr+eLKUgIHmxiTPpiHbz6f8yaNiHVY0suUNETklK3dWs03nt5MU2uQ\ngpxMHr65kPGZabEOS/qAkoaIfGTt7Y6fvfwB//HyBwBcd2429113DmmDkmMcmfQVJQ0R+UgajrRx\n59Ob+MM7e0gy+N5VZ3LrJZP1wF4/p6QhIietYv9hvriylO17DpGRlsJ/fvY8Lp06JtZhSRQoaYjI\nSSnZUcOXf72R2sOtTBk7nF8uKmTy6GGxDkuiRElDRCLinKOopIwfvbSNYLvjsjPG8rMbZ5KeNijW\noUkUKWmIyAkdaQty12/f4anSSgC+PPdj3PnxaSQnqf1ioFHSEJEe7T3UzJee2MiG8oOkDUpi2fUF\nfKpgYqzDkhjRc/0i0sX999/f+fuWQC3zHnidDeUHmZiZxn/fPkcJY4BT0hCRToFAgKVLl1JVVcXz\nm6q44RfrqK5r5oL8ETx/xyVMz86MdYgSY6qeEpFOy5cvp66ujoV33sPO/E8DcNOsSSz99Nmkpug7\npihpiAxo99xzD2vWrCEtzRvyozIQAOAvf3iRlPQS8kYN45VXYcjbV3HXXXfFMFKJF+aci3UMvaqw\nsNCVlpbGOgyRhFBbW8sVV1xBT/9nCgsLKS4uJisrK4qRSbSZ2QbnXOGJyul+U2SAagu2s66yiZzP\n3kvq+NO7LaOEIeFUPSUywOxvOMLqtypZtb6cXXXNAEy++T72rfpnDuwq7yw3depUJQw5hpKGyACx\nNVDHipIyXtyyi5a2dgBOGz2MRbPzuGgc/N3jR0hJSWHy5Mns3LmT+vp6GhsblTSkC1VPifRjLW3t\nPL+pivnLX+dTD/yFZzcGaA22c9kZY1n5hVn88RuXcsvFk1nx6MMEg0GWLFnC1q1bWbJkCcFgkAcf\nfDDWpyBxRncaIv3Q3vpmnnijgl+/UUFNwxEAMtJSWFCYy82z88gb1XWAwYyMDIqLiykoKABg2bJl\nLFy4kLVr10Y9dolv6j0l0k8459hYcZAVJeWs3VpNW7v3f3vauHQWz8nn2nMnMjRV3xOle5H2norq\nX5CZPQZcA+x1zk33t40EngLygTJggXPuoHkzufwcuAo4DNzinNsYzXhFEkFza5AXNu9i5boy3q6q\nByA5yf
jk9PEsmp3PRaeN1MRI0mui/bVjBfAAsDJk23eAl51zPzGz7/jr3wY+CZzuLxcCD/k/RQSo\nqm3iifXlrH6zgoOHWwEYOSyVGy/IZeFFeUzMGhLjCKU/imrScM792czywzbPA+b6vxcBr+IljXnA\nSufVn603sywzm+Ccq45OtCLxxznHug/3U1RSRvG7e/BroDgnO5PFc/K5ZsYEzc8tfSoeKjjHhSSC\n3cA4//dsoDKkXMDfdkzSMLPbgNsAJk2a1HeRisTI4ZY2nttYxcp1Zby/pwGAQcnGp86ZwKLZ+Zw3\nKUtVUBIV8ZA0OjnnnJmddMu8c+4R4BHwGsJ7PTCRGCmraeTx9eU8XVrJoeY2AMakD+ZzF07is7Mm\nMTYjLcYRykATD0ljT0e1k5lNAPb626uA3JByOf42kX6tvd3x5w/2UVRSxqvv76Ojg+P5eSNYPCef\nK88erxFnJWbiIWm8ACwGfuL/fD5k+x1mthqvAbxO7RnSn9U3t/LshgAr15Wzs6YRgNSUJD5dMJFb\n5uRrLguJC9HucvskXqP3aDMLAD/ESxZPm9mtQDmwwC++Bq+77Q68Lrefj2asItGyY+8hikrKeW5j\ngMaWIAATM9NYODuPGy+YxMhhqTGOUOSoaPeeuuk4uy7rpqwDvtK3EYnERrDd8fK2PRStK+P1Hfs7\nt88+bRSL5+Rx+ZnjSElWFZTEn3ionhIZMGoPt/DUW5U8vr6cwMEmAIYMSmb+edksnp3PtPHpMY5Q\npGdKGiJR8O6ueopKyvjtpiqO+CPM5o0ays0X5XHD+blkDh0U4whFIqOkIdJHWoPt/OGd3awsKefN\nsgOd2y+dOobFc/KYO3UsSUl6tkISi5KGSC/bd+gIq9+sYNUbFeyu9yY5Gj44hevPz2HR7DxOGzM8\nxhGKfHRKGiK9ZFNlLUUlZby0pZqWoFcFNWXscBbPzmP+eTkMH6z/bpL49FcscgqOtAV5aUs1RevK\n2VxZC4AZXHHWOBbPzufiKaM0vIf0K0oaIh/B7rpmVr1RzpNvVlDT0AJA5pBBnSPM5o4cGuMIRfqG\nkoZIhJxzvFV2kKKSMn7/zm6C/hCzZ4xP55Y5+cybmc2QVI0wK/2bkobICTS1BHlhcxUrSsrZVn10\nkqOrz5nA4jn5XJA/QlVQMmAoaYgcR+WBwzyxvpynSiup9Sc5Gj08lZtmTeJzF+YxPlMjzMrAo6Qh\nEsI5x+s79rOipIyX39vTOcJsQY43ydHVMyYwOEVVUDJwKWmIAA1H2vjNxgBF68rZsffoJEfXzJjI\n4jn5zMzNinGEIvFBSUMGtA/3NbByXTnPbghw6Ig3ydG4jMEsvDCPG2dNYkz64BhHKBJflDRkwGlv\nd7z6/l6KSsp57f19ndsvyPcmOfrE2eMZpBFmRbqlpCEDRl1TK8+UeiPMlu8/DMDglCSunZnNojl5\nnD1RkxyJnIiShvQL999/P1//+te73bd99yGK1pXxm41VNLV6kxxlZw1h0ew8FhTmMkKTHIlETElD\nEl4gEGDp0qUsWLCA7OxsANqC7fxx2x6KSspZ9+HRSY4umTKaRbPzuOzMcSRrhFmRk6akIQlv+fLl\n1NXVsXz5cu783g9Z/VYFq9ZXUFXrTXI0NDWZfzgvh8Vz8pgyVpMciZwKJQ1JOPfccw9r1qwhLc17\nuK6qqgqA5Y89zk+feAHnwLW1MuGcOdz1gx9wfWEOGWma5EikN5jreHqpnygsLHSlpaWxDkP6UG1t\nLXMvu5zNGzcct8y06QWUvPYKI0eOiGJkIonLzDY45wpPVE53GhL36ppa2RqoY3Ogls2VtWwO1HLg\nb75J6q4f0LL7g2PKFxYWUlxcTFaWHsgT6W1KGhJXmluDvFtdz5bKWjYH6thcWcuHNY3HlMvMyuKi\n7y7nlX/7IjVVZZ3bp06dqoQh0oeUNCRmgu2OHXsbutxBvFd9iLb2rlWm
qSlJnDUhg5m5WRTkZjIj\nJ4vJo4axa1cVF/xLMykpKUyePJmdO3dSX19PY2OjkoZIH1HSkKhwzlFV28TmyqPVTFur6jjcEuxS\nzgymjhtOQU4WBblZFORkMW18Oqkpxz6h/cADDxAMBlmyZAk//vGP+f73v09RUREPPvgg9913X7RO\nTWRAUdKQPnGgsaUzOWzxq5n2N7YcUy47awgzc7OYkZNJQW4W07MzI55LOyMjg+LiYgoKCgBYtmwZ\nCxcuZO3atb16LiJylHpPySk73NLG21X1bAnUssmvZqo80HRMuRFDB1GQm8WMnCxm+tVMo4drQECR\neKDeU9InWoPtbN99qPPuYXOglvf3HCKsGYIhg5KZnp3RpZopd+QQzXAnkuCUNOS4nHOU7T/ceQex\nJVDH21V1HGlr71IuOck4a3w6BblH7yBOHzucFI0UK9LvKGlIp731zZ3dXDcHvCRR19R6TLn8UUM7\n7x4KcjM5a0ImQ1I1m53IQKCkMUAdau54YO5okqiuaz6m3Jj0wRSEtEHMyMkka6hGhRUZqJQ0BoAj\nbUG2VR862lDtPzAX3gdi+OAUZuRkdjZUF+RmMT4jTe0QItJJSaOfaW93/HVfQ+cdxJZALe9W19Ma\nDHtgLjmJMydmD2jiDAAAChlJREFUUJCT2VnNdNro4SRpuHAR6YGSRgJzzlFd18zmylo2BWrZUlnH\n1qo6Gvy5rjuYwZSxw7tUM50xIZ3BKWqHEJGTo6QRIz3NNHc8tYdb2Byo88dlqmVTZR01DUeOKTcx\nM81rqPYfmjsnO5N0DQ0uIr1ASSMGuptpLlxTS5B3dnVtqO6Y1zpU5pBBfk8mr5ppRm4mY9PT+voU\nRGSAivukYWZXAj8HkoFHnXM/iXFIpyx0prl7772XtmA77+9pYEug1h96o47tew4RDHtibnBKEudk\ne9VLBbleksgbNVQN1SISNXGdNMwsGXgQuAIIAG+Z2QvOuXdjG9nJCZ1pzjlHRaU309wvfvUEjzyz\nhsYjbQRbWxnysUKyLr4JgCSDM8an+yO7etVMU8elM0gPzIlIDMV10gBmATuccx8CmNlqYB6QUEnj\nq1/9Ki+++CKvvfZal+0HqiuACgCG50xlweLbmDUth4LcLM6emMHQ1Hj/5xGRgSauByw0s+uBK51z\n/+iv3wxc6Jy7I6zcbcBt/uo0YHtUA41MMjAVGNrNvsPA+0Cwm30SmdFATayD6Cd0LXtXolzPPOfc\nmBMV6hdfZZ1zjwCPxDqOk2FmpZGMKCmR0fXsPbqWvau/Xc94ryCvAnJD1nP8bSIiEgPxnjTeAk43\ns8lmlgrcCLwQ45hERAasuK6ecs61mdkdwB/w2gQec869E+OwektCVaclAF3P3qNr2bv61fWM64Zw\nERGJL/FePSUiInFESUNERCKmpBFlZnalmW03sx1m9p1Yx5PozOwxM9trZm/HOpZEZ2a5ZvaKmb1r\nZu+Y2ddiHVMiM7M0M3vTzDb713NprGPqDWrTiCJ/WJT3CRkWBbgp0YZFiSdm9rdAA7DSOTc91vEk\nMjObAExwzm00s3RgA3Ct/j4/GvMGhRvmnGsws0HAX4CvOefWxzi0U6I7jejqHBbFOdcCdAyLIh+R\nc+7PwIFYx9EfOOeqnXMb/d8PAduA7odhlhNyngZ/dZC/JPy3dCWN6MoGKkPWA+g/pcQhM8sHzgXe\niG0kic3Mks1sE7AXKHbOJfz1VNIQkS7MbDjwLLDEOVcf63gSmXMu6JybiTeaxSwzS/gqVCWN6NKw\nKBLX/Lr3Z4FVzrnnYh1Pf+GcqwVeAa6MdSynSkkjujQsisQtv+H2v4BtzrmfxjqeRGdmY8wsy/99\nCF4HmPdiG9WpU9KIIudcG9AxLMo24Ol+NCxKTJjZk8A6YJqZBczs1ljHlMAuBm4G/t7MNvnLVbEO\nKoFNAF4xsy14XxiLnXO/i3FMp0xd
bkVEJGK60xARkYgpaYiISMSUNEREJGJKGiIiEjElDRERiZiS\nhsSUmU03M2dmc0O2OX/GxkiP8a3Q1/dSXHP9OBL+CV4zu9vMamL4/mVm9u+xen/pXUoaEo9mA8+c\nRPlvAXP7JhQRCRXXc4TLwBTLoaP9p6IHx+r9ReKd7jQkqszsy2ZWaWaNZvYi3lOz4WW6VE+Z2SVm\n9r9mVu8vm8zsBn9fGTAK+KH/OudXLeX7v18TduwVZlYasn63mdX47/EW0AzcEPKSiWb2Oz/eCjO7\nPex4s83sBTOr9stsMrPPhZW5xY/lHDMr9su9Z2bXdXPu8/2Je5rMbL+ZrTGzvJD9083sJTM75C/P\nmNn4iC5+1/cZaWaPmNkeM2s2sxIzuzBk/6tmdszdnpkt86+D+etpZvZv/r/pEX/CIT1F3o8paUjU\nmNk84EHgd8B1wFbgsRO8JsMv/yHwD8D1wONAll9kPlCHN2bSbH/ZeJKhDQWKgEfxBpR7M2TffwFb\n/HjXAA+FJaI84HXgVuBTeIP9/crMburmfX6NN9bYfOADYLWZ5YSc683Ac8BfgQXA5/Em7Rrj75/i\nv1casBC4BTgbeLHjQzwSZjYY+CNwOfBN4FpgH/DHkAT0FHCVmQ0LeZ35cT3tjg4l8d9+HPf55/8W\n8IKZzYw0HkkwzjktWqKy4H0Yrw3b9ku8iWnmhmxzwB3+74X+enoPx60B7g7blu+/7pqw7SuA0pD1\nu/1y88LKzfW3PxK2vRhYf5w4DK/K92HgTyHbb/GP9YWQbaOANuB2fz0Jb8Tj53o4z8eB7UBqyLbT\ngSBwdQ+vuxuoCVm/FWgBTg/ZloKXrJb562P8+G4MKTPbP49Cf/0yf/3SsPf7M/BMyHoZ8O+x/vvT\n0juL7jQkKswsBTgPeD5s14mG3/4r3nSuvzazeR2jhvYyB6w9zr7fhK0/B5xv3tS9mNkIM/sPMysH\nWv3lNmBqN8f6n843dG4/3sQ8HXca04CJwK96iPNyP552M0vxr+lOvA/lwh5e191xNgA7Q44D8FrH\ncZxz+4A/AZ8Jed1ngL8650pDjrMbeL3jOP6xXj7JeCSBKGlItIwGkvE+KEOFr3fhnDuIN6T0IOBp\nYJ9fp39aL8Z20HnT73anu3hT8M4HvDuXzwDLgI8DF+BVuaV1c6zasPWWkHKj/J/VPcQ5Gvg2R5NT\nx3IaXedpOZHRwEXdHOfzYcdZDXzSzDLMLAmvreepsOOM7+Y4d59kPJJA1HtKoqUGrxplbNj28PVj\nOK831ZXmzUlwOfBTvPaBi3p4WbP/MzVs+4ju3qKH43QXbxtQY2ZpwDXAV5xzv+go4H/Anqz9/s9j\nOgaEOIB3p/FoN/tO5jmMA0Ap8KVu9h0J+f03wEN489iX490JhSaNA3hVateexHtLglPSkKhwzrWZ\n2f/hfQD9ImTXMT2IejhGE16j73TguyG7Qr+xd9iL9633zI4N5k1jOgfvAzBS8+ladTUf2OCcC/rH\nSyLkg9bM0oFP03Mi6s52vA/gxcCLxynzMl7D9wbnNxZ8RC/j3RVVOOeOe6fnnDtoZv+DdydVjjc5\n05aw49wJNDjnEn5yIYmMkoZE033Ac2b2EN632Es5wfSXZnY18AXgt0AFkA38E159e4f3gKvN7Pd4\n7R/bnXOHzOx54Ot+e0Mt3gdc00nG/Ekzuxevvv86vKqyeQDOuTq/m+5dZlYPtAPfwevNlXEyb+Kc\nazezbwGrzGwV8CRe4vl74Em/HeFuvM4EL5nZY3h3F9l+TCucc69G+HYrgduBV/0ntT/Eqx6bBex2\nzt0fUvYpvOq2OuCBsOMU400oVmxm/wq845/3TCDNOfddpP+JdUu8loG14M1cGAAO43Vh/Tg9956a\nhtetsxLvG30A705lZEj584H1QGPosYBxeA3v9XjflG+j+95TNd3EOdc/1ifw7jQO++/95bByU/C+\n
cTfiJbVvhR+To72nhoe9toywXkV4iWkDXvXafuAlIC9k/xn+9TiAlwB34PXWyunhmh9zjkAm8HP/\nurb45/YccHFYuXT/3B0wrZtjDwaW+nG04DWM/56Q3lzdnaeWxF00c5+IiERMvadERCRiShoiIhIx\nJQ0REYmYkoaIiERMSUNERCKmpCEiIhFT0hARkYgpaYiISMT+H0q7EfpjKpZFAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "Er = vis.Experiment_reader()\n", + "pth = \"out/gn\"\n", + "Er.set_path(pth)\n", + "Er.read_all_expriemts()\n", + "ank=1.0/8\n", + "ul = Er.get_data(\"Inc\")['gn_001'][0].max()\n", + "def fd(x):\n", + " return ul-x\n", + "Er.print_param_description(0)\n", + "ank=1.0/8\n", + "# plot IND\n", + "Er.annotated_plot2(0,'Inc', zoom=0.8, pad=0, max_hight=3, xybox=None, fd=fd,\n", + " figposx=[ank,3*ank,5*ank,7*ank], \n", + " figposy=[fiy,fiy,fiy,fiy], xlim=[-0.5,3.5], ylim=[0,16],\n", + " ylabel=\"IND\", add_points=True)\n", + "# plot FID\n", + "Er.annotated_plot2(0,'Fid', zoom=0.8, pad=0, max_hight=300, xybox=None,\n", + " figposx=[ank,3*ank,5*ank,7*ank], figposy=[fiy,fiy,fiy,fiy], xlim=[-0.5,3.5], ylim=[0,500], ylabel=\"FID\", add_points=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "sp_001: {'alpha': 0.0}\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYYAAAD8CAYAAABzTgP2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAADqFJREFUeJzt23+o3fV9x/Hnq7k0axE00WitMbu2\nCiNu0MJBKdvA1V9x0EZa/7D7o2FryR+rf6yl0BTHtOof6tZZSruN0BZCYdXOURqQItFWGGNYT6yj\nzdo0t7HFpLZNjQhOqmR974/7dTufy4k3ud9z78nR5wMO93y/38+99/3xgs97zvcmVYUkSa9607QH\nkCSdWQyDJKlhGCRJDcMgSWoYBklSwzBIkhqGQZLUMAySpIZhkCQ15qY9wEqcd955NT8/P+0xJGmm\n7N+//9dVtWm5dTMZhvn5eYbD4bTHkKSZkuRnp7LOt5IkSQ3DIElqGAZJUsMwSJIahkGS1DAMkqSG\nYZAkNQyDJKlhGCRJDcMgSWoYBklSwzBIkhqGQZLUMAySpIZhkCQ1DIMkqWEYJEkNwyBJahgGSVLD\nMEiSGoZBktQwDJKkhmGQJDUMgySpMZEwJNmW5GCShSS7xlxfn+SB7vrjSeaXXN+S5MUkn5zEPJKk\nlesdhiTrgC8CNwBbgQ8l2bpk2UeA56vqUuA+4J4l1/8e+FbfWSRJ/U3iFcMVwEJVHa6qV4D7ge1L\n1mwH9nTPHwSuThKAJDcCTwMHJjCLJKmnSYThIuCZkeMj3bmxa6rqBPACcG6Ss4BPAZ+ZwBySpAmY\n9s3n24H7qurF5RYm2ZlkmGR47Nix1Z9Mkt6g5ibwNY4CF48cb+7OjVtzJMkccDbwHHAlcFOSe4Fz\ngN8m+U1VfWHpN6mq3cBugMFgUBOYW5I0xiTC8ARwWZJLWAzAzcCfLVmzF9gB/AdwE/Dtqirgj19d\nkOR24MVxUZAkrZ3eYaiqE0luAR4G1gFfqaoDSe4AhlW1F/gy8NUkC8BxFuMhSToDZfEX99kyGAxq\nOBxOewxJmilJ9lfVYLl10775LEk6wxgGSVLDMEiSGoZBktQwDJKkhmGQJDUMgySpYRgkSQ3DIElq\nGAZJUsMwSJIahkGS1DAMkqSGYZAkNQyDJKlhGCRJDcMgSWoYBklSwzBIkhqGQZLUMAySpIZhkCQ1\nDIMkqWEYJEkNwyBJahgGSVLDMEiSGoZBktQwDJKkhmGQJDUMgySpMZEwJNmW5GCShSS7xlxfn+SB\n7vrjSea789cm2Z/k+93H905iHknSyvUOQ5J1wBeBG4CtwIeSbF2y7CPA81V1KXAfcE93/tfA+6rq\nD4AdwFf7ziNJ6mcSrxiuABaq6nBVvQLcD2xfsmY7sKd7/iBwdZJU1feq6ufd+QPAW5Ksn8BMkqQV\nmkQYLgKeGTk+0p0bu6aqTgAvAOcuWfNB4MmqenkCM0mSVmhu2gMAJLmcxbeXrnuNNTuBnQBbtmxZ\no8kk6Y1nEq8YjgIXjxxv7s6NXZNkDjgbeK473gx8A/hwVf3kZN+kqnZX1aCqBps2bZrA2JKkcSYR\nhieAy5JckuTNwM3A3iVr9rJ4cxngJuDbVVVJzgEeAnZV1b9PYBZJUk+9w9DdM7gFeBj4IfD1qjqQ\n5I4k7++WfRk4N8kC8Ang1T9pvQW4FPibJE91j/P7ziRJWrlU1bRnOG2DwaCGw+G0x5CkmZJkf1UN\nllvnv3yWJDUMgySpYRgkSQ3DIElqGAZJUsMwSJIahkGS1DAMkqSGYZAkNQyDJKlhGCRJDcMgSWoY\nBklSwzBIkhqGQZLUMAySpIZhkCQ1DIMkqWEYJEkNwyBJahgGSVLDMEiSGoZBktQwDJKkhmGQJDUM\ngySpYRgkSQ3DIElqGAZJUsMwSJIaEwlDkm1JDiZZSLJrzPX1S
R7orj+eZH7k2qe78weTXD+JeSRJ\nK9c7DEnWAV8EbgC2Ah9KsnXJso8Az1fVpcB9wD3d524FbgYuB7YB/9B9PUnSlEziFcMVwEJVHa6q\nV4D7ge1L1mwH9nTPHwSuTpLu/P1V9XJVPQ0sdF9PkjQlkwjDRcAzI8dHunNj11TVCeAF4NxT/FxJ\n0hqamZvPSXYmGSYZHjt2bNrjSNLr1iTCcBS4eOR4c3du7Jokc8DZwHOn+LkAVNXuqhpU1WDTpk0T\nGFuSNM4kwvAEcFmSS5K8mcWbyXuXrNkL7Oie3wR8u6qqO39z91dLlwCXAd+dwEySpBWa6/sFqupE\nkluAh4F1wFeq6kCSO4BhVe0Fvgx8NckCcJzFeNCt+zrwX8AJ4GNV9T99Z5IkrVwWf3GfLYPBoIbD\n4bTHkKSZkmR/VQ2WWzczN58lSWvDMEiSGoZBktQwDJKkhmGQJDUMgySpYRgkSQ3DIElqGAZJUsMw\nSJIahkGS1DAMkqSGYZAkNQyDJKlhGCRJDcMgSWoYBklSwzBIkhqGQZLUMAySpIZhkCQ1DIMkqWEY\nJEkNwyBJahgGSVLDMEiSGoZBktQwDJKkhmGQJDUMgySpYRgkSY1eYUiyMcm+JIe6jxtOsm5Ht+ZQ\nkh3dubcmeSjJj5IcSHJ3n1kkSZPR9xXDLuDRqroMeLQ7biTZCNwGXAlcAdw2EpC/q6rfA94N/GGS\nG3rOI0nqqW8YtgN7uud7gBvHrLke2FdVx6vqeWAfsK2qXqqq7wBU1SvAk8DmnvNIknrqG4YLqurZ\n7vkvgAvGrLkIeGbk+Eh37v8kOQd4H4uvOiRJUzS33IIkjwBvG3Pp1tGDqqokdboDJJkDvgZ8vqoO\nv8a6ncBOgC1btpzut5EknaJlw1BV15zsWpJfJrmwqp5NciHwqzHLjgJXjRxvBh4bOd4NHKqqzy0z\nx+5uLYPB4LQDJEk6NX3fStoL7Oie7wC+OWbNw8B1STZ0N52v686R5C7gbOCves4hSZqQvmG4G7g2\nySHgmu6YJIMkXwKoquPAncAT3eOOqjqeZDOLb0dtBZ5M8lSSj/acR5LUU6pm712ZwWBQw+Fw2mNI\n0kxJsr+qBsut818+S5IahkGS1DAMkqSGYZAkNQyDJKlhGCRJDcMgSWoYBklSwzBIkhqGQZLUMAyS\npIZhkCQ1DIMkqWEYJEkNwyBJahgGSVLDMEiSGoZBktQwDJKkhmGQJDUMgySpYRgkSQ3DIElqGAZJ\nUsMwSJIahkGS1DAMkqSGYZAkNQyDJKlhGCRJjV5hSLIxyb4kh7qPG06ybke35lCSHWOu703ygz6z\nSJImo+8rhl3Ao1V1GfBod9xIshG4DbgSuAK4bTQgST4AvNhzDknShPQNw3ZgT/d8D3DjmDXXA/uq\n6nhVPQ/sA7YBJDkL+ARwV885JEkT0jcMF1TVs93zXwAXjFlzEfDMyPGR7hzAncBngZd6ziFJmpC5\n5RYkeQR425hLt44eVFUlqVP9xkneBbyzqj6eZP4U1u8EdgJs2bLlVL+NJOk0LRuGqrrmZNeS/DLJ\nhVX1bJILgV+NWXYUuGrkeDPwGPAeYJDkp90c5yd5rKquYoyq2g3sBhgMBqccIEnS6en7VtJe4NW/\nMtoBfHPMmoeB65Js6G46Xwc8XFX/WFVvr6p54I+AH58sCpKktdM3DHcD1yY5BFzTHZNkkORLAFV1\nnMV7CU90jzu6c5KkM1CqZu9dmcFgUMPhcNpjSNJMSbK/qgbLrfNfPkuSGoZBktQwDJKkhmGQJDUM\ngySpYRgkSQ3DIElqGAZJUsMwSJIahkGS1DAMkqSGYZAkNQyDJKlhGCRJDcMgSWoYBklSwzBIkhqG\nQZLUMAySpIZhkCQ1DIMkqWEYJEkNwyBJahgGSVLDMEiSGqmqac9w2pIcA3427TlO03nAr6c9xBpz\nz28M7nl2/G5VbVpu0UyGY
RYlGVbVYNpzrCX3/Mbgnl9/fCtJktQwDJKkhmFYO7unPcAUuOc3Bvf8\nOuM9BklSw1cMkqSGYZigJBuT7EtyqPu44STrdnRrDiXZMeb63iQ/WP2J++uz5yRvTfJQkh8lOZDk\n7rWd/vQk2ZbkYJKFJLvGXF+f5IHu+uNJ5keufbo7fzDJ9Ws5dx8r3XOSa5PsT/L97uN713r2lejz\nM+6ub0nyYpJPrtXMq6KqfEzoAdwL7Oqe7wLuGbNmI3C4+7ihe75h5PoHgH8GfjDt/az2noG3An/S\nrXkz8G/ADdPe00n2uQ74CfCObtb/BLYuWfOXwD91z28GHuieb+3Wrwcu6b7OumnvaZX3/G7g7d3z\n3weOTns/q7nfkesPAv8CfHLa++nz8BXDZG0H9nTP9wA3jllzPbCvqo5X1fPAPmAbQJKzgE8Ad63B\nrJOy4j1X1UtV9R2AqnoFeBLYvAYzr8QVwEJVHe5mvZ/FvY8a/W/xIHB1knTn76+ql6vqaWCh+3pn\nuhXvuaq+V1U/784fAN6SZP2aTL1yfX7GJLkReJrF/c40wzBZF1TVs93zXwAXjFlzEfDMyPGR7hzA\nncBngZdWbcLJ67tnAJKcA7wPeHQ1hpyAZfcwuqaqTgAvAOee4ueeifrsedQHgSer6uVVmnNSVrzf\n7pe6TwGfWYM5V93ctAeYNUkeAd425tKtowdVVUlO+U++krwLeGdVfXzp+5bTtlp7Hvn6c8DXgM9X\n1eGVTakzUZLLgXuA66Y9yyq7Hbivql7sXkDMNMNwmqrqmpNdS/LLJBdW1bNJLgR+NWbZUeCqkePN\nwGPAe4BBkp+y+HM5P8ljVXUVU7aKe37VbuBQVX1uAuOulqPAxSPHm7tz49Yc6WJ3NvDcKX7umajP\nnkmyGfgG8OGq+snqj9tbn/1eCdyU5F7gHOC3SX5TVV9Y/bFXwbRvcryeHsDf0t6IvXfMmo0svg+5\noXs8DWxcsmae2bn53GvPLN5P+VfgTdPeyzL7nGPxpvkl/P+NycuXrPkY7Y3Jr3fPL6e9+XyY2bj5\n3GfP53TrPzDtfazFfpesuZ0Zv/k89QFeTw8W31t9FDgEPDLyP78B8KWRdX/B4g3IBeDPx3ydWQrD\nivfM4m9kBfwQeKp7fHTae3qNvf4p8GMW/3Ll1u7cHcD7u+e/w+JfpCwA3wXeMfK5t3afd5Az9C+v\nJrln4K+B/x75uT4FnD/t/azmz3jka8x8GPyXz5Kkhn+VJElqGAZJUsMwSJIahkGS1DAMkqSGYZAk\nNQyDJKlhGCRJjf8FFDYZsBaypoYAAAAASUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYcAAAEPCAYAAACp/QjLAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAIABJREFUeJzsnXd4VUX6x79za0JCCITQS0IVlCIE\n7F13XburaxcQFbEBSlURAQHpoGtFQUDFsrqrrqvu4m/XZXdVugrSkRZqIJCeW+f3x8w57yT3JgQI\nuTfwfp7Hh+Occ8+Z8965mXnrCCklGIZhGMbEEesOMAzDMPGHK9YdOFYSExP3lZaWNo51P04VvF4v\nfD5frLtxysDyrF5YnieXhISE/SUlJU2inRO1zawkhJC1rc/xjBACLM/qg+VZvbA8Ty5aviLaOTYr\nMQzDMBHUOrNSBLkrAQCFhYV2UyAQAAA4nE67LUyHCIWC+t+A3ebQ02QooFYp+/cV2OcKC/wAgIQ6\ndBPh8NP9/JbaS+J0wK2foSblZFddul/eIXUOQbst/zD1JQT1nKRG5V8WEELd1+322G0e4dX3IPV7\n/bpN9vGRgr0AgFatSXts0qJd5M0B/OPPrwMASktLqT+hkHq2g9YS0lhWhMOhMv+qfupzISXPvCN0\nv9IS9d5ur8O4nmQRDlqyIHk/PGwyAODVqaMBAAnOBLpfsfruw6DnlxTRcVivgbwp5d8WEEI9w+mk\n786lZVxSRN/J7uy99nFRyWEAQMP0VLttwovzI2+O6PK84a4hEdf95f1Z1N9K5PmHe0cBAObMfs4+\ndzzyFPo4HFY3fmTEBPvcq8+rZxyPPJ94ZkZEm7XynzX2GbvtaPJMbRDV0gGhBfHZeySv4xmft/VR\n7/jh/BcARB+fg56aaLe9Pusp+3jgE+ozr04bSf0qJ8/jGZ+jppDsZk0cCgAY8vR0AMDLU0l2VR2f\nry/61G6bPk6979AxL6CqsObAMAzDRFCjmoMQYh6A6wAckFKeZbQ/DuBRACEAf5NSjqjqPb9f+RMA\noKikxG7zFetVgItmZz9oVe3TqzhfKa2uADXjB4JqlZPgolVMgicJALD9p412W2mANBV/UD0nSBM5\nAlqxcDiUiJsaK6EDu3eoe/iL6RmuNPu4actWAIBff/6FnqHv54TSQJwi0T5XorWmHdt32W07ttGx\nN1G900UX97TbshKiqCUANv2q+ubzk2Y09Gm1opk8/jG7LQh62aDW1AIBkrclz5DWHKZMX2ifGfeM\nWjmPGUWrv4EDbqT7hcL6s3S37++5Xt1Vr/TrJ9P3M/u1+QCAe265ym5778/f2MdPDHwAALB/RzY9\nQ3/1DqgVnkOQJubX4+NgziG7LecAHbs96p3O6NTGbntm2GAAwMTpL8Ikmjwnjx2kHx6m/lRRniOH\n9QEA7Nx7gPrjUppjzo49dlsgRCvhaPLUyrMtz00P97PP5eXmqHsEqc9uZ7J9XD+tIYDo8rzj1pvU\nqxnytFb7l557gd12NHm2dddDNCwtZMakoXZbwK/kNWrsS3bbhPEPUd+0PMdNeNtue3pkXwDAqrXr\nAQCTpy2wz40f/USZZwFlx+f9/a8DACxbS7/P8vI0x2dV5Wny/Wp176V/uBkA8P6f/mKf+8MNqi8f\n//XziM8BQPczOgIA7rj5d3bb4YKSqNdWRk1rDvMBXG02CCEuA3AjgG5SyjMBTK/hPjEMwzDlqNHJ\nQUq5BEBuueaHAUyWUvr0NQciPsgwDMPUKPHgkO4A4CIhxEQApQCGSSmXR7tQCDGgfNvSVepSy3wB\nAMVFRerASXp0kT/fPvb7VLtpmnE6lWoeCiq1sGEaRXelpyvTw74cmtdKAkeMe2uzkmGlsnzdHrd6\nhnAm2ecO5BfrNrre76Tn1Qmo47xSmrsLCvUzSpVqWlRAZoNDB
3cDAA4fOUz3C5Jan+JUDqxcI1y8\nOBA9PHDLtq0AgHCYzltmAVPNttoA4O67LgVQ1pRgmdPCIUfEZ58aoUww9959LfU3RCY2XzDSDPLF\np0sAADfcdKV6vsNrn7v5hqt0G11/+60328fFOiigOEB9Li3VzwioL81XQmadggL1PRdZ4whAMGT8\nVBzqiys0zIi+UHR5PjZMOQAHPfwHu+2l1/4EAHjogevo80FS++fP/z8AQJ97fkuPLCfPunXpXVJS\n1Pd7JN8wdUaR52efLLHbrrn+YgDAl3+lNoubrlcy/svni6O+08AB6mdoyvPTT74CAFz1G/VOi/9B\nzlDru+/YoZvddjR5Tpg2G0CkmW7Io7cBKDs+LXmaY7Jv38vpWYFwmX5E49GB/ezjlJR6Efe77fZL\n7eNjHZ95Jeo3++lfydRpjk+vHp833UJmoPySsuPzkotprFjj03yfthld7OPAMYzPyogHh7QLQAMA\n5wIYDuAjYX4rBlLKOTXZMYZhmNOVGk+CE0JkAPjCckgLIb4GMEVK+S/9/1sBnCulzKng82WS4L7+\nWK3MXB633ZZ7SK2gD+fRSr/QR6uqYEDNrHUSyBGU4FEOonBIrX6TEhvY55KT6wAAcnJ/tdsOF5Lz\nr7hULclLfTRV+0tVH51Odb+UlKb2uVLtrS4upj6FA8aqW2sZG7att9sO7FfvVFqi5vNCY1ngK84D\nAHg8dA8zdjctXd3vgvNpddGlo3J6Dxw8sswKZOr4R1S/XfT5wkK1gi4y+lsaMENddWijm8J13S6l\nbcmwWiF6PeSAS0jQobeF++22olLSenxRHLJBrelYK+jExPr2uYD2Bvp81CdprEytVdyeA7vttrw8\n9U4Bv1qHlJbSs7ZsWgYAaNu2u91mxkbWTVH369ihld3WqpkaS9Nmv3Jc8pz24p/s40ceUg7HaPKc\nOvstAMDoYcPsc89Pm4byPPbwrfZxdchz0aJP7GPr/S678kq7rTJ5BnxKi3G5DG3BkOeWrSoc/Y7b\n77DbjkWew555GQAwfDBpZ1Nnf2QfPzpQOclffeMzu23EEBWkMGXWmwDKagkTRquwUXN8WnIHgAfu\nvwEA8NZccgjffec1AEieC9+lZ/Xvfw+AiuV5150qyMAcn//6RmkZXbqdD+DY5GmNzx9//J5OV/B3\nPt6T4D4FcBkACCE6APAAOBjTHjEMw5zm1HQo6/sALgXQUAiRDeA5APMAzBNCrAXgB9CX62MwDMPE\nllpfW2nTD28AAIKGw8XKfC71k4c4GDKckdo0s3s3mTK2b1VBUrmHlGPQY2Q0N2miTEz100nRatCQ\n1NoEPcX6/RS3DqnOB7TJJSefrt++e59+/na7bd8ecpjnF6p3yS3Ms9t8PmWKsrK//ca7+YqVSt8k\nvbnd5nKRiSkE9U4XX2CnluDy85SJ6c6Bw8qonAteUSkmIcPhZ2WWPjiE8hL+OHWwfWyZEnJzyYGb\ns1/13couf+9zclAOvu9BAEBSCn0nk14kd9K05wYCAIJBY2xqtTmkM1DzS+i7yMk9op9Plsgjh8nB\nW6JNfIWl5KQNBKwseSuYwFTblZkwNYVMi2YG9doNP6guGXIbO0LlLoyb9seo8uz3GJl+3pyt4ugD\nxjOt9wIql+fr736o7vf7u+xzqanKZGfKM7kuyceth148yXPdJoo5ObOTyr9Zt2EVdU/LsHxtpcrG\npynPh4eSI3vWxCf0+1Q8Pl1OCk6Z/+dF9rMtJo0eaB9XJs9RE18FAIwc8jg96zjk+csvK2DSunVn\n+3jnTmVu7tQ+y24zx+ea9cqcdOtNN9ltZ2kT6NiplAtivWM8m5UYhmGYOIMnB4ZhGCaCeMhzOCES\ntY4XAql47iRlEioJUKzxpq2UW7d1y04AwIrla+22PdnqfFG+UjP9flJRkxLVfdp2oBIYPXueYR9n\nNFPqcnIyRXx4PSrCaet29
awlS36i5+vSByVFZEoqKTEiSYIq8soIU0YwoNTQUp9SjUOGBcs6dnqp\n2JfbTV9tsY7a2qXNWQAgHUYkjoHHqdYLYUOedz+uTCIeD5UE6PPwGPvYUr9vuJJiyz9brGL1z+mk\nntOjHUVKLVuqolMaN6X7PfngPfSuWuNOSKDcEKtExP4cFauwfv0O+9z+w8pE4PeRqm6a+EIha4zA\naLPMdMrkYVgp7GOHmyLgnEYRx/ZtlXnusosvtdsuPq9yeZqmkQ/mKNn5jYiqvftpLOzfp95x61Yq\ngXL4kHpHS57BIL2fV0fqmfJs06aZfTxjzjsR/Xrl+afVs45Dnr9sXAMAaN+O3rm8PLN3U2Rf40aq\nLIYpz+5dL7KPAzp679KLLrHbnhsZWZwQAPo+OhVAWZPP+288CwC4Y8B4u23KWIrm2r9PmY+jydOn\ni+yZ8jz/TGXmuukKGs8H91Nk2cw33wUAzBrzpN1mjc9oUfi9e54DoKw8f1z7s33csa3K/wihYoJB\nSlKy5Gma5kx5WhzKpVwsKTIquXt0WHNgGIZhIqj1DumdP6hiWk6jHHHAqWoJ//N7Klv91y9/sI/z\n9MroYC7lQUir7K/+N2QUSrOcbG4jl6JpYyqU1yxdaQxJdSiW36U1mp27VOzyumxatZfCKhNNqwwz\nw9vK6AwY2osvWKivU/0Km5oDlDOtSTOKu0+qQ1qE36+0jbrJ5HR7sM/vAQCPDh5cZlW76BW1onQY\n64aQQ31u7SYqC7xy9Wb7uLhIOdIKjLLpUndQ6H/DRvp4WLf9soMKGV7S4xz7uH6K0hi8hibk1Cvw\ng4fUd5Z9iFZFAd1Xh4PkuXHTGpSndctO9BldmC4cVv0yfwZhKGd+an3Kg9m8Yal9fMaZvQAAiQnk\n9L/i4t4AgBmzZ1eYSW7x7quqPHZF8vzHt38HAHTrdLbdZslzzSa14jwrgzTXNTofpmtbclrWr0cB\nFZXJ8/2v/gYA6NSWZLNuy7qIvrdre6Z9bI3VkLHaPlZ5er30W1r7038jnjd0iNIcysvzeMZnZfKs\nbHyauRRHk+eHf/8SAPCbc1RewuJllGNg0aE9BYRs2rw24rxJ06YZui/HPj6jVTSoCHZIMwzDMMcE\nTw4MwzBMBLXeIe3xKNXOCVJT121SjqcFCymFft8Bcuh4vcp5VFxMbVZuhFPHTLuMXaWc2pFmxhIf\nySMTSnF+gb6HUehP5x5YdfwLDbNXSPfZY6S/Bwy11q/r0wf9Zu6GNns5rX4ZteFDqi/FOZR+72iQ\nbh83apGp7mGomQdyqP8mLpd6V4fR3+y9qvb+v5eQqvz9KopJ73pGD/WuRvkQK/Z803Zl8jjTMINY\nO/R1b09OapeT5O3Te3Ms3UCmoa6t1c51lpxKjXVNWPfZZTiNM43nWTH3oaAZG6/NCtoUJY1d+Zxh\nvd9HPpkdO3SgvTDqpam9MMKGPPPyqTSCyYevK+ezKc9bH1LlGc7vSfc8kkey66bj/k15btym6vt3\nbNUh4tld2ihz0ppf19ltWWdQkTtLnmHDFunTuQedmmcAAEpL6bcQzSxhynO7YQ60aJyuTJqVyXPL\nxmV2W/v2PezjXuepgnM9zqFCg9NnqZyaGbNnl3mONT5vH0jO52nPqTwGc3weTZ7W+HRIJZMNxjtZ\n79+tHZmBiorp+40mz24Z7QHQ+MxsTntTWOPTNCWZsm3ZQn22zPjUm8NEk+fBA8rZn2j8ZszxGc2c\n9OD9DwMA3pz7WsS5imDNgWEYhomg1msOVmG7UIicg99+txoAsG4LheelN6DsYWvV5DeczvY+s1Jr\nDkYROyuM0Szu53QYmoD1WcMh6vQoJ5lL1+X2GoWxinUBtNJSer4511tXGv4wu3Cd1DvahcIUFnfV\necqZm+CmTX2Xrt1GzytUDuOApK974/otiIZdGjpM167bqO5lagtnGuWXraztoKH9SL0ia6/
3qja1\nJIfWykyHn8Oot710vfr+sjp2tdusezv1dW7Dh+bTsbxWli5QVp7WlcZCiwoD6oDhsKTvomsHtZJz\nG1mzW3ZRKLRPr7LX/Pgvu61JKmWjmkSTZ/87Vbnm7H2UMZuSTNnDP+tM4TIlmVsqjcHSIJ1R5Nmz\nE4WWmuMzrL8LaTh8HTqDPpo8W+uVbOvmtM/4jt00Xlrp9l17ttptXk9Zee7M3mmfu+YyFWbpdpJT\nO5o8f179T7vt2quppLWJJc9oq+OX3qSADFOelY1P619Ts+2qne/m+Fy5gULRLczxaclz/Q4lpw6t\nO9rnNmmtpFWztnZbK0O2luT37CMZt26hPm/Jc9duCsP93aUXAqh4fHY9W4XghiT1P6MJBctUFdYc\nGIZhmAh4cmAYhmEiqPVmJb9PqZd5hWRm+WnDdgBAEVkKkOo39lrwW7kCZhatPra3cCOzkrVPgqOM\nKcn8rPrXdBJKbU5yOJW6nWg4syF0FrbP6GCZSGMZ0eiAuk+pX5mI2rRtZJ/rf5+q/59obMqe/BmZ\nPL76t1KJnV46f/Bg9HxMq85/sWHy2rEncmuNUNDM6LZisU2Hrz7WJjcYZhCrDr0wTEmm7M7WJqug\nIePVW5RDtqvOTt64lZx7HTOVGcBy2gJA25ak1keTp9ABDAGdedq4McnmsktVQTOPs47dlrCC7v3j\nOmWuPKPz+XZbQQFlOJtUJk+fuXNgMPL7MN+hMnn+tEXJIqszOXlNea5Y/yMA4OwOZAaRWvbCoeTg\nMcY2hM4aNsx0Zl92ahOT2VZenj2707k7blZ7HRxNnp3OvMBuS0+lHAKTYAU7GAIVy7Oy8fmrNo11\nNsxA0eTZ84zuEZ9dvuFHu81yYndpo8aiNOTZIaOz7jt1cGs2OcAtk6GJWyh5btmlrjPlWdXx6XDT\n+YICIzGqirDmwDAMw0RQ6zUHl3ZI5xVSxuxeHaYZNBywQSPMlMoKG1mRuk3qlZlZS8f+XCAQ0QYA\nAav8cZnFn6PMP6aHNEk77xLdkU5vACgpKS7TT/VstTJx6Pt07Zxpn3MnqlWpw0EliXv2oPOHD+ta\nPTsP2W37D5C8TJza4VdslA8+rMM009IzqD/Gqt7SwMz+Whmo2/YpZ7YVbgkAK/WKq0dHcmqbhLRz\n1MwC79JOr3r1irdLO/qsV4cKmm3JibRq8vt9ZfoJAGFdwn3nPrXK6tHzQvuc06PGhRAU3tkmkzS1\noiIVuvyFrh8FAN3Oil5bKZo8/7nkfwAqlmebFmWdzwDJ03KgrjNCL61VbWW7fQHAWW27mK0AgLW/\nrtXnSKvYtH1dRNvarVQLqJNeCW/dRX1o31zdW+gutGpB8qqqPPcfLLDb/vPdd1HfxZLny5NG223z\n/vQ1gLIO2PVbIzPkzQzsVo3V76N1I+XEdhih65Y8VxqawdGwZG+N07VbSF7WuDQ122iZ8x1bkVNc\nhNWYzmiSAQA4O6ulfc6S55CnJ9lto4Y8aB9/9JnahS6rey+7zdqp71hgzYFhGIaJgCcHhmEYJoJa\nb1aysokPGoXYDh5RZqWwmZUcIhOSlSkciGJqSohSgsqvzUll1HZDLbQMAqYD1XIIOq1sU6Ownh13\nb5iS3B4j5l+XCC8pITU8WKr637qFytfo3YtMKCV+VUhQUNgzMlqRg/XuWy4FACz5L6nJm3apzdPX\nrC/zqnY2cUEhqaEFRcqsJM2s5DDJznrXaKYm26SRSYXduunM6IrkaT1nrWEaOEM7nR2WPI1nWZ80\nTQNO49gqaf3TZlL12zbpVKYPwwdTyXB/UJmABFn9kN6QzFQXnqPMKn1vvd5ua5xOOSYmljwfe3qC\n3fbifLUrXu4RCqI4mjyz9yvznGUGKfMMyyRakTx1e2fDrGS3tVEOftOE1TnjTOuiiOsBoLsOGOic\nQaZCbe1CeprKL2jXtrV9rqryXK8DSQCgfqrKjVj87X/
Kvmsl4/PgAcrtadeaTDRWsEmLxmRqtX6r\n7ii/91UbVQBHVyNDes3WXyIvjIIlz04ZRqFCbb7uYpjpzu5oBA/o4np+I2gmpHOhduxXZs8/tCWz\npyXPmZMfs9tkkP5+vPCMajflufcQ7SpZVWpUcxBCzBNCHND7RZc/N1QIIYUQDaN9lmEYhqk5alpz\nmA/gZQALzUYhREsAvwGwM8pnKuVIiQoh3LKTMgj9RUpz8AhaDfmlMSvrf4PGyt2+Un8mBKqlUqI3\n3wlWIC5n2BelVTtp7RUsPUvqlYzHyKguMbK1oR2sIS99xhtSzrqLzlGZlZ070GYuTqHezW08I+yk\nd69bXy3ZrryQVnqXBNUq8tOvvynT6yK/Ws3uO0jO66BPycIlaCUblMZKV/8bMlbrVlvrJi31/xuh\nxPr9QzCd/rQy3botspxxSH9/639VTtCOmeaqVer+kTzXbFhpH5+lV2kdjc1phN545d4bVZhl1nmU\nverQY8BprJ2kg949MUmNgy5ntLDbOofUir78SteS5+ABfey245FnY2vVq8NMO2dShm2pDr+uSJ4d\nMs9AJOr8hm2qrH2HDNLsosnzLMPZD/27CbuNfap1xv4Z7dSGWNNfei/iia9MHkTPOE553vW4qlU1\n6MF77TZLno0NzSDRS7/VysanW8szjMhgk4q0hfYZHSPaNm3bUOb/zfG5cZty8HduQ5rb2o1mtQFV\nSjyaPC2N7cXJtCd1tPH5yDNUg+q1F1StqWjyPBZqVHOQUi4BkBvl1CwAI1C26gHDMAwTI2LukBZC\n3Ahgt5QysngJwzAMExNi6pAWQtQB8DSUSakq1w8o32Y5lbdu2263+bSzyiyTHDY2CQ5rpdJpVGKz\nHKdWkz9AZh6/VmhCxlQqzWzbcKRKWj7m3GWI2nJOC2NPXWuHN5OQYfZq3kjtRNX1TGX+SDIyuAN6\nf19n2OiTUejPundaGu1m5Q7Ts8s8Uzvq9h+grOiALowmTDOL6azU8jF3YrPkaTUFjYCAoL7e6G4Z\neWa0IpOJhV9/z9Y5c+c/ywwiouSmqGdbhRGp/w3qKe9965aNAQBeI+PYKp386FMv2W2vTiSTiOVA\nNHfWc8roz44mz226OFuZuPsWZNbatVtl7TZvTm179ypna2bzlvqdSJ6bdkYWUWxtOGSFYbKysL6/\n1i3b6vvRNVt3bozon2kmsYgmz3c//woA8No02gP64eHK5OEwvvCHnybZvvy8cqAOn/i63fbmJNoD\n2uT1aSMBAA8Nm2y3fdJYyeRo43PvXtrXulkzVVJ7+25lye7YOjPis2aZ8m3byWyU2VoVJty2g3ab\nKz9mnYiSJ2UYRkx5VjY+Rz92HwBgwstv2+de0Y5oc3waFnQ7GKQq8qyMWGsObQFkAvhJCLEdQAsA\nq4QQTaJdLKWcU4N9YxiGOW2JqeYgpVwDwE6V1BNElpTyYMw6xTAMw9Ts5CCEeB/ApQAaCiGyATwn\npZx7IvcsLlLq9c7t2XabP6jMKC4v1TB3gKIjpFbJXU4ztl7pZQFdIC3gIDOPtTeDWTsvaO4nEKWm\nVbjcnt3usFGqQ8czuwxThmmECoV1YcAAKXZNWitlqmUzNZd6jFrulvqfYJTjCBlx8yGtc7qEYUpy\nRDeD+Hzq2oM5FK20+4CSbTOdyg8AwuixsHfPM+WpI5J0sbGQUarEqn1vdiFkyGvbTqWut2jZnu5X\nTp5hw1Qig5ElTzq0pugbS12XIbpHanoqACCtfj3dd5KdJc+5U4YbzzNyOHSUkVOYMoyuhEeTp0VF\n8rTYvZv2S2jdTEWbWCVcthn1/S3atCIzVMgYb9Fq1Vny3J2tZG3uL9DSiPqxCIeM95e6kJ0hz6Wb\ny0b2mPK0zDTzpo6w296cPJTup80gb002TB8yujwHDp8CAPjTX2jvB8vE5nRTsb5o49PErXdWa91Y\nRf3tPLDXPpfZIgM
A4DB+8BmtaCxaG7ZFK4Fh0dLYu8GSZ/aB7XabafZqp3N4KhufJi6dMFLR+Hzg\n6ekAgEdH/9Fumzt5ZIV9rYganRyklHce5XxGDXWFYRiGqYRanyFdUqhWUrllMgDVrJyYRCsJh1kV\nTzuQUxJpddOujZrp121Uzr08IwPTrgDnpNwHYaxSILwR/bIW0Vbmqbk/rLWKNrNSnWUczFaJYVoN\nNG6oVhD1U5Q2JAK0ak3yqOxcl7FC9BtOcqFXekEfPU9EVxzgL1XXFBZQobjGacpp6/GS5iHMqnh6\nFZ9oaEJNGqvPZO/ZBwAoNvYohtTHIaOPTrq3qTHY57U8rVVm2Mg4t4ombtlJTsMORvauVb7ZXK39\n/TtV/O7316rMU2FEG1jOaachzz7DqcjZWxOH6e4bWdoVeO+iybNJQ6UFJiZTVrUpz9ZNVHz6jn2k\nDbdpUVaezRsahe20ZhT2U8a1KU+IyOADS57NtNPbEUWeVgFAANhsyDbD2qUsSgb1nEmj1P0Nec6f\nofbMdjuoH6YD3HKYm/J86NnpEX0GgGmjVQz/x5/9w2hVz96zb7vdktmyrXFa3b9T6wy7qUljtcf6\nP5fSvtYWrRs3he4Q9TGKPJu3iAycoPFpFnmMlKdVZh4AtuiCfJZcAaBeXZVBnpQYWbrc69KWDGN8\nlpFnlD3AzaKTVSXWDmmGYRgmDqn9mkOJmh0Ljc1+LJISyS4fLqXzhbo2SZeONJPfesO1AIDXF/0F\nALDyR8rSraPrHjVrXN9u259zmPoQUBqFuRmQtZGNJWCHIeqwntnNkuEed+T+1CWG9lI3WX3eo29j\nbXIEkK1dGvFsiW5696DWHEJG1nQAkXZYAPD71X3N/a0tvIZ2I41Q31KtFbRq1tRuOzdLZSUv/q9a\nmf26nWzkXpeSza/79tht7ZpSxrdfr9jMzYDsUGPr/81Q1ii23zIb/7RSWoTfR2Og741XA6B9us1N\nZKzvURoZzO9Oe9Y+tjQRp5HlG0L0ldmJyPO351HJ5crkufug2j+4Z0fSuFZs2GQfN2+s6nEdqzxN\nzdb04QT1q5ry7HfT7wAA8z9VoaxzJj1ln4smT4+T3v3ukePVZyfTZ956QfknHnhqKkyGT5gFAOjW\nPlK7NFfKHTPpt71RV0+4+vzedtvX3yk5XtBDydWUZx2dXV2/XpLdlpdPv8XKxueevSo0tlUTIzQ2\nijxdhn/MGp8ho0pCYoI6/9AzU6wn2Ofc1j7aFcjTkoMpT7PuWFVhzYFhGIaJgCcHhmEYJoJab1Y6\nmKMc0SHDAWSFiJo7t4WDpLK1aq7MH9f/9nK77Yw2Ksuy21mq5O6KVVQuulkT5Qzsf8/tdtvGzaS2\nr1qrMkrz8wvtNp9Wua2y2yWdsgudAAAgAElEQVR+UgutXgWM/vkNZ65bh8566pLDMjND9c8yPzkd\nZDZy6TneLYxMbSMb1TKdCDMl2Rk9DK8gX5ncftpM2Z/NGimTT9jcmczI0G3YQJnbLFUdAOZMUE7I\n1i1Vv7duo5qK9VOVc33Y9b+z2/bspVDCbbuUuamkhAIAftyk5N2+uTKR+IPUf6tXLRpl2G3m/sZO\nHTq7yyjpPOJBVaLbUu8dRj1pp1bhnWbaqRFKa2XRCzO81hFdns9MVdnBXdqS89IyA5ry3LSNxpNl\norjnluvstmaN0gBEl2fPM9S7XnYR7Wl9tyHb1k1VoWNTnqs3qjHboYVyfkeTp2kGMR2eljzrGGbb\nRumqfwunq+/d4yJHajR53j18vH1sWeTKhAZXIE/7I+FIs2qHtmT6MuVpyefqy2iPamt8frd6NQCg\ncWqafW6rHovDB/S32442PsvLc9f+3fa5Zg11GPIxyHPBZ2p3uwXTntbvW/n4vGvYOJTnWOQZDdYc\nGIZhmAhqveZgJYx5jZAvt9vag5lWt07DAfv7m34LADivN5XQteblJk1ViJvTSDJr0
lBtYNKrKzmZ\nLuxFK9PL9yvHYd4R2nAoL1+VEreSn3yG5rBpq3Jm/+t72nzGb4QSWqF3zRo3sJs6tFOrjzoutcwq\nCdD7OJxWeXBayZgJYVInovmN+k1mLSMTK8GpRydaheVYmycZyURmUqG18dCCGWPttqREpeG8//nn\nAICmDchZnVpXheNOmzPPbps7hfYEPpCnwhCLi8gJePklqqxxQb4qXR4wVrp796vrftm0w24Lhk15\nqr5mdSWH5N03qlV2n2ETAQDvTHnOPif0KitsaGKmQ0/qVV+fkVTfZ+5kSkgyGfGQWn1+s+R7u836\nbjb/uiHqZwbe8wcA9J0D9ENdpPcHbpZGDnxLnm1bUXjrGW3pfDR5WtrJg7erkuXHI8/69SjJdNpb\nqkT3BzOVE/SO4WMj3uudaeQgfU+HtwJAWDt4zXpRFY1Py9naszOFKtuamDE+O7TKsI+vuER976Y8\n++mV9sN9lQb56V9pP/Are2cBAG694Uq7bfqbVNto0lOPAigrz6ED7gAAXHqh+i1s+oDCkC1Zt2hk\nbjZklqhXVgpzfFp4dbyqKc93pqqxao7PRTPp92NppMHw0eVZGaw5MAzDMBHw5MAwDMNEUOvNSt5E\npVKm1KU9aT25St0r9ZGZpWNrMtFcebkyAyUnGlnLPr1PtOUuluQAattKOZkaJBt5CYbkEtorh2wo\nQGp2qFzJbo/h8Pz2e+UY/e8KcnoX+UlFdXqU2u4P0f18pcq044QydyV5yIwWTlDP8hlmI7N8tcur\n1gDOYtMhFj0u3+1R1yYmUta3q1A51U0HerN06luXs5TZop/hFFukzQYD+6qKKZ99/l/7XOOGyvn3\n7nQyxSQZe2g3b6Liy8Mh4x3Ll0AX9AWs26Ti/L/+z7/stqaNaQ9jh04dD4bpfgGdm/KnWSqe3so6\nBQDp1nuMG2q5MPen1hsPz5/2pN1m1jIyqUyerZpn2G3nnU37Fd97h9qbOsFjZLhqM+LD/e7SnaT3\nt+SZnGDG0VMfoslz7gzqO3Di8pwy8iEAwO1PqExyYZSEl/owEDYyuKPIM2QUKQuFy37fFqMHPQgA\nWLWeNj/PbKnKb28xnNCmPK3xacrTGp9LftZlvI8iz/dnUm0ivzb1RBuf/Z+cod7PyL0ZObAfAOCD\nz5bYbb4gVQyIVgJ86qiBAACHrgH14fQx9jlrfN4x+AVEwzK9LZhO9asqkmdlsObAMAzDRFDrNYek\nBPUKqfUo7NMp1MYqHg/Nfdf9lsLY2rVVoalFOZSha4XC5uWq0Fiz6mlHnXmaSItlhIyMa+jD/MPk\nkPbo7NekJHUff4hqPx3J262fSW0N69PNvXqlf/gg7ai6eZsKpevSXtXlCQXpsyHt7A6ANAdjkW/3\nJcFYvYZC0VcSXrdacibVofd3COVcd7loNdSjGznkmzRWoamLXqSqm2Gd7VlcqIID9uRS6OXwB/qq\nfhllY8JGhrD1GiWGw88KT/Z61bsEw1SraMrr89Xz//K13VY3yQxQUP0uKqBQ430HVFBAqyaq+uUd\nQ0nreXemeo8QSBswS3NZfXF7jKz2ClZmVZXn96spI//P85Wj25dPWfh3DVWZwn1uuVF91qh62qyZ\nGhOmPG97jJzlC6crLeG+4bMi+rdAb8pjyrOoWI27Vk0oiMCU56Zf1Qq3fWsaAyOnvKE/o+RpVs0N\na2f3sciz7yDV//tHlu2zJc/fXHie3faP/36P8vzwI2XIDx+snM6mPO98UmUef33rTQDKjk9r1X/j\nNV3pHao4PhdOV7Wf5k55zD63+hd177Ah430H9tnH1krflOf1V6j9zkdMVhv2LDK0bEue82c8QX0y\nht97+ndYlfFZGaw5MAzDMBHw5MAwDMNEUOvNSunpytHcqEEqNep46VbNKe67R2eKMS7JVw63YIDM\nDKW6tHLOPqXuJbpIbU9OUc7u0hCZknwlpFKG/
PpaP9lykpKUmchjZSkaknZ51bOS65BufV4WqZSN\nG6pci48++o/dtmadcpxddK6K7/Y66PlSFwALVOBk9vl8EW3mRkMmKbokeEoyFR2Djh9v2JA2Hsls\nQbL1lygzSShEAQD3Dlf72950xSUAgHaNaT/eO2+7WPU3bJjB/NTHcFD3LUjy8XqVWcNlFTszHK6D\n71eZxF8upgztDkacfz2daW6anXZmq/NntFfBBm9PJjOAXSitAlXcdMxbOCvYv7oyeW7OpjyC6aMf\nto9bNFcBDncOmmK3lRYpk8Tn/6eCGHYdoo2AEhJVpr8pz/lTBtLjtDzL72sOAItmaKel0X2nW42j\nBC+NJ1Oed/9eZV+HjO/MLtn9grqfWxjnqijPB5962W7zGcEkJpY8TVNS2xbKWX7+2d3stmnP0Pvf\ncp/Kr3hnJu0DbpmOoo3P0Y8pp3dF47PfSGXqee3ZB+y28uOzzwjaaMfiZcNJfe2V50ecN+W5M/tA\nmXN3DZtmH8+b8ri6vgJ59h06EwDw1uTH7TZLtvcMmRL1M9FgzYFhGIaJgCcHhmEYJoLab1aqr0wG\n9Yw8B71dANrpYnUA0CSNooEO5+hIASNdPxRUpqHmTZSZyggugdAFrsxI9iI//Z9b73qVYuz3GtI7\nL/l11E7YQyp6kybKbNSuLaXzdzTU9gt6qTyMtT/9aret26SOd+YoU1iXDsazdMy+M0S2AXPPZqvg\nl2kOMU1AJik6uqpOglHoS2vDTdKpOFlqMkWv3PmIird+a9IAu23O848AAH74UZlOtuynUhFjBil1\n3NxRwmeYkJy6QGBiEn2nVhy5tQeGNLa9S01VY2DzTlLFL72I9kLo2FbFue/asd9uy96rjg/mKzm0\nakrPunew2oXs7ekUDWIWybN21XpgFJkO5kwiM4ZJZfK8+arL7LZhE16zj+dOUbKb+8JDxvPVT7VB\nqhrHnY1dw4Su6380ec6fTGYVK7rFoUuF9BlJ7/LrsLsBAE0aN7TbzL1Mosnz3C7K3Ll0LeUfWCyY\nOVg9K0xr0WjyfH0CmdasPJTyWPLsc9M1dtsPK1Vk0nerf7Lb3pr8hHGs7usrpXtaZrD7b1Umya0H\nNtrnhFC7A1YkTwtzfPYZ9SIAYOGUwRHXjdXyvKgXlfz4YvH/7OMZz6nP/Lp3u91mydMq1WGOT5fO\nCapInhZmob9oJsWjUaOagxBinhDigBBirdE2TQixQQjxsxDiL0KI1MruwTAMw5x8alpzmA/gZQAL\njbbFAJ6SUgaFEFMAPAVgZJTPRsWjU0Gb6tU4QAXBMluS5lDPSzNvQBf6CoFWcw69D3Pv3iqz8qu/\n/2Cfy81VeRO5h+n6kiKaleskqJWP14grtubpgHY+uo09p906h8Jl7O3bsjFpDhnNlPZy/XXn2m1v\nLvwrAGDpKl26OoPOWfpCUSHFUTuNktxuvf9tcn2Sgd9wxpu4dD3q+qmUN7Jpl1r9X3YBrcYffGq2\nfbxwqnJ8mdvUCp1x3K6d+g56tqeV7iUXKcdhYRENP7+RHevVGaBuI83XkmdIF31zOs2d89T3cqlR\nSDGtHq1003UOSc8etHvYrDc/BAD0v+P3AICm6XRu/gy1kistIQehwyh5bO3U9+7sYXabxxv9p2TJ\nc+GnX9ptZ3dUsmiUlhb1Mx6901fY+Hn2e1qt7CeNVBrK3I+/sM892f82ACcmz4WzaaVtydMs+RxN\nnktWrrLbunYsu5/yvBmU8yL1nuG+0ujytJylpjyDFWi20eRprYoH3vkHu+3iLNpDOqR/g6Y8F05T\nf2LmffK3iGeMf+ktAMCrL5A2aMrTot+ol+xjy8E9b+qgiOuiyfOpR++1jzu1VxaEJx6kLQGKtcXh\n6RdeKXN/gMbnfcMoB2TOpEft43lae0kyNJto+0ofjRrVHKSUSwDklmv7h5TSsoH8AKBFTfaJYRiG\niSTeHNL9A
XxV0UkhxICKzjEMwzDVR9w4pIUQz0D5fN+r6Bop5RwhxBtmW0qKMn+0bEEKR1oD5axt\n1ohMTQjSPOjQ5qSQMTd6E+sCAJo3Uypoq5ZUqG/bDrX5+Bmd6X4Bv+EA0nkLDqPwneVkKylRuREN\nPOTc2693rysppFwFcxeow7nKYd6pYxO7rVWmer+Va5TDL6sLmaHSkrXD3HCamTvjWTH4zT10v1J/\n9EJxibofaYbJ4+Keai+F+kaJkvemjKJ3lZa5gFTfviNUuYfXJytTQUMjIOCA3uOiWQu6XyhoqO16\nzwph1qPX6rDl4B8y4k373NCHlFnFb5gtvB4yARYVqrImzZuRO+vyi85Rfdcia9OKzCZ1tZnQlOHQ\nSWQJnfG0Kv/RwMitCURxWAJAnyfVjmfjh5La/8233wEoK89FU2mvA2H/LEmei2arGv75pWrM/Pbc\nnva59HTVjxORZ7Krrn0uT+8GeDR5Tn+OHOYN9U51l+hx01+X+zCfNfOZvnabuZPbzGf6AaiaPBON\n34nFJVk9AADnZ51NjSGS3b0jXir/EZsS/TsdOGq63XbP9VcAALqfTblRpjxnj1dr1IQEKkfz6mT1\n/ebrsjqmGcgan98uo0Kbl5xPJtq+ulihyWUXlt3b4fnhtCvdkSPq78eMp/vYbdb+MQAw7AU1VufP\nGBJx32MhLiYHIUQ/ANcBuEIej1udYRiGqVZiPjkIIa4GMALAJVLK4qNdX57cIuVcXrZ6nd3Wsola\npXZoR6v/PCOMzVq0OA0HXXGucoVIh1pJ9erdwz73v+Xq3ucbtbdSaaGF3MNqJs8rogzqIiuEVWdM\nlhjlt39eo4K1WmQYKz0PFdLbtlcVBExJpNVs926qLPGS71cAAA4W0EqhTrJ6D2OxBH+AHLahgF4B\nHqFQzxKjryaFPrVi27KNdrJKS1XO9KZN6B2KjbBYazp3OEkTe2OCypS1dvlq245WYRu3qnt3MJQX\no64bCnU2cLGPBO6zQlj1imzqWMr+HDFWOWuvu4IyZMMuGkoHjqiCa4keylLOaK0yvNfrEOGCEpKH\nN0G9h7nl9oThtEp7cuJ8AMDM8ffZbX6jryZvz54AAOg3mHY+W62zY0153m3sKmetj+a+QMXWfIUq\ngEAKJYe//7DSPnfFeSo44UTk6TfKb+/YqTTlBum0Sh83m3btGzv0ZgCVy3PEY3fa56aNUYXvTHmO\nmkwGAluzqII8bx2oNNY+iylgpCBPjeuKxmc05kxUGu3A0arE9sN332KfO9r4HDJmDoCy2sGYESpc\nVRptFpY8Kxqf1vtPfZqc1Bt2Ku3svN5nAgAuPoc+a43PEZNIhub4fF73JSmZtL0Xnr4rol9Ho6ZD\nWd8H8D2AjkKIbCHE/VDRS3UBLBZC/CiEeL0m+8QwDMNEUqOag5TyzijNc2uyDwzDMMzREbXNxC+E\nKOOW+NvbKg544jRSOH53maqFfsv1F9tt/lJSU51OvTdAAumKPu1ICjvUvzv3k0PspTf+BAC45joy\nNXXvQs7pnN3KwWrmTRzMU/fJydVF6Urpftk7VdG07j062G1NW5AJzBFSKmeKh+KUD/qVyvzp3/4N\nAOjQkrKrL7tIxZgXlRTYbaXFZDJz6AzvpGRSebXVC79/8KWosc/nnEW17LufmaHaenay24IB0rkd\nOu7f7SZTVkDXv3/gKVUw7IWnyAz01TfKJHB2DzI1ZbQiE1t+rnoPMy69oFjdL79Qf08BchAeOqgy\ndTMyaf+B+oYDXISVWS3RRQ7EgqD67p+fOh8AcN/tlHE7/yMVMGfKZezQu+3jcTMXqXd6htY6VkL6\ns1PeL/M5y/TQ+0zKwVi69mcAwLyplM5TVXlKof49mEfvfzR5Pjlarb/GPEmmharI87NvyHQ1dtQd\n9E5ans9N/Yvd9uyIfgCA5auUCbZpGgVgnNlJBUL4/GS6C/jIaGFleI+d8YH
d9vxIJdvy8pz8tDJX\njpo0w2576PYbABzf+KxMnkuWL7fbZk2kInuVjc+Zr6m/Ff3voPF0tPE5borKuTHf05LnhGkLUJ5p\nzynz2/HIc/TkRWXuJYSAlDLSFob4C2VlGIZh4oCYO6RPlF9+VqGdCUbJ5O5dVJiY1wjP8yST5mDt\njGZm2Qb1SsLyUVu7mwFAWIeobt20y25rmUZaQn6ecngm1Wlstx3JUbP6P79Rqw8naJa//HIVppbW\ngFZXQR850BKsTjioz/WTVF/bNlchrXt2UC6hP0tfb/jwXEZlGI9HLwzM3d8qKO/92N1qheg29vjN\naKWyTd3G7nguJ63MrPLf1goNAEK6btXb01S2Zmo90oKkDqncv/eQ3ZZWl4ZiSbEK1/R66TuwVmTn\ndlWrQ4fxsmedpTSnusn0fYcC9P5uy1EuqM9JXtXXvrep8tMLPqL0mo5t1Spx4gjSDMxV1ES9CoNZ\nMrmC8smP3qWyXl9Z9KHd9sJwlY3cogm9XzR53j2EdqebO1llHFsVy49FniMHXqsOjOjQonwlv7fe\nU5nW53eljOLvflaarelwHf0kZR9b8pz6zK12W5L+rr7+RpVNv+kK0tqDfnX9M5Pet9vGDyXZWjvi\nvfAUaSfBYPTxmb1D7aJ4Ydfudtt/f/4RAPDOjLF2W/8RdLzoRXV812BqmzdFaW39R6qaSK9NJi3O\nkucdN1xht3XuQL/taOPTkmdl4/Oltygb25SnRTR5RsOSp1nszQGS15jpSs5ej/GZCuRZGaw5MAzD\nMBHUes2h4LBaQd90w9V227k9VYKQ102ztzQ2H7GSwkqNDUVcxUoUTqFFYoTSNWqo7LeH9uXYbXt3\nk03Xo58TcNJntm9WWkapXlFkZpKWkK6TfYIl9Hyn8U2EE3V4oZ/uJ6B8F4100t+GXAo13btb2UBb\nNqfVJBJoWeF0qlWDQ9BaIOSPHipYUqRCJntl0cqsfRsVRvuHR6huzF/m0KYhDq1lmFVfLRk7rHoy\nhg04pa7SQAqOUDjukdxIrSTkoM/8/gpVKTNQotoaNSItISW5jn4ner6hxNjhhUGjUq3Qmlw9nVR1\n1TmUdNTzbFVnKa0B+SjgpmW3w6G0hKHPv2u3TRoRLdaC5DnkPkoAs+TpNrQFKajvNz2otASvl55f\nWKi+4xOR57jXKPTRkmfP9updTXkOG6BCVUc9QuGdpk+iMnkO6qtqVRUV0tg+khsZNp2YQAPekqep\nqYSNe5tEG58fv6ZCme8dOtZu+/RNSsKzNIZPXn/BbrPG5zszntQPpGdXVZ7jXy9rvweAHu2UlhBt\nfFYkz7FDVfvISZ9E3O/xPuq7+ONC8u9Y8pz8Omli056hMNgZz6rQYY+xoVdF8qwM1hwYhmGYCHhy\nYBiGYSLgyYFhGIaJoNb7HM7tpfZfaNCMisoVFqh0+lIjPCNo+BzsXcUMG3lY6uJ5YRUV5DSKnl11\nubLP7txBO7M1TafohaY6smnffrJR1qurQgWu+Z2K2mjZnHwUdXW+gcuIlnJ5zK8iqM9THoZT7zbX\nUO/AVlBA0UprflK7rLVoSLkJnkRjJzD9Kh63Eb7giRrajLmfquiVlydQIbjSElXaY9EMip65eQBF\ndyycMRpA+Z2ndLE3qX0Phjy7nqU2cz+YQzuJpaZQdMZzLypb7vjBlFtQR9uoz+6uokHSGpANODFB\n3duMljJLo1hhOk4Hyduhbcx1E1RbSQntb7Fzhypf0qAu5ZK4jJ38rK0IXh5H5R4SXHRvE0uepj29\np45e+cPQZ+22d2ZGHpvy7DdcFWdbOEWV1DDl+eW/vwdQNk5+/BCSXX0d2fT8kHvsNkueq7ZsUQ1b\nqM/nZKnvp7rlaeJEpDzN3wNclY9Pk3dnqhIl788cTz0UkTb2WwY+FdEWTZ7RxuestynS6KUxKpot\nWj6LJc8Rl99mn4s2PsfMIv/PxOH0XVl
MHaFyUo4ciXwPS55Th5MfK9r4rIo8K4M1B4ZhGCaCWp8h\nvfZvqoL3oQIqXOfR5bmFEdrrl0ahOPtextyoV7oNklSmcomxP2v2YRWldDiXVkDp9WgV3kBHEPmM\nOPLDOvM0sY6OWgiR5pKY4NSPpNk8MYkyJu09n40MyAStTRQVqX6+8/kS+1xdj1pFX/9byuBOSaUV\nh7WKc7npGQV5KprkusdeKbMCsuLpC0qpMJhLV/QTxlAJSkNL0P+aq2OrGl+yVz3TbyRhHipSGlZR\n4WHqbx3SnJJ1BJER0IEineHu8WqNIUzfp8ft0I+kZ3i8pHVZ5dNDQYrQcuvVr8+n+rlkBe19nOhS\n323PbpRxnJhE7/v4OBWlNHs0rcRLilV/npr5YdQV5aznqGR3dcvzkbEqMmfUw7STmCnPp6ar/s58\n5kG7begktdvZ5Kd02W1DniMnz414/h/HU4b7Y8+qEtiznqXtVSx5Pva8+uyNl59nnzuaPP1+vQe6\noSlXJE+L2WMfs4+fGKeqJFT0t2z2M49EvM+Qia8CAN4apzSH+8dQdJN1XUXyrMr4HPUC7Swwc7TS\nNJ6c8JbdZsoz2vh84nlV3G/GCFWq+1jGZ2XyHGVkTVvvyhnSDMMwTJXhyYFhGIaJoNY7pOslq5ry\nIWMXtqAughU2ykV44ER5zEJclinHp1Png0ZCVVqyElO9RCqOFzDMREGdmORJpGfU0w63RN1WnEf9\nS/Zo56ubxO9OJDPV4SPKDBQMGw5B7ehL1MlSV11m7HoVUNclJlN9fbjIJOVJVKaIsLHhQ3Jy5I5a\nAFBH724VNnYNC+ld46RRIsIVZV3hNEqYWKpyQJtLQsbOXnW1We3ZaVRSYupTVI8+pM19Lg89o442\ntnh0m6+Y+pfgqmN1gPpiOPiLipWabanqAPDquIfV/XRRwq66uKDqgHqGx9jpyyxl8uJzypxkmgUT\nEgxnv8GiWcpZX1hMu/5VhzwfGTvTPjf7WWWyCoUp8SxkmIlmj1Gmoycn0u55spw8H38usjjyq8+T\nKcyU56yxykwTksZOim7V1z+OUaamaPIcPm2+3fSysXeDy6PMH1WRp2XymTPxiQrPAcDLz1LfrXIf\npjwtE9Tr44bq62nXNEueQ55/xW4zx+cgbR56cexAu23MJDIZAcC0EXR9givyz6zHS20PjZylPvMM\n7axnj08dOPD5v2j/itEP36XPRR+fxyLPymDNgWEYhomg1jukmRNDO6Ri3Y1TBpZn9cLyPLmwQ5ph\nGIY5JnhyYBiGYSKo6T2k5wkhDggh1hptDYQQi4UQm/W/9WuyTwzDMEwkNepzEEJcDKAQwEIp5Vm6\nbSqAXCnlZCHEKAD1pZQjK7pHYmLivtLS0sYVnWeODa/XC5/Pd/QLmSrB8qxeWJ4nl4SEhP0lJSVN\nop2rcYe0ECIDwBfG5LARwKVSyr1CiKYAvpVSdjzafbKysuSKFStOal8ZhmFONYQQK6WUWUe7Lh58\nDo2llHv18T4AFWoFQogBQogVQogVOTk5FV3GMAzDnCDxMDnY6BjVClUZKeUcKWWWlDIrPT29BnvG\nMAxzehEPk8N+bU6C/vdAjPvDMAxz2hMPk8PnAKzC5H0BfBbDvjAMwzCo+VDW9wF8D6CjECJbCHE/\ngMkArhJCbAZwpf5/hmEYJobUaOE9KeWdFZy6oib7wTAMw1ROPJiVGIZhmDiDJweGYRgmAp4cGIZh\nmAh4cmAYhmEi4MmBYRiGiYAnB4ZhGCYCnhwYhmGqgVmzZsW6C9UKTw4MwzAnSHZ2NsaNG4fdu3fH\nuivVBk8ODMMwJ8irr76KvLw8vPrqq7HuSrVRoxnSDMMwpwLjx4/Hl19+iYSEBACwNYaPPvoI//vf\n/wAApaWluOaaazBmzJiY9fNE4MmBYRjmKJQGQsgvCeBISQB5JQF0/c0dWPjhn7F16dIy123ZsgVb\ntmw
BAGRlZWHQoEGx6G61cMyTgxCiFWhDnv1Syp3V2yWGYZjqJxgKI0//cbf+yOeXBHCkOEDt9rG/\nTJsvGI64X/iqp+DJfRb+fZsjzmVlZWHx4sVITU2tiVc7KVRpchBCOAE8DeBhlNupTQixD8CrACZL\nKUPV3kOGYU4Ks2bNwhNPPBHrbhwT4bBEQWnQ+CNf9o94frk/8kdKqK3QFzzu57qdAvUS3WX+S63T\nHN7eCzBv5F3Iyd5uX9uhQ4daPzEAVZgchBACwBcArgLwJwCLAWQDEACaA/gtgHEAzgdw7UnrKcMw\n1YYVXXPbbbehefPmNfpsKSWK/SG1eq9gpW4dl2/LLw3geLe9FwL0hz3RjRT7j7zxBz/RE9GeWseN\nRLcT6k9hWbKzs7EgWAqXy4XMzExs27YN+fn5KCoqOvUnBwB3A7gcwNVSym+inJ8nhLgKwBdCiLuk\nlIuqtYcMw1Q7ZnTNxIkTj+se5e3wecXGcUkAecX+CDOONRkEw8f5Fx5AstdlrN6N1Xy5P/LmNSmJ\nbtT1uuBwRP6BPxFefvllhEIhDBkyBBMmTMDo0aOxYMECvPLKK5g0aVK1PqumqcrkcCeA+RVMDAAA\nKeViIcR8qImEJweGiapMR3cAABvuSURBVDMqi675dsl/EAxJFJeWIOvCy3FT/8HHbYevKgluR5k/\n4inmSr3cH3r6I+9BSoILLmf8ROCnpKRg8eLF6NatGwBg2rRpuOeee/DVV1/FuGcnjpBH0dGEELsB\nPC6l/PNRrvs9gD9KKWtER83KypIrVqyoiUcxTK3nyJEjuPyKK7F61coKr/E0aYfGt0+AIyG5Sve0\n7PAp1h9084+4Yb4pv8JPSXQjwe2srldjjhEhxEopZdbRrquK5pAGYG8Vrtunr2UYJg44UFCK5dsO\nY9m2Q1i2/TAOXjwcnj3Ro2uSW3TAhY/PRsO0BtFNNnqFb7bV8US3wzOnBlWZHDwAqhKFFALgPt6O\nCCGeAPAAAAlgDYD7pJSlx3s/hjndyD5cjGXbcu3/fj1YVOZ8Qp0UXD38j/hu5kAc3L3dbu/QoQOW\nLl1a6x2oTPVS1TyHx4UQR9Memh5vJ4QQzQEMAtBZSlkihPgIwB0A5h/vPRnmVEZKiV8PFpWZDHYf\nKSlzTR2PEz1b10evjAbondkA3Vum4uD+veg17dSMrmGql6pMDjsBXFjF+51IQpwLQKIQIgCgDoA9\nJ3AvhjmlCIUlNuzLtyeC5dtzcbDQX+aalAQXemc20P+l4cxmKXCXc96eytE1TPVy1MlBSplxsjsh\npdwthJgONbmUAPiHlPIf5a8TQgwAMAAAWrVqdbK7xTAxwx8MY+2evDKTQUFp2SSuhslenNOmAXpr\nzaBj47pHDdU8laNrmOrlqNFKNdIJIeoD+ATA7QCOQCXbfSylfLeiz3C0EnMqUeIPYfWuw/ZksGrn\nYZQGyoaKtqifiN6ZDXCO1gwy0uqwQ5g5ZqotWknXUqoyx1lr6UoA26SUOfqZf4bKuK5wcmCY2kx+\naQArd9Bk8HP2EQRCZRdq7Rolo1eGmgx6ZTZA89TEGPWWOR2pis9hO1QEUVWQVbxneXYCOFcIUQfK\nrHQFAFYLmFOGQ4U+LN+ei6V6Mli/Nx9mkrAQwJnNUmzNICujARome2PXYea0pyp/yK8/yvk6AAYC\nuAxA4Hg6IaVcKoT4GMAqAEEAqwHMOZ57MUw8sOdISZnJYMuBwjLn3U6B7s3roXdmGs7JbIAereuj\nXuJxR4IzTLVTFYf036K1CyGSATwG4AkAKQBeAzDleDsipXwOwHPH+3mGiRVSSmw/VIxl2w5hqXYe\n78otG1aa4HagR6v6djTR2S3rI9HDWcJM/HI8+znUBzAEamLwAHgDwHQp5b5q7hvDxCXhsMSmAwVY\nto00g5wCX5lr6npdyMqoj96Zaeid2QBdmteDxxU/NYEY5mhUeXIQQ
jQCMAzKhBQC8DKA2VLKQyep\nbwwTFwRCYfyyJ1+VodiWi+XbDyOvpKwFNS3JY+QYNMAZTVLgrOYKoAxTk1QlWqkFgJEA+gMoAjAZ\nwMtSyvyT3DeGiQmlgRB+2nVERRJtz8XKHYdR7C9bQaZZvQQ72ax3ZgO0TU/isFLmlKIqmsMWqJpJ\n3wJ4HWqCuLCiH4KU8svq6hzD1ASFvqAOKz2E5dsO48ddR+APlc0xaNMwqYxm0KJ+nRj1lmFqhqoW\n3gNUNNKlUDvAVYQEwF42Jq45XOTH8u25tmbwy558hIy4UiGAM5rUtZPNemXWR6O6CTHsMcPUPFWZ\nHDJPei8Ypoocz77H+/NLVRSRdh5v3F9Q5rzTIdC9ZaqeDBogq3UD1KvDYaXM6U1VQll31ERHGOZo\nVGXfYyklduWWYKl2Hi/bnosdh4rLXONxOXC2PRmk4exWqUjyHk/uJsOculTFIZ2DqmdIQ0rZ6IR6\nxDAVEG3f43BYYktOYRnNYF9+2W1Akr0u9GxNOQZdW9SD18XWT4apjKosl17BMUwODFNdVLTv8buL\nPsDHf/sG+aVB5BcVw53RE6kX3Gl/rn4dt72HwTmZaejUtG5c7TvMMLWBuKjKejxwVdZTnyNHjuCq\nq65CZd+zp0k7nPXANFzQubWtGbRLTz5q6WqGOV2pzj2kGSYmpKamYvHixbjk8ivw8+pVEefbdOqK\nT7/4CmdlNuUcA4apZljXZuKakCsRKTePg6t+WQd0hw4dsPK7f6NLm2Y8MTDMSYAnByZuKfIF0X/B\nCmzbewiOYAlcLhfat28Pl8tl73vMMMzJgScHJi4JhMJ4+L1V+GnXEWDd16jrdWDIkCFYs2YNhgwZ\nglAohFdeeSXW3WSYUxaeHJi4IxyWGPHxz1iyKQcNkjy444KO+L9vvsG0adPg9Xoxbdo0LF68GCkp\nKbHuKsOcsrBDmok7Jn+9AX9ZvRt1PE683a8XurW8KuKabt26oVu3bjHoHcOcHrDmwMQVc5ZsxZwl\nv8LlEHj9np7o1jI11l1imNMSnhyYuOHPq7Ix6csNAIAZt3XDxR3SY9wjhjl9iZvJQQiRKoT4WAix\nQQixXghxXqz7xNQc3248gBEf/wwAGH1tJ9zYPXrtJIZhaoZ48jm8COBrKeWtQggPAC6Yf5qweudh\nPPzuKgTDEg9d0gYPXNQm1l1imNOeuJgchBD1AFwMoB8ASCn9APyx7BNTM2zNKUT/+ctREgjhlh4t\nMOrqM2LdJYZhED9mpUwAOQDeFkKsFkK8JYRIKn+REGKAEGKFEGJFTk5OzfeSqVb255eiz9xlOFwc\nwGUd0zH5li6c7cwwcUK8TA4uAD0AvCalPBtqK9JR5S+SUs6RUmZJKbPS09lZWZvJKwmg77xl2H2k\nBN1bpuKVu3vAzZVTGSZuiJdfYzaAbCnlUv3/H0NNFswpSGkghAcXrsCGfQVom56Et/v1Qh1PXFg4\nGYbRxMXkIKXcB2CXEKKjbroCwLoYdok5SYTCEoM/WI1l23LROMWLhfefg/pJnqN/kGGYGiWelmuP\nA3hPRyr9CuC+GPeHqWaklBj96Vr8/Zf9SElwYWH/c9A8NTHW3WIYJgpxMzlIKX8EcNQNKJjay+xv\nNuP9ZTvhdTnwVt9e6Nikbqy7xDBMBcSFWYk59Xn3hx148f82wyGAP955NnpnNoh1lxiGqQSeHJiT\nzldr9uLZz9YCACbe3AW/ObNJjHvEMMzR4MmBOal8v/UQBn/wI6QEhl7VAXf2bhXrLjEMUwV4cmBO\nGuv25GPAwhXwh8Loc15rPHZ5u1h3iWGYKsKTA3NS2JVbjL5vL0OBL4hrujTBc9efydnPDFOL4MmB\nqXYOFfrQZ94y5BT4cF6bNMy6vTucDp4YGKY2wZMDU60U+YLoP385th0sQqemKXijT094Xc5Yd4th\nmGOEJwem2vAHwxj47kr8lJ2Hl
g0SseC+XkhJcMe6WwzDHAc8OTDVQjgsMeLjn/CfzQeRluTBwv7n\noFFKQqy7xTDMccKTA1MtTPpyPT79cQ/qeJx4+75eyGwYUXGdYZhaBE8OzAkzZ8lWvPXfbXA7Bd64\ntye6tkiNdZcYhjlBeHJgTohPVmZj0pcbAADT/9ANF7XnfTYY5lSAJwfmuPnXxgMY8cnPAIAx13XG\njd2bx7hHDMNUFzw5MMfF6p2H8ci7qxAKSwy8pC36X5gZ6y4xDFON8OTAHDNbDhSi//zlKAmEcEuP\nFhh5dcejf4hhmFoFTw7MMbEvrxR95y3D4eIALj+jESbf0oXLYjDMKQhPDkyVySsJoO+8Zdh9pARn\nt0rFK3f1gNvJQ4hhTkX4l81UidJACA8uWIGN+wvQNj0J8/r2QqKHy2IwzKlKXE0OQginEGK1EOKL\nWPeFIYKhMAa9vxrLtueiSUoCFt5/DuoneWLdLYZhTiJxNTkAGAxgfaw7wRBSSjz72S/4x7r9SElw\nYeH9vdE8NTHW3WIY5iQTN5ODEKIFgGsBvBXrvjDErG824/1lO+F1OTC3Xy90aFw31l1iGKYGiJvJ\nAcBsACMAhCu6QAgxQAixQgixIicnp+Z6dpryzg878NL/bYZDAC/f1QO9MhrEuksMw9QQcTE5CCGu\nA3BASrmysuuklHOklFlSyqz0dC7TcDL5cs1ejPlsLQBg0s1dcFXnxjHuEcMwNUlcTA4ALgBwgxBi\nO4APAFwuhHg3tl06ffl+6yEM+eBHSAkM+00H3NG7Vay7xDBMDRMXk4OU8ikpZQspZQaAOwD8U0p5\nT4y7dVryy548DFi4Av5QGH3Pa41HL2sX6y4xDBMD4mJyYOKDXbnF6Pf2chT4gri2S1OMuf5Mzn5m\nmNMUV6w7UB4p5bcAvo1xN047Dhb6cO/cpcgp8OH8tmmYeXs3OB08MTDM6QprDgwKfUH0n78c2w8V\no3PTFLxxb094XZz9zDCnMzw5nOb4g2E8/O5K/Jydh5YNEjG/fy/UTXDHulsMw8QYnhxOY8JhieEf\n/4T/bD6ItCQP3ul/DhrVTYh1txiGiQN4cjhNkVJiwt/W47Mf9yDJ48T8+3ojo2FSrLvFMEycwJPD\nacobS37FvP9tg9sp8Ma9WejSol6su8QwTBzBk8NpyMcrszH5qw0AgBm3dceF7RvGuEcMw8QbPDmc\nZvxzw36M/ORnAMBz13fGDd2axbhHDMPEIzw5nEas2nkYj7y3CqGwxCOXtsV9F2TGuksMw8QpPDmc\nJmw5UID+85ejNBDGH3q2wPDfdox1lxiGiWN4cjgN2JtXgj5zl+FIcQBXnNEIL/y+C5fFYBimUnhy\nOMXJKw6g77xl2JNXih6tUvHyXT3gcvLXzjBM5fBfiVOY0kAIDyxcjk37C9GuUTLm9euFRA+XxWAY\n5ujw5HCKEgyF8dii1Vi+/TCa1kvAwv69kVrHE+tuMQxTS+DJ4RRESonRn67FN+v3o16iGwv690az\n1MRYd4thmFoETw6nILMWb8IHy3fB63JgXr8sdGhcN9ZdYhimlsGTwynGwu+346V/boHTIfDKXT3Q\ns3WDWHeJYZhaCE8OpxB/+3kvnvv8FwDACzd3wZWdG8e4RwzD1FZ4cjhF+G7rQTzx4Y+QEhj+2464\nrVfLWHeJYZhaTFxMDkKIlkKIfwkh1gkhfhFCDI51n2oTa3fnYcDClfCHwuh3fgYeubRtrLvEMEwt\nJ172kA4CGCqlXCWEqAtgpRBisZRyXaw7Fu/sPFSMfm8vR6EviGu7NsWY6zpz9jPDMCdMXGgOUsq9\nUspV+rgAwHoAzWPbq/jnYKEPfeYtxcFCHy5ol4aZt3WDw8ETA8MwJ05cTA4mQogMAGcDWBrbnsQ3\nhb4g7nt7ObYfKsaZzVLw+j094XVx9jPDMNVDXE0OQohkAJ8AGCKlzI9yfoAQYoUQYkVOTk7NdzB
O\n8AfDGPjOSqzZnYfWaXUw/77eqJvgjnW3GIY5hYibyUEI4YaaGN6TUv452jVSyjlSyiwpZVZ6enrN\ndjBOCIclhv3pJ/x3y0E0TPZgYf/eSK/rjXW3GIY5xYiLyUEoD+pcAOullDNj3Z94RUqJ5/+2Dp//\ntAdJHifm39cbrdOSYt0thmFOQeJicgBwAYB7AVwuhPhR/3dNrDsVb7z+71/x9v+2w+0UmNMnC2c1\nrxfrLjEMc4oSF6GsUsr/AuAwm0r404pdmPL1BggBzLytOy5o1zDWXWIY5hQmXjQHphL+uWE/Rv15\nDQDgues64/puzWLcI4ZhTnV4cohzVu44jEfeW4VQWOLRy9qi3wWZse4SwzCnATw5xDFbDhTg/gXL\nURoI47asFhj2m46x7hLDMKcJPDnEKXvzStBn7jIcKQ7gyk6NMOnmLlwWg2GYGoMnhzjkSLEffeYu\nw568UvRsXR9/vLMHXE7+qhiGqTn4L06cURoI4YEFK7D5QCHaN0rG3L5ZSPRwWQyGYWoWnhziiGAo\njMcWrcaKHYfRtF4CFvTvjdQ6nlh3i2GY0xCeHOIEKSWe+ctafLN+P+olurGwf280S02MdbcYhjlN\n4ckhTpjxj034cMUuJLgdmNevF9o3rhvrLjEMcxrDk0McsOC77Xj5X1vgdAi8clcP9GxdP9ZdYhjm\nNIcnhxjzxc97MPavvwAAXvh9F1zRqXGMe8QwDMOTQ0z535aDeOLDHyElMOLqjrgtq2Wsu8QwDAOA\nJ4eYsXZ3Hh56ZyUCIYl+52fg4UvaxrpLDMMwNjw5xIAdh4rQ7+3lKPQFcV3XphhzXWfOfmYYJq7g\nyaGGySnwoc+8ZThY6MOF7Rpixm3d4HDwxMAwTHzBk0MNUugL4r75y7DjUDHOap6C1+/tCa+Ls58Z\nhok/eHKoIXzBEB56ZwXW7s5H67Q6eLtfbyR742KvJYZhmAh4cjjJzJo1C+GwxNCPfsL/thxCw2Qv\nFvbvjfS63lh3jWEYpkJ46XoSyc7Oxrhx47ArtRu+2FiCZK8L8+/rhdZpSbHuGsMwTKXEjeYghLha\nCLFRCLFFCDEq1v2pDl599VXk5eVh7ptvwON0YM69PXFW83qx7hbDMMxRiQvNQQjhBPAKgKsAZANY\nLoT4XEq5LrY9OzbGjx+PL7/8EgkJCQCAjb/uAAAUb/gPvP6dePp/XpSWluKaa67BmDFjYtlVhmGY\nSomLyQFAbwBbpJS/AoAQ4gMANwKoVZPDoEGD8Ne//hX//ve/y7QHD+/F2pV7AQBZWVkYNGhQLLrH\nMAxTZeJlcmgOYJfx/9kAzil/kRBiAIAB+n8LhRAba6Bvx4oTQAcAdaKcK16xYsWm+vXrh2q4T6cS\nDQEcjHUnThFYltVLbZFn66pcFC+TQ5WQUs4BMCfW/TgWhBArpJRZse7HqQLLs/pgWVYvp5o848Uh\nvRuAWXWuhW5jGIZhYkC8TA7LAbQXQmQKITwA7gDweYz7xDAMc9oSF2YlKWVQCPEYgL9D2eznSSl/\niXG3qotaZQarBbA8qw+WZfVySslTSClj3QeGYRgmzogXsxLDMAwTR/DkwDAMw0TAk8NJ4lQsBxIr\nhBDzhBAHhBBrY92XUwEhREshxL+EEOuEEL8IIQbHuk+1GSFEghBimRDiJy3PcbHuU3XAPoeTgC4H\nsglGORAAd9a2ciDxghDiYgCFABZKKc+KdX9qO0KIpgCaSilXCSHqAlgJ4CYen8eHUNs4JkkpC4UQ\nbgD/BTBYSvlDjLt2QrDmcHKwy4FIKf0ArHIgzP+3d/6xWpZlHP98EeVUQipQpm4wp1GLNkoyyTbP\njJkKhYcyauVC2YysP2otyn/q9EduhbPVapj5g3KIQB5TQJ1HlGyWJRijNWCFArJhcjj8EkECrv64\n7jdunufl5bzwHt/zHq/Pdu997l/Xfd0Ph+d67h/PfZ0EZvY
s0NtsPQYLZrbNzF5M13uBdfgpBcFJ\nYM7rKXp6Ci3/1h3GoX+odhxI/OcLBhySxgIfAf7aXE1aG0mnSVoDvAZ0m1nL388wDkHwNkXSmcBD\nwLfMbE+z9WllzOywmU3AT3e4VFLLT3+Gcegf4jiQYECT5sYfAhaYWVez9RksmNku4Bng6mbrcqqE\ncegf4jiQYMCSFlDvAdaZ2R3N1qfVkTRa0lnp+h34RpT1zdXq1Anj0A+Y2SGgchzIOmDxIDoO5C1H\n0kLgL8A4SVslzWq2Ti3O5cANwJWS1qRwbbOVamHeBzwjaS3+YthtZsuarNMpE1tZgyAIghIxcgiC\nIAhKhHEIgiAISoRxCIIgCEqEcQiCIAhKhHEIgiAISoRxCN4SJI2XZJLaszRLHgD7KmNOXr9BerUn\nPVr+i1ZJnZJ6mtj+Jkm3N6v9oLGEcQiaySRgSR3l5wDt/aNKEAQ5A8KHdPD2pJlHGqevhIc1q/0g\nGOjEyCHoFyTdIukVSfskLcW/Ii2WOWZaSdInJf1J0p4U1ki6PuVtAkYCP0z1LE0JjU3XUwuy50ta\nlcU7JfWkNl4ADgDXZ1XOk7Qs6btF0uyCvEmSHpW0LZVZI+nLhTIzky4fltSdyq2XNL1K3zuSg5j9\nknZIekzSmCx/vKTlkvamsETSuX26+ce2c46kuyT9R9IBSX+W9PEsf6Wk0uhN0tx0H5TibZJ+mv5N\n30yObeKr6kFMGIeg4UiaBvwKWAZMB/4B3HuCOiNS+ZeAzwGfB+4HzkpFOoDd+JlAk1J4sU7V3gn8\nFrgbPxjtb1nePcDapO9jwLyCwRkDPAfMAj6DH1p3n6QvVWnnAfwsrQ7gX8CDki7I+noD0AVsBL4A\n3Ig7hxqd8i9KbbUBXwFmAh8CllYe1n1B0jDgKWAy8F3gOmA78FRmaBYB10p6V1ZPSa/FdvQIhd8n\nPW5L/X8BeFTShL7qE7QYZhYhQkMD/tB9vJD2G9wBSnuWZsA30/XEFB9eQ24P0FlIG5vqTS2kzwdW\nZfHOVG5aoVx7Sr+rkN4NPH8cPYRPyf4aeDpLn5lk3ZSljQQOAbNTfAh+Qm9XjX7eD2wAzsjSLgYO\nA1Nq1OsEerL4LOAgcHGWNhQ3SnNTfHTS74tZmUmpHxNT/FMpfkWhvWeBJVl8E3B7s//+IjQmxMgh\naCiShgIfBR4pZJ3oWOiNuCvQByRNq5xy2WAMePw4eQ8X4l3AJXKXr0g6W9IvJG0G/pvCzcD7q8h6\n8v8Nmu3AHcBURg7jgPOA+2roOTnpc0TS0HRPX8YfvhNr1KsmZzXwciYH4I8VOWa2HXgamJHVmwFs\nNLNVmZxXgecqcpKsFXXqE7QQYRyCRjMKOA1/IOYU48dgZjvxo45PBxYD29Oc+4UN1G2nudvWalTT\ndyjeH/CRyAxgLnAV8DF8qqytiqxdhfjBrNzI9Luthp6jgO9x1AhVwoUc6yfkRIwCLqsi58aCnAeB\naySNkDQEX4tZVJBzbhU5nXXqE7QQsVspaDQ9+PTHewrpxXgJ891LV8vPxJ8M3IHP319Wo9qB9HtG\nIf3sak3UkFNN30NAj6Q2YCrwDTO7s1IgPUjrZUf6LS3QZ/TiI4e7q+TV8x1DL7AK+HqVvDez64eB\nebif8834yCY3Dr34VNh1dbQdtDhhHIKGYmaHJP0df9DcmWWVduzUkLEfX3wdD9yaZeVv4BVew99i\nP1hJkLu//AT+oOsrHRw75dQBrDazw0neELIHqqThwGep35H8BvxB+1Vg6XHKrMAXoFdbmsw/SVbg\no5wtZnbckZuZ7ZT0JD4y2ow7AVpbkPMd4HUza3knNkHfCOMQ9Ae3AV2S5uFvpVdwAreJkqYANwF/\nALYA5wNfw+fDK6wHpkh6Al+f2GBmeyU9Anw7rQfswh9k++vU+RpJP8bn46fjU1zTAMxsd9r++gNJ\ne4AjwPfx3VMj6mnEzI5
ImgMskLQAWIgbmCuBhWmevxNf1F8u6V58tHB+0mm+ma3sY3O/A2YDK9OX\nyy/h01qXAq+a2c+ysovwabLdwC8Lcrpxx1Xdkn4C/DP1ewLQZma3Egw+mr0iHmFwBtwT3lbgDXxr\n6FXU3q00Dt8u+Qr+hr4VH3mck5W/BHge2JfLAt6LL4Dvwd98b6b6bqWeKnq2J1mfxkcOb6S2bymU\nuwh/g96HG685RZkc3a10ZqHuJgq7eHADtBqfFtsBLAfGZPkfSPejFzd0/8Z3R11Q456X+gi8G/h5\nuq8HU9+6gMsL5YanvhswrorsYcCPkh4H8QXqJ8h2T1XrZ4TWDeEJLgiCICgRu5WCIAiCEmEcgiAI\nghJhHIIgCIISYRyCIAiCEmEcgiAIghJhHIIgCIISYRyCIAiCEmEcgiAIghL/AzfjIdOL2T8BAAAA\nAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAY0AAAEPCAYAAAC+35gCAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAIABJREFUeJzsnXd4VUX6x79zb3KTkEINoROkK0WK\nde2uay+7NhQVu6gIKFVFBASkg64ioiiiYlld265ldXfV9WcFREAQpBOE0NNz6/z+eOecd5LchEvL\nDfH9PA9PTuace86c906YeesorTUEQRAEIRY88e6AIAiCcPSQEO8OHC5SUlK2l5aWZsW7H7WFpKQk\n+P3+eHej1iDyPLyIPI8sycnJuSUlJU2inVO1xTyllNK15V1qAkopiDwPHyLPw4vI88hi5KuinRPz\nlCAIghAztcY8VYE9iwEAhYWFblMwGAQAeLxety3ChwiHQ+Zn0G3zmGk1HKRVTe72AvdcYUEAAJBc\nh2+iPAG+X8BRn1nMHiSaZ9AknpaQzvfL203nEHLb8vdyX8Kg56Q2Lv+ygFJ038REn9vmU0nmHqzG\nr1q5xj3eV7ANANCqNWuhTVq0q3hzAP/6+xwAQGlpKfcnHKZne3jtoa1lSCQSLvOT+mnOhUmeefv4\nfqUl9N6JSR7repZFJOTIguV999BJAIDZU0YBAJK9yXy/YvruI+DnlxTxccSsmZIyyr8toBQ9w+vl\n7y7ByLikiL+TrTnb3OOikr0AgEaZ9dy28U/Mr3hzRJfnZdcPrnDdO6/N5P5WIc+rbxwJAJg761H3\n3MHIU5njSIRufM/w8e652Y/RMw5Gnvc/PL1Cm6MpzBzzsNu2P3nWaxDVYgJlBPHeqyyvgxmf19xE\n7/jG/McBRB+fAx+c4LbNmfmge9z/fvrM7KkjuF/l5Hkw43PkZJbdzAlDAACDH5oGAHhqCssu1vE5\nZ+G7btu0sfS+Q0Y/jlgRTUMQBEGImVqraXyz+CcAQFFJidvmLzarhgSezQPgVbjfrPr8pbwaA2iF\nEAzRqig5gVc9yb5UAMDGn1a7baVB1mwCIXpOiCd+BI0i4vGQ6JtaK6cdWzfRPQLF/IyEhu5x05at\nAADrl/3MzzD384I0Fq9Kcc+VGC1r08YtbtumDXyclELvdPoZvdy23slR1BgAa9ZT3/wB1qSGPEQr\noEnjBrhtIfDLhoxmFwyyvB15ho2mMXnaAvfM2IdppT16JK8W+995Od8vHDGf5bt9c8OldFejGdRP\n4+9n1jPzAQA3XHme2/bq3z9zj+/vfzsAIHdTDj/DfPUe0IrQo1hzC5jxsWvnbrdt5w4+TvTRO3Xq\nfIzb9vDQQQC
ACdOegE00eU4aM9A8PML9iVGeI4beBADYvG0H9yeBNM2dm35z24JhXjlHk6dRtl15\nrrn7Zvdc3p6ddI8Q9znRm+Ye12/YCEB0efa56gp6NUuejnZw1sl/cNv2J8+2iXURDUdrmT5xiNsW\nDJC8Ro550m0bP+4u7puR59jxL7ptD43oBwBYsmIVAGDS1Jfcc+NG3V/mWUDZ8XnbrZcAAL5fwX+f\n5eVpj89Y5WnzzY907++u/jMA4LW/veOeu/oy6stbH7xf4XMAcHynjgCAPn++0G3bW1AS9dqqEE1D\nEARBiBmZNARBEISYqbXmqe+W/ACAzSAAUFxURAde1seLAvnuccBP7baJx+slFT8cIvWyUUOOQsvM\nJBPG9p173LaS4D7r3sY8ZVm7HB+7L5Geobyp7rkd+cWmja8PePl5dYJ0nFfKc31BoXlGKam4RQVs\nfti9aysAYO++vXy/EJsHMrzkONtjhbsXB6OHMa7dsA4AEInwece8YKvrThsA9L3+LABlTRKOWS4S\n9lT47IPDyZRzY9+Lub9hNtX5QxXNKf9490sAwGVX/JGe70lyz/35svNMG19/7VV/do+LTTBCcZD7\nXFpqnhGkL81fwuahggL6nouccQQgFLb+hDz0xRVa5kh/OLo8Bwwlx+PAu69225585m8AgLtuv4Q/\nH2Lzwfz5/wYA3HTD+fzIcvJMT+d3ycig73dfvmUyjSLP997+0m276NIzAAAffsBtDldcSjJ+5/1P\no75T/zvvBFBWnu++/REA4Lw/0Tt9+i92wjrffccO3d22/clz/NRZACqa+wbfew2AsuPTkac9Jvv1\nO4efFYyU6Uc07u1/s3uckVG3wv2uufYs9/hAx2deCf3NvvsBm0zt8ZlkxucVV7I5Kb+k7Pg88wwe\nK874tN+nbXZX9zh4AOOzKkTTEARBEGKm1ib3ffwWreQSfIlu257dtOLem8eaQaGfV2GhIM3EdZLZ\nAZXsI8dUJEyr5dSUBu65tLQ6AICde9a7bXsL2elYXEpL+FI/T+2BUuqj10v3y8ho6p4rNV7y4mLu\nUyRordKNVvLLhlVu245ceqfSEpr/C61lhL84DwDg8/E97Bjjhpl0vz+cyquRrh3J2d5/0IgyK5Yp\n4+6hfifw5wsLacVdZPW3NGiH5JoQzEQOK05MIO1MR2hFmeRjx19ysgkRLsx124pKWUvyR3EEh4xm\n5Ky4U1Lqu+eCxgvp93OftLWSdVZ9v+3Y6rbl5dE7BQO0yist5WetXfM9AKBt2+PdNjuGMz2D7tex\nQyu3rVUzGktTZz19UPKc+sTf3ON77iJHZzR5Tpn1PABg1NCh7rnHpk5FeQbcfZV7fDjkuXDh2+6x\n835n//GPbltV8gz6SetJSLC0C0uea9dR2Hyfa/u4bQciz6EPPwUAGDaItbkps950j+/tT8752c++\n57YNH0zBEZNnPgegrFYxfhSFt9rj05E7ANx+22UAgOfnsSO673UXAWB5LniFn3XrrTcAqFye119H\nwQ32+PzvZ6SVdO1+KoADk6czPpcu/YZPV/L/vyT3CYIgCIcFmTQEQRCEmKm15qk13z4LAAhZjh4n\n07s0wJ7pUNhyghoTz9atbBLZuI7i3vfsJoekz8rgbtKETFX1M3nubdCI1eNkoyUGAhx3D03ng8Z0\nszOfr9+4dbt5/ka3bftv7KjPL6R32VOY57b5/WTScrLdA9a7+YvJNNAks7nblpDApqow6J3O+EMX\nt+2cU8hUdV3/oWVU15eeHk6fsRyNTibtHYM5r+KvUwa5x45JYs8edhzvzKW+O9n0r77PjtFBt9wB\nAEjN4O9k4hNz3eOpj/YHAIRC1pg16nfYZNzml/B3sXPPPvP8nW7bvr3sWC4xpsLCUnYOB4NOVQAn\niMFW/8ncWC+DTZR2xviKX76lLllyGzOcci/GTv1rVHnePIBNSM/NojyAoPVM5
72AquU555U36H5/\nud49V68emf5seaals3wSzdCrSfJcueYH9/i4zpQ/tPKXJdw9I8PytaeqGp+2PO8ewg70mRPuN+9T\n+fhM8HJQzPy/L3Sf7TBxVH/3uCp5jpwwGwAwYvB9/KyDkOfPPy+CTevWx7rHmzeT2bpz+95umz0+\nl68is9RVV1zhtnUxptQxUziXxXlHMU8JgiAIh4xMGoIgCELM1No8jRSjK4bBqmJiKpmWSoIcK71m\nHZddWLd2MwBg0Q8r3Lbfcuh8UT6pq4EAq7qpKXSfth24FEivXp3c4+xmpHanpXEESpKPIq7WbaRn\nffnlT/x8UwKipIhNUiUlVmRLiCLBrDBrhIKkzpb6ScUOW5Yw59ibxEXSEhP5Ky82UWRbjFkMALTH\nigyy8HlpfRGx5Nn3PjKt+HxcGuGmu0e7x44af9kfOTb+vU8p1+CkzvScnu04cuv77yhaJqsp3++B\nO27gdzWae3Iy57Y4pTJyd+4CAKxatck9l7uXTA0BP6v8tqkwHHbGCKw2x9xHphPL2uEeexI5Is9r\nFb9s35bMfGefcZbbdsYpVcvTNrG8PpdkF7AivLbl8ljI3U7vuG4dl4LZu5ve0ZFnKMTvl2QiB215\nHnNMM/d4+tyXK/Tr6cceomcdhDx/Xr0cANC+Hb9zeXnmbOVIw6zGVB7Elufx3U53j4MmmvCs0890\n2x4dUbGoIwD0u3cKgLKmo9eefQQA0OfOcW7b5DEcXZa7nczQ0eTpN8UJbXmeehyZy644l8fzrlyO\ndJvx3CsAgJmjH3DbnPFp98vhxF4nASgrz6UrlrnHHdtS/koYlRMKcZKVI0/bxGfL02H3Hs4l0yq7\nirtHRzQNQRAEIWZqrSN887dUhMxrlX0Oeqlm83++4fLgH3z4rXucZ1ZSu/ZwHod2yiubn2GrwJzj\n3Eu0ckGaZnGBwWaZpGGk1uFchASjAW3eQrHXK3N4lV8Kpxw3r0rsjHYngzVoaTv+UKG5jvoVsTUN\nkBOvSTPOG0itw1pHIEDaSXoaO/vuuOkvAIB7Bw0qswpe+DStQD3WOiPsoc+tWMPllxf/+Kt7XFxE\nDrwCqzy9Nh1U5mfESpePmLafN3EByDN7nuQe188gDSPJ0py8ZsW+azd9Zzm7eRUVNH31eFieq9cs\nR3lat+zMnzEF/SIR6pf95xEBBRHUq895PL/+8p173Om4EwAAKckcbHDuGScCAKbPmlVp5rzDK7Op\nDHll8vzX558AALp37uG2OfJcvoZWqF2yWdNdbvJ5urVlZ2n9uhzIUZU8X/vonwCAzm1ZNivXrqzQ\n93Ztj3OPnbEatlbnByrPpCT+W1rx01cVnjdkMGka5eV5MOOzKnlWNT7tXJD9yfONTz4EAPzpJMqr\n+PR7zpFw6NCeA1HW/Lqiwnmbpk2zTV8OfHxGq+BQGeIIFwRBEA4LMmkIgiAIMVPtjnClVD0AzwPo\nAkADuBXAagBvAMgGsBHANVrrvYr0qScAXASgGMDNWuslUW5bAZ+PVEQvWN1duYYcXi8t4FIC23ew\nIykpiZxWxcXc5uR2eE3Md4K1C5jXOPDsWOh9eWyKKc4vMPewCiSa3AlnH4VCy3wWNn32WWUAgpZ6\nHDD7A4QCdu6JMZ95nX5ZtfnD1JfinVyGwNMg0z1u3KIN3cNSV3fs5P7bJCTQu3qs/uZso70PvviS\nVe5vlvDX061TT3pXq4yKEzu/ZiOZTo6zzCnOjorHt2fneIKX5e03e6N89wubmLq1pp0GHTmVWuug\niOlzguWsbmM9z8kZCIfs2H5jnjAmLW3touiNmP1W8tl82aED70VStyHtRRKx5JmXzyUibN6YQ05v\nW55X3UVlKk7txffcl8ey627yFmx5rt5A+yt0bNWhwrO7HkNmqeXrV7ptvTtxcUBHnhHLpuk3uROd\nm2cDAEpL+W8hmnnDludGy6zokJVJptGq5
Ll29fduW/v2Pd3jE06hQn09T+ICjdNmUk7Q9FmzyjzH\nGZ/X9men99RHKQ/DHp/7k6czPj2aZPKL9U7O+3dvx+akomL+fqPJs3t2ewA8Pts0571BnPFpm6Rs\n2bZsQZ8tMz7N5jzR5LlrBwUZpFh/M/b4jGaWuuO2uwEAz817psK5yoiHpvEEgI+11p0AdAewCsBI\nAP/WWrcH8G/zOwBcCKC9+XcngNjfTBAEQTjsVKumoZSqC+AMADcDgNY6ACCglLocwFnmspcAfA5g\nBIDLASwwHu5vlVL1lFJNtdbbsB+cgoDhMDslP//6RwDAyrUcRpjZgLOlnVVWwHJ2u/sIa6NpWMX/\nnHBLuyii12NpDs5nLUes10fOuQRT/zzJKihWbArHlZby8+21gXOl5YdzC/5pswNhOMLhe+edQk7k\n5ETetPm7FRv4eYXkqA5qHgarV61FNNwS3BG+duVqupetXRxnlbl2stRDlrakzQquvdmL3NaqPEaL\nsx2NHquu+Xer6Pvr3bGb2+bc22uuS7R8d34Tc+xkJQNl5elcaS3MuKCiCWyOaP4uunWglV+ilSW8\ndguHbPvNqnz50v+6bU3qcfatTTR53nodlcXO2c4ZwhlpnC29zGRGlyl93ZI0DEfj9EaRZ6/OHAJr\nj8+I+S605Wj2mIoB0eTZ2qx8WzfnfeQ3beXx0sq0b/ltnduW5Csrz805m91zF51N4aCJXnamR5Pn\nsh//47ZdfAGXDrdx5BltNf3kcxwIYsuzqvHp/LQ14W7G6W+Pz8W/cMi8gz0+HXmu2kRy6tC6o3tu\njdFiWjVr67a1smTrSP637Szj1i3o8448t2zlcOELzzoNQOXjs1sPChUOa+5/dhMO0omV6tY02gDY\nCeBFpdSPSqnnlVKpALKsiWA7gCxz3BzAFuvzOaZNEARBiAPVPWkkAOgJ4BmtdQ8ARWBTFADAaBUH\nFAeslLrzsPVQEARBqJTqdoTnAMjRWjvBw2+BJo1cx+yklGoKwNGptgJoaX2+hWkrg9Z6rlLqWbst\n4Kd5J6+QzTU//bIRAFDEFgfUC1h7XQScXAc7a9gcu1vusXnK2afCU8YkZX+WftrOSW3MUh4vqe0p\nlhMdymSd+60OlomU1hUaPaD7lAbI1HRM28buuVtvof0XUhLrum1p77Hp5KMvSLX2JvH5Xbui5586\n+ywUW6azTb/trHBdOGRnsDux5Laj2Rwb0x0sc4qzD4CyTFK27HoY01fIkvGPa8kR3M1kY69ex07F\njm3InOA4iwGgbUs2D0STpzKBE0GTaZuVxbI5+ywqBOfz1nHbkhfxvZeuJLNnp2NPddsKCjij26Yq\nefrtnR5DFb8P+x2qkudPa0kWvY9l57Itz0WrlgIAenRgc4o2slcekoPPGttQJkvaMvfZfdlsTFV2\nW3l59jqez/X5M+01sT95dj7uD25bZj3OgbAJVbLjJFC5PKsan+uNie1Yy5wUTZ69Oh1f4bM//LLU\nbXOc512PobGoLXl2yD7W9J07uC6HHe+O6dEmUZE8126h62x5xjo+PYl8vqDASuyKkWrVNLTW2wFs\nUUo5b3ougJUA3gfQz7T1A+DsVPI+gJsUcTKAvFj8GYIgCMKRIR61p+4D8KpSygdgPYBbQJPXm0qp\n2wBsAnCNufZDULjtWlDI7S2xPiTBOMLzCjlDeJsJJw1Zjt+QFQ7L5ZutLFDTps1Kzq415H4uGKzQ\nBgBBp8x0mcWip8wP2xCXapyGKYkVne0AUFJSXKaf9GxayXjMfbod28Y9l5hCq1iPh0s/9+rJ5/fu\nNbWMNu9223J3sLxsvMbRWGyVad5rwkkbZmZzfywtwNHY7P46GbcbtpMT3QkLBYDFZoXWsyM7023C\nxilrZ713bWdWyWaF3LUdfzbJhDTabWkpvMoKBPxl+gkAEVMqf/N2WpX17HWae87ro3GhFIehHtOG\nNbuiI
gqx/oeprwUA3btErz0VTZ7/+fL/AFQuz2NalHV6AyxPx3G70goRdVbBVe3OBgBd2na1WwEA\nK9avMOdYC1mzcWWFthXruFZSZ7NyXreF+9C+Od1bmS60asHyilWeubsK3Lb/ff111Hdx5PnUxFFu\n2wt/+xhAWcfvqnUVKwLYGeetsujvo3Vjcp57rBB7R56LLU1ifziyd8bpirUsL2dc2ppwtEoBHVux\nM15FaExnN8kGAPTozYYYR56DH5roto0cfId7/OZ7tBbvffwJbpuzs+KBUO2ThtZ6KYDeUU6dG+Va\nDeDeI94pQRAEISYkI1wQBEGImVpbGt3Jnt5lFbDbtY/MUxE7CzvMpignMzoYxWSVHKV0V8CYpcqo\n/5Z66RgWbMet44j0Otm1VkFCN2/AMkkl+qycBVOKvaSE1flQKfW/dQuKRD7xBDbFlASoAKPisG1k\nt2LHbt8rzwIAfPkVq9trtuQCAJavKvOqbvZ0QSGrswVFZJ7SdhZ2hGXnvGs0k5VrGmnDBfG6m0zw\nyuTpPGeFZWLoZJzdHkee1rOcT9omBq917JQO/+lXNhm0bdK5TB+GDeLS7IEQmZIUWw+R2YjNXaed\nROaZfldd6rZlZXKOjI0jzwEPjXfbnphPuxju2cfBG/uTZ04umfkcc0qZZzim1crkadqPtcxTbtsx\nFFhgm8KOzT7OuajC9QBwvAlUODabTY7GaobMhpQf0a5ta/dcrPJcZQJYAKB+Pcrt+PTz/5V91yrG\n564dnJvUrjWbepwglxZZbLJ1/lYTo/y9L1lNgSPdrIzw5et+rnhhFBx5ds62CjwaM3hXy9zXo6MV\ntGCKEgasYJ2wyeXalEvm06vbsvnUkeeMSQPcNh3i/z8ef5jabXlu2827gMaKaBqCIAhCzNRaTWNf\nCYU6rt3MuYGBItI0fIpXTwFtzeLmZ8ha6btXms+EwbVmSsymSKFKxOiN+KO0Guewu+LlZ2mz8vFZ\nGeQlVnY6jGM3nMSfSQqTk/D0kyiT9NgOvMmOV9G7JVrPiHj53dPr0xLvj6fxyvDMEK063/34szK9\nLgrQ6nf7Lnaah/wkiwTFK9+QtlbG5mfYWt07ba2btDS/WyHP5v3DsIMNeCW7bkPFstFh8/2tWk/O\n145t7FWuNv1jeS7/ZbF73MWs6jpamwYpsyHOjZdTOGjvUzhb12PGgNdaa2kPv3tKKo2Drp1auG3H\nhkkDKL8yduQ56M6b3LaDkWeWs0o24bDHtuGM4lITJl6ZPDu06YSK0PlfNtD2AR2yWROMJs8uVpAB\nzN9NJNHah9xUKOjUjjYqm/bkqxWe+PSkgfyMg5Tn9fdRLa+Bd9zotjnyzLI0iZQk/lutanwmGnlG\nUDHIpTLton12xwptazb8UuZ3e3yu3kCBBccew5reitV2dQUq2R5Nno6G98Qk3nM82vi852Gu0fXM\n41SLK5o8DwTRNARBEISYkUlDEARBiJlaa55ynNnrNmx02/zGSWaXo45Ym0BHjHLqtSrYOQ5bpykQ\nZHNRwKjyYWvq1XZ2caSials+Zj7B+gocp7iy9kx2duSzCVvms+aNaeewbseRGSXVylgPmv2bvRGr\nT1aBROfeDRvy7mOJEX52mWcaB2HuDs4CD5qCcso219hOUiMfe+c8R55OU8gKRAiZ663ulpFndis2\nvTgEzPfsnLN3anTMKSpKbg092ykoyf1vUJeiBlq3pPJnSVaGtVOi+t4Hn3TbZk9g04rjuLR3QvTq\n6M+OJs8NpqhdmbyBFmwe27KVspSbN+e2bdvIydumeUvzTizPNZsrFp9sbTmClWX6cnC+v9Yt25r7\n8TXrNq+u0D/b3OIQTZ6vvP8RAOCZqbzH993DyHTisb7wux9i2T71GDluh02Y47Y9N5H3+LaZM3UE\nAOCuoZPctrezSCb7G5/btvG+5c2aUenyjVupsGLH1m0qfNYuB79hI5u
f2rSmgo4bNvHugOXHrBdR\n8rwsk6Etz6rG56gBlLI2/qkX3XNPGwe4PT4tS7wbhBKLPKtCNA1BEAQhZmTSEARBEGKm1pqniotI\nTd+8McdtC4TIHJOQxDXkPeBoDW1U+wSvnRtA+l3QFJYLethc5OyNYdccDNn7OUSpBRYpt1d7YsQq\nWWLisRMsk4htzApHTEHFIM/1TVpTVErLZlR+wWfV0nfMCMlWWZKwFfcfNrprgrJMUp7o5hS/n67d\ntZOjp7buINk2MyUNAEBZPVbuboe2PE2ElCnSFrZKtjh7D9hdCFvy2rCZ1P4WLdvz/crJM2KZXHSo\nYumXDq05GshR+3WY71Evsx4AoGH9uqbvLDtHnvMmD7OeZ+WgmKgnr7JlGH1dFk2eDpXJ02HrVt6v\nonUzin5xStlssPZXcDimFZuzwtZ4i1bjz5Hn1hyStb2/Q0srCskhErbeX5sCgJY8v/u1bKSRLU/H\n3PPClOFu23OThvD9jDnl+UmWCUVHl2f/YZMBAH97h/fecEx13kQuchhtfNokmp3wWmdRFOLmHVzq\nrk2LbACAx/qDz27FY9HZYC9aKRCHltbeGY48c3ZsdNts81k7k4NU1fi0STAJL5WNz9sfmgYAuHfU\nX922eZNGVNrXyhBNQxAEQYiZWqtplBTSymtPmYxHmsVTUnnl4bGrCRrHdUYKr4baHUMrg5WryamY\nZ2WcupXzvJy7oaxVDVRShX45i24n09be/9dZddtZuN4yjm2nlDOvHrIa0YqjfgZpTyrIq9xUH2Uj\nJ1gryoDlnFdmZRjy8/NUdEUDgVK6prCAC+xlNSRnsS+JNRVlVxM0q/4US3NqkkWfyfltOwCg2NqD\nGtoch60+evnetobhnjfydFalESvD3ik2uXYzOys7WNnKTplse3X3yddUNPAvF1OmrbKiHBynuNeS\n503DuDjc8xOGmu5bWemVLMuiybNJI9IaU9I4i9yWZ+smFF+/aTtrz8e0KCvP5o2sgoBGk4oEOMPc\nlidUxaAHR57NjLPdE0WeTuFEAPjVkm22s6tclIzxuRNp2xxbnvOn057oiR7uh+14dxz1tjzvemRa\nhT4DwNRRlIPw1nv/slrp2b9t3+i2tGnZ1jpN9+/cOtttapKVCQD4z3e8b7lD66ymMB3iPkaRZ/MW\nFQM2eHzaxTErytMp5w8Aa00hQ0euAFA3nTLmU1MqlohPSjCWD2t8lpFnlD3e7WKdsSKahiAIghAz\ntVfTKKHZtNDahMkhNYXt/pFSPl9oard07cgz/1WXXQwAmLPwHQDA4qWclVzH1IVqllXfbcvduZf7\nECQNxN6kydlgyBG8x/oKImYlYJdm9yVW3H+8xNJ20tPo8z5zG2fzKYBt+dqKu0tJ5HcPGU0jbGWJ\nB1HRzgsAgQDd196/3CHJ0oa0FZJcarSIVs2aum0n96Ys7E+/opXc+o1sg09KINms3/6b29auKWe4\nB8wKz96kyQ2Jdn63Q26j2JbLbMjUirSOgJ/HQL/LLwDA+7Dbm/s436O2MrZfmfqIe+xoLl4rqzmM\n6Cu5Q5Hn+adwaeuq5Ll1F+1l1qsja2iLflnjHjfPonplBypPWxO2fUQh86q2PG++4kIAwPx3KeR2\n7sQH3XPR5Onz8rv3HTGOPjuJP/P84+T/uP3BKbAZNn4mAKB7+4raqL2y7tiG/7ZXm2oRF5x6otv2\n8dckxz/0JLna8qxjssnr10112/Ly+W+xqvH52zYK4W3VxArhjSLPBMv/5ozPsFUVIiWZzt/18GTn\nCe65RGef9Erk6cjBlqddly1WRNMQBEEQYkYmDUEQBCFmaq15atdOcoCHLceTE8pq77QXCbHq16o5\nmVEuPf8ct63TMZRV2r0LlTZetITLcjdrQk7IW2+41m1b/Sur/0tWUAZtfn6h2+Y3qrtT3rwkwOql\n06ug1b+A5URONCG+vnR2lLbi76s
AAAAgAElEQVTJpv45Ziyvh81PCWZNkKiszHQr+9YxwSg7Bdsb\nPVywIJ9Mdz/9ytmuzRqT6Shi7yRnZSQ3akBmO0flB4C548n52bol9Xvdhs3uufr1yKk/9NIL3bbf\ntnHI44YtZLYqKeHAg6VrSN7tm5OpJRDi/ju9atE4222z96/2mhDfLVbp7OF3UCl0x0zgsep2e40p\nwGun2Vohv07VAGWHAXuiy/PhKZQN3bUtO00dc6ItzzUbeDw5po4brrzEbWvWuCGA6PLs1Yne9ezT\nec/yvpZsWzdtBKCsPH9cTWO2QwtyukeTp21OsR2tjjzrWObfxpnUvwXT6Hv3JbADN5o8+w4b5x47\nlr0yIcyVyNP9SKSiebZDWzah2fJ05HPB2bwHuTM+v/7xRwBAVr2G7rl1ZiwOu/NWt21/47O8PLfk\nbnXPNWtkwqUPQJ4vvUe7Eb409SHzvlWPz+uHjkV5DkSe0RBNQxAEQYiZWqtpOIlwSVZoWmKis8c2\nr4a9luP3L1ecDwA45UQuVezM402aUiie10qea9KINpY5oRs7t047gVey5+SSwzJvH28ElZdPJdud\npC6/pWmsWUdO9P9+w5sCBayQRydEsFlWA7epQztardRJoGVZSZDfx+N1yrDzysdOdNMmwS5g1bey\naz3ZOIlbPTvzqm2ns6mVlSRlJ0s6G0K9NH2M25aaQhrRa++/DwBo2oCd5PXSKWx46twX3LZ5k3nP\n5x15FC5ZXMTOx3POpPLRBflUIj5orYy35dJ1P6/Z5LaFIrY8qa+9u7EjtO/ltCq/aegEAMDLkx91\nzymzKotYmpvtSNRmlXjTCK5/NG8SJ1rZDL+LVqufffmN2+Z8N7+u/yXqZ/rfcDUA/s4B/gNeaPZ/\nbtaQAwccebZtxWG4ndry+WjydLSZO66l0vAHI8/6dTl5durzVAr99RnkfO0zbEyF93p5KjtmXzVh\nuAAQMY5lu55WZePTcfL2OpZDql3NzRqfHVplu8fnnknfuy3Pm83K/O5+pHG++wHv9/7HE2mX6qsu\n+6PbNu05rv008UHamdqW55A7+wAAzjqN/hbWvM7h0o6sWzS2N4GytwIgq4Y9Ph2STFytLc+Xp9BY\ntcfnwhn89+NosKHI/uVZFaJpCIIgCDEjk4YgCIIQM7XWPJWUQqppRjrvOezbQ2pjqZ/NNR1bs6nn\nj+eQOSktxcrS9pt9wB03tWbHU9tW5NxqkGblVVgSTW5PjuBwkNX1cLnS6D7L0fr5N+SQ/WoRO9uL\nAqzqen2k/gfCfD9/KZmIvCCzWaqPzXGRZHqW3zI/2WXCE5JozeAtth1x0fMKEn10bUoKZ7knFJIz\n33bcN8vkvnXtQuaPmy1n3EJjfujf7zoAwHvvf+Wey2pETsdXprFJJ9XaI715E4qPj4Stdyxfal7x\nF7ByDeUpfPy//7ptTbN4j2qPSZUPRfh+QZNb87eZlA/gZNkCgE40e8hb6r2y9x83G0vPn/qA22bX\nerKpSp6tmme7baf04P2ob+xDe48n+6yMXmOOvPvm600n+f0deaYl23kA3Ido8pw3nfsOHLo8J4+4\nCwBw7f2UOa+s0vvaHAYjVsZ6FHmGrSJu4UjZ79th1MA7AABLVvHm9m1aUpnztZbz25anMz5teTrj\n88tlplz6fuT52gyu3RQwJqNo4/PWB6bT+1m5QyP63wwAeP29L902f4grJEQrtT5lZH8AgMfUyHpj\n2mj3nDM++wx6HNFwTHgvTeP6XpXJsypE0xAEQRBiptZqGqnJ9Gr16nJ4qlfRhjc+H8+Vl5zP4Xbt\n2lIIbdFOzkh2Qnbz9lAIr11FtqPJtE3hxTXCVoY5zGH+XnaE+0y2b2oq3ScQ5tpY+/K2mmdyW6P6\nfPMkoxns3bXHbft1A4X8dW1PdYvCIf5s2DjZg2BNw1IK3L4kW6vdcDj6yiMpkZaoqXX4/T2KnPoJ\
nCbx66tmdAwGaZFEI7cInuIppxGS3FhdSUMJvezhEdNjt/ahfVlmdiJUR7bxGieVodMKok5LoXUIR\nruU0ec58ev47H7tt6al2YAT1u6iAQ6K376BghFZNqJponyGsJb0yg94jDNYe7NJlTl8SfVYWfyUr\nuVjl+c2PXIHg7/PJwe7P56oD1w+hzOibrrycPmtVkW3WjMaELc9rBrCTfsE00ipuGTazQv9eMpsl\n2fIsKqZx16oJBy/Y8lyznlbE7VvzGBgx+VnzGZKnXYU4YpzsByLPfgOp/7eNKNtnR55/Ou0Ut+1f\nX32D8ny7lCsCDBtEzm5bntc9QJnWH191BYCy49PREi6/qBu/Q4zjc8E0qo01b/IA99yPP9O9I5aM\nt+/Y7h47moEtz0vPpf3sh0+ijZQWWlq5I8/50+/nPlnD71XzdxjL+KwK0TQEQRCEmKn2SUMptVEp\ntVwptVQptci0NVBKfaqU+tX8rG/alVLqSaXUWqXUMqVUz+ruryAIgsDEyzx1ttZ6l/X7SAD/1lpP\nUkqNNL+PAHAhgPbm30kAnjE/90tmJjm4Gzeox40m3rtVc45b73ksx0iX5JOjLxRkc0WpKWG9czup\njSkJrP6nZZCTvTTMJil/Caum4YC5NsA2odRUMjf5nKxM6xtISKJnpdVhHf2U3qyaZjWiXJE33/yf\n27Z8JTnsTj+Z4tOTPPx8bQqnBStxbvv9/gpt9gZQNhmm9HpGGhdrg4l/b9SIN4Rp04JlGyghc0s4\nzIEHNw6j/YuvOPdMAEC7LN5v+bprzqD+RixzWoD7GAmZvoVYPklJZB5JcIrEWY7eQbdR5vSHn3JG\negcrT6Guyay3zVebc+h8p/YU5PDiJDYnuAXmKlHp7YAAB28l+5NXJc9fczgPYtqou93jFs0psOK6\ngZPdttIiMm28/28KntiymzdoSk6hyga2POdP7s+PM/Isv289ACycbpylVve9iTSOkpN4PNny7PsX\nyjYPW9+ZWxr9cbpforLOxSjPOx58ym3zW0EsNo48bZNU2xbkpD+1R3e3berD/P5X3kL5IS/P4H3e\nHRNUtPE5agA52ysbnzePIJPRM4/c7raVH583DecNkByespzjF//x1ArnbXluztlR5tz1Q6e6xy9M\nvo+ur0Se/YbMAAA8P+k+t82R7Q2DJ0f9TDRqinnqcgAvmeOXAFxhtS/QxLcA6imlmka7gSAIgnDk\nicekoQH8Sym1WCl1p2nL0lo7RVy2A8gyx80B2PtX5pg2QRAEIQ7Ewzx1mtZ6q1KqMYBPlVJlaiZo\nrbVS6oBc+tbk45JZn0wPda08DbNdA9qZIn8A0KQhRyft3WkiF6yyBeEQmZiaNyFzlxXsAmUKg9mR\n+EUB/i3R7FKWYe3nGzY7ZQVMFFHEx6p+kyZkfmrXlssadLTU/z+cQHkkK35a77atXEPHm3eSSa1r\nB+tZJufAG2Ybg70nt1MozTar2KYkmwwT7VUn2SqQZrTqJplc1K1eGkfTXHcPxYs/P5G/nrmP3QMA\n+HYpmWDW5vLXP3ogqfX2jh5+yxTlNYUVU1L5O3Xi4J09SLS1TWG9ejQGft3MKv1Zp/NeFB3bUpz+\nlk25blvONjrelU9yaNWUn3XjINo17sVpHJ1iFxd0dkG7fSSbIOZOZHOITVXy/PN5Z7ttQ8c/4x7P\nm0yym/f4Xdbz6U+4QT0ax8dau7wps6/C/uQ5fxKbZ5xoG48pmXLTCH6X9UP7AgCaZDVy2+y9ZKLJ\n8+SuZDb9bgXnTzi8NGMQPSvCa9do8pwznk10Th5NeRx53nTFRW7bt4spUurrH39y256fdL91TPf1\nl/I9HXPabVeRaXPdjtXuOaVoN8fK5Olgj8+bRj4BAFgweVCF68YYeZ5+Apc++cen/+ceT3+UPrN+\n20a3zZGnU7LEHp8JJqepMnk62AUSo5km90e1axpa663m5w4A7
wA4EUCuY3YyP52/8q0AWlofb2Ha\nyt9z7pHssyAIgkBUq6ahlEoF4NFaF5jjPwEYB+B9AP0ATDI/3zMfeR/AAKXU6yAHeJ5lxqoSn0l9\nbWpW7wAXUmvTkuehukk8UwdNgbQwePXnMftsn3giZZJ+9Mm37rk9eyjvY89evr6kiGfxOsm0Ukqy\n4qKdeT1onJ6J1p7iiSYHJMHau7llFmsa2c1I27n0kpPdtucWfAAA+G6JKRGezecc/aKokOPAvVbp\n80Szv3FafZZBwAoCsEkwdb/r1+O8lzVbSFs4+w+8er/jwVnu8YIp5HCztyFWJsO6XTv6Dnq155Xx\nmaeTw7KwiIdlwMoGTjIZr4lWWrMjz7Apluf12jsd0vdyllWAsmFdXhlnmhyYXj15t7eZz70BALi1\nz18AAE0z+dz86bTyKy1hx6THKi3t7Kz4yqyhbpsvKfqfmCPPBe9+6Lb16EiyaNywYdTP+MzObBHr\nz/bmh0gTmDiCNJp5b/3DPffArdcAODR5LpjFK3NHnnZp7Wjy/HLxEretW8ey+2W/MJ1zdrTZE95f\nGl2ejpPWlmeoEk04mjydVXT/6652287ozXuEh83foC3PBVMpw/uFt/9Z4RnjnnweADD7cdYebXk6\n3DzySffYcay/MGVgheuiyfPBe290jzu3J4vD/Xfw1gvFxkLx0ONPl7k/wOPzlqGcwzJ34r3u8QtG\n20m1NKFo+4bvj+o2T2UBeMd0NAHAQq31x0qpHwC8qZS6DcAmANeY6z8EcBGAtQCKAdxSzf0VBEEQ\nLKp10tBarwfQPUr7bgDnRmnXAO4t3y4IgiDEh1pbRiQjg8woLc2OWQDQsAE5iZs1ZpMVQuzW8Riz\nVNhy9SSlpAMAmjcjVbZVSy5wuGETBXZ1OpbvFwxYjieTd+GxCgY6zr2SEsrtaOBjp2Ku2W2wpJBz\nLexdu/buIUd9545N3LZWbej9Fi8nR2PvrmzOaphmHPWWs87eydDJIWju4/uVBqIX2Esx/WhomU7O\n6EV7WdS3SrW8Onkkv6t2zA6sQvcbTmUv5kwik0MjKxBhh9ljpFkLvl84ZKn/Zs8QZe8HYNRqJ7Bg\n8PDn3HND7iKFNWCZP5J8bEosKqTyLs2bcS7POadTGpCzBcMxrdj8km7MjbYMh0xc4B5Pf4jKoDSw\ncoOCURylAHDTA7RD3bghvCb67POvAZSV58IpvNeEcv9cWZ4LZ9EeCvmlNGbOP7mXey4zk/pxKPJM\nS0h3z+WZ3Rv3J89pj7KjvpHZWfBMM25uNWVP7GfNeLif22bvvDfj4ZsBxCbPFOvvxOHM3pQLfGrv\nHtwYZtndOPzJ8h9xKTF/p/1HTnPbbriU1rXH9+DcLlues8ZRwEdyMpflmT2Jvt98U17INic54/Pz\n77lA6Zmnsqm3nynyaHP2aWX31nhsGO8iuG8f/f8x/aGb3DZn/x4AGPo4jdX50wdXuO+BUFPyNARB\nEISjgFqraewpIqf29z+udNtaNqFVbYd2rC3kWeF2ziLHazkGi/dQkTbtoZXXCSdyJZP/+4HufapV\ns6weL8ywZy/N/HlFnDFe5ITamgzREqvM+bLlVJyuRba1MvRxAcIN26iQYkYKr36P707ln7/8ZhEA\nYFcBryzqpNF7WIsrBILsKA4HzYpxH4ekllh9tSn00wpv7QbeeaxhPXLiN23C71Bshe86vjWPl9cm\nz46nzGBnV7a27XjVtnod3buDpexY9fBQaLKfi/0scL8TamtWcFPGcLbr8DHkJL7kXLaIRhI4KGDH\nPipUl+LjrOzs1pTRvsqEMheUsDySkuk97C3Vxw/jVd0DE+YDAGaMY9dbwOqrzYuzxgMAbh7EO9X9\naLKBbXn2tXYBdFbm8x7nInX+Qgpc0Irk8Mm3i91z555CQRGHIs+AVeZ802bSrBtk8qp+7CzeZXHM\nkD8DqFqewwdc556bOpoKB
tryHDnp1QrvG4s8r+pPGu5Nn3KgSkEejevKxmc05k4gDbj/KCplfnff\nK91z+xufg0dTEKetTYweTmG12mpzcORZ2fh03n/KQ+wc/2UzaXOnnHgcAOCMk/izzvgcPpFlaI/P\nx0xfUtNYO3z8oesr9Gt/iKYhCIIgxIxMGoIgCELMqIPJCKyJKKW0/S7/fJHimCdMneO2XXg21aK/\n8tIz3LZAKau7Xq/ZmyGZdU6/cWBFPPRzcy474p589m8AgIsuYZPV8V3ZKb5zKzl27byPXXl0n517\nTDG/Ur5fzmYqNnd8zw5uW9MWbErzhEl1zfBxnPWuAKne7/7zCwBAh5acTX726RQjX1RS4LaVFrPp\nzWMy2lPTWHU21jP85Y4no8Zun9SF9xI4/rhsauvV2W0LBVl395i8hcRENokFzf4Dtz9IhdYef5DN\nSR99RqaFHj3ZZJXdik11+XvoPey4+oJiul9+ofmeguyY3L2LMpOz23C5svqW411FyDyXksCOy4IQ\nffePTZkPALjlWs4wnv/mRwDKxrSPGdLXPR47YyG908NsgnES8B+Z/FqZzzkmjBOP4xyS71YsAwC8\nMIV3g4tVnlrRz115/P77k+cDo+YBAEY/wCaKWOT53mdsAhszsg+/k5Hno1PecdseGX4zAOCHJWTK\nbdqQAz+O60wBGP4AmwCDfl7HOhntY6a/7rY9NoJkW16ekx4is+fIidPdtruuvQzAwY3PquT55Q8/\nuG0zJ3BxwqrG54xn6P+KW/vweNrf+Bw7mXKG7Pd05Dl+6ksoz9RHyYx3MPIcNWlhmXsppaC1rmhT\ng2gagiAIwgFQax3hPy+jENRkqzT18V0pnC3JCiP0pbGm4exkZ2cVh8zKw/GNO7vRAUDEhNKuW8M1\nFVs2ZK0iP48cral1sty2fTtpFfCfz2i14gWvCs45h8LpGjbg1VjIz467ZKcTHu5z/VTqa9vmFHr7\n2ybe1S/Q21xv+Q4TrMo5Pp9ZSNi79VVSRn1AX1pRJlp7OGe3ouzaRGs3wwQvr+ScMuvOig4Awqau\n14tTKTu1Xl3WmrQJ/czdtttta5jOQ7SkmMJKk5L4O3BWcCd3o9Wkx3rZLl1I00pP4+87HOT3T3Qc\n9Ir7nJpEfe13DZX5fsloFwDQsS2tKicMZ03CXnVNMKs22KWpKylTfe/1lOX79MI33LbHh1H2dYsm\n/H7R5Nl3MO8mOG8SZVg7leEPRJ4j+l9MB1YUa1E+ye/5Vymz/NRunEH99TLShG1H76gHONvakeeU\nh69y21LNd/XxZ1Se/opzWcsPBej6hye+5raNG8KydXYwfPxB1mZCoejjM2cTVRc6rdvxbttXy5YC\nAF6ePsZtu3U4Hy98go6vH8RtL0wmLe/WEVQz6plJrPU58uxzGaeUHduB/7ajjU9HnlWNzyef5+xz\nW54O0eQZDUeedjE8D1heo6eRnJN81mcqkWdViKYhCIIgxEyt1TQK9tKK+4rLLnDbTu5FiU9JiTzb\na2tTGCfZrdTa6CWhmETkVUZUVshf40ZkH969fafbtm0r24x95jlBL39m46+klZSaFUibNqxVZJok\nplAJP99rfUORFBMGGeD7KZBvpLFJZvxlD4fEbttKNtaWzXn1iWRehni9tMrwKF47hAPRQxpLiii0\n84TevJJrfwyF+159D9fVeWcub+biMVqJXUXXkbHHqbdj2Zgz0kljKdjHYcP79lTUYsIe/sxfzqXK\no8ESamvcmLWKjLQ65p34+ZbS44ZBhqzKv8pofnVNsth5J3EyVa8eVIeqYQP2gSCRl+keD2kVQx57\nxW2baGklNo48B9/CiW2OPBMt7UIr7vsVd5BWkZTEzy8spO/4UOQ59hkO0XTk2as9vastz6F3Ukjt\nyHs4DNX2eVQlz4H9qJZXUSGP7X17KoZ3pyTzgHfkaWs2EeveNtHG51vPUMj1jUPGuG3vPsf
JhY6G\n8facx902Z3y+PP0B80B+dqzyHDenrH8AAHq2I60i2visTJ5jhlD7iIlvV7jffTfRd/HXBew/cuQ5\naQ5rblMf5nDd6Y9QiLPP2mitMnlWhWgagiAIQszIpCEIgiDEjEwagiAIQszUWp/GySfQ/hcNmnEx\nvsICKitQaoWLhCyfhrsLnGWDj2hTdDBCUUpeq1jceeeQ/XfzJt5Jr2kmR1M0NZFW23PZBlo3nUIX\nLrqQokhaNmcfSLrJl0iworcSfPZXFDLnOY/Ea3YHbGR2zCso4Oip5T/RrngtGnFuhS/F2rnNvIov\n0Qqn8EUNzca8dyma5qnxXECvtIRKnCycztE8f76To00WTB8FoPxOYaZInja+DUue3bp0AgDs2sk7\nv9XL4GiRR58gW/G4QZwbUcfYwHscT9EpDRuwjTklme5tR2/ZJWKcsCGvh+XtMTbs9GRqKynh/UU2\nb6IyLg3SORcmwdp50dkK4qmxXPYiOYHvbePI07bX9zLRNFcPecRte3lGxWNbnjcPo6J2CyZTaRFb\nnh9+8Q2AsnH+4waz7OqbSKvHBt/gtjnyXLJ2LTWs5T6f1Ju+n8MtTxsvKsrT/ntAQtXj0+aVGVSq\n5bUZ47iHqqIN/8r+D1ZoiybPaONz5osc+fTkaIqui5aP48hz+DnXuOeijc/RM9m/NGEYf1cOU4ZT\nTs2+fRXfw5HnlGHsJ4s2PmORZ1WIpiEIgiDETK3NCF/xz2cBALsLuOCfz5RBV1ZockBbBfbce1lz\nqVkZN0ilzOwSa//dnL0UNbV3D6+YMuvyqr2BiWjyW3Hwe02mbUodE0URZk0nJdlrHsmzf0oqZ4i6\ne3pbGZ/JRvsoKqJ+vvz+l+65dB+tui89nzPWM+rxCsVZ9SUk8jMK8ii65ZIBT5dZMTn5AAWlXFAt\nwVRCtHd0D2lLqzA/7dW0U8UwLYmeGbCSTncXkUZWVLiX+1uHNa00E9FkBZigyGT0+5KMhhHh79OX\n6DGP5Gf4klhLc8rUh0McMZZoVst+P/Xzy0W8t3VKAn23vbpzhnVKKr/vfWMpamrWKF65lxRTfx6c\n8UbUFejMR7k0+uGW5z1jKFJo5N2885stzwenUX9nPHyH2zZkIu1ON+lBU97ckueISfMqPP+v4zij\nf8AjVGp85iO8J7wjzwGP0WcvP+cU99z+5BkImD3uLc26Mnk6zBozwD2+fyxVhajs/7hZD99T4X0G\nT5gNAHh+LGkat43maCvnusrkGcv4HPn4s+65GaNIM3lg/PNumy3PaOPz/seoKOL04VQS/UDGZ1Xy\nHGlliTvvKhnhgiAIwiEjk4YgCIIQM7XWEV43jWr6h61d80KmeFjEKpvhgxflsQuYOSYhvykhELIS\nxRqmkfjqpnBRwaBlbgqZhCtfCj+jrnH0pZi24jzuX5rPOH0T+WtJTGFz1959ZE4KRSxHpHEwppgk\nsPPOtnYpC9J1KWm8vwES2LTlSyGTRsTacCMtreIOaABQx+xGFrF2eQubXf60VSojIco6xGuVcnFU\n7qAxu4StndjSjXnukalcWmPKg7wfQNiYDRN8/Iw6xmjjM23+Yu5fckIdpwPcFyuwoKiY1HVH5QeA\n2WPvpvuZYo7dTFFG6gA9w2ftzGaXdHniUTJL2ebF5GQryMBi4UwKEigs5l0aD4c87xkzwz036xEy\nfYUjnFAXtsxNs0aTCeqBCbzboS4nz/senVfh+bMfY5OaLc+ZY8jcE9bWzpeJ1Ne/jiaTVTR5Dps6\n3216yto7I8FHZpRY5OmYjuZOuL/ScwDw1CPcd6fsiS1Px5Q1Z+wQcz3vcufIc/BjT7tt9vgcaMxM\nT4zp77aNnsimJwCYOpyvT06o+N+vL4nb7hoxkz7zMO+E6I5PE7Dw/n95/5BRd19vzkUfnwciz6oQ\nTUMQBEGImVrrCBcODeMIi3c3ag0iz8OLyPPIIo5wQRA
E4bAgk4YgCIIQMzJpCIIgCDETl+gppZQX\nwCIAW7XWlyil2gB4HUBDAIsB3Ki1DiilkgAsANALwG4A12qtN0a7Z3Jycq5SKivaOeHASUpKKptE\nJhwSIs/Di8jzyJKcnJxb2bl4hdwOArAKgFN4aTKAmVrr15VScwDcBuAZ83Ov1rqdUqqPue7aaDcs\nKSlpAgC9e/fWixYtOtL9FwRBqFUopRZrrXvv77pqN08ppVoAuBjA8+Z3BeAcAG+ZS14CcIU5vtz8\nDnP+XCXLC0EQhLgRD5/GLADDAbecZUMA+7TWTlZWDoDm5rg5gC0AYM7nmesFQRCEOFCtk4ZS6hIA\nO7TWiw/zfe9USi1SSi3auXPn/j8gCIIgHBTVrWn8AcBlSqmNIMf3OQCeAFBPKWcTbrQAsNUcbwXQ\nEgDM+bogh3gZtNZztda9tda9MzMzj+wbCIIg/I6p1klDa/2g1rqF1jobQB8A/9Fa9wXwXwBXmcv6\nAXjPHL9vfoc5/x9J+xYEQYgfNSVPYwSAB5RSa0E+C6dK2jwADU37AwBGxql/giAIAuJY5VZr/TmA\nz83xegAnRrmmFMDV1doxQRAEoVJqiqYhCIIgHAXIpCEIgiDEjEwagiAIQszIpCEIgiDEjEwagiAI\nQszIpCEIgiDEjEwagiAIQszIpCEIgiDEjEwagiAIQszIpCEIgiDEjEwagiAIQszIpCEIgiDETMwF\nC802q+cBOBlAlmnOBfANgM+kZLkgCELtJ6ZJQynVA7RpUjsAYQC7AChQGfMEAGuUUn201kuPVEcF\nQRCE+LNf85RSKgvAJwBKAVwEIF1r3Uxr3RRAOoCLAQQAfKKUanwkOysIgiDEl1h8GvcBKAFwutb6\nE6213zmhtfZrrT8CcIa5ZsCR6aYgCIJQE4hl0vgTgNla6/zKLtBa7wPwDIALDlfHBEEQhJpHLJNG\nOwBLYrhusblWEARBqKXEMmnUBZAXw3UFADIOrTuCIAhCTSaWSUMBiDWcVh1CXwRBEIQaTqx5Gp8o\npUKH6V6CIAjCUUos/9GPPeK9EARBEI4K9jtpaK1l0hAEQRAASO0pQRAE4QDYr6ahlJpyIDfUWg8/\n+O4IgiAINZlYfBrXIPboKQ2g0klDKZUM4EsASebZb2mtH1VKtQHVtmoIyve4UWsdUEolAVgAoBeA\n3QCu1VpvjLEvgiAIwkjm/40AABroSURBVGEmFp9G9mF8nh/AOVrrQqVUIoCvlFIfAXgAwEyt9etK\nqTkAbgNlmN8GYK/Wup1Sqg+AyQCuPYz9EQRBEA6AWAoW/ksp1bFc2zlKqdQDfZgmCs2vieafBnAO\ngLdM+0sArjDHl5vfYc6fa0q0C4IgCHEgFkf4H0FZ4QAApZQXwKcAOlb6iSpQSnmVUksB7DD3WQdg\nn9bayQPJAdDcHDcHsAUAzPk8kAmr/D3vVEotUkot2rlz58F0SxAEQYiBg42eOujVvtY6rLU+HkAL\nACcC6HSw97LuOVdr3Vtr3TszM/NQbycIgiBUQtxCbk1l3P8COAVAPaWU419pAWCrOd4KoCUAmPN1\nQQ5xQRAEIQ7EOmlEi5464O1dlVKZSql65jgFtH3sKtDkcZW5rB+A98zx++Z3mPP/kW1lBUEQ4seh\n1J76d7R6VFrrqnbvawrgJeMX8QB4U2v9D6XUSgCvK6XGA/gRwDxz/TwALyul1gLYA6BPjP0VBEEQ\njgDVWntKa70MQI8o7etB/o3y7aUArj5czxcEQRAODak9JQiCIMSM1J4SBEEQYkYmDUEQBCFmZNIQ\nBEEQYkYmDUEQBCFmZNIQBEEQYkYmDUEQBCFmZNIQBEEQYkYmDUEQhCPIzJkz492Fw4pMGoIgCEeI\nnJwcjB07Flu3bt3/xUcJMmkIgiAcIWbPno28vDzMnj073l05bMRasFAQBEHYD+PGjcOHH36I5ORk\nAHA1jDfffBP/93/
/BwAoLS3FRRddhNGjR8etn4eCTBqCIAiHQEkgjB0FpcjN96PDOVfjxdfewsbv\nvitzzdq1a7F27VoAQO/evTFw4MB4dPWwIJOGIAhCObTWKPSHsKPAjx35fuwoKOWfpi23oBQ78/0o\n8JfdISJy/sPw7XsEge2/Vrhv79698emnn6JevXrV9SqHHZk0BEH43aC1Rn5JyNUMok0CTltxIBzT\nPX0JHjROTzL/kpGVkYS0c1/HzAFXYdvmDe51HTp0OOonDEAmDUEQagGRiMbe4gBNAAV+7MgvLfuz\nwI9ccxwIRWK6Z0qiF40zzGSQkVxmUmicnuyeq5uSCKVUmc/m5ORgRqAECQkJaNOmDTZs2ID8/HwU\nFRXJpCEIQu1i5syZuP/+++PdDQBAOKKxu6i8ichvaQp+7Mwvxc5CP4Lh2HaCTktK4MkgnSaDrAya\nBDJtbSEpocJkECtPPfUUwuEwBg8ejPHjx2PUqFF46aWX8PTTT2PixIkHdc+agkwagiC4OHkF11xz\nDZo3b37EnhMMR7Cr0F9hEthpTQy5+aXYXRRAOBLbZFA3JdFoBUnISk9GZka5ScGcq+M78v/tZWRk\n4NNPP0X37t0BAFOnTsUNN9yAjz766Ig/+0gjk4YgCC52XsGECRMO+PP+UBg7XRNRRQeyMzHsLgpA\nxzYXoEGqr5yJqOwk0Dg9GZnpSUhO9B5wf48UDz30UIW27t27u5PI0YxMGoLwOybWvII/nX8Bbh84\nHLkVJoFSmiRM297iYEzPVQrGFFTWgZxZbmJolJYEX4LkINcklI51uj9K6N27t160aFG8uyEIRwW/\n7diFC88/H8uWLqn0muRm7ZF59WPwJKft935ej0JmGmsAtu/AdiA3TPUhwSuTQU1CKbVYa917f9eJ\npiEItRA74SzXiiRyjnPzSWMo8IcQOXM4fNuj5xX4mrRD5tWPITk1gzSDKJNAZgZrBg3q+ODxHJzz\nWDg6kElDEI4i7MmgTK6BNTnk5peioDS0/5uBcgyymmWi+9C/4n/T7kTe9s3uuZZt2uLtj/6Ddi2y\nUK9OxbBS4feJTBqCUAMoDYbdBDNHC7D9B47GEPNk4PVQFFFG2ZBSR0Nw2p0cg5ycHJwwLYAiK68g\nWFKEZmke1E/1HeG3F44mqnXSUEq1BLAAQBYADWCu1voJpVQDAG8AyAawEcA1Wuu9ipY2TwC4CEAx\ngJu11pUbXwWhhuFMBlFNRZaGkH8Ak0FmelKZ//gbZySXmRyyMqInnFVFbc4rEA4v1a1phAAM0Vov\nUUqlA1islPoUwM0A/q21nqSUGglgJIARAC4E0N78OwnAM+anIJShuhPSSoMUWmqbhKKZivJKYosm\nSvQq10mc5fgLymkJWenJR8xMVJvzCoTDS1yjp5RS7wF4yvw7S2u9TSnVFMDnWuuOSqlnzfFr5vrV\nznWV3VOip35/5OTkoEuXLvj5558POSHNHyqrGZBGwKGlzuRwMJMBawLJRltgZ3J98RkIcabGR08p\npbIB9ADwHYAsayLYDjJfAUBzAFusj+WYtkonDeH3RywJaTwZlI8isrSDglLsizHPINHrhJbyf/zl\ntYOsjGTUS0mUaCKhVhGXSUMplQbgbQCDtdb59gpLa62VUgek/iil7gRwJwC0atXqcHZVqIGUT0jb\nvCUHADD/ldfw9of/RiAcQWlJKTKPOxlNzrzhgJLOEjwKjdMpySwrvXJHcn0JLRV+p1T7pKGUSgRN\nGK9qrf9umnOVUk0t89QO074VQEvr4y1MWxm01nMBzAXIPHXEOi/UCAYOHIgPPvgAX3zxRZn23zZv\nAEwpal+TdkjIPhd5uQUAKOnMLkWRZXwEjY124BxLnoEgVE11R08pAPMArNJaz7BOvQ+gH4BJ5ud7\nVvsApdTrIAd4XlX+DOH3Qb169fDGO//ACaefjT0bV1U437JDF4x5ZiHaNMtyw05lM
hCEw0N1axp/\nAHAjgOVKqaWm7SHQZPGmUuo2AJsAXGPOfQgKt10LCrm9pXq7K9RElmzei3tfXYHUyx9F0StD4d+d\n457r0KEDvvvuf0f9ngWCUFOp1klDa/0VgMqWe+dGuV4DuPeIdko4atBa4+VvN+Gxf6xEMKxxXOMk\nlHqD2FsLN7oRhJqKVAwTjgqKAyHc/8ZSjH7vZwTDGrf8IRtd874BdASDBw/G8uXLMXjwYITDYTz9\n9NPx7q4g1FqkjIhQ41m/sxB3v7IEq3MLUMfnxaQru+Gy7s0wcXldSUgThGpGSqMLNZqPV2zD0L8t\nQ6E/hLaZqZhzQy+0z0qPd7cEodZR45P7BKEqQuEIpn6yGs9+uR4AcHHXpph8VTekJcmQFYR4In+B\nQo1jR0Ep7lv4I77bsAcJHoUHL+qMW/+QLWU2BKEGIJOGUKP4YeMe3PvqEuwo8KNxehKe7tsTJ2Q3\niHe3BEEwyKQh1Ai01pj31QY8/tEvCEc0TmrTAH+9vgcapyfHu2uCIFjIpCHEnUJ/CCPeXoZ/LqNk\n/zvPOAbDz+8oe0gLQg1EJg0hrqzdUYC7Xl6MdTuLkJaUgGlXd8MFXZrGu1uCIFSCTBpC3PjHst8w\n/K1lKA6E0SErDc/c0AttM9Pi3S1BEKpAJg2h2gmGI5j44Sq8+H8bAQCXH98Mj/+lK+r4ZDgKQk1H\n/kqFaiU3vxT3vroEizbtRaJXYdTFx+KmU1pLOK0gHCXIpCFUG9+s2437XluCXYUBNMlIxtN9e6JX\n6/rx7pYgCAeATBrCEUdrjblfrseUT1YjHNE4tW1DPHldDzRKS4p31wRBOEBk0hCOKPmlQQz720/4\n5OdcAMA9Z7XFkD91hFc2RBKEoxKZNIQjxurtBej/ymJs2FWE9OQETL+6O/50XJN4d0sQhENAJg3h\niPDuj1vx4N+XoyQYRqcm6ZhzQy9kN0qNd7cEQThEZNIQDiuBUATj/7kSC77ZBAD4S8/mmHBFV6T4\nvHHumSAIhwOZNITDxm/7SnDPq0uwdMs++LwePHrZsbj+xFYSTisItQiZNITDwv+t3YX7XvsRe4oC\naF4vBbP79kT3lrJPtyDUNmTSEA6JSETjmS/WYfq/ViOigdPbN8ITfXqgQaov3l0TBOEIIJOGcNDk\nlQQx5M2l+GzVDgDAwHPbY9C57SWcVhBqMTJpCAfFz7/l4e5XlmDznmLUTUnErGuPx9mdGse7W4Ig\nHGFk0hAOmLcW5+Dhd5bDH4rguGYZmHNDL7RsUCfe3RIEoRqQSUOImdJgGGM/WInXvt8MALi2d0uM\nvfw4JCdKOK0g/F6o1q3RlFIvKKV2KKVWWG0NlFKfKqV+NT/rm3allHpSKbVWKbVMKdWzOvsqlCVn\nbzGuefYbvPb9ZvgSPJh8ZVdMvqqbTBiC8DujuvfTnA/ggnJtIwH8W2vdHsC/ze8AcCGA9ubfnQCe\nqaY+CuX4Ys1OXPLXr7AsJw8t6qfg73efimtPaBXvbgmCEAeqddLQWn8JYE+55ssBvGSOXwJwhdW+\nQBPfAqinlJJ9QKuRSETjic9+xc0vfo99xUGc3TET/7jvNHRpXjfeXRMEIU7UBJ9GltZ6mzneDiDL\nHDcHsMW6Lse0bUM5lFJ3grQRtGolK+DDwb7iAAa/sRSfr94JpYAHzuuAAWe3g0fCaQXhd01NmDRc\ntNZaKaUP4nNzAcwFgN69ex/w54WyLM/JQ/9XFmPrvhLUr5OIJ/r0wBkdMuPdLUEQagA1YdLIVUo1\n1VpvM+anHaZ9K4CW1nUtTJtwBHn9+80Y/f7PCIQi6N6iLp7u2xMt6ks4rSAIRHU7wqPxPoB+5rgf\ngPes9ptMFNXJAPIsM5ZwmCkNhjH8rZ8w8u/LEQhF0PekVniz/ykyYQiCUIZq1TSUUq8BOAtAI6VU\nDoBHAUwC8KZS6jYAmwBcYy7/EMBFANYCKAZwS
3X29ffE5t3FuPvVxfj5t3wkJ3ow4YquuLJXi3h3\nSxCEGki1Thpa6+sqOXVulGs1gHuPbI+Ef6/Kxf1vLEV+aQjZDevgmRt6oXPTjHh3SxCEGkpN8GkI\ncSAc0Zj56Ro89d+1AIDzjs3CtKu7o25KYpx7JghCTUYmjd8huwv9GPT6Uny1dhc8Chh2fifcdcYx\nEk4rCMJ+kUnjd8aPm/fi3leX4Le8UjRM9eGv1/XAqe0axbtbgiAcJcik8TtBa41Xvt2Ecf9YiWBY\no2erepjdtxea1E2Od9cEQTiKkEnjd0BJIIyH3lmOd36kNJebT83GQxd1hi+hJkRcC4JwNCGTRi1n\nw64i3P3KYvyyvQApiV5MurIrLj++eby7JQjCUYpMGrWYT37ejqFv/oQCfwjHNErFnBt7oUNWery7\nJQjCUYxMGrWQUDiCqf9ajWe/WA8AuLBLE0y5qhvSkyWcVhCEQ0MmjVrGzgI/Br72I75Zvxtej8LI\nCzrh9tPbQCkJpxUE4dCRSaMWsXjTHtzz6hLk5vuRmZ6Ep67rgZOOaRjvbgmCUIuQSaMWoLXG/K83\nYsI/VyEU0Tghuz6evr4nGmdIOK0gCIcXmTSOcor8IYz8+3J88NNvAIDbT2uDERd2QqJXwmkFQTj8\nyKRxFLN2RyHufmUxft1RiFSfF1Ou6o6Lu8mOuIIgHDlk0jhK+XD5Ngz7208oCoTRrnEa5tzQC+0a\np8W7W4Ig1HJk0jjKCIYjmPzRL3j+qw0AgEu6NcXkK7shNUm+SkEQjjzyP81RxI78UgxY+CO+37gH\nCR6Fhy/ujJtPzZZwWkEQqg2ZNI4Svlu/GwNe+xE7C/zIykjC09f3RO/sBvHuliAIvzNk0qjhaK3x\n/P82YNLHvyAc0Tj5mAb463U9kZmeFO+uCYLwO0QmjRpMQWkQw99aho9WbAcA9D+zLYb+qQMSJJxW\nEIQ4IZNGDWVNbgH6v7IY63cWIT0pAdOu6Y7zj2sS724JgvA7RyaNGsh7S7di5NvLURIMo2NWOubc\n2AttGqXGu1uCIAgyadQkAqEIJn64CvO/3ggA+HOP5pjw5y6o45OvSRCEmoH8b1RD2JZXgntfXYIl\nm/ch0asw+tLjcMNJrSScVhCEGoV4VOPEzJkz3eOv1+7CJU9+hSWb96Fp3WS8edcpuPHk1jJhCIJQ\n46jxk4ZS6gKl1Gql1Fql1Mh49+dwkJOTg7FjxyInJwfPfL4ON8z7DruLAjitXSP8477T0KNV/Xh3\nURAEISo12jyllPICeBrAeQByAPyglHpfa70yvj07NGbPno28vDxccs9o7Dv2SgDAgLPb4f7zOsDr\nEe1CEISaS42eNACcCGCt1no9ACilXgdwOYCjatIYN24cPvzwQyQn0/4WGzZvAQD8/L+P4Vu2CO0y\n0/DW5xEUf3cRRo8eHc+uCoIgVElNnzSaA9hi/Z4D4KQ49eWgGThwID744AN88cUXZdpD+7YhtG8b\nlm0CevfujYEDB8aph4IgCLFR0yeNmFBK3QngTvNroVJqdTz7UwleAB0A1IlyrnjRokVr6tevH67m\nPtUmGgHYFe9O1BJEloeXo0WerWO5qKZPGlsBtLR+b2HayqC1ngtgbnV16nCglFqkte4d737UFkSe\nhw+R5eGltsmzpkdP/QCgvVKqjVLKB6APgPfj3CdBEITfLTVa09Bah5RSAwB8AjLvvKC1/jnO3RIE\nQfjdUqMnDQDQWn8I4MN49+MIcFSZ044CRJ6HD5Hl4aVWyVNprePdB0EQBOEooab7NARBEIQahEwa\n1UxtLIsST5RSLyildiilVsS7L0c7SqmWSqn/KqVWKqV+VkoNinefjmaUUslKqe+VUj8ZeY6Nd58O\nB2KeqkZMWZQ1sMqiALjuaC+LEk+UUmcAKASwQGvdJd79OZpRSjUF0FRrvUQplQ5gMYArZHweHIoq\njqZqrQuVU
okAvgIwSGv9bZy7dkiIplG9uGVRtNYBAE5ZFOEg0Vp/CWBPvPtRG9Bab9NaLzHHBQBW\ngaoyCAeBJgrNr4nm31G/SpdJo3qJVhZF/iiFGodSKhtADwDfxbcnRzdKKa9SaimAHQA+1Vof9fKU\nSUMQhDIopdIAvA1g8P+3d/6xWpZlHP98CeVUQiqQPzec06hFG0sySTfPjJkKhYcycuVC2Yysf1qL\n9J86/ZGb4my1Gmb+IB0ikMcfgDqPKNksl1CM1oQVCsimxeGnIUjA5R/X/cp9nvPw8rxxel9evD7b\nvfPev677uh/YfT33j+e+zGxXq/VpZ8zsgJmNx2+zuFBS2y+hhtFoLpWuRQmCVpHW3h8B5ptZT6v1\nOV4wsx3A88AVrdblaAmj0VziWpTgmCVt3N4LvGJmd7Zan3ZH0mhJJ6ffH8QPwKxtrVZHTxiNJmJm\n+4HatSivAIviWpSjQ9IC4E/AWEmbJc1stU5tzMXAdcBlklancFWrlWpjzgCel7QGf2HsNbOlLdbp\nqIkjt0EQBEFlYqYRBEEQVCaMRhAEQVCZMBpBEARBZcJoBEEQBJUJoxEEQRBUJoxG0FIkjZNkkjqz\nNEseG6vKmJ3XHyS9OpMebf8Fr6RuSX0tbH+DpDta1X4wuITRCI5FJgKLGyg/G+j8/6gSBEHOMe/u\nNXj/0cqro9NX0cNa1X4QHOvETCNoKpJukvS6pN2SluBfzRbL9FueknSJpD9I2pXCaknXpLwNwEjg\nx6mepaWlc9LvKQXZ8yStzOLdkvpSGy8De4FrsipnSlqa9N0kaVZB3kRJT0h6I5VZLenrhTIzki6f\nktSbyq2VNK2k713Jcc8eSVslPSlpTJY/TtIySW+lsFjS6ZUefv92TpV0t6R/Sdor6Y+SPpvlr5A0\nYLYnaU56DkrxDkm3p3/Td5LDofiK/DgmjEbQNCRNBX4FLAWmAX8D7jtCnRGp/KvAl4GvAA8CJ6ci\nXcBO/M6kiSn8pUHVPgT8FrgHv1Duz1nevcCapO+TwNyCIRoDvAjMBL6IX/Z3v6RrS9p5CL9rrAv4\nB/CwpLOzvl4H9ADrga8C1+NOu0an/PNSWx3AN4AZwCeBJbVBvAqShgHPApOAHwBXA1uAZzMDtBC4\nStKHs3pKei2yQ1dJ/C7pcWvq/8vAE5LGV9UnaDPMLEKEpgR8MH6qkPYb3DFNZ5ZmwHfT7wkpPryO\n3D6gu5B2Tqo3pZA+D1iZxbtTuamFcp0p/e5Cei/w0mH0EL7k+2vguSx9RpJ1Q5Y2EtgPzErxIfiN\nxz11+vkgsA44MUs7HzgATK5Trxvoy+IzgX3A+VnaUNxYzUnx0Um/r2VlJqZ+TEjxz6f4pYX2XgAW\nZ/ENwB2t/v8XYXBCzDSCpiBpKPBp4PFC1pGu316Pu3N9SNLU2q2hg4wBTx0m79FCvAe4QO66F0mn\nSPqFpI3Af1O4EfhYiaxn3mvQbCvumKc20xgLnAncX0fPSUmfg5KGpmf6Gj4oT6hTr0zOKuC1TA7A\n72tyzGwL8BwwPas3HVhvZiszOW8CL9bkJFnLG9QnaCPCaATNYhTwAXygzCnG+2Fm2/ErpU8AFgFb\n0pr+uYOo23Zz97tllOk7FO8P+MxlOjAHuBz4DL7k1lEia0chvi8rNzL9faOOnqOAH3LIONXCufT3\n03IkRgEXlci5viDnYeBKSSMkDcH3ehYW5JxeIqe7QX2CNiJOTwXNog9fRvloIb0YH4D5aaor5D4J\nJgF34vsDF9Wptjf9PbGQfkpZE3XklOm7H+iT1AFMAb5jZnfVCqQBtlG2pr8DDgZkbMNnGveU5DXy\nHcY2YCXw7ZK8d7LfjwJzcT/2G/GZUG40tuFLalc30HbQ5oTRCJqCme2X9Fd8ALoryxpwgqiOjD34\npu844JYsK39jr/Fv/K33E7UEuRvTz+EDYFW66L901QWsMrMDSd4QsoFW0nD
gS9Q3RGWswwfgbwJL\nDlNmOb7xvcrSZsH/yHJ8VrTJzA470zOz7ZKewWdSG3HnTGsKcr4P/MfM2t65UFCNMBpBM7kV6JE0\nF3+LvZQjuL+UNBm4AXgM2AScBXwLX2+vsRaYLOlpfP9jnZm9Jelx4Htpv2EHPsDtaVDnKyX9FF/v\nn4YvlU0FMLOd6ZjujyTtAg4CN+OnuUY00oiZHZQ0G5gvaT6wADc8lwEL0j5CN36YYJmk+/DZxVlJ\np3lmtqJicw8As4AV6UvtV/HlsQuBN83sZ1nZhfhy207glwU5vbhDsV5JtwF/T/0eD3SY2S0Exx+t\n3omP8P4KuOfCzcDb+BHWy6l/emosfqzzdfyNfjM+Uzk1K38B8BKwO5cFnIZvvO/C35RvpPz0VF+J\nnp1J1hfwmcbbqe2bCuXOw9+4d+NGbXZRJodOT51UqLuBwqki3DCtwpfXtgLLgDFZ/sfT89iGG8B/\n4qe1zq7zzAf0EfgI8PP0XPelvvUAFxfKDU99N2BsiexhwE+SHvvwjfGnyU5zlfUzQvuG8NwXBEEQ\nVCZOTwVBEASVCaMRBEEQVCaMRhAEQVCZMBpBEARBZcJoBEEQBJUJoxEEQRBUJoxGEARBUJkwGkEQ\nBEFlwmgEQRAElXkXzpNMPT8BlEQAAAAASUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "Er = vis.Experiment_reader()\n", + "pth = \"out/sp\"\n", + "Er.set_path(pth)\n", + "Er.read_all_expriemts()\n", + "ank=1.0/8\n", + "ul = Er.get_data(\"Inc\")['sp_001'][0].max()\n", + "def fd(x):\n", + " return ul-x\n", + "Er.print_param_description(0)\n", + "ank=1.0/8\n", + "Er.annotated_plot2(0,'Inc', zoom=0.8, pad=0, max_hight=3, xybox=None, fd=fd,\n", + " figposx=[ank,3*ank,5*ank,7*ank], figposy=[fiy,fiy,fiy,fiy], xlim=[-0.5,3.5], ylim=[0,16],\n", + " ylabel=\"IND\", add_points=True)\n", + "Er.annotated_plot2(0,'Fid', zoom=0.8, pad=0, max_hight=300, xybox=None,\n", + " figposx=[ank,3*ank,5*ank,7*ank], figposy=[fiy,fiy,fiy,fiy], xlim=[-0.5,3.5], ylim=[0,650], ylabel=\"FID\", add_points=True)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "blur_001: {'alpha': 0.0}\n", + "# DEBUG:::len(data) = 4\n", + "# DEBUG:::len(imgs) = 4\n", + "# DEBUG:::len(data) = 4\n", + "# DEBUG:::len(data) = 4\n", + "# DEBUG:::mi-0.1*r = -0.960127735138\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYYAAAD8CAYAAABzTgP2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAADqFJREFUeJzt23+o3fV9x/Hnq7k0axE00WitMbu2\nCiNu0MJBKdvA1V9x0EZa/7D7o2FryR+rf6yl0BTHtOof6tZZSruN0BZCYdXOURqQItFWGGNYT6yj\nzdo0t7HFpLZNjQhOqmR974/7dTufy4k3ud9z78nR5wMO93y/38+99/3xgs97zvcmVYUkSa9607QH\nkCSdWQyDJKlhGCRJDcMgSWoYBklSwzBIkhqGQZLUMAySpIZhkCQ15qY9wEqcd955NT8/P+0xJGmm\n7N+//9dVtWm5dTMZhvn5eYbD4bTHkKSZkuRnp7LOt5IkSQ3DIElqGAZJUsMwSJIahkGS1DAMkqSG\nYZAkNQyDJKlhGCRJDcMgSWoYBklSwzBIkhqGQZLUMAySpIZhkCQ1DIMkqWEYJEkNwyBJahgGSVLD\nMEiSGoZBktQwDJKkhmGQJDUMgySpMZEwJNmW5GCShSS7xlxfn+SB7vrjSeaXXN+S5MUkn5zEPJKk\nlesdhiTrgC8CNwBbgQ8l2bpk2UeA56vqUuA+4J4l1/8e+FbfWSRJ/U3iFcMVwEJVHa6qV4D7ge1L\n1mwH9nTPHwSuThKAJDcCTwMHJjCLJKmnSYThIuCZkeMj3bmxa6rqBPACcG6Ss4BPAZ+ZwBySpAmY\n9s3n24H7qurF5RYm2ZlkmGR47Nix1Z9Mkt6g5ibwNY4CF48cb+7OjVtzJMkccDbwHHAlcFOSe4Fz\ngN8m+U1VfWHpN6mq3cBugMFgUBOYW5I0xiTC8ARwWZJLWAzAzcCfLVmzF9gB/AdwE/Dtqirgj19d\nkOR24MVxUZAkrZ3eYaiqE0luAR4G1gFfqaoDSe4AhlW1F/gy8NUkC8BxFuMhSToDZfEX99kyGAxq\nOBxOewxJmilJ9lfVYLl10775LEk6wxgGSVLDMEiSGoZBktQwDJKkhmGQJDUMgySpYRgkSQ3DIElq\nGAZJUsMwSJIahkGS1DAMkqSGYZAkNQyDJKlhGCRJDcMgSWoYBklSwzBIkhqGQZLUMAySpIZhkCQ1\nDIMkqWEYJEkNwyBJahgGSVLDMEiSGoZBktQwDJKkhmGQJDUMgySpMZEwJNmW5GCShSS7xlxfn+SB\n7vrjSea789cm2Z/k+93H905iHknSyvUOQ5J1wBeBG4CtwIeSbF2y7CPA81V1KXAfcE93/tfA+6rq\nD4AdwFf7ziNJ6mcSrxiuABaq6nBVvQLcD2xfsmY7sKd7/iBwdZJU1feq6ufd+QPAW5Ksn8BMkqQV\nmkQYLgKeGTk+0p0bu6aqTgAvAOcuWfNB4MmqenkCM0mSVmhu2gMAJLmcxbeXrnuNNTuBnQBbtmxZ\no8kk6Y1nEq8YjgIXjxxv7s6NXZNkDjgbeK473gx8A/hwVf3kZN+kqnZX1aCqBps2bZrA2JKkcSYR\nhieAy5JckuTNwM3A3iVr9rJ4cxngJuDbVVVJzgEeAnZV1b9PYBZJUk+9w9DdM7gFeBj4IfD1qjqQ\n5I4k7++WfRk4N8kC8Ang1T9pvQW4FPibJE91j/P7ziRJWrlU1bRnOG2DwaCGw+G0x5CkmZJkf1UN\nllvnv3yWJDUMgySpYRgkSQ3DIElqGAZJUsMwSJIahkGS1DAMkqSGYZAkNQyDJKlhGCRJDcMgSWoY\nBklSwzBIkhqGQZLUMAySpIZhkCQ1DIMkqWEYJEkNwyBJahgGSVLDMEiSGoZBktQwDJKkhmGQJDUM\ngySpYRgkSQ3DIElqGAZJUsMwSJIaEwlDkm1JDiZZSLJrzPX1S
R7orj+eZH7k2qe78weTXD+JeSRJ\nK9c7DEnWAV8EbgC2Ah9KsnXJso8Az1fVpcB9wD3d524FbgYuB7YB/9B9PUnSlEziFcMVwEJVHa6q\nV4D7ge1L1mwH9nTPHwSuTpLu/P1V9XJVPQ0sdF9PkjQlkwjDRcAzI8dHunNj11TVCeAF4NxT/FxJ\n0hqamZvPSXYmGSYZHjt2bNrjSNLr1iTCcBS4eOR4c3du7Jokc8DZwHOn+LkAVNXuqhpU1WDTpk0T\nGFuSNM4kwvAEcFmSS5K8mcWbyXuXrNkL7Oie3wR8u6qqO39z91dLlwCXAd+dwEySpBWa6/sFqupE\nkluAh4F1wFeq6kCSO4BhVe0Fvgx8NckCcJzFeNCt+zrwX8AJ4GNV9T99Z5IkrVwWf3GfLYPBoIbD\n4bTHkKSZkmR/VQ2WWzczN58lSWvDMEiSGoZBktQwDJKkhmGQJDUMgySpYRgkSQ3DIElqGAZJUsMw\nSJIahkGS1DAMkqSGYZAkNQyDJKlhGCRJDcMgSWoYBklSwzBIkhqGQZLUMAySpIZhkCQ1DIMkqWEY\nJEkNwyBJahgGSVLDMEiSGoZBktQwDJKkhmGQJDUMgySpYRgkSY1eYUiyMcm+JIe6jxtOsm5Ht+ZQ\nkh3dubcmeSjJj5IcSHJ3n1kkSZPR9xXDLuDRqroMeLQ7biTZCNwGXAlcAdw2EpC/q6rfA94N/GGS\nG3rOI0nqqW8YtgN7uud7gBvHrLke2FdVx6vqeWAfsK2qXqqq7wBU1SvAk8DmnvNIknrqG4YLqurZ\n7vkvgAvGrLkIeGbk+Eh37v8kOQd4H4uvOiRJUzS33IIkjwBvG3Pp1tGDqqokdboDJJkDvgZ8vqoO\nv8a6ncBOgC1btpzut5EknaJlw1BV15zsWpJfJrmwqp5NciHwqzHLjgJXjRxvBh4bOd4NHKqqzy0z\nx+5uLYPB4LQDJEk6NX3fStoL7Oie7wC+OWbNw8B1STZ0N52v686R5C7gbOCves4hSZqQvmG4G7g2\nySHgmu6YJIMkXwKoquPAncAT3eOOqjqeZDOLb0dtBZ5M8lSSj/acR5LUU6pm712ZwWBQw+Fw2mNI\n0kxJsr+qBsut818+S5IahkGS1DAMkqSGYZAkNQyDJKlhGCRJDcMgSWoYBklSwzBIkhqGQZLUMAyS\npIZhkCQ1DIMkqWEYJEkNwyBJahgGSVLDMEiSGoZBktQwDJKkhmGQJDUMgySpYRgkSQ3DIElqGAZJ\nUsMwSJIahkGS1DAMkqSGYZAkNQyDJKlhGCRJjV5hSLIxyb4kh7qPG06ybke35lCSHWOu703ygz6z\nSJImo+8rhl3Ao1V1GfBod9xIshG4DbgSuAK4bTQgST4AvNhzDknShPQNw3ZgT/d8D3DjmDXXA/uq\n6nhVPQ/sA7YBJDkL+ARwV885JEkT0jcMF1TVs93zXwAXjFlzEfDMyPGR7hzAncBngZd6ziFJmpC5\n5RYkeQR425hLt44eVFUlqVP9xkneBbyzqj6eZP4U1u8EdgJs2bLlVL+NJOk0LRuGqrrmZNeS/DLJ\nhVX1bJILgV+NWXYUuGrkeDPwGPAeYJDkp90c5yd5rKquYoyq2g3sBhgMBqccIEnS6en7VtJe4NW/\nMtoBfHPMmoeB65Js6G46Xwc8XFX/WFVvr6p54I+AH58sCpKktdM3DHcD1yY5BFzTHZNkkORLAFV1\nnMV7CU90jzu6c5KkM1CqZu9dmcFgUMPhcNpjSNJMSbK/qgbLrfNfPkuSGoZBktQwDJKkhmGQJDUM\ngySpYRgkSQ3DIElqGAZJUsMwSJIahkGS1DAMkqSGYZAkNQyDJKlhGCRJDcMgSWoYBklSwzBIkhqG\nQZLUMAySpIZhkCQ1DIMkqWEYJEkNwyBJahgGSVLDMEiSGqmqac9w2pIcA3427TlO03nAr6c9xBpz\nz28M7nl2/G5VbVpu0UyGY
RYlGVbVYNpzrCX3/Mbgnl9/fCtJktQwDJKkhmFYO7unPcAUuOc3Bvf8\nOuM9BklSw1cMkqSGYZigJBuT7EtyqPu44STrdnRrDiXZMeb63iQ/WP2J++uz5yRvTfJQkh8lOZDk\n7rWd/vQk2ZbkYJKFJLvGXF+f5IHu+uNJ5keufbo7fzDJ9Ws5dx8r3XOSa5PsT/L97uN713r2lejz\nM+6ub0nyYpJPrtXMq6KqfEzoAdwL7Oqe7wLuGbNmI3C4+7ihe75h5PoHgH8GfjDt/az2noG3An/S\nrXkz8G/ADdPe00n2uQ74CfCObtb/BLYuWfOXwD91z28GHuieb+3Wrwcu6b7OumnvaZX3/G7g7d3z\n3weOTns/q7nfkesPAv8CfHLa++nz8BXDZG0H9nTP9wA3jllzPbCvqo5X1fPAPmAbQJKzgE8Ad63B\nrJOy4j1X1UtV9R2AqnoFeBLYvAYzr8QVwEJVHe5mvZ/FvY8a/W/xIHB1knTn76+ql6vqaWCh+3pn\nuhXvuaq+V1U/784fAN6SZP2aTL1yfX7GJLkReJrF/c40wzBZF1TVs93zXwAXjFlzEfDMyPGR7hzA\nncBngZdWbcLJ67tnAJKcA7wPeHQ1hpyAZfcwuqaqTgAvAOee4ueeifrsedQHgSer6uVVmnNSVrzf\n7pe6TwGfWYM5V93ctAeYNUkeAd425tKtowdVVUlO+U++krwLeGdVfXzp+5bTtlp7Hvn6c8DXgM9X\n1eGVTakzUZLLgXuA66Y9yyq7Hbivql7sXkDMNMNwmqrqmpNdS/LLJBdW1bNJLgR+NWbZUeCqkePN\nwGPAe4BBkp+y+HM5P8ljVXUVU7aKe37VbuBQVX1uAuOulqPAxSPHm7tz49Yc6WJ3NvDcKX7umajP\nnkmyGfgG8OGq+snqj9tbn/1eCdyU5F7gHOC3SX5TVV9Y/bFXwbRvcryeHsDf0t6IvXfMmo0svg+5\noXs8DWxcsmae2bn53GvPLN5P+VfgTdPeyzL7nGPxpvkl/P+NycuXrPkY7Y3Jr3fPL6e9+XyY2bj5\n3GfP53TrPzDtfazFfpesuZ0Zv/k89QFeTw8W31t9FDgEPDLyP78B8KWRdX/B4g3IBeDPx3ydWQrD\nivfM4m9kBfwQeKp7fHTae3qNvf4p8GMW/3Ll1u7cHcD7u+e/w+JfpCwA3wXeMfK5t3afd5Az9C+v\nJrln4K+B/x75uT4FnD/t/azmz3jka8x8GPyXz5Kkhn+VJElqGAZJUsMwSJIahkGS1DAMkqSGYZAk\nNQyDJKlhGCRJjf8FFDYZsBaypoYAAAAASUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYcAAAEPCAYAAACp/QjLAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAIABJREFUeJzsvXmcXFWZ//95aq/e1+zppBMSEMEA\n6QCBqIgyo7igo6KMqGxfdBxFnEUdf/5UGGWcYRxm5jswihADLigu44iCYxwVBAIkgQAhrEln6SSd\n9L7VXnW+f5xz7nluV3XSCZ2u6uR5v1551e1zb9177rm3cs6zk1IKgiAIgsAJlLsDgiAIQuURKncH\njpR4PN6dSqVml7sfxwvRaBTpdLrc3ThukPGcWmQ8jy2xWOxAMpmcU2ofzTS1EhGpmdbnSoaIIOM5\ndch4Ti0ynscWM75Uap+olQRBEIQiZpxaqYj+zQCA0dFRrymbzQIAAsGg11Zwm8jnc+Yz67UFzDSZ\nz+pVyoHuEW/f6EgGABCrciehQMadL2PFXjecAYTNNfSkXBOqdecb6tP7kPPahgdcX/LQ16meNf5m\nASJ93nA44rVFKGrO4cTv57e95G0PjuwHALQtctLjnAUnFZ8cAPoeAwCkM+7+slm9nVd51ke3nSvY\n8XRthbwex0y6AABIJgrevnRK32sml3Lny7PtnN5fKLgFjTIPMGjGNVhw65psWvcvk3L3n8uz7wb1\nsSrk+qfM14n0eUm5Z5dJ6uP6ega9tp6DB13/Cvo6s+a0eG3z5s9HSfr+qPuTc886Y+4vy94/vj9r\nxjGXc2OWy+rtrPnMZNy9ZLM588nPx97PXN70251PmfHxPt3lkTfX9V2fb5vz5JRry5vVfS6nP7Os\nf+mU7ks66fqUy7sLhkP6YVTXVHltdfXu9+Lj4P8AADJZNp4Zfd/pjLv/VNptp9N2f/F3Mhk7dux9\nNufO5Vgbu39vPEuMz/hPfT59fJa1ZXP8+Znv5Iuvly/o8SwUiqUnIvcbCLL/60Ih/S5HImGvLRrR\n/19c/dmvFJ1nIkRyEARBEIqYVsmBiNYCeAeAg0qp01j7pwD8JYA8gF8ppT472XNu2Pw0AGAsmfTa\n0gmzCmUrxQzcqjKdSplPtlyCXkFlzconFmrw9sQi1QCAnU+/6LWlsk5SyZhVQM4tVmAW2wgE9BDP\nbXKr9oN7d+lzZBLuGqFmb3vuwjYAwI5nnnPXMOcLQq+oghT39iWN1LRr5x6vbVen247G9T29/g0r\nvbaOWAmxBMBLOzoB+CWHjJGMsgUmTeTZds5KDm51o8zKXtkVecFJOnZVNJYc8toSqWFvO22uZ1eh\n+nx6ZRQKaCkpxF7dvJEcsmxlSAF3PZiVVDrv3oGUub9cTp83x2yeYyP6Xerv7ffahodcX+NGgkyT\nW61FahtRioHBAd03LjmYFX4m68YwzVb9GbvS5KtZ8/WckWzzbGxynqTBJAf2fHLe83Er14KRGArm\nEgX2UxgvpYzvS8ZcL8NWwrbPdiWeTrP7NZKi7/mQ6388rp9po7sEKMSeHyOR0M+mlJSQSrl75ttJ\nKzmkmDRRJDm4vtmVfi5fLC0Abhz90oS/jY+X3ebjVWo/lyzsNazk4De76GcXDLq1vZUWACAS1hJD\nNMren/yR222mW3JYB+CtvIGI3gTgEgArlFKvBfDP09wnQRAEYRzTOjkopR4C0D+u+S8AfF0plTbH\nHCz6oiAIgjCtVIJBejmA1xPR1wCkAPyNUmpjqQOJ6NrxbY8/qQ/lKo3E2JjeCDqxaizj1BaZtG7n\nqplgUIu2eaNmaGl2Bs3W1hgAoLvHzWvJrDNWjmWsWO/6ZW2NkbC+BgWrvX0HhxOmzR2fCbrrVWX1\n9lDKzd0jo+YaRlweG3EG3L7evQCcCgMAMjknltcFtZjZz1Qni
WxpMXPvfn2ubI6L7fpaqYxT3aWy\n7mRWpVDIsxtSxnBsxjgSrvF2WTvaWMKp5oYTbjyT6aQ5L1eD6C+FgvpZhIipqaxqgKktYIz0AFAw\nFxwec9cbGtHPIDGmr5Eac98dHRkzn+74PBuP5hZ9L/WznOqiNeG2OaNj2rGBqy2sSivF/PcTPpWI\nUYOkmSonY9VJRh3khgbK/FFgDgMFxRwcCsVqP7tt39MMex/s74OrhlKZ4u0U3++pdqwKhxt/c75+\nAkAk7P7rqa8zhuggU43E3W+TkzRqpSQzOCeTehwTSTaezPhtt1MlxrikWqmEwZmr5DyVT77YIG0N\nzaVUchmumuNqJauW5kZsc27+nC3WEB0Kud9bOMyMz1H924jzPpc4z+GoBIN0CEATgHMB/C2Ae4mo\npN+tUur26eyYIAjCiUolSA5dAH5mItueIKICgBYAPZP58ilLtfE2xNy2+vv0CnpgyK30R9Nuls1l\n9XZVzLkixiJ6NVjI61m3Ot7k7aup1iubk9tP9toGRvd52wnjQsnd5zIpvRILBu35nGve3Lal+nts\n5VzIupVwX79e1e7vdiv1gwf0PaWSej4fHWXGtYQ2lkYi7hwBZtCLxrXU0lDf6rURW1lyquLGQMsM\nnsGAHVu2GmWuiHbBk027OT1rXFiVWZpGIm7VFonoawTIvX7VEbdSjBhDm8/gZySHQkH3JZtzzzNj\nlkWjKe4+OeZtJ81KuH/ISY8DQ3rsrZtykq38reulYsutSMT1tTZn+sddbScI1CoYl9Icd700EsPY\nmJP+Rkbdsx4xUuHYGHvGKWPwzFoDpbt2wGxS0PUhEGD9IbsK5UZs42qcMa7GbOwSCX3dMd/q2/XF\nSjl8dZ40bUm7ImdShXVrDjDXy6oqJtkZA2usyrmy1qZKv59J42wyxqSEsYTeHmXjybft/mSCOSSk\n/O6tfsmhWDIo+FbhE0sO9jNTyiB9BJKD58JqHpnfbdVKDtxVlblCe26wXhMKpePcDkklSA4/B/Am\nACCi5QAiAHrL2iNBEIQTnOl2Zb0HwAUAWoioC8CXAawFsJaItgLIAPio5McQBEEoL9M6OSilLptg\n1+VHe84lC7T6h/vxzm7QImsq49RGPGLWqmb27nUG3J3btZNUf58W7yMsonnOHH2NxlanynhN22Jv\nO2ZG0YroAAClj80adUjPsPvuzr3dAICBA07t1b3PqTyGR/W99I863/q09eXP+g1pAJBOaBVKfauL\n0g1FnVopOar3Z5hBtj7KjMeM+S11AIAcU0FYw+MwE9X7B5zaJp8eM/125+/v1dtJY/ANsriDmhpt\nVK6td32oq3dqhqZarQYLBLgqUH+OGMPxwQGmhhnS/errZ2M47Po6PKbHbmjE9XlkTKvurDEzw9UY\nRgSPclVXhEXbm8ecZ6oBmmA9EzY6H64kKRhjZTrhWu09AEB/v+7n0JC7x8SYjRUo+PoIAMGQ3o6E\nnSIgEgkU7QfTLNh4nqQxHI8ytdrwqB6TkTGnhhlNcDWOUSuxiHSrVrJGbK4iIXNhbjTlbvexuH4f\nkj4DNwt6YHjG5xIqueERFzc0MsLej1GjimL3kDQqsYxRBftjSqwarliVBDg1GW/LjYt9yJZQIflV\nScXR0vw3p2x8gxk7/luwhugIUyXxmAyrpgNTRVHgyJVElaBWEgRBECoMmRwEQRCEIirBW+lVEQ9r\nESsPJ5KFq7VKKJl1qoqXtrvYuu2v7AYAbNq41Wvb16X3jw0bcZMlDqs24f1Ll7sUGCtXnuJtL56n\n1U41NS6FQjSiPS+279TXeuihp9319+trJcecGiSZ5GKt8chh95nLajE5ZVQ43BPBbgejMa8tzPzI\nE8Zra49RZwGACpyBUjQYzyyWLQAjJsne4JAb44F+l5iwa7fe3rPbjXGP8a5KjBhRnqlBqqt1P1tb\nXezDvHkuXUlrs1ZtxWPuf
rLGS2egb9jcS5+7/kF9f/1DLFniKPd51+J3Ms383G0shU2jwQY0ZGJe\nIhF3fcW8q226lIQvHUNpR/Kw8d0nltnOqoaspxQA9Pc5lciBg/oe+/udmm7UjGPWqF6IjWfY/AZi\nMffM42zbelpRgHmT2RQm5h6GR52aZshsczUiVytZLyU+njYdBVcnWYJmDLiHFU88Z+NZUuw3x1VM\nnFTSenpx7y49dkNDTm04NOTGc9iomEbZPdqYCKsG495K+RJqJe7pVSiRDM9TK+X98Q76/kol3ivh\n/eRLrqfHyqqDQsHimAZ+eKkkfKFgsSrqSBDJQRAEQShixksOysQshOFmxmxWGzQf2uDSVt93/2Pe\n9tCANvT29juDsDLJxMh85lniudGkXpH0P8lWy3vdd+e1aomhusqthENmNbd7j4443tblVu0p09cg\ni4rm0au5rF7h+NIe50bNcbpfPHIyD204HeOZ/5gBNWj61T3gVk4DydLrgrCJy8ixCM+BAb0Ke/HF\nA17bM9tcYr/du3V7z0E3JiMmjiBjDJXEo2NNTEpfn/Nr7+t1EdLNDVryizGjul2JHTRG5z29zpmg\nd6Q4ataf0tn6nrv9WRPhncsX+9MHjfFPgaW4ZvuzJqngaNq1Do6WXukGzLPOZdyxVmLo63Mr3e5u\n53ywb7++t34mndmIeBsFzr3WI+Zdi8fceMVj3Ae+hORgVrhW+uFSwrCREkYSpSO4xyetA4BcwW9A\n5ivVoEkKFwry2Ay3bVfgXFrnMRYca0hOcGO5ef7DTFoYGORShN7msSQJc282GR83IOfzxZID9zco\nJTnY1b8nOZSKni4RZQ04CYC/YwEjMVgpQDFNgH2OIZaRwJ8uP1/UxtOPTxaRHARBEIQiZHIQBEEQ\nipjxaiVrNLQVwgBg20ta5XHX3fd6bd0HnRgajWqDY4KJzbYqXNCIxyHmFxw0BqAgSww2OOSMhYnh\nEXMOJhab2ANbF2GUqb3yps8RloPdl+/fGveYKsKem7wc7k7sDud1XxI9e722QJNLlTFrQbs+B5ON\nD/a4/nPIJMwbG3Ni+fZOrRLb9JRT0z33grtWX5++/8SYE+ttigirpmMaBU/lw8crydQIPQe1iing\n+45Rf5j0CYM8yZpNXcDkcn8ufuuDzkRv85wDRnUUZXEfdTV6DKqq3DsVYOkzYNJ+jCXZePa7e+dY\nrRVPhWHjGLoPOFXS3n1OTbZvv1bPDQ64Z2TTRti6AkGmWLJqpWjU9TfGtsNGxcNTllkVR8q8a6PM\nuD5mU2HwmgkZnp7BVpZjcR7m1NY4Ho+y1ChVJhEcS5kRj7v+hUwlOF4Jjf82OTZ5Ho9ZGDUqt+Fh\n9wyGmFrJpkoZGXZqJft9Fz9UrFZSPO5A8W396Vcr+WMfeNxVKYNzicJu7LftjMl2XJViVShh05Hw\nFDfuPPa/Ll4zg1DigodBJAdBEAShiBkvOdjEdvm8M8b94dGnAADbXtnltbU2uejhtFnVZpjR2a4k\nbdrjEEti57mGseR+QRaxaFdQBbbUDZro2pDJyx1Vbh5OmFUKTyHsM0aZT+59FjWJAZWpaJcvuFXQ\nRavPAQDEwnVe2+NbO931RvWKKsvqJL/4/CsohU1w1z/ozv/yDl2D+oWXd3ttXfucK2kqaaN33Uqz\nYIzjQXNnEZ5e2AwTjwhNcrfQtK0hXVwnOWU+M75EaMUGQp4e2i7T+ErIRpzaSOJZs5wzQftC/a5U\nV9V7bQOj7nxDRmJIMHfKA92l04GlTEI7HrHd06slrf0HnBF+f7eTHA72DJnvuNWvrZOtzIo0xN41\nKxlE0+79THKDsOfSyJwezJikx42rbjMpu3kNZW5ANWPLJTsbnW2lhPp652xQX6fHNl7t0taHw85N\nOGBSyvOVcCY9geRg3jWfQdq63rKoaJ8UYQzVvC2RsNUgzbtbotKbX3JwfVAors5m3z0rnZe
SKvzF\n3Jhx3rqrMu2CFQWsxB1morf9LUXD7ni+HTGSmP0E/P+XTBaRHARBEIQiZHIQBEEQipjxaqWM8TUf\nYj7MT7+wEwAwxlylG5jKw6qTfIm1rNjslXBzaiVbJyHgUyXx7+rPAjdaGXVSwFQuiwe5QdMm/WId\n9KVbV0WNAejzpDJaNF6ydJa376orL9HXCDs1SM1//97bfuBBHZ0djLr9vb2l/Z7Txgje2+9UGruN\nCml/j1ODcJ9xa/DNM9WEshXr7S0wudZFc7r7y5Uw/vlFc7PPjGuQOQxEYEV6pvpgOo+C2V/gBjpz\nnrpa/VxOWsyi389cpvexiPdde929v7xTj8cwG4PB/tJ++WMmud4gM4b2GkNzT5+LkO/tL641wdVW\nNslfAFat5O4/n9f3wN/JIIuOtTEFXA1yKL/8rFWD8DB89uWQOV+IqS2q4/o30livVUezZrl3rblZ\nb1dXObUSBZwKzNqCuSu+UqXfTy/OgSUKtLUbeAQ0fz9HvQhpbpD2q5VypdRKJYzQHJ9ayb6zNmaB\nf9dGO3NVErMg27ImAaZWsmrDSCkDv4l+r4qzuBZm4I+ZGJdohEfJS4S0IAiCMAXMeMkhZAzSQ6PM\nuGfcNHPMAMvd5KwLZS7Hq1XpNmWMoHzl5X0vWxxNCwBZY2zO+xY7Ad8Ht0ZVG1fWeLjY6A0AyWTC\n1099bZO615zndae2e/vCcb2KCgTcan/lWW7/wIA2lm7f7YzIBw668eLYFM79LNq0x6x0h5kRMMUk\nMZt/ucCqw9mI84AxpPGFl11l+XLJsFW9lxLbJ03Y85hxYJKWzVQd8i11uBuwiXhlK+GgObi1URtL\n2xfN9vYtO0lvN9Q5yaGuzqXvDpmKazy/0whb5XNGzZgNDRdH7w6yXEDDpVa1TLK0q3h734q/n6at\nwO45kC9e6vqewTgjPndzLuVmGSxRiSzGVrP1NdoAPatZR7fPn+NyZc2epcexpsZJDmCr5FTaVqPj\nTgkTSQ7GIM1dmY1bKnd/tQZnABhLpsx3XFvSy61ULDnYlNxHIjmocW2+LElGYuDSAo8QD3hGZ7c/\nZlb9VcYluYZJCTUmN1l1tXMNroo7A7+tIR2NcHdhtz1ZRHIQBEEQipDJQRAEQShixquVbDRxb59T\nk/QOajVIgUclM5WHFaGzJVRNsRJ1uG10rq96KU8cZj5zPMWvOTZojYFMzLff5KqkMDMYBUyK8CQT\nnXNG3bNogfbBP3vVCm9fMqP94slpPrC4zRkEP/TeCwAADz28xWt7aY9Olvfs875bxahRj/QPuqRv\nVv3BxX6upsO46FAAIGMctlWp8szAmDWqO56qTjHjtGeQ9on1fnmdfBZ8vR30Vb5i4xkwBsagO5/1\nx184V1cLbG9zBv7ZJpV4Xa0T1atYBHWVCdSYxarXWSPzbx6ED6tuGhrhKaQTvn0AkEg5lUfavG9Z\n9s7aMbbu7sRvP+/70PtL6IZ8ahDl99X3Ha1KGFC5A4BJAmdT2QNAQ50xRBvj87zZTiU3z1RSrKt1\nsQ8gN55p826Pscp4qVRpFa6tOGfjHfSxGd/n+O/bam+ZNE+b7k/VnfdFzx9arTRehcQpaXwuUYWN\nN4WDNm6BpVy36iQTVV5X497FWrNdU+N+8FXsWdiElbzyXih05P/VT6vkQERrieigqRc9ft9fE5Ei\nopZS3xUEQRCmj+mWHNYB+A8Ad/NGIloI4E8A7C7xnUMymNQugK/sdimkba3kCLGCG8qtJGxrjq3c\nvSPNd/JgxitTfCc3wXAFC6WiOY17p7dEcNeyxsUIs6omWbQ2jPEoz1ar0bxeyb/+nJMAAKcun+eu\nT/reeNryQtDde22jXkm8Zc2pXtsbc6cDAH7+69/6ej0woq/T2+8idodN7iib5hrw59XxrsnWnzav\ni23LFZixOmekqUCIfZcthVXxys2u16zdmljEuV2lKeY
uHODnM88gzGosNxvD6ZJFWmJYvMDlomqq\n0yvcKpafqIYZ9GrMCm92gzOwjhq/6X+5Ez5K1TcetfWrmbSQZc4OBTtWPkm1YG9WH8MkMSuUFrgR\nmklRpVa6qminGy87nn5JrLi4UFWVW802mDFradJR+nNanEF6bquWJrjkwDMM2NT01hkCcEV4xmNd\nT1O80JCVDJiTBM9VZo3NPOK+YN2evUEpftfGyVOHwUgM9j0FH0/96YtqZ//3RI3WgBv4rWtwrZUc\nqp2UUF+rt2uZ5FBdVWyQjjBJpJSDzeGYVslBKfUQgP4Su24B8Fkc2dMQBEEQjhFlN0gT0SUA9iql\nnj7swYIgCMK0UFaDNBFVAfgCtEppMsdfO77NGpW3d+702tKj2oDKIw59NWCNyicYLFZN2KZM1omt\nGRuBy6ZSbkClQrHxTI2zVoXYUFvjNDGDka3wxskzUXD+LK0Ged1rlwIAqlkEt61JHCywPjG1iz23\nVaUAQLhQ2u95YEir6Xr7nIA3alRNeVZpTrFoZOfHzfzsvSBvY3zmle5s4rIAM+CzdYoqqVay92U/\nWZJDo/7gNlguRAeCxVGmrWYs2uZrddKcFhbTENPqDy7682VUrE6fp6HKifWFfAlPBgDDJunhKEtn\nnkhqVVOWqRIVU3lY7QOF2PM0HSDvb4etWOdTjJSI0OXjU2Tg96mVzLWoWDUCuCpvvPJcfa1WsTU3\naGN+S4N711rq9TZXK/H6xlYdxtNm821O2hqSM8VqI15djSfN81Q97CaCZpBVyN4rj7MpTqzHGW/M\nB/z/H+jzFceFhHlyPOaAEjPxCFVsPGuMgbnWxDTUM4N0g1Er1U2gVnIGaa5WOnI5oNySw1IA7QCe\nJqKdABYAeJKI5pQ6WCl1+zT2TRAE4YSlrJKDUupZAJ4PoZkgOpRSpfMfC4IgCNPCtE4ORHQPgAsA\ntBBRF4AvK6XuPPS3Dk1iTIuXu3d2eW2ZnBbXQ1GXoz/Ai8Ub/3Fe8FwZf6Ws8ZXOBpzIb2sz8Nx5\nOSaGp1l+MktB+cXMMPPWKRivCu57zCXYfMEkBsw6wW7OIi1MLZyn59JI0ImUVlyOsXQc3Jsob1Q7\nIWKqpEBp7wVbjH1gwFUps+k8wM7JK7sFjQ6J12ywieHyWZPYjamkvPwYQaZWYn7vOb/GQ2/bP8xX\nAkymDxoBuKQ3DutrmOXXaDLqj9lGndRQ7WphxENapA8Rfz/YuUM2bqJY7TOeUaNWGmMxDTYtRoHF\ninCp33qt8DH2Nm38iC+mxvaRx94Ub/PYB/t1+6l8d2gTz7FO+dRKuj3OvbmM+qOhpsb3CQD1po5D\nXRVPQcLeP5tOhVdPy5f4UcGpm0rVl+BPiccRhMzgRsL8miGzz6jcCkytWaL+wqFiRPixqkSMiPUU\nikSK4xgAIG4S5fFEejYOx3krObWRVTHV1brxrPGplfT5fGqlErEWh2NaJwel1GWH2b94mroiCIIg\nHIIZHyGdHNUr0v6+Idaq5/E4m20Dvohe/Z06NlOftEQbere9qCukDY2OsePtStf5pROrZAVy0Yne\n9czCwUZe51mB43yuuIZy0Gdg1vu50Xd2i/YVbzRVtSjrVkHVEb3qDbGVTIYZyUnp8+VYMjOawO15\neMikP2aJ4mx/AsxPm69DqoxxraXBrRZtIrBBY+DuG3AR7F61PV+cCVulm9vO8cWjXeGWqPpmvxDw\nGVB5hKreDrP+25WYXdXGw24VFg3o5+kbogB7fqb/fLU9kQ/2mEm8x6N3cyYqlycP5CtJL10zkyzt\nws9KG77KZQVrkOaSTokIfu4U4NXVNvE4PmNucXLDYAkffV6nusY871ojHdTGmbE0qvdVRdzvhD97\ne2YeEV8olf0PLnmi3+BsUojzKG4mxeZNP3kdZStFegk3S0TjTySJltpfKGrjkoOVXLi04H7vVmKo\n5pKDMUhXG8mhhtX
fttJELWvj+23K7sirlBzKbZAWBEEQKpCZLzmYer68kIelmq1eCilW/COnV8Wn\nn7zca3vfu94OAPjmD/4LALB5i8vwYVfGPF/MgR4XQZzM6tU2LwZkXdnsAAfYUBfMqpanDOerChs9\nmmTSS22N/r5VW9oiR4DTaSoWEc5XwjkjOeRZ1HQWpV0FR0f0vSSTPGJXr9Z4SmFes3a2SXt9SvtC\nr62hXksznV37i+7VruBsbWHA78o6bHJKJdIsl5PVNduVLk87lLfFbErkYoJbWfLiJ1ZysCveaMCt\n2kLQ28yTFHztGAwYaZBFKRcmkB1SpjhNLlN8/3wMY+x5WZdGnrvIrvxsLiBe/9xKDjyKmdhK0coE\nWSaKZcx5bCQyj0jOZKxdw90Hjy63LpkxlhLa9rkqpvscj7KVrLGFRVmUeaiEayV/ZoWJ/UgB+KVE\nZ1MordO3481rKlup3Ukopd8dFO0tnfvLpUAv7rJ9FuEQ71+xfcEvOej+V3v2iOKxLnU8AMTM/oiv\njrhIDoIgCMIUIJODIAiCUMSMVyv19mhDNDeoWRdRXrmtkHNieNv8uQCAd/7phV7bKUu0SmTFaacB\nADY9+ay3b94cXRnsqss/4LW9+PJL3vaTW18EAAwPj3pt6bRWY9m028mME4Ntr3iytQwTOcPGdTZS\n69wr2xfr/ln1UzDA3ALNHB8m7i7KxW79SQVuYSwd0Wtr8/LKWNa4y1UBDSzadVm7TiO+csXJXltt\njYmQNSqy4RE3NlYsXta+wGuLMgNdv0n0NzDs0oYnEnY89T2OJZwaZGhE93mU1RXm9kzrelnLkpc1\nmgjeWuMC6FN52DrVvprTTIVgt31G09KulzYVNFdVWIOu3/3QPX/rdFBXwxPV6XuwyeV8VeKMQZ5H\n4IaZCs3aRnmadZt+fcxUTxsZdUkVR4wRnSfC4ypT29co67M1TtsKZjH2Plv1RijAXZ25n65V07im\nwARaJc+5gKmIrLowF+OqJPc8rAtrnld+LOH+Wkxph4PSNc5tW7FKDt7vhyXbY2qlGqNW4qoj697q\nffrG2oyxL8qa7Tft3HVX1EqCIAjClDDjJQcbMBZlNVTDYVuD2a0UgswA+2fv/lMAwOqzT/fa7Dw+\nZ67OtRNkQWZzWnSxklWvc3WZ16xyq+QLD6wCAAwNOnfNoWGTo6hH1xlOM8nhpe3amP37Dc94bRme\nbtkYOufNbvKalp/UBgCoMgFYSZZ7JhC06cGdqy1P0ausEZPlbxqfC8Zrt26hbKVhVzy8aMkcZpw/\n/VTtBmzzPgEuKGzfQZ2jqbnR3cucFr1qP/sM5xAwa5ZL8TxqnAcGmeQwOqaN89Y1tK/fudq+vFMH\n1L+4o9trGxxxYxGwQVvM3c/N4r3+AAAgAElEQVQWT7EGv0iIS1o24RDPH8W2TTAgMQcAqNKSg5W6\nwsw4aN0Yo2zlxwOaZjVpt+UGZrC3BljrEsslB+tmWlXFjJbMWBkyxmQeOGaL5gyN6LHuG3SSXW+f\n3h4Ydk4cbNGNeMysXFlK+Yi5hjX68hg3+yoFmDhAfFlqJIeAz6Gg9HiWKoyTs8Zn9p0Ie3+tM0TB\nt5z3G6J9paNKJrDiudlKSA5mbO0Y++uf689SBZMAJzH4JAcrnZWQAryx9jkJuM7ad5lLV6GQSA6C\nIAjCFCCTgyAIglDEjFcrReNa3OLpgCP9WgWRSjvVwsmLnFrjLRdqNVBNnImFxqdeWXOxcmL50jZt\nOG2qYXEJbORiy7SKJZ91aoD8OF/pCLnz/WFDJwDg4U3O6D2WcTENwYgWTTN5d750Sov6QWj1Q3XE\nqdEKMX2tNFMbEZPrQ1Ej6iecGiQ3Qe6aWKw4HbONuOQGz/ZFs73tU0/RxvJFC9wYDw5qtY81VNZW\nu6pp82Y1AwCWtLmKsG0Lml3fjFotwZ6fM8Tqz+6DTuVkc8j09Du1XiLjvhsyqjiuG
rK5tMhUsg4y\nYz6ZOAYVmCCOwahHiI1hoJSDO5zxkKt5rAqJ2wibG92ztunDbRps02F9X8aAzKueWfVCfZ17J3j9\na9sHHtFt1UoDJhL+QG/x8byueYLFvVgjaTTCVY/6k2wadqZyU7ZaONcl+Ypge3nD2f7ShuKoia3I\n5dx4WnUSz5WWYz9QWwHOH/tir6k/fPZxT61Uug/2PLxmes5TK9mMCG6f/b+A598KhbiBX9+LVdcB\nQDRq8zHZdN/FDiZ+1R3LK2XUokGeQn8iC/8hEMlBEARBKGLGSw7VZra1EbkAEKQeAG7WBYB3/On5\n3vZJS/Wqd6xnn9dmDUpD/do1lmc9PfnkZQCAuFvcIc8irmE2h1n+oIjJlVRt3CczeZf7aXBor7mm\na2thK8eoWekP9LqCOy936kjj05fp7Kz5nPtu3hi7s3CSA/OS9foSYxG3PAMmp95IYNxAauvX1te5\ntmXtruTGogV6pVtb48Z7cFD3RZlstDxittHkYGqo4+58rg82GyjPxQPobWvoC7GVUEuzPk9Dg1tK\npdjKMhzS24WCG58hU8BoNKkltsY8z/tj8u6UNGAC3nKTL34nWGbV2LFjkm2ArOup+1JrkyuOM6vF\nutm68bbPyxp8M5niVWZTvbtGYwMvBKPvny/W00byqK2Nmr7wvEw2V5O7Z+7qGgpaV1Y33na1autf\n8+JVOTPuuRLunYATKHxv5AQR0jYqmxdHshIYN9ryjLfKi2TnkoO/G+STHFRRmyoRQV3wSQ4m55WV\nIHJcSi++F26cjoat2yp7plZiMMZlX92poO0fG1C+7bnxMoeJCaSgQyGSgyAIglCETA6CIAhCETNe\nrdTaqo2gs5qcn7zNytY23ysyh7NOdTEKyeGDAIBc1vl2p1JaBOvp1r7y8RBTedRpcT2VZwVbks6A\nnM+YY5mRsLpaq04iNjc2G+lQVF+rpsqJfas7XNzE7BYda3HvvX/02p7dtgMA8PpzTwUARAPu+srI\n5dkJjMzpdLqojRca4jQ3avVcI/Oxb6zThtG5s+u9trY5Ls6h3iQFJMUilHPaIExGzcB9+uPGnzvA\ncoxzlY+Vipk9zYvbKBijYyzm1jW1NXrf7FZnwK2t5YVloqYv7jsDg1qt1GNSijc1u+PjwWIDbsl6\n1qVDRXzYSPJ0yvXN+qFzgy5XKzXXmzTizCkgkzFG1YJNd+6uYSOVeZyDVSUBQE213g4yg202Z56Z\nVzPdvbsJ875keGJI9vysYTUed++QNURnzXNM5dw7l87qfoXCPOKY1W/3CuQcXvVhk8oF2LHWnz+f\nY0ZodegYlPGqI/JFw7tespOwLRvLUGyQzpox40kObZQ8jwDnKeVtQj6upgubd8MWGPMblK3ar5Tq\nDCio4prqEzlMHAqRHARBEIQiZHIQBEEQipj5aiWjBuHeIDZS/KTFrr7AnGanJhnoMWkWWHqNvPFu\nmT9Hq6mYo4iXJoFlEMAYy88fzusL1jU6tYsVOW3e/ULEiXVz5mi10UlL27y2k5fO87bPX6XjMLY+\nvcNr2/aS3t7do1Vhpy9n1zL1JIJ5Vv2KqQRs7nqe6C+fd3EAHKtWam5wag47xvNaXRxDK6v6FrOX\nZXWyrc+5VW9UxZhawqgg0iwZYjLDUgHYxHcBnjzOr3rg6QBqavTD4vU2wqw+Qiyq343e3mGvbcyo\nTg4MaPXS3KQbz5CJIaEjUStNoBKxsQqZjFOzWBVbjKmVmuqc2qne3A+vOZwK6Gdnq8hxNUfIeBrx\nsuDEdHJWdRT01Uw3KRiMKoOriGprtBoulWaqOabHsuoKnuzNU0+ZWuFJFqOSSJv0KyFe95p7OhVX\nnpsgu4tXB4Fng/AS6/EiH6qUp1lxIkU6ZIxFabVSQZVSK9l6I7ovWfb7s235EjVGAJcUL8rTYYzz\nUvLH6JjkfkyVlFfs916wn9ybqcLTZxDRWiI6S
ERbWdvNRPQCET1DRP9FRA2HOocgCIJw7JluyWEd\ngP8AcDdrWw/g75RSOSL6RwB/B+Bzkz2hTQc816zGAaCxXq9q2xc6yaE+6iSLbF6vbvJwRruAqcN8\n9tk6ZfcD//OYt6+/X8dN9A+445NjbtauiukZOsoqY9k1QtYYEMOs5nTYxFCEyB2/cLaTHBbP0yv0\nd77jXK/t23ffBwB4/EmdKnzZYrfPrsHGRl0yOr5KDAf1dWoa3RhkmDGeY6uzNTMpaLaJ2J3T7Fbm\njTVOcgib++DVuaxBvtXUvh5g6cyzRmIYSbjVZSzOq6LpzyhbCuftasksW3N85WXSbdfVuNV3U4Nb\nYzQ06GfL02LvP6iTHx4wcS29g+54W87bl6uMrfrsSo9LL4TSkkOVSctdm+YGaWPQZQbIehboYVN5\n89W6XWmmjcSaY6tGa6zMs9Vylvn5Z8yKusBWj16iOBsvwFbq1vhcXe3e9yC7V7sg5Wm8rXOB/W0l\nmBNELGUkhyCTHAqsvrFN1jdRADXDRtzzaOiCkZj5Of2r/uL03O55Fcd0lGrjiQCd5MBW7qYaYdas\n+EM5Fq1to6Z9dc/dpo154HEvofGG6BLR/YUC1w4Q2za/kQD/ToVLDkqphwD0j2v7jVKeTPQYgAVF\nXxQEQRCmlUozSF8F4IGJdhLRtdPYF0EQhBOWijFIE9H/B23z/f5Exyilbieib/G2ujqtMli4wAkc\nzSYf/rxZTtWEnJsHA0adlGdzYzSuDbDz52mRrW2hM7527toDADjlVHe+bIb5UZu4hQBLGWBD65NJ\nHRvRFHFJ5g6Y6nXJURerUBV3xr+Bfm0wf83JLkVFm6matvnZ5wEAHac7NVRzjTWGcTHXiZQ2RmB+\nxJ0vleHmdUfIqGjirD/WSN3CYkmqom5/wKqVWEqAeFxvNzXr79b0OIExbQzjQ2NMrVTtVGw5o8JJ\n+QxqWmy2qQgGRpnB0ySR477g9voAUF8bMtdlYzxqqs2ZCnX7WaqSmHmeMZaOgWPVPRGmpgpMsMwK\nG0d1nj4kYlQivLpXDatSVxXTaqUAV52YW4tG9dhl8s65IGfUC1yVlMywuJGATcTGVR3m/TRjl+JV\nE41ahcc2BFi9C6sS4TVBrEooa/rCDdJjSWNwZmqOPFMBWXVKkFmkJ7Kf2vQZ+SBXc5l++ozQpQzS\nxW1WXcSNu6UMvkoxtY2pqMi0ZMhZw7H5WfH3IWBUPoUJYg3I3CxXldln5caBq7ismoo/T349m/yQ\nqZXUBHq6Q1ARkwMRXQHgHQDerEq5hQiCIAjTStknByJ6K4DPAnijUipxuOPH0z+mVzxPPLXNa1s4\nRxtDl5/kVv9DKbeSsauwIDP4Jfr1ylEF9Epq1dlnefse2ajPfR5bjDFPT/QPaElgaMxFUI9ZF1Yz\n9SdZ+u1nntXOWgsWu2SB+YhLpNe5XycErIs7A/AZK5YAAB7asAkA0Dvi3DKrTIQws0khk3Ur8XzW\nGAcHD3ptSdZXTv+QfgQpVmmsplqvZJuY+2qErYQV9LUKihuJ9adNpV7HXDX7TJU8LjlUp5zx064+\n+UIrnTUrXZOye3jIvSpDpmIcN3gGw04yoqC+/5oa17+6en294RG9b2DEpQBvHNWdr2bunVDFrodh\nJo1OtNK192CT1QHOIF3D0nhXM0nNiwJm5xwvOXDJIGekQB7RTCm3xrKSBY/KzZpKgknznG2tc8BV\nTgswd9goM0jbOs78edv7LBjzYTrr+pdIWbdMHmXMKrkVbPU4d77ABBZpWwGuwCQH5bnFTpQosVC0\n30kM1tGBGetV8fuXLxZikT+E+yt/H0qNF++efS68zrMdCycNc4N4seTA+2cN0r7EfJVukCaiewBs\nAHAyEXUR0dXQ3ku1ANYT0RYi+uZ09kkQBEEoZlolB6XUZSWa75zOPgiCIAiHp+xqpVfLK9t1bYQ/\nPrLZa3vbm
84A4FeDZFKsILtJqheJudgDZeozkBHjTjttubfvd488DQDY09XjtTWfzo3TWiRPZZys\n2Dukz9fTr1Uo21J7vH1Jk7Rv5Vmnsj5xudCex6ldXnOyNki/+Mp2AEBnZ7e3b/GCk/R5M041ksqx\nKlAmwjuZcAZwliPQx56uAwCA0RGndrJ1AhpZ7EMs7vzygyYfPdcEhIzcXG1UUrVMrWST3Q2xAvY2\neR4AZI3/fyrtxqR/UB/bNzBq7sV911Zza2A1DCIx1hkTXRxjbU31ul8Zo5JJJt37MWa2Q8wI6wve\nNd3K+gsQoBTWSSDI8/dHbEF59/7FY7wSm1E3EVcNmXoO5ru+xIlZY6xnyd54UrZsrtggau87ZdR0\nPKLXGmR5HAcvZm/VH9xtP2feN2XjUNj50hl/bQLAb5y2aiGuVpooziFi1Eo87sDFoHDdT3F0M/+O\nM+raT3d0wPz+uJqWJ+bz1Em+IhB2fIprQdhHr5gayFcdxFajY+9IKOCPGvclJTT3oXxxFm533lYq\n5IqhozBIV5orqyAIglABzHjJ4blntGtnjJVKOuN0nZsoGnJW40iNWxnaymjBIHOfNAY0a6Oew9JT\nF4yL6vaX3Op/YbMzJg4P6Wjb6ipXV3mwR69sf/fbjfpacCvdCy88GwDQ3OTcW3Npt5SP2U4EXJ8b\njavn0vlagti3y7leZjrM8cxgHmIrx0jEWtDY6mOC9N4DfbqaXZSlLJ/Tql1gW5udtFTLc1mFbOU0\n5vpnltdVxrhqo34Bt6gbHXaSzDCrBFcw+ZgSCbf6PLBPG+z3dffpc7AaxbNnacN+NbtGhK2sbXrn\nIFu51plrDBnX3UzSnS8xqp9Fdcy9H5EwN5aalRkbwsIETnZk2sOsPzY/UJxJDjEmOdha3YqthMOm\nn5Gw/WQRxmmTspqvHrM8ZXRxdTIbaW0liAJbVVuJKRzk0g6rSW7TpzPRIWP6mjXXVez9yhvjt8sL\nBbDs2qz+8SQkh5AdG1/4uvl26VxIXsQzu8e8WUnbMuEB9tvwVumsjV/PRk6rEtezq/UAc8u1hujD\nOWLy79ht697rHw8rCZWO4LbvgT8FukgOgiAIwhQw4yWHkQG9gn73u97qtZ27ciUAIBp2S2lFzlXP\nBoWlWKBOKKGHIkhmSJgr56wWvTLt63Y2h/17nRtqxFwnG3Tf2fmyljJSw7qtvd1JCa0mmCyXdNdn\nno4oxPUsn2HuigStp59lgv5e6O9ifdG2hoXz2fI75lZpQePiGeC5dXiQFMO6Vzaw3EQL5mnJoaGO\n2RxYXiC7hM4xO0fQXMuu4GtY/WorRSQSLGiQBxWaDLYFtvrNpYw7pjmMSwkNJs9Tbczdf4RJhban\nPOtn3KzOYyY/0ACzYYwO6efSVOv6HI6683FbhCWfKy2JRc11eIbVqpg+r5UgACDM3Krt+8kXmp77\nrBlPLjnYGsSK59opcP26WX0yvbPdDlo3SpYnKRqxgXvunnkGVpsLqcCkA7IBY3lblIitas02lyZ8\nBgu7PYmsrFaq4oNzuOI88PJyuetbk0cpnb7KW8mAP1P+vqui71hzQsFzW+Wrer89YiJ8dgqyrqz+\nT/9xpSUHe2m/jUMkB0EQBGEKkMlBEARBKEImB0EQBKGIGW9zOHeVrr/QNM8llRsd0WkiUsxjJ8ds\nDlYfmOPJxozONFDQetYg09FddOEaAMDuXa4y29xW55k013g2dR9wKS3qa7U++eK3vQEAsHC+s1HU\nmjQOIaYXD0X4o8iZ/U6vHjTV5lpqdNvIiPNWevbpFwAAC1pe57VF4u7erft4JOx03IiU1kEuXqg9\nkmpqnadXnfGUUrxyW4LpW21+eeYukzcVwcjoPWviLs5hTotOazIyypLjsTiHGuNJFGC192a1aJ1q\nbbW2K9TUOHtAc1O16TOLc4hy7xrrScL0smHjkRTRtoZBpg9Pjel3Jc/sILz
2hlfAjenwcxPkz6it\n1v0MMxuBrSvh8/tn31EF613D9OpmM2R84aMhbg+wz7W4voA+j/EWYn701sZQyJukiSzGwNpHYnF2\nz8xbyXrQcDsLKfNbsrVS2HiGrP6cKdUDPv26Oe8k1OIRzzZTojIf18H79PHGW4intDDJ+sjzSHL3\n52wNPPEiz3dhxo71t2Duzer2fZ5Epc5RAp/JhfyfPCmh3eRj6HdmUvyjqD+TRSQHQRAEoYgZLzks\nWtwMAOgb6fXauvfpVTX3Q88oJyV4XtV8tWes/U3VelWbZ4m4GuvNqndBs9cW5Stvs2BrbHTSwflr\nVgAA4lVmBZ53kkvc1FPm3gTxahfN7dV8zjgPmphZRQdn6X4ubJvljjf+67wuLa8ylkzqqOIsq9ub\nTJSuIb1gnk32x1aeeZ3kbpRVmvN5R3h+3w77bWXGMcakllaTUr26yl0jFnHfttW+4nGegltLDHah\nx/3uq0w8QiTGK5exlaBdNbE018GCloKaGvS4JhJun/WICQe59FGcnpoH6QYm8AapMxIOj5AOmRME\nuYcM9zSy3iZ89VmwHmf6OtxbyXo9BX0psfn92yhglrzNeuSYawRYTIO9f56S3F+VTpMLOkmFzLKc\nzPPmMQ321n3jycfDekxNFNzAiJTwVkKpuAO23zpDFUp8x66P/XEMxdIJl0rsip39F4GCuX9f5LZ3\nlsOt2q1kO3FbqYhrn/TFvulJHb7xEG8lQRAEYQqQyUEQBEEoYsarlepNUfk8q8KWM8FVBRb+HvEZ\nlzThMKt5YFQ5aSPe51hmuuYaPUz1cVcfIsvURDljoIrEWcF4Y9SKm7bEEKuPEDHBZEw1EGa5/QcG\ntRoox1QDQaM6iZPu10VvOtPdSFYfF69xRl+EnEoqEtcqqwIzUtbUOOMtp6FOt6dS7v6yNjkbMzj7\nqlp5xjMeqOMX/7nKoNYUro9GXNAaBdy5bUBYNObWLjGbasLW4mBSsjWghiLM4M5UGDYhXSHHxtO8\nD00NJskaU/HZrSoWaMeD1AKmr1xNEZxgmWUN0v5UB16UktdW8KVqQPF3xqmVeH+sGo7XSygorlYq\nNnCPN1D61F5GdRP03XNxuopcoIRayaw3c2z8bbAYT0ESKlG7IFh8iSK89Bk+g2+p+2MBb0alwlXF\nRWolbq32tlm9DmL3U7DX4CkyxhuxJ6dK8m2VVHsVpwZxifrc0QGmorQqzknEFB4SkRwEQRCEImim\nVeUkIqkkOoUQ0VG5uQmlkfGcWmQ8jy1mfEsKFiI5CIIgCEXI5CAIgiAUMd01pNcS0UEi2sramoho\nPRG9bD4bD3UOQRAE4dgzrTYHInoDgFEAdyulTjNt/wSgXyn1dSL6PIBGpdTnJjpHPB7vTqVSsyfa\nLxwZ0WgU6XT68AcKk0LGc2qR8Ty2xGKxA8lkck6pfdNukCaixQB+ySaHFwFcoJTaT0RzAfxBKXXy\n4c7T0dGhNm3adEz7KgiCcLxBRJuVUh2HO64SbA6zlVL7zXY3gAmlAiK6log2EdGmnp6eiQ4TBEGY\ndm655ZZyd2FKqYTJwcP4qE4oyiilbldKdSilOlpbWyc6TBAEYVrp6urCDTfcgL1795a7K1NGJUwO\nB4w6CebzYJn7IwiCcETcdtttGBoawm233VburkwZlZA+4xcAPgrg6+bzv8vbHUEQhENz44034v77\n70csplOsWInh3nvvxSOPPAIASKVSuPjii/GlL32pbP18NUy3t9I9AC4A0ALgAIAvA/g5gHsBtAHY\nBeBSpVT/ROewiEFaEIRyMTg4iIsuugiH+j+oo6MD69evR0NDwzT27PBM1iA9rZKDUuqyCXa9eTr7\nIQiCcDT0jabx2I5+bNjRi9glX0ak66+Q6X656LhKnRiOhEpQKwmCIFQkQ8ksnujsx4btfXh0ey9e\n6B5hewntH74J++/+awwf2O21Ll++fMZ
PDIBMDoIgCB6JTA4bdw5gw/Y+bNjei2f3DnmV5AAgGgqg\nY3EjVi9pxuqlLWjCCM77bgaJUAjt7e3o7OzE8PAwxsbGZHIQBEGYqaSyeTy1exAbtvdiw44+bNkz\niCyrrREKEFYuavAmgzPbGhALu9oOn//8PyKfz+P666/HV7/6VXzxi1/EXXfdhVtvvRU33XRTOW5p\nypDJQRCEE4ZsvoBnuoawYXsvHt3eh827BpDOscJABKxYUI/VS1uwemkzOhY1ojo68X+TdXV1WL9+\nPVas0DXjb775Zlx++eV44IEHjvm9HGtmXD0Hi3grCYJwOPIFhef3D+NRMxls7OzHWCbvO+aUObU4\nb2kLzlvajFXtTaiPhyc42/FBRXorCYIgHEuUUnj54CgefUVPBo939mMomfUds6S1GuctbcZ5S1tw\nTnsTmmuiZeptZSOTgyAIMxalFHb2JTxvosd29KF3NOM7ZkFj3JsMVi9txuy62ARnEzgyOQiCMKPY\nO5j0JoMN2/uwfyjl2z+rNuqbDBY2VZWppzMbmRwEQahoekbS2LCjzzMi7+pL+PY3VUdw7pImrDZ2\ngyUt1SAqWRZZOAJkchAEoaIYTGTw2I4+Ix304eWDo779tdEQzmGTwcmzaxEIyGQw1cjkIAhCWRlJ\nZbFxZ783GWzbPwzuRBkPB9GxuNHzKHrtvDqEgpWQUPr45ognByJqgyvIc0AptftQxwuCIHCSmTw2\n7xrQNoMdfXimawh5FoYcCQZw1qIGrF7SgvNOasaKBQ2IhGQymG4mNTkQURDAFwD8BcZVaiOibgC3\nAfi6Uipf4uuCIJzAZHIFbNkz6BmQn9o9iEzeBZ4FA4Sz2hqw2hiRVy5q9EUhC+XhsJMDacvOLwFc\nBODHANYD6AJAAOYD+FMANwA4D8Dbj1lPBUGYEeTyBWzdN+xNBpt2DiCZdetGIuC0+XXam2iJDjyr\nOUQUslAeJvNEPgTgQgBvVUr9tsT+tUR0EYBfEtGfK6V+MKU9FAShoikUFF7oHvEmgyc6+zGSzvmO\nWT67BuctbcG5S5px7pImNFRFytRbYbJMZnK4DMC6CSYGAIBSaj0RrYOeSGRyEITjGKUUtveMea6l\nj+3ow0DCH4W8uLnK8yY6d0kzWmslCnmmMZnJ4QwAd07iuP8B8H9fXXcEQZgubrnlFnzmM5+Z1LF7\n+hNefqJHt/ehZyTt2z+vPuZNBquXNmNeQ/xYdFmYRiYzOTQD2D+J47rNsYIgVDhdXV244YYbcOml\nl2L+/PlF+7uHUtiwoxePvqIng72DSd/+lpqINxmct7QZbU1VEnh2nDGZySECYDJeSHkAR53OkIg+\nA+AaAArAswCuVEqlDv0tQRCOhttuuw1DQ0O47bbb8LWvfQ29o2kv8GzD9j7s6B3zHV8fD+PcJU1e\nrMFJs2pkMjjOmayLwKeI6HDSw9yj7QQRzQdwHYBTlVJJIroXwAcBrDvacwqC4Ljxxhtx//33IxbT\nSee6uvYCAL75ne/hP3/0KyQyeahcFvGlHWg4/zJUR4I4u73Jy0/0mrl1CEoU8gnFZCaH3QDWTPJ8\nryYgLgQgTkRZAFUA9r2KcwmCwLjuuutw33334cEHH/S19+/fDfuzbVp0Cj7/2b/Cm1e04/T59QhL\nFPIJzWEnB6XU4mPdCaXUXiL6Z+i3NAngN0qp34w/joiuBXAtALS1tR3rbgnCccPOEeDMj30Dz+y9\nBpn9LxftP2vlSvzvb3874+seC1NHRVSCI6JGAD8F8AEAg9DBdj9RSn1vou9IJThBODS5fAG/2XYA\ndz7cic27BnRjZgxD93wWg927vOOWL1+Oxx9/XCaGE4QpqwRncilNmqPMtfQWAJ1KqR5zzZ9BR1xP\nODkIglCakVQWP9q4B+se3YmuAe1lVBcL4bJz2nBRWwjvvCeNUCiE9vZ2dHZ2Ynh4GGNjYzI5CD4m\nY3P
YCe1BNBnUJM85nt0AziWiKmi10psBiFggCEfAnv4E1j26Ez/auAejJkJ5cXMVrjy/He9buQDV\n0RA+//nPI5/P4/rrr8dXv/pVfPGLX8Rdd92FW2+9FTfddFOZ70CoJCbzH/k7D7O/CsDHAbwJQPYw\nx5ZEKfU4Ef0EwJMAcgCeAnD70ZxLEE4klFJ4cvcA7ny4E7/e2g2b3PSc9iZc8/oluPCUWT4vo7q6\nOqxfvx4rVqwAANx88824/PLL8cADD5Sj+0IFc9Q2ByKqAfBJAJ8BUAdgLYB/nK4U3mJzEE5ksvkC\nHtjajTsf7sTTewYBAKEA4V0r5uGqNe04bX59mXsoVCpTZnMoceJGANdDTwwRAN8C8M9Kqe4j7qUg\nCEfEUDKLHz6xG3c9uhP7TO3khqowPnROGz6yejFm18XK3EPheGHSkwMRzQLwN9AqpDyA/wDwr0qp\nvmPUN0EQDLv6xvCdR3bi3k17kMjohAVLWqtx1fnteO9ZCxCPSP0DYWqZjLfSAgCfA3AVgDEAXwfw\nH0qp4WPcN0E4oVFK4YnOftzxcCd++/wBr3Tm+Sc145o1S/DG5a1SO1k4ZkxGcngFOmfSHwB8E3qC\nWDNRXhWl1P1T1TlBOFnBoeMAABlASURBVBHJ5Ar41bP7cOfDndi6V6/BIsEALjlD2xNeM7euzD0U\nTgQmm3gP0N5IF0BXgJsIBUDkW0E4CgYTGXz/8d24e8NOHBjWKbGbqiO4/NxFuPzcNsyqFXuCMH1M\nZnJoP+a9EIQTmB09o1j7SCd+unmvV05z2awaXL2mHe8+c77UUxbKwmRyK+063DGCIBwZSils2N6H\nOx7uxO9eOOi1v2F5K65Z047XL2uRlNhCWZmMQboHk4+QhlJq1qvqkSAcx6Rzefxii7YnvNA9AgCI\nhAJ471nzcdX57Vg2u7bMPRQEzWTUSrfiCCYHQRCK6RtNG3vCLvSOantCS00UH1m9CB86pw3NNVJj\nWagsJqNW+so09EMQjktePjCCtY904mdP7kU6VwAAnDKnFlevace7zpiHaEjsCUJlcjRJ8gRBOARK\nKfzx5V7c8XAnHnqpx2u/8JRZuGZNO1YvbRZ7glDxyOQgCFNEKpvHz5/ai7WPdOKlA6MAgFg4gPet\nXIArz2/H0taaMvdQECaPTA6C8CrpGUnju4/twvcf24W+sQwAYFZtFB89bzH+/Ow2NFZHDnMGQag8\nZHIQhKPkhe5h3PnHTvz3ln3I5LU94bT5dbh6TTvefvo8REJSg1mYucjkIAhHQKGg8OBLPbjj4R14\n5BWdc5IIuOjU2bhmTTvObm8Se4JwXCCTgyBMgmQmj58+2YW1j3RiR88YAKAqEsSlHQtxxXmLsbil\nusw9FISpRSYHQTgEB4ZTuHvDTnz/8d0YTOhCh/PqY/joeYvxwVVtqK8Kl7eDgnCMkMlBEEqwde8Q\n1j7cifue2YdsXseArljYgKvXtONtp81BOCj2BOH4pmImByJqAHAHgNOgI7KvUkptKG+vhBOJQkHh\nf184iDv+uAOPd/YDAAIEvO20Objm9e04q61R7AnCCUPFTA4A/g3Ar5VS7yOiCICqcndIODEYS+fw\nk81d+M4jndjZlwAA1ERD+MAqbU9Y2CSvonDiURGTAxHVA3gDgCsAQCmVAZApZ5+E4599g0nctWEn\n7nl8N4ZTOQDAgsY4rjhvMT6waiFqY2JPEE5cKmJygK4Z0QPgO0S0AsBmAJ9WSo3xg4joWgDXAkBb\nW9u0d1I4Pnh6zyDufLgTv3p2P/IFbU9YuagRV69px5+cOhshsScIAkip8idcJaIOAI8BOF8p9TgR\n/RuAYaXU/z/Rdzo6OtSmTZumrY/CzCZfUFi/rRt3/LETm3YNAACCAcLbTpuDq9e048y2xjL3UBCm\nByLarJTqONxxlSI5dAHoUko9bv7+CYDPl7E/wnHCSCqLezd1Yd2jn
djTnwQA1MZC+POz2/CR8xZj\nfkO8zD0UhMqkIiYHpVQ3Ee0hopOVUi8CeDOAbeXulzBz2dOfwF2P7sSPNu7BSFrbExY1V+HK8xbj\nfR0LUROtiFdfECqWSvqFfArA942n0g4AV5a5P8IMZPOuAax9uBMPbN0PY07A2e1NuHpNO97ymtkI\nBsQVVRAmQ8VMDkqpLQAOqwcTTmxuueUWfOYzn/G15fIF/Po5bU/YsmcQABAKEN61Yi6uXrMEpy+o\nL0dXBWFGUzGTgyAcjq6uLtxwww249NJLMX/+fAwls/jRxt2469Fd2Duo7Qn18TA+dE4bPrJ6MebU\nx8rcY0GYucjkIMwYbrvtNgwNDeEfvvFvaLngo/jxpj0Yy+QBAEtaqnHlmna896z5qIrIay0Irxb5\nFQkVy4033oj7778fsZiWAHbs2gMA+Na67yH4kwcAADUhhXe8/WKsvekfERB7giBMGRUR53A0SJzD\n8c/g4CAuuugiHOo5d3R0YP369WhoaJjGngnCzGWycQ4SCipULA0NDfjpffejafFrSu6XiUEQjh0y\nOQgVS9dAAtfcsw3Vl3wZ0eYFvn3Lly+XiUEQjiEyOQgVydN7BvHuWx/FSwdG0VYXQF0wi1AohGXL\nliEUCmF4eBhjY2OHP5EgCEeFTA5CxfHrrd34wO0b0DuaxnlLm7EqsRFQBVx//fV49tlncf311yOf\nz+PWW28td1cF4bhFJgehYlBK4Y4/7sBffH8zUtkCLu1YgHVXno3WpgasX78eN998M6LRKG6++Was\nX78edXV15e6yIBy3iLeSUBHk8gV85b7n8L3HdgMA/vZPT8YnLlgqldcEYYqZaVlZhROYkVQWn/zB\nU3jwpR5EQgF84/0r8M4V88rdLUE4oZHJQSgr+waTuGrdRrzQPYKm6gi+/ZGVWLmoqdzdEoQTHpkc\nhLKxde8Qrlq3EQdH0ljSWo3vXLEKi5qry90tQRAgk4NQJn677QA+dc9TSGbzOKe9Cd/68Eo0VEXK\n3S1BEAwyOQjTznce6cTf/3IbCgr4s7Pm4+t/9jpEQuI4JwiVhEwOwrSRLyj8/S+3Yd2jOwEAf3XR\ncnzqwpPEI0kQKhCZHIRpYSydw3X3PIX/feEgIsEA/ul9r8O7z5xf7m4JgjABMjkIx5zuoRSuvmsj\nnts3jIaqMG7/cAfObhePJEGoZCpqciCiIIBNAPYqpd5R7v4Ir55t+4Zx1bqN6B5OYXFzFdZesQpL\nWmvK3S1BEA5DRU0OAD4N4HkAkhfhOOD3LxzEJ3/wJMYyeaxa3IhvfbgDTdXikSQIM4GKcREhogUA\n3g7gjnL3RXj1fPexXbj6ro0Yy+RxyRnz8L1rzpGJQRBmEJUkOfwrgM8CqJ3oACK6FsC1ANDW1jZN\n3RKOhHxB4R/ufx53PNwJALjuzcvwmbcsE48kQZhhVITkQETvAHBQKbX5UMcppW5XSnUopTpaW1un\nqXfCZElkcvj49zbjjoc7EQ4SvvH+Ffiri5bLxCAIM5BKkRzOB/AuIroYQAxAHRF9Tyl1eZn7JUyS\ng8MpXH3XJjy7dwh1sRC+9eEOrF7aXO5uCYJwlFSE5KCU+jul1AKl1GIAHwTwO5kYZg4vdA/j3bc+\ngmf3DqGtqQo/+8T5MjEIwgynUiQHYYby0Es9+MT3n8RoOoez2hrw7Y90oLkmWu5uCYLwKqm4yUEp\n9QcAfyhzN4RJcM8Tu/HFn29FvqDw9tfNxTfevwKxcLDc3RIEYQqouMlBqHwKBYV//J8X8K0HdwAA\nPnHBUvzNn5yMQEAMz4JwvCCTg3BEpLJ5fOZHW/DA1m6EAoSb3nM6Ll21sNzdEgRhipHJQZg0PSNp\n/J+7N2HLnkHUxkL45uUrcf5JLeXuliAIxwCZHIRJ8fKBEVy5biO6BpKY3xDHuitXYdnsCeMVBUGY\n4cjkIByWR17pxce/txkjqRxWL
GzAHR/pQGuteCQJwvGMTA7CIbl34x584b+eRa6g8NbXzsEtHzgD\n8Yh4JAnC8Y5MDkJJCgWFb6x/Ebf+fjsA4GNvWILPvfUU8UgShBMEmRyEIlLZPP7mx0/jl8/sRzBA\nuPGS1+JD5ywqd7cEQZhGZHIQfPSNpnHtdzdj864B1ERDuPVDZ+GNyyXJoSCcaMjkIHhs7xnFld/Z\niN39Ccyrj2HtlatwyhypuyQIJyIyOQgAgMd29OFj392MoWQWp8+vx50f7cCsuli5uyUIQpmQyUHA\nz57swud++gyyeYWLTp2Nf/vgGaiKyKshCCcy8j/ACYxSCrf89mX8+/++DAC4ek07vnDxaxAUjyRB\nOOGRyeEEJZ3L43M/eQY/37IPAQK+8q7X4iOrF5e7W4IgVAgyOZyADIxl8LHvbsYTO/tRHQniP/78\nLLzplFnl7pYgCBWETA4nGJ29Y7hq3UZ09o5hTl0Md17RgdfOqy93twRBqDBkcjiBeKKzH9d+dxMG\nE1mcOrcOa69YhTn14pEkCEIxMjmcIPz3lr342x8/g0y+gAtPmYV/v+xM1ETl8QuCUJpAuTsAAES0\nkIh+T0TbiOg5Ivp0uft0vKCUwv/935fx6R9uQSZfwEdXL8LtH14pE4MgCIekUv6HyAH4a6XUk0RU\nC2AzEa1XSm0rd8dmMplcAX/3s2fx0ye7QAR86R2n4srz28vdLUEQZgAVMTkopfYD2G+2R4joeQDz\nAcjkcJQMJbL42Pc24bEd/YiHg/j3y87ERafOLne3BEGYIVTE5MAhosUAzgTweHl7MnPZ3ZfAFeue\nwI6eMcyqjeLOj67C6QvEI0kQhMlTUZMDEdUA+CmA65VSwyX2XwvgWgBoa2ub5t7NDDbv6sf/uXsz\n+scyOGVOLdZesQrzGuLl7pYgCDOMijBIAwARhaEnhu8rpX5W6hil1O1KqQ6lVEdrq6SRHs8vn9mH\ny779OPrHMnjj8lb8+OOrZWIQBOGoqAjJgYgIwJ0AnldK/Uu5+zPTUErhPx/cjn/69YsAgD8/pw03\nvuu1CAUrZu4XBGGGURGTA4DzAXwYwLNEtMW0fUEpdX8Z+zQjyOYL+OJ/bcWPNu0BEfCFt70G17y+\nHXq+FQRBODoqYnJQSj0MQP43O0KGkll84vub8cgrfYiFA/jXD5yJt542p9zdEgThOKAiJgfhyNnT\nn8BV6zbi5YOjaKmJ4s6PdmDFwoZyd0sQhOMEmRxmIFv2DOKauzaidzSD5bNrsPaKVVjQWFXubgmC\ncBwhk8MM49db9+PTP9yCdK6ANSe14LbLz0JdLFzubgmCcJwhk8MMQSmFb/9xB/7hgRegFPCBjoX4\n6ntOQ1g8kgRBOAbI5DADyOUL+NIvnsMPHt8NAPjcW0/Bx9+4RDySBEE4ZsjkUOGMpLL4yx88hYde\n6kEkFMAtl56Bt79ubrm7JQjCcY5MDhXM3sEkrl63ES90j6C5OoLbP9KBlYsay90tQRBOAGRyqFCe\n7RrCVXdtRM9IGktbq/GdK85GW7N4JAmCMD3I5FCB/Oa5bnz6h1uQzOaxekkzvnn5StRXiUeSIAjT\nh0wOFYRSCt95ZCf+/lfboBTwvpULcNN7TkckJB5JgiBMLzI5VAi5fAF//8ttuGvDLgDA3/zJcvzl\nm04SjyRBEMqCTA4VwGg6h0/94En8/sUeRIIB3Pz+1+GSM+aXu1uCIJzAyORQZvYPJXHVuk14fv8w\nGqvCuP0jHVi1uKnc3RIE4QRHJocysnXvEK6+ayMODKfR3lKN71yxCotbqsvdLUEQBJkcysXvXjiA\nT/7gKSQyeZzd3oRvXb4SjdWRcndLEAQBgEwOZeHuDTvxlV88h4IC3nPmfHz9vacjGgqWu1uCIAge\nMjlMI/mCwtd+9TzWPtIJAPj0m5fh+rcsE48kQRAqDnGgP8bccsstAIBEJoePfXcz1j7SiXCQ8C+
X\nrsBnLlouE4MgCBWJSA7HkK6uLtxwww1409vehS+u34ete4dRHw/jWx9eiXOXNJe7e4IgCBNSMZID\nEb2ViF4koleI6PPl7s9UcNttt2FoaAjv+IsvYeveYSxqrsLPPnGeTAyCIFQ8FSE5EFEQwK0ALgLQ\nBWAjEf1CKbWtvD07Mm688Ubcf//9iMViAIBXdur6Cwe2/B4NXduQm1OLD/00g4svvhhf+tKXytlV\nQRCEQ1IRkwOAswG8opTaAQBE9EMAlwCYUZPDddddh/vuuw8PPvigrz03uB+9g/vR+wrQ0dGB6667\nrkw9FARBmByVMjnMB7CH/d0F4JzxBxHRtQCuNX+OEtGL09C3IyUIYDmAUvm1E5s2bXqpsbExP819\nOp5oAdBb7k4cJ8hYTi0zZTwXTeagSpkcJoVS6nYAt5e7H0cCEW1SSnWUux/HCzKeU4eM5dRyvI1n\npRik9wJYyP5eYNoEQRCEMlApk8NGAMuIqJ2IIgA+COAXZe6TIAjCCUtFqJWUUjki+iSA/4HW2a9V\nSj1X5m5NFTNKDTYDkPGcOmQsp5bjajxJKVXuPgiCIAgVRqWolQRBEIQKQiYHQRAEoQiZHI4Rx2M6\nkHJBRGuJ6CARbS13X44HiGghEf2eiLYR0XNE9Oly92kmQ0QxInqCiJ4243lDufs0FYjN4Rhg0oG8\nBJYOBMBlMy0dSKVARG8AMArgbqXUaeXuz0yHiOYCmKuUepKIagFsBvBueT+PDtKplauVUqNEFAbw\nMIBPK6UeK3PXXhUiORwbvHQgSqkMAJsORDgKlFIPAegvdz+OF5RS+5VST5rtEQDPQ2cpEI4CpRk1\nf4bNvxm/6pbJ4dhQKh2I/PiEioOIFgM4E8Dj5e3JzIaIgkS0BcBBAOuVUjN+PGVyEIQTFCKqAfBT\nANcrpYbL3Z+ZjFIqr5Q6Azq7w9lENOPVnzI5HBskHYhQ0Rjd+E8BfF8p9bNy9+d4QSk1COD3AN5a\n7r68WmRyODZIOhChYjEG1DsBPK+U+pdy92emQ0StRNRgtuPQjigvlLdXrx6ZHI4BSqkcAJsO5HkA\n9x5H6UCmHSK6B8AGACcTURcRXV3uPs1wzgfwYQAXEtEW8+/icndqBjMXwO+J6BnoheF6pdQvy9yn\nV424sgqCIAhFiOQgCIIgFCGTgyAIglCETA6CIAhCETI5CIIgCEXI5CAIgiAUIZODMC0Q0WlEpIjo\nAtamTAXAyZ7js/z7U9SvC0w/ZnxEKxF9hYh6y3j9nUT0z+W6vjC1yOQglJPVAH58BMd/FsAFx6Yr\ngiBwKqKGtHBiUs6UxiZKOFqu6wtCpSOSg3BMIKJPENEeIhojovugo0jHH+NTKxHRGiL6IxENm39b\niOj9Zt9OAM0Avmy+p4xKaLHZfse4c68jok3s768QUa+5xkYAKQDvZ1+ZR0S/NP3dTUQfH3e+1UT0\nCyLab47ZQkQfGnfMFaYvpxPRenPcC0T0ZyXu/T2mQEySiPqI6H4iWsT2n0ZEvyKiEfPvx0Q0Z1KD\n779OExHdTkQHiChFRI8S0Tls/x+IqEh6I6KbzTiQ+TtGRP9knmnaFLaRqOrjGJkchCmHiC4BcCuA\nXwL4MwDPAlh7mO/UmeN3AHgvgPcB+C6ABnPIewAMQecEWm3+PXmEXasCcBeAO6AToz3B9t0J4BnT\n3/sB/Oe4CWcRgEcAXA3gndBJ675DRJeVuM4PoHNpvQfAywB+SEQL2L1+GMDPAGwHcCmAK6GLQ7Wa\n/SeZa8UAXA7gCgCvBXCf/c96MhBRFMBvAbwFwN8CeDeAHgC/ZRPNjwBcTETV7Htk+nWvcikUfmL6\ncZO5/40AfkFEZ0y2P8IMQykl/+TflP6D/k/3gXFt34YugHIBa1MAPmm2O8zftYc4by+Ar4xrW2y+\n945x7esAbGJ/f8Ucd8m44y4w7bePa18P4LEJ+kHQKtlvAfg
da7/CnOsq1tYMIAfg4+bvAHSG3p8d\n4j6/C+BFABHWtgxAHsDbD/G9rwDoZX9fDSADYBlrC0FPSjebv1tN/z7Ijllt7qPD/P1m8/cbx13v\nIQA/Zn/vBPDP5X7/5N/U/BPJQZhSiCgE4CwA/z1u1+HSQm+HLgX6AyK6xGa5nGIUgAcm2Pdf4/7+\nf+2dTahVVRTHf0vN3iAN7ZGSQiDRK3AQWCE6MERMMSqbvGHkwM+RBJYT0UFCCIoi6EA0AvFj8DRC\nEOWFDYIGPoJAUMj0maDo+/D742lvOVj75n7nHK/vxhO58v/B5txzzj5rr30Ge+299jp3dQAzLFK+\nYmYTzGybmXUDD1JZCrxdIevYfw269xIJYGorhzbgDWBPHT3nJX0GzWxMeqfniMH3/TrPVcnpAs5l\ncgB+rclx96vAL0B79lw7cNbdT2ZyLgO/1eQkWZ0N6iOaCBkHMdK0AqOJATGneD4Ed+8n/ur4JeAg\ncDX53KeNoG79Hmlbq6jSdwzRH4iVSDuwCZgPfEC4yloqZF0rnA9k9V5Lx0t19GwFvuGxEaqVaQzN\nE/I0WoGZFXK+KsjZDyw0s/FmNorYizlQkDO5Qs76BvURTYSilcRI00O4P14vXC+el/CIXlpg8Z/4\n84DNhP9+Zp3H7qXj2ML1CVVN1JFTpe9DoMfMWoBPgFXuvrNWIQ2kjdKbjqUN+ow+YuWwq+JeI98x\n9AEngRUV9+5nvw8BO4g8593EyiY3Dn2EK+zzBtoWTY6MgxhR3P2hmf1BDDQ7s1uliJ06Mu4Sm6/T\ngbXZrXwGXuMKMYt9t3bBIv3lLGKgGy6LGepyWgx0ufu/Sd4osgHVzMYBn9J4IvkzxED7JfDzE+p0\nEhvQXZ6c+f+TTmKVc8Hdn7hyc/d+MztGrIy6iSRAfxbkfA3ccvemT2IjhoeMg3gWbAQ6zGwHMSud\nw1PSJprZImAJcBi4AEwBlhH+8BqngUVmdpTYnzjj7jfN7CdgddoPuEYMZHcb1HmhmX1H+OO/IFxc\nnwG4+/UU/rrOzG4Ag8C3RPTU+EYacfdBM1sD7DWzvcA+wsDMBfYlP/96YlP/iJntJlYLU5JOP7j7\niWE29yOwHDiRvlz+m3BrfQhcdvctWd0DhJvsOrC9IOc4kbjquJl9D5xK/X4PaHH3tYgXj+e9I67y\nYhYiE95F4A4RGjqf+tFKbUS45D/EDP0isfKYmNWfAfwO3M5lAZOIDfAbxMx3KdXRSj0Ven6UZH1M\nrBzupLZXFuq9RcygbxPGa01RJo+jlV4pPHueQhQPYYC6CLdYL3AEeDO7/056H32EofuLiI6aWued\nl/oIvApsTe91IPWtA5hdqDcu9d2BtgrZLwMbkh4DxAb1UbLoqap+qjRvUSY4IYQQJRStJIQQooSM\ngxBCiBIyDkIIIUrIOAghhCgh4yCEEKKEjIMQQogSMg5CCCFKyDgIIYQo8QjOnpL7Zd9EvwAAAABJ\nRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "# DEBUG:::len(data) = 4\n", + "# DEBUG:::len(imgs) = 4\n", + "# DEBUG:::len(data) = 4\n", + "# DEBUG:::len(data) = 4\n", + "# DEBUG:::mi-0.1*r = -28.3580941916\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAY0AAAEPCAYAAAC+35gCAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAIABJREFUeJzsnXmcXFWZ939P7dV7esu+NGQBBBNC\nWKOCDLjggisKMqMM74d3ZkBFZ0ad5VVhnHmdYUZcIL4y4yCOIuI2iMJoEALKHgIhEMhGts7We/VS\ne9V5/zjn3PPcruqkEtJd3Z3n+/n0p26fe+vec8+93ec8OymlIAiCIAiVEKh2BwRBEISpQ6jaHThe\nxOPxg+l0ema1+zFdiEajyGQy1e7GtEHG8/gi4zm+xGKxQ6lUala5fTRd1FNEpKbLvUwGiAgynscP\nGc/ji4zn+GLGl8rtE/WUIAiCUDHTRj1VQt9zAIDh4WGvKZfLAQACwaDXVnSbKBTy5jPntQXMtFrI\n6VXNoYND3r7hoSwAIFbjTkKBrDtf1orPbpgDCJtr6Em8LlTvzpfo1fuQ99oG+11fCtDXqW0ffbMA\nkT5vOBzx2iIUNedwYvwrm7d62wNDBwAACxY6KXTWvMWlJweA3qcAAJmsu79cTm8XVIH10W3ni3Y8\nXVuxoMcxmykCAFLJorcvk9b3ms2n3fkKbDuv9xeLbgGkzAMMmnENFt06KJfR/cum3f3nC+y7QX2s\nCrn+KfN1In1eUu7ZZVP6uN7uAa+tu6vL9a+or9M+q9VrmzN3LsrS+3vdn7x71llzfzn2/vH9OTOO\n+bwbs3xOb+fMZzbr7iWXy5tPfj72fuYLpt/ufMqMj/fpLo+Cua7v+nzbnCevXFvBSAP5vP7Msf5l\n0rovmZTrU77gLhgO6YdRW1fjtTU0ur8XH12/AQBkc2w8s/q+M1l3/+mM285k7P7S72SzduzY+2zO\nnc+zNnb/3niWGZ/Rn/p8+vgca8vl+fMz3ymUXq9Q1ONZLJZKW0TubyDI/teFQvpdjkTCXls0ov9f\nXPu5L5ecZyxE0hAEQRAqZsIlDSLaBWAIQAFAXim1ioiaAfwYwCIAuwBcoZTqJyIC8A0AlwFIAviE\nUmpDJdd58rmNAICRVMpryyTNqpWtLLNwq9BMOm0+2fIKesWVMyulWKjJ2xOL1AIAdm3c4rWlc06y\nyZpVQ94tbmAW5wgE9NDPbnar/K59u/U5skl3jVCLtz17/gIAwGsvvuyuYc4XhF6BBSnu7UsZKWv3\nrr1e2+6dbjsa1/f05rec5bWtipURYwBsfW0nAL+kkTWSVK7IpI8C285bScOthpSRBJRdwRedZGRX\nUSOphNeWTA962xlzPbtq1efTK6lQQEtVIfZKF4ykkWMrSQq468GsvDIF9w6kzf3l8/q8eWZrHRnS\n71JfT5/XNphwfY0biTNDbnUXqZ+BcvQP9Ou+cUnDSATZnBvDDJMSsnZlyle/5ut5IwkX2NjkPcmE\nSRrs+eS95+NWukUjYRTNJYrsT2G0VDO6L1lzvSxbOds+25V7JsPu10iWvudDrv/xuH6mM9wlQCH2\n/BjJpH425aSKdNrdM99OWUkjzaSPEknD9c1KBvlCqXQBuHH0Sx/+Nj5edpuPV7n9XBKx17CSht+s\no59dMOhkAStdAEAkrCWMaJS9P4WjtwtVS9J4q1JqhVJqlfn9CwB+p5RaAuB35ncAeCeAJebnOgDf\nnvCeCoIgCB6TRT11OYC7zPZdAN7H2r+vNE8BaCKi2dXooCAIglAdQ7gC8FvScuh3lFJ3AJiplDpg\n9h8EYOMt5gLYy77badoOsDYQ0XWjL/L0hmcB+FUjyZERvRF04tlI1qk/shndzlU8waAWkQtGXdHa\n4gypbW0x3eFup65I5ZyRdCRr1QOuX9bGGQnra1Cw1tvXNZg0b
e74bNBdryantxNpN9cPDZtrGLF7\nZMgZjnt79gFwqhAAyOadeN8Q1OJqH1PBJHPlxdV9B/S5cnku/utrpbNOBZjOuZNZ1USxwG5IGYO1\nGeNIuM7bZe13I0mn4htMuvFMZVLmvFydor8UCupnESKm7rIqBqb+gHEOAICiueDgiLteYkg/g+SI\nvkZ6xH13eGjEfLrjC2w8Wlr1vTS2OxVIW9Jtc4ZHtEMFV39Y1ViaxR8kfaoVo07JMJVQ1qqljFrJ\nDQ2U+aXIHBWKijlWFEvVh3bbvqdZ9j7Yvw+uYkpnS7fTfL+nIrKqIG50zvv6CQCRsPuX1NhgDOBB\npmKJu79NTsqop1LM0J1K6XFMpth4MqO73U6XGeOy6qkyhm6u2vNUR4VSQ7g1cJdT7WW5io+rp6x6\nmxvPzbn5c7ZYA3go5P7ewmFm9I7qv40473OZ8xyJakwab1JK7SOidgBriehVvlMppYgrNitAKXUH\nEX3nuPZSEARBKGHCJw2l1D7z2UVEvwBwDoBDRDRbKXXAqJ+sH+M+APPZ1+eZtiNyysnaaBxi7mV9\nvXrF3Z9wksFwxs3K+Zzerok5l8lYRK8eiwU9S9fGm719dbV6JbSsY5nX1j+839tOGldP7uaXTev5\nMBi053MuhLMXnKy/x1baxZxbOff26VXwgYNuZd91SN9TOqVXGcPDzKiX1EbaSMSdI8AMidG4lnKa\nGtu8NmIrUU5N3BiGmaE1GLBjy1avzGXSLpByGSct5YyrrTJL2UjErfIiEX2NALnXsjbiVpYRY+Dz\nGRqNpFEs6r7k8u55Zs0yajjN3TxHvO2UWTn3JZy02Z/QY2/dqVNMUrAuoootzyIR19f6vOkfdwke\nIwCtaFxf89xF1EgYIyNOWhwads96yEiRIyPsGaeNoTVnDaPu2gGzSUHXh0CA9YfsqpUbz41LdNa4\nRLOxSyb1dUd8q3XXFysV8dV8yrSl7AqeSSHW/TrAXERrapgkaAy7sRrnclufLv9+poyTywiTKkaS\nenuYjSfftvtTSeYIYSWispJGqSRR9K3ax5Y07Ge2nCH8KCQNz9XWPDK/e62VNLhLLXPZ9tx1vSYU\ny8fvHZYJtWkQUS0R1dttAG8D8BKAXwL4uDns4wDuM9u/BPAnpDkPQIKpsQRBEIQJZqIljZkAfqE9\naRECcLdS6n+I6FkA9xLRtQB2A7jCHP8AtLvtdmiX22smuL+CIAgCY0InDaXUawCWl2nvBfBHZdoV\ngOuP5VonzdNqJO6HPLNJi77prFM/8Qhhq+LZt88Zjnft0Jqyvl6tJoiwCO5Zs/Q1ZrQ5lcipCxZ5\n2zEzulbUBwAofWzOqFW6B913d+07CADoP+TUZwf3O9XJ4LC+l75hFxuQsbEIOb8BDwAySa2KaWxz\nUcmhqFNPpYb1/iwzBDdGmdGaMbe1AQCQZ6oMa/AcZCJ/X79T/xQyI6bf7vx9PXo7ZQzNQRY3UVen\njdn1ja4PDY1OXdFcr9VpgQBXKerPIWOw7upn6pyE7ldvHxvDQdfXwRE9dokh1+ehEa0CtEbULFeH\nGFE+ylVmEZZdwDzmAlMx0BjqqbDRHXFlS9EYSTNJ12rvAQD6+nQ/Ewl3j8kRG+tQ9PURAIIhvR0J\nO4VCJBIo2Q+mobDxSCljsB5m6rnBYT0mQyNOnTOc5Oogo55iEfhWPWWN51zVQubC3FjLwwZicf0+\npHyGdRa0wfCM3mVUe4NDLu5paIi9H8NGpcXuIWVUa1mjUvbHxFh1XqlKCnDqNt6WHxW7kSujivKr\npEqjw/nfnLLxGWbs+N+CNYBHmEqKx5RYdR+YSosCR69smiwut4IgCMIUQCYNQRAEoWKmbcLCeFiL\nagU40S5cq1VLqZxTeWzd4RLO7di+BwCw/tmXvLb9nXr/yKARW1nCtVqT5uDkpS4VyFlnneJtL5qj\n1Vd1dS6VRDSiPUF27NLXe
uyxje76B/S1UiNOnZJKcfHYeAix+8zntLidNqog7hlht4PRmNcWZn7w\nSeNFtteoxQBABVagHE3GU4xlTcCQSU44kHBj3N/nEjp27tHbe/e4Me423l7JIaMSYOqU2lrdz7Y2\nF7sxZ45L29LWolVk8Zi7n5zxGurvHTT30uuu36Xvry/BkkwOc599LcanMsxP38aC2HQibEBDJmYn\nEnHXV+T6b9PGJH1pKco7wodN7AGxjIBWxWQ9twCgr9epVg516Xvs63PqvmEzjjmjwiE2nmHzNxCL\nuWceZ9vW84sCzLvNpnIx9zA47NQ9CbPN1ZFcPWW9pvh42rQcXC1lCZox4B5fPGGfjcdJs785rqri\npFPW84x7m+mxSySc+jGRcOM5aFRVw+webUyHVadx76lCGfUU9zwrlkki6KmnCv54DX1/5RIWlvHG\n8iUl1GNl1UqhYGlMBj+8XPLCULBUpXU0iKQhCIIgVMy0lTSUibkIw82kuZw2pD72pEsPfv8DT3nb\niX5tYO7pc4ZoZZKwkfkssIR9wym9gunbwFbX+9x357RpCaO2xq2cQ2b1t2evDjfZ3OlW+WnT1yCL\nAufRuvmcXhH50kvnh81xul88UrQAbbAd4RkTmeE2aPp1sN+ttPpT5dcRYRNXkmcRrf39etW2Zcsh\nr+3FzS6Af88e3d7d5cZkyMRBZI2BlHg0sImp6e11fvm9PS4ivKVJS4oxZsy3K7cuY+ze2+OcGHqG\nSqOE/amzre+8258zEe35Qmk8QNAYHRVYKnG2P2eSMQ5nXOvAcPmVccA863zWHWsljN5etzI+eNA5\nPew/oO+tj0lzNgOAjXrnXvcR867FY2684jHuw19G0jArYistcali0EgVQ8nyEeujk/0BQL7oN1zz\nlW3QJNMLBXlsidu2K3Yu3fMYEY41YCe5kd48/0EmXfQPcKlDb/NYmKS5N5vEkBuuC4VSSYP7OZST\nNKy04Eka5aLFy0SVA05i4O9YwEgYVmpQTHNgn2OIZWDwlyUolLTxNO+VIpKGIAiCUDEyaQiCIAgV\nM23VU9ZYaSu6AcDmrVp1ctf37/XaDnY5cTYa1YbOJBO/bRW/oBGzQ8yvOWgMT0GWUG0g4YyUycEh\ncw4mXpvYCVuXYpipzwqmzxGWA99Xb8EaFZlKw56bvBz6TnwPF3Rfkt0u80qg2aUMaZ/Xoc/BZOyu\nbtd/DplEgyMjTrzfsVOr1tY/79R9L7/qrtXbq+8/OeLUAzZVhlX3Mc2Epzri45Vi6ojuLq2qCvi+\nY9QoJo3EAE9OZ1M4MPneXwvB+tAzEd4854BRQUVZ3EpDnR6Dmhr3TgVYGhGY9CcjKTaefe7eOVb7\nxVOC2DiMg4ecSmrffqdu239Aq/kG+t0zsukzbF2HIFNQWfVUNOr6G2PbYaMqIipVT6XNuzbMjPoj\nNiUIr1mR5WkqbCVAFqdiTm2N8vEoSxFTYxLosdQh8bjrX8hU7uOV6/jfJscmHeQxF8NGdTc46J5B\ngqmnbMqYoUGnnrLfd/FPpeopxeMmFN/Wn371lD92g8eNlTN0lynEx/62nRHbjqtSrGoobFoWnurH\nncf+6+Kp/QhlLngERNIQBEEQKmbaSho2IWCh4IyA6554HgCweftur62t2UVLZ8wqOMuM3XbladNL\nh1jyP8+FjSVFDLIITbviKrKlcdBEE4dM/vOocvN20qxqeKpmnxHMfHIvuahJqKhMBcJC0a2aLj3/\nXABALNzgtT390k53vWG9AsuxOthbXtmOctjEgH0D7vzbXtNpwF7dtsdr69zvXF7TKRut7FamRWOU\nD5o7i/A0zmaYeARsiruvZmyN8NI62GnzmfUlkCs1TPI03HZZx1dONsLWRk63tzsnho75+l2prWn0\n2vqH3fkSRsJIMrfPQwd7UI60SQTII9S7e7RkduCQM/4fOOgkja7uhPmOWy3bOujKrGBD7F2
zkkQ0\n497PFDdEe66XzNnCjElm1LjqNpMandfI5oZbM7ZcErTR6FaqaGx0Tg6NDXps47WuPEA47NyZAyZ1\nP185ZzNjSBrmXfMZwq2LMIsC90kdxkDO25JJW73TvLtlKvP5JQ3XB4XSanr23bPSfDkpxF98jzkF\nWLdapo2wooOV0MNMVLd/S9GwO55vR4zkZj8B//+SShFJQxAEQagYmTQEQRCEipm26qms8ZVPMB/s\nja/uAgCMMFfvJqY6sWopX0IyK357JfecesrWqQj4VFL8u/qzyI1lRi0VMJXm4kFuSLXJ0lgHfenu\nVUljAPo86awWsU86ud3b96fXXK6vEXbqlLr7HvG2H3xUR6MHo25/T095v+2MMb739DnVyB6jijrQ\n7dQp3OfdGpoLTMWhTLU47xaYfOyiV9395csYHf0ivtlnxjXIHBUisKoBpkJhupOi2V/khkFznoZ6\n/VwWL2LR/mcu0ftYhP/ufe7et+3S4zHIxmCgr3xcwYhJSjjAjLA9xsDd3esyAvT0ldb64Oovmxwx\nAKuecvdfKOh74O9kkEUD25gIrk45XFxBzqpTeNoB9uWQOV+IqT9q4/pvZEajVkG1t7t3raVFb9fW\nOPUUBZwqzdqgeSiBUuXfTy9OgyVYtLUzeMQ3fz+HvYhwbgj3q6fy5dRTZYzfHJ96yr6zNuaCf9dG\nd3OVFLNc27IyAaaesurHSDnHAhPtXxNncTnMsSBmYnSiEZ4VQCLCBUEQhHFk2koaIWMITwwzo6Jx\nJ80zwy9357Ounvk8ry6m25QxvvKVmve9XGn0MADkjJG74FscBXwf3ApWa1xu4+FSYzsApFJJXz/1\ntU2KZHOeN57W4e0Lx/WqKxBw0sFZK93+/n5tpN2xxxmvD3W58eLYVNl9LLq226yMB5nxMc0kN5vn\nusiq+dkI+4Ax4PGFml2V+XLtMCnASz3ukz7secw4MMnMZgQP+ZZG3F3ZRPiylXPQHNw2QxtpOxbO\n9PYtWay3mxqcpNHQ4NKkh0yFPJ7/aohJBZxhM2aJwdJo5QGWK2mw3CqYSaJ21W/vW/H307QV2T0H\nCqVLY98zGOU8wN2xy7mDBstUjoux1W9jnTZ8t7foaP65s1wusZntehzr6pykAbaqTmds9UDuDDGW\npGEM4dzl2rjPcjdda+gGgJFU2nzHtaW83FOlkoZNfX40koYa1ebLImUkDC5d8Ij4gGfsdvtjRkqo\nMa7TdUyqqDO522prnQtzTdw5Ftga4dEId2t225UikoYgCIJQMTJpCIIgCBUzbdVTNnq6p9epW3oG\ntDqlyKOwmerEiuK5MiqrWJn66zYamYur3M/aKj3yPJWyOTZojZBMXWC/yVVSYWaoCphU7CkmgueN\n2mjhPB1DcM7ZrjBiKqv9+slpULBogTNEfuyDFwEAHvvDC17b1r06yeCmV3y3imGjZukbcMnyrBqF\nqw+4ug+jomEBgIxR2lYRKzDDZs6oAHmKP8WM4p4h3Kce8Mv95PMc0NtBX6UyNp4BY9gMuvPZeIL5\ns3V1x44FzrFgpknZ3lDvRP4aFjFeYwJN2lm1QWvc/u2j8GHVVokhnqo76dsHAMm0U51kzPuWY++s\nHWPrrk/89gu+D72/jI7Jp05R/lgD39GqjOGWOx6Y5Hm2ZAAANDUYA7gxes+Z6VR7c0zly4Z6F7sB\ncuOZMe/2CKtkmE6XVwXbCoE2XkMfm/V9jv6+rc6XzfD09P6U6AVftoDDq6dGq6I4ZY3eZarm8aZw\n0MZdsNT2Vi1lougb6ty7WG+26+rcH3wNexY20SevlBgKHf0UIJKGIAiCUDHTVtIYSGlXxe17XKpu\nWws7QqwQinIrD9uaZyt970jznQKY0cwURcqPMYzBYrnoVeOG6i0p3LWsUTPCrLkpFp0OY7QqsNVt\ntKBX/m8+dzEA4LSlc9z1Sd8bTw9fDLp7r5+hVx6XvOk
0r+3C/BkAgP/+n4d8ve4f0tfp6XMRyoMm\nt5ZNJw748w5512TrVZv3xrbli8xInjfSVyDEvsuWzqp0pWfXd9ZeTizC3q7qFHNrDvDzmWcQZjW0\nW4zB9qSFWsJYNM/l6mpu0CviGpa/qY4ZEuvMinBmkzPsDhv/7q99Fz7K1a8etvXJmXSRY04WRTtW\nPsm2aG9WH8MkNyvEFrnxm0ld5VbGqmSnGy87nn7JrbToU02NW/02mTFrbdZZCWa1OkP47DYtfXBJ\ng2dUsCUArBMG4Iojjca6yKZ5ASgrSTDnDJ7LzRq5eYaBonXP9gal9F0bJX8dASNh2PcUfDz1py+K\nn/3viRotA3cssC7M9VbSqHVSRWO93q5nkkZtTakhPMIkl3KOPUdCJA1BEAShYmTSEARBECpm2qqn\nrDF7x85dXltmWBtueYSlr8avUR0Fg6UqDtuUzTnxN2sjjtnUyw23VCw12qlRVrIQewTWKE7MUGUr\n8nEKTKSc267VKW98w8kAgFoWsW5rTgeLrE+KRwzrc1uVDACEi+X9tvsTWt3X0+uq8A0blVWBVQZU\nLPra+aGzOAEvqN0YvXllQpvwLcAcB9i6RpVVT9n7sp8sOaRRo3DbLxfGA8HSqNo2MxYL5mq11KxW\nFpMR02oUrkLgy65Ygz5PU41TDxQLZTwoAAyaZJHDLG18MqVVVjmmklRMdWK1GBRiz9N0gLzfHbbC\noE/BUiYimY9PiWOBTz1lrkWlKhbAVeXjlQIb67WqrqVJOxG0Nrl3rbVRb3P1FK9fbdVqPD053+Zk\nrAE7W6p+4tXweLJBT2XEbiJoBlmF7L3yOKHShISc0U4EgP//gT5faVxLmCcVZI4vMRNPUcPGs84Y\ntutNTEYjM4Q3GfVUwxjqKWcI5+qpo5cbRNIQBEEQKkYmDUEQBKFipq16KjmixdQ9uzq9tmxei/2h\nqKuREAATXY3/Oy90r4z/VM74eucCTnVga2PwnIN5Js5nWF43S1H5xdUw8x4qGi8P7jvNJeFC0SRU\nzLm5ftZCnVBv/hzt7RMJOtHUit0xlpaEezcVjIooREwlFSjvTZEwMRn9/a6qnE1rAnZOXokvaHRR\nvGaGTahXyJmEeEy15eUJCTL1FPPbz/s1J3rb/mK+EmC6gaBZE5X1DmJ9DbM8I81GjTLTqKWaal0t\nknhIqwZCxN8Pdu6QjfsoVR+NZtiop0ZYTIZND1JksS5ce2C9aPgYe5s2/sUXE2T7yGOHSrd57Ib9\nuv1Uvju0CftYp3zqKd0e595lRo3SVFfn+wSARlNHo6GGp2Jh759NK8Or3RXK/FHBqa3K1ffgT4nH\nQYTM4EbC/Johs8+o7opMPVqm/sXhYlz4sapMjIv1XIpESuMwACBuEgzyBIQ2jsh5Tzn1k1VVNdS7\n8azzqaf0+XzqqTKxIkdCJA1BEAShYqatpJEa1ivYvt4Ea9XzfpzNzgFfBLP+TgOb2RefpA3Mm7fo\ninaJ4RF2vF0ZO796YpXHQC4a07ueWWjYSPMCK2BdyJfWyA76DNt6Pzc2z2zVvu4zTBU0yrlVU21E\nr5JDbOWTZcZ5Uvp8eZYEjsZw2x5MmDTTLMGe7U+A+ZnzVUiNMeq1NrnVpU2gNmAM6739LmLfq47o\ni5Nhq3pz23m+2LQr4jJV+uwXAj7DLY/I1dth1n+7crOr4HjYrdqiAf08fUMUYM/P9J+vzsfy6B8x\nCQt5tHLeRCHzpIt85emlxWaSqF0oWunEV2muaA3hXDIqk7GAOyN4ddNNPJHPiFyaFDJYJsaA1yGv\nM8+73kgT9XFmpI3qfTUR93fCn709M88AUCyXNREu6aTf0G1StfOodSb1Fkw/eZ1sK3V6iUrLZB8Y\nS3Itt79Y0sYlDSvpcOnC/b1bCaOWSxrGEF5rJI06Vl/dSh/1rI3vt6nRIyJpCIIgCBPF9JU0TL1m\nXmDFUstWO8U0K8q
S16voM5Yt9do+9N53AQD+392/AAA898JL3j67kub5dA51u4jpVE6vznmRJuty\nZwc+wB5B0ayCeWp2vgqx0bIpJu3U1+nvW7WoLT4FOJ2pYhHwfOWcN5JGgUWJ51DepXF4SN9LKsUj\nlPXqjqdu5jWJZ5r04qd0zPfamhq19LOz80DJvdoVn60dDfhdbgdNzq1khuW6srpsuzLmaZkKtshQ\nmVxVcCtRXpTGShp2hRwNuFVeCHqbebyCrzWDASM9sqjs4hiyRtoUDcpnS++fj2GMPS/reslzO9mV\nos2VxOvbW0mDR20TW1laGSLHRLesOY+NvOYR2NmstZu4++DR9NZ1NMZSb9s+18R0n+NRtvI1trYo\ni6oPlXEB5c+sOLa/KwC/VOlsFuVtBna8ec1sK+U7iab8u4OSveVzo7lU86Vdts8iHOL9K7Vf+CUN\n3f9az95ROtbljgeAmNkf8dWJF0lDEARBGEdk0hAEQRAqZtqqp3q6tQGcG/KsKyuvtFfMO3F+wdzZ\nAID3vP1ir+2Uk7RqZfnppwMA1m/Y5O2bM0tXcvvTqz/itW3ZttXb3vDSFgDA4OCw15bJaHWYTW+e\nyjpx2vaKJ6nLMtE1bFx8I/XODbRjke6fVWMFA8x90awJwsTdWrn4rj+pyC2b5SOYbe1lXsnMGpW5\nSqGJRfcu6dDp2s9avsxrq68zEcFG1TY45MbGitdLOuZ5bVFmGOwzCRL7B1169mTSjqe+x5GkU6ck\nhnSfh1ndaG5HtS6i9Szp2wwTsVxvXBV9qhNbh9xXU5ypIuy2z1hb3kXUptzmKg9rSPa7Sbrnb50d\nGup4gj99DzYpn6+qn3EE4BHHYaaKszZZns7eprkfMdXuhoZdMsohY7znCQS56tX2Ncr6bI3ituJc\njL3PVk0SCnCXbO5PbNU9rikwhnbKc2pgqiardszHuErKPQ/ralvglTrLuOmWUt7RoXwNe9tWqtqD\n9/fDkhQy9VSdUU9xFZR1w/U+fWNtxtgXVc72m3buYizqKUEQBGFcmbaShg2Ei7IaueGwrbHtVhZB\nZvj9wPveDgA4/5wzvDY778+arXMRBVnw3KxWXUTm7De6uttvOtutqi8+dDYAIDHg3EoTgyaHU7eu\nI51hksbWHdqI/siTL3ptWZ7W2hhY58xs9pqWLl4AAKgxgWUplpsnELRp2J1LME+FrKzxlOW3Gp0r\nx2u37qtsZWJXSLyYzCzmFHDGadpd2ebFAlyw2/4uncOqZYa7l1mtepV/zgrniNDe7lJpDxunhQEm\naQyPaKcA68La2+dcgrft0jXQt7x20GsbGHJjEbDBaMwt0Ra1sYbGSIhLZjYhE8+vxbZNkCMxxwOo\n8pKGldLCzChp3S2jbKXIA7WlHsWqAAAgAElEQVTam7V7dRNzFLCGX+u6yyUN6w5bU8OMpcxIGjJG\nbB4QZ4sZJYb0WPcOOEmwp1dv9w865xG2SEc8Zla6LHV/xFzDGpt57J59lQJMfCC+jDWSRsDnyFB+\nPMsVLMpbozf7ToS9v9YJo+hb/vsN4L6SXmUTfPHcdWUkDTO2doz99e31Z7lCVoCTMHyShpXmykgN\n3lj7nBNcZ+27zKWxUGiKSBpEFCSi54noV+b3DiJ6moi2E9GPiShi2qPm9+1m/6Jq9FcQBEHQVEs9\n9WkAvKDoPwO4VSm1GEA/gGtN+7UA+k37reY4QRAEoUpMuHqKiOYBeBeAfwTwWdJy+sUArjKH3AXg\nywC+DeBysw0APwVwGxGRKucwPYpoXIttPO1ypE+rMtIZp6JYttCpRy65WKuT6uJMvDQxAcqaqZUT\n709eoA22zXUsroKNaGyJVtUUck6dUBjV9Qi58617cicA4A/rnbF9JOtiMoIRLeJmC+58mbRWGQSh\n1Ri1EaeOK8b0tTJM/URMPxCKGpVB0qlT8mPk9onFStNe2whTbmjtWDjT2z7tFG2kX
zjPjfHAgFYf\nWQNpfa2rcjenvQUAcNKCVq9twbwW1zejnkuy5+cMwPrzYJdTXdkcO919Tj2YzLrvhoxKj6uYbK4x\nMpXKg8yJgEwchgqMEYdh1CzExjBQzkEfzmjJ1UVWFcVtky0z3LO2adptunHTYX1fxnDNq9RZNUVj\ng3sneH1z2wcewW7VU/0m8v9QT+nxvG59ksXtWONsNMJVmPqTbLp7prpTtho810n5ipx7+dnZ/vJ/\n+lETG5LPu/G0aimeSy7P/kBtxT5/7I69pv7w2eU99VT5PtjzFNgzz3vqKZsBwu2z/wt4frJQiDsW\n6Huxaj8AiEZtviqbVr3UscWvAmR5t4x6NchLFYzlWXAYqiFpfB3A5+DcSloADCil7H/eTgBzzfZc\nAHsBwOxPmON9ENF149lhQRAEQTOhkgYRvRtAl1LqOSK66HidVyl1BxF9h7fVmtnZRiADQJC6AbhZ\nGgDe/fbV3vbik/UqeaR7v9dmDVmJPu3Cy7PILlu2BAAQd4tBFFiEOczmIMuvFDG5pGqNm2e24HJj\nDST2mWu6tla20owayaC/xxVC2rZTR1afsURnuy3k3XcLxsieg5M0mDev15cYizDmGUU5jUZi44ZZ\nW5+4scG1LemY5W0vnKdXxvV1brwHBnRflMnuyyOEZ5gcVU0N3O3Q9cFmV+W5igC9bQ2MIbZyam3R\n52lqckuvNFuJhkN6u1h045MwhaWGU1rCm1HgeZFMXqKyhlPAW57yxfIYy7I6O3ZMEg6QdZF1X2pr\ndkWL2lutO7Abb/u8rKE5my1dlTY3umvMaOIFevT988V9xkgq9fVR0xeet8rmsnL3zF1yQ0HrcuvG\n265ubX1zXlQsb8Y9X8YNFXACiO+NHEPJYKPQedEqK7FxYzHPIKy8yH0uafi7QT5JQ5W0qTIR40Wf\npGFyglmJI8+l+tJ74UbxaNi617JnaiUMY9T21QML2v6xAeXb3jqdOWqMITUdjolWT60G8F4iugxA\nDEADgG8AaCKikJEm5gHYZ47fB2A+gE4iCgFoBNA7wX0WBEEQDBOqnlJK/Y1Sap5SahGAjwJ4WCn1\nMQCPAPiQOezjAO4z2780v8Psf7gSe4YgCIIwPkyWOI3PA7iHiL4C4HkA3zXt3wXwX0S0HUAf9ERT\nEW1t2vja3uz8/G02uwVz272mlae5GIvUYBcAIJ9zvunptBblug9qX/94iKlOGrTYny6wQjopZ7gu\nZM2xzDhZW6tVMBGbg5w9gVBUX6uuxomP569ycR8zW3WsyL33/t5r27T5NQDAm887DQAQDbjrKyPf\n58YwbmcymZI2XgCK0zJDq/lmsBiBGQ3aIDt7ZqPXtmCWi9NoNMkUSbGI7Lw2RJNRV/CYhLjxRw+w\nXO5cdWSla2bH8+JOisbYGYu5dVB9nd43s80ZjuvrecGfqOmL+07/gFZPdZvU7c0t7vh4sNRwXLZe\neflQFx82cj6Tdn2zfvTckMzVUy2NJl07c0bIZo0xt2jTyrtr2MhsHqdhVVIAUFert4PMUJzLm2dm\nupBl+sykeV+yPKEme37WoBuPu3fIGsBz5jmm8+6dy+R0v0JhHmHt7p28wkVHXifaZHwBdqyNRyjk\nmfFbHT6GZrQKinzR/66X7CRsy8ZilBrCc2bMeHJImxWAR7zz1P02kSFX94XNu2ELv/kN2VZ9WE4F\nBxSNetcXYT+Go8bhqNqkoZRaB2Cd2X4NwDlljkkD+PCEdkwQBEEYE0kjIgiCIFTMZFFPHXfajDqF\ne6fYiPnFi1x9h1ktTt3S323STbA0IwXjbTN3llZ3MccVL10Ey6SAEVYfIVzQF2yY4dQ3VnS1dQ+K\nEScezpql1U+LT17gtS07eY63vfpsHUfy0sbXvLbNW/X2nm6tUjtjKbuWqecRLLBqZUy1YGsH8ASJ\nhYKLY+BY9VRLk1OX2DGe0+biMNpYlb6YvSyrg
2595q2apCbG1BtGlZFhSSRTWZYSwSYMDPCke34V\nBk+LUFenHxavdxJm9SliUf1u9PQMem0jRgVzqF+rqWan3HiGTAwMHY16agzVio21yGadusaq6mJM\nPdXc4NRXjeZ+eE3pdEA/O1v1j6tLQsbziZd9J6bbsyoorp6y6qGIUYlwVVN9nVbnpTNMxcf0YVbt\nwZPkeWouUws+xWJskhmThibE65pzz6vSSoFjZLnx6lDwrBheQkJeZEWV83wrTUBJh40RKa+eKqpy\n6ilb70X3Jcf+/mxboUyNF8AlE4zytCCjvKb8MUYmKSJTSRUU+3sv2k/uXTVF0ogIgiAIU5NpK2nY\ntMuzzeodAGY06lVwx3wnaTRGnSSSK+jVUAHOWBgwdbbPOUenRn/wN095+/r6dNxHX787PjXiZvma\nmJ7Ro6ySmV1T5IzhMsxqiodNDEiI3PHzZzpJY9EcvaJ/z7vP89r+/fv3AwCe3qBTsi9Z5PbZNdvI\nsEvix1eV4aC+Tt0MNwZZ5gTAsdX0WpjUNNNEKM9qcSv5GXVO0gib++DV1KwjQJupbd7P0sbnjIQx\nlHSr0VicV7HTn1G2dC7Y1ZVZ5ub5Ss2kNW+oc6v15ibnGNHUpJ8tTz9+oEsnjTxk4nJ6Btzxtly7\nL8cbWyXalSGXdgjlJY0ak/68PsMN4caQzAyfjSxQxaZM56t7uzLNGAk3z1aZ1khaYKvrHItTyJoV\neJGtNr0Eezbega3srdG7tta970F2r3YBy9OlW6cG+7eVZM4XsbSRNIJM0iiy+tU2yeFYAeMMm2GA\nR38XjYTNz+mXEkrToLvnVRqTUq6NJ1B0kgZb6ZvqkTkjIYTyLDrdRon76tq7TRuzweN2QqMN4GWy\nGRSLXJtAbNv8jQT4d0TSEARBEMYRmTQEQRCEipm26qmGBq16mD/PVYFrMfUI5rQ7lRXybt4MGLVU\ngc2l0bg2/M6do0W/BfOd0Xfn7r0AgFNOc+fLZZkfuIm7CLDUCTbFQCqlYzuaIy453yFTbTA17GIt\nauLO6Njfpw31py5zqToWmCp3z23SSYNXneHUWS111gjHxWUnmtoYh7kRd750lpv1HSGj6omz/ljj\neCuLhamJuv0Bq55iqRHicb3d3KK/W9ftUqJkjEE+McLUU7VOVZc3qqC0z5CnxW+bkqF/mBlaTfI9\n7sturw8AjfUhc102xsOmOqCpKHiApWyJmecZY2kpOFZtFGHqrsAYy7KwcbTnaVQiRrXCq7HVsaqC\nNTGtngpwFYy5tWhUj1224Jwa8kZNwVVSqSyLewnYBHZcZWLeTzN2aV7l0qhneGxGgNUbsaoVXpPF\nqpZypi/cED6SMoZupi4pMFWSVcsEmSV8LLutTSNSCHJ1memnz/hdzhBe2mbVTtyoXM7QrBRT/5gK\nmEzbhrw1WJs/K/4+BIzqqDhGrASZm+UqN/us3DhwVZlVd/Hnya9nk0Yy9ZQaQ993GETSEARBECpm\n2koafSN6hfTM85u9tvmztBF26WInLSTSbuVjV21BZmhM9umVpgroldfZ56z09j3+rD73BWzxxjxS\n0devJYfEiIsYH7GutmapkGJpzl/c9BIAYN4il2SxEHEJCHce0IkUG+LO8Lxi+UkAgMeeXA8A6Bly\n7qM1JiKa2cKQzbmVeyFnjJIDXV5bivWV05fQxvQ0qwxXV6tXvs3MzTbCVs4K+lpFxY3T+tOmrG9g\nLqW9pqohlzRq087oalerfGGWyZmVsUmNPphwRv+EqfDHDa3BsJOkKKjvv67O9a+hUV9vcEjv6x9y\nqdZnDOvO1zI3VKhSF8kwk17HWhnbe7BJ/gBnCK9j6dJrmWTnRT2zc46WNLgkkTdSI4/gprRbmVpJ\nhEch50zlx5R5zraWPeAq3QWY226UGcJtnW7+vO19Fo3rZybn+pdMW/dRHlXNKu8VbbU/d77AGJZw\nW7GvyCQN5
bnvjpVgsliy30kY1sGCOQmo0vevUCr0onAYN13+PpQbL949+1x4HW87Fk565ob4UkmD\n988awn0JDcUQLgiCIIwnMmkIgiAIFTNt1VPbd+js6r9//Dmv7Z1vXQHAr07Jpp24HDTJCCMxFzuh\nTH0MMuLg6acv9fY9/PhGAMDezm6vreUMbhTXon0662TOnoQ+X3efVsVsTu/19qVMssOzVp7G+sTl\nS3sep745dZk2hG/ZvgMAsHPnQW/fonmL9XmzTsWSzrOqXSaiPZV0hneWW9HH3s5DAIDhIae+snUa\nZrDYjVjcxRUETT0ArlEIGfm71qi26pl6yiYJTAy6a9ikgwCQM/EL6Ywbk74BfWxv/7C5F/ddW32v\nidWQiMRYZ0w0dYy1NTfqfmWNaieVcu/HiNkOMeOvL1jZdCvnLwCBcljnhCCvn2CMuTVx9/7FY7xy\nnlFbEVcxmXoa5ru+hJM54yTAkuTxZHa5fKkh1t532qj7eASzNQTzOJRImKtOShPi5c37pmwcDTtf\nJuuvDQH4jeJWvcTVU2PFaUSMeorHTbgYGq5DKo3m5t9xxmT76Y4OmL8/ru7lCQ09tZSvCIcdn9Ja\nHPbRK6ZO8lVnsdUD2TsSCvij5H3JHM19KF+ciNtdsJUluawghnBBEARhPJm2ksbLL2oX1BgrbbXi\nDJ27KRpy1upInVtJ2kp2wSBz8zSGO2sbn8XSgBeNK+2OrU5amN/ijJiDCR1dXFvj6mYPdOuV8MMP\nPauvBbcyvvhinei3pdm54eYzbukfs50IuD7PMC6pJ8/VEsf+3c5FNLvKHM8M9SG20oxErOWOrVbG\nSKPe36urD0ZZavhZbdpVt63FSVf1PNdXyFa6Yy6KZjleY4y6NsoZcIvA4UEn+Qyyyn1Fk68qmXSr\n1UP7taPA/oO6NhexGtQz27VDQS27RoStxG0a7SBb6TaYaySMi3E25c6XHNbPojbm3o9ImBtpzUqO\nDWFxjPIvZNrDrD82f1KcSRoxJmnYWuyKrZzDpp+RsP1kEdUZkxqcrzZzPDV3aTU5G1luJY4iW4Vb\nCSsc5NIRqzlv09QzUSNr+poz11Xs/SoYo7vLmwWwLOasvnUFkkbIjo0vXN98u3yuKC/Cm91jway8\nbRn4APvb8Fb1rI1fz0aKqzLXs6v7AHMftgbwI5UI4t+x29YN2T8eVnIqH7Fu3wN/qnmRNARBEIRx\nZNpKGkP9esX9vve+w2s776yzAADRsFt6K3IuhTbYLc0CkEJJPURBMkPFXE7bW/VKtvegs2kc2Ofc\nZSPmOrmg+86ubVoqSQ/qto4OJ1W0mSC5fMpdn3lkohjXq4Isc6skaDtAuwlmfLWvk/VF2zLmz2XL\n9Zhb1QWNK2qA5x7iwV8M6wbaxHI3zZujJY2mBmbTYHmT7JI7z+woQXMtu+KvY/XJrdSRTLJgSB4s\naTICF9lqOZ82bqPmMC5VNJk8WPUxd/8RJkXanvIsqnGzmo+Z/En9zEYynNDPpbne9Tkcdefjtg5L\nIV9ecoua6/CMtTUxfV4rcQBAmLl/2/eTL0w9N18znlzSsDWmFc9FVOT6e7NaZXptux207p4sj1Q0\nYgMS3T3zjLY2V1SRSRNkA+EKtlgUWwWbbS59+AwidruCLLdWCuODc6SiSfDylrnrW5NKOZuBKlhJ\ngj9T/r6rku9Yc0XRc6/lUoDf3jEWPjsIWZdb/6f/uPKShr2034YikoYgCIIwjsikIQiCIFSMTBqC\nIAhCxUxbm8Z5Z+v6F81zXDK+4SGdLiPNPIjyzKZh9Y15nqTN6GQDRa3HDTId4KUXvwkAsGe3q6Q3\nu815Ss02nlYHD7nUHo31Wl992TvfAgCYP9fZQOpNOosQ07uHIvwR5c1+p7cPmuqArXW6bWjIeU9t\n2vgqAGBe6xu9tkjc3bt1f4+EnQ4dkfI6zkXztYdUXb3zPGswnluKV9pLMn2
uze/P3HcKpoIbGb1q\nXdzFacxq1eldhoZZUkEWp1FnPJsCrFZie6vW2dbXartFXZ2zN7Q015o+sziNKPf2sZ4tTO8bNh5S\nEW3LGGD69vSIflcKzM7Ca594BfeYjSA/Rh6R+lrdzzCzQdi6Hr64BfYdVbTePkxvbzZDxpc/GuL2\nBvtcS+s76PMY7yUWB2BtGMWCSTbJYiSs/SUWZ/fMvKesRw+345Ayf0u2Vg0bz5DVzzOlfcCnvzfn\nrUDtHvFsP2UqKXIdv0/fb7yXeGoPk+SQPA8pd3/OlsETVvK8H2bsWH+L5t6s7cDn2VTuHGXwmXTI\n/8mTOdpNPoZ+5yrFP0r6UykiaQiCIAgVM20ljYWLWgAAvUM9XtvB/XoVzv3os8pJFZ5XOF8dGu+D\n5lq9Ci6wBGYzGs0qeV6L1xblK3WzwJsxw0kTq9+0HAAQrzEr9oKTdOKmXjb3bojXuuh1r6Z31nn0\nxMyqO9iu+zl/Qbs73vjf87rDvCpcKqWjqHOsLnMqWb5G+Lw5NkkiW6kWdHLAYVYZ0Oet4fmtO+y3\nlRnHGJNy2kzq+toad41YxH3bVmeLx3mqcy1h2IUhjxuoMfEUkRivNMdWjnaVxdKJB4taampu0uOa\nTLp91kMnHOTSSmkacB6UHBjDO6XBSEQ8IjxkThDkHjvc88l6v/DVatF6wOnrcO8p64UV9KUe5/dv\no55Z0jvrIWSuEWAxGfb+eep3fxVBTT7oJBsyy3gyz5vHZNhb940nHw/rwTVWcAYjUsZ7CuXiJth+\n65xVLPMdu572x2GUSjNcirErfPYvAkVz/75Ide8sR1rlW0l47LZyEeY+aY1905NSfOMh3lOCIAjC\nOCKThiAIglAx01Y91VinVUcFVjUvb4LGiiwNQMRn1NKEw6zmhFEJZYyaIM8y+rXU6eFrjLv6HDmm\nbsobw1gk7q7RaIxpcdOWTLD6FBETJMdUDGFWW6F/QKuT8kzFEDQqmDjpfl361jPdjeT0cfE6Z2xG\nyKm2InGt+ioy42hdnTMac5oadHs67e4vZ5PaMUO3rwqZZ7TjAUh+NQJXPdTX6nuNRlwwHgXcuW2g\nWzTm1joxm3LD1kJh0rY13IYizNDPVCE2kV8xz8bTvA/NTSY5HVMV2q0aFkDIg+8Cpq9c3REcY1lm\nDeH+lA9e9JXXVvSlrEDpd0app3h/rDqP16soKq6eKjWsjzaM+tRnRgUU9N1zadqOfKCMesqsT/Ns\n/G0QHE/FEipTOyJYeokSvDQiPkNzuftjgXxGNcNVziXqKW4l97ZZvRRi91O01+CpQkYbzytTSfm2\nyqrPSlOkuASH7ugAU3VaVWkFsZKHRSQNQRAEoWLoWFyuJiNEpKbLvUwGiOiY3PGE8sh4Hl9kPMcX\nM75lBRGRNARBEISKkUlDEARBqBiZNARBEISKmTbeU7FY7BARzTzykUIlRKNRn9eT8PqQ8Ty+yHiO\nL7FY7NBY+6aNIdyyatUqtX79+mp3QxAEYUpBRM8ppVYd6ThRTwmCIAgVI5OGIAiCUDETOmkQUYyI\nniGijUT0MhHdZNo7iOhpItpORD8moohpj5rft5v9iyayv4IgCIKfiZY0MgAuVkotB7ACwDuI6DwA\n/wzgVqXUYgD9AK41x18LoN+032qOEwRBEKrEhE4aSjNsfg2bHwXgYgA/Ne13AXif2b7c/A6z/49I\nXCYEQRCqxoTbNIgoSEQvAOgCsBbADgADSimbaL8TwFyzPRfAXgAw+xMAWjAKIrqOiNYT0fru7u7x\nvgVBEIQTlgmfNJRSBaXUCgDzAJwD4JTjcM47lFKrlFKr2traXncfBUEQhPJUzXtKKTUA4BEA5wNo\nIiIbaDgPwD6zvQ/AfAAw+xsB9E5wVwVBEI6ZW2+9tdpdOK5MtPdUGxE1me04gEsBvAI9eXzIHPZx\nAPeZ7V+a32H2PyypbAVBmCp0dnbippt
uwr59+4588BRhoiWN2QAeIaIXATwLYK1S6lcAPg/gs0S0\nHdpm8V1z/HcBtJj2zwL4wgT3VxAE4ZhZs2YNEokE1qxZU+2uHDcmNPeUUupFAGeWaX8N2r4xuj0N\n4MMT0DVBEITXzc0334wHHngAsZiu7mgljHvvvRePP/44ACCdTuOyyy7DF7/4xar18/UguacEQRCO\nEwMDA7j00ktxuP9Bq1atwtq1a9HU1DSBPTsykntKEARhgmlqasLatWux8qzy/3sn64RxNMikIQiC\ncBzpzgRR//4vITRjrq996dKlU37CAGTSEARBOG78fEMn3nvb49i+rxeBfBKhUAhLlixBKBTC4OAg\nRkZGqt3F141MGoIgCK+TdK6Az//0RXz23o1I5QqYve9RNESDuPHGG7Fp0ybceOONKBQKuP3226vd\n1dfNtKncJwiCUA1e6x7GX/xwA149OIRIKICb3/sGvPbQRrzr5oewfPlyAMAtt9yCq6++Gg8++GCV\ne/v6Ee8pQRCEY+T+jfvxhZ+9iJFsAR2ttbj9qpU4bU5Dtbt1TFTqPSWShiAIwlGSzhXwlV9vxg+e\n2gMAeNcbZ+OrHzgD9bFwlXs2/lQ8aZiU5JcCOA/ATNN8CMCTAB6S9B6CIJwI7O4dwfV3b8BL+wYR\nCQbwf959Kq4+byFOlKoNFU0aRHQmgHsALAZQANADgKBTfoQAbCWijyqlXhivjgqCIFSb/3npAP76\nJy9iKJPH/OY41lx1Fs6Y11jtbk0oR/SeIqKZAH4DIA3gMgD1Sqk5SqnZAOoBvAtAFsBviKh9PDsr\nCIJQDbL5Im66/2X82Q82YCiTx9tOm4lfffLNJ9yEAVQmaXwSQArAm5VSg3yHUioD4EEiehLACwBu\nADA1E6oIgiCUobM/ievvfh4b9w4gFCD8zWWn4k9XLzph1FGjqWTSeBuANaMnDI5SaoCIvg3gg5BJ\nQxCEacJDmw/hL3+yEYlUDnOb4vjWVWdi5YIZ1e5WValk0lgMYEMFxz0HneJcEARhSpMrFPGvv9mC\n7zz2GgDg4lPa8W8fXo4ZtZEq96z6VDJpNELX5j4SQwCmpoOyIAiC4UAihRvufh7P7e5HMED467cv\nw3VvPgmBwImpjhpNJZMGAajUnVZGVRCEKcu6LV347L0b0TeSxcyGKG67aiXOXtRc7W5NKiqN0/gN\nEeWP07kEQRAmFflCEV9/aBtue2Q7AODNS1rx9Y+sQEtdtMo9m3xU8o/+pnHvhSAIQpXoGkzjU/c8\nj6de60OAgM9cshTXv3WxqKPG4IiThlJKJg1BEKYlT2zvwafueQE9wxm01kXxzStX4IKTW6vdrUmN\nqJQEQTjhKBQVbnt4O77+u61QCjj/pBZ848oVaK+PVbtrk54jThpE9C9Hc0Kl1OeOvTuCIAjjS89w\nBjfe8wL+sL0HRMCnLl6MT1+yFEFRR1VEJZLGFajce0oBkElDEIRJydOv9eKTP3oeXUMZNNdG8PWP\nrMBblrZVu1tTikpsGosmoB+CIAjjRrGo8P8e24F/++1WFIoKZy+agW9duRKzGkUddbRUop76LYBP\nKqW2sLaLATytlJr6BW8FQZjW9I9k8dl7X8AjW7oBAH924cn4q7ctRSgo1a6PhUrUU5dAR4UDAIgo\nCGAtgLNRWXoRQRCEqvDc7j7ccPfzOJBIozEexq0fWY6LT5l55C8KY3Ks3lNiMRIEYdKilMJ3/7AT\nX33wVeSLCmcuaMJtV63E3KZ4tbs25RGXW0EQphWJZA5/9dONWLv5EADg2jd14PPvOAWRkKijjgeV\nThrlvKekvKsgCJOKjXsHcP3dG9DZn0J9LIR//fByvP0Ns6rdrWnF68k99bty+aiUUlK9TxCECUUp\nhe8/uRtf+fVm5AoKZ8xtxO1XrcSClppqd23aIbmnBEGY0gymc/ibn23CrzcdAAD8yfkL8XfvOhXR\nULD
KPZueSO4pQRCmLC/vT+D6H27Art4k6qIhfPWDZ+Ddb5xT7W5Na8QQLgjClEMphbuf2YOb7t+M\nbL6IU2c3YM3HVqKjtbbaXZv2yKQhCMKUYiSTx9/+YhPue2E/AODKcxbgS+85DbGwqKMmApk0BEGY\nMmw5OIS/+OFz2NE9gng4iH/6wOl4/5nzqt2tEwqZNARBmBLcu34vvnjfS0jnilg6sw5rPrYSi9vr\nq92tE44JjXYhovlE9AgRbSail4no06a9mYjWEtE28znDtBMRfZOIthPRi0S0ciL7KwhC9UllC/ir\nn2zE5376ItK5Ij64ch7++/rVMmFUiYmWNPIA/lIptYGI6gE8R0RrAXwCwO+UUl8loi8A+AKAzwN4\nJ4Al5udcAN82n4IgnABs7xrG9T/cgC2HhhANBfAP7zsdV6yaX+1undBM6KShlDoA4IDZHiKiVwDM\nBXA5gIvMYXcBWAc9aVwO4PtKKQXgKSJqIqLZ5jyCIExj/vv5ffjbX2xCMlvASW21WPOxlThlVkO1\nu3XCUzWbBhEtAnAmgKcBzGQTwUEANg3lXAB72dc6TZtv0iCi6wBcBwALFiwYtz4LgjD+pHMF3HT/\nZvzomT0AgMtXzME/vmQHXMwAABgQSURBVP8M1EXFBDsZqMpTIKI6AD8DcKNSapDIJc1VSikiOqq8\nVkqpOwDcAQCrVq2SnFiCMEXZ2TOCv/jhBrxyYBCRUABfes9puOqcBeD/I4TqMuGTBhGFoSeMHyql\nfm6aD1m1ExHNBtBl2vcB4ArMeaZNEIRpxq9fPIDP/+xFDGfyWNhSg9uvWonT5zYe+YvChDLR3lME\n4LsAXlFKfY3t+iWAj5vtjwO4j7X/ifGiOg9AQuwZgjC9yOQL+NJ9L+H6uzdgOJPHZWfMwv2ffJNM\nGJOUiZY0VgP4YwCbiOgF0/a3AL4K4F4iuhbAbgBXmH0PALgMwHYASQDXTGx3BUEYT/b2JXH93Rvw\nYmcC4SDh7y47FR+/YJGooyYxE+099QeMXfXvj8ocrwBcP66dEgShKvz25YP4y59sxFA6j7lNcaz5\n2Eosn99U7W4JR0DcEQRBmFByhSL++cFX8R9/2AkAuOTUmfi3Dy9HY024yj0TKkEmDUEQJox9Aync\ncPcGPL9nAKEA4fPvOAX/680doo6aQsikIQjChPDIq134zL0vYCCZw+zGGG676kyctbC52t0SjhKZ\nNARBGFfyhSL+be1WfHvdDgDARcva8LUrVqC5NlLlngnHgkwagiCMGwcTaXzqR8/jmV19CBDwl29b\nhj+/8GQEAqKOmqrIpCEIwrjw2NZufObHL6B3JIv2+ii+deWZOPeklmp3S3idyKQhCMJxpVBU+Mbv\ntuFbD2+DUsCbFrfi6x9dgda6aLW7JhwHZNIQBOG40TWUxo33vIAndvSCCLjxkiX45MVLEBR11LRB\nJg1BEI4LT+zowafveQHdQxm01kXwjY+eidWLW6vdLeE4I5OGIAivi2JR4fZHtuPWh7aiqIBzO5rx\nrSvPRHtDrNpdE8YBmTQEQThmeocz+My9G/HY1m4AwA1vXYwbL1mCUHBCc6EKE4hMGoIgHBPP7urD\nJ+9+HgcH05hRE8atH1mBi5a1V7tbwjgjk4YgCEdFsahwx+9fwy2/2YJCUeGshTPwrSvPxJymeLW7\nJkwAMmkIglAxA8ks/vLejfjdq7pO2v9+y0n4q7cvQ1jUUScMMmkIglARz+/pxw13P499Ayk0xsP4\ntw8vxyWnzax2t4QJRiYNQRAOi1IK//n4Lnz1wVeQKygsn9+E2648E/Oba6rdNaEKiEwpCIKPW2+9\n1dtOpHL4sx88h3/41WbkCgrXrF6En/zv82XCOIERSUMQBI/Ozk7cdNNNuOKKK9Cn6vAXdz+HvX0p\n1EdD+JcPvRHvPGN2tbsoVBmZNARB8FizZg0SiQT+/O/+L16d+y5kC
0W8YU4D1nxsJRa21Fa7e8Ik\nQCYNQTiBufnmm/HAAw8gFtPR23s79wEAHrzv5wjWP4qZDVH01IZw18Bl+OIXv1jNrgqTBFJKVbsP\nx5VVq1ap9evXV7sbgjAlGBgYwKWXXorD/c2sWrUKa9euRVNT0wT2TJhoiOg5pdSqIx0nhnBBOEFR\nSmFTdw5Lr/lnRGYtKXuMTBjCaEQ9JQgnGMlsHj/fsA/fe2IXtncNAwDmfewf0X/3X6P/wG7vuKVL\nl8qEIZQgk4YgnCB09ifxX0/uxj3P7kUilQMAzGyI4o/PW4gL5wbwth9mEAqF0NHRgZ07d2JwcBAj\nIyMyaQg+RD0lCNMYpRSe2dmHP//Bc3jLvzyC7zz2GhKpHFbMb8I3ProCf/j8xbjh4iX44Z3/jkKh\ngBtvvBGbNm3CjTfeiEKhgNtvv73atyBMMkTSEIRpSCZfwP0bD+DOx3fi5f2DAIBQgPDuN87GNasX\n4cwFM3zHNzQ0YO3atVi+fDkA4JZbbsHVV1+NBx98cML7LkxuxHtKEKYRXUNp/OCpPbj76d3oGc4C\nAJprI7jqnAW4+ryFmNUohZGE8lTqPSWShiBMA17sHMCdj+/Cr17cj1xBLwRPnd2Aa1YvwnuXz0Es\nHKxyD4XpgkwagjBFyRWK+J+XDuLOx3diw54BAECAgLe/YSauWd2BczuaQURV7qUw3ZBJQxCmGP0j\nWdz9zB784KndOJBIAwDqYyF89Oz5+JPzF0kyQWFckUlDEKYIWw4O4c7Hd+IXz+9DJl8EAJzUVotr\nLliED6ych9qo/DkL44+8ZYIwiSkUFR5+tQt3Pr4TT+zo9dovXNqGa1YvwluWtCEQEBWUMHHIpCEI\nk5DBdA4/Wd+Ju57YhT19SQBATSSID66ch49fsAiL2+uq3EPhREUmDUGYROzsGcFdT+zCT9bvxUi2\nAACYNyOOT1ywCB9eNR+N8XCVeyic6EzopEFE/wng3QC6lFKnm7ZmAD8GsAjALgBXKKX6Sbt9fAPA\nZQCSAD6hlNowkf0VhIlAKYXfb+vBnY/vxCNbur32805qxjWrO3DJqTMRFBWUMEmYaEnjewBuA/B9\n1vYFAL9TSn2ViL5gfv88gHcCWGJ+zgXwbfMpCNOCcokDI6EA3rdiDj5xQQdOm9NQ5R4KQikTOmko\npR4jokWjmi8HcJHZvgvAOuhJ43IA31c6ZP0pImoiotlKqQMT01tBGB9s4sAfPbMHg+k8AJc48Mpz\nFqClLlrlHgrC2EwGm8ZMNhEcBDDTbM8FsJcd12naSiYNIroOwHUAsGDBgvHrqSAcI0opPLurH3c+\nvhO/efkgiiZ7z4r5Tbhm9SJcdsZshIOSP1SY/EyGScNDKaWI6KiTYSml7gBwB6BzTx33jgnCMTJW\n4sD3LJ+NT1xQmjhQECY7k2HSOGTVTkQ0G0CXad8HYD47bp5pE4RJT9dgGj94ajfufmaPL3Hgx87V\niQNnNkjiQGFqMhkmjV8C+DiAr5rP+1j7DUR0D7QBPCH2DGGys3HvAO58fCd+vemAJA4UpiUT7XL7\nI2ijdysRdQL4EvRkcS8RXQtgN4ArzOEPQLvbbod2ub1mIvsqCJUiiQOFE4mJ9p66coxdf1TmWAXg\n+vHtkSAcOzZx4H89uRsHByVxoHBiMBnUU4IwpXj14CC+9/guSRwonJDI2y0IFTBW4sCLlrXhmtUd\nePPiVkkcKJwQyKQhCIdhMJ3Dvc/uxfef3O1LHPihs3TiwJPbJHGgcGIhk4YglOG17mHc9cQu/PS5\nTkkcKAgMmTQEwSCJAwXhyMikIZzwHC5x4DWrO3DqbEkcKAgWmTSEExZJHCgIR49MGsIJhVIKz+zs\nw/ee2OVLHHjmgiZcs7oD7zx9liQOFITDIJOGcEKQzhVw/8b9+N4Tu0oSB16zugMr5jdVuYeCMDWQ\nSUOYFtx66634zGc+U9JuEwf+8
Ok96B3RiQNbaiO4ShIHCsIxIZOGMOXp7OzETTfdhCuuuAJz584F\nIIkDBWG8kElDmPKsWbMGiUQCt91+Oy74yA1lEwf+6eoOnCOJAwXhdSOThjDluPnmm/HAAw8gFtOq\npb2dnQCAW79zF279wf0AgEAxjwsuugR3fvOfJXGgIBxHSCeTnT6sWrVKrV+/vtrdEMaR/v5+XHjx\nJdj0woYxjzlz5Vl4+HcPoalJDNyCUAlE9JxSatWRjhNJQ5gSjGTyeGJHLx7d2oV1W7oxcOHnEDn4\nf5A9uK3k2FWrVmHt2rUyYQjCOCCThjApUUphe9cw1m3pxrqtXXh2Zz+yhaK3v7WlGe/6yn/gl/9w\nDfbvfs1rX7p0qUwYgjCOyKQhTBqGM3k8vr0H67Z047Gt3dg3kPL2EQEr5jfhomVtuGhZO86Y24gD\n+/fhv/8+iVAohI6ODuzcuRODg4MYGRmRSUMQxgmZNISqoZTClkNDWLelG49u6cb63X2eeyyg4yku\nXNqGC5e14S1L2jCjNuL7/m233YZCoYAbb7wRX/nKV/D3f//3uOuuu3D77bfjn/7pnyb6dgThhEAm\nDWFCGUrnPGni0a3dOJBIe/sCBJy1cAYuMhPF6XMaD1vYqKGhAWvXrsXy5csBALfccguuvvpqPPjg\ng+N+H4JwoiLeU8K4opTCKweGsM4YsDfs7ke+6N65tvqoliaWtuHNS1rRVBM5zNkEQRgvxHtKqBqJ\nVA5/2NaDdVu68OjWbnQNZbx9wQDh7EUzcNGydly4tA2nzW6QMqmCMIWQSUN43RSLCpsPDOLRrd1Y\nt6ULG/YMoMCkifb6qGfAXr24VareCcIURiYN4ZgYSGbx+23ONtEz7KSJUIBwbkezJ02cOrte0ncI\nwjRBJg2hIopFhZf2J7xJ4vk9/WDCBGY1xIw00YYLFreiISbShCBMR2TSEMakfySLx7Zpd9jHtnWj\nZzjr7QsHCecubPbUTktn1ok0IQgnADJpCB6FosKmfQms26I9nTZ2DoA7181tiuPCZdrTafXiVtRF\n5fURhBMN+as/wekdzuCxbd1Yt6Ubv9/Wg74RJ01EggGc09GMC5dqtdPidpEmBOFERyaNE4xCUeGF\nvQN4dGs3Ht3ShRf3JXzSxLwZca1yWtqO809uQa1IE4IgMOQ/wglA91AGj23txrqt3fj9tm4MJHPe\nvkgo4Hk6XbSsDSe11oo0IQjCmMikMQ3JF4p4Ye+AlyH2pX2Dvv0Lmms8T6fzTmpBTUReA0EQKkP+\nW0wTugbTOrhuazd+v7Ubg+m8ty8aCuC8k1o8T6eO1toq9lQQhKmMTBpTlFyhiOf3DHieTpsP+KWJ\njtZaz4B93kktiIWDVeqpIAjTCZk0phAHE2mvct0ftvdgiEkTsXAAF5zciouMS+zCFpEmBEE4/sik\nUSVuvfVWfOYznznsMblCEet39WPd1i48uqUbrx4c8u0/qa0WFy3VBuxzOppFmhAEYdyRSaMKdHZ2\n4qabbsIVV1yBuXPn+vbtH0h5if8e396L4YyTJmoiQVxwcgsuXNaOi5a2YX5zzUR3XRCEE5xJP2kQ\n0TsAfANAEMB/KKW+WuUuvW7WrFmDRCKBNWvW4Es3/QPW7+rDOjNRbD007Dt2SXudZ8BetWgGoiGR\nJgRBqB6TuggTEQUBbAVwKYBOAM8CuFIptXms70zGIkw333wzHnjgAcRiMQDA7r2d2PXaDjTMnI9c\ntAlFpaDyOcRPXoW5b70aqxe3euk65s0QaUIQhPFnuhRhOgfAdqXUawBARPcAuBzAmJPGZORTn/oU\n7r//fjz66KO+9sFDewHsBQDMXfIGfPebN+GtZ3QgEgpUoZeCIAhHZrJPGnNh/6tqOgGcO/ogIroO\nwHXm12Ei2jIBfTtaggCWAignOiT3bXt56ztWLi5McJ+mE60AeqrdiWmCjOXxZaqM58JKDprsk0Z
F\nKKXuAHBHtftxNBDR+kpEQaEyZDyPHzKWx5fpNp6TXQ+yD8B89vs80yYIgiBUgck+aTwLYAkRdRBR\nBMBHAfyyyn0SBEE4YZnU6imlVJ6IbgDwG2ibwH8qpV6ucreOF1NKnTYFkPE8fshYHl+m1XhOapdb\nQRAEYXIx2dVTgiAIwiRCJg1BEAShYmTSmGCI6B1EtIWIthPRF6rdn6kOEf0nEXUR0UvV7stUh4jm\nE9EjRLSZiF4mok9Xu09TGSKKEdEzRLTRjOdN1e7T8UBsGhPIsaRFEQ4PEb0FwDCA7yulTq92f6Yy\nRDQbwGyl1AYiqgfwHID3yft5bJCum1yrlBomojCAPwD4tFLqqSp37XUhksbE4qVFUUplAdi0KMIx\nopR6DEBftfsxHVBKHVBKbTDbQwBegc7KIBwDSmMzkIbNz5RfpcukMbGUS4sif5TCpIOIFgE4E8DT\n1e3J1IaIgkT0AoAuAGuVUlN+PGXSEATBBxHVAfgZgBuVUoNHOl4YG6VUQSm1AjqbxTlENOVVqDJp\nTCySFkWY1Bjd+88A/FAp9fNq92e6oJQaAPAIgHdUuy+vF5k0JhZJiyJMWozh9rsAXlFKfa3a/Znq\nEFEbETWZ7Ti0A8yr1e3V60cmjQlEKZUHYNOivALg3mmUFqUqENGPADwJYBkRdRLRtdXu0xRmNYA/\nBnAxEb1gfi6rdqemMLMBPEJEL0IvGNcqpX5V5T69bsTlVhAEQagYkTQEQRCEipFJQxAEQagYmTQE\nQRCEipFJQxAEQagYmTQEQRCEipFJQ6gqRHQ6ESkiuoi1KVOxsdJzfI5//zj16yLTjykfwUtEXyai\nnipefxcR/Wu1ri8cX2TSECYj5wP4yVEc/zkAF41PVwRB4EzqGuHCiUk1U0ebqOhota4vCJMdkTSE\nCYWI/oKI9hLRCBHdDx01O/oYn3qKiN5ERL8nokHz8wIRfdjs2wWgBcCXzPeUUS0tMtvvHnXu7xHR\nevb7l4mox1zjWQBpAB9mX5lDRL8y/d1DRH826nznE9EvieiAOeYFIvrYqGM+YfpyBhGtNce9SkQf\nKHPv7zeFe1JE1EtEDxDRQrb/dCL6NRENmZ+fENGsigbff51mIrqDiA4RUZqIniCic9n+dURUIu0R\n0S1mHMj8HiOifzHPNGMKDkkU+TRGJg1hwiCiywHcDuBXAD4AYBOA/zzCdxrM8a8B+CCADwH4LwBN\n5pD3A0hA50w63/xsOMqu1QC4C8B/QCeUe4bt+y6AF01/HwDw7VET0UIAjwO4FsB7oJP93UlEV5a5\nzt3QucbeD2AbgHuIaB671z8G8HMAOwBcAeAa6KJdbWb/YnOtGICrAXwCwBsA3G//iVcCEUUBPATg\nEgB/DeB9ALoBPMQmoB8DuIyIatn3yPTrXuVSSfzU9OOfzP0/C+CXRLSi0v4IUwyllPzIz4T8QP8z\nfnBU279DF6a5iLUpADeY7VXm9/rDnLcHwJdHtS0y33v3qPbvAVjPfv+yOe7yUcddZNrvGNW+FsBT\nY/SDoFW+3wHwMGv/hDnXn7K2FgB5AH9mfg9AZzz++WHu878AbAEQYW1LABQAvOsw3/sygB72+7UA\nsgCWsLYQ9GR1i/m9zfTvo+yY8819rDK//5H5/cJR13sMwE/Y77sA/Gu13z/5OT4/ImkIEwIRhQCs\nBHDfqF1HSr+9A7qc691EdLnNGnqcUQAeHGPfL0b9/nMAZ5Eu3QsimkFE3ySi3QBy5uc6AEvLnOu3\n3gWV6oUuzGMljWUA5gC48zD9vMT0p0hEITOmO6H/Ka86zPfKnec5ADvZeQDgUXsepVQ3gIcBfIR9\n7yMAdiil1rPzHATwuD2POdfvjrI/whRCJg1homgFEIT+R8kZ/bsPpVQ/dErpMIB7AXQbnf5Jx7Fv\n/UqX3y1Huf6GoO8H0JLLRwDcAuBtAM6GVrnFypxrYNTvWXZ
ci/k8cJh+tgL4PNzkZH9Ogr9Oy5Fo\nBXBemfNcM+o89wB4JxE1EFEA2tbz41HnmVXmPF8+yv4IUwjxnhImih5oNUr7qPbRv5egtDfVO0jX\nJLgEwNeg7QPnHeZrafMZGdU+o9wlDnOecv3NA+ghotj/b+/eXaOIojiOf4+NFkbBxiKCjRAFC0Gw\n0EKxUCSixn9AsPBZiRBMI1ooWIkgJIWICCHYREUCwRBIaeEiCIIBERNTiJjVRJNg0ByLcxfH2ck6\naxGJ/j4wbNi5ufMo7rmPs1zgIHDW3XtqBVID26zJ9FmXGJBRJUYatwrONfM7jCrwFDhdcO5r5u/7\nQDexj/0YMRLKBo0qMaV2pIlryzKnoCFLwt2/mdkzogHqyZyqyyBqUMccsei7FejKnMr22GveE73e\nLbUvLLYx3Uk0gGV18OvUVQdQcffvqb4VZBpaM2sBDtE4EBUZJRrgY8CjRcoMEwvfFU+LBX9omBgV\njbv7oiM9d/9oZo+JkdQYsTnT81w954Ev7r7sNxeSchQ0ZCldBfrNrJvoxe7mN9tfmlk7cBx4AIwD\nrcBJYr695iXQbmaDxPrHqLt/NrOHwLm03vCJaODmmrznA2Z2hZjvP0pMlR0GcPeplKZ70cymgQXg\nApHNtaaZi7j7gpl1Ar1m1gv0EYFnL9CX1hEuEckEA2Z2mxhdtKZ7uuPuIyUvdxc4BYykX2q/JqbH\ndgDv3P16puw9YrptCriZq2eI2FBsyMyuAS/Sc28DVrl7F/Lv+dsr8Tr+r4PYuXACmCVSWPfROHuq\njUjrfEv06CeIkcq6TPntwBNgJlsXsJ5YeJ8mesonKM6e+lBwn3tSXfuJkcZsuvaZXLlNRI97hghq\nnfk6+Zk9tTr3v2/IZRURgalCTK9NAgPAxsz5zel9VIkA+IrI1trQ4J3XPSOwFriR3ut8erZ+YFeu\nXEt6dgfaCupeCVxO9zFPLIwPksnmKnpOHcv30M59IiJSmrKnRESkNAUNEREpTUFDRERKU9AQEZHS\nFDRERKQ0BQ0RESlNQUNEREpT0BARkdJ+AI7e0cWGSVZ0AAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "Er = vis.Experiment_reader()\n", + "pth = \"out/blur\"\n", + "Er.set_path(pth)\n", + "Er.read_all_expriemts()\n", + "ank=1.0/8\n", + "ul = Er.get_data(\"Inc\")['blur_001'][0].max()\n", + "def fd(x):\n", + " return ul-x\n", + "Er.print_param_description(0)\n", + "ank=1.0/8\n", + "Er.annotated_plot2(0,'Inc', zoom=0.8, pad=0, max_hight=3, xybox=None, fd=fd,\n", + " figposx=[ank,3*ank,5*ank,7*ank], figposy=[fiy,fiy,fiy,fiy], xlim=[-0.5,3.5], ylim=[0,16],\n", + " ylabel=\"IND\", add_points=True) #n_datapoints=4)\n", + "Er.annotated_plot2(0,'Fid', zoom=0.8, pad=0, max_hight=300, xybox=None,\n", + " figposx=[ank,3*ank,5*ank,7*ank], figposy=[fiy,fiy,fiy,fiy], xlim=[-0.5,3.5], ylim=[0,500], ylabel=\"FID\", add_points=True)\n", + "\n" + ] + }, + { + "cell_type": "code", + 
"execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "rect_001: {'alpha': 0.0}\n", + "# DEBUG:::len(data) = 4\n", + "# DEBUG:::len(imgs) = 4\n", + "# DEBUG:::len(data) = 4\n", + "# DEBUG:::len(data) = 4\n", + "# DEBUG:::mi-0.1*r = -0.884866857529\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYYAAAD8CAYAAABzTgP2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAADqFJREFUeJzt23+o3fV9x/Hnq7k0axE00WitMbu2\nCiNu0MJBKdvA1V9x0EZa/7D7o2FryR+rf6yl0BTHtOof6tZZSruN0BZCYdXOURqQItFWGGNYT6yj\nzdo0t7HFpLZNjQhOqmR974/7dTufy4k3ud9z78nR5wMO93y/38+99/3xgs97zvcmVYUkSa9607QH\nkCSdWQyDJKlhGCRJDcMgSWoYBklSwzBIkhqGQZLUMAySpIZhkCQ15qY9wEqcd955NT8/P+0xJGmm\n7N+//9dVtWm5dTMZhvn5eYbD4bTHkKSZkuRnp7LOt5IkSQ3DIElqGAZJUsMwSJIahkGS1DAMkqSG\nYZAkNQyDJKlhGCRJDcMgSWoYBklSwzBIkhqGQZLUMAySpIZhkCQ1DIMkqWEYJEkNwyBJahgGSVLD\nMEiSGoZBktQwDJKkhmGQJDUMgySpMZEwJNmW5GCShSS7xlxfn+SB7vrjSeaXXN+S5MUkn5zEPJKk\nlesdhiTrgC8CNwBbgQ8l2bpk2UeA56vqUuA+4J4l1/8e+FbfWSRJ/U3iFcMVwEJVHa6qV4D7ge1L\n1mwH9nTPHwSuThKAJDcCTwMHJjCLJKmnSYThIuCZkeMj3bmxa6rqBPACcG6Ss4BPAZ+ZwBySpAmY\n9s3n24H7qurF5RYm2ZlkmGR47Nix1Z9Mkt6g5ibwNY4CF48cb+7OjVtzJMkccDbwHHAlcFOSe4Fz\ngN8m+U1VfWHpN6mq3cBugMFgUBOYW5I0xiTC8ARwWZJLWAzAzcCfLVmzF9gB/AdwE/Dtqirgj19d\nkOR24MVxUZAkrZ3eYaiqE0luAR4G1gFfqaoDSe4AhlW1F/gy8NUkC8BxFuMhSToDZfEX99kyGAxq\nOBxOewxJmilJ9lfVYLl10775LEk6wxgGSVLDMEiSGoZBktQwDJKkhmGQJDUMgySpYRgkSQ3DIElq\nGAZJUsMwSJIahkGS1DAMkqSGYZAkNQyDJKlhGCRJDcMgSWoYBklSwzBIkhqGQZLUMAySpIZhkCQ1\nDIMkqWEYJEkNwyBJahgGSVLDMEiSGoZBktQwDJKkhmGQJDUMgySpMZEwJNmW5GCShSS7xlxfn+SB\n7vrjSea789cm2Z/k+93H905iHknSyvUOQ5J1wBeBG4CtwIeSbF2y7CPA81V1KXAfcE93/tfA+6rq\nD4AdwFf7ziNJ6mcSrxiuABaq6nBVvQLcD2xfsmY7sKd7/iBwdZJU1feq6ufd+QPAW5Ksn8BMkqQV\nmkQYLgKeGTk+0p0bu6aqTgAvAOcuWfNB4MmqenkCM0mSVmhu2gMAJLmcxbeXrnuNNTuBnQBbtmxZ\no8kk6Y1nEq8YjgIXjxxv7s6NXZNkDjgbeK473gx8A/hwVf3kZN+kqnZX1aCqBps2bZrA2JKkcSYR\nhie
Ay5JckuTNwM3A3iVr9rJ4cxngJuDbVVVJzgEeAnZV1b9PYBZJUk+9w9DdM7gFeBj4IfD1qjqQ\n5I4k7++WfRk4N8kC8Ang1T9pvQW4FPibJE91j/P7ziRJWrlU1bRnOG2DwaCGw+G0x5CkmZJkf1UN\nllvnv3yWJDUMgySpYRgkSQ3DIElqGAZJUsMwSJIahkGS1DAMkqSGYZAkNQyDJKlhGCRJDcMgSWoY\nBklSwzBIkhqGQZLUMAySpIZhkCQ1DIMkqWEYJEkNwyBJahgGSVLDMEiSGoZBktQwDJKkhmGQJDUM\ngySpYRgkSQ3DIElqGAZJUsMwSJIaEwlDkm1JDiZZSLJrzPX1SR7orj+eZH7k2qe78weTXD+JeSRJ\nK9c7DEnWAV8EbgC2Ah9KsnXJso8Az1fVpcB9wD3d524FbgYuB7YB/9B9PUnSlEziFcMVwEJVHa6q\nV4D7ge1L1mwH9nTPHwSuTpLu/P1V9XJVPQ0sdF9PkjQlkwjDRcAzI8dHunNj11TVCeAF4NxT/FxJ\n0hqamZvPSXYmGSYZHjt2bNrjSNLr1iTCcBS4eOR4c3du7Jokc8DZwHOn+LkAVNXuqhpU1WDTpk0T\nGFuSNM4kwvAEcFmSS5K8mcWbyXuXrNkL7Oie3wR8u6qqO39z91dLlwCXAd+dwEySpBWa6/sFqupE\nkluAh4F1wFeq6kCSO4BhVe0Fvgx8NckCcJzFeNCt+zrwX8AJ4GNV9T99Z5IkrVwWf3GfLYPBoIbD\n4bTHkKSZkmR/VQ2WWzczN58lSWvDMEiSGoZBktQwDJKkhmGQJDUMgySpYRgkSQ3DIElqGAZJUsMw\nSJIahkGS1DAMkqSGYZAkNQyDJKlhGCRJDcMgSWoYBklSwzBIkhqGQZLUMAySpIZhkCQ1DIMkqWEY\nJEkNwyBJahgGSVLDMEiSGoZBktQwDJKkhmGQJDUMgySpYRgkSY1eYUiyMcm+JIe6jxtOsm5Ht+ZQ\nkh3dubcmeSjJj5IcSHJ3n1kkSZPR9xXDLuDRqroMeLQ7biTZCNwGXAlcAdw2EpC/q6rfA94N/GGS\nG3rOI0nqqW8YtgN7uud7gBvHrLke2FdVx6vqeWAfsK2qXqqq7wBU1SvAk8DmnvNIknrqG4YLqurZ\n7vkvgAvGrLkIeGbk+Eh37v8kOQd4H4uvOiRJUzS33IIkjwBvG3Pp1tGDqqokdboDJJkDvgZ8vqoO\nv8a6ncBOgC1btpzut5EknaJlw1BV15zsWpJfJrmwqp5NciHwqzHLjgJXjRxvBh4bOd4NHKqqzy0z\nx+5uLYPB4LQDJEk6NX3fStoL7Oie7wC+OWbNw8B1STZ0N52v686R5C7gbOCves4hSZqQvmG4G7g2\nySHgmu6YJIMkXwKoquPAncAT3eOOqjqeZDOLb0dtBZ5M8lSSj/acR5LUU6pm712ZwWBQw+Fw2mNI\n0kxJsr+qBsut818+S5IahkGS1DAMkqSGYZAkNQyDJKlhGCRJDcMgSWoYBklSwzBIkhqGQZLUMAyS\npIZhkCQ1DIMkqWEYJEkNwyBJahgGSVLDMEiSGoZBktQwDJKkhmGQJDUMgySpYRgkSQ3DIElqGAZJ\nUsMwSJIahkGS1DAMkqSGYZAkNQyDJKlhGCRJjV5hSLIxyb4kh7qPG06ybke35lCSHWOu703ygz6z\nSJImo+8rhl3Ao1V1GfBod9xIshG4DbgSuAK4bTQgST4AvNhzDknShPQNw3ZgT/d8D3DjmDXXA/uq\n6nhVPQ/sA7YBJDkL+ARwV885JEkT0jcMF1TVs93zXwAXjFlzEfDMyPGR7hzAncBngZd6ziFJmpC5\n5RYkeQR425hLt44eVFUlqVP9xkneBbyzqj6eZP4U1u8EdgJs2bLlVL+NJOk0LRuGqrrmZNeS/DLJ\nhVX1bJILgV+NWXYUuGrkeDPwGPAeYJDkp90c5yd5rKquYoyq2g3sB
hgMBqccIEnS6en7VtJe4NW/\nMtoBfHPMmoeB65Js6G46Xwc8XFX/WFVvr6p54I+AH58sCpKktdM3DHcD1yY5BFzTHZNkkORLAFV1\nnMV7CU90jzu6c5KkM1CqZu9dmcFgUMPhcNpjSNJMSbK/qgbLrfNfPkuSGoZBktQwDJKkhmGQJDUM\ngySpYRgkSQ3DIElqGAZJUsMwSJIahkGS1DAMkqSGYZAkNQyDJKlhGCRJDcMgSWoYBklSwzBIkhqG\nQZLUMAySpIZhkCQ1DIMkqWEYJEkNwyBJahgGSVLDMEiSGqmqac9w2pIcA3427TlO03nAr6c9xBpz\nz28M7nl2/G5VbVpu0UyGYRYlGVbVYNpzrCX3/Mbgnl9/fCtJktQwDJKkhmFYO7unPcAUuOc3Bvf8\nOuM9BklSw1cMkqSGYZigJBuT7EtyqPu44STrdnRrDiXZMeb63iQ/WP2J++uz5yRvTfJQkh8lOZDk\n7rWd/vQk2ZbkYJKFJLvGXF+f5IHu+uNJ5keufbo7fzDJ9Ws5dx8r3XOSa5PsT/L97uN713r2lejz\nM+6ub0nyYpJPrtXMq6KqfEzoAdwL7Oqe7wLuGbNmI3C4+7ihe75h5PoHgH8GfjDt/az2noG3An/S\nrXkz8G/ADdPe00n2uQ74CfCObtb/BLYuWfOXwD91z28GHuieb+3Wrwcu6b7OumnvaZX3/G7g7d3z\n3weOTns/q7nfkesPAv8CfHLa++nz8BXDZG0H9nTP9wA3jllzPbCvqo5X1fPAPmAbQJKzgE8Ad63B\nrJOy4j1X1UtV9R2AqnoFeBLYvAYzr8QVwEJVHe5mvZ/FvY8a/W/xIHB1knTn76+ql6vqaWCh+3pn\nuhXvuaq+V1U/784fAN6SZP2aTL1yfX7GJLkReJrF/c40wzBZF1TVs93zXwAXjFlzEfDMyPGR7hzA\nncBngZdWbcLJ67tnAJKcA7wPeHQ1hpyAZfcwuqaqTgAvAOee4ueeifrsedQHgSer6uVVmnNSVrzf\n7pe6TwGfWYM5V93ctAeYNUkeAd425tKtowdVVUlO+U++krwLeGdVfXzp+5bTtlp7Hvn6c8DXgM9X\n1eGVTakzUZLLgXuA66Y9yyq7Hbivql7sXkDMNMNwmqrqmpNdS/LLJBdW1bNJLgR+NWbZUeCqkePN\nwGPAe4BBkp+y+HM5P8ljVXUVU7aKe37VbuBQVX1uAuOulqPAxSPHm7tz49Yc6WJ3NvDcKX7umajP\nnkmyGfgG8OGq+snqj9tbn/1eCdyU5F7gHOC3SX5TVV9Y/bFXwbRvcryeHsDf0t6IvXfMmo0svg+5\noXs8DWxcsmae2bn53GvPLN5P+VfgTdPeyzL7nGPxpvkl/P+NycuXrPkY7Y3Jr3fPL6e9+XyY2bj5\n3GfP53TrPzDtfazFfpesuZ0Zv/k89QFeTw8W31t9FDgEPDLyP78B8KWRdX/B4g3IBeDPx3ydWQrD\nivfM4m9kBfwQeKp7fHTae3qNvf4p8GMW/3Ll1u7cHcD7u+e/w+JfpCwA3wXeMfK5t3afd5Az9C+v\nJrln4K+B/x75uT4FnD/t/azmz3jka8x8GPyXz5Kkhn+VJElqGAZJUsMwSJIahkGS1DAMkqSGYZAk\nNQyDJKlhGCRJjf8FFDYZsBaypoYAAAAASUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYcAAAEPCAYAAACp/QjLAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAIABJREFUeJztnXl4nVW1/7/rzJmnpmOSjrSltLSl\nacs8I7PgDIrSihdxQrxeUe+9KnjVn168ol5A7YUWEAQrosxoQQFBoE1nOtIhbdI2UzMnZz7798fe\n77t3ck7StD05Q7I+z5Mn5+z3Pe+73pVh7bXWXmuTEAIMwzAMY+JItwAMwzBM5uFKtwDHS05OTkMg\nEBiXbjlGCl6vF8FgMN1ijBhYn8mF9Tm8+Hy+Rr/fPz7RMcq2sBIRiWyTOZMhIrA+kwfrM7mwPocX\npV9KdIzDSgzDMEwcWRdWiqN1PQCgu7vbHgqHwwAAh9Npj8X0S0SjEfU9bI85lJmMhuUspbGhyz7W\n3RUCAPhy9UXIEdLXC1lur1anA251D2mU810F+nodR+UxROyxzjYtSxTyPnlj+z8sQCSv63Z77DEP\nedU1tPu9Y/tu+3V71xEAQNVk7T2Or5gRf3EAaNsMAOjp6bGHIhEpJxn6FEPVZ0Tqs7lRX6+nW57n\nzdVzEwcZzx+O1yep19Go/EyeK09fr7MNABAz9NnVoV9b+swt6/+wADnkdd0utz3mVvrs7tD6fH/X\nPvt1e3cDAKCiUv+Axk6YGn9xgPUJ1mcm6fP8az8Zf5MBYM+BYRiGiSOlngMRrQRwDYAmIcRcY/wr\nAL4EIArgBSHEnUO95tvr1UzC77fHgr0B+cIVtcdC0FY2GAio79p6A3KGH1YzCZ+r2D7i88hZQO3m\nXfZYIKw9lVBE3ieiJxcIK8fCoSz/hFI9a286dEBeI9Sr7+HS04YJlVUAgH1btul7qOs5IT0QJ+XY\nx/zKazpQW2ePHdivX3tz5DOdd/4ie6zal8AtAVCj7tnrD9hjwYDSnVPrMwztOYVUwjAYjNdnROnT\n6yrS8nik7HXb9up7RPTMzdancblIP32OK9byNzfIZw2GtMxe4+c3fmIFAKB2m/75KecSDsifranP\ngJqV1tcdssfqDh7W1/bJZ1p61nx7bKG3HIlgfWa2Pm+46TsJ7zNSESJzPYeHAVxhDhDRRQCuAzBf\nCHEagJ+mWCaGYRimHyk1DkKINwC09hv+AoAfCyGC6pymVMrEMAzDxJMJCemZAM4joh8CCAD4NyHE\nukQnEtGt/cfe3SBPjUb1crdeK1lluJk9oU77dSgox03X1+mUSZ5oRCaHxpTp1V3l5T4AQEOztmv+\ncLtx7Xi33cp9edzyHuTUCaqmzl41ps8POfX9csPydUdA2+6ubnWPgHSXe7q0W320Rbrrbe1t+noR\nnbAudMpkVquxXLw3nHh54IYtm6T8hj79fhX+cmh99pphNVufPnvM6ZT3t/RZWqqfr6xM6rrpqJY3\nENE/n95E+lSvPS5Ln7n2sWalC4ehz7BT664zIu/dGdRjPT3qHkH5g+rt1sppa5UJvfYO/TMOG/qE\nU/7ZtJv6jLA+gezTJzMwmZCQdgEoBXAmgG8AWE1ECdfdCiFWpFIwhmGY0UomeA71AJ5WlW1riSgG\nYAyA5qF8ePZ0mbx1efRSr1Y1g2rr0DP97qCeBkXCaumYb4w95vPkAwBiUTmjyMsptY/l58lZ1ayp\ns+yxtm6dUOtVCbFAUGekQwE5s7FmKHk5einrhKrp8nO9enYTC+uZzNFWORM60qCT7E2N8pkCfmnP\nu7v1vYK9HQAAj0dfw+HSr7050mspLtJJPhJG9txgxhSZbHS59a9GW7u8fkennpn2mPpUs9lcr9aZ\n1y3vGYvJn0uur8Q+lpcr9TmjSi+nbe9psF/7g9I7CoYMfQaVP
h0edb18+9j4iskAgN5enYQVxsy0\ntV3qsalJ67OlRT5TUOmzp8fQp18uY3a79e8Umfr0SfmLCvUiAoqxPoHs0yczMJngOfwZwEUAQEQz\nAXgAtKRVIoZhmFFOqpeyPgHgQgBjiKgewPcArASwkojeAxACcDP3x2AYhkkvKTUOQogbBzh004le\nc1qFdBUjRoJqXLFM0AVCOmwUieo0hhWaOXRIu6G1e+Uiqdaj0lX2GBXN48fLe5SUa1f11Kop9muf\n0mIoFNOCqRLNsKqYbO7Un609JF3+tkYd9mo4rBOInd3yWVq7O+yxoAoNWNXfoZDOLgaV+19UPske\nc3m12+7vlsdDPTqMVeQ1so0GUyYWK7m1PsuLpD6DYe2Wm/oMBuTrI0e0vHW10vlra7P0qZ3BsWPl\nPYrKtAwzKyrt115Ln2bSXJW4W/c92q0/W3dE/uw6mnXCs6lRV7h39cjrtPdoHQdVCNCqru2jT5Xg\nLCybYI+5jArVQI88HjLCLoWsT3m9LNMnMzCZEFZiGIZhMgw2DgzDMEwcmbBa6aTIcUv3MwrtZrrz\nZEjIH/baY7v36tq6vXsOAgBq1r1njx2ul8d7OmX4JhTSa6bzcuR1ps/ULTAWLZptv54yUbqz+fl6\nBYnXI1dg7K2V93rjjc36/spt9xtuud+v7xeJSJfbXK8RCctwQiAoXe+oEcGyXju9eh2321jN0atW\nbdUd0itYhGMBEuFzSX26DX26cuVKloCxYmVv7VH79YH99QCATZt22mMNh+Vis94u+RThsH6+XJ/U\n5+RpumXD/Pl6pU3leBk6yMszWkS4pT5r6+S93n5HtxapbZAhgoBfhz4Cfq2gSETqwmyeEI7Itfyh\noAxpRM2Ii/qow6t/f1wuQ5+dMtxy+Ij+nRIOuxtMHwbT58R5H034mZHKvlr9N3DR+R+xX5+IPqdX\nn3Akmhki7DkwDMMwcWS95yDC1sxMJ7DCYbmG+Y23ddvq5158x37d0SYTUy2tOiEsonJmS+p7NKQb\nd3X75Wy9dYOemdYf0p+dWC49hrxcvVbcpTyag6rZ2PZ6PWsPKFmdRlW0WfEZCctZbdjwXoKRbnWe\nlCtmeg6QVa49Zuc/j9aHU8nV0Karqtv8iecFMbUm3AVznbictb+9Tjd2++ur6+3XnR1SL61tOsFv\n61MJGjP02aMSlG1bdYL8SIP+7Pgy6Tnk5ui16C63lLdeeT+7D+tZZlDNcRxGFW/McK3CEVmHEjG8\nl2BY/kxjaj29qc8YpAfmN/Xp1td25kq5moyWyR3+hHWbg+pzNBMirfAT0Scz/LDnwDAMw8TBxoFh\nGIaJI+vDSh6PDAE4oddNb98t+9E/8uhqe6yhSbusXpVo7O3VY9YuUc6YDD24HEYYQZX9O51aXe0d\nOiTS29mlrmE0+lNrtoMqnNJthBWiSmaPkeQMG13RQqrNQSSkQ03WtckOnegwjTsqZelt1v3yHaW6\nVcbYCrkLVNSoLWxq1vKbeDxSN07jV2P3XtkqZPUfnjU+r3XnUfr0m/qMyedJqE9XvD47OvUad3+X\nFUIzGqmp/TpCCfQZUzK7hb5exNDne9vWJnzWY1F/cHPC8SuuvUXKZ+iz+WhPwnMH0+doJmDs3Fg+\nUbbAOR59MsMPew4MwzBMHFnvOditjKN6meVr/9wIANi+54A9Vl6qq4eDameokJEkjakZbkyoma7R\nxM6p9qY1m/s5jX7GUeuzDp1Ec6rduVyqL7dXaDvcqxKjgYC+v9kvxDrTZeTevKoxoFA72kVjuunZ\nZWctBQD43IX22Lvv7df365YJ4LAxs961Yw8S4XSova9j+vn/uXaL/Iyxu1xZsa52tXbaCoV1Alco\nPYpYvD7/sfXthPc+UU6bJXe4s6rIgb76TDa9PVL3EUOfe97fn/BcS59j5l47jBJlH17Snt2J6JMZ\nfthzYBiGYeJg48AwDMPEk
fVhJasvfUe3DrNs3lkLAOjRUQYUm73sQ1atgF5rHbXWxdtbuOkwiLVP\ngqNPKMn8rPweMxJqQoWTHE6ZfM4xkq8gVYVthEHQZ1m3iBt0qLX3gZAMEU2brquLP7v8OnkPt64o\nzn/m7/brl16XiVWnVx9vadHJXhNLn509Wp/b9shwUq8hblFYhwXCSp9CmDpRz2BvOTZ8O2/luGWS\nMhwzBBzGaU9vh6y1cHp1GK+1dXB9Mn057yxdAf3qP2W1O+szs2DPgWEYhokj6z0Hl0pId3Tr9sJH\n1DJNM8EVMZZFWkskzeWOsWjfBKqVhDaJhhPvThVWyeZon8mOo883M0Oap5ay5rjjk96A3hPXXMoZ\nVftKO9R1Tp8z1T7mzpEzZodDL/9bdIY+3tYmew/tPaj7ITU2aX2ZuNR+0109uhq8US0rHFCfSmcR\nw5uwEvy2Ph3DV9maq5Y3CqMNtCPBzy9ZLDpNJuMP1Osq+ebmjoTnWvpk+nL6vCr7dbvq/cX6zCzY\nc2AYhmHiYOPAMAzDxJH1YSWrmrjlqA6TtLTLsFLMrEqO6pCHVYkZThBq8iXo92Wt3++zeynpE600\nbMRIUlvJaac6TxiN9axPmqEkt9Eoz6FahPv9uuI4EpDyT66Q9RpLFs+3j/lD0gWnHC3elCqdfP7U\nRy4EALzx5iZ7bHddIwBg644+j4qwCg21tmt9tnbIsJIYSJ8x+WyRmKlPqQtv4v5pSSWiEuJ99DmM\nYaWPXHM2AOCdd3TL972qRfnr/c4NhyNg4qms0Mln1mdmklLPgYhWElGT2i+6/7GvE5EgojGJPssw\nDMOkjlR7Dg8DuA/Ao+YgEVUC+ACAg8d7wXa/3DBnz0FdvWvtlewhPZMNCZ1MtkYjxuzSPlN9Jgrd\n3tqvNt+JDKAuZyyYYFTOnKN2TyF9L6Fm1R6jotpvVGtDJVajxj663qhMEJ+3VG6KM2fmRH1/ks9m\nti2POfWzF5TIxPel586xxy6IzAMA/PnlV/pI3eGXuttfr/v/WHv7ug19hoXhOVhjRtLZ8qG8tj4T\n6Sg5BJUX4yGjZbepzyRTXiR/PuefOdMeOyt8KgDgkSef6nOupU+mL7nm7yfrMyNJqecghHgDQGuC\nQ/cCuBPD2/WAYRiGGSJpT0gT0XUADgkhErfAZBiGYVJOWhPSRJQL4N8hQ0pDOf/W/mNWUnnv/lp7\nLNgtwyAOM8wS005JTAU9nMbOYaQSx9ZQKKzDEiHl0EQNUyqM6mWKxdc/9EleA3AZqraS0+TWa7at\nHd5MokbYa9JYuS/26adNBwDkGRXH4ZB6npghk9Hoz7p2WVmBPeaOJV4vbiWVa40wXVBVSzuMuUTM\nqIYWSj/mznYOFU6zImehyPCFeUKqXoWMFuiOaOIK22QQVTvElRZrfbpE4j8lM0nPaJwx4/czxvrM\nRNLtOUwHMBXAZiKqBVABYAMRjU90shBiRQplYxiGGbWk1XMQQmwFYDcJUgaiWgjRkjahGIZhmNQa\nByJ6AsCFAMYQUT2A7wkhHjqZa/b2yJDCwdp6e8wKYbi8+faYA0YYRK1ucRlhEKHW3ITVHgthhw6D\nWHszmL3zIkJ/NmhsTm8RE30X+LtjRmsJVZvhMsIgZhAqqhrIxcLasRs/WTpTlROlLfU4dVGDFRLz\nGe04ombNgVox5CIjlDRAO4veXilnfZ1erRS29ZmnP25IbOnTm0CfEdVcMBIexrCS2k/CPYA+k43X\nJetQXGT++QyuT6Yvlg4B/bvK+swsUmochBA3HuP4lBSJwjAMwwxC1ldI+7vlLLz1qNmoS84bc/J8\n9kifBKVKgBXm6Jn2jGky0bt9l9whraPb2MPWqnx26toHcutrg+L3tbUSsVbldTRi7AetEqhmYz1n\nnwSzPG62wB43RlY8lxRKb4jCemaV55HVpi5juhwykuSkahIiQX0/GqCAOKA8sfbWTmNUXti
Xq5+T\nTH2q6xcYO+VNnTwFALBrr9zRq7OnN/ENk4C1/7bpLbkNL2rurCVSTKOC/ZrzTgMA3PSxywAALqOm\nxPIoTX2albnWzy8SMfQ5QPbO0ifTF5ex5zuphRSsz8wi3QlphmEYJgPJfs/BL6d33cZmPxZ5OTou\nHwvo490ROYudN0tXZH70g1cDAH79uz8BANZv0h0+clXfo4njSuyxxuY2LUNYehTmZkCkpj6Wgh2G\nqmNkzZT0LMjjjt+f2m94LwX58vMedRlz0xOrp5AwKphz3PrZI2pmHzWqUsNIvCTQH5Cz656eQNyx\nXJ/2lmJBfbxHeQWnztAbuFx7uZyRP/L0iwCAzVt32sfmT5XtxMeVl9pjLS3a8+tUaiFDnw7qm8Mh\ns314JL5luNto3+1UblIgoL2X/DyXOk++DxuJI2tfclOfPsNTtPTpMvQZQYLEE7Q+mb6wPjMf9hwY\nhmGYONg4MAzDMHFkfVipRe0YZe8BDb1E1Ny5LWZU6FZNkjt5XXv5xfbY7GmVAID5c2VopGbDVvvY\nxPHjAACfvekT9tiu93fbrze8twsA0Nmpm4IFgzKMZbXd9od0WMSSKmzIFzKWYbrV0llPgW5rPHWK\nlM8KPzkdOmzkUjbeTUaltlEt7VLRGTLG4EzcS7u1RTb4ixotxmsP1/b5PhA76nRV9cpnXhrwvA8s\nlQniT370entsz7699uutO+WigK4uHVYLqjBWICD1GQhr+a1gkhmmC0e0LlwqxFaQrytwqypl40KP\niis5HTrMYenTRUbyM6E+jbmVM3GYztIn0xeKmQsArDHWZybBngPDMAwTR9Z7DlbBmDdHz/zcbmsP\nZj3zcxoJ2A9ffzkA4Kwl8+wxa+Hj+Anl8nyjyGz8GJk4XXy63pf53MWz7NcXNy4GAHQYG+R0dMql\noC3Nct/moOE57N4rk9l/f3uLPRYyZuoQUtaJ43TCduYMuedurkt6SP6wfh6H02oPrpPE5sY3QiVq\nQ0b/JrM3lElMWPqMX56bLMaWFgMAFszR+wgvXTDDfn2wZQEAoLPDSFJ3Sa+s9ahs6mvqc98Bed5b\n67bZY3306ZK6GldebA9Nn1oBAMhV61X9Ye15OpxyLObQbcbNxQZCeSVhY7lw7Bj6ZPrh0rpjfWYm\n7DkwDMMwcbBxYBiGYeLI+rCSN0e6+4UFufaYp1UmMgPGWvxZk3WI5tKLZRgoP8eoWg6qfaKtdLHQ\nFbbTq2QIojTfqEswNOc7RdY/RMO6l1O0X8tuD+nrvfa2rBp+s0YnvXtCOvnq9MgQRyiqrxcMyLCK\nEzLclecxag588l5BI2xERljJ5ZVzAGevsY48mni9uNcnP5efn5PweDKYUiGTwSV5Zl2CPn7KVBn+\niUYMfcb6yutxaH3+c90BAMC7G/WG2L0hXdPgdFt7huveUMGg1dZd6jHXo8NowhuvT4ehT4faGDvi\n1/qMRRN3c7L0yfTF4TF0y/rMSNhzYBiGYeLIes8hzycfobhIL/t0UjMAwOPRtu+ay8+xX8+YLpem\n9jTrzqPWUtiOVpncNLuezpp1CgAgR09kETUqrqFedrbphLRH9UrKy5PXCUV1crW945C6px4bU6Iv\n7lUz/bYWvaPq+/uPAADmnSK7s0Yj+rNRlZwNQ8/GjFWytiw+I8kcHWBmluOV+iwqLEh4PBnMmD5N\nyqMn8ogaXp61fXd3u35Gt+rblJsrZ/qhsO791NHVIK8R02OlRfriXjUz7WjVVe37DzYCAE6dKrvc\nRoX+bEzpMwKtxIixNNatlhN7fFqfrmPok+mL6dWzPjMT9hwYhmGYONg4MAzDMHFkvY9Wrpq3WWvn\nAQCqvqFqkr3JHM6Yo2sU/J1NAIBIWFc0BwIyGdbcIEMUOS6d8MwvlMnuQFSHkoJ+nUCOhtS5IR16\nyMuTYSKP1Rvb0LTLK++Vn6sTcGdV67qJcWNkrcXq1f+
wx7Zu3wcAOO/MOQAAr0PfX6gmf+EBkszB\nYDBuzNxoyKSsTCbXxxQXJTyeDPIKZKht3OJlw3aPz39GV1+Xl5YBAJ555h17bPsumcReukg2X/SS\nTmDHlD4jseTpc6TzrzffAABY/knZcPFY+vQbYVnWZ2bCngPDMAwTBxsHhmEYJo7sDyuVyFVKRUad\ng0uZvBmqWR0AjC/Tq4HammXoCFFzdy8ZGpo0Xoapco1l/qT6+pt7UPWEjDX6UXnDwhIdirHW5YdC\naj9oj3apx4+XYaMZ03X7iFnTJ9qvz1ks6zDe27zPHtu+W74+2CxDYfNmGvdS+0k4o3oNeNRoQmft\nOGc2+otG4/drAIAxxVJPhcYuesmGKHFTtWQyY/I4+/WShQsBADu3HbDHdu+Tr+tbZHhuznS92i0a\nkWGOgfQZU/qM9NFxfGgE0Poc6bA+Rx4p9RyIaCURNRHRe8bYPUS0k4i2ENGfiKh4sGswDMMww0+q\nPYeHAdwH4FFjbA2AbwshIkT0EwDfBvDNoV7Qo/r9TlCzcQAoKZKzi6mV2nMo8mrPIhyVM+godNLZ\nofZhXrJEtux+6S86ednaKusmWtv0+f4ePfvN9UmvwGvsoWyt0g6rfY3dxs5XblVD4SJ9fuU47TlM\nmSi9l2uvOdMe+79HnwMAvLtBtgo/ZYo+Zs3HerqNqmCjJbfbKe+TX6J1EDKS8SZupc9xY8sSHk8G\n7e1Hh+3aFhPLx9uvK8fJ+cYHLltkjz22+i8AgI1bpUc2vVIfs/TpN/a9dhj6dDmkPouKDH1GBtfn\nSKfTLxPMrM+RQ0o9ByHEGwBa+439VQhh+ZPvAKhIpUwMwzBMPJmWkP4sgAF3iSGiW1MoC8MwzKgl\nYxLSRPQfkDnfxwc6Rwixgoh+Y44VFspwUGWFdjjKSmWyduJYHWpCRNtBhwonRQ3b6M2R7SImTZRh\noKpK3ahv/wG5w9nsOfp64ZCRVFV1Cw6jUVtMJaT9yt0u9YyxjzWq3ev83bpWITdHZ8DbWmXC/NRZ\nOjRSpfYfWL9VNpernqfDUGX5VkJPy2TujGft7TDJo68XCJnpdU1BvgzJTZw0MeHxZHCg7vCxTzpJ\ncn06jNfRLutaZs7QdS8Vk+Xzbd4hw3TzT9UJ7JI8S59ah7Goud+DCmV69PWCx9Dnc6v+1x579Iln\nAADXX3GJPXb+fL04Iab6sUSM3eemX3ZLwutnCqnWJzP8ZIRxIKJlAK4BcIkQInFTFYZhGCZlpN04\nENEVAO4EcIEQovdY5/entUcml9du3G6PVY6Xs4uZM/TsvyOgl27GlPlxGsmt3laZChEOOftfvOQM\n+9hb6+S1zzY2oSo2+tK1tklPoKNHV332WEtYVXWo32i/vWWrXKxVMcVY7ufRTeb2H5Ez68IcXQ26\nYL5sVvfG2zUAgJYu3SguN18+R9TYPCsU1snuaFgtC1QzaADwG7KatPXKGdvGLXqP7Dtuug4AcMNH\nrrTHCr26SVpM7VznNKpaI6pxWpSkHl7/5y772NpN+trDRdSj9XOgSTbZK/Dp5b9z50wGALxdsxkA\n0NqtE6A5efJnFjWCruGgoU+1NBMdzfZYoDfx0uBE+pw0VjYFnD5V/3y7gobXmeD3M9NJtT6ZgXnu\nkf8EABSVaO95IH0ORqqXsj4B4G0As4ionohugVy9VABgDRFtIqJfp1ImhmEYJp6Ueg5CiBsTDD+U\nShkYhmGYY5P2sNLJsmev3BvhH2+tt8euvEhuUF9qVFOGAtptd6qmeh4jaSlUIzBySJ9+7tyZ9rG/\nvSVd5bp67faWzTOT09ItDoR0uqSlQ16vuVWGN7YH6uxjftW0b9EZcwyZjCZvdi977QqeOksmpHft\n2QsA2L+/wT42pWKGvG6oyx4LRLQsDlWR7O/VCXCjR2Af9h+Q+0a8s3azPXbJubL2o7hQ75EQDpr6\nVP34jVCTUP36iaQ
cs2dPt4/9Y50OAQ4XTpeRurKTn7rq9pQZMoG6Z38tAODAQR1yq5womzQGwlqf\nQUOf5JD6DBjN48IDRDuSpc9MJ9X6ZAYmGhu6Pgcj05ayMgzDMBlA1nsO27bIpZ0+Y4/fBfNkbyKv\nS2eNPfl6ZmbtjOZ06qRYJCyPWznA8eN08jKmlqju3a1n/5Vlulq6s0PuMJaXq5fvtTdLq/23V9bJ\ne0Fb8YsvXgIAKCvVy1sjQT2V91lCOLTMJXlS1umTpAdx+ICuJQxVq/ONhLkLelmrx6My1dFEs+m+\n7Nr2PgDAa+hz7qnSE/O6tCfmccXvjOZ06l8nq4+T9Shjy3XyXRhLfoeLSFA/v9cSgvR9i3Llz3fK\nBDnjbazXu8SFAsrLMcR0Gvp0u9WcKprI2+tLsvSZ6RTlSllTpU9mYLzHoc/BYM+BYRiGiSN7piYD\n0NUmZ9DXf/AKe+zMRbKvi9etpyqCdLzZKgoz97F19UpVOEmpxIgBjx0jZ71HG3TO4cghPRP2qPuE\nnfozte9LLyPQKcemTtVeQrnamCji1/c3J4mxHDnTtzq6AgBB5i7GqqK/na31hiwylls5SfemgU8H\nGZ1OOYNwkJ4LREOJZ+/dygu68oqL7bFF8+cDALzG7DaWQJ/mpi1ut0vdM16fZaXDtz+1RdTwHGIO\npU+jK61D9ZYaUyBl2dOmC/OajshjEycY+vTG65NI/9CGW5+ZjgikVp/MwOSqPeiHos/BYM+BYRiG\niYONA8MwDBMHGweGYRgmjqzPOZy5WK4ZL52om8p1d8k11gFjRUTEiOnGVPumiBGDtlpAOGJqpQh0\nL4rLLj4XAHDwgN6ZbUK5Xpk0Qa1samjULRuKCuRqpquuPB8AUDlJ5ygK8uW1XcZqKZfH/FFE1HFd\nh+FUvQfG5Muxri69Wmnr5p0AgIoxp9tjnhz97FbrfI9br7CCx+i1YbBowWwAQMl43QStp6sFABA0\n9WmspIqpfg/mTl5C6ZNiKpdj6POCc5cCAGZM0TqcPU3fb5xa2dTUrNfFr10v20/kFcp9JiZN0HmL\n/Dx5badD6zChPh2GTlRTu9I8WUvQ1d1uH9uxbQ8AYEKprkNx+xLo06V/fsOtz92vPiafISbHuiP6\nflv3yFqfQ/W19lgq9Ol2p0+f617Q/TkXX/2phOeOVlyqtmgo+hwM9hwYhmGYOLLec5g8Rc58jqrZ\nGAA0HJazajKW+YaE9hKsFb9krN6BkCeX5slmfdGYPlZSpCpZK/TuaF7TEqtJUEmJ9g7OOVeuSMnJ\nVTMyY0/cHJ9T3VJfIydPr3m393wO6doIn5qtOcdKOSur9Mwwqlp1kzE7z/HqdfV+v1wtEhbG2ACN\nuCpVq/LWHu2ZNDXKWSAZS6XUg6/5AAAgAElEQVRDMcNLUN/JYehEzX5LcuXKLFOfxYWqPflE3Rgx\nkT6LivRsdsnS0wAAvlylJ1Of6lljwnj+HF19bOkzEtbP7LX0OUbKOalCryY7pj4DstLc1GegN/HK\nItbn8OuT6Yu1p/xQ9DkY7DkwDMMwcbBxYBiGYeLI+rBSUb50d6NGS4ZIr9pLwSgZ9yC+N77VpkB+\nXrq+QeXeR4zOdGX5Uk1FOdptDxtueITktT05+h5FkOGfHDXW26Hly/eo1hxurX53jk4Wt7XLMFAk\npq/n9ElZc0jKddlFC/WDhOV5Ofna9YdLh6Q8OTJ0EDM2fMjP1zvPmRTmyUKlaFQ/f0Q1LRSmPh3x\n8wqXkQCzmn/Z+jQ6f5XkyecuyCnW9zB/fpY+ffoehUqfPhWS83dp+fLcKnFv6NPl0/rs6JRhi4jh\nZueoBKsvT8p1wTlz9YNEnOqYUbTlNPTpk3o29ZmXpxcPmLA+Mez63PSnBwD01efCj34p4edHKuue\n/YH92tLNUPQ5GOw5MAzDMHFQtu3KSUS8k2gSISKwPpMH6zO5sD6HF6XfhOtc2XNgG
IZh4mDjwDAM\nw8SR6j2kVxJRExG9Z4yVEtEaInpffS8Z7BoMwzDM8JPSnAMRnQ+gG8CjQoi5auy/AbQKIX5MRN8C\nUCKE+OZA18jJyWkIBALjBjrOHB9erzerWkNnOqzP5ML6HF58Pl+j3+8fn+hYyhPSRDQFwPOGcdgF\n4EIhxBEimgDgNSHErGNdp7q6WtTU1AyrrAzDMCMNIlovhKg+1nmZkHMYJ4Swdg1vADCgV0BEtxJR\nDRHVNDc3D3QawzAMc5JkgnGwUWtUB3RlhBArhBDVQojq8vLyFErGMAwzusgE49CowklQ35vSLA/D\nMMyoJxOMw7MAblavbwbwTBplYRiGYZD6paxPAHgbwCwiqieiWwD8GMBlRPQ+gEvVe4ZhGCaNpLTx\nnhDixgEOXZJKORiGYZjByYSwEsMwDJNhsHFgGIZh4mDjwDAMw8TBxoFhGIaJg40DwzAMEwcbB4Zh\nmCRw7733pluEpMLGgWEY5iSpr6/H3XffjUOHDqVblKTBxoFhGOYkeeCBB9DR0YEHHngg3aIkjZQW\nwTEMw2Q7gXAU3/neXfjryy+DXG5EYgLNjbKx9OrVq/HWW2/J8wIBXHXVVfjud7+bTnFPGDYODMOM\nWqIxgQ5/GK09QbT2hNHaE0JrTwhtvSH7tfX+aLf83huKIhaYi8Yjv0eo4f0+19uzZw/27NkDAKiu\nrsbtt9+ejsdKCmwcGIYZEQgh0BuK6n/qvSG0DfAP3hpr94dxvPuduZ2EkvIynHL7vdj0m2+g9cCO\nuHOqq6uxZs0aFBcXJ+npUs9xGwciqoLekKdRCHEwuSIxDJMK7r33Xnzta19LtxgDEo7G0NYbQltP\nGEd7gmjrCaO1N4TW7gFm9j0hhCKx475PUY4bpXkelOZ5UJLrQWmeGyV5HpTZ7/VXSZ4HBV4XiAgA\n0P6Fc7B06VLs3r3bvt7MmTOz3jAAQzQOROQE8O8AvoB+O7URUQOABwD8WAgRTbqEDMMkHWt1zcc/\n/nFMmjRp2O8nhEBnICJn8uoffMKZfY8e6wxEjvs+XpdD/lM3/6HneuLGrPGSXDdczhNfl9Pd3Y3O\nzk64XC5MnToV+/fvR2dnJ3p6eka+cSBpIp8HcBmAPwBYA6AeAAGYBOByAHcDOBvA1cMmKcMwScNc\nXfPDH/7wuD8fCEft2bue2YfQ2ivj921G/N4yApHY8cVvHAT5DzzPg9JcPXMvzXOjNM8rZ/i5HpTl\neVGSJ2f/uZ7URsrvu+8+RKNR3HHHHfjBD36A//zP/8QjjzyC+++/Hz/60Y9SKkuyGYomPwXgYgBX\nCCFeSXB8JRFdBuB5IvqkEOJ3SZWQYZiT5vvf/z5efPFF+Hw+ALDX469evRpvvvkmIjGBXn8AS86/\nBB++5at6Nt/vH7w1s+8JHX+QIN/rkv/Ec41ZvHodN7PP9aAwxw2ng5Kqh2RTWFiINWvWYP78+QCA\ne+65BzfddBNeeumlNEt28pA4RjaGiF4AUC+E+PwxzvsNgAohREq8h+rqalFTU5OKWzFM1tPe3o5L\nLr0MG9YP/DfjGT8D4z7xAzh8+ce8nstBfeP0+Qn+0Rvx+uJcN3xuZzIfiTlBiGi9EKL6WOcNxXNY\nAOChIZz3FwD/O4TzGIYZZqIxgT1N3dh4sA2b6tqxqa4dLef9GzyHvhO3/BIAciaegjNu+xnGlpX0\nSb6a/+DNf/xmUpYZmQzFOJQBODKE8xrUuQzDpJimrgA2HZRGYOPBdmypb48L/bhzCnDRHb/Aul9+\nAa2HD9jjp5xyCtauXZv1CVQmuQzFOHgADCXAGAXgPlFBiOhrAD4HQADYCmC5ECJwotdjmJFKIBzF\ne4c6pCGoa8emg+041O6PO29ScQ4WVBVjYWUxFlQWY+6kIrQ0HsHinwf7rK7p6uoaEatrmOQy1NT+\nV4joWN7DhBMVgogmAbgdwBwhhJ+IVgO4AcDDJ
3pNhhkJxGIC+4/22F7Bprp27DjSGbfyJ8/jxHxl\nBBZUFmNBVTHGFvjirjeSV9cwyWUoxuEggHOHeL2TKYhzAcghojCAXACHT+JaDJOVtPWEtEdQ147N\nde3o8If7nOMgYPb4AiyssoxBCWaMzR/Syp6RvLqGSS7HXK2UKojoqwB+CMAP4K9CiE8lOOdWALcC\nQFVV1aIDBw70P4VhsoZQJIYdRzr7JI1rj/bGnTe2wIsFlcVYWFWCBZXFOL2iCHle7nzDnBjJXK00\n7BBRCYDrAEwF0A7gD0R0kxDiMfM8IcQKACsAuZQ15YIyzAkihEB9mx8b69ptY7DtcGdcuwef24F5\nk4r6GIMJRT5eGcSknKFUSFcdzwVPsNfSpQD2CyGa1T2fhqy4fmzQTzFMhtIZCGNLXQc21bVho8oX\nHO0JxZ03vTwPCypL7MTxrPEFcJ9EOweGSRZD8RxqIVcQDQUxxGv25yCAM4koFzKsdAkArnBjsoJI\nNIZdjV0yNKQMwZ7m7rhun6V5Hp0wrizG/MpiFOWc8AI/hhlWhvKP/NpjHM8FcBuAiwCEj3FuQoQQ\n7xLRUwA2AIgA2AgVPmKYTKOhI2CHhjbWtWNrfQf84b6rvT1OB+ZMLFThIWkMqkpzOTzEZA3HNA5C\niBcSjRNRPoAvA/gagEIAvwLwkxMVRAjxPQDfO9HPM8xw0BuKYGt9h11PsKmuHQ2d8eU3VaW5xuqh\nYsyZWAivi9tFMNnLieznUALgDkjD4AHwGwA/FUI0JFk2hkkpsZjA3uZulTSWhmB3Yxei/WoKCnwu\n6RGoeoL5FcUoy/emSWqGGR6GbByIaCyAf4MMIUUB3Afg50KIo8MkG8MMKy3dQd1yoq4NW+o60BXs\nu4eA00E4zQ4PydVD08bkwZHh3UIZ5mQZymqlCgDfBPBZAD0AfgzgPiFE5zDLxjBxnOjuZYFwFNsO\nd9r1BBsPtqG+Lb7lxMQiHxao8NDCqhLMnViEHA+Hh5jRx1Badgcgeya9BuDXkAZiQIQQLyZLuMHg\nlt2jj/r6esydOxfbtm0bdPcyIQQOHO3Fxro22zPYfqQT4Wjf3/VcjxOnVxTJpaQqcTyuML7lBMOM\nJJJZBOdR3y8CcCHkDnADIQDwNIsZFgbavayjN4xN9bq4bHNdO9p6+y6cIwJmjSuw+w4tqCzGzHEF\nGb+ZDMOki6EYh6nDLgXDJGCg3cse+92T+OOLr6I7GEFXTy+o6gwUn3Njn8+Oyffaq4cWVhZjXkUR\nCnxcU8AwQ2UoS1m5gRGTFm6//XY899xzeP311/uMH6zdB2AfALl7WeXS6zB/com9emhBZTEmFedw\nTQHDnARDSUg3Y+gV0hBCjD0piRhGUVxcjDVr1uCCiy7Blk0b4o5PnT0Pv33qOSyZXcktJxgmyQwl\nrHQ/jsM4MEwyiblzUfDhu+E6cBsibYfs8ZkzZ+Ldt9/gDWoYZpgYSljprhTIwTBxBMJR/MujNag9\nchSOiL/P7mWdnZ28exnDDCPsizMZSSwm8K+rN2H9gTaIbS+jwOPAHXfcga1bt+KOO+5ANBrF/fff\nn24xGWbEwsaByUh+9OIOvLi1AQVeFz5+zky8+uoruOeee+D1enHPPfdgzZo1KCwsTLeYDDNiyYjN\nfhjGZNVb+/Hgm/vhdhJ+8+lFOHvG5XHnzJ8/397qkmGY5MOeA5NRvPxeA77//HYAwE8+cjrOnjEm\nzRIxzOiEjQOTMWw42IavPrkRQgBfv2wmPnxGRbpFYphRCxsHJiOobenB5x6pQTASww2LK/Hli2ek\nWySGGdWwcWDSTmtPCMtWrUVrTwgXzCzHf10/l6ubGSbNsHFg0kogHMXnHlmH2qO9mDOhEPd/6gyu\ndmaYDCBj/gqJqJiIniKinUS0g4jOSrdMzPASjQl89cmN2HCwHZOKc7Bq+WLke3kBHcNkApn0l/gL\nAC8LIT5KR
B4AuekWiBlefvjCDvxlWyMKfC6sWr6Y91JgmAwiI4wDERUBOB/AMgAQQoQAhNIpEzO8\nPPTmfqx8S9cyzBxXkG6RGIYxyJSw0lQAzQBWEdFGInqQiPL6n0REtxJRDRHVNDc3p15KJim8tPUI\nfvCCrGW456PzcfZ0rmVgmEwjU4yDC8AZAH4lhFgIuRXpt/qfJIRYIYSoFkJUl5eXp1pGJgmsP9CK\nO36/CUIA37h8Fq5fOPB2nwzDpI9MMQ71AOqFEO+q909BGgtmBLHfqGW4cUkVvnjh9HSLxDDMAGSE\ncRBCNACoI6JZaugSANvTKBKTZI52B7Fs1Vq09YZx0axy/Nd1p3EtA8NkMBmRkFZ8BcDjaqXSPgDL\n0ywPkyT8oShueaQGB472Yu6kQtz3yTPg4loGhsloMsY4CCE2AahOtxxMcrFqGTbVyVqGlcsWI49r\nGRgm4+HpGzNsCCHwX89vx1+3N6LQ58LDyxdjbAHXMjBMNsDGgRk2HnpzPx7+Zy08TgdWfKYap3At\nA8NkDWwcmGHhxa1H8MMXdwAA7vnY6ThzWlmaJWIY5nhg48AknZpaXcvwzStm47oFXMvAMNkGGwcm\nqexr7sbnHq1BKBLDp5ZW4bYLpqVbJIZhTgA2DkzSaOkOYtmqdWjvDePi2WNx9we5loFhshU2DkxS\n6A1FcMsjNTjY2ot5k4rwvzcu5FoGhsli+K+XOWmiMYHbn9iEzXXtqCjJwUPLqrmWgWGyHDYOzEkh\nhMDdz23DKzsaUZTj5loGhhkhsHFgTooH/7Efj759AB6nA//3mWrMGMu1DAwzEmDjwJwwz285bNcy\n/M/H52PJ1NI0S8QwTLJg48CcEGv3t+Jff78ZAPDtK2fj2vkT0ywRwzDJhI0Dc9zsbe7Gvzxag1A0\nhk+fORm3ns+1DAwz0mDjwBwXzV1yX4YOfxiXnjoW37t2DtcyMMwIhI0DM2RkLcM61LX6Mb+iCL/k\nWgaGGbHwXzYzJCLRGL7yu43YUt+BytIcPHjzYuR6uJaBYUYqbByYYyKEwF3PbcOrO5tQnOvGw8uX\noLzAm26xGIYZRtg4MMdkxRv78Ng7B+FxyVqG6eX56RaJYZhhJqOMAxE5iWgjET2fblkYybObD+P/\nvbQTAHDvxxdg8RSuZWCY0UBGGQcAXwWwI91CMJJ39x3Fv62WtQz/cdWpuPr0CWmWiGGYVJExxoGI\nKgBcDeDBdMvCAHuauuxahpvPmozPnTc13SIxDJNCMsY4APg5gDsBxAY6gYhuJaIaIqppbm5OnWSj\njKauAJatWofOQASXzRmH717L+zIwzGgjI4wDEV0DoEkIsX6w84QQK4QQ1UKI6vLy8hRJN7roCUZw\ny8M1qG/zY35lMX55w0I4HWwYGGa0kRHGAcA5AD5IRLUAngRwMRE9ll6RRh+RaAxfeWIjth7qQFVp\nLh66uRo5Hme6xWIYJg1khHEQQnxbCFEhhJgC4AYAfxNC3JRmsUYVQgh879lt+NvOJpTkyn0ZxuRz\nLQPDjFYywjgw6efXr+/D4+/KWoYHb67GNK5lYJhRTcb1PxBCvAbgtTSLMap4ZtMh/OTlnSACfvGJ\nBVg0mWsZGGa0w57DKOedfUfxjT9sASBrGa6cx7UMDMOwcRjVvN/YhVtVLcOys6fglnO5loFhGAkb\nh1FKU6euZbj8tHH4zjW8LwPDMBo2DqOQnmAEn31kHQ61+7Gwqhg//wTXMjAM0xc2DqOMSDSGL/1u\nA9471IkpZbl48DNcy8AwTDxsHEYRQgh855n38NquZpTmefDw8iUo41oGhmESwMZhFPHAa3vxxNo6\neNW+DFPG5KVbJIZhMhQ2DqOEP288hHv+skvWMtywAIsml6RbJIZhMhg2DqOAf+5twTeekvsyfOfq\nObhiLtcyMAwzOGwcRji7G7vw+d+uRzgqcMu5U/FZrmVgGGYIsHEYwTR2BrB
s5Vp0BSK4cu54/MdV\np6ZbJIZhsgQ2DiOU7mAEy1etw+GOAM6oKsa9n1gAB9cyMAwzRNg4jEDC0Ri++PgGbD/Sialj8vDg\nzYvhc3MtA8MwQ4eNwwhDCIHv/Pk9vLG7GWV5Hjy8fDFK8zzpFothmCyDjcMI476/7cGT6+rgc8t9\nGSaXcS0DwzDHDxuHEcQf19fjf9bsVrUMC7GwimsZGIY5Mdg4jBDe2tOCb/5R7stw17Wn4fLTxqdZ\nIoZhshk2DiOAXQ1duO236xGJCfzLeVNx89lT0i0SwzBZTkYYByKqJKK/E9F2ItpGRF9Nt0zZQkNH\nAMtWrUVXMIKr5o3Ht6/kWgaGYU6eTNlDOgLg60KIDURUAGA9Ea0RQmxPt2CZTFcgjOUPr8ORjgCq\nJ5fgZx/nWgaGYZJDRngOQogjQogN6nUXgB0AJqVXqszGqmXYcaQT08bk4f8+U821DAzDJI2MMA4m\nRDQFwEIA76ZXksxFCIH/+NNW/OP9FlXLsAQlXMvAMEwSySjjQET5AP4I4A4hRGeC47cSUQ0R1TQ3\nN6dewAzhl6/uweqaevjcDjy0bDGqynLTLRLDMCOMjDEOROSGNAyPCyGeTnSOEGKFEKJaCFFdXl6e\nWgEzhKfW1+PeV3bDQcD/3ngGFlQWp1skhmFGIBlhHIiIADwEYIcQ4mfplidTefP9FnzLqmX44Gm4\nbM64NEvEMMxIJSOMA4BzAHwawMVEtEl9XZVuoTKJHUc6cdtjspbh8+dPw2fOmpJukRiGGcFkxFJW\nIcSbAHgN5gAc6fBj+ap16A5GcM3pE/DNK2anWySGYUY4meI5MAPQGQhj+ap1aOgMYMmUUvz0Y/O5\nloFhmGGHjUMGE47G8MXHNmBnQxemledhxWcWcS0DwzApgY1DhiKEwLef3oo397RgTL4HjyxfguJc\nrmVgGCY1sHHIUH7+yvt4an09ctxOrFy2GJWlXMvAMEzqYOOQgayuqcMvXn0fDgLu++RCnF7BtQwM\nw6QWNg4Zxhu7m/HvT28FAHz/urm45FSuZWAYJvWwccggth/uxBcf34BITOC2C6bjpjMnp1skhmFG\nKWwcMoTD7X4sf3gtuoMRXDt/Iu68fFa6RWIYZhTDxiEDsGoZGjuDWDq1FD/92Olcy8AwTFph45Bm\nQpEYvvDYeuxq7MKMsflY8elqeF1cy8AwTHph45BGhBD41tNb8Naeoygv8GLVssUoynWnWyyGYRg2\nDunk3jW78fSGQ8j1OLHyZq5lYBgmc2DjkCZ+v+4gfvm3PXAQcP8nz8C8iqJ0i8QwDGPDxiENvL67\nGf/+p/cAAD+4fh4umj02zRIxDMP0hY1Ditl2uANffGw9ojGBL144HZ9cWpVukRiGYeJg45BCDrXL\nfRl6QlFct2AivsG1DAzDZChsHFJEhz+M5avWoqkriDOnleK/P3o65O6oDMMwmQcbh2Hm3nvvRSgS\nw22/XY/djd04ZWw+fnMT1zIwDJPZZMQ2oSOV+vp63H333djuOw1vHwjLWoblXMvAMEzmkzGeAxFd\nQUS7iGgPEX0r3fIkgwceeAAdHR1Y/duHkOtxYtWyxago4VoGhmEyn4zwHIjICeB+AJcBqAewjoie\nFUJsT69kx8f3v/99vPjii/D5fACAnXsPAAB6d/4D7kgdvvyGB4FAAFdddRW++93vplNUhmGYQckI\n4wBgCYA9Qoh9AEBETwK4DkBWGYfbb78dzz33HF5//fU+45G2I9iy7ggAoLq6Grfffns6xGMYhhky\nmWIcJgGoM97XA1ja/yQiuhXAreptNxHtSoFsx4sTwEwAieJHvTU1NbtLSkqiKZZpJDEGQEu6hRgh\nsC6TS7boc0gbxWSKcRgSQogVAFakW47jgYhqhBDV6ZZjpMD6TB6sy+Qy0vSZKQnpQwAqjfcVaoxh\nGIZJA5liHNYBOIWIphKRB8ANAJ5Ns0w
MwzCjlowIKwkhIkT0ZQB/gYzZrxRCbEuzWMkiq8JgWQDr\nM3mwLpPLiNInCSHSLQPDMAyTYWRKWIlhGIbJINg4MAzDMHGwcRgmRmI7kHRBRCuJqImI3ku3LCMB\nIqokor8T0XYi2kZEX023TNkMEfmIaC0RbVb6vDvdMiUDzjkMA6odyG4Y7UAA3Jht7UAyBSI6H0A3\ngEeFEHPTLU+2Q0QTAEwQQmwgogIA6wFcz7+fJwbJ3vt5QohuInIDeBPAV4UQ76RZtJOCPYfhwW4H\nIoQIAbDagTAngBDiDQCt6ZZjpCCEOCKE2KBedwHYAdmlgDkBhKRbvXWrr6yfdbNxGB4StQPhPz4m\n4yCiKQAWAng3vZJkN0TkJKJNAJoArBFCZL0+2TgwzCiFiPIB/BHAHUKIznTLk80IIaJCiAWQ3R2W\nEFHWhz/ZOAwP3A6EyWhUbPyPAB4XQjydbnlGCkKIdgB/B3BFumU5Wdg4DA/cDoTJWFQC9SEAO4QQ\nP0u3PNkOEZUTUbF6nQO5EGVneqU6edg4DANCiAgAqx3IDgCrR1A7kJRDRE8AeBvALCKqJ6Jb0i1T\nlnMOgE8DuJiINqmvq9ItVBYzAcDfiWgL5MRwjRDi+TTLdNLwUlaGYRgmDvYcGIZhmDjYODAMwzBx\nsHFgGIZh4mDjwDAMw8TBxoFhGIaJg40DkxKIaC4RCSK60BgTagfAoV7jTvPzSZLrQiVH1le0EtFd\nRNSSxvvXEtFP03V/JrmwcWDSyVkA/nAc598J4MLhEYVhGJOM2EOaGZ2ks6WxqhL2puv+DJPpsOfA\nDAtE9EUiqiOiHiJ6DrKKtP85fcJKRHQuEf2DiDrV1yYi+pg6VgugDMD31OeECglNUa+v6Xfth4mo\nxnh/FxG1qHusAxAA8DHjIxOJ6Hkl70Eiuq3f9c4iomeJ6Ig6ZxMRfarfOcuULPOIaI06bycRfTjB\ns39IbRDjJ6KjRPQiEU02js8loheIqEt9/YGIxg9J+X3vU0pEK4iokYgCRPRPIlpqHH+NiOK8NyK6\nR+mB1HsfEf23+pkG1cY2XFU9gmHjwCQdIroOwP0AngfwYQBbAaw8xmcK1fn7AHwEwEcB/BZAsTrl\nQwA6IHsCnaW+NhynaLkAHgHwIGRjtLXGsYcAbFHyvgjgV/0MzmQAbwG4BcC1kE3rVhHRjQnu8zvI\nXlofAvA+gCeJqMJ41k8DeBrAXgAfB7AccnOocnV8hrqXD8BNAJYBOA3Ac9Y/66FARF4ArwC4FMA3\nAFwPoBnAK4ah+T2Aq4goz/gcKblWC91C4Sklx4/U868D8CwRLRiqPEyWIYTgL/5K6hfkP92X+o39\nH+QGKBcaYwLAl9XravW+YJDrtgC4q9/YFPW5a/qNPwygxnh/lzrvun7nXajGV/QbXwPgnQHkIMiQ\n7G8A/M0YX6au9VljrAxABMBt6r0DskPv04M8528B7ALgMcZOARAFcPUgn7sLQIvx/hYAIQCnGGMu\nSKN0j3pfruS7wTjnLPUc1er9Jer9Bf3u9waAPxjvawH8NN2/f/yVnC/2HJikQkQuAGcAeKbfoWO1\nhd4LuRXo74joOqvLZZIRAF4a4Nif+r1/GsAiklu+gohKiOiXRHQAQFh93QpgZoJr/dW+oRBHITeA\nsTyHWQAmAlg1iJyXKnliRORSOt0P+c+3epDPJbrOegD7jesAwOvWdYQQzQD+BuATxuc+AWCvEKLG\nuE4DgLes66hrvXqc8jBZBBsHJtmMAeCE/Ido0v99H4QQbZCtjt0AVgNoVjH3aUmUrU3IbVsTkUhe\nF+TzANIT+QSAewB8AMBiyFCZL8G12vu9DxnnlanvRwaRcwyAb0IbIetrGvruE3IsxgA4M8F1lve7\nzpMAriSiQiJyQOZift/vOuMTXOeu45SHySJ4tRKTbFogwx9j+433fx+HkKuXriDZE/9SAD+DjN+f\nOcj
HAuq7p994SaJbDHKdRPJGALQQkQ/ANQC+JIT4tXWC+kd6vBxV3+MS9AatkJ7DgwmOHU8dQyuA\nGgBfSHAsaLz+E4BfQe5zfgDSszGNQytkKOz647g3k+WwcWCSihAiQkQbIf/R/No4FLdiZ5Br+CGT\nr3MBfNs4ZM7ALZogZ7GnWgMkt788G/If3VD5EPqGnD4EYL0QIqqu54DxD5WICgB8EMe/kfwuyH+0\nNwN4boBzXoVMQK8XKph/grwK6eUcFEIM6LkJIdqI6K+QntEByE2AtvS7ztcBdAshsn4TG2ZosHFg\nhoMfAXiaiH4FOSu9AMfYNpGIrgbwWQB/BnAQwCQAn4eMh1vsBHA1Eb0MmZ/YJYToIqJnAHxN5QPa\nIf+R+Y9T5iuJ6IeQ8fgPQ4a4rgMAIUSHWv76XSLqBBAD8C3I1VOFx3MTIUSMiO4E8DgRPQ7gCUgD\nczGAJ1Sc/y7IpP4LRLQS0luYpGR6WAjx2hBv9yiA2wC8piqX90GGtZYAaBBC3Guc+3vIMFkHgPv6\nXWcN5MZVa4joJwC2qedeAMAnhPg2mJFHujPi/DUyvyB3wqsH0Au5NPQDGHy10izI5ZJ1kDP0ekjP\no9Q4fxGAdwD0mNcCMI2O4woAAADHSURBVA4yAd4JOfO9FYlXK7UkkPNCda3LIT2HXnXvL/Y7bwbk\nDLoH0njd2f+a0KuV8vt9thb9VvFAGqD1kGGxowBeADDZOD5b6aMV0tDtgVwdVTGIzuOeEUARgF8o\nvYbUsz0N4Jx+5xWoZxcAZiW4thfA3UqOEGSC+mUYq6cSPSd/Ze8X7wTHMAzDxMGrlRiGYZg42Dgw\nDMMwcbBxYBiGYeJg48AwDMPEwcaBYRiGiYONA8MwDBMHGweGYRgmDjYODMMwTBz/HxdW0cxOd4mH\nAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "# DEBUG:::len(data) = 4\n", + "# DEBUG:::len(imgs) = 4\n", + "# DEBUG:::len(data) = 4\n", + "# DEBUG:::len(data) = 4\n", + "# DEBUG:::mi-0.1*r = -22.0744677039\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAY0AAAEPCAYAAAC+35gCAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAIABJREFUeJztvXmYXVWVsP+uurfmSqUyV+aETECA\nBAlDBBSZJwUcAtoIKs+H/bW2gv4c8LNtwOGnjRrtT2I33dqCihAVmsHQEGaFBAiQgSQkJCSVVEgq\nQ83jnfb3x9737l3JTXIDVXWrKut9nnrq3H3O2WeddStZe6+19tpijEFRFEVRcqEg3wIoiqIoA4do\nvgXoKUpLS3d1dnaOybccg4Xi4mK6urryLcagQfXZs6g+e5eSkpK6jo6O6mznZLC4p0TEDJZ36Q+I\nCKrPnkP12bOoPnsXp1/Jdk7dU4qiKErODBr31AHUvwpAa2trpikejwNQEIlk2lL+kGQy4X7HM20F\nzqwm43ZUU7erJXOutSUGQEmZ70QKYr6/WHr67NVcQKF7hjXiFdEhvr+mffYciUxbc4OXJYl9Tvno\n/V8WRGy/hYVFmbYiKXZ9+Gn8+nUbM8eNLTsBmDTZz0KrJ0w/sHOAhlUAtLW1ZZoSCSunBPo0ueoz\nYfW5p87319Zqrysu82OZAgneP36gPsUdJ5P2nvJoue+vuQGAVKDPliZ/nNZn2Yj9XxakwPZbGC3M\ntBU6fbY2eX2+teHtzHFj6y4AJkz0X9DosVMP7BxUn6g++5M+P/DhTx34kIOgMw1FURQlZ/Iy0xCR\nCLAC2GGMuVxEpgL3ASOAV4FPG2NiIlIM3AOcAuwDrjbGbM3lGctedSOPjo5MW1d7pz2IJjNtMbxV\n7ursdL+9tQc7I4i7kUdJtCpzpqTIjhq2rtqQaeuM+5lNLGGfk/CDEeJuIlLgRgpjh/tR/u4dNbaP\nWLt/RtQPM8ZOnATA26vX+me4/iLYGUtESjPnOtwsq2br9kxbzRZ/XFxq3+nsD5ySaZtXkmUaA6xw\nz2zv6My0dXU63UW8PuP4mVbMBSq7ug7UZ8Lpszg61MtTZGXfvnazf0bCj/Qy+gy6S+ynzzFVXv49\nu+y7dsW8zMXB91c9bgIAW9f6789NRinAfrehPjvdKLZ2+45M2/Zt7/i+S+w7nT5/Tqbt5OJRZEP1\n2b/1ec21/5T1OYMVY/r/TOPLwPrg84+AhcaY6UADcINrvwFocO0L3XWKoihKnuhzoyEiE4DLgP90\nnwU4F/iTu+Ru4Ep3fIX7jDt/nrteURRFyQP5cE/9DPg6kI4AjwAajTHpOWItMN4djwe2AxhjEiLS\n5K7fG3YoIjfu/5CXXnsFgGTSp+W1p4NkwXS1LdacOY512fZwCh2J2OBSMmGDUiNHeJs1alQJALv2\n1GfaOuKNQd8HTv/TMbeiQvsMifjA2O7mdtfmr49F/PPK4va4qdPb+pZW94xOO+1ua/HT83177bS/\nobHB95fwgfLKiA2i1Qfp7u3x7GmMr61eaeUP9NnR4dxoBV6f7aF7LqPPkkxbJGKfn9bn8OH+/UaM\nsLrevc/L25nw3097Nn2646JoWp9lmXN7nC4KAn3GI153zQn77OYu39bW5p7RZb+o9lavnIZ6G0hs\nbPLfcTzQJxH7z6kx1GdC9QkDT5/KwenTmYaIXA7sNsa82pP9GmPu6sn+FEVRlOz09UzjTOAjInIp\nUAJUAj8HqkQk6mYbE4B0ZGwHMBGoFZEoMBQbED8sx06zQeNokU9Jq3cjroYmPzNo7fLDpkTcpbiV\njMy0lRRVAJBK2hFIeenwzLmKcjsKmzV1VqatodUH8tpdIK6zy0fCY512JJQe0ZSX+pTbsZOm2fva\n/WgoFfcjn331duS0c5cP7u+us+/U2WHtf2urf1ZXexMARUW+j
4KoPy4utbOcqqE+uCgmiNoHTJ9i\ng5zRQv8n09Bo+29q9iPZtlCfbvRbVux1Vlxon5lK2e+lrGRY5lx5mdXn9Ek+7bexbVfmuKPLzqa6\nYoE+u5w+C4pcfxWZc9UTJgPQ3u6DvyYYydY3Wj3u3u31uXevfacup8+2tkCfHTbdurDQ/01JqM8S\nK//QSp+8ICnVJww8fSoHp09nGsaYW4wxE4wxU4BrgKeNMX8HPAN83F12PfCQO37Yfcadf1qXfSuK\nouSP/rJO4xvAV0RkEzZm8SvX/itghGv/CvDNPMmnKIqikMcV4caYZ4Fn3fHbwGlZrukEPvFu+j9m\ngp1yJoLA2JgqGxjsjHn3UyLpA4dpF8+OHX46u3XzbgDq99kpd1Gwgru62j5j2Cg/5T1u0pTMcYnT\nbiyW8oK5Jalxt0J0T7O/d+sO6zpoqPPus13v+MBlc6t9l/rWpkxbl3MxpFe7x2I+qtnl3AhDR43P\ntEWL/fS/o9Wej7V5d9jQ4iDKGTBlXJWT2+tz1FCrz664n96H+uzqtMc7d3p5t2+1OQwNDWl9+pyG\n0aPtM4aO8DLMnDAxc1yc1mcYrHdL+tPP3dfq792+0353TXt8oHV3nV/R39Jm+2ls8zrucq7E9Gri\nbvp0gdXKEWMzbdFgRW5nmz0fC9w3lapP298A06dycPrLTENRFEUZAKjRUBRFUXJm0BYsLC2009gk\nfrpaWG5dSx3x4kzbRud+Ati8aRsAK155I9P2Tq0939Zs3UCxmM/5Li+1/Uyb6UuBnHLKsZnjKePs\ntLiiwme0FBfZjJDNW+2znn9+lX++m/53BNP7jg7/vETCTt3D/JFE3LolOrvsFD4ZeMLSx5Fin4de\nGGSXtLsssu07fEaNKZhLNkqiVp+FgT6jZTazpjPIoNm81Se31WypBWDlyjczbbve2WOf3WLfIh73\n71dWYvU5+RhfumLOHJ/5M7HauiDKy4NSGYVWn1u322ctW+5LrGzdZV0NnR3ehdLZ4RWUSFhdhEUk\n4gm7FiHWZV0jydBz424tKPZ/P9FooM9m67Z5Z6f/mzIFJ5CNQ+lz3Ikfz3rPYOXtrf7fwIc+8LHM\n8bvR57R51/aGiEqAzjQURVGUnBm0Mw0TT4/kfOAsHrc52M8v8+XBH1myPHPc1GADYnvrfSDaJO1I\nWNzvZMwXPGvtsKP7+tf8SLZ2h7933Cg7wygv87nuUTcD2uaKtK2r9aP8TidrJFgFHq5wTcTtKDge\nzHa6Eq3uOitXKpxpYFf1toUVE4u8PiJOrl0NfhV5Q0f2cUTK5bRHCfPc7Sh/2Su+IN4TT/l1m81N\nVi/1DT6xIKNPJ2gq0GebC4w2rPGB+Z27/L3VI+xMo6zU59JHC628tW62tPEdPyrtcmOigmDVciqY\nisUTdh1NIpjtdMXtd5py6wFCfaawM7aOUJ+Fvu9ImZVrd1Cauqkje9WbQ+nzaCYmXuHvRp9K76Mz\nDUVRFCVn1GgoiqIoOTNo3VNFRdaVEMHnfa/baPcDuPuexZm2Xbv91LfYBTjb231belevSMq6MKIF\ngTvClT+IRLwaG5u8a6W9ucX1ERRIdDnnXc4t0xq4J5JO5qIguBoPqsnFXLmHRMy7rNJ9S8YF4909\nhUkrS/sev19BwXBfMmT0BLtrVzJYZL97j5c/pKjI6iYS/Mls3GxLpiz+48PB/V53RU6fHaE+U/Z9\nsuozeqA+m5p9jn5HS9oVFxSgc/ulxLLoM+VkLjS+v0SgzzfWvpz1XQ9H7bZVWdsv/rCt6B/qc8++\ntqzXHkqfRzOdwU6bo8bZUkBHok+l99GZhqIoipIzg3amkSkZnfTpoM+++DoA6zbVZNpGDferpbvc\nTl6xIDibciPilHEj46D4X8TtPRwWRYwEdaOT6XsLfPAu4nZTi7r658XG2+12F5Dt7PTPDwttpa+M\nBjG/YldQ0bgdCJMpXyzug
vmnA1BSWJlpe+mNLf55rTbwHA9G4hvWbyIbkQK3t3nKv/+LL6+29wS7\nAY6o8qt70zujxeI+cGycHk3qQH3+dc2yrM9+t8yeZXckTK+ah+767Gna26zuE4E+N721Jeu1aX2O\nPOHDvSjRwKNY/Ezw3ehT6X10pqEoiqLkjBoNRVEUJWcGrXsqvS9AU6t316x6cysAbd5bQVW4l0As\nvdbB54on03n9mS33vDslvU9FQTeXVHiv/Z0KAnnGuaUKIjboXRoEfRG36jxwp9AtLd0c0Fjg1g50\nxqyr6ZhpfjX15z57hX1GoV9BXfHQM5njx56zAd1IsT+/d68PMoek9dnc5vW5dpN1S7UH4g6Ne/dC\n3OnTmFAn7h0yW8T13k5ppYU2OBpPBQL24jCpvcmuFYkUe3dgff2h9al05+z5fsX3Uy/a1f2qz/6F\nzjQURVGUnBm0M42oC4Q3tfoyzjtdOmkYWEsE6ZvpVM4wLTOV7B64TQe/Q5Lx7LuJxV2QO9ltcFTQ\n7VcYmS13KbelhQcG28HveRymnCbdvuEFrp+Tjp+aOVdYakfYBQU+TfGU9/nzDQ22NtPmbb5eVN1u\nr6+QqNtPvKXNr36vc+mPB9Wn01kimH2kEwsy+izovZW8ZS4N0wTltguyfH89xSmzbRJATa2vCrBn\nT1PWa9P6VLpz0omTMseNrjaa6rN/oTMNRVEUJWfUaCiKoig5M2jdU+nV03v3eXfL3kbrnkqFq7CT\n3nWSXnkaz+KyKslSJy29/qDbtuXiL0yHfxNBcDwdFI+460xQkDB9Z+iSKgwKDBa4UuwdHX6FdaLT\nyj95gl1vctqpczLnOmJ2Ki+lXrwpk3zQ++8+dg4Az/9tZaZt4/Y6ANas7/aqxJ2Lqb7R67O+ybqn\nzMH0mbLvlkiF+rS6KM5ed65HSbhAfDd99qJ76mOXvx+A5ct9af3NrhT8c/tdG48nUA5k4gQf9FZ9\n9k/6dKYhIiUi8rKIrBKRtSJym2v/jYhsEZGV7meuaxcR+VcR2SQiq0XkfX0pr6IoitKdvp5pdAHn\nGmNaRaQQ+JuIPObOfc0Y86f9rr8EmOF+Tgd+6X4flsYOu5HRpm1+tXJ6L+wi8SPfmPFB7HRrIhiN\nZq509yTxZcQ73KZIiYOoMZLqytJqR9rJTM0l/yzjRuFFwQryjmB1Oi6gmwz2SS5O2sD02afbzYqO\nnznOP1/su4Xl4VMR/+5DhtmA+/lnHZ9p+2DiRAD++3+e7CZ1U4fV3ZZaXx8pvXdzYaDPuAlmGum2\nINidnnMVZ/SZTUc9Q5eb9RRJUBo91GcPM2qo/X4+cMbMTNv8+HEA3H1f9z/ttD6V7pSFf5+qz35J\nn840jCX97Ra6n0MlWF8B3OPuWw5UicjYQ1yvKIqi9CJ9HggXkYiIrAR2A0uNMS+5U993LqiFIpIu\nWTke2B7cXuvaFEVRlDzQ54FwYyvWzRWRKuBBETkBuAXYBRQBdwHfAG7PtU8RuXH/tnQwe/OWrZm2\nrlbrTikI3TUpP9FJOedJJNjpTVzAOt0Ui3v3RsxNkpKB6TXBam1JHbh+o1vQHIgGX0E6KC6FPuc8\nvSNfSDJwn40fbfc9P2n2NADKgxXW8Zh7n1QgU1AgMd33iBFDMm2Fqez57ulg9tbA3dflVocXBGOP\nVLD62zj9hDsRFji3XNoDF0v0nrso5tbbSFBqviCZfUVxT5B0O/oNr/L6jJrs/8TC5ADFE0kFf58p\n1Wd/JG8pt8aYRuAZ4GJjzE7nguoC/gs4zV22A5gY3DbBte3f1129La+iKIrS99lTo9wMAxEpBS4A\n3kzHKcQO668E0jl2DwPXuSyqM4AmY8zOvpRZURRF8fS1e2oscLeIRLAGa7Ex5lEReVpERmGXKqwE\n/t5dvwS4FNgEtAOfzfVB7W3WNbFta22mLe0KiRZXZNoKCNwpLtsmGrhTjMsBirs9LuIF3p2
S3hsj\nrDmYMP7eLt91hpTpvkChMBWU2HBrS6KBOyV0ZiVd4b1U3Nv66snVAEwcZwsVFkX8ooy0a60kKEuS\nDNdMuAymqAQuqYOU9Whvt3LWbvfZU/GMPsv97YHEaX0WZ9FnwhVlTMR70T3l9vMoPIg+e5riqA3F\nRSX8Z3VofSrdSesQ/N+q6rN/0adGwxizGjg5S/u5B7neAF/obbkURVGU3Bi0K8I7Wu2ovX5fWODM\njjNLy0syLd0Coy7wVlnqR+bTj7EB5nUb7I52Ta3BHsXpld4Rv3ZDCn3fyIH7FqcDwOmV5slEsN+3\nC9yGBQkj3QLb9nxYanzMSLvCe1ilnT1J3I/Eyovs6tpoMLyOBcF5cWsqEl3+eXKQBdOdbubWWN8c\ntNqOS8r8e0qoT9f/kGBnw6mTpwCwYbPdga25rT37A3uA9P7q4eyqMJh1nTDLhs5MsGL/8rNnA3Dt\nJy4AIBqsiUnPQEN9hiuR099fIhHo8yAO4LQ+le5E8X8r4hI4VJ/9C609pSiKouTM4J1pdNjhYGuw\nCVOa8lLv9091+vOtCTvqPXGWX4H68Y9cBsC/3fsgAK+u9HVwylxdqHFjhmXa6vY0eBnidgYSbtIk\nbqiUVnxB8BWkJD2y8qOmosID9x/vCGY7Qyrs/UWum3AzmnTNJROs2C4t9O+ecDOBZLAKN0721MWO\nTjsab2vrPOBcWYmfXaW6/Pk2N4s4brrfWOfDF9kR/N0PLAFg1Zo3M+fmTLVl28eMGp5p27vXzxSb\nnVok0GeBdI8RSVimPXFgafbCoEx6xE2rOjv9bKeiPOqus5/jQWAqve98qM+SYGaZ1mc00GeCLIEt\nvD6V7qg++z8601AURVFyRo2GoiiKkjOD1j211+3wldnjG5/KGu60lwpWJE8ab8taffgin8x17DF2\nbeGcE6yLZcVrazLnxlWPAeBz116dadvw1sbM8WtvbACgudkXU+vqsu6wdHnzjph3r6SligfyxYJ0\n0UKX4ls0xJePnjrFypd2Y0UKvPsp6sYEhRKsTA9Wh0edl0eCNiLZa5bX77WFEZNBKfet72zt9vtg\nrN/uV5H/+qHHDnrdhafbwPSnPn5lpm3T25szx2vetMkILS3ePdfl3GGdnVafnXEvf9opFbr74gmv\ni6hz1Q2p8CuOJ020BR+LnH8qUuDdJWl9RiUIumbVZzAWi2R396X1qXRHUmHiQbpN9dmf0JmGoiiK\nkjODdqaRXghXXOpHioWF6T22/UgxEgR+P3rlRQDMP+3ETFs6QbN67Ch7fbB4rnqkDdieepLfd/us\nU2dljs+tOxWApmDjoqZmm7K6d4/dl7srmGls3GyD6M8sW51piwUje4yVddwYHyieOd3uqVwWtTOq\njrh/n4JIugy7D06HGxIZFyCOBfWtwtpZISmT1ueBacQ9xejhVQDMPd7vE3363OmZ42175wLQ3BQE\nx1vsLK5+n91HOtTn2zX2uhdeWZtp66bPqNXVmFFVmaZpUycAUObyajvifqZaELFtqQJfzj1McjBu\nFhMP0ppTh9Gnsh9RrzvVZ/9EZxqKoihKzqjRUBRFUXJm0Lqnikut26BySFmmrajeBlA7g7UEsyZ7\nV8/551p3UkVpsEq7y+0Dng5TG7+ieNok68oYXhGsqwg0WjLDrt9Ixn2tq+R+pdGLxPf37DK7Svpv\nK3ywvS3mg76RIusqiSV9f12d1j0TwbrNyouCNRMl9lldgftJAvdUtNiOGSLtQR58Mnu+e3GJva+i\nojTr+Z5gygQbhB5WHq6r8OdnTLVupGQi0Gequ7xFBV6fL75SA8BLr/sNz9tjfk1GpDC9J7yvndXV\nlS6fb/VYVuTdcab4QH0WBPoscBufJzq8PlPJ7NWu0vpUulNQFOhW9dkv0ZmGoiiKkjODdqZRXmJf\nrWqoT0+NyB4Aioq8rbz8ojMzx9On2RTatj2+kms6Zbe
p3gZVwyqys2bNAKDUD3xJBivMcYfNDT4Q\nXuRqSZWX235iSR/UbWza4Z7p20YO850Xu5lBw976TNtbW2yl+BNn2Gq3yYS/N+mCwnH86C3I5s3I\nUhIEt5MHGcmVFlt9Dq0ckvV8TzB92jFWHj/wJxnMCtPbs7c2+ncsdHWtysrszCAW97Wxmlp22T5S\nvm34UN95sRvJNtX7VfxbttUBcNxUWzU4afy9KafPBF6JiSCFt9ClPReVeH1GD6NPpTuhF0D12T/R\nmYaiKIqSM2o0FEVRlJwZtHO6Ua7oXTr3HwC3PmPS+NGZpvcd79dYdDTvBiAR9yu4OzttEG7PLuvq\nKI36QGtFpQ2ydya9S6qrwweukzF3bcy7MMrLrbupKF2DPPgGosX2WRVlPvA3f55f9zFmpF0rsnjx\nXzNta9a9DcDZZxwPQHGBf75xxRHjBwlud3V1HdAWbgAVMmKEDeqPrBqa9XxPUD7EuuzGnPqZXnvG\n56/zq81HDR8BwEMPLc+0rdtgg+enn2KLVhaLD5ynnD4TqZ7T52DnK9dfA8BnP2ULVR5Onx2Be1f1\n2T/RmYaiKIqSM2o0FEVRlJwZvO6pYTZramiwTiPqTOR0V+QPoHqEz05q2GNdUCTD3disi2l8tXV3\nlQXLFMTtqxDuGdYWC9YYJO0DK4d5l056XUEs5vb7LvJT8+pq636aPs2X0Zg1bVzm+MxT7TqSN1a9\nnWlbt9Eeb9tjXWonzgye5fbziCR9DnsyKN6X3iEwLJCYTB64XwbAyCqrp8pg18OeRiR7MbqeZPrk\nMZnj0062Ow+/ubYm07bxbXtcu9e6+Y6f5rPvkgnrLjmYPlNOn4luOj7QxQJen4Md1efgo09nGiJS\nIiIvi8gqEVkrIre59qki8pKIbBKR+0XsijcRKXafN7nzU/pSXkVRFKU7fT3T6ALONca0ikgh8DcR\neQz4CrDQGHOfiPwbcAPwS/e7wRgzXUSuAX4EXH2wzkOKXF3lsW70DjBsqB2NTJ3oZxpDi/1MJJ60\nI+4kPthd4PbZPu00Wxr9scd90LS+3q77qG/w13e0+dFyWYmdRRQHe2Sns8zjbt/qwmCnskK3BiQq\n/vqJY/xMY8o4O9v58OVnZNr+455HAHjpNVuSfcYUfy49fmtrDVZBB6XPCyP2ORXDvA5iQRJASKHT\n55jRI7Ke7wkaG/f1Wt9pxo2qzhxPHGOTJC684JRM2+8WPw7A62vsDG7aRH8urc+OYF/zgkCf0QKr\nz6FDA30mDq3PwU5zhw1sqz4HD3060zCW9Lde6H4McC7wJ9d+N5BOcbnCfcadP09Espe5VBRFUXqd\nPg+Ei0hERFYCu4GlwGag0RiTdlzWAuPd8XhgO4A73wQcMNQVkRt7W25FURQlD4FwY0wSmCsiVcCD\nwLE90OddIvLvYVtlpXUrTZwwIdM2YrgNEo8b7V1WJLzdLHBuqWRgS4tLbdmM8eOsO2nSRF/gcEuN\n3ZHu2ON9f/FYEMx16y4KggJ3KRcI73DT9uFFIzPn6txugx2tfq1FWamPvDfU20D9cbO8i2WS2//h\n1TW2KN+8E707a0RFOpDoZQp3MkzvrTG+yPfXGQvD+p4hFda1N278uKzne4Ka7e8c/qL3SFmJdwc2\nNdp1OTOn+3U7Eybb91u13rr75hznA+fDytP69DpMJcP9NpxLtMj313UYfT7yX/8303bPHx4C4MqL\nz8u0fWCOT4pIubo0iWC3wGkX3JC1//5CX+tT6X3ylnJrjGkEngHmA1UikjZgE4Ad7ngHMBHAnR8K\n9L7jW1EURclKn840RGQUEDfGNIpIKXABNrj9DPBx4D7geuAhd8vD7vMyd/5pY0z2imX7Ud9mg9ov\nv74u0zax2o5GZk73s4WmTp9imnI9R4KgWnu9LQ5oCuxs4dTT3pc598Irtu/3B5uGVQX1/Oob7Myh\nqc2vcm1Lp9q61bA
dQZnz1WveAGDClCAtscgX59uy047EK0v96te5c2yRv+eXrQBgb4svsFdWYd8j\nGUSBYnEfZE/GXfqiG3EDdASyhjS02xHe66v9Hug3XXsFANd87JJMW2WxLy6XcjsNRoJVvAlXcC4p\nVg/Pvbghc+7llb7v3iJZ5PVTs9sWJxxS4tOUTzh+MgDLVqwCoL7VB15Ly+13lgyGWvGuQJ8uhZSm\nPZm2zvbsKczZ9Dl+tC2mOG2q/35buoJZapa/z/5OX+tTOTiP3P1tAIYO87Ptg+nzUPS1e2oscLeI\nRLCznMXGmEdFZB1wn4h8D3gd+JW7/lfAb0VkE1APXNPH8iqKoigBfWo0jDGrgZOztL8NnJalvRP4\nRB+IpiiKouTAoF0RvmmzDYv89YVXM22XfGguAMOD1aOxTj/9j7hihEVBsNS4AmpSYH0DJ5wwM3Pu\n6RfslHt7rZ8+jzgxDIrb6XVnzHvU9jbZ/vbUWzfJus7tmXMdrtjhKe87PpApKI6X2UvATymPm2UD\n4Rs2bQZgy5ZdmXNTJky3/cZaMm2dCS9LgVuB3dHuA+9BbcVubKmx+3Ysf3lVpu28s+zalapKv0dF\nvCvUp9sPIXBZGbdfgoiV49hjp2XO/fUV70rsLSLRwLuZCbr6VcYzptvA7aYtWwGo2eZddxPH2eKW\nnXGvz65An1Jg9dkZFN2LH8Rr0lP67O/0tT6Vg5NM5a7PQ6G1pxRFUZScGbQzjbWrbQpqSbCH89wT\nbe2m4qiPVhdV+JFceie7SMQH4xJxez4de6we44OmKZdKu3mjny1MHOFXhzc32R3hyst8mmHjHmvl\nn37yFfssvNU/91zroRsx3KfhJrr80L8kLUSBl3lYuZV12ng743inxu/qF5vnrg8C9VF8+m1RkYuQ\nJ7ONvruzYe1bABQH+jzhODtzK476mVtR9MCd7CIR/2eWrnOVfpXRo3zQ3wSpyb1Fosu/f3FaCPHP\nHVpmv98pY+0Iua7W7+oX63SzokDMSKDPwkI3Bktmmx12p6f02d8ZWmZl7St9Kgen+Aj0eSh0pqEo\niqLkzMAZshwhLQ12xH3lRy7OtJ1xiq17U1zohzZGvD87vdgt3Kc42m5VFEkvIwl8zKNH2lHyvl0+\nprFzhx85F7nnxCP+nq1v2VlJZ7NtmzrVzypGuQ2jEh3++eGgMlVqZwbpCrkAgo2NjHaLGd+srw1k\nsb7iieN97R5KvBMzErEjjgLxY4dkLPtov9XNmi65+NxM2ylz5gBQHIyGU1n0GW6mU1gYdc88UJ8j\nhvfe/uNpksFMI1Xg9BlU+S1wtbdGDrGybGrwCw5377Tnxo0N9Fl8oD79kqPe12d/x3T2rT6Vg1NW\nbP+d56LPQ6EzDUVRFCVn1GjIkFs/AAAgAElEQVQoiqIoOaNGQ1EURcmZQRvTOONUm/M+fJwvxtfa\nYnPEO4MMjUTgM065CiWJwMedLoVRkHKZK/iaHBecexYA22r8TnpjR/lMqbEu02pXnS9dMXSIza66\n9JIPADBxvI+BDKmwfUeD7K1oUfgVJdx5v44k4mowjKywbS0tPntqzao3AZgw8qRMW1Gpf/f01gVF\nhT7ji6LsledPmWvrSg6r9sXj2lr2AtAV6jPI7Eq5uhfhzmvG6VNSLlYU6PODZ50OwPQpXofHHuOf\nN8ZlWu3e4/P6X37VluEor7TFj8eP9XGRinLbd6TA6zCrPgsCnbhigMPL7VqIltbGzLn1azcBMHa4\nX0dTWJJFn1H//fW2Pjc+9Tv7Dinb1prwz1uzya5V2lG7NdPWF/osLMyfPl/5y+8zbade9ndZrz1a\nibq1Ubno81DoTENRFEXJmUE705g8xY6U9rnRG8Cud+woXII05Zjxs4p0xrIE2UQYe/HwclvkMJny\n54YNdSt3J/gtPopDy+0GTcOG+dnEmWfZDJnSMjeCC/Y8Li2JuEf6PkrLfc5+Zk/vm
F/bUeJGd5HR\nVs6Jk/xIMulKokswmi8t9usCOjps9krcBG0HKWA20ZWEr2/zM5nddXbUKEGqdywVzCrcbykIdOJG\ny8PKbKZYqM+qSlcGfpwvKJlNn0OH+tHvaafPBqCkzOkp1Kd715QJ3r/Ur7ZO6zMR9+9cnNbnSCvn\n+Ak+u+2w+uy0K+tDfXa2Z890Un32vj6V7iTd95KLPg+FzjQURVGUnFGjoSiKouTMoHVPDa2w0+Zk\nUJoi0e72sgiWzhdx4N4E6XIN9n47he5yboJEUNFvRIVV39BSP/2PB9P5hNi+i0r9M4Zi3Uilrq29\nyctXUeRKlBT6r6Ww1AepGxqtOymR8v1FSqyspWLluuBDQRHhuL2utMK7EIh611ZRqXVBpIINNyoq\n/E6BIZXldgFWMunfP+GKPZpQnwUHjkOiQeAtXTQto8+gYtqwcvveQ0qr/DPC7y+tzxL/jEqnzxLn\n2uto8fKVF7qEgUCf0RKvz6Zm6/5IBNP1UhfYLSm3cn3wzBP8iyQi7lywGC0S6LPE6jnUZ3m5T1oI\nUX3S6/pc+eAioLs+T/74F7LeP1h55eHvZY7TuslFn4dCZxqKoihKzkiOG+H1e0Qk1039lBwQEVSf\nPYfqs2dRffYuTr9Z83F1pqEoiqLkjBoNRVEUJWf61GiIyEQReUZE1onIWhH5smu/VUR2iMhK93Np\ncM8tIrJJRDaIyEV9Ka+iKIrSnb7OnkoAXzXGvCYiQ4BXRWSpO7fQGPPj8GIROR64BpgNjAOeFJGZ\nJl2LIqCkpKRORMbs3668O4qLixE58hIDSnZUnz2L6rN3KSkpqTvYuT41GsaYncBOd9wiIuuB8Ye4\n5QrgPmNMF7BFRDYBpwHL9r+wo6OjGmDevHlmxYoVPS67oijKYEZEXjXGzDvcdXmLaYjIFOBk4CXX\n9EURWS0ivxaRYa5tPLA9uK2WLEZGRG4UkRUismLPnj37n1YURVF6iLwYDRGpAP4M3GSMaQZ+CUwD\n5mJnIj85kv6MMXcZY+YZY+aNGjWqx+VVFEVRLH1uNESkEGswfm+MeQDAGFNnjEkaY1LAf2BdUAA7\ngInB7RNcm6IoipIH+jp7SoBfAeuNMT8N2scGl10FvOGOHwauEZFiEZkKzABe7it5FUVRlO70dfbU\nmcCngTUistK1fQv4pIjMxVZ/3gp8HsAYs1ZEFgPrsJlXX8iWOaUoiqL0DX2dPfU3IFue3JJD3PN9\n4Pu9JpSiKIqSM7oiXFEURckZNRqKoihKzqjRUBRFUXJGjYaiKIqSM2o0FEVRlJxRo6EoitKLLFy4\nMN8i9ChqNBRFUXqJ2tpabrvtNnbsGDyFLNRoKIqi9BKLFi2iqamJRYsW5VuUHqOvV4QriqIMWm6/\n/XaWLFlCSUkJANtr7Qxj8eLFvPDCCwB0dnZy6aWX8p3vfCdvcr4XZLBtzq77aSiKki8aGxs557zz\nWfXaqwe9Zt68eSxdupSqqqo+lOzw5Lqfhs40FEVR3iNv72nl8bV1PL52F/Vnf42id/6J2K63Driu\nvxqMI0GNhqIoyhFijGHNjiaecIbird2tmXOlFZUs+Kd/56kf3cDObVsy7TNnzhzwBgPUaCiKouRE\nIpni5a31PLG2jifW7uKdps7MuSElUc47djQXza7mg7NGUb97F6d+t4NoNMrUqVPZsmULzc3NtLW1\nHT1Gw+2FcQFwBjDGNddh9+t+0gy24IiiKEc9nfEkf31rL4+v3cVT6+toaI9nzo0eUsyFs8dw0exq\nTp86gqKoT0a9/Re/IJlMctNNN/G9732Pb3/729x9993ceeed/OAHP8jHq/QYORkNETkZuA+YDiSB\nvdgS5yNcHxtF5BpjzMqD96IoitL/aeqI88ybu3l87S6e27iH9pjfwmfqyPKMoZg7oYqCgmw7PUBl\nZSVLly5lzpw5ANxxxx1ce+21PPbYY33yDr3JY
bOnRGQMsAa7d/fXgWeNMV3uXDFwLvAj7OzjRGPM\n7l6V+DBo9pSiKEfK7uZOnlhn4xPLNu8jkfL/L54wvpKLjq/mohOqmTG6Aut0GXz0ZPbUPwIdwNnG\nmObwhDMej4nIMmAl8EVgYCYfK4pyVLFlbxuPr93FE2t38fr2RtLj5wKB06cO56LZ1Vw4ewwThpXl\nV9B+Ri5G40Jg0f4GI8QY0ygivwQ+hhoNRVH6IcYY1r7TzONrd/H42l1srPMZT0XRAs6ePpKLZldz\n3nGjGVFRnEdJ+ze5GI3pwGs5XPcq8I1DXSAiE4F7sK4sA9xljPm5iAwH7gemYPcIX2CMaXDB958D\nlwLtwGeMMbnIoiiKQjJleGVrvZtR1LGjsSNzbkhxlHOPcxlPM0dRXqzJpLmQi5aGAk05XNcCVB7m\nmgTwVWPMayIyBHhVRJYCnwGeMsb8UES+CXwTa4AuAWa4n9OBX7rfiqIoWemMJ3lhk814enL9burb\nYplzo4YUc8HxNpA9/5juGU9KbuRiNAQ7K8iFQ0aIjDE7sQF1jDEtIrIeGA9cAZzjLrsbeBZrNK4A\n7nHpvMtFpEpExrp+FEVRAGjutBlPT6yt49kNu2kLMp4mjyjjotnVXDR7DCdPHHbQjCclN3Kdjz0u\nIoke6gsAEZkCnAy8BIwJDMEu/DqQ8cD24LZa19bNaIjIjcCNAJMmTToSMRRFGaDsbulk6bo6Hl9b\nx7LNe4kn/dh29rjKTCB71pghgzbjKR/k8h/9bT39UBGpAP4M3GSMaQ6/UGOMEZEjWihojLkLuAts\nym1PyqooSv+hZl+bC2TX8dq2hm4ZT6elM56OH8PE4Zrx1Fsc1mgYY3rUaIhIIdZg/N4Y84Brrku7\nnURkLJBe67EDmBjcPsG1KYpyFGCMYd3OZh53pTve3NWSOVcUKeCsGSO5aPYYzj9ujGY89RF9mi7g\nsqF+Baw3xvw0OPUwcD3wQ/f7oaD9iyJyHzYA3qTxDEUZ3CRThldrGjKpsbUNPuOpojjKh44dzUWz\nx3DOrNFUaMZTn3NYjYvIvxxJh8aYrx/i9JnAp4E1IpIuOfItrLFYLCI3ADXAAnduCTbddhM25faz\nRyKLoigDg66Ey3h6o44n19exL8h4GllRxAXHj+HC2dW8f9oIiqORPEqq5GKmF5B79pTBlhrJftKY\nv3HwDKvzslxvgC/k+GxFUQYQLZ1xntmwh8fX7uLZN7tnPE0aXsZFrsbTyZOGEdGMp35DLjGNKX0g\nh6IoRwF7Wrp4cr2t8fTipn3EkqnMuePGVmYMxbHVmvHUX8nFPfUE8I/GmA1B27nAS8aYtt4UTlGU\ngc/2+vZMfGJFjc94EoFTpwxzGU/VTBqhGU8DgVzcU+djV4UDICIRYClwKrmVF1EU5SjCGMObu1oy\nqbHrd/qydUWRAs6cPsLVeBrDqCGa8TTQeLepBzpvVBQlQzJleG1bA4+/sYsn1tWxrb49c66iOMo5\ns0Zx0exqzpk1iiElhXmUVHmvaL6aoijdWLhwITfffPNhr+tKJHlx8z6eWLuLpevq2NvqM55GlBdl\najy9f7pmPA0mcjUa2bKndOW1ogwyamtrue2221iwYAHjx48/4HxrV4JnN+zm8bV1PPvmblq6fHWh\nCcNKXY2nak6ZrBlPg5X3UnvqqWz1qIwxo9+7WIqi5INFixbR1NTEokWL+P73vw/AvtZ0xlMdf9u0\nl1jCZzwdWz2EC10xwOPHVmrG01FAXmpPKYrSP7j99ttZsmQJJSUlAOzYYav03Hvf/fzpL09S3x6j\npbWDkmnzqDrzk4jAvMnDMsUAJ48oz6f4Sh447B7hAw3dI1xRcqexsZELLriAQ/2bKa6ewVX/55d8\n+NQZnH/8aEYPKelDCZW+oif3CFcUZZBSVVXFot89wBWXXcLOzWsPOD/t+JN4+qknmVQ9Kg/SKf0R\nNRqKchQST
6ZYuq6Ou1/cyktb6ole/k9E7/kqiQZfRHrmzJm89MJzVFVV5VFSpb+hRkNRjiJ2t3Ty\nh5e2c+/LNdQ1dwFQXhTh/OkjuDcapzEaZerUqWzZsoXm5mba2trUaCjd0A1yFWWQY4zhla31/OMf\nXufMHz7Nwic3UtfcxbRR5dz2kdks/9Z5lG56EsFw0003sWbNGm666SaSySR33nlnvsVX+hk601CU\nQUp7LMFDK9/hnmU1mVIeBQIXzR7DdfOn8P5pIzIpspWVlSxdupQ5c+YAcMcdd3Dttdfy2GOP5U1+\npX+i2VOKMsjYureN3y6v4Y8rttPcaZdSjSgv4prTJvKp0yczvqo0zxIq/RHNnlKUo4hkyvDcxt3c\n/WINz23ck2mfO7GK698/mUtPHKulPJQeQY2GogxgGtpiLF6xnd+9VMP2erstanG0gI/MGcd186dw\n4oShh+lBUY4MNRqKMgB5Y0cTd7+4lYdXvUOXK+sxcXgp154+mQXzJjKsvCjPEiqDlT41GiLya+By\nYLcx5gTXdivwv4D0nPpbxpgl7twtwA1AEviSMebxvpRXUfoTXYkkj63Zxd3LtvL6tsZM+wdnjuK6\n+ZM5Z9ZoLRKo9Dp9PdP4DfAL4J792hcaY34cNojI8cA1wGxgHPCkiMw0xiRRlKOIdxo7+P1LNdz3\n8nb2tdny45UlUT4xbyLXnjGZqSO1/pPSd/Sp0TDGPC8iU3K8/ArgPmNMF7BFRDYBpwHLekk8Rek3\nGGNYtnkfdy/bytJ1daRckuNxYyu5bv5krpg7jrIi9S4rfU9/+av7oohcB6wAvmqMaQDGA8uDa2pd\n2wGIyI3AjQCTJk3qZVEVpfdo6Yzz4Os7uGdZDZt2twIQLRAuO2ks182fzLzJw7T8uJJX+oPR+CXw\nXeymTt8FfgJ87kg6MMbcBdwFdp1GTwuoKL3NW3Ut3LOshgdeq6UtZj2wYyqL+dRpk/nkaRMZXamV\nZZX+Qd6NhjGmLn0sIv8BPOo+7gAmBpdOcG2KMihIuKKB9yyrYdnb+zLtp08dznXzp3Dh7DEURrTS\nj9K/yLvREJGxxpid7uNVwBvu+GHgXhH5KTYQPgN4OQ8iKkqPsqeli/te3sa9L29jZ1MnAGVFEa46\neTzXzZ/CrOoheZZQUQ5OX6fc/gE4BxgpIrXAPwPniMhcrHtqK/B5AGPMWhFZDKwDEsAXNHNKGagY\nY3htWyP3LNvKkjU7iSetF/WYkeV8ev5kPnbKBCpLCvMrpKLkgNaeUpRepCOW5OFVNrC99h1fNPC8\n48Zw3fzJnDltJAW6tkLpB2jtKUXJIzX72vjd8hoWr6ilqSMOwPDyIq4+dSJ/d/okJgwry7OEivLu\nUKOhKD1EKmV47q093PPiVp7duIf0JH7OxCquO2Myl500lpJCLRqoDGzUaCjKe6SxPcYfV9Tyu5dq\nqNnXDkBRtIAPnzSO6+ZPZs5E3flOGTyo0VCUd8kbO5r47bIaHlq1g864LRo4vqqUa8+YzNWnTmS4\nFg1UBiFqNBTlCIglUjz2xk7uWVbDqzUNmfazZ4zk+vlT+NCxWjRQGdyo0VCUHNjV1Mm9L9Vw78vb\n2dvaBcCQkigfP2UCnz5jMseMqsizhIrSN6jRUJSDYIxh+dv13LNsK0+sqyPpqgYeWz2ET8+fzJVz\nx1NerP+ElKML/YtXlP1o7Urw4Gu1/HZ5DRvruhcNvH7+FE6dokUDlaMXNRqK4ti0u5XfLtvKn1/b\nQWtXAoBRQ4r51GmT+NTpkxijRQMVRY2GcnSTSKZ4cv1ufrt8Ky9s8kUDT5synE/Pn8xFs6spimrR\nQEVJo0ZDOSrZ29rF/a9s5/fLa3jHFQ0sLYxw5cnjuW7+ZI4bW5lnCRWlf6JGQzlqMMawcnsj9yyr\n4S+rdxJL2rUVU0eW8+kzbNHAoaVaNFBRDoUaDWVQsHDhQm6++eas5zrjSR5
Z9Q73LKthzY4mAETg\n/ONGc938KZw1XYsGKkquqNFQBjy1tbXcdtttLFiwgPHj/Y7A2+vb+d3yGu5fsZ3Gdls0cFhZIQtO\nnci1p09m4nAtGqgoR4oaDWXAs2jRIpqamli0aBHf/e73+Oumvdzz4lae3rA7UzTwpAlDuW7+FC7X\nooGK8p5Qo6EMOG6//XaWLFlCSYlNgd2xw+4C/J93/547//AonYkUJhGnYvqpXP+Fr3Ld+6cwV4sG\nKkqPoJswKQOOxsZGLrjgAg71PU+adSLPPLmUYyaM6UPJFGXgkusmTJqArgw4qqqqWLp0KbNOnJv1\n/Cnz5rFq+fNqMBSlF+hToyEivxaR3SLyRtA2XESWishb7vcw1y4i8q8isklEVovI+/pSVqX/0hFL\nsvC5WtrP/SbRYeO7nZs5cyZPLl1KVZW6oxSlN+jrmcZvgIv3a/sm8JQxZgbwlPsMcAkww/3cCPyy\nj2RU+jFrapu4/P/+lbuX1RBJdlFKF9FolBkzZhCNRmlubqatrS3fYirKoKVPjYYx5nmgfr/mK4C7\n3fHdwJVB+z3GshyoEpGxfSOp0t9IJFP84um3uGrRC2ze08b00RWcm3qdkqhw0003sWbNGm666SaS\nySR33nlnvsVVlEFLf4hpjDHG7HTHu4C0I3o8sD24rta1HYCI3CgiK0RkxZ49e3pPUiUv1Oxr4+q7\nlvPjJzaSSBk+e+YUHv3Hs5g2fhRLly7ljjvuoLi4mDvuuIOlS5dSWaklQBSlt+hXKbfGGCMiR5zO\nZYy5C7gLbPZUjwum5AVjDItXbOf2R9bRFksyprKYH39iDmfPGAXAt771rQPumTNnDnPmzOlrURXl\nqKE/GI06ERlrjNnp3E+7XfsOYGJw3QTXphwF7G3t4pYH1rB0XR0Al500lu9feQJVZbrvtqLkk/7g\nnnoYuN4dXw88FLRf57KozgCaAjeWMoh5+s06Lv7Z8yxdV8eQkig/u3ouv/jkyWowFKUf0KczDRH5\nA3AOMFJEaoF/Bn4ILBaRG4AaYIG7fAlwKbAJaAc+25eyKn1PeyzB9/6ynntf2gbAGccM5ycL5jK+\nqjTPkimKkqZPjYYx5pMHOXVelmsN8IXelUjpL7y+rYGb71/J1n3tFEUK+NpFs7jhrKlafVZR+hn9\nIaahHMXEkyl+8fQmfvHMJpIpw7HVQ1h49VzdBElR+ilqNJS88faeVm6+fyWrapsQgRs/cAxfuWCm\nVqFVlH6MGg2lzzHG8PuXtvG9v6yjM55i3NASfrJgLvOnjci3aIqiHAY1Gkqfsrulk2/8aTXPbLCL\nMK86eTy3fmS2brOqKAMENRpKn/H42l3c8sAa6ttiDC0t5PtXncDlJ43Lt1iKohwBajSUXqe1K8Ft\nD6/lj6/WAnDW9JH8+BNzqB5akmfJFEU5UtRoKL3KK1vr+crilWyv76A4WsA3LzmW6+dP0VRaRRmg\nqNFQeoVYIsXPntzIvz23mZSB2eMq+dnVc5kxZki+RVMU5T2gRkPpcTbtbuGm+1fyxo5mROAfzpnG\nTefPpCjaH6rWKIryXlCjofQYqZTh7mVb+eFjb9KVSDFhWCkLr57LqVOG51s0RVF6CDUaSo+wq6mT\nr/1pFX99ay8AnzhlAt/58PEMKdFUWkUZTKjRUN4zj65+h//z4Bs0dcQZVlbI///RE7n4BN1kUVEG\nI2o0lHdNc2ecf35oLQ++brc5OWfWKP7l4ycxeoim0irKYEWNhvKuWP72Pr66eBU7GjsoKSzg/1x2\nPNeePgkRTaVVlMGMGg3liOhKJPnJExv5j7++jTEwZ8JQfnr1XKaNqsi3aIqi9AFqNJSceXNXMzfd\nt5I3d7UQKRC+cO50/vHc6RRGNJVWUY4W1GgohyWVMvzqb1u44/ENxJIppowo46dXz+V9k4blWzRF\nUfoYNRrKIdnR2MFXF69k+dv1AHzytEl
8+7LjKC/WPx1FORrpN//yRWQr0AIkgYQxZp6IDAfuB6YA\nW4EFxpiGfMl4NGGM4aGV7/BPD71BS2eCkRVF/PCjJ3H+8WPyLZqiKHmk3xgNx4eMMXuDz98EnjLG\n/FBEvuk+fyM/oh09NLbH+PZ/v8Gjq3cCcP5xY/jhx05kZEVxniVTFCXf9DejsT9XAOe447uBZ1Gj\n0av87a29/H9/XMWu5k7KiiJ85/LjufrUiZpKqygK0L+MhgGeEBED/Lsx5i5gjDFmpzu/C1DfSC/R\nGU/yo/95k/96YSsA75tUxcKr5zJ5RHl+BVMUpV/Rn4zGWcaYHSIyGlgqIm+GJ40xxhmUAxCRG4Eb\nASZNmtT7kg4y3tjRxM33r+St3a1EC4QvnzeD/33ONKKaSqsoyn70G6NhjNnhfu8WkQeB04A6ERlr\njNkpImOB3Qe59y7gLoB58+ZlNSzKgSRThn9/fjMLl24knjQcM6qcn109l5MmVOVbNEVR+in9wmiI\nSDlQYIxpcccXArcDDwPXAz90vx/Kn5SDi+317Xxl8Upe2WqT0a6bP5lbLjmO0qJIniVTFKU/0y+M\nBjZW8aALtkaBe40x/yMirwCLReQGoAZYkEcZBwXGGP70ai23PbKO1q4Eo4cU8y8fP4lzZo3Ot2iK\nogwA+oXRMMa8DczJ0r4POK/vJRqc1LfFuOWB1Ty+tg6AS06o5gdXnciw8qI8S6YoykChXxgNpfd5\nZsNuvv6n1exp6aKiOMqtH5nNx943XlNpFUU5ItRoDHI6Ykl+sGQ9v11eA8BpU4bzkwVzmDi8LM+S\nKYoyEFGjMYhZtb2Rm+9fydt72yiMCF+9cBb/6+xjiBTo7EJRlHeHGo1BSCKZ4s5nNvOvT79FMmWY\nOaaChVfPZfa4ofkWTVGUAY4ajUHGlr1t3Hz/SlZubwTgc2dO5esXz6KkUFNpFUV576jRGCQYY/jD\ny9v57qPr6Ignqa4s4ScL5nDm9JH5Fk1RlEGEGo1BwJ6WLm55YDVPrrcL5j88Zxzfu+IEhpYV5lky\nRVEGG2o0BjhL19XxzT+vZl9bjCElUb535QlcMXd8vsVSFGWQokZjgNLWleC7j67jvle2AzD/mBH8\nZMEcxlWV5lkyRVEGM2o0BiCv1jRw8/0r2VbfTlG0gK9fNIvPnTmVAk2lVRSll1GjMYCIJ1P861Nv\nceczm0gZOLZ6CD+/5mRmVQ/Jt2iKohwlqNEYIGze08rN969kdW0TIvD5Dx7DVy6YSXFUU2kVRek7\n1Gj0c4wx/HZ5DT9Ysp7OeIrxVaX8ZMEczjhmRL5FUxTlKESNRj+mrrmTr/1pNc9v3APAR08ez61X\nzKayRFNpFUXJD2o0+imPrdnJLQ+uobE9TlVZId+/8kQuO2lsvsVSFOUoR41GP6OlM86tD6/jz6/V\nAnD2jJH8+BNzGFNZkmfJFEVR1Gj0K17eUs/N969kR2MHxdECvnXpcVw3f7LueaEoSr+hIN8CHK0s\nXLgwc9yVSPLDx97k6ruWsaOxgxPGV/KXL53F9e+fogZDUZR+hc408kBtbS233XYbCxYsoC1ayZfv\nW8n6nc0UCHzhQ9P50nkzKIqqPVcUpf8xIIyGiFwM/ByIAP9pjPlhnkV6TyxatIimpiY+f8sP2Djp\ncmKJFJOGl/HTBXOYN2V4vsVTFEU5KP3eaIhIBLgTuACoBV4RkYeNMevyK1nu3H777SxZsoSSEhvM\n3rbdBrkff+RBIkOeZ9SQYjoroizpvJR53/lOPkVVFEU5JP3eaACnAZuMMW8DiMh9wBXAgDEaX/rS\nl3jkkUd47rnnurUnGneSaNxJLVA9bx5f+tKX8iOgoihKjgwEozEe2B58rgVODy8QkRuBG93HVhHZ\n0EeyHQkRYCZQluVc+4oVKzYOGzYs2ccyDSZGAnvzLcQgQXXZswwUfU7O5aKBYDQOizHmLuCufMtx\nJIj
ICmPMvHzLMVhQffYcqsueZbDpcyCk6OwAJgafJ7g2RVEUpY8ZCEbjFWCGiEwVkSLgGuDhPMuk\nKIpyVNLv3VPGmISIfBF4HBsX+LUxZm2exeoJBpQ7bQCg+uw5VJc9y6DSpxhj8i2DoiiKMkAYCO4p\nRVEUpZ+gRkNRFEXJGTUaeUBELhaRDSKySUS+mW95BjIi8msR2S0ib+RbloGOiEwUkWdEZJ2IrBWR\nL+dbpoGMiJSIyMsissrp87Z8y9QTaEyjj3FlUTYSlEUBPjmQyqL0J0TkA0ArcI8x5oR8yzOQEZGx\nwFhjzGsiMgR4FbhS/zbfHWJLVJcbY1pFpBD4G/BlY8zyPIv2ntCZRt+TKYtijIkB6bIoyrvAGPM8\nUJ9vOQYDxpidxpjX3HELsB5bkUF5FxhLq/tY6H4G/ChdjUbfk60siv7DVPoVIjIFOBl4Kb+SDGxE\nJCIiK4HdwFJjzIDXpxoNRVG6ISIVwJ+Bm4wxzfmWZyBjjEkaY+ZiK1mcJiID3oWqRqPv0bIoSr/F\n+d7/DPzeGPNAvuUZLBhjGoFngIvzLct7RY1G36NlUZR+iQvc/gpYb4z5ab7lGeiIyCgRqXLHpdjk\nlzfzK9V7R41GH2OMSQG/YuEAAAaaSURBVADpsijrgcWDpCxKXhCRPwDLgFkiUisiN+RbpgHMmcCn\ngXNFZKX7uTTfQg1gxgLPiMhq7GBxqTHm0TzL9J7RlFtFURQlZ3SmoSiKouSMGg1FURQlZ9RoKIqi\nKDmjRkNRFEXJGTUaiqIoSs6o0VDyjoicICJGRM4J2ozbsTHXPr4e3t9Dcp3j5Bjwq3hF5FYR2ZvH\n528VkR/n6/lKz6FGQ+mvzAf+eATXfx04p3dEURQlTb/fI1w5Osln+Wi3Mro4X89XlP6MzjSUPkdE\n/kFEtotIm4g8gl05u/813dxTInKWiPxVRJrdz0oR+YQ7txUYAfyzu88419IUd3z5fn3/RkRWBJ9v\nFZG97hmvAJ3AJ4JbxonIo07ebSLy9/v1N19EHhaRne6alSLyd/td8xkny4kistRd96aIfDTLu1/l\nNu/pEJF9IrJERCYH508Qkb+ISIv7+aOIVOek/O7PGS4id4lInYh0isiLInJ6cP5ZETlgticidzg9\niPtcIiL/4r7TLrfpkK4kH6So0VD6FBG5ArgTeBT4KLAG+PVh7ql0178NfAz4OPBboMpdchXQhK2b\nNN/9vHaEopUBdwP/iS0q93Jw7lfAaifvEuCX+xmiycALwA3Ah7EF//5LRD6Z5Tn3YmuNXQW8Bdwn\nIhOCd/008ACwGVgAfBa7adcod366e1YJcC3wGWA28Ej6P/FcEJFi4EngfOBrwJXAHuDJwADdD1wq\nIuXBfeLkWmx8OYk/OTl+4N7/FeBhEZmbqzzKAMIYoz/602c/2P+MH9uv7T+wm9OcE7QZ4IvueJ77\nPOQQ/e4Fbt2vbYq77/L92n8DrAg+3+quu2K/685x7Xft174UWH4QOQTr9v134Omg/TOur88FbSOA\nBPD37nMBtuLxA4d4z98CG4CioG0GkAQuO8R9twJ7g883ADFgRtAWxRqrO9znUU6+a4Jr5rv3mOc+\nn+c+f3C/5z0P/DH4vBX4cb7//vTnvf/oTEPpM0QkCrwPeGi/U4crwb0Zu6XrvSJyRbpyaA9jgMcO\ncu7B/T4/AJwiduteRGSYiPyriNQAcfdzIzAzS19PZB5ozD7s5jzpmcYsYBzwX4eQ83wnT0pEok6n\nW7D/Kc87xH3Z+nkV2BL0A/Bcuh9jzB7gaeDq4L6rgc3GmBVBP7uAF9L9uL6eOkJ5lAGCGg2lLxkJ\nRLD/UYbs/7kbxpgGbFnpQmAxsMf59I/pQdkajN1+NxvZ5I1i3wfszOVq4A7gQuBUrMutJEtfjft9\njgXXjXC/dx5CzpHAN/DGKf1zDN33aTkcI4EzsvTz2f36uQ+4REQqR
aQAG+u5f79+qrP0c+sRyqMM\nEDR7SulL9mLdKKP3a9//8wEYm011sdh9Cc4HfoqND5xxiNs63e+i/dqHZXvEIfrJJm8C2CsiJcDl\nwBeMMf+WvsD9B3uk7HO/D0gMCKjHzjT+M8u5I1mHUQ+sAP53lnNdwfGDwC+x+9jXYGdCodGox7rU\nrjyCZysDGDUaSp9hjEmIyOvY/4D+LTh1QAbRIfrowAZ9TwBuCU6FI/Y0u7Gj3uPSDWK3Mn0/9j/A\nXLmK7q6rq4BXjTFJ118BwX+0IjIE+AiHNkTZ2ID9D/h64JGDXPMUNvD9qnHBgnfJU9hZ0TZjzEFn\nesaYBhF5AjuTqsFu0LR6v36+CrQaYwb8BkPK4VGjofQ1PwAeEJFfYkexH+QwW2CKyGXA54D/BrYB\n44HPY/3tad4ELhOR/8HGPzYYY1pE5CHgZhdvaMT+B9dxhDJfIiLfx/r7P4p1lV0BYIxpcmm63xGR\nZiAFfBObzVV5JA8xxqRE5OvA70Xk98AfsIbnXOAPLo5wKzaZ4C8i8mvs7GK8k+k3xphnc3zcPcDf\nA8+6ldpvY91jpwG7jDELg2vvx7rbmoBf7NfPUuyGYktF5EfAWvfec4ESY8wtKIOLfEfi9efo+8Hu\nXFgLtGNTWC/k0NlTs7BpnduxI/pa7ExleHD9KcByoC3sCxiDDbw3Y0fKN5I9e2pvFjnPcX1dhJ1p\ntLtn/8N+103HjrjbsEbt6/v3ic+eqtjv3q3sl1WENUyvYt1r+4C/AJOD88c6fdRjDeAmbLbWhEPo\n/IB3BIYCP3d6jbl3ewA4c7/rhrh3N8CsLH0XA7c5OWLYwPj/EGRzZXtP/RmYP7pzn6IoipIzmj2l\nKIqi5IwaDUVRFCVn1GgoiqIoOaNGQ1EURckZNRqKoihKzqjRUBRFUXJGjYaiKIqSM2o0FEVRlJz5\nf9X3jCI/oSzsAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "Er = vis.Experiment_reader()\n", + "pth = \"out/rect\"\n", + "Er.set_path(pth)\n", + "Er.read_all_expriemts()\n", + "ank=1.0/8\n", + "ul = Er.get_data(\"Inc\")['rect_001'][0].max()\n", + "def fd(x):\n", + " return ul-x\n", + "Er.print_param_description(0)\n", + "ank=1.0/8\n", + "Er.annotated_plot2(0,'Inc', zoom=0.8, pad=0, max_hight=3, xybox=None, fd=fd,\n", + " figposx=[ank,3*ank,5*ank,7*ank], figposy=[fiy,fiy,fiy,fiy], xlim=[-0.5,3.5], ylim=[0,16],\n", + " ylabel=\"IND\", add_points=True)\n", + "Er.annotated_plot2(0,'Fid', zoom=0.8, pad=0, max_hight=300, xybox=None,\n", + " figposx=[ank,3*ank,5*ank,7*ank], figposy=[fiy,fiy,fiy,fiy], xlim=[-0.5,3.5], ylim=[0,400], ylabel=\"FID\", add_points=True)\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": 
"Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.0" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/Wav2Lip-master/evaluation/TTUR-master/FIDvsINC/fid.py b/Wav2Lip-master/evaluation/TTUR-master/FIDvsINC/fid.py new file mode 100644 index 00000000..1e39a19c --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/FIDvsINC/fid.py @@ -0,0 +1,240 @@ +#!/usr/bin/env python3 +''' Calculates the Frechet Inception Distance (FID) to evalulate GANs. +The FID metric calculates the distance between two distributions of images. +Typically, we have summary statistics (mean & covariance matrix) of one +of these distributions, while the 2nd distribution is given by a GAN. +When run as a stand-alone program, it compares the distribution of +images that are stored as PNG/JPEG at a specified location with a +distribution given by summary statistics (in pickle format). +The FID is calculated by assuming that X_1 and X_2 are the activations of +the pool_3 layer of the inception net for generated samples and real world +samples respectivly. +See --help to see further details. +''' + +from __future__ import absolute_import, division, print_function +import numpy as np +import os +import gzip, pickle +import tensorflow as tf +from scipy.misc import imread +from scipy import linalg +import pathlib +import urllib + + +class InvalidFIDException(Exception): + pass + + +def create_inception_graph(pth): + """Creates a graph from saved GraphDef file.""" + # Creates graph from saved graph_def.pb. 
+ with tf.gfile.FastGFile( pth, 'rb') as f: + graph_def = tf.GraphDef() + graph_def.ParseFromString( f.read()) + _ = tf.import_graph_def( graph_def, name='FID_Inception_Net') +#------------------------------------------------------------------------------- + + +# code for handling inception net derived from +# https://github.com/openai/improved-gan/blob/master/inception_score/model.py +def _get_inception_layer(sess): + """Prepares inception net for batched usage and returns pool_3 layer. """ + layername = 'FID_Inception_Net/pool_3:0' + pool3 = sess.graph.get_tensor_by_name(layername) + ops = pool3.graph.get_operations() + for op_idx, op in enumerate(ops): + for o in op.outputs: + shape = o.get_shape() + if shape._dims is not None: + shape = [s.value for s in shape] + new_shape = [] + for j, s in enumerate(shape): + if s == 1 and j == 0: + new_shape.append(None) + else: + new_shape.append(s) + o._shape = tf.TensorShape(new_shape) + return pool3 +#------------------------------------------------------------------------------- + + +def get_activations(images, sess, batch_size=50, verbose=False): + """Calculates the activations of the pool_3 layer for all images. + Params: + -- images : Numpy array of dimension (n_images, hi, wi, 3). The values + must lie between 0 and 256. + -- sess : current session + -- batch_size : the images numpy array is split into batches with batch size + batch_size. A reasonable batch size depends on the disposable hardware. + -- verbose : If set to True and parameter out_step is given, the number of calculated + batches is reported. + Returns: + -- A numpy array of dimension (num images, 2048) that contains the + activations of the given tensor when feeding inception with the query tensor. + """ + inception_layer = _get_inception_layer(sess) + d0 = images.shape[0] + if batch_size > d0: + print("warning: batch size is bigger than the data size. 
setting batch size to data size") + batch_size = d0 + n_batches = d0//batch_size + n_used_imgs = n_batches*batch_size + pred_arr = np.empty((n_used_imgs,2048)) + for i in range(n_batches): + if verbose: + print("\rPropagating batch %d/%d" % (i+1, n_batches), end="", flush=True) + start = i*batch_size + end = start + batch_size + batch = images[start:end] + pred = sess.run(inception_layer, {'FID_Inception_Net/ExpandDims:0': batch}) + pred_arr[start:end] = pred.reshape(batch_size,-1) + if verbose: + print(" done") + return pred_arr +#------------------------------------------------------------------------------- + + +def calculate_frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6): + """Numpy implementation of the Frechet Distance. + The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1) + and X_2 ~ N(mu_2, C_2) is + d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)). + + Stable version by Dougal J. Sutherland. + Params: + -- mu1 : Numpy array containing the activations of the pool_3 layer of the + inception net ( like returned by the function 'get_predictions') + for generated samples. + -- mu2 : The sample mean over activations of the pool_3 layer, precalcualted + on an representive data set. + -- sigma1: The covariance matrix over activations of the pool_3 layer for + generated samples. + -- sigma2: The covariance matrix over activations of the pool_3 layer, + precalcualted on an representive data set. + Returns: + -- : The Frechet Distance. 
+ """ + + mu1 = np.atleast_1d(mu1) + mu2 = np.atleast_1d(mu2) + + sigma1 = np.atleast_2d(sigma1) + sigma2 = np.atleast_2d(sigma2) + + assert mu1.shape == mu2.shape, "Training and test mean vectors have different lengths" + assert sigma1.shape == sigma2.shape, "Training and test covariances have different dimensions" + + diff = mu1 - mu2 + + # product might be almost singular + covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False) + if not np.isfinite(covmean).all(): + msg = "fid calculation produces singular product; adding %s to diagonal of cov estimates" % eps + warnings.warn(msg) + offset = np.eye(sigma1.shape[0]) * eps + covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset)) + + # numerical error might give slight imaginary component + if np.iscomplexobj(covmean): + if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3): + m = np.max(np.abs(covmean.imag)) + raise ValueError("Imaginary component {}".format(m)) + covmean = covmean.real + + tr_covmean = np.trace(covmean) + + return diff.dot(diff) + np.trace(sigma1) + np.trace(sigma2) - 2 * tr_covmean +#------------------------------------------------------------------------------- + + +def calculate_activation_statistics(images, sess, batch_size=50, verbose=False): + """Calculation of the statistics used by the FID. + Params: + -- images : Numpy array of dimension (n_images, hi, wi, 3). The values + must lie between 0 and 255. + -- sess : current session + -- batch_size : the images numpy array is split into batches with batch size + batch_size. A reasonable batch size depends on the available hardware. + -- verbose : If set to True and parameter out_step is given, the number of calculated + batches is reported. + Returns: + -- mu : The mean over samples of the activations of the pool_3 layer of + the incption model. + -- sigma : The covariance matrix of the activations of the pool_3 layer of + the incption model. 
+ """ + act = get_activations(images, sess, batch_size, verbose) + mu = np.mean(act, axis=0) + sigma = np.cov(act, rowvar=False) + return mu, sigma +#------------------------------------------------------------------------------- + + +#------------------------------------------------------------------------------- +# The following functions aren't needed for calculating the FID +# they're just here to make this module work as a stand-alone script +# for calculating FID scores +#------------------------------------------------------------------------------- +def check_or_download_inception(inception_path): + ''' Checks if the path to the inception file is valid, or downloads + the file if it is not present. ''' + INCEPTION_URL = 'http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz' + if inception_path is None: + inception_path = '/tmp' + inception_path = pathlib.Path(inception_path) + model_file = inception_path / 'classify_image_graph_def.pb' + if not model_file.exists(): + print("Downloading Inception model") + from urllib import request + import tarfile + fn, _ = request.urlretrieve(INCEPTION_URL) + with tarfile.open(fn, mode='r') as f: + f.extract('classify_image_graph_def.pb', str(model_file.parent)) + return str(model_file) + + +def _handle_path(path, sess): + if path.endswith('.npz'): + f = np.load(path) + m, s = f['mu'][:], f['sigma'][:] + f.close() + else: + path = pathlib.Path(path) + files = list(path.glob('*.jpg')) + list(path.glob('*.png')) + x = np.array([imread(str(fn)).astype(np.float32) for fn in files]) + m, s = calculate_activation_statistics(x, sess) + return m, s + + +def calculate_fid_given_paths(paths, inception_path): + ''' Calculates the FID of two paths. 
''' + inception_path = check_or_download_inception(inception_path) + + for p in paths: + if not os.path.exists(p): + raise RuntimeError("Invalid path: %s" % p) + + create_inception_graph(str(inception_path)) + with tf.Session() as sess: + sess.run(tf.global_variables_initializer()) + m1, s1 = _handle_path(paths[0], sess) + m2, s2 = _handle_path(paths[1], sess) + fid_value = calculate_frechet_distance(m1, s1, m2, s2) + return fid_value + + +if __name__ == "__main__": + from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter + parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter) + parser.add_argument("path", type=str, nargs=2, + help='Path to the generated images or to .npz statistic files') + parser.add_argument("-i", "--inception", type=str, default=None, + help='Path to Inception model (will be downloaded if not provided)') + parser.add_argument("--gpu", default="", type=str, + help='GPU to use (leave blank for CPU only)') + args = parser.parse_args() + os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu + fid_value = calculate_fid_given_paths(args.path, args.inception) + print("FID: ", fid_value) diff --git a/Wav2Lip-master/evaluation/TTUR-master/FIDvsINC/fidutils.py b/Wav2Lip-master/evaluation/TTUR-master/FIDvsINC/fidutils.py new file mode 100644 index 00000000..f5e4b254 --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/FIDvsINC/fidutils.py @@ -0,0 +1,708 @@ +import os +import sys +import math +import random +import tensorflow as tf +import numpy as np +import scipy.stats as st +from scipy.misc import toimage +import scipy as sp +from skimage import data +from skimage.transform import swirl +from skimage.filters import gaussian +import matplotlib.pyplot as plt +from matplotlib.patches import Circle +from matplotlib.offsetbox import (TextArea, DrawingArea, OffsetImage,AnnotationBbox) +from matplotlib.cbook import get_sample_data +import matplotlib.gridspec as gridspec +import shutil + + +# +# derived from 
# https://github.com/openai/improved-gan/blob/master/inception_score/model.py
#

# calculate inception score
def get_inception_score(images, softmax, sess, splits=10, verbose=False):
    """Propagate ``images`` through ``softmax`` in fixed batches of 50 and
    return the Inception score as ``(mean, std)`` over ``splits`` splits.

    NOTE(review): assumes ``sess`` is a tf.Session whose graph contains the
    input tensor 'FID_Inception_Net/ExpandDims:0' -- confirm against caller.
    """
    inps = images
    bs = 50  # fixed propagation batch size
    preds = []
    n_batches = int(math.ceil(float(inps.shape[0]) / float(bs)))
    for i in range(n_batches):
        if verbose:
            print("\rPropagating batch %d/%d" % (i + 1, n_batches), end="", flush=True)
        inp = inps[(i * bs):min((i + 1) * bs, inps.shape[0])]
        pred = sess.run(softmax, {'FID_Inception_Net/ExpandDims:0': inp})
        preds.append(pred)
    preds = np.concatenate(preds, 0)
    scores = []
    for i in range(splits):
        part = preds[(i * preds.shape[0] // splits):((i + 1) * preds.shape[0] // splits), :]
        # mean exp(KL(p(y|x) || p(y))) over the split
        kl = part * (np.log(part) - np.log(np.expand_dims(np.mean(part, 0), 0)))
        kl = np.mean(np.sum(kl, 1))
        scores.append(np.exp(kl))
    if verbose:
        print(" done")
    return np.mean(scores), np.std(scores)
#-------------------------------------------------------------------------------

# get softmax output
def get_softmax(sess, pool3):
    """Build a softmax head on the inception pool3 features by reusing the
    weight matrix of the final MatMul of the loaded FID graph."""
    w = sess.graph.get_operation_by_name("FID_Inception_Net/softmax/logits/MatMul").inputs[1]
    logits = tf.matmul(tf.squeeze(pool3), w)
    softmax = tf.nn.softmax(logits)
    return softmax
#===============================================================================


#
# simple data container with image transformations
#
class DataContainer:
    """Hold a 2-D sample matrix (one flattened image per row) plus optional
    labels, and provide batching, shuffling and the image distortions used by
    the FID-vs-INC experiments (blur, noise, rectangles, swirl, salt&pepper).
    """

    def __init__(self, data, labels=None, epoch_shuffle=True):  # todo: labels
        self._data = data                  # (n_samples, n_features) matrix
        self._labels = labels              # optional (n_samples, ...) labels
        self._d0 = 0                       # number of samples
        self._d1 = 0                       # number of features
        self._cur_samp = 0                 # batching cursor
        self._mean = None
        self._std = None
        self._min = None
        self._max = None
        self.__init_and_check()
        self._epoch_shuffle = epoch_shuffle  # reshuffle when an epoch wraps
        self._transf_data = None           # holds the last distorted copy
        self._reshuffle_idx = None
    #---------------------------------------------------------------------------

    # TODO: check parameters
    def __init_and_check(self):
        # Requires a 2-D data matrix; labels (if given) must match sample count.
        [self._d0, self._d1] = self._data.shape
        self._cur_samp = 0
        if not self._labels is None:
            ls = self._labels.shape
            if ls[0] != self._d0:
                raise RuntimeError("Data and labels must have the same number of samples!")
    #---------------------------------------------------------------------------

    def get_next_batch(self, batch_size):
        """Return ``[batch, labels]`` (labels is None when absent); wraps to
        the start -- optionally reshuffling -- when the epoch is exhausted."""
        ret_D = None
        ret_L = None
        tmp_smp = self._cur_samp + batch_size
        if tmp_smp <= self._d0:
            ret_D = self._data[self._cur_samp:tmp_smp, :]
            if not self._labels is None:
                ret_L = self._labels[self._cur_samp:tmp_smp, :]
            if tmp_smp < self._d0:
                self._cur_samp = tmp_smp
            else:
                self._cur_samp = 0
        else:
            if self._epoch_shuffle:
                self.reshuffle()
            self._cur_samp = batch_size
            ret_D = self._data[0:self._cur_samp, :]
            if not self._labels is None:
                ret_L = self._labels[0:self._cur_samp, :]
        return [ret_D, ret_L]
    #---------------------------------------------------------------------------

    def reset_counter(self):
        self._cur_samp = 0
    #---------------------------------------------------------------------------

    def get_data(self):
        return self._data
    #---------------------------------------------------------------------------

    def get_transformed_data(self):
        return self._transf_data
    #---------------------------------------------------------------------------

    def get_labels(self):
        return self._labels
    #---------------------------------------------------------------------------

    def reshuffle(self):
        # Shuffle samples (and labels, when present) with one shared index.
        idx = np.array(range(self._d0))
        np.random.shuffle(idx)
        self._data = self._data[idx, :]
        if not self._labels is None:
            self._labels = self._labels[idx, :]
    #---------------------------------------------------------------------------

    def apply_gaussian_blur(self, sigma, m, n):
        """Gaussian-blur every sample, interpreted as an (m, n, 3) image."""
        self._transf_data = np.zeros_like(self._data)
        for i in range(self._d0):
            tmp = gaussian(self._data[i].reshape(m, n, 3), sigma)
            self._transf_data[i, :] = tmp.reshape(n * m * 3,)
    #---------------------------------------------------------------------------

    def apply_gauss_noise(self, alpha, mi=-1, ma=1):
        """Blend uniform-rescaled Gaussian noise in: (1-alpha)*x + alpha*noise.
        alpha <= 1e-6 is treated as "no noise" (plain copy)."""
        rnd = np.random.randn(self._d0, self._d1)
        rnd = (rnd - rnd.min()) / (rnd.max() - rnd.min())
        rnd = rnd * (ma - mi) + mi
        if alpha > 1e-6:
            self._transf_data = (1 - alpha) * self._data + alpha * rnd
        else:
            self._transf_data = self._data.copy()
    #----------------------------------------------------------------------------

    def apply_rect(self, hi, wi, chan, share, positioning="random", val=0.0):
        """Drop one constant rectangle (side length = share * image side) into
        every sample, interpreted as an (hi, wi, chan) image."""
        self._transf_data = np.zeros_like(self._data)
        for i in range(self._d0):
            # BUGFIX: original reshaped with undefined name `whi` -> NameError.
            img = self._data[i, :].reshape(hi, wi, chan)
            self._transf_data[i, :] = drop_rect(img, hi, wi, chan, share=share, positioning=positioning, val=val).flatten()
    #----------------------------------------------------------------------------

    def apply_mult_rect(self, n_rect, hi, wi, chan, share, val=0.0):
        """Drop ``n_rect`` random rectangles into every sample."""
        self._transf_data = np.zeros_like(self._data)
        for i in range(self._d0):
            img = self._data[i, :].reshape(hi, wi, chan)
            self._transf_data[i, :] = drop_rect(img, hi, wi, chan, share=share, positioning="random", val=val).flatten()
            for j in range(1, n_rect):
                # subsequent rectangles are applied on top of the previous ones
                img = self._transf_data[i, :].reshape(hi, wi, chan)
                self._transf_data[i, :] = drop_rect(img, hi, wi, chan, share=share, positioning="random", val=val).flatten()
    #---------------------------------------------------------------------------

    def apply_local_swirl(self, hi, wi, chan, n_swirls, radius, strength, positioning="random", directions="random"):
        """Apply ``n_swirls`` local swirl distortions to every sample."""
        self._transf_data = np.zeros_like(self._data)
        for i in range(self._d0):
            img_in = self._data[i, :].reshape(hi, wi, chan)
            img = lokal_swirl(img_in, hi, wi, chan, n_swirls, radius, strength, positioning=positioning, directions=directions)
            self._transf_data[i, :] = img.flatten()
    #----------------------------------------------------------------------------

    def salt_and_pepper(self, h=64, w=64, c=3, p=0.5, mi=-1, ma=1.0):
        """Set each pixel (all channels together) to ``mi`` or ``ma`` with
        probability ``p``; the two extremes are chosen with equal chance."""
        self._transf_data = self._data.copy()
        ns, d0, d1, d2 = self._transf_data.reshape(-1, h, w, c).shape
        coords = np.random.rand(ns, d0, d1) < p
        n_co = coords.sum()
        if n_co > 0:
            vals = (np.random.rand(n_co) < 0.5).astype(np.float32)
            vals[vals < 0.5] = mi; vals[vals > 0.5] = ma
            for i in range(c):
                self._transf_data.reshape(-1, h, w, c)[coords, i] = vals
#------------------------------------------------------------------------------

# helper functions for data container
def drop_rect(img_in, hi, wi, chan, share=0.5, positioning="random", val=0.0):
    """Return a copy of ``img_in`` with one randomly placed (share*hi, share*wi)
    rectangle set to ``val``. Only random positioning is implemented."""
    img = img_in.copy()
    if positioning != "random":
        raise NotImplementedError("TODO!")
    # BUGFIX: np.int was removed in NumPy >= 1.24; plain int() is equivalent.
    rhi = int(hi * share)
    rwi = int(wi * share)
    xpos = random.randint(0, hi - rhi)
    ypos = random.randint(0, wi - rwi)
    xdim = xpos + rhi
    ydim = ypos + rwi
    if chan == 1:
        img = img.reshape(hi, wi)
        img[xpos:xdim, ypos:ydim] = np.ones((rhi, rwi)) * val
    else:
        img = img.reshape(hi, wi, chan)
        img[xpos:xdim, ypos:ydim, :] = np.ones((rhi, rwi, chan)) * val
    return img
#-------------------------------------------------------------------------------

def lokal_swirl(img_in, hi, wi, chan, n_swirls, radius, strength, positioning="random", directions="random", corr_size=3):
    """Apply ``n_swirls`` skimage swirls to ``img_in`` and restore a
    ``corr_size``-pixel border from the original to hide edge artifacts."""
    img = img_in.copy()
    if not positioning in ["random", "center"]:
        raise NotImplementedError("TODO!")
    size = corr_size
    for i in range(n_swirls):
        sign = None
        if directions == "random":
            sign = np.sign(np.random.rand(1) - 0.5)[0]
        elif directions == "left":
            sign = -1
        else:
            sign = 1
        xpos, ypos = None, None
        if positioning == "random":
            xpos = random.randint(0, hi - radius)
            ypos = random.randint(0, wi - radius)
        elif positioning == "center":
            xpos = hi // 2
            ypos = wi // 2
        center = (xpos, ypos)
        img = swirl(img, rotation=0, strength=sign * strength, radius=radius, center=center)
        # copy back an untouched frame from the input image
        img[0:size] = img_in[0:size]
        img[-(size + 1):] = img_in[-(size + 1):]
        img[:, 0:size] = img_in[:, 0:size]
        img[:, -(size + 1):] = img_in[:, -(size + 1):]
    return img
#============================EOF DataContainer==================================


#
# simple class to track results
#
class ResultWriter:
    """Write experiment results (a numpy payload plus a descriptor) into
    enumerated sub-directories of a base path, and track per-iteration /
    per-repeat scalar series for later plotting."""

    def __init__(self, pth, out_dir_name, out_name="res", zfill=0, out_imgs=True):
        self._pth = pth                    # base output directory (must exist)
        self._out_imgs = out_imgs
        self._out_dir_name = out_dir_name  # prefix for enumerated sub-dirs
        self._out_name = out_name          # prefix for the .npy files
        self._zfill = zfill                # zero padding of the enumeration
        self._enumerator = 0
        self._check_input()
        self._current_path = None          # last enumerated sub-dir created
        self._res_dat = {}                 # tracker name -> array / list
        self._res_writer_idx = 0           # current save slot inside a repeat
        self._res_writer_rep = 0           # current repeat
        self._fig = None
    #---------------------------------------------------------------------------

    def _check_input(self):
        self._check(self._pth)
    #---------------------------------------------------------------------------

    def _check(self, pth):
        if not os.path.exists(pth):
            raise RESULT_WRITER_EXCEPTION("No such path found: " + pth)
    #---------------------------------------------------------------------------

    def reset_pth(self, pth):
        """Point the writer at a new (existing) base directory."""
        self._pth = pth
        # BUGFIX: original called the non-existent self._check_pth().
        self._check(self._pth)
    #---------------------------------------------------------------------------

    def write_result(self, dir_name, res_dic, res_mat, force=False):
        """Save ``res_mat``/``res_dic`` under ``pth/dir_name``; with ``force``
        an existing directory is wiped first, otherwise it is an error."""
        newpth = os.path.join(self._pth, dir_name)
        if not force:
            if not os.path.exists(newpth):
                os.mkdir(newpth)
            else:
                raise RESULT_WRITER_EXCEPTION("Path already exists: " + newpth)
        else:
            if os.path.exists(newpth):
                shutil.rmtree(newpth)
            os.mkdir(newpth)

        # BUGFIX: plain `newpth + name` dropped the path separator, saving the
        # files as siblings of the freshly created directory instead of inside.
        np.save(os.path.join(newpth, self._out_name + '_data.npy'), res_mat)
        np.save(os.path.join(newpth, self._out_name + '_descriptor.npy'), res_dic)
    #---------------------------------------------------------------------------

    def write_result_enumerate(self, res_dic, res_mat, force=False):
        """Like write_result, but into the next enumerated sub-directory."""
        newpth = os.path.join(self._pth, self._out_dir_name + "_" + str(self._enumerator).zfill(self._zfill) + '/')
        self._current_path = newpth
        if not force:
            if not os.path.exists(newpth):
                os.mkdir(newpth)
            else:
                raise RESULT_WRITER_EXCEPTION("Path already exists: " + newpth)
        else:
            if os.path.exists(newpth):
                shutil.rmtree(newpth)
            os.mkdir(newpth)

        # newpth carries a trailing separator, so concatenation is safe here
        np.save(newpth + self._out_name + '_data.npy', res_mat)
        np.save(newpth + self._out_name + '_descriptor.npy', res_dic)
        self._enumerator += 1
    #---------------------------------------------------------------------------

    def read_result(self, dir_name):
        """Load ``[descriptor, data]`` previously written by write_result.

        BUGFIX: original declared the receiver as ``seld`` (so ``self`` was
        undefined inside the body) and concatenated paths without a separator.
        """
        newpth = os.path.join(self._pth, dir_name)
        self._check(newpth)
        ret = []
        # allow_pickle: descriptor/data are pickled python objects
        ret.append(np.load(os.path.join(newpth, self._out_name + '_descriptor.npy'), allow_pickle=True))
        ret.append(np.load(os.path.join(newpth, self._out_name + '_data.npy'), allow_pickle=True))
        return ret
    #---------------------------------------------------------------------------

    def get_current_path(self):
        print(self._current_path)
        return self._current_path + self._out_name
    #--------------------------------------------------------------------------

    def add_iter_tracker(self, name, n_saves, n_repeats):
        # fixed-size (repeats x saves) scalar tracker
        self._res_dat[name] = np.zeros((n_repeats, n_saves))
    #--------------------------------------------------------------------------

    def get_iter_tracker_names(self):
        return self._res_dat.keys()
    #--------------------------------------------------------------------------

    def add_append_tracker(self, name):
        self._res_dat[name] = []
    #--------------------------------------------------------------------------

    def add_rep_append_tracker(self, name, n_repats):
        self._res_dat[name] = [[]] * n_repats
    #--------------------------------------------------------------------------

    def reset_saved_vars(self):
        self._res_dat = {}
    #--------------------------------------------------------------------------

    def inc_idx(self):
        self._res_writer_idx += 1
    #--------------------------------------------------------------------------

    def inc_rep(self):
        self._res_writer_rep += 1
    #--------------------------------------------------------------------------

    def reset_idx(self):
        self._res_writer_idx = 0
    #--------------------------------------------------------------------------

    def reset_rep(self):
        self._res_writer_rep = 0
    #-------------------------------------------------------------------------

    def save_to_iter_tracker(self, name, val, warn=False):
        """Store ``val`` at the current (repeat, idx) slot of tracker ``name``;
        out-of-range slots warn (``warn=True``) or raise."""
        dim = self._res_dat[name].shape
        if self._res_writer_rep < dim[0]:
            if self._res_writer_idx < dim[1]:
                self._res_dat[name][self._res_writer_rep, self._res_writer_idx] = val
        elif warn:
            print("# Warning! Number of repeats or number of saved iterations exceeds the initial set values.")
        else:
            # BUGFIX: exception was constructed but never raised.
            raise RESULT_WRITER_EXCEPTION("Number of repeats or number of saved iterations exceeds the initial set values.")
    #--------------------------------------------------------------------------

    def save_to_rep_append(self, name, rep, val, warn=False):
        n_reps = len(self._res_dat[name])
        if n_reps > rep:
            self._res_dat[name][rep].append(val)
        elif warn:
            print("# Warning! Number of repeats exceeds the initial set values.")
        else:
            # BUGFIX: exception was constructed but never raised.
            raise RESULT_WRITER_EXCEPTION("Number of repeats exceeds the initial set values.")
    #---------------------------------------------------------------------------

    def save_to_img_iter_tracker(self, name, val, warn=False):
        pass
    #--------------------------------------------------------------------------

    def save_to_append_tracker(self, name, val):
        self._res_dat[name].append(val)
    #---------------------------------------------------------------------------

    def reset(self):
        self.reset_idx()
        self.reset_rep()
        self._res_dat = {}
    #---------------------------------------------------------------------------

    def plot(self, samples, h, w, it, sqrt_n_imgs=3):
        """Plot a square grid of grayscale samples into pth/imgs/<it>.png."""
        if self._fig is None:
            self._fig = plt.figure(figsize=(sqrt_n_imgs, sqrt_n_imgs))
        gs = gridspec.GridSpec(sqrt_n_imgs, sqrt_n_imgs)
        gs.update(wspace=0.05, hspace=0.05)
        n_feat = h * w
        for i, sample in enumerate(samples):
            ax = plt.subplot(gs[i])
            plt.axis('off')
            ax.set_xticklabels([])
            ax.set_yticklabels([])
            ax.set_aspect('equal')
            plt.imshow(sample[0:n_feat].reshape(h, w), cmap='Greys_r')
        plt.savefig(self._pth + 'imgs/{}.png'.format(str(it).zfill(6)), bbox_inches='tight')
    #---------------------------------------------------------------------------

    def new_enumerated_path(self, force=False):
        """Create the next enumerated sub-directory (with an imgs/ folder)."""
        self._enumerator += 1
        newpth = os.path.join(self._pth, self._out_dir_name + "_" + str(self._enumerator).zfill(self._zfill) + '/')
        self._current_path = newpth
        if not force:
            if not os.path.exists(newpth):
                os.mkdir(newpth)
            else:
                raise RESULT_WRITER_EXCEPTION("Path already exists: " + newpth)
        else:
            if os.path.exists(newpth):
                shutil.rmtree(newpth)
            os.mkdir(newpth)
        os.mkdir(newpth + "imgs")
    #---------------------------------------------------------------------------

    def plot_enumerate(self, samples, h, w, it, sqrt_n_imgs=3):
        """Plot a grid of grayscale samples into the current enumerated path."""
        fig = plt.figure(figsize=(sqrt_n_imgs, sqrt_n_imgs))
        gs = gridspec.GridSpec(sqrt_n_imgs, sqrt_n_imgs)
        gs.update(wspace=0.05, hspace=0.05)
        n_feat = h * w
        for i, sample in enumerate(samples):
            ax = plt.subplot(gs[i])
            plt.axis('off')
            ax.set_xticklabels([])
            ax.set_yticklabels([])
            ax.set_aspect('equal')
            plt.imshow(sample[0:n_feat].reshape(h, w), cmap='Greys_r')
        plt.savefig(self._current_path + 'imgs/{}.png'.format(str(it).zfill(6)), bbox_inches='tight')
        plt.close(fig)  # avoid leaking one figure per call
    #---------------------------------------------------------------------------

    def plot_enumerate_RGB(self, sample, h, w, it):
        """Save one flattened RGB sample as imgs/<it>.png (borderless).

        NOTE(review): relies on scipy.misc.toimage, removed in SciPy >= 1.2 --
        confirm the pinned scipy version.
        """
        img = sample.reshape(h, w, 3)
        fig = plt.figure(figsize=(1, 1), frameon=False)
        gs = gridspec.GridSpec(1, 1)
        gs.update(wspace=0.05, hspace=0.05)
        ax = plt.subplot(gs[0])
        plt.axis('off')
        ax = plt.Axes(fig, [0., 0., 1., 1.])
        ax.set_axis_off()
        ax.set_xticklabels([])
        ax.set_yticklabels([])
        ax.set_aspect('equal')
        fig.add_axes(ax)
        plt.imshow(toimage(sample.reshape(h, w, 3)))
        plt.savefig(self._current_path + 'imgs/{}.png'.format(str(it).zfill(6)))
        plt.close(fig)  # avoid leaking one figure per call
    #---------------------------------------------------------------------------

    def plot_batch_enumerate_RGB(self, samples, h, w, it):
        """Save every sample of a batch as its own RGB png.

        BUGFIX: the original body referenced the undefined name ``sample`` and
        raised NameError on every call; it now delegates per image, giving each
        image a unique running index.
        """
        for j, sample in enumerate(samples):
            self.plot_enumerate_RGB(sample, h, w, it * len(samples) + j)
    #---------------------------------------------------------------------------

    def write_result_enumerate_internal(self, res_dic):
        # write the internally tracked series plus the caller's descriptor
        np.save(self._current_path + self._out_name + '_data.npy', self._res_dat)
        np.save(self._current_path + self._out_name + '_descriptor.npy', res_dic)
    # ---------------------------------------------------------------------------

class RESULT_WRITER_EXCEPTION(Exception):
    pass
#===============EOF RESULT_WRITER====================================#

#
# visualizing experiments
#
class Vis():
    """Read one experiment directory written by ResultWriter and visualize
    the tracked series, optionally annotated with the saved images."""

    def __init__(self):
        self._curr_path = None
        self._curr_desc = None
        self._curr_data = None
        self._curr_folder = None
        self._val_keys = None
        self._sum_data = None
        self._sum_filtered = None
        self._plot = plt.plot()
        self._sum_grads = {}
    #--------------------------------------------------------------------------

    def set_path(self, pth):
        if pth[-1] != "/":
            pth += "/"
        if not os.path.exists(pth):
            raise VIS_EXCEPTION("path " + pth + " doesn't exist.")
        self._curr_path = pth
    #--------------------------------------------------------------------------

    def read_data(self):
        """Load the *_data.npy / *_descriptor.npy pair from the current path."""
        if self._curr_path is None:
            raise VIS_EXCEPTION("Path is not set. Use method set_path(path).")
        d = os.listdir(self._curr_path)
        for i in d:
            if "_data.npy" in i:
                # allow_pickle: the payload is a pickled dict of tracked series
                self._curr_data = np.load(self._curr_path + i, allow_pickle=True)
            elif "_descriptor.npy" in i:
                self._curr_desc = np.load(self._curr_path + i, allow_pickle=True)
        self._curr_folder = self._curr_path.split('/')[-2]
        self._curr_data = self._curr_data.take(0)  # unwrap 0-d object array -> dict
        self._val_keys = self._curr_desc[0].keys()
    #---------------------------------------------------------------------------

    def get_curr_folder_name(self):
        # BUGFIX: Experiment_reader calls this accessor, but it was missing.
        return self._curr_folder
    #---------------------------------------------------------------------------

    def get_data_keys(self):
        ret = None
        if self._curr_data is not None:
            ret = self._curr_data.keys()
        return ret
    #---------------------------------------------------------------------------

    def get_descriptor(self):
        return self._curr_desc
    #---------------------------------------------------------------------------

    def get_description(self):
        return self._curr_desc[0]
    #---------------------------------------------------------------------------

    def get_data_description(self):
        return self._curr_data.keys()
    #---------------------------------------------------------------------------

    def get_data(self, key):
        return self._curr_data[key]
    #---------------------------------------------------------------------------

    def annotated_plot(self, key, zoom=0.2, rep=0, pad=0, xybox=(0, 50), max_hight=None,
                       fd=None, figposx=None, figposy=None, xlim=None, ylim=None, ylabel=None, n_datapoints=None, add_points=False):
        """Plot series ``key`` of repeat ``rep`` with one saved image attached
        to every data point; requires an imgs/ folder with one image per point."""
        pth = self._curr_path + "imgs/"
        if not os.path.exists(pth):
            raise VIS_EXCEPTION("No image directory found in path " + pth)
        imgs = os.listdir(pth)
        imgs.sort()
        data = self._curr_data[key][rep]
        l = len(data)
        if len(imgs) != l:
            raise VIS_EXCEPTION("Number of images must match the number of data points")
        x = None
        if n_datapoints is None:
            x = range(len(data))
        else:
            if n_datapoints < len(data):
                data = data[:n_datapoints]
            x = range(n_datapoints)
        if not fd is None:
            data = fd(data)  # optional transform of the plotted values
        _, ax = plt.subplots()
        ax.plot(x, data, linewidth=2.0)
        if add_points:
            ax.plot(x, data, "k*", mew=0.5, ms=10)
        if figposy is None:
            figposy = data
        if figposx is None:
            figposx = x
        i = 0
        for pos in zip(figposx, figposy):
            imgpth = pth + imgs[i]
            i += 1
            fn = get_sample_data(imgpth, asfileobj=False)
            arr_img = plt.imread(fn, format='png')
            im = OffsetImage(arr_img, zoom=zoom)
            im.image.axes = ax
            ab = AnnotationBbox(im, pos,
                                xybox=xybox,
                                xycoords='axes fraction',
                                boxcoords="offset points",
                                arrowprops=dict(arrowstyle="<-"))
            ax.add_artist(ab)

        if not xlim is None:
            ax.set_xlim(xlim[0], xlim[1])
        if not ylim is None:
            ax.set_ylim(ylim[0], ylim[1])
        if ylabel is None:
            plt.ylabel(key, fontsize=15)
        else:
            plt.ylabel(ylabel, fontsize=15)
        labs = range(len(data))
        labs = list(map(lambda x: str(x), labs))
        ax.set_xticklabels(labs)
        plt.xlabel("disturbance level", fontsize=15)
        plt.xticks(x)
        plt.show()
    #---------------------------------------------------------------------------

class VIS_EXCEPTION(Exception):
    pass
#==============================EOF VIS==========================================


#
# class to read data written by result writer class
#
class Experiment_reader:
    """Aggregate all experiment sub-directories under a path, one Vis each."""

    def __init__(self):
        self._visualizer = []
        self._curr_path = None
    #---------------------------------------------------------------------------

    def set_path(self, pth):
        if pth[-1] != "/":
            pth += "/"
        if not os.path.exists(pth):
            raise VIS_EXCEPTION("path " + pth + " doesn't exist.")
        self._curr_path = pth
    #---------------------------------------------------------------------------

    def read_all_expriments(self, verbouse=False):
        # NOTE(review): name kept (typo for read_all_experiments) -- callers
        # elsewhere may rely on it.
        if self._curr_path is None:
            raise VIS_EXCEPTION("No path is set. Set path with method set_path.")
        dir = os.listdir(self._curr_path)
        self._visualizer = []
        for d in dir:
            p = self._curr_path + d
            if verbouse:
                print("# reading: " + p)
            v = Vis()
            v.set_path(p)
            v.read_data()
            self._visualizer.append(v)
    #---------------------------------------------------------------------------

    def print_param_description(self, idx=None):
        if self._visualizer == []:
            raise VIS_EXCEPTION("No data present. Use method read_all_experiments(path) to load experiments.")
        if idx is None:
            for v in self._visualizer:
                print(v.get_curr_folder_name() + ": " + str(v.get_description()))
        else:
            print(self._visualizer[idx].get_curr_folder_name() + ": " + str(self._visualizer[idx].get_description()))
    # --------------------------------------------------------------------------

    def get_data_description(self):
        """Return {folder_name: data keys} for all loaded experiments."""
        keys = {}
        for v in self._visualizer:
            # BUGFIX: original also called keys.append(...), which raises
            # AttributeError on a dict.
            keys[v.get_curr_folder_name()] = v.get_data_description()
        return keys
    #---------------------------------------------------------------------------

    def print_data_description(self, idx=None):
        if idx is None:
            for v in self._visualizer:
                print(v.get_curr_folder_name() + ": " + str(v.get_data_description()))
        else:
            print(self._visualizer[idx].get_curr_folder_name() + ": " + str(self._visualizer[idx].get_data_description()))
    #---------------------------------------------------------------------------

    def get_data(self, key, v_idx=None):
        ret = {}
        if v_idx is None:
            v_idx = range(len(self._visualizer))
        for idx in v_idx:
            ret[self._visualizer[idx].get_curr_folder_name()] = self._visualizer[idx].get_data(key)
        return ret
    #---------------------------------------------------------------------------

    def annotated_plot(self, vis_idx, key, zoom=0.2, rep=0, pad=0, xybox=(0., 50.),
                       max_hight=None, fd=None, figposx=None, figposy=None, xlim=None, ylim=None,
                       ylabel=None, n_datapoints=None, add_points=False):
        # thin delegation to the selected Vis instance
        self._visualizer[vis_idx].annotated_plot(key, zoom, rep=rep, pad=pad, xybox=xybox,
                                                 max_hight=max_hight, fd=fd, figposx=figposx, figposy=figposy,
                                                 xlim=xlim, ylim=ylim, ylabel=ylabel, n_datapoints=n_datapoints, add_points=add_points)
    #---------------------------------------------------------------------------

#===============================EOF Experiment_reader===========================


# ---------------------------------------------------------------------------
# main.py -- FID vs Inception-score experiment driver: argument parsing,
# parameter validation and data loading (the distortion loop follows below).
# ---------------------------------------------------------------------------
import matplotlib
matplotlib.use('Agg')
import os
import tensorflow as tf
import fid
import numpy as np
import math
import utils
import fidutils
from glob import glob
import argparse

#
# parse params
#
parser = argparse.ArgumentParser()
parser.add_argument('--path_IncNet', type=str, help='Path to inception net.')
parser.add_argument('--dataset', type=str, default='CelebA', help='Possible options: CelebA, Cifar10, Other. (default: CelebA)')
parser.add_argument('--path_data', type=str, help='Path to images')
parser.add_argument('--path_out', type=str, help="Path to output directory")
parser.add_argument('--path_stats', type=str, help='Path to precalculated statistics')
hp_str = '''Possible nois types: sp (salt and pepper),
            rect (black rectangles),
            swirl,
            blur,
            gn (gaussian noise)
            mixed (mixture with ImageNet images)
To make multiple experiments, pass noise types seperated by colons (e.g. sp:rect:swirl).
(default: sp)
'''
parser.add_argument('--noise_type', type=str, default='sp', help=hp_str)
parser.add_argument('--img_file_ext', type=str, default='*.png', help='Extension of image files. If no specific extenison i ')
parser.add_argument('--n_imgs', type=int, default=50000, help='Number of images used to calc the distances. (default: 50000)')
parser.add_argument('--gpu', type=str, default='', help='GPU to use (leave blank for CPU only)')
parser.add_argument('--verbose', type=str, default='', help='Report status of program in console. \"Y\" for yes. (default: status is not reported)')
parser.add_argument('--sub_paths', type=str, default='', help='Create sub directories per distortion type. \"Y\" for yes. (default: sub directories are not created)')
parser.add_argument('--img_dims', type=int, default=None, nargs=3, metavar=('HIGHT', 'WIDTH', 'CHANNELS'),
                    help='dimensions of images in the order "H W C" for hight, width and channels. Only needed for dataset "Other" (no default value)')
args = parser.parse_args()
#-------------------------------------------------------------------------------


#
# check parameters
#
PATH_INC = args.path_IncNet
if not PATH_INC.endswith("classify_image_graph_def.pb"):
    # BUGFIX: os.join does not exist -> os.path.join
    PATH_INC = os.path.join(PATH_INC, "classify_image_graph_def.pb")
if not os.path.exists(PATH_INC):
    raise RuntimeError("Invalid path: %s" % PATH_INC)

PATH_DATA = args.path_data
if not os.path.exists(PATH_DATA):
    raise RuntimeError("Invalid path: %s" % PATH_DATA)
PATH_DATA = os.path.join(PATH_DATA, '*')
data = glob(PATH_DATA)

PATH_OUT = args.path_out
if not os.path.exists(PATH_OUT):
    raise RuntimeError("Invalid path: %s" % PATH_OUT)

_H_, _W_, _C_ = None, None, None
PATH_STATS = args.path_stats
if args.dataset == "CelebA":
    _H_ = 64; _W_ = 64; _C_ = 3
    if not PATH_STATS.endswith("fid_stats_celeba.npz"):
        PATH_STATS = os.path.join(PATH_STATS, "fid_stats_celeba.npz")
elif args.dataset == "Cifar10":
    _H_ = 32; _W_ = 32; _C_ = 3
    if not PATH_STATS.endswith("fid_stats_cifar10_train.npz"):
        PATH_STATS = os.path.join(PATH_STATS, "fid_stats_cifar10_train.npz")
elif args.dataset == "Other":
    _H_ = args.img_dims[0]; _W_ = args.img_dims[1]; _C_ = args.img_dims[2]
    if not PATH_STATS.endswith(".npz"):
        raise RuntimeError("Invalid path: pleas state the full path, including the file name .npz")
if not os.path.exists(PATH_STATS):
    raise RuntimeError("Invalid path: %s" % PATH_STATS)

args.noise_type = args.noise_type.split(':')
for t in args.noise_type:
    if not t in ["sp", "rect", "swirl", "blur", "gn", "mixed"]:
        # BUGFIX: original formatted args.nois_type, a non-existent attribute,
        # raising AttributeError while trying to report the error.
        raise RuntimeError("Invalid noise type: %s" % t)

verbose = False
if args.verbose == 'Y':
    verbose = True

if args.verbose and args.gpu != "":
    print("# Setting CUDA_VISIBLE_DEVICES to: " + str(args.gpu))
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu
pth_out = args.path_out
n_repeats = 1
#-------------------------------------------------------------------------------


#
# read data
#
if verbose:
    print("# Reading %d images..." % args.n_imgs, end="", flush=True)
# read precalculated statistics of the real data
f = np.load(PATH_STATS)
mu_real, sigma_real = f['mu'][:], f['sigma'][:]
f.close()
# read imgs into one flattened row per image
N_FEATURES = _H_ * _W_ * _C_
N_LOAD_IMGS = args.n_imgs
X = fidutils.DataContainer(np.zeros((N_LOAD_IMGS, N_FEATURES)), epoch_shuffle=False)

for i in range(N_LOAD_IMGS):
    img = utils.get_image(data[i],
                          input_height=_H_,
                          input_width=_W_,
                          resize_height=_H_,
                          resize_width=_W_,
                          is_crop=False,
                          is_grayscale=False)
    X._data[i, :] = img.flatten()

# images are expected in [-1, 1] (see utils.transform)
assert X._data.max() <= 1.
assert X._data.min() >= -1.
+ +if verbose: + print("done") + print("# image values in range [%.2f, %.2f]" % (X._data.min(), X._data.max())) +#------------------------------------------------------------------------------- + + +# +# load inference model +# +fid.create_inception_graph(PATH_INC) +batch_size = 100 +softmax = None +#------------------------------------------------------------------------------- + +# +# run +# +init = tf.global_variables_initializer() +sess = tf.Session() +with sess.as_default(): + sess.run(init) + query_tensor = fid._get_inception_layer(sess) + + if softmax is None: + softmax = fidutils.get_softmax(sess, query_tensor) + + for noise_type in args.noise_type: + if args.verbose: + print("# Noise type: " + noise_type) + alphas = None + if noise_type in ["gn", "rect", "mixed"] : + alphas = [0.0, 0.25, 0.5, 0.75] + elif noise_type in ["blur", "swirl"]: + alphas = [0.0, 1.0, 2.0, 4.0] + elif noise_type == "sp": + alphas = [0.0, 0.1, 0.2, 0.3] + + # prepare result writer + tmp_PATH_OUT = PATH_OUT + if args.sub_paths: + tmp_PATH_OUT = PATH_OUT + "/" + noise_type + os.mkdir(tmp_PATH_OUT) + res_writer = fidutils.ResultWriter(tmp_PATH_OUT, out_dir_name=noise_type, out_name=noise_type, zfill=3) + res_writer.new_enumerated_path(force=True) + n_repeats=1 + save_interval = len(alphas) + res_writer.add_iter_tracker('Fid', save_interval, n_repeats) + res_writer.add_iter_tracker('Inc', save_interval, n_repeats) + res_desc = [] + n_rect = 5 + + for i,a in enumerate(alphas): + if args.verbose: + print("# Alpha = %s" % a) + res_desc.append({'alpha':a}) + if noise_type == "gn": + X.apply_gauss_noise(alpha=a, mi=-1, ma=1) + elif noise_type == "rect": + X.apply_mult_rect(n_rect, _H_, _W_, _C_, share=a, val=X._data.min()) + elif noise_type == "blur": + X.apply_gaussian_blur(a, _H_, _W_) + elif noise_type == "swirl": + if args.dataset == "CelebA": # bigger radius to make the effect more visible + X.apply_local_swirl(_H_, _W_, _C_, n_swirls=1, radius=70, strength=a, positioning="center", 
directions="random") + else: + X.apply_local_swirl(_H_, _W_, _C_, n_swirls=1, radius=25, strength=a, positioning="center", directions="random") + elif noise_type == "sp": + X.salt_and_pepper( _H_, _W_, _C_, p=a, mi=-1, ma=1) + + + if args.verbose: + print("# -- Range of transformed images: [%.2f, %.2f]" % ( X._transf_data.min(), X._transf_data.max()) ) + X._transf_data = (X._transf_data + 1.) * 127.5 + if args.verbose: + print("# -- Range of upscaled images: [ %.2f, %.2f]" % ( X._transf_data.min(), X._transf_data.max()) ) + res_writer.plot_enumerate_RGB(X._transf_data[0], _H_, _W_, i) + + # calc FID + if args.verbose: + print("# -- Calculating frechet distance...", flush=True) + mu_gen, sigma_gen = fid.calculate_activation_statistics( X._transf_data.reshape( -1, _H_, _W_, _C_), + sess, + batch_size=batch_size) + act = fid.get_activations( X._transf_data.reshape( -1, _H_, _W_, _C_), + sess, + batch_size=batch_size, + verbose=False) + fid_value = fid.calculate_frechet_distance(mu_gen, sigma_gen, mu_real, sigma_real) + res_writer.save_to_iter_tracker('Fid', fid_value) + if args.verbose: + print("# -- FID = %.5f" % fid_value) + + # calc Inception score + inc = None + if args.verbose: + print("# -- Calculating inception score...", flush=True) + inc,_ = fidutils.get_inception_score( X._transf_data.reshape( -1, _H_, _W_, _C_), + softmax, + sess, + splits=10, + verbose=False) + if args.verbose: + print("# -- INC = %.5f" % inc) + res_writer.save_to_iter_tracker('Inc', inc); + res_writer.inc_idx() + res_writer.write_result_enumerate_internal(res_desc) diff --git a/Wav2Lip-master/evaluation/TTUR-master/FIDvsINC/run.sh b/Wav2Lip-master/evaluation/TTUR-master/FIDvsINC/run.sh new file mode 100644 index 00000000..0ea06cf6 --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/FIDvsINC/run.sh @@ -0,0 +1,14 @@ +#!/bin/bash +python3 main.py \ +--path_IncNet "path/to/inception/net.pb" \ +--dataset "CelebA" # Kind of dataset (one of the following: "CelebA", "Cifar10" or "Other") 
+--path_data "path/to/data" \ +--path_out "./out" \ # Path to output directory +--path_stats "./stats/" \ # path to image statistics +--img_file_ext "*" \ # file extension in te form e.g. "*.jpg", "*.png" or only "*" \ +--noise_type "sp:rect:swirl:blur:gn" \ +--n_imgs 50000 \ +--gpu "0" \ # set CUDA_VISIBLE_DEVICES +--verbose "Y" \ +--sub_paths "Y" \ +--img_dims 64 64 3 # need only to be specified for dataset "Other" diff --git a/Wav2Lip-master/evaluation/TTUR-master/FIDvsINC/utils.py b/Wav2Lip-master/evaluation/TTUR-master/FIDvsINC/utils.py new file mode 100644 index 00000000..152b530b --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/FIDvsINC/utils.py @@ -0,0 +1,241 @@ +""" +Some codes from https://github.com/Newmu/dcgan_code +""" +from __future__ import division +import math +import json +import random +import pprint +import scipy.misc +import numpy as np +from time import gmtime, strftime +from six.moves import xrange + +pp = pprint.PrettyPrinter() + +get_stddev = lambda x, k_h, k_w: 1/math.sqrt(k_w*k_h*x.get_shape()[-1]) + +def get_image(image_path, input_height, input_width, + resize_height=64, resize_width=64, + is_crop=True, is_grayscale=False): + image = imread(image_path, is_grayscale) + return transform(image, input_height, input_width, + resize_height, resize_width, is_crop) + +def save_images(images, size, image_path): + return imsave(inverse_transform(images), size, image_path) + +def imread(path, is_grayscale = False): + if (is_grayscale): + return scipy.misc.imread(path, flatten = True).astype(np.float) + else: + return scipy.misc.imread(path).astype(np.float) + +def merge_images(images, size): + return inverse_transform(images) + +def merge(images, size): + h, w = images.shape[1], images.shape[2] + img = np.zeros((h * size[0], w * size[1], 3)) + for idx, image in enumerate(images): + i = idx % size[1] + j = idx // size[1] + img[j*h:j*h+h, i*w:i*w+w, :] = image + return img + +def imsave(images, size, path): + return scipy.misc.imsave(path, 
merge(images, size)) + +def center_crop(x, crop_h, crop_w, + resize_h=64, resize_w=64): + if crop_w is None: + crop_w = crop_h + h, w = x.shape[:2] + j = int(round((h - crop_h)/2.)) + i = int(round((w - crop_w)/2.)) + return scipy.misc.imresize( + x[j:j+crop_h, i:i+crop_w], [resize_h, resize_w]) + +def transform(image, input_height, input_width, + resize_height=64, resize_width=64, is_crop=True): + if is_crop: + cropped_image = center_crop( + image, input_height, input_width, + resize_height, resize_width) + else: + if (input_height != resize_height) or (input_width != resize_width): + cropped_image = scipy.misc.imresize(image, [resize_height, resize_width]) + else: + cropped_image = image + return np.array(cropped_image) / 127.5 - 1. + +def inverse_transform(images): + return (images+1.)/2. + +def to_json(output_path, *layers): + with open(output_path, "w") as layer_f: + lines = "" + for w, b, bn in layers: + layer_idx = w.name.split('/')[0].split('h')[1] + + B = b.eval() + + if "lin/" in w.name: + W = w.eval() + depth = W.shape[1] + else: + W = np.rollaxis(w.eval(), 2, 0) + depth = W.shape[0] + + biases = {"sy": 1, "sx": 1, "depth": depth, "w": ['%.2f' % elem for elem in list(B)]} + if bn != None: + gamma = bn.gamma.eval() + beta = bn.beta.eval() + + gamma = {"sy": 1, "sx": 1, "depth": depth, "w": ['%.2f' % elem for elem in list(gamma)]} + beta = {"sy": 1, "sx": 1, "depth": depth, "w": ['%.2f' % elem for elem in list(beta)]} + else: + gamma = {"sy": 1, "sx": 1, "depth": 0, "w": []} + beta = {"sy": 1, "sx": 1, "depth": 0, "w": []} + + if "lin/" in w.name: + fs = [] + for w in W.T: + fs.append({"sy": 1, "sx": 1, "depth": W.shape[0], "w": ['%.2f' % elem for elem in list(w)]}) + + lines += """ + var layer_%s = { + "layer_type": "fc", + "sy": 1, "sx": 1, + "out_sx": 1, "out_sy": 1, + "stride": 1, "pad": 0, + "out_depth": %s, "in_depth": %s, + "biases": %s, + "gamma": %s, + "beta": %s, + "filters": %s + };""" % (layer_idx.split('_')[0], W.shape[1], W.shape[0], biases, 
gamma, beta, fs) + else: + fs = [] + for w_ in W: + fs.append({"sy": 5, "sx": 5, "depth": W.shape[3], "w": ['%.2f' % elem for elem in list(w_.flatten())]}) + + lines += """ + var layer_%s = { + "layer_type": "deconv", + "sy": 5, "sx": 5, + "out_sx": %s, "out_sy": %s, + "stride": 2, "pad": 1, + "out_depth": %s, "in_depth": %s, + "biases": %s, + "gamma": %s, + "beta": %s, + "filters": %s + };""" % (layer_idx, 2**(int(layer_idx)+2), 2**(int(layer_idx)+2), + W.shape[0], W.shape[3], biases, gamma, beta, fs) + layer_f.write(" ".join(lines.replace("'","").split())) + +def make_gif(images, fname, duration=2, true_image=False): + import moviepy.editor as mpy + + def make_frame(t): + try: + x = images[int(len(images)/duration*t)] + except: + x = images[-1] + + if true_image: + return x.astype(np.uint8) + else: + return ((x+1)/2*255).astype(np.uint8) + + clip = mpy.VideoClip(make_frame, duration=duration) + clip.write_gif(fname, fps = len(images) / duration) + +def visualize(sess, dcgan, config, option): + image_frame_dim = int(math.ceil(config.batch_size**.5)) + if option == 0: + z_sample = np.random.uniform(-0.5, 0.5, size=(config.batch_size, dcgan.z_dim)) + samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample}) + save_images(samples, [image_frame_dim, image_frame_dim], '%s/test_%s.png' % (config.sample_dir, strftime("%Y-%m-%d %H:%M:%S", gmtime()))) + elif option == 1: + values = np.arange(0, 1, 1./config.batch_size) + for idx in xrange(100): + print(" [*] %d" % idx) + z_sample = np.zeros([config.batch_size, dcgan.z_dim]) + for kdx, z in enumerate(z_sample): + z[idx] = values[kdx] + + if config.dataset == "mnist": + y = np.random.choice(10, config.batch_size) + y_one_hot = np.zeros((config.batch_size, 10)) + y_one_hot[np.arange(config.batch_size), y] = 1 + + samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample, dcgan.y: y_one_hot}) + else: + samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample}) + + save_images(samples, [image_frame_dim, 
image_frame_dim], '%s/test_arange_%s.png' % (config.sample_dir, idx)) + elif option == 2: + values = np.arange(0, 1, 1./config.batch_size) + for idx in [random.randint(0, 99) for _ in xrange(100)]: + print(" [*] %d" % idx) + z = np.random.uniform(-0.2, 0.2, size=(dcgan.z_dim)) + z_sample = np.tile(z, (config.batch_size, 1)) + #z_sample = np.zeros([config.batch_size, dcgan.z_dim]) + for kdx, z in enumerate(z_sample): + z[idx] = values[kdx] + + if config.dataset == "mnist": + y = np.random.choice(10, config.batch_size) + y_one_hot = np.zeros((config.batch_size, 10)) + y_one_hot[np.arange(config.batch_size), y] = 1 + + samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample, dcgan.y: y_one_hot}) + else: + samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample}) + + try: + make_gif(samples, '%s/test_gif_%s.gif' % (config.sample_dir, idx)) + except: + save_images(samples, [image_frame_dim, image_frame_dim], '%s/test_%s.png' % (config.sample_dir, strftime("%Y-%m-%d %H:%M:%S", gmtime()))) + elif option == 3: + values = np.arange(0, 1, 1./config.batch_size) + for idx in xrange(100): + print(" [*] %d" % idx) + z_sample = np.zeros([config.batch_size, dcgan.z_dim]) + for kdx, z in enumerate(z_sample): + z[idx] = values[kdx] + + if config.dataset == "mnist": + y = np.random.choice(10, config.batch_size) + y_one_hot = np.zeros((config.batch_size, 10)) + y_one_hot[np.arange(config.batch_size), y] = 1 + + samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample, dcgan.y: y_one_hot}) + else: + samples = sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample}) + + make_gif(samples, '%s/test_gif_%s.gif' % (config.sample_dir, idx)) + elif option == 4: + image_set = [] + values = np.arange(0, 1, 1./config.batch_size) + + for idx in xrange(100): + print(" [*] %d" % idx) + z_sample = np.zeros([config.batch_size, dcgan.z_dim]) + for kdx, z in enumerate(z_sample): z[idx] = values[kdx] + + if config.dataset == "mnist": + y = np.random.choice(10, config.batch_size) + 
y_one_hot = np.zeros((config.batch_size, 10)) + y_one_hot[np.arange(config.batch_size), y] = 1 + image_set.append(sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample, dcgan.y: y_one_hot})) + else: + image_set.append(sess.run(dcgan.sampler, feed_dict={dcgan.z: z_sample})) + + make_gif(image_set[-1], '%s/test_gif_%s.gif' % (config.sample_dir, idx)) + + new_image_set = [merge(np.array([images[idx] for images in image_set]), [10, 10]) \ + for idx in range(64) + range(63, -1, -1)] + make_gif(new_image_set, '%s/test_gif_merged.gif' % config.sample_dir, duration=8) diff --git a/Wav2Lip-master/evaluation/TTUR-master/LICENSE b/Wav2Lip-master/evaluation/TTUR-master/LICENSE new file mode 100644 index 00000000..8dada3ed --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/Wav2Lip-master/evaluation/TTUR-master/Poster/TTUR_Converges_NIPS2017.pdf b/Wav2Lip-master/evaluation/TTUR-master/Poster/TTUR_Converges_NIPS2017.pdf new file mode 100644 index 00000000..a28fb2ab Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/Poster/TTUR_Converges_NIPS2017.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/README.md b/Wav2Lip-master/evaluation/TTUR-master/README.md new file mode 100644 index 00000000..54a95148 --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/README.md @@ -0,0 +1,84 @@ +# Two time-scale update rule for training GANs + +This repository contains code accompanying the paper [GANs Trained by a Two Time-Scale Update Rule +Converge to a Local Nash Equilibrium](https://arxiv.org/abs/1706.08500). + +## Fréchet Inception Distance (FID) +The FID is the performance measure used to evaluate the experiments in the paper. There, a detailed description can be found +in the experiment section as well as in the the appendix in section A1. + +In short: +The Fréchet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1) and X_2 ~ N(mu_2, C_2) is + + d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)). + +The FID is calculated by assuming that X_1 and X_2 are the activations of the coding layer pool_3 of the inception model (see below) for generated samples and real world samples respectivly. 
mu_n is the mean and C_n the covariance of the activations of the coding layer over all real world or generated samples. + +IMPORTANT: The number of samples to calculate the Gaussian statistics (mean and covariance) should be greater than the +dimension of the coding layer, here 2048 for the Inception pool 3 layer. Otherwise the covariance is not full rank resulting in complex numbers and nans by calculating the square root. + +We recommend using a minimum sample size of 10,000 to calculate the FID otherwise the true FID of the generator is +underestimated. + +### Compatibility notice +Previous versions of this repository contained two implementations to calculate the FID, a "unbatched" and a "batched" version. +The "unbatched" version should not be used anymore. If you've downloaded this code previously, please update it immediately to +the new version. The old version included a bug! + +## A pytorch implementation of the FID +If you're looking for a pytorch implementation we recommend https://github.com/mseitzer/pytorch-fid + +## Provided Code + +Requirements: TF 1.1+, Python 3.x + +#### fid.py +This file contains the implementation of all necessary functions to calculate the FID. It can be used either +as a python module imported into your own code, or as a standalone +script to calculate the FID between precalculated (training set) statistics and a directory full of images, or between +two directories of images. + +To compare directories with pre-calculated statistics (e.g. the ones from http://bioinf.jku.at/research/ttur/), use: + + fid.py /path/to/images /path/to/precalculated_stats.npz + +To compare two directories, use + + fid.py /path/to/images /path/to/other_images + +See `fid.py --help` for more details. + +#### fid_example.py +Example code to show the usage of `fid.py` in your own Python scripts. + +#### precalc_stats_example.py +Example code to show how to calculate and save training set statistics. 
+ + +#### WGAN_GP +Improved WGAN (WGAN-GP) implementation forked from https://github.com/igul222/improved_wgan_training +with added FID evaluation for the image model and switchable TTUR/orig settings. Language model with +JSD Tensorboard logging and switchable TTUR/orig settings. + +## Precalculated Statistics for FID calculation + +Precalculated statistics for datasets +- [cropped CelebA](http://bioinf.jku.at/research/ttur/ttur_stats/fid_stats_celeba.npz) (64x64, calculated on all samples) +- [LSUN bedroom](http://bioinf.jku.at/research/ttur/ttur_stats/fid_stats_lsun_train.npz) (calculated on all training samples) +- [CIFAR 10](http://bioinf.jku.at/research/ttur/ttur_stats/fid_stats_cifar10_train.npz) (calculated on all training samples) +- [SVHN](http://bioinf.jku.at/research/ttur/ttur_stats/fid_stats_svhn_train.npz) (calculated on all training samples) +- [ImageNet Train](http://bioinf.jku.at/research/ttur/ttur_stats/fid_stats_imagenet_train.npz) (calculated on all training samples) +- [ImageNet Valid](http://bioinf.jku.at/research/ttur/ttur_stats/fid_stats_imagenet_valid.npz) (calculated on all validation samples) + + +are provided at: http://bioinf.jku.at/research/ttur/ + +## Additional Links + +For FID evaluation download the Inception model from http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz + +The cropped CelebA dataset can be downloaded here http://mmlab.ie.cuhk.edu.hk/projects/CelebA.html + +To download the LSUN bedroom dataset go to: http://www.yf.io/p/lsun + +The 64x64 downsampled ImageNet training and validation datasets can be found here http://image-net.org/small/download.php diff --git a/Wav2Lip-master/evaluation/TTUR-master/Results/figures/dcgan_celebA.pdf b/Wav2Lip-master/evaluation/TTUR-master/Results/figures/dcgan_celebA.pdf new file mode 100644 index 00000000..bd3811d3 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/Results/figures/dcgan_celebA.pdf differ diff --git 
a/Wav2Lip-master/evaluation/TTUR-master/Results/figures/dcgan_cifar10.pdf b/Wav2Lip-master/evaluation/TTUR-master/Results/figures/dcgan_cifar10.pdf new file mode 100644 index 00000000..34ce8628 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/Results/figures/dcgan_cifar10.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/Results/figures/dcgan_lsun.pdf b/Wav2Lip-master/evaluation/TTUR-master/Results/figures/dcgan_lsun.pdf new file mode 100644 index 00000000..74849845 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/Results/figures/dcgan_lsun.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/Results/figures/dcgan_svhn.pdf b/Wav2Lip-master/evaluation/TTUR-master/Results/figures/dcgan_svhn.pdf new file mode 100644 index 00000000..956ef247 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/Results/figures/dcgan_svhn.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/Results/figures/lang_jsd4.pdf b/Wav2Lip-master/evaluation/TTUR-master/Results/figures/lang_jsd4.pdf new file mode 100644 index 00000000..66a4ccb7 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/Results/figures/lang_jsd4.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/Results/figures/lang_jsd6.pdf b/Wav2Lip-master/evaluation/TTUR-master/Results/figures/lang_jsd6.pdf new file mode 100644 index 00000000..c9ee71d5 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/Results/figures/lang_jsd6.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/Results/figures/wgan_gp_cifar10.pdf b/Wav2Lip-master/evaluation/TTUR-master/Results/figures/wgan_gp_cifar10.pdf new file mode 100644 index 00000000..e5fbd6fa Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/Results/figures/wgan_gp_cifar10.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/Results/figures/wgan_gp_lsun.pdf b/Wav2Lip-master/evaluation/TTUR-master/Results/figures/wgan_gp_lsun.pdf new file 
mode 100644 index 00000000..b7c043c4 Binary files /dev/null and b/Wav2Lip-master/evaluation/TTUR-master/Results/figures/wgan_gp_lsun.pdf differ diff --git a/Wav2Lip-master/evaluation/TTUR-master/Toy_Data_Example/ttur_example_saddle.ipynb b/Wav2Lip-master/evaluation/TTUR-master/Toy_Data_Example/ttur_example_saddle.ipynb new file mode 100644 index 00000000..11211b08 --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/Toy_Data_Example/ttur_example_saddle.ipynb @@ -0,0 +1,269 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# TTUR example code\n", + "\n", + "* The objective ( obj ) is $f(x,y) = (1+x^2) \\cdot (100-y^2)$ where the derivative with respect to x is $ f_x = 2x\\cdot(100-y^2)$ and with respect to y is $ f_y = -2y\\cdot(1+x^2)$.\n", + "\n", + "* The objective fulfills assumption A4 from the TTUR-paper. \n", + "\n", + "* It has a sattle point at $(x,y)^T = (0,0)^T$. The gradient at the sattle point is the zero vector, the function value at the sattle point is $f(0,0) = 100$.\n", + "\n", + "* The norm $(x^2 + y^2)^{\\frac{1}{2}}$ measures the distance of the parameter vector to the sattle point. We set a base learning rate (base_lr) and obtain the x-learning rate ( lr_x ) and the y-learning rate ( lr_y ) by scaling the base learning rate.\n", + "\n", + "* On top of the gradient we add Gaussian noise with standard deviation sigma in order to simulate a stochastic gradient.\n", + "\n", + "* We plot the objective (should converge to 100), the norm over time (should converge to 0) and the x-y phase diagram (should converge to (0,0)). \n", + " * The first line shows one-time scale learning, which usually diverges and has large fluctuations. \n", + " * The second line shows one-time scale with smaller learning rate, which usually converges, but slower than the TTUR in the next line (see norm). A small learning rate leads to convergence in this simple example but doesn't guarantee convergence in the general case. 
\n", + " * The third line shows TTUR with the x-update slowed down, which usually converges fast. \n", + " * The fourth line shows TTUR with the y-update slowed down, which is still more stable than the one-time scale update.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 72, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA1oAAAFbCAYAAADMYf8SAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvXuQG+d5r/lrNG5zwVyBuZOiyCElkhIlUaItaY9tWTGl\nrMum45SPQ2fXUmJpE7vsjSrZVKz1Kl554xxbObW7rpSduE6iTZRsOZLLWzm0dWSJ9EW2TmSTEmUd\n3S1SIiVyZjADYHAbYNCNvuwfmK+n0ehudAONywzep2pqgBmg0cA0MN/T7/f9Xk5VVRAEQRAEQRAE\nQRDe4ev0DhAEQRAEQRAEQWw3SLQIgiAIgiAIgiA8hkSLIAiCIAiCIAjCY0i0CIIgCIIgCIIgPIZE\niyAIgiAIgiAIwmNItAiCIAiCIAiCIDyGRIsgCIIgCIIgCMJjSLQIgiAIgiAIgiA8hkSLIAiCIAiC\nIAjCY/wub6+2ZC8IgiAIgiAIgiC2BpyTG1FFiyAIgiAIgiAIwmNItAiCIAiCIAiCIDyGRIsgCIIg\nCIIgCMJjSLQIgiAIgiAIgiA8hkSLIAiCIAiCIAjCY0i0CIIgCIIgCIIgPIZEiyAIgiAIgiAIwmNI\ntAiCIAiCIAiCIDyGRIsgCIIgCIIgCMJjSLQIgiAIgiAIgiA8hkSLIAiCIAiCIAjCY0i0CIIgCIIg\nCIIgPIZEiyAIgiAIgiAIwmNItAiCIAiCIAiCIDyGRIsgCIIgCIIgCMJjSLQIgiAIgiAIgiA8hkSL\nIAiCIAiCIAjCY0i0CIIgCIIgCIIgPIZEiyAIgiAIgiAIwmNItAiCILoIRVE6vQsEQRAEQXiAv9M7\nQBAE0cuoqgpVVSHLMmRZRqlUAsdx4DgOPp+v6ov9jP2e47hO7z5BEARBEBaQaBEEQbQRJlaKokCW\n5ZoKFhMoJl+SJFluy07I9FLGbksQBEEQRPvgVFV1c3tXNyYIgiBqq1Zra2tYXFzEvn37AGxKkKqq\nEEXRkRSxz27jdyv0AgYAwWCwqjLm8/mq9oUgCIIgCEsc/bOkihZBEITH2FWtmMiwKYJAZV1WJpNB\nIpFALpdDIBBAKBRCKBRCMBjUvgeDQfA8X7UdN1KmKArS6TSSyST27Nljel/9FEWzKhlNWyQIgiAI\nZ5BoEQRBeAATKyZXTG7MxITneZTLZVy+fBmJRALFYhHDw8OIRqOYmpqCJEkQRRGCIKBYLCKdTmvX\n2Xb9fn+NiOm/MyFj+8C+8zwPjuPg99d+/OuFTFEUTRj129Bv02z9mJmUEQRBEEQvQqJFEATRAPWq\nVkbJ0FetkskkSqUSxsfHsXfvXgwMDGjrspxMHVRVFZIkQRAETcBKpRKy2ax2ne0Pz/NVAsYCNwqF\ngiZkxuqYk8dnz0kvZGb3MwoYBXsQBEEQvQKt0SIIgnCIXdVK/51RKpWQTCa1qtXIyAhisRgGBwfx\n6quv4siRIzXbd7pGy+n+yrJcJWS5XA7pdBqDg4MQBAGyLAOoTBm0q5D5/X7X++VmHZnTYA8SMoIg\nCKILoDVaBEEQzdBM1Wp1dRV+vx/RaLSqagUAkiS1pV8WmyLo9/sxMDAAABgcHIQkSThw4EDVbY1C\nJooiCoW
Cdp2lH3IcZytkgUCg4QqZk6RFADUyZqyMUbAHQRAE0Q2QaBEEQehws9YKqFSt2HRAfdVq\nz549puuggIooWIlWq+WATVE0wvM8+vv70d/fb3t/WZYhimKVkGUyGe1yuVyGqqrw+XxagIeVkBmF\nyKmQsf2wEjI2jdHn82mCPDAwYBnqQUJGEARBtAISLYIgepp6VSsmAwx9cl8qlUIgEEAsFqupWtlh\nJTtsf1o58Ld7bCfwPI++vj709fXZ3k5RlCohY9MWmZCJoqjd1k7IgsGgaYXKqZRlMhmk02ns3r3b\n8rWlpEWCIAiiFZBoEQTRc7itWq2vr2trrdbX1zE6OopoNGpbtbKjkwP2ZkXLKT6fD+FwGOFw2PZ2\niqKgXC5XCdna2lpVxYztbyAQqBExs+h7YPM1ZsKk/52eRpMWzVIXScgIgiAIPSRaBEFse/QNg9mA\nmmFXtWJrrVjVat++fY6rVo3CRKhVj2E3bbETsBCOUCiESCRieTtVVVEul6vWkemj70VR1II99ELG\n0hnT6bQmZXo5blXSolWFjJIWCYIgegcSLYIgtiX6qlU+n0cmk8H09LRt1YqttdJXrebn5xuqWjVC\nOwbe7apoeQ3HcVrlyg5j9P3q6iqKxSISiURV9L2qqvD7/aaVMXa9meh7J8EeTpMWnTwuQRAE0X2Q\naBEEsS2wq1qVy2Wk02nMzs5qPzNWrYLBIKLRKK666ir09/dv24HtVhUtp3Ach0AggEAgUPXz+fn5\nqutm0feiKCKfz2vX2bRSfS8yo5gZo+9bmbTIpkBS0iJBEMTWgESLIIgti9O1Vn6/H7Ism1atWJCF\n1Rqe7cZ2Fy2nmEXfW+Ek+p4lLXodfQ9UT1tk0yPtgj0EQYDf70c4HKZgD4IgiA5CokUQxJahkbVW\nq6uriMfjSCQSKJfLWtWq3uB6u8JxXFet0doKNBt9z66Xy2UA1dMg60Xfs9vrv1vBhGxpaQn9/f2I\nRqOOgj2M4R4kZARBEN5AokUQRFdjjF6vlxBYLBa1hMBSqYTR0VGMj4+jWCzipptu6sRTsMSsKtHq\nipPP56OKVotoNPpeFMWa6Ht2bDiNvgc2ZYpV19wmLVoFe1jJGAV7EARB2EOiRRBEV9Fo1SqRSCCd\nTiMYDCIWi+Hqq6/WqlayLOPdd99t6/OoB0v/a/eURRoQdx6n0feqqtb0IjNG3yuKoq1LYwKWy+Wg\nqioCgYAmZGbR940kLVrhNNiDjj+CIHoJEi2CIDpOM1UrQRC0hMB9+/aZiovP59PWtnQLnRItYuvA\ncZyr6HsmYCzQY2VlpUrImHzZVciaib53E+xh1xyagj0IgtgukGgRBNF23FatZFnWEgJZP6RoNIr9\n+/fXXTfDttltdFs/K2Lrop9iODg4iNXVVUSjUYyOjlbdzhh9LwgCSqUSstlsVdIigKroeyshazRp\nEYCtkLFpjPopiixt0SzUoxvf3wRBEACJFkEQbaKRqhVLCHRStdpqGEVrfX0dyWQSKysrAFA1sLWK\nEicIMxRFqTlZAVhH3xtxGn0PVI5jKyGzir43XrbaB/ZcZFlGuVy2TVrkOA6lUgmDg4OUtEgQRNdA\nokUQREvQV6300euAddVqdXUVyWSyoarVVoPjOKTTabz77rtIpVIIBAKIxWKYn5/X1uboo8TZZZZc\np+/tZCZlJGS9i5WQOMXr6Hu2TWMvMv0xq4++Z7fXf7d7rkBFyF566SXceOONjpIWzVIXScgIgvAa\nEi2CIDyDiVWpVKpZfO+0ahWLxbZN1cqIKIpIJBJIJBJIpVIAgJmZGezZs0dbGyNJEmRZdhQlzgIS\n2KCWCZl+gMuETD+wlSQJhUIBoVAIPM/TwHKb0axouaGZ6Hv9lEV9xcquObRd9D1r6GyG26RFq2AP\nSlokCMINJFoEQTSMWdVKEAS8+uqruPHGG2sGIvqq1erqKsLhMGKxWNuqV
u0cgLLHy+VymkxyHIdo\nNIrdu3eD53ns3LkTw8PDDe2n0wGuJElVqXWiKEKSJFy8eNFSyMwqZfqQBKK7YUmE3USz0ff66wxj\ndaxcLiObzdpG33sZ7OE0adHJ4xIEsT2h/5wEQbhCv9ZKn+THBheBQACyLGsDi0KhoCUEiqLYsapV\nu1L+yuUyUqkUEokEstkshoaGEI1GcfjwYQSDQe12PM+3JQyDTQHTC9ny8jIOHjxYdTtjSAJbk6OX\nM7Y9s0qDWWpdN9CLPcNYH62tSKPR9yxZMR6P10Tf+/1+yxMIzUbfU9IiQRB2dNd/RIIguo56a63M\nBgalUgmvvfYa0um0VrU6cOBAR9da8TwPWZY9Fy1VVbG2tqbJpCzLiEaj2LFjB6655hrLgVO3pQ46\nWZNjFpLA4sTZZX1qnVWgB5uy2C56bfDa7sptJzBG3yuKgsXFRVx11VVVtzNG34uiiGKxiEwmo11n\nx2wrou/ZPgDVSYv1gj3spIymLRLE1oFEiyCIKphY6RMCGVb/5AuFgjY9jg1cJiYmcPXVV3fNmXUm\nWl4gy7JWtUqn0xgYGEAsFsOhQ4fqnolnWIlWNw+gnIYk6IWMDWbtYsStqmPtFrLtQi+IlhG7pEV9\n9L0VLPpefwKhVCohl8uZnkRwGn3P9kH/3W4f2HPRryMzu69exth1v99PwR4E0WWQaBEEYVm1svpn\nzdZaMdHo6+tDNBrVqlbPPvssotFoJ56KJc2KFgvuYFMgx8bGMDU1hf379zckk91W0fISN0JmNrjN\nZrO21QYzKSMh28RKOrYzzVar9dH39ZIWrabZGpMWeZ63FTIvkhYVRcHCwgKASrCO2fMyS1qkYA+C\naA8kWgTRg7itWqmqWpUQyERjcnKyq6pWdrgVLUVRtCbJq6urCIVCnk6B7LRodUPVw+ng1qzR7vr6\nuqWQ6Qe0rJq2vr7eM0LWDX/bdtOKacFWeBl9z9bT2QlZMBi0FTJFURAOh2uev1mFzAqnwR69dlwR\nRLOQaBFEj8DEismVk6pVKpXS+lqxqtXBgwfrJodxHNd1Z9WdhE+USiVtrVWxWNSCO/bu3ev5IK6T\nosVx3JYajOuFzA69kOkrDcViEefPn4cgCNprzoTMaspiNx27btlKf1uv6LbPG8B5MqiiKDVCZhV9\nbyZirFWD8TXoRNIiBXsQRDUkWgSxTWmkaqVPCCyXyw1XrVj1qJsGPj6fr6aipaoqMpmM1tfK7/cj\nGo1i7969GBgYaOlAoZNrtLpRhL1AL2RsPU4wGEQmk8GePXu02xkDEgRBQLFYRDqd1n7GEuuspix2\ns5B1Y7x7q2lnRctrfD5fw9H3+XweuVwO6+vruHTpUpWQmfUiMztu3QoZAFshY/ugn6LI87xlqEev\nHatEb0GiRRDbCLdVK0mStL5WrGoVi8VwzTXX1P2nb4ff74ckSXUrEO2EyZ8oippM5vN5DA8PIxaL\nYffu3W2NJjcTv3Y+di/GnjPcBCSUy+WaptDpdLomQtw4ZdEoZe0Usq0c794o2/HEgRGr6Pv19XXs\n2rVLm8poFn2/trZWE4Wvj763EjKz6HvjZTP00xZlWa6qypk9L0paJLYrJFoEsYXxomo1Pj7u+Vor\nv9/fMYkwoqoq8vk8MpkMFhcXEQqFEI1GsWvXLgwNDXXsn7fP50O5XO7IY7Opg4Q9eiGzQy9k9Qa2\nZkKmH9x69R7stUHpVq5oNYskSTXR8/roeyvcRN8b00Hr9c9rddKiUcxIyIhuhUSLILYY+qrV0tIS\nYrEYgPpVq0QigUwmg/7+fkSj0aarVnbwPF+3gWcrkSQJyWQSyWQSmUwGkUgEoVAI4+PjuPLKKzu2\nX3q6YY0W4Q16IXMysGVCJggC1tbWkEqltAoEEzL9YNYoZYFAYNtXb9zSCxUtKyRJakgyG42+Z9/r\nNTRvR/S9JEm4cOEC9u3bV7VNqwoZJ
S0S7YZEiyC6HLuq1VtvvYXJycma2+v7WkmS1HQUuVvY1MF2\noX/OrGnw+Pg4ZmdnceDAAfh8PiwsLHSsgmSGnWi1egDA1mgR7cWNkOmnfumFjA1sjWtx9NUFlnbX\nS0LWyxUtRVFa+tzdRt/rj1tW3WXXvY6+Z+sp2fP3MtiDXTZ7XIJwCokWQXQhTtdascG6oiimVatr\nr73WcQNdL2mHaBl7ebHnbNU0mOd5lEqllu6TGzpZ0er1NVrdjtOpX4qi1Kwhy+fzKJfLeOONN2qE\nzG4N2VYfSPayaAHdIwIs+r5e0qI++l6//tFt9L0oilVrgVuRtAigRsYoaZFwCokWQXQB+obBTJwY\n+g9x/e3ZP6XnnnsOiqJgbGwM09PTbata2dFsc2Ar1tfXtaqVIAgYHR11vL7MSbx7O6Gpg0Sz+Hw+\nTZr0pFIpXHfdddp1vZCxwW0ul6sa6DIhMxvQWvVz6iYURWlrmA3RHF5E37M1ZbIsI5vN1u1F1kjS\nIrApZXoho2APwin0qUQQHaKRhEDW14pVrQKBAObn5zE+Pt6Jp1BFIH0tBLUM39gbnlW0WNPgZDKJ\nVCqFYDCIWCyG/fv3u24a3MmUPzOMolUoFLCysoJEIgFRFGsGvcYqhHFqjRtItHoLKyEzYowPFwTB\nVsispKyZY7NRerWitd3fx/Wi7xcWFqCqKmZmZmpOJuTzedPptsZAmnotG1od7GGUMxKy7QWJFkG0\niUaqVmtra1qoA1trpa9avfrqq20fXATS11r+LsQFgPS1uHJjGr+war8myjf2Rs3PBEHQqlaFQkFr\nGrxnz56mzli3qsrWDMViEa+//jpWV1fR19eHiYkJHDp0SBMh46BXnwbG4pL1g2ijlLF4ZrN/8tt9\ngEa4xyo+3AgTMn2lwazBrtm0L69OFpjtU6cr+Z2gVwWTUS6X0d/f7/hkgln0vVnLBgB1hcz4ujcr\nZGb3s0papGCPrQOJFkG0kEarVolEAtlsFv39/YjFYpZrrQKBQMsDHuzEqh4hrrqPlqBW76uyenXV\n9bQi4K23H0M0GsX8/DwGBwc9+wfSDaKll8i1tTWoqopdu3Zh3759VYu5RVF0POhlvcH0SXaFQqFm\n8bm+X06hUEAqlYIsy5aDBoKwohEhY4NYq5MFZiJmFYxgRq8KhzHavdcol8uu+jU2E32/vr6ObDZr\nGX1vJ2ReJS2asbCwgLm5OUfBHiRk7ad3350E0QKaqVrp0/JmZma0tDw7AoFAS0InmpErO+qJ16gv\nhJvm76pcKQNqGuBMql6N0AnRYj28mFxxHIdYLIa9e/eC4zi8+eabWjx/o/A8bzu1hu0HW3zOEuxK\npRJWVlZqBg3sLK7VoLcXqwZEY7g9WWBWvTUGI1hVx8rlck8emyRa7kTLKa2Ivud5vm4vMrdCpqoq\n4vE4duzY4SjY4+6778aJEyfcvhxEE/Tuu5MgPMIYve6mapXJZDA4OGiblmeH3+/3tKLVKsGygonX\nuioAAEqqiQil9lRdHRx/q6HHalcYhizLSKVSWFlZ0Xp4xWIxHD58uKrxbalUalsYBsdxWhrYwMAA\nUqkUotEoRkdHq25nPIvLqmPGaTVWKXadXKNDbF2cnCwAzIWMhQKl02nk83ntTL7dGjLjgHYrQ6LV\nGtFySrPR92z2AavwAqg5fu0qvLIsa8dzvWO6XC7j0qVL3jxxwjG9++4kiAZptGrF+lqZ9XhqlEAg\ngGKx2PD9gfbLFZMqM8Jc9dQfM/FaM4gX4Ey+WlnRMqYhOqlKdmPqoNOzuHYpdsY1OlYViO024O0W\ntvPaOzshe+mll7Bv3z6Ew+Gq6bT66HDjgNasl5P+8lY4PhttVrxd6LRoucFN9L3x+LWq8PI8D0EQ\ncOHCBdOkRf3xu7y8jImJiVY/TcIAiRZBOMBt1apcLlclBLKq1XXXXVd3sa4bmqloSav7Kt83rvdx\n3
u0Xw06qnOBEvIBa+coqMmZjF6t+5mXqoKqqyGQymjw3kobYjaLlFDcpdvrBgt2A12q6Iq0fc4fV\novquRxTB/+xn8D/+OMq///tQrr/e1d31YRhuKmTG6HC749NKyjopZL1e0WLCsZ1wevwqioKVlRUk\nk0kMDg5WhdKwr7//+7/HCy+8gLGxMQwNDSGRSODb3/42ZmZmMD09jenpaTzwwAN44oknMDExgVde\neQUAsLq6it/5nd/BxYsXsWvXLnz3u9/F6OgoVFXFfffdhyeeeAL9/f34x3/8Rxw+fBgA8Mgjj+Cr\nX/0qAOCBBx7A3XffXbPPjWx3q9O7706CsEFftVpdXUUwGNSm9VlVrfL5vJYQ6GXVyo5G12gxydJj\nlKJmxKtZwbLCqXgN+3hT+do5DwAXG3rscrmsraXL5XIYGRlBLBbD7t27GxrodFq02vHY9aKZGWZT\natgaB0EQqtaPmU1VtIpl7kW2VPpeoQD/j34E//e/D//Jk+CyWaiDg5BvvdW1aDUShuG0l5N+faOx\nQqYPnDGuwTFKmVkCaLOwqWO9ynau4NaDvc+HhoYs1/oeOXIEgiBgaWkJ3//+9/HSSy+B4zi88MIL\nWFxcxNLSEs6fP4+xsTHkcjntfl//+tfxG7/xG7j//vvx9a9/HV//+tfx0EMP4Yc//CHOnTuHc+fO\n4fTp0/jc5z6H06dPY3V1FV/5ylfw/PPPg+M43HjjjTh27FjN9HS3290O9O67kyAMWFWtEokEhoaG\nav4Zs6oVSwgcHBxELBbzvGplh9uKlplgWeFWvFolV3YoqP0nW7b4xxveGOCkkldqP8tvuEaAU2sq\nYKwpNJsSqCgKotEorrjiCgwNDTU9YPL5fB0bJHTysc1wMqWGLTrXD3iLxaLp+jF9oAdbc5bL5bSB\n75as+Dik6ytaq6vw//CH8P/gB/D/5CfgSiUoY2MoHzsG6SMfgfzBDwIu16oCrRVMp0JmdcKA/Uyf\nAGo2VVE/ZdEpkiS5Xtu7XZBleeucVGgRgiDUHW+EQiHs2rULoVAIH/jAB/CHf/iHNbe5ePEiPvKR\nj2jXT5w4gaeffhpAJUDjtttuw0MPPYQTJ07grrvuAsdxuPnmm5HJZLC0tISnn34aR48exdjYGADg\n6NGjePLJJ/GpT32q6nHcbnd6erqJV6c7INEiehZ91UofvQ5UV62CwaC25sRYtYpGo5ibm8PBgwc7\n8oHvJt7djWSZYSVe7RKsouqschcwTuO0EYqI9ifjNAHLGwo9gcAzDQWVNEOrB8pbsY+WftF5vRQw\nfWDC6uoqRFHEwsJC3aa7biPFu5FuFC1ucRH+xx+H//HHwT/zDDhZhjI7i/Ldd0M6dgzyLbcAHlRl\nOv28na7BYScM9FMWzVLqrIRMP2Wxl6cOSpJUFTDUi4iiiKGhIUe3jcfjeP/73+/otsvLy5rkTE1N\nYXl5GUAlSn7Hjh3a7ebm5rCwsGD582a3S6JFEFsMfdVKv17Hbq1VoVBANpvFxYsXEYlEWrLWqlHY\nP1o7mhUsK1JKoep6P+f9x4lTubLDjXgBevnaIPw+xHMActU/3jVxoel96xRbUbScYuyTw3EceJ7H\nnj3VU0ntejyxQA+gOjDBKtCjm+iWqYPcuXMIPP44/D/4AfjnnwcAyHv3QrzvPkgf/SiUw4eBLhPC\ndqFPALXC2JLBGBvOptQKgoBUKoV4PG4pZdt1jaMoilsmCKNVOKloMZaWljAzM+P6MVrVf6tX+np1\n138IgvCYelUr45tc3/comUxCVVX09fWhv78fhw4d6roPhXpTwFohWVbyY/x5o+LlhVzZYRQvoCJf\nJZvXMWDyZ39jed70thmlDzdPv9zw/gGtr0psZ9FyipseT2aDXbPpYHb9x9o12O1YRUtV4Xvppcp6\nq8cfB//66wAA+YYbIHz5yxW5uuqq9u/XFsXYksGKV155BTt27ID
f79eO0VKppIUi6Nc46o9RMynb\nakK2lRIHW4UgCI6revF4HLOzs45uOzk5qU3dW1pa0tIKZ2dnqyLiL1++jNnZWczOzmpTAtnPb7vt\ntqa3ux0g0SK2HY1Urdh0wGw2q/U9uv766xEKhZDNZnHp0qWukyw7lhK7qq7H+OanV7gVIDfi1Wq5\nMlJSa4Mg9DJVduAgEV/tPucVP0Z865YSdvXkecf72MoBc7vCMLYDTtbnGKsPgiBgfX0d2WxWu65f\nP2YV6OFFQ+i2ipYsg//lLyvrrR5/HL5334Xq80G+9VaUHnoI0kc+AlU3HYjwHlmWEQ6HEQqF6lbI\njGvImJCZNS23m7LYLUJGouUuDCWVSlmGZhg5duwYHnnkEdx///145JFH8LGPfUz7+Te/+U0cP34c\np0+fxvDwMKanp3HnnXfiS1/6EtLpNADg5MmT+NrXvtb0drcDJFrElqeRqlUul9PkSlVVjI+PY8eO\nHbjmmmtqbh8MBiGKYlueS6PoB1dGyQKAhFy9/27FywsRMhOvdgiWmVTVw6yC1Yx8MYwCNrYHeGO5\nWsDYMdvKilMnEw+3I06rD6whtL5Ctra2hlQqpQ2A2XvZaqBbryE0E7qWIQiVGPYf/AD+//Jf4Esm\noQaDkD/4QYh/9meQPvxhqNFo6x7fhF6uzjpdo+W0sa4+dIYdk2YnDQBooTNWwR6tFrJeFy03xz0b\nJ5n9TT71qU/h6aefRjKZxNzcHL7yla/g/vvvxyc/+Uk8/PDDuOKKK/Dd734XAPDhD38YTzzxBObn\n59Hf349/+Id/AACMjY3hz//8z3HkyBEAwJe//GUtGOPee+/FZz/7Wdx0002ut7sd4Fx+QPXupxnR\nNbAPDH1CIIMNMKyqViyam1WtotFo3bK7JEk4e/Ys3vve93r/ZDzg9OnTuOmmm8DzvKlkOcVMvlol\nQlbyE+a8WVtSsJAI3sPxp5V4ldT6z6GgOh8c5JUQjow/7/j2TlhaWoIkSVWLj7crqVQKmUymZo1W\nN2NsCG2cumhsCK0f4LKWFPv37/euv9PaGvynTm3GsOfzUCMRSEePQjp2DNLRo0Ak0vzjNIgsy3jx\nxRdx4403dmwfOsVzzz2Hm266qe0zLsxSQI0984xCZiVljVZx3377bQwNDSHaZrHvFiRJwksvveSo\n31SxWMSxY8e2TWR6l+DoTUcVLWJLYFW1spoOaFa1ikaj2LlzJ4aHh139U+J53rNGt62ARbyvrDY3\nkDRWvQZasKC+XnXJ+Hs34mUlV3pkEzlqVL7Kau0dZWefuxjgzJMizQQs4hPwRvramp/nleoF0Cl5\nEL858bSjx6c1Wt2N04bQsizXBHrkcjkUCgW8/PLLjtLrrNbmcKkU+B/+EIHvfx/8T38KThCgRKMo\n//ZvV2LYb7sN6IJAIKAipt0yna0TdGJau5sUUNZigR2nrC0D+xkTMjZ91ir23ihkvV7RcrM+a7tE\npW9FSLSIrqRe1cpMrkRR1Ppa5XI57UzXDTfc0FQEbLevzfL7/Ujnr/Zse8KGQAgmVjLWoJU0Mn3P\n7H5m4uVEsOwwPk2rp1hU6j933qTo71S+AHMBM8pXeaNiFjbcdtafxsur11Xuo1gPgG+OnqE1WtsE\nnudrGkKP4kg2AAAgAElEQVRns1nE43FcpQueME4FEwQBuVyuJixhMJ3G5C9+gfFnnsHA2bPgFAXS\n7CzW774bysc+BvXWW4EuFBrqp9S9sGmwwWDQkZAZm0Lb9cnLZrMIBAIQRbFKynrlWBBF0XHi4OLi\nIolWhyDRIroGJlZMrpxWrRKJBFKplFa18qqhrNn+dYt0lUolrZHu9I7f9my7gkmVBgAGT5bgX5Qh\nTvGQpnyQpnhIMR/Ggvb/0BoVrHrbK+n20+vzmUy8rF4Lt7iRr5JaO4jlUf0a8hx7Daw/vgd81r3N\nfpl8D8ADGATiScub4eboGet
fbiF6rXJnFu9ut36MO3cOge9/vxLD/sILAABx716s/sEfYPW225Dd\nvRvCRv8x5ezZqoGzVaBHuz8nZVnuyYrWdjq29ceVHXohy+fzCAQCWFtbq5q6aNa43Fgp2w5C5iba\n3U3iIOEtJFpEx2BVK0mSsL6+XvUBa1e1YtMB9VWrnTt3trRxIetX1alpCqqqIpPJaFLp9/sRjUbb\nIlkAMPTdIiInqwfvqg+QYhvSxeRrkkdohkdpioM8xYOb8kEd9O6fWclkH8sGaQk0sZTUK7mqBw8V\nIgzTYBys7dIT5szXz9kJGFArYbLucYvq5nvo2WT9NYlWlbOjEz+ve9920i0nSNpB3RNCqgrfiy9W\nwix+8APwv/41AEA+fBjCgw+i/NGPQt27F0EAUxtfxu0b1+PkcjntOmsI7fP5bKcrerZ+DN3TO6zd\n9KJg6oWM4zjMzc2ZHkfG4BlBEKqCZ/RCpj9xYBY8063HFqvkOWFpaYlEq0OQaBFtxaxqVSwW8eab\nb+KGG26wrVolk0lwHIfx8fGWVa2sCAaDbZ8PzqQykUggn89jZGQEsVgMu3fvht/vbyr4Qo8TuVj8\nT6PgUwr8cQX+uFz5WlYQ2LgcvCCj/xci+FxFcvR96uVBDsqUD/IUD9n4fXLj+4TPdrGUmWBZYRQv\nwF6+2iVXRrEyEuBqq39u5QuoFrCyrkImmlTLjPRz1umaegljWFXOjJJmN5XRDHb735o65ep+vczf\nXvokQr6N6aR9wM8XNn+3Xg5g7oVVXPWjZez78TIiSyUoPId3bxzFm186gDd/YwL5qT4Apytfl4AA\n52JdahBY48OAdQI+UN74WjP/9THxfzcVMyeJer0oHIDzxMHtit1JBb1ARWyCWtiJA/20WiZk7OSB\nPgnU6uRBJ4RMEATb56YnHo9riYBEe+nddyjRFpystQqHwxBFUfvANKtaxWKxllet7GDzwO166TSL\nPsAjkUiA4zhEo1FceeWViEQi2uvzyyUWjBDBFf58U4/pWDJ8HOQYDznGQ7jWWjbLRSCwIWBMwtjl\nwLKMwLNl9K+UYCzGqD5AjhkkbNqH0iQPaYpHYKNq1mh1zEy+lDbMuqknV/VwI1/lOiIVtBg4OxEw\nwL2E6bGbyqiHCRa7/amV9zu6HwDwnFq1jeodAF5ddrwpAEBRsX9OrkTEBuP+Bi0qlfUYM/w394kK\nZp/NYNepFHb+ZBV9q2VIQR8u/XejeO5/3oWLt0VRGt18L0dQcv2YJd36wUHe/f2L8uZzfzz4YOWC\nsPFl4AOp/8V0qmIoFIIkSSRaPYZX0yY5jtOOJztp0SeB6huXWwmZXey9VyeI3YRhLC4uYmZmxpPH\nJdzRm+9QoqW4XWvF8zzK5TLOnz+vVa1audaqEQKBAMpl86S4ZpAkSRMrfbPkw4cPO/oAfUeq/cfg\nVL68rORoQtEPiFf6IF5p89GiqPAnlU0JW5YR2KiU9S9LCFyQEfqlCD5rsrZpkIPMpilO8ZAmdZc3\nfi7HrKtjdtHrQTS/nsxsjZURH9fcACHAKXWnB7rBqYApNtJoNY2R4XR/a6Y1mjxm2eXUSCcw2dGv\nh4v43IuDGXoZ8ZkcYxHfuiePI6gBBNfK2PHzDK44lcLcz9IIFmSIAzwu3zaKi0fHcfn9o5AG+I19\nUdGP+v0B7YTTGMhiRcminUE/b/230ksYAPxs/P/cvKIAWN/4AnDz4he0WHurdTndPg2sEXpZtNw0\n6vUCp0mgxtYMLAnUOLWWCZ6VlDkRMjdhGPF4HHNzc46fL+EdvfkOJTylXtXK7B+bcVqcKIoYGBjo\naNXKDq+aFquqikKhoAVZyLKM8fFxzM3NmTZLNrJZzbLGTL6AagFriWQ5xcdBmuAhTfAoHar9NavY\n+IoKAssyhlfEypTFpY0pi3EF/LKMvmcF+FeU2uoYX1k7JuvkqzTpR3mKBz/Fo7zxM2Wger/Nn
ocT\n+XIiV3oUk9feiXzVkxWrbZg9Xj3MqlxMEuyEy4p6IlaoUxHTE3BY8aknZHrcyllJsa7q+gxVyH7O\nvfjZPQZv+DuHVsuY/XEGO06lMfVvWfCiitKYH+9+ZAyXjo5i+eYhKKHK38wPBf6Nv6Pdc6jaf19z\nMgbYC1k9CTOr1Bol7Jnpv625zWcG/y/baWBmIsau2zWE7iZ6WbS6NdrdjZAZpyzWW+to/C6KoqMT\nB6qqYm1tDcPDw149TcIFvfkOJZqmkYTAbDarhTmwqtWuXbswNDSE06dPY2JiomunfzRT0WJnWhOJ\nBNLpNPr7+xGLxXDo0CGEw2HH23EiWXYwATMm3s3yjU0/bHZanBHjlDil3wfhSh9WrjTrLbUx2JbV\n2rVjG5f5uAz/WzL6nhUxkjOpjkU4lCd5lDcqYuUpH8pTG9c3fi5Ga6tjQSiu5aoeZjIkwpvHMBMw\n/eM5nTqobc9GPutJmFXaYr3KiNVg3A4nQuZGxvSEfc4/C9y+vk4eo29RxOypDGZPZhB7bg2cAqzN\nBnHudydw6Y4xJA8PQq3TisHJc/BKxvQiZqxSWk3BtJM3YyXMKF4A8P+s/UnlAgcgDPzZ1f+k/Y4N\ncvXTwDKZjDbgZZ/1+kGzVf+xTgpZL4uWmyCIbsTn8yEcDtcdA5gdq9lsVrt89uxZUyFbXFzUkgZZ\nJWsrnDzYjvTmO5Rwjb5hMBMshtOq1fDwsCZXxjNRrGKk7wfTTQSDQRQKBce3LxaL2nMXBAFjY2OY\nnJzE1Vdf3dDUlWYli2E20F2QzStgdgLWasmqR17Z+OjiAEQrX5FDmwNr4xRBrsjWjSnVa8eWK99D\n/yYhsCLDOOZTeaA8UamMlad4CBuVsdAkD3FDysqTPLgBb/6BeSVXdhhfa15XgZEbCN/Qo5cwo3S5\n6SdmxE7EZNWHcoOvWyuqY0aspmaaYSdlkfPrmD2ZxeypDMZeLgIAsnvDeP1zU1i4YwSZA33AxkDK\nD+vHdPNc3MqYYiXSFtuxC0mxkjczATObgmiUr7+6eFfNbf5s1z/V/EyPsSE06+3ELusbQttVyFp1\nArFX16YB3VvR8horIWNpzYcPHwZQLWSsOvbMM89geXkZy8vLuHz5srYkYWZmBtPT05iZmcHMzAyO\nHDmCa665xnIfnnzySdx3332QZRn33nsv7r///qrfC4KAu+66C2fPnsX4+Dgee+wx7Nq1CwDwta99\nDQ8//DB4nsdf//Vf48477/T2BdoCcC4XFG6fpg1EXeyqVvrv+tubVa1isVhVmIMZr7zyCubm5jAy\nMtK6J9QEmUwGCwsLOHjwoOnvFUVBOp1GIpHA6uoqgsEgYrEYYrFY0wEarZQst8zyeU8lq5FUPfvt\nVQ86+l1UHiBX1o4F4jL8y9Vrx/Ri5reojokbVTAmX+IUD1l3XTKpjgGtlysvXmMnAtbIlEJt+w6P\nzUZFsFERc/04Hq6dg6pi5OV1TJ/MYvpkFpG3KjKxen0/Lh8dwcIdI1i70nlF3Cn1noPsYiqq0EAl\nEnCfUllv6qJ2O5PKl5564mUGOwGplzGjnLETk/r1Y2YhCW5Pwl28eBF9fX2YnJx0vd9bnaWlJUiS\nhB07dnR6VzpCoVDAhQsXbAWJ8eqrr+Ib3/gGHn30UZRKJcTjcSwuLmJpaQmLi4vYv38/PvShD5ne\nV5Zl7Nu3D6dOncLc3ByOHDmCf/mXf8GBAwe02/zN3/wNXnrpJXz729/Go48+in/913/FY489htde\new2f+tSncObMGSwuLuJDH/oQ3nzzze10csDRhyFVtAiNRqpWgiBoCYGsahWLxUyrVnaw+cbditka\nrVKppFWtCoUCRkdHEYvFsHfvXs8+SLpJsgDr6leMd17tY3gpWVaJe0WTqU+W8sVzkCYr0wY3t1u7\nj76ColXCgrqqWCAuI7gsI/xvgm11TF8ZEzUp82sVMqW/+
dfFa4HlTdIPZdXXlFxVbd+mqXOzVTYA\nCFhUeFgFxix0o6HHaXK6IiepGHu+gJmnspg6lUX/UhkKD6TeO4gLn45i6egwSlObx3TQpnIFNDZt\nsd5zCHDOpxSGbCqRdhLG1syJhtfKSqjMph+uybUSqj+Os1Lt7Ikvv/U/aZf/jz1/Z7l/ejiOs20I\nzWB9nYwx4vrrdql1+kAPdtKy3YEQ3US5XN7SUwebxU0QxtLSkpY4GA6HsWvXLq3iVI8zZ85gfn4e\nu3fvBgAcP34cJ06cqBKtEydO4MEHHwQAfOITn8AXvvAFqKqKEydO4Pjx4wiFQrjyyisxPz+PM2fO\n4JZbbnH+RLcBvfkOJTQaWWuVyWSQTCarqlbGCHK3eBU20SpYvDtrGpxMJsHzvCZWAwMDns9/7jbJ\nsiMhWw8wjBLW6ipWPczkC9gUsHr7pwz4IOz2QdhtM9jcqI4FNQmTwMdVBOMSgssy+s6XMfRv6/Dn\nawVDGuJQnqyIl17EmJiJk/5KdcxX/Xf1+nWtfUom0tnEeq2a7Vscp2aSZ7dP1vtj/T7gHYSetErG\nfIKC6H8tYPKpHCZ/nEcwLUMOcVh5XwRv/PEU4rcPoTza2L9qJ9MWG5GxelMK64mYAp/l2iy30wmt\n5MsYN28Ur2F/ddqjUbwakS479H2YBgcHLW9nTK3ThySw9WNsTY4oiigWi8jn86b9x7bzmhwWoNWr\nuIl2j8fjDUe7LywsVFUN5+bmcPr0acvb+P1+DA8PI5VKYWFhATfffHPVfRcWFtBrkGj1GM1UrRKJ\nhJZc00jVyo5QKORqDVS70K8zy2azuHTpkufPvZW0Q7LqoZcw/cB4vIEqmB63glWPvMmArV5aniW6\n6lj5Wuv99BUUBOMygstSpTq2IWLBuIxgXMLwuTICidrqmOKvVMfEDSETNDHzQ9RVyJS+5uSgmWqS\nEwlr9visJ2F2cuX6sTyUMX9eRuzpNUyezGHi6TX4CwrKgz6s3B7B8p1DSLx/APLA5nETgPVx2Ox0\nxVbIWNhXhmJx7NQLNrFKgLQSMKfy5Ua8Wi1ddjhNrZNlGa+++ipisRh8Pl/V+jFRFLVAD57n6wZ6\nbEWoouW8orW4uFhVgSLaC4lWD9BM1YpVbqLRKHbv3t1U1cqOYDCIdDrt+Xbdoqoq8vm8VrVSVVUL\n8FhbW8O113pTZaqHF9WsbpAshtmAPWVRBasnYF4Llt3rZBWp7kTAnOynMuBDaY8PpT321bFAclO+\ngsuVNWNM0PreLGP4mXX418yqYz5bERMn/SiPb1bHrAbtdtLkhlaulTKvuNkvK/ZSxAB7GQsmJcR+\nnMfkU3mMPVsEL6oQxnksHhtG/I4IUrcMQA26F9tWpisy6smY06mEQGNR74A7AXMiX3biZVftaqd0\n2cHzPFRVxejoqO2AW5KkmjVj+XxeuyzLlb8tWz9mFejRbf3HeiUMwwpBEGwro3ri8TiOHj3a0OPM\nzs7i0qVL2vXLly9jdnbW9DZzc3OQJAnZbBbj4+OO7tsLkGhtQ4xVq7feegs7duzQphI4qVqNjIxY\nJgS2gmAwCEForN9Ms0iShFQqhUQigUwmg8HBQcRiMVx//fVV/8A4jtPm0LeSXpAsO+wErJ2SZYeV\ngJmtNWoankNhIoTCBACTvmPazdYU9C2LGxUxqapSFlyW0P+miGBChrEIpAQAMeaHOOmHMOmHsCFj\nwhS7XPluVh1zImDtlis31BMxoDkZCy+UMXGyIlejzxfBKUBxLoBLnx7F8h0RZA73aQEpvo1HY3g1\nVRFofbpis1MJte0YJIy9BnZhF0GT55aXa9dfGd+beUNFS38spCXrAKNMeXPbf/TGH2EkUJGyTkiX\nk3h3tn7MLpRJVVVIklQV5lEsFpFOp7XriqJUTX80q5A5abLrFb0uWm7XaDUqOEeOHMG5c+dw4cIF\nzM7O4tFHH8V3vvOdq
tscO3YMjzzyCG655RZ873vfw+233w6O43Ds2DH87u/+Lv7kT/4Ei4uLOHfu\nHN7znvc0tB9bGRKtbYKxYbC+apXL5WrK7IqiVCUEsqrVnj17MDg42Pa53e0Mw1BVFcViUWsaLEkS\nxsbGMDs7iwMHDlieueN5vuWLjx+++JsArD8QD4bqz2/eypJlhQIOCdn67N2Ib93yd2Z4/RqxAarV\ncLNenygr3EzZkgd9WBsMA3tqf6dNsZNUBJMyfEtAcFlCKC4htCwhuPF94E0Bo88U4C/UCkh5yAdx\nSi9jfggbVTFh4+flcR5yG898G6cOenW86XElY6qKgfMiJk/mMfFUHsOvVKom+X0hvP35KJbvjCC/\nP6TFsNvhZKoi0H4ha0TG6olYvZRBuz5dZhIW4Ws/D4zyFTFUtPTiNeovVv1OL15MrICKdDHx+qM3\n/ggA8NdX/7XlvnqNoiieVJo4jkMgEEAgELCtkqiqWtNkN5/PI5lM1m2yq//yYv2YLMtbdtqjFwiC\n4Fi0lpeXG16j5ff78c1vfhN33nknZFnGZz7zGRw8eBBf/vKXcdNNN+HYsWO455578OlPfxrz8/MY\nGxvDo48+CgA4ePAgPvnJT+LAgQPw+/341re+1ZN/M4p336LUW2ul/xB7/fXXMTk5iYGBAW1KnL5q\nNT4+3vEzQ6qq4he/+AVuvfXWlmyfNQ1OJpNYXV1FX1+fFr/utGnwr371K1x11VVNx7VbUZGs5ghy\nEnYHVzzYm+bxUrIaxShgrRKsRjETsEYb3NbDTTgFn5cR2qiMhTaEjF1mghZMWlfHBJ2Q1Xyf8kMJ\nd2YaUitEDKqKoZdKmHgqj4mn1jBwoSIFmRvCWLkjgsQdERSvDHo+TbERvJSyengZde827h1wFvlu\nVvna/J35/wWzape+yqWn1dL13HPP4ciRIy19jEYw9nQyTl3Urx+zmqrIhMyKM2fO9GR1hHHmzBkc\nOXKkrrCqqor3ve99ePHFF7d1OEqHoHj37YZd1cpsrRWrWrE1R+FwuKNVKztasS/r6+ta1apUKmFs\nbAyxWAxXXXVVQ2cBA4GA9g+iG2FTaN4WJxzdvlVC1g2Cxcgo1gOpiK9k+bt6eDWI1K9J0UugVRx5\nIzQSwS5HeBQjPIrz1oNVRQKCiQ3pWtmsjrHvA78WMPZzi+rYsM9exib9KI/xNcmK3QInqRg5U8TE\nyTwmTq4hHJeg+IH0e/vx7u+NInF0EMJU9ckrJ5UxwPt1Y3p4KI73o9kpn15OVzRbn6Xv52UmYmbR\n8lm5WpKM+6j/vVW1VF/tYtJlrHIxOlHl6gasmuwa0fcfYzLG1o+JoljTEFofcS/LMtbX1xEMBnuy\nSgI4GzfJsgyfz9dV471eg0Sry2FipQ+xAKwTAllvJ33VKhKJYGxsDPPz8+3cdddwHNfUVAhFUbT4\n9VQqhUAggFgshquvvtqTGFgW8d4Kmq1mma1TqIdRyMwGYLuCCVfb7CbJqkdesR4EWEmYpw1pNzCr\nstkNcp1ImFf9rYxU7asfEKYDEKbtq+GV6phBxNjlZQmDbwiW1TFhwg9hKoCSbv0Y60PGhMyuOuZl\nBctXUjD+XwuIPbWG2I/zCGYUyGEOqfcP4PyfRpC4fRDSSPMDPqcixKj3XnG7PYYb2W9GysyEzDht\nsl5Fyyoow3jfYb5oehsmWGa/Z7/Ti1daGtj42eZrmxL7MRJYN5UuJlyAd9LlcjZSV8LzPPr7+x2t\nH9NXxQqFAhRFwfnz56saQhvXj+mrZI00hO5WJElyLJeJRAKxWKzFe0TYQaLV5ZTLZW0RqlXVSt/X\nivV20letWNBDt8N6aTmdygfUhniwpsF79uzxfC1VMBhsSUWrE5JlxGowdlGs/wHNZGwrSVY9jBKm\nn3I1wHkj241OY7Qa1AYgt0Swmp1uuVkdqx4s648XTlIr1bFlCeG4hNByWZOx8LKEyOs
Cok+vwV+s\nPU7FER+EyYAmXqXJipwJk5vXG62O+XMyoj9dw8TJNYz/rPL45YgPid8YROKOCJLvH/CkwXQzGN+7\nXvUdc4PbCiw7hp2uQ7MTKT1mQuZEwuwEy/i7rNyPUX91MmpaGsB4cPN2KdFaHLyqcvXKGiX9+jF2\nwrRQKKBUKuHgwYPa7VhDaH2FbG1tDalUSrtubAhtNl1R3xC6W2m0WTHRGUi0uhyzD1JWtUokEigW\ni1pfq927d5vKRSgUQqnU+DSpduFEtFRVRS6Xw8rKitYw2SiWraIVUwe9WJfVLI2e8WZcFGOOpglN\n+bO2v+8GwTLDuK6loFpPp6snYa0IKmGDZ7v1N056JdVst4X7akT1c1p1LGd1Z1WFP6/oqmNlhHWV\nsnBcQuS1UqU6ZjiklSAHIcbXVMeEyQBKuimLSsiHYEJC7MdrmHgqj7FnC/CVASHGY+m3hrFyZwTp\n9/ZDDXbPsWonVo3c1iw504t1g/rHdipYepwkjroVMrvbW1Wv9dN9w1y5RrzqSdeqUJGFZoXLSeLg\ndkUUxZp15fpExEgkYnlftn5MXyHLZrPadX1DaKveY8FgsKMNod0EYSwuLpJodZjefJduIdh0OjYl\nbnV1FX6/H9FoFPPz847kIhwOdyw63Q1MtIyUy2VNLHO5HIaGhjrSNDgYDCKbtZeFdtNsNatZyQKc\nr8WIS8OWv2NTcMb5fNP74xWNBAfYSVijyYNWuKlO2A2U9RLWTrlyDcdBGuIhDfEo7LUeZHDlSnWs\nImFlTczCG1MVI6+VEPupBH699thXeICTKyucpQEfcteEkT7Sj8xNfSjNVKpkdfrttgU3cuVoe3U+\nB9yKOov99zLav7/BSnLR5D3pVMjMGOELyOhaUPT7RKzqrvs3XqsIXzKVrrFQRcqYcP2P/+1/BQD8\nv9d9zdV+9LJoNdOs2M36MWOgB2sILQhC1foxuwpZK6qOgiA4fv7xeJxEq8P05rt0C5FMJvHqq69q\nCYGNTInz+/3ah0I3EwqFIAgCVFXF2tqaFmShqirGx8exc+dODA8Pd+wsktcVrU5PGWynZNlRtc5B\ntj4TyWi1jLUqmc2uGatTCWvF1C+rnmAAEGigEsZo1TS1eqgBDsJMAMJMAIBFGIqiYPjFEqZO5BD9\nWQH971ZefyniQ3mUh+oDAhkZwy+WMPKrEvCfdHcNchAm2BTFzWpYaTJQvXYs5N3zb7dYucWqn1oj\noS5mUtdMRY0JmtvXMOIrWa4N06/hzCj9Wpz8qjSIsfCGSMkDmApmkCxvfqYx6dILF1CRLrfC1eui\n1eqTrDzPo6+vD3191oFKAGr6j5VKpaoKmbEhtFWFzM36MbdTB2+55RbH2ya8pzffpVuI8fFx3Hrr\nrZ7IRTua7TaKJEkolUpYWlrChQsXMDg4iGg0WtM0uJNYVdwagSSrAm+c4+UAo4xZTUMa4QumP7ei\nndHXRuwkDPA2hZDhZKpmvelaRhHrlFw5QtmIYX8yj4mTeQxcrMhV5sY+XP4fRrByRwTrV1SfJa5b\nHXu1hNhPzKtj4ii/uU5syq8Fe2jX2doxi8/kbpYrJ42qneKkWua0otboiQGrkw0DPgGBOn+HEV3P\nrl2BVNXv8koI6ANS8iDeEaMAgGQ5gslQHkmx0q9KVrmqKpdT4ep10WpVmxW3sIbQdoFbxobQLNDD\nSUNoY6AHx3EQBMG235meeDzecLNiwht68126hWBNcpslEAhAkqSO98vSUygUtL5eoiiiv78ffX19\nOHToUFemAzVb0dJX6poZ028HyWpEsEy3YzPg00/vsYLJWCclywn1pmA5FTGv18HVEzEvjrVm4Moq\nRs8UMfFUHrFTawgvV2LYV28ZwLv3jGHlaATihPW/QUfVMbZ2TFsrZhSyMiKvlhBMWawd01XHREOi\nImsM3Uh1zEux8uKkyua2at+zitrYcdlMxVVPxKY
hshvChuc2srHdHf48rg8tudiOikzu/7O9jT8I\njEWBjMWixjDX3GdaRqkcP1PDv25qO62gHRUtL3HbEFpfIcvlctplFughCII2ljAL8wgEAtoYanFx\nkUSrw5Bo9QhsWl4nP5wURcHq6qq21iwcDiMWi+HgwYPo6+tDPp/HhQsXulKygMZEizVKTiQSSKfT\nGBgYwNmR+1u0h/XZLpLVyGJ6M3I2fbYYzaznaBf14uA7FTTSquhx222WFIw/U8DEyTxiP1pDIFuJ\nYU9+YADn7owg+cFBSMMerpvQrx3bZ792LLTCIu7LWqJieCNhcejVEkI/lsCXzKpjvirxqu45Vklc\nLI/6wHv4Z261XOnxOfhMCHpU1Q17IGehJj/DwnVnllj/vp/zbti2rNT/G8ezV9X8rNPyJYpiw2u0\nuhmO4zRhsuPs2bOYn5+HoiialKXTaYiiiF//+tf4i7/4CwCV9flra2v4q7/6K8zNzWFmZgYzMzOY\nnZ1FPp/HF77wBbzwwgv4y7/8S/zpn/6ptv0nn3wS9913H2RZxr333ov776+MWS5cuIDjx48jlUrh\nxhtvxD//8z8jGAxCEATcddddOHv2LMbHx/HYY49h165dNfvtdrvbBRKtLodFujfbMyMcDqNUKjku\nN3tFqVTS1lqtr69r8ev79u2rWSTq5dS8VuB02iV7zisrKxAEAWNjY5icnMTVV1+Nf3j3w03tQzPV\nLJKsTXwupmXV6+HD6FYhq1cN80paG8GJADo5bv05GdGfVGLYoz9bA7+uojxUiWFfuTOC1PsGoPR1\n9gSOGuBQmg2gNBsAz1kcU6oKf06p7jVm+B55RUAoWSsLSpCDMMlXiZg4Zbg8wUO1qY55JVf1xMop\nXlyZHwEAACAASURBVMjV1hAra7wUq1+JlQCIGF9AoInnpJevuNSP68d/1fS+uWGrVbS8RpZlyyC0\nAwcO4OMf/zgAIJ1O46Mf/Sg+/vGPY2lpCYuLizh79iwWFxdx8eJF7Ny5E7fffnvNtj//+c/j1KlT\nmJubw5EjR3Ds2DEcOHAAX/ziF/HHf/zHOH78OD772c/i4Ycfxuc+9zk8/PDDGB0dxfnz5/Hoo4/i\ni1/8Ih577LGmt7tdINHqEVhFq9UoioJsNqs1Dfb7/ZpYDQwM2MpKt4uWFaqqas85mUxqz9mrRskM\nL/plNUqn1mPVbKPNkuUGJ0LWjTLmZNpkN8pYMCFh4lQekyfzGPvFRgz7hB+Lv62LYQ9055pUSzgO\n0jAPaZhH4Srr48kvKghuVMeCOhELLksIxWVEXhYwfqoAXjCpjo3xECd5TcCkKR/EKT/ESb7yfYqH\nNOKzXDtmxXaSq+0gVkyqzEgYplU3u+buxdQNANA24SqXyz27Po3h5MRvX18fgsEgbrvtNsvbPPjg\ng1XXz5w5g/n5eezevRsAcPz4cZw4cQL79+/HT37yE3znO98BANx999148MEH8bnPfQ4nTpzQtvOJ\nT3wCX/jCF2oyARrZ7naht4/ULYIXFa1QKIT19fX6N2wAURSRTCaxsrKCtbU1jIyM2Pb1sqJbgzr0\n+Hw+yLIMVVW1yPlsNqv1Mrvyyistn/N9r9wLYE67fmjwsuPHPbc+2dR+9zewBmFPeAVAd0hWNwuW\nG4wyZva8wj7vm2I3gj7Yop6MteskQN+7IiZO5jH5VB4jL6yDU4HCFQG88/tjWL4jguz1fZaNiTu9\nXkxPo6EhLBRCDXIQ5gIQ5mzO6qsq/FlFJ2CbYqZNV3yphECq9hiUQxzKkzyEqUqFTNR/Z1I26Yc/\n5M1r2utyBXgnWD8s1v6vsHr/zvrTAFDV6LwZ6WqXcLE+V72IJEmOI+OXlpYwPT3tavsLCwvYsWOH\ndn1ubg6nT59GKpXCyMiINr6Zm5vDwsJCzX38fj+Gh4eRSqUQjUab2u52gURrC+CFgITDYaTTaQ/2\nZrNpMKvgcBy
HaDSK3bt3IxKJNL2/3ZqOWCgUUC6X8fzzz0NRFESjUezYsQPXXHNN3f2tSFY1L63N\nmdzSnD6+8cF3I5IFAG+VJir35xuvMoZspGEmkHG0je0iWXrsnlNJcTYlplVC1ogEiDYR8YyGZExV\nMfhrAZNPVZICh16vVARzB0J4674olu+IYO2qkKPqi5t1at0kZfr0QVd9zjgO8ogPwogfuLqyVs8s\nyZATVARXJASXZQTjEoJxGcGVzcuDLwkInpRNq2PlcR/ESR7ljUpYeYrXKmPlKR7iFA952Lw61g1y\nBWwfwfqn3LxpeumOwGrV541euhakUdttyhshJbsCSVf70mrh6sbxQbtwG+3uVrQI7yHR6hGanTpY\nLpeRSqW0Ck4kEkEsFsPhw4c9XbTIAie6YSGkoihIp9NaeAdL9Nm9ezdisVjb9qMTkqXdv0WSBQCL\n5ZG623DaXyrmN++ttZUEyy1OhMypjLUjlt2xjCkqhl9cx+RG5ar/nTJUrhLD/saXJiox7Dtb+/nQ\naSnzMtpdn0hp+ncOAtIcj6LdeR9VRSi3IWFxg5QtV74P/DfBtDqmhLgNGeMhTflQ3rhcnuK1y9Ik\nDzXo/DXvdPWqsg/tnx4o6P6WoY21l/8h+Z6q28wFV6uE61J5zHRbTk50sZkIF8vROrc0l7EXUzfg\nYjmK35o6Vff+TpFluWerWYC7ZsVLS0uYm9t8Y3/rW9/C3/3d3wEAnnjiCdNGxrOzs7h06ZJ2/fLl\ny5idncX4+DgymYzWVoD9XH+fubk5SJKEbDaL8fHxpre7XSDR2gJ4cfYmFAqhVCrVv+EGqqpq8euJ\nRAKyLCMajWJubs5RBadR2DqtTomWKIracy4UClp4x969e8HzPN544w3XH/Jm1SynbFfJqodTwWIk\npNpGx26jn8f8a65u74ZOrXGqJ2M8p3raE6lRuLKK8dNrmD6Zw9SpHMIrEpQAh+TNAzh/bwzxo0MQ\nYpXn0sm1ima4TXS0ErNWyVWz8JwCcIA0wkMa4VG82vqz2bQ6tpGuGIzL6HuxjOHldZgtVaxXHfNN\nA/IwB3AcxAbbMQQ3jnWhwTh5vaCVGpjOP7Dxv0Pw4O/zwpv/hOuvvx7n1r6EmXAWmXIfZvsyuCxW\ni1U0YH4Sip3oEnSfEVeGEqa3ZcIl27xudjL2n+NHPZOtXg/CcFvR0svU5z//eXz+85+3vc+RI0dw\n7tw5XLhwAbOzs3j00Ufxne98BxzH4YMf/CC+973v4fjx43jkkUfwsY99DABw7NgxPPLII7jlllvw\nve99D7fffnvNGLGR7W4XSLS2AF5IDc/zddd5ybKMVCqFZDKJdDqN/v5+xGIxHDp0COGw9cJaL2FR\noe1KR2S9rVZWVpBMVs7IxWIx7NmzxzTVp52BHc1IVrNsJckyo5H+OqvS5jHn5v4R3nrtYydDJOzQ\nr5lTHA5avRYyfl1B7Od5TJ3MYfInOQRzCqQ+DisfiCB+5zCWb4tAGqpdi+CkMgZ0n5AxjGLGxMuu\nquhUwrwSrEalTw1xEHYEIOyoDIRNAzJUFXxG2aiOyQhsVMgCG9eDcQkDLwoIrJpUx8IcypO+jeqY\noSo25dOuwxCCEmzy2PWiAjbgURUmHHkDAKAoz8Hn8+Efr30IAPB7L38RBSmETLnSsmK2r1KxSpYr\nJ6EKUmVwfkVfdeWJfV4LSgAXBPOZGkzAnAiXFf85fhQAmhauXhcttxWtG264wfR38XgcN910E3K5\nHHw+H77xjW/gtddew9DQEL75zW/izjvvhCzL+MxnPoODBw8CAB566CEcP34cDzzwAG644Qbcc889\nAIB77rkHn/70pzE/P4+xsTE8+uijACo9vO6991488cQT8Pv9rre7XeBchix0z8T1HkJRlKYa5TKe\nffZZ3HLLLVXyUCwWtQqOKIoYGxtDLBbD6OhoR8rzb7/9Nvr6+lo6r5j1tlpZW
UEmk8HAwAAmJiYQ\njUbrfoBdunQJqqpi586djh5rK1aztrJkedHA1Jtt2A/yO5E+6FWTaDvqyVggK2PyJzlMncxh4ud5\n8CUV4giP5dsjWLpzGIl/Nwgl3N7PnU4JmVdTDp1IkVMB86qq5kUCYZ9Yhn9DwAJxGYFlGf64snk9\nLsO/ItdUx1QOkMZ9kKZ4yJOV70zC2GVpkoeyUR2rR7OS5ZVgAZuSBQDPPfccjhw5UnOb33v5iwCq\nP8eZdDHZ0mMUL8HhGtErQ4mGhAsAZi/9h6omu/qvYDCIQCBgeYJ5dXUVq6urmJ+fb+ixtzrnz5/H\n2NgYxsbMp4Tqueeee/DAAw/g0KFDbdiznsTRG4AqWlsAr6bpBQIBCIKgTQlcXV1FMBhELBbDgQMH\n0N/f78njNAOraHlNqVTCysoKEomE1ttqenoa+/fvdyWUwWAQ+bz5VAwjJFnu6LRktUOwGPWi4I1S\nFOYa/7u0Q7AYZtWx0EoZ0z/KYvqpLKK/XINPAtan/Hj3348ifnQYqfcMdDSGvV6FjMmHJ6EsHp6r\ndCNF9XupVfZLqbNWz+749irePbzxGGqIQ3mnH+WdNn8fVQWfVhDYEDD/hoCF4hL8ywr8izLCL4jw\np2tfdyWMDfHalC9WGZOmePimfZAnfDXVMae0SrDqwSpcQEW6+nkRC+ub62Fn+zJVwvXOevWUPyZe\n9YSLVb+M05P3hZfq7uPCji/hvx99XGu2KwgCMpmMdpmdWOZ5vkbCCoUCgMoJU6fpe9sJQRBcTR3c\nbuudtiIkWluAZkWrVCohmUyiUCjgzJkziEajVeuOuolQKIRisdj0dlhvq5WVlap+Xvv3729KKFlY\nRyshyXLHVqliOcFKikqquzWLYU5sq2AZ6X9HwPTJLKZPZjH2qyI4FVjbFcRb98SwdMcw0of6LWPY\nGZ1cO2YmMU76jWn3N+x7pwTLdjsN7FPZRErZ36nZQJUgJ6Pf7ecIx0Ee4yGP8SgdCFhOEeRKKvzL\nckW+4nLlcnzjclxG+GwZ/uUSjB+fKgcoUR+kKR/kyYqEyVM85A0xkzeuG6tjrZYspzOR/vHah/Cb\nv/wqpgbyWC31YW4gWyVdI4Haac968cpJYewbWLbfP1+5SrbeLNXORjGTrx+mPwLAfiqhJEkQRbFK\nyLLZLGRZRiaTgaJU/t6sOmZWJQsGg9sqpdDN1MFcLofRUftkSaL1kGhtEdz00lJVFZlMBslkEslk\nEjzPIxaLIRqNYmJiAhMTEy3e28ZpZg2UJElaP69cLqf1tnLbz8uL/WummtUoW1GyeqmKVQ+vxIiH\ngrLqR9nB5jyLiFdVDL1R0uRq+I1K8E7mYB/euG8KS3cOI7/XWQw7w8naMc/XjXkkMUYpq3eUOZna\n10nBssKL1z+oew8WHU5bM8Lex2WrYyYEYGcAsJnxHYBsWh3Tpi0uyOg/W4Y/bbJ2rK+ydky2qI5J\nUzwkF9WxMZ6zrWK5qeY8efMDAIDf/OVXUZSDmnAB0NZzZcQ+7BpYrbnvkL+EuDCMnLS5RttMvNjn\niFX4jpl8ARUB+6uLd+HPdv2T6e/9fj/8fn/VyVFZljE0NKT1aFJVFeVyuUrG8vk8kskkRFGEKIpa\n3y0rGQuFQlumATJL56sHGy9uJ8ncqmyNI4uoK1qsaXAikUA+n9ckY9euXdrC0Xfeeacl0/K8xG0M\nvT4ZUZIkRKNRXHHFFRgaGmrJB0yrK1qdCMAgyWr0/t0nWG6xGhgZY/GDZq+VomL0V0XMPJXF9Kks\nBt4VoXJA6qYBvPy/zSB+dAjFHc6muDSKVzLmZeJfI9Sb2gcAZXXzNo28d7pNsACL48ol3pwsUQBU\nV8esYNUxJmBs7VgoLtWtjslRH6TJTfkq60Vs47IS4RCO/Np2f50OtvXohetyYVj7+dxAFiPBdVws\nbK75MUrXkL+kydabheqGyHrxMla36sEEz
E62jBjDMDiOQzAYRDAYRCRSmz7LUBSlSsbYMgp2WZIq\nn+d+v99UxJikdUO0vJOxTS6Xa9k4iHAHidYWRVVV5PN5rWmwqqqIRqPYtWuX5ZsrHA47Xl/UKepV\njIy9rcLhMGKxGK699tq2JCM6Ea1Gq1mdmDJYT7IurFtH9kb8ztsFGBnkBWQbvG8/39zJAvZamTX3\ndEqYK2sD3/4Ggy06KVhWWPUdEzeeKycqmDi9htmTGcz8KIO+RCWGffnWCF7/gykkjw5CiHZXIpiV\njOkX8ndatNzi9tgdaGKNn57tJliV7bh7TmqYQ/kKP8pXbA6faqYsqir4tKpNTdRPU/QvK/AvyAif\nNV87pg4MQJ0+DGV6Gur0NJSZGajT01BnZio/m5mBNDDQcAVGL1xBXtKkS6tyiX2m0qWXLT168do3\nsOxathhOZavR1EGfz4e+vj709fVZ3kZVVUiSVCVjxWIR6XRau66qqiZ3VkLm9/tbIjhuKpnUrLh7\nINHaInAcB0mStKbBmUxGaxp8ww03OJqzGwqFtAjzbsXn89VU7ur1tur0/unphimD5wvOp4ZGAo3J\nUrOS1SheSVajmFUSinWCLYxo05xMDiM3U/m8Eqx6TZ35ooypZ3KYfSqD6Z/mEMzLkPp9WPrAEBbu\nGMHSbcOQIrr3YR1/9GKQ3QxmSWlOB4aeTbVsMwWXa/wYTNC6SbAAL6tYzWEZG89xkMc4yGM+CA6q\nY/64glj+P4JbWoJvcRHc0hK4xUXwp0/Dv7QEznDycYDjEB0bg2/nTk3I1JmZipRNTVUu790L2MjY\nkzc/gN/85Vcrz8MgXBlxU0bMpMtMuIDaatfOvtrpiHY4ka1W9tnkOA6BQACBQMC2xQxLgm4kzEN/\n3e34xU0QxuLiIolWl0CitUWIx+M4f/48xsfHMTs7iwMHDrguYYfDYVdNizsFC7JgUyE5jrPtbbUd\naLSatahb2OyWXpKsVgiWW5wMDp0M+Fn1RT/drJH9sxOsQEbCzE+zmD2ZweQzOfhLKoQRHgt3DGPh\n6AiW/91QwzHsolp/cNEKGWs0ilqPk7/PVpUxM0oOe5aZ0a+ronWTYFW200LJcgGrjsWu+zUsJyKr\nKrhUCtziIrjFRfiWlrB+/jyUy5cxnM/D98478P3iF+DS6aq7rZ07B3Vy0mKjFfTVLaBauABgMFD9\nuamXrrFQ/dCqd9fHsKZLODwQqZ9IWE+2uiFtkK33qic9ZmEeuVxOu2wM8zCTMX2YhxvJpMTB7oFE\na4swPT2NWMy8maBT2tls1y2sWXIikUCxWMTbb7+NyclJHD58uGVnrxqF4zgoilIjuo1Ws94pjOHq\nodoFxm/k7P9JAsBQoDH5IMlyTqfXkjHspre5mUrmg2IqA+FlEbOnspg9lUHsl3n4ZKA4GcCFfx/F\nwh0jSB4ZhOpv/UkORfWh5CDBzqnQeCFYbtguMtZsxbSoq6IVG5glO+KrTsTrlioW4I1kMWZG7Ndj\ngeOgRqNQo1Hg0CHIAJLxeGXQre/nuL5eqYhtVMNUF+OFJ29+AEdO/d8Yj1Si04N8RfvWypW/oVG4\nAGBV6EdaqFS+9gylLLc96Bc02Xotv1lhsZOuerK1VU62moV5GHET5sGmLV68eLFumEc8HsfevXtb\n/RQJB5BobRG8+GCpN+2t3ayvr2tTAgVBwPj4OKanpyEIAq666qqu6OtlBhPWRtaEvbw6U/OzoVDJ\nkVTV3K/NktUMW1GytoJguUE//YvJQOSdEnacXMXOU2nEXlwDAGSvDOO1e6bx7tExFK4L1o1h94J6\n/ZvMqCc0XoWVtIKSEnAUF9+RxtYdjNXXk1Eqg/hmovHH+YJ2uVuqWHrqSpYFpmEYfX1Qd++GvHt3\nQ9t87ugfA6hUt0TZr8kWYC1co6GKDL+VGwdgLVx62WLopQuoFS8z2eqm8YtXOA3zkGUZ77zzDlRV\nRV9fH
wRBwNramlYxkyQJ9913H4LBICYnJ1EsFpFKpRAOhzE7O4uZmRlMT0/ju9/9Lh566CGoqopI\nJIK//du/xXXXXQcAePLJJ3HfffdBlmXce++9uP/++wEAFy5cwPHjx5FKpXDjjTfin//5/2fvzcPj\nqu97//esGi3WMqtkjWzLlmXLNjbGiLXkGhrqmLSQUK6jPrfB/MBpMWZpetNcJyWUp6UNtLlpwgWX\nX4DeKvQhNo/zA9/eOg4EAkkh8cZmwIBdLGwtsy/S7DNn5vfH8D06c+bsc84s0ryex49leXR0NJrl\n+zrvz/fzeYaeeXrrrbfi5MmTsNlsOHDgAFasWFF27nKPu9DQyXzgLrxHeYNQKBRUSaPeeOMNXHHF\nFTXpnEPazvv9fnq2ldPphMPhKJGqDz74AH19fXU7/+Hdd9/F4OBgyYvidb96QNGxOluUSU8tJEtp\nmqVUshpVsAB1JEsLwUKhgJ7TCQy8FMayl0Lo+bi4UAqub8P537PiwvU9iK5qldWGHVCe0CgRLLWo\nlYjJmcclBbVkrF4Ei4la88fUeC71G9RtJJUK/QwWi0VRE4WJiQm0tbVpNqqFK91iwpVwkXSLwCVd\nbNnigyldTNnKZrM4deoULrnkEknHWWicPXsWVqsVVquV8//z+TyCwSA+/fRTPPzww7j22muRy+Uw\nPT2NqakpzMzMIBKJYGBgAC+//DJ+9rOf4cEHH8TRo0dBURSGh4fx0ksvwe12Y3R0FD/5yU+wbt06\n7NixAzfffDPGxsZw5513YtOmTdi9ezf27duHd999F0888QT279+P559/HgcOHCg5JyXHbSAkvVE2\nE60GQa2ovKWlRXEao4RsNkvvtZqdnUV3d7fobKt6LnEEqjO0uN5oVMkK59plf20ayrvmkQ6E8x0J\n5T+O1RYsHVWA/e0Ylr0YwsBLYSyZTCOvB/xbluD4Xy7Hhc/3IN5fWRt2sXSJLWK1FCwC1/BdJmqL\nmNqCRYhLaMYiJmNNyRJniuJPG+SwseN1pNNpmJYIN1FgSxhz/04ul9N0nxJJt0Zf+kfo9cX7z9U5\nL5qxrBnhZBsGOiP053pakiWyxZVycSVbXDDTrttO/Q/8y0WPACiuJxZi4iEVsWHFer0eDocDDocD\n4XAY99xzD9rby98DKap4IfCKK67A5OQkAODYsWMYGhrCys8S0bGxMRw6dAgjIyN45ZVX8OyzzwIA\ndu7ciQcffBC7d+/GoUOH8OCDDwIAbrnlFtx99910eSNByXEXGk3RaiDkDC3mg8yp0lK04vE4fD4f\n/H4/KIqSPduKRNL1ClMEs9kstv3mbxQdp1HSrGpJ1ky6i/5+MUr+wr/LWExn0gpaC1e6j4uvxXsi\nL31RMN+NUHgBJZYe6ZGHPpNH729nMfBSGAMvh9EayIIy6TBzVRfeu3MpJn+3Bylb9dqwExHja7Wu\nRoqoFqRZh1DTDjkpklaCJQc+GWMLbzej1K5W1Jtgqcmo7QQASGqiwNdiPJPJIJlMIhAIYGpqqqyB\nAhE0g8FQ8QXa49d/nZYt72xRNIlw9bQmcGF2vhnTQGekTLaAeeECitIlVbaYENlS2tp9oZDJZCR1\nHSwUCkin07zbL4ikP/3009i+fTsAYGpqCgMDA/Rt3G43jh49imAwiO7ubvrCuNvtxtTUVNnXGI1G\ndHV1IRgM0sOklR53odEUrQZCLdFKpVLo6uoSv7FEyGwrn8+HcDgMi8UCp9OJjRs3KhK6lpaWup73\nlc/nMTMzg/Pnz9Ndg+SyGCSLiFM1vh+RLCVUIllKZ2gxkVtmyJcetSSzcP86jBUvBjHwWggtcxSy\nbXpc+C9WTF3fjamtXch21OYlX2ywsFgzj2qImJRuiAShFIlIWD0IlhBcqWKEEk+AtZSxxSBZUiBN\nFLjSCAB4//33sWzZMphMphIhY8574hvAy0zKTCaT6DYCIlsAOIWLQKR
roDNSJlsEpnS52mIS7olS\nFrtoZbNZSfPT8vk8dDqdoGj/8pe/xNNPP43/+I//UPMUm3DQFK1FhsViUSUtSqfTdCOLRCJBz7Ya\nHh6uuKSh3koH8/l8yd4yoHg/bty4ETcc+7san504akvW+QR3fTihx5zAHM+cFSXfTwylkqVViiUV\nNfZxtUSyGHw1gBUvBtH/egTGdB6pbiMmfs+OiettmL6qG1QLYyElso5tUVloxARLKkTEKA45qHSP\nkhzBkkI83yL4c6sh55VQadkmU8b4ZNKmYD9TU7Kkk8vl6BRL6GIm1wDeWCyGYDBIp2Oko53ZbIbF\nYuEcxMtVSkiEy2wqfR0jwtXRIvw49yY6MJu0YLVN2mzP2079D/yd7b5FLVqAtG0kgUCgJFV6/PHH\n8eSTTwIADh8+jEAggF27duFnP/sZbLai/Pb39+PChQv010xOTqK/vx82mw2RSIRuwEI+z/wat9uN\nXC6HaDRKH4+g5LgLjaZoNRBq7NNqaWlBNBqV/XWFQgFzc3O0XJHZVqtXr0Z7e7uq7VZJeWMtyeVy\nCAQC8Pl89N4yp9OJoaEhWrqUll9WM81SIlnn5qzobkkhnJHf9bHHLD5bhQslktWoKValgtXmTWPw\nF0EMvuRH3/Eo9BQQ62vBhztc+PR6GzxbuhS3YU+LJEtSRUwtwWLCJVkAf7okJmBqCxYg7ecWGnCt\ntYSpuTdOKLELStzPZDPMqSZYQH1KlhZwdh3kQM4AXqaMESEjH5M9PT+yb8WfBF4FAFq4MllDmWwB\nQCxdfJwLCVdnawpngvNCICZd3w7+EA87vi54m4WKnPlhMzMzWLp0vsPxnj17sGfPHgDA+fPncfPN\nN+OZZ57B8PAwfZvR0VGcOXMG586dQ39/P/bv349nn30WOp0O1157LQ4ePIixsTGMj4/jpptuAgDc\neOONGB8fx5VXXomDBw/iuuuuK1sLKjnuQqMpWg2EGjJjsVjg9ZbPbOKCzLby+XyIRCJYsmQJHA6H\n5rOtapVoJRIJem9ZLpfj3VtGzk9pp0ElKC0Z5OLcnHAi1a1QBJVIViOlWLUUrK5zCaz4RRCDvwjA\n9U4xLYisbMW7X3Nj4vM2BDZ0yO4UqAQxEQPUa2tP4BMsMYTK+9RudKGWWBIJY+/T6zIou4BBqJZg\nycWX6xS9Ta9R2oXBepUstdMsAJxzHCtBr9ejtbUVra3cJX/A/LynV9JDSKfT2P7WMyWyBZSnW4C4\ncHW2pjCbLF60JNIlJFx7/f+If+l9RMJPtbAQa4TBZHp6Gn19fZz/99d//dcIBoO46667ABRLS0+c\nOAGj0YjHHnsM27ZtA0VRuP3227F+/XoAwCOPPIKxsTHcf//92Lx5M+644w4AwB133IGvfvWrGBoa\ngtVqxf79++nvv2vXLhw+fFjRcRcazfbuDQRFUXTdtVLS6TROnTqFSy+9lPP/uWZbORwOdHd3V7Ul\n/BtvvIGrrrpK0+/BbDcfCARgNpvpdvNCbzipVArvv/8+/iJ+SPb3rGaaFUjJT6QWg2RFc/y/Wz46\nKhjUDAAWGQv7Nn0aKBRgOx3H4C8CGHwpAOuZ4v3r39CBiett+PR6GyKr6mfOnBQZktsCXqlgCSHW\naESJTKud3omdIxsxCatXyVJzkHS/KazasdREC8kCgOPHj2N0dFSTY8vhop/uAwC0dc+/JidmLei2\nce/jm4tZ0GcrF2ciWlzwSRfpRLhYiEQi8Pl8JSkUH08//TT0ej3uvffeKpzZoqbZ3n2hoUaixU6L\nuGTD4XBgZGSkbgcGV0Iul6NTumg0iq6uLjidTsF282zMZrMiyVJKtSRLKbWSrGCGvxSGSednXxPN\n17dg6agClr4VwdAvfFj1sg9dUynk9cDMlm786tv9+OR37YgtZSxIWBfwlc6yqgQ5MiTUAp557rUQ\nLAJfWR+XgNVasAhRiv+5vkSv3nD
yepWsPPS4kLWJ3xDAgIl7oK4WaCVZ9cSpP7wLF/10HxKRVlq2\n2jpTiATbOWVrSUcKM8FikySmcDFTLTZ8KZfaqV69IyfR8nq92LJli8Zn1EQqTdFqINQQLZ1O4C0S\nMgAAIABJREFUR3fNkzPbqtrodDpZNclCkJTO5/Mhk8nAbrdjYGAAGzZsUHSfKn1xV5JmSZGss5Hy\nRUa3RX7qoyTN0kqyAulSieo2fTZYV6JcETor2MdViWRJFSxDJo+B34Yw9LIPK1/xoz2YQc6kw/mr\nbDhx53Kcu86OlFXam6vYLCv63FQQMrVlKJU3cYpGpQ1LlMoLG6aAkcYc3RWW8zFR6zwJmc9mhAUp\n7ueLzSCv41s9S5Yc+ISMef+vNPsqOidAW8mqtPOw2jBlCyimW0S2AJQJ15KOFOZiljLhEpItAGV7\nuW5//1u4K/mH0Ol0nA085A6BrnektnYHinu03G63xmfURCr1sapuoimFQgHxeJwuCUwmk4jFYrJm\nW1UbMlhZqISPj0KhgNnZWfh8PgQCARiNRjidTqxbt06VlE7J3iylJYNMuISKi3qXLLZEiUEkSy5K\nJUtrwTLFc1jx6yCGXvZh8LUAWmI5ZNoMOPdf7Dj7eScmP9etaRt2ucOFmVQ7beKaQyZFvtQWFwKz\nDX2EJ0mSI2BanGdGZBAzIF3A1G5Rr5ZkqZkmsn8Hn2Scol8jJGNaJ1laDytWwqk/LO73YaZbbZ0p\nJGYtvMJFYAqXmGwRiHR9Ha/h15/778hkMrLb3LMHQdd7OpZOp3nb/bOZmZlZsB38GpGmaDUQZC6C\nlCta+XweoVAIfr8foVAIbW1tcDgc2LRpE95//3309/fXdWkgKXGUKlrsxh2dnZ1wOp0YHBysm5RO\nDv/pt2NJWwo+yJSSOpKsmUTpRndrSwJpngUeH0okqxaCBQhLliWcwcpf+jH0sg/LXw/BmMkj0WPC\nx9uc+M/PO3H+Civ0lvq4Us0lYtnPFu9qdsVTKhl8Q6CJgGkhL2JzvphwCRiXfGmVYlUCU8DYg79d\nJvndapk0gmRJhUvGVpp9VSkXlNpxsBaw0y3o51/TmOWEJNViwhQuKbJFuOZX/xOvb/0L2W3u4/E4\nQqFQSZt7nU7HK2MkHasVmUxGcumg3+9Hb2+vxmfURCr1+Wxtogj2bCur1QqHw4E1a9aUXK0hs7Tq\nWbSktHhPpVJ0SSBp3NHf34/169drltJpkWb9p99e9rklbfIX/bWSLLZQcWFtkZ9+NYJkxXIWtBk+\nS1hYD7mOmRSGXi7ut3IfD0OfB2Z7LTi1ox9nrndienM3CkY9TLqcqi2u1STLWrzL2cPEf0xtrsaH\nOIbtVrpHSY5gCcGULyIwVqP8ga18qCFZTNiSBQDeLPcAcjEBq2WpoBBqPw6rtSeLoqi6FS1gXrYA\nAHldmWwBxXSLS7aAeeFqb1Pvoo7abe5NJpNgOqbF+iOdTksqHSwUCnX/GFlsNH8TDQYz0SKzrUiJ\nnF6vlzTbqh7mVInB1eKd+fP6/X4YDAY4nU6sXbtWcqReCWq0c+eSqlohV7Km5rpga41jRkJLZibV\nkCwpgjXL0W2QfF1MwYDlNkNmXrJQTFpsn8Sw5mUP1rzswdL3igtQ/8oOvLFrCB/9bi886zpL2rCb\nCpQmnfAqhS1YYkgVsGqnTXP58t+rVPlSS7KYMAUmlCtf8CmRr2pIlhBCArZYJOsrfYdVPZ4Q9Zxo\nEYRkC5gXLkML/yiIeKL4miJFuK5+9R/w+ta/UHi288hpc8+Usbm5OQQCAaTTaWSzWXoItFA6Jrf8\nU+rvPZvNwmQy1eWWkMVKfT9bm5RBURSd4pDZVk6nE8uXL5c8Mb2lpQWplHrdqLTAbDYjmUyCoiiE\nQiH4fD6Ew2FFP2+tmJrpgbk9C7/M8r9qpVlCTM2VL55srdw19kLIlSy5ghXKtsFqSnBKlBhK0y+m\
nXNEUCvh/xl7H0veLcjW9oQuv/NkafPS7vQgNlv/+5cyaEhpwW3ZuFUqZXMESg2sulJriqESG2PLF\nFi8tBAuQJjBy5EttwQLkS5YQn2bK95S6TSFFx2pK1jyNIFpA6b4t5D9b9LOEi0obBGULKAqXmulW\npZDmG2azGUuW8A/mpiiqLB2bnZ2lSxVJOibUyIMpTKS0UYyZmZlm2WCdUf/P1iYleL1eRKNR9Pf3\nY926dYo2cFosFgSD1WtzKxfyguTxeODxeGC1WtHb24uRkZGabViVkmZNzfRU4UzKUaNkkEusmNRa\nskJZ7jJXq0l+WlZJJ0JOyQIAnQ5ntrpw6iY3PrrOhblebvFTe5gvGyEpExMctSWreMzyRS3XOcqV\nLzVliCleCap4bl1G9ToKViovXPLVYVD/QpmakpXieSxNZrmHpQsJWD1LFlDcD6N12RiTRhEtgli6\nRaWLvxMh4Yr6is+BLid/4qtWqqUWBoMBbW1tgls0CoVCWSOPaDRKf5zNFhsT6fV6pFIpnDlzhlPI\nmOsij8fDO6y4SW1oDixuMNQYWjw3N4dz585h48aNKp1VZRQKBcRiMbokUKfToaurC/F4vG5mQbBF\nS4pUmdvlt9GuRpoVz0hPSIDqSRafTHEeX4FgASqnWDLQWrAqIVswVNxGneuYlcAnXlqlTUSw+FAq\nXmrKC4GvGYjTNKvoeGqfI59kKWGpKaLasQD1ReuawlMli2TSVEGv18NisfAmFZVcMJycnIRer8fS\npUtV/Em0h5YtoEy2AEAXNUHv5H//y8XmH6dCwlVPsqUWsVgMZ86cwfLly8tSskwmg8cffxynTp2C\nzWZDW1sb8vk8/viP/xj9/f30n9deew0PPPAA9Ho9jEYjfvCDH+B3fud3AADj4+N46KGHAAD3338/\ndu7cCQA4efIkbrvtNiSTSdxwww344Q9/CJ1Oh1AohK985SuYmJjAihUr8Nxzz6Gnp3xNJPe4DYik\nk26KVoORz+fpqxxKyWQyePvtt3HZZZepdFbyIV0RSUlge3s7nE4n7HY7vT/rnXfewejoaM3Okcma\nA/8o+2vkilY9ShYgX7SkSJYvNX+V3m6RefwqplgLXbCEUCpfWjW70Ovy4jdSgJhkcSFFvKopWVxI\nEa96liyA+3czZPEqOlY1Swa5ysaYf/L5vOIOdxMTE2hra4PTKd6Gvt5gypYhZAJlL32N0UWLj0c+\n4WLKFsAtXAtRtCKRCHw+H4aHh3lvk8lkMD09jX/+539GMpnE+vXrMTU1Rf8JBoPQ6XQ4cuQIfD4f\nduzYgQ8//BChUAiXXnopTpw4AZ1Ohy1btuDkyZPo6enBZZddhkcffRSXX345brjhBtx7773Yvn07\nvvnNb8JqtWLv3r14+OGHEQ6H8cgjj5Scj5LjNiCSRKtx8ucmANQZWmwymSqWNSVkMhl6fxnpikia\nWbCv8NXqHLmohmQpQY5kTYW7i1/TLk841JAsplSxqSfJYu/zajemMUfJb5JBsOizZYs7tZMjJUhd\ncMqdYaWVYPH9Diot71MiWIRorjx9JeejhWAB8iQLAHzZ8qY1TPlqRMkCgLMpF+fnhQSs2vuypJSN\n5fP5srKxeDyOVCpV1uGOmY7Nzc3BZDIhk8k0XNODkjJCAIaAuUS2Cl1Z6KIm5H0WTtkydmRLZIur\npLDeSgjVQEprd7PZjBUrVgAA/uAP/gBf/OIXy25DgpVz587Rj5uf//znuP7662G1Fst6r7/+ehw5\ncgRbt27F7OwsrrjiCgDArbfeihdeeAHbt2/HoUOH8OqrrwIAdu7cia1bt5aJlpLjLlSaorUIIfO4\ntIYMSiYlgQBgt9sxNDSEjo4OwXNopDcPNtUoGRSTLCJWJV9TBckSkio2WkoWaZIBcHcbFKLdWNnG\na6GBv3IXy4B6cqbGYpPr/LVM7YREly07csSrEsniI5prQ4xxX
LtJnfbtSh4zfBD5in12v/abw6oc\nt1qSJQSXgA1ZvHXb/IKUF4rNf2J3uEulUggEAvD5fPTFSL5hvBaLBSaTqa6G8ZImGRc/+SQAYdkC\n+NMtJlFfx4KWLamt3QHhYcUvvPACvvWtb8Hn8+Hf//3fAQBTU1MYGBigb+N2u+kUzO12l30eKPYK\nIPvAent74fWWX+RQctyFSlO0Ggy1BMRgMGiyqTafzyMcDsPn85UMSr744oslv1Cwj1fLNwklaVYt\n4BIrJlpJlide7Lpkb4tLliy1BEtoT5fSPVyVSJaQYFWC0EJbqoRp0lb9s0QkhfJkZEmFzRqUJIlS\nxUsLyQJQIlkAEMgyymMVSpeakkWIMe7bqUzpvgol4lUPksXHqcQA5+fXtk6r9j20hKvDXSQSwYoV\nK0pGmkgZxqt2u/FKeftrXyuRLQBlpYQAytItdqpFkNIwo1GRM/fU4/HwitaXv/xlfPnLX8avfvUr\nfOc738EvfvGLis+tWhfuG5mmaDUgzFlaSiGztNQQrWw2S5cExuNx9PT0wOFwYHh4uKIXb7PZjGw2\nq0jQakU10qx4sgXxpPT7RE3JImLFxN4mXZyUSFY1mmRomWJpCd9CnAiYVuV8KZGyM7YoyRGvSso1\nmbDFS6vkjS1YXDClC5AmXlpLFhds8QKE5aueJSspcP99mCxvJCFFvqrdyp0LrgukRqMRRqNRcJ4k\nRVF0qWIqlUImk6HbjTP3jfG1G7dYLDAYDKouqpmyBcynWyTVIrDTLT7ZAuaFa8Nz/4T3duxW7Vxr\nSSaTkbwOCofDsNmKoxUef/xxPPnZ/Xv48GG6gcrnPvc5fPLJJwgEAujv76fLAIFis5WtW7eiv78f\nk5OTJZ8nAudyuTAzM4O+vj7MzMxw7hdUctyFSlO0GhA1RUvpoF9mSWA+n4fD4cDKlSuxZMkS1V6I\nSVOMWolWNdIsOZIV9Bclx9Kh3T4ftmRxiRUTtSUrkJp/PNotcc0lq1EFS4woR7lkpY09AHHB4kOK\neKklWFywZahHpfbtUiSLCzHxqoVk8cGXetWzZClBTL7qQbIA5e3dDQaDpGG8zH1jqVQK4XCY/jfp\neGwymThFjD37SQpcspU3ca9v5JQTLiSklg6SdSG5//fs2YM9e/YAAM6ePUvP4nrzzTeRTqdhs9mw\nbds2fPvb30Y4XHxev/jii/jud78Lq9WKzs5O/Pa3v8Xll1+OH//4x7jnnnsAADfeeCPGx8exd+9e\njI+P46abbio7FyXHXag0RasBUUNkLBaLrKHF+Xye7nwTCoVgsVjgcDiwceNGwRrzSiAyKDQUUG2Y\n3RDlonYDDCJWTORKlpw0y9YaFxUrJpVKFlOqxG4rRrUlq14FK5vnT7ASFEdzCxnypVSyuNBSqpjw\niVCYlXYpES+lksUFU7xmcxb0tihr1c6HUsniYirTg9ns/PEG2/wVH1NtyRJKs+RA5OuvVj6lyvHU\nQMtyemYnRD4KhQJdqkgad8zNzSEQCNCznwqFAgwGA6eIkXljzJ+BLVv6bHGNIyhcbeIJ9UJJtaTK\ndSwWQ3t7O+ca8ac//Sl+/OMfw2QyobW1FQcOHIBOp4PVasV3vvMdusPzAw88QDew2LdvH92Gffv2\n7XTDir1792LHjh14+umnsXz5cjz33HMAgBMnTuCJJ57AU089pei4C5Vme/cGJJfL0R2JlDI9PY10\nOo3BwUHe22SzWQQCAfj9fszOzqK7uxtOpxM2m60q9dznzp1DS0uL5vNC2KWPVqsVX3r3iOzjqNHO\nnUuuCFpJViDajq4l8soLlUgWn1hx3VYqcgQrki1eyRUajCxGi0H+DDsthstyISRZcmDLl5qCxWY2\nVy4AnUZ17i+lIiRFutSULCZc90el0qWmZAEokSwu5IpXvUoWk3oSrePHj9fN2BMh2PvG2H/IvjFm\nqeKXXnqp7Dh8sgUAeQmyB
aDhZevYsWOSxvF8/PHH+O53v4uDBw9W4ayaoNnefeGiRqLV0tKCaDRa\n9vlEIkGXBOZyOdjtdixfvhydnZ1V3/BIEi0tYP6cFEXB4XBg1apV890QZYqW0jRLSKwqQUyyAlFl\nJaOAdMkKJNphb4tLEiygcskiIiVEtSULkL7QVSpkagkWgSRfRCbUEh8mXELB9X9Kv3clIsRMu9jS\nVU3BInjSpW3a5YhXtSULAM4lHCX/ViPxkspCl6xGQsq+sXw+XyJf/2fbNtz485+X3Eaf1fHKlj5h\ngDmiR2ppfVYYqAFFUZITTLJvqkl90RStBkQt0SJXlUhJYDAYREtLCxwOBy666CLNSgKlYjabMTc3\np8qxCoUCZmdn4fP5EAgEYDKZ4HQ6a/JzZmfaUOjMIhiXnhSosS+LT67kpFlikhVIMPZYadAkI5Ju\npWd1SRErQi0ESy58i2I+AVNbsErPZV4o1BAfJkJSIXRbKd9bbRFilxhq0VBDzv0BlIqXkHSpLVlK\nERKvajW/UEq9SVale7PrDb1eX7ZvjF1GCAjLVqY7D8u0SVC2GrmEUM4+9enp6QXfWKIRaYrWIiSX\nyyEajSIUCuH1119HV1cXnE4nVq1apXq790qoNNHK5/MIBoPw+XwIh8Po7OyE0+nE4OCg4M8ptwmG\nlDQrOzO/YCt0yrv6VknJoFhyValkMcVK7La8x+WRrEi6XKS4BiKLoVSyqiVYYnAtmLN5PToM6qe9\nYqLClgK54iVXKuR8b63SJoBb6h3myttIV3J/APzSpYVkSUmzpEDEay5X/H0NtVcv8ZJDvUkWUHzv\nrnYb9lrAJVutnmKqk+zNc36NZfqzcRM8wnXs2DG6xT1zvxjzTz3NGyNIGVZM8Hg8gttBmtSG+llV\nN5GMkkQrmUzSpXLZbBY2mw1GoxFXX3113c5AIF0H5ZDJZOj9VolEAlarFX19fRgZGanJiyhTsKpB\nd3tSclmgUsnikyuu2wrejiFYXFLFRq5kNUKKJZdsfv4xzBaLSsVLiahITZwqFQqxY+p12l3p50tO\n/Zn5BhZKpEvt+4RIVzhTfH4OtKkzjBhQT7IIRLIA4Gx8PvFSKl1apFn1SC6Xg8mk3Z7JeuLtr30N\nwPxg47StgJagDq0efZlsZbrzMEeKr4186dbtEyfxzh/+Sdnw52g0WtbiXmjeWLUvRssZVuzxeHD1\n1VdrfEZN5NIUrQZEihgVCgVEo1H4/X66VM7hcGD9+vV0TB8IBLQ+1YowmUzIZsXTH7Lfyufz0a3m\nh4aG5vdbyUCNNEtIrrRMs3I5vSaSBYjLFUGqZBl1eUlyBSzOFIsNU7D4UCpeaiVBfImTFpJFiHC0\n/7ea5Xet5D++tMeoXOnS6j4hkgUAFxLzbdkrkS4tJYsNU7oAaeK1GEoGCRRFLYpEiwkz3apUtgwG\nA9ra2gSH/+bz+ZIW92QANBEz0ojMZDLxJmNyW9wLkU6nJSdaMzMzzdLBOqQpWg0K1yytXC6HYDAI\nv9+PSCQiWipnNBrr+gqZXq/nrEknEkn2W7W0tMDpdGraal4MLZIrLedlSSEy24buTumSIyZZ4WTx\nPnK0Sb/6X80Uq16RIllciImXlqV2AHA+aaU/7lY4SFoILskCgNBnslGJcMnZ/8dGTLqqIVlslEpX\nNSWLCzHx0kKyvtn3vzRtoV4JSmdoNTr/9oUv4A+OlDao4pItJlylhFL2aun1elgsFsG1RKFQQDab\nLZGx2dlZut09uUBsMBh4ZYzd4p6PTCaD7u5u0dsBRdFyu92Sbtukeiy+Z+wCgYhWKpWiSwLJALr+\n/n6sX79e9IoK2QNVr6JFKBQKJfutIpEIva9s5cqVqr3xyE2zdLMmZGel33dy0ywpJEPFBaGpU3rZ\nmFCaFZmdX7xWKllErJhoKVlAcYH81MWPS7rtyZMnsX79eklyTpqpeL1eBAIBtLa2wuVyweF
wlDx/\n/vLDW2WfMx9KBYsPdoMLLboJEtgSRP6tlnDxSRaTEEM85EhXJZLFhi1dtZAsNlKlS23JUgM1ygxF\nv8fZs6JlZGTxXO10abGKViaTwS++/GV8/vnn6VQLKJctZqpFEGuUoQSdTgez2Qyz2Sw455Pd4j4e\njyMcDiOVSiGTydAt7oVKFeUMK04mk4JdHpvUhsX3jF0gTE1NYWJiAkajEQ6HAyMjI4JxOBdkaHFH\nR4f4jWtAJpMBRVH0FHOr1YqlS5dKkkitMF+Yf8HLdvFfTasUoTSLyBWhUsliyhVBqmSxBYtLrghS\nJUuqYEkVKj4MBoPgPDrSkdPr9SIYDKKjowMul0tQ7v927Y85Py9HwNQWLCbMhb7a3QQBcQFi/r9S\n6ZIiWWykSpeaksXmozkXAKCvtXysRiXIkSw2fNKlhWTJTbPEqEbJILuMLJVKIRaL0f+mKIpedPMN\n51VTjBaraGWzWZjNZrz9ta/h8odLf0diTTKA0nSrmh0IlbS4J+kYU87m5uY4k7FsNguTyYSenvnn\ncb3uuV/MNAcWNyjRaBQGg6GiNGpiYgImk6muanrj8Ti93woovrGsXr0aDodD0xcQsTSLKViAfMlS\nY28WW7AIUkWLLVlcgkWQIlpEsoTkCqg8xapUqPg4deoUVqxYUXJFMp/PIxwOw+v1IhQKoaurCy6X\nS7Mh3WwBq5ZkCaFUupQIEEGKdFVyfD6Y0qWlZIXS3AutSqWrEskSokvlEly1JWs2U/pYHunyVnxM\npfuyCoVCmYwxF825XHHfp5iMSXl/m5ychF6vx9KlSxWda6Ny6tQpDA0N0fvL2bJFILLFTrXYnPiz\nXeqeoIYcP34cW7ZsKSlVJI+x1157DePj44jFYigUCojFYti2bRv6+/vhdrvhdrvpjycmJnD11Vdj\n//79uOWWWwAA4+PjeOihhwAA999/P3bu3AmgWO1x2223IZlM4oYbbsAPf/hD6HQ6hEIhfOUrX8HE\nxARWrFiB5557rkTyCHKP28BIOvmmaDUoFEXRL+BKmZmZQSKRwKpVq1Q6K/mw53hZLBY4nU44HA60\ntLTg9OnTcLlcsFqt4gerAC7RYssVEy1FiylZfHJFkCtZQnJFkCJZBold3uRIFgD8f5f/g6zbV8IH\nH3yApUuXorOzE4FAAF6vF9FoFD09Pejt7UVPT0/V9mm89957GBgYQFdXF775wW2qHruScjWp0qWm\nBHFJlxaSxUav0yah5pMsNnKlSwvJimTKHyvL2yvrXqi1ZHGhRLy0bIDB3NPDFrFUKkW/l5tMJk4R\nIw0WPv30U7S1tcHpdGp2rvXIm2++iY0bN5akeXyyJXWyQaPI1rFjx3DZZZeJ3u7tt9/Go48+igce\neACTk5OYnJzE1NQUJicnceHCBXi9XixduhS33347brnlFoRCIVx66aU4ceIEdDodtmzZgpMnT6Kn\npweXXXYZHn30UVx++eW44YYbcO+992L79u345je/CavVir179+Lhhx9GOBzGI488UnIeSo7bwEgS\nrcWXQTehsVgsCIfVawEsFYqiyvZbuVwuDA0NlaUGSlq8V4qQYClBjmQVMgZRuSLIKRmUIliAsGRF\n54rnZZVYVihFsqopVkxyuRySySQ+/vhjetyB2+3Ghg0banKFTa/XI58vLvT/ft2/0J/PZrP4yzNf\nU3zcSvcEiZUXaiFA7P1cWktWIFUqLM5WdYakA9IlCwBmkl0ApAmXVkkWF5/G569Yy5UutSVLKqej\nLvpjKdKldZdBKXt6CoUCvaeHyBgpIUulUrSomc1m+P3+MhmzWCyqdrurJ7jmhx3du4tTtgwp6bJV\n78hpyuL1erFixQpcdNFFuOiii0r+7wc/+AFMJhOOHz9Of+7nP/85rr/+evoi9vXXX48jR45g69at\nmJ2dxRVXXAEAuPXWW/HCCy9g+/btOHToEF599VUAwM6
dO7F169Yy0VJy3IVOU7QaFDVeTCsdCCyH\ndDpNz7dKpVKSm3ZU4xwv+t4+mCF9QaDV3iyj14xsD/+eIaUUCtIeK3ySRQQLUEeyaiVX2WwWfr8f\nXq8XyWQSer0evb29WL58ec0XJ0zRYvM3q/5f+vzuP/snko6n9dyqTmNKcwH6JGanP1bSGEUKbMkC\nAF+yuBCuVLjkSBYTIlwAt3RpJVlcaRabSqRLDaSkWWzEpKteWrnrdDqYTCaYTCbefdOkwsNsNtOJ\nWCwWQyAQoLvdFQoFGAwGzlTMYrHAbDbX/PVOCVznXIlsXfqDp+o+1ZIzQ4uvtfvU1BSef/55/PKX\nvywRrampKQwMDND/drvdmJqawtTUVEnnQvJ5oChzfX19AIDe3l54veXPJyXHXeg0RatBUUu0Uint\nOo/FYjG6IyIAOJ1OrFmzRlZXHLPZjGhU3c3j1URKmmX0Fjd1y5EsqWmWUsliyhVBqWQRsfrkk0/o\nGvtqkclk4PP54PF4kM1m4XA4sHr1arS3t+PcuXNoaWmpi0WHkGgxeWjoRyX/5hIvLedWAUWBYEpE\nt1n9lvqhdBvnv9USLi7BYkOEC5AvXUoliw1bumopWWyIdPEJVy1KBsVgSlcjQlEUWlpa0N7eLtjE\nit3tLpFIIBQKIZ1Ol3W7Y6diclqPV4NCoSD4Gr2Qk61MJiN5hpbH48HFF19c9vk/+7M/wyOPPKL6\n71On09XFe2cj0BStRYzBYOCcU6UUZpe2UCiE1tZWOBwObN68WfKLBRutE62LvrdP1u3VTLOIYMlF\nimTlQhYYeqTdb0zJ4hIsQLpkAYA/0YFfX/tXZZ83Go2CXf7UIpVKwev1wuv1Ip/Pw+VyYd26dWVd\nOcW6DlYTtmjl83kEAgF4PB4AKFkEMVtLPzT0I9z0pflmIcsf/kjT8+QSiEim+JhRS7jYksX3f0ql\nS4pksZGTcqklWWxmkl0IptrQ3z6r6nGVSBYTrpSrViWDctm/+W9rfQqyoChKUtdBKd3uKIoqkbFk\nMolIJEL/W6j1OHkNqoaMSem0yCdbHeeB2DL+r6v3VEtOojU9PY0vfvGLAIDHH38cT3425DkajWJs\nbAwAEAgEcPjwYRiNRvT399NlgECx0crWrVvR39+PycnJks+TpMzlcmFmZgZ9fX2YmZnh3Cuo5LgL\nnaZoNSjkaoIaoiR2xUgIMiTZ5/MhGo2iu7sbTqcTq1evVqVLWy32aKkFX5rFJVhqlQzmQvIWTd2d\nCV65kguXXDExGAz0IEe1SSQS8Hq98Pl80Ol0cLlcogOsDQaDpBSpGuj1euRyOXi9XngK+aC0AAAg\nAElEQVQ8HsRiMdhsNrpMg1kmRPZskDk/f/PX1+A7D/waj/7w99HS8oewWCx4wPOA6ucoJhBqCJeQ\nZPHdVo5wKZEsJmLCpZVkMZmKd9IfVypdlUoWm0/jPYimWjHYFVT1uGqkWWwaTbIAddu7GwwGtLW1\nCY6FYbYeZ+8bE5o1xkzJKl0HkBbmYnDJVtJZlC1AWLjqFbmJFhGXPXv2YM+ePWW3ue222/D7v//7\n+NKXvoRQKIRvf/vb9D79F198Ed/97ndhtVrR2dmJ3/72t7j88svx4x//GPfccw8A4MYbb8T4+Dj2\n7t2L8fFx3HTTTWXfY9u2bbKPu9BpitYix2QyIZPJSL5qAhQXfaQFezqdht1ux8DAgCaNBOpJtOSk\nWWzJUppeseFKs7jkSkqapdMVJEmWWJolJlgEo9GIZFK9MrNYLEbLlclkgsvlwsUXXyz5sazX6zUT\nP6nkcjkEAgHMzMwgm82ir68PK1asQGdnJ3Q6HXK5HD2rhwsy52f8X5bRi6HZ2VnsyuxCJpNBPp/H\nv3b+a8XnKUcgiHAB8qRLjmRxfZ2YcFUqWUy4ygq1lqxgqvz+IdKldspVT2ghWY1KPp+v6pBkvV6P\n1tZWwZJvrWeNSRU
tgF+2Wn386VY9p1rpdBrd3d2Sbuvz+dDb2yv52FarFd/5zncwOjoKAHjggQfo\nBhb79u2j27Bv376dblixd+9e7NixA08//TSWL1+O5557DgBw4sQJPPHEE3jqqacUHXeh02zv3sBk\ns9mKr8ifOnUKy5cvR2dnJ+9tyHwGst9Kr9fD6XTC6XTKHpKshDfeeANXXXWV6sfVsmyQiJYUwZKa\nZrEliy+9EpMsKlIUEWOP+P48PsmSKldMgsEg/H4/1q5dK/trgeLjcHZ2Fl6vF4FAABaLBb29vXA4\nHIrmyXm9XszNzWFoaEjR+Sgll8vB5/PRTTkcDgcKhQIsFguWLVtWdlsh0ZJCPp+nr0b/lVfe700t\neRASLqWCxQeXcKkpWXwY9dqlo1ySxYUc4VI7zQKAaKp8QV5putVMs+Y5fvw4vYBtJJizxrja25MS\nbqaMERFLJBJIp9NYtWqV5NfBsqHGvvmPuWSrXkXrgw8+wMDAAG+nSkKhUMA111yDt99+u7lvqro0\n27svdLTsPJjP50vmW5HZHZdcconi/VaNjBzJMiR1QFLafaSkZFCoPFBIsohgAcolS4lgEZTsiWLu\n+wsGg+jo6IDL5cLKlSsrLqGp5h6tbDZLy1U6naabcpAN7ZOTk7znUklpL1C8Kk1KhB7teRQAcO+H\n94p+nZoJDV9ZodqSxT6mtSVRFckCAG+8uBhytavXHl4uUssKqyVZAHAuaqM/litdzTRrYcAsL+S7\nqMs1aywSiSAcDiObzSIUCgHgnzVmsVjowc/sZIukWgB3slWvqZbUaiNSUtqUrPqkKVoNjBpPKovF\nQnceJGVMPp8Ps7Oz6OnpUXW/lVIMBoMqtemFQgFzc3Pw+Xz48gsvqnR2pbRN6ZG2qh/86nI62fuv\nCEzBAuRLViVyVfJ9jUZJQ7bz+TzC4TDdVIXMWVP7cai1aJGOh16vF5lMBk6nE8PDw5zdwqpdxvjo\n2qJwnTx5EuPt42X/r1UZHLOsMC+xI2YlhNJtCCbbYWuNa/p9gsn5+0tt4ZKaZrHhKyvUQrKkQqRL\ninBpJVmNmmap2biqHuGbNfbpp5/CYrHA5XLRs8aYqdjs7Cz9b/IaajQa8c9/cCVu/7ffcH4vsSYZ\n9YLUskmPxwOXq7E7ai5kmqK1yNHr9XSHtmw2C7vdTpcS1svVEbJPS4lokUW7z+dDKBRCe3u77Bck\nKWlW25T87ktS0yxdTtrvgZ1msQULkCdZagkW/b0FRCufzyMYDMLj8SAajaKnpwe9vb1Yu3atZp2t\ntBAtsn/R4/GAoig4nU6MjIyIlthKbe+uNjqdDt8f+j6MRiOdclWjoQMAhD4TCKtFmxlZTIgIaSFc\nTMliQoQLUC5dSiWLCTPlajdps9+VL83io5KUqxIaVbIAdRthNBLZbJYWL+asMaHBzxRFIZVK4fDX\nenHDk88DKE21gHLZuvQHT+GV23fQ7e3rYf0jtZrB4/HQjZOa1B+L71m7gFDyQsDeb0Umj2/cuLEq\n+62UQMobpZ4fRVEIBALwer0lydyaNWtUX7SzBUvNNKvFV3x6ZqziMsCULC7BAqRJFqC+YNHfn9Xe\nnSSoXq8XsVgMVqsVbrdbk6YqXKglWqlUiparQqEAl8uFDRs2yJoZxidaWt8PzO9LUi4A+ON3vqXp\n92WipXD5E6XpodrCxSdZbJSkXGpIFpt41qy6bMmVLDZc0tUsGSxnsYqWnM57QPE102g0oqOjAx0d\nHbyt34Fy2Zqeni6bNcY19Lkas8bI2kwKMzMzWLp0qWbn0qQyFt+zdgEhdRHGleo4nU5s2bIFFEXh\n/fffr1vJAqR1HiRlWj6fD6lUCna7HcuWLUNXV1fZ/SS3CQYXShIsJkJpFhEsQJ5k8QkWIE2y3r/p\nL0RvUwmkvfv09DTdCMJut5d02asmlbR3T6VS8Hg88Hq9ktvJC1HLRIurJOlfN30Xg
HbCFeAQFLWF\niy1ZTNQQLqmSxUSqcGkhWQAQjrehvVs90apUstgQ6dIiefzz7E04duwYTCZT2Uw65h6feiWXy9W0\nhL9WyOk6yIdU2WI3amLOGkulUpJnjTEfV0plLJ1OSxbM6enpRTOTqhGp31eVJhWRy+Xg9/vh8/kQ\ni8V4Ux2DwaDpQGA14BOtRCJBy1WhUIDD4eDdA6MUdtmgkGBJTbP4JIspWIA0yQKEBUsKWgsWkWCP\nx4N4PI5EIoGhoSF0dHTUtDxDr9fLSrSYs7r0er3sdvJi51JPokVQW7i4BItNKNVWsWwJSRYTpcKl\nRLKYCAmXVpJFmIwU20W7uyOafp9K+CQ4n3KttFVeWkhKBvkaLpCPSWkzV/e7WsvYYk201BAtoChb\nW/+cX7YAYOufP4VXvz/fFKOWs8bkjN3xeDyKu/k20Z7F96xdQLCHFpMSJp/PR++3EksL9Hp93W+y\nbWlpwdzcHN3MgrT3JrOT5CQJStOsShMsIdiCJQsJnsKXZmkpWKlUit77l8/n4XK5MDIygnfeeafq\n7dT5kFI6GI/HabkyGo1wuVzYvHmz6p0361W0CGoIlxTJIlSSbkmVLCZyhKtSyWLCFi4tJSscLz12\npcKldppFHzdR+lpOpEsN4eJruMCES8bC4TC9cBaSMfK3FkJEUdSiFC05JXRivPp9ftlSitxZY+Qx\nxTV8nvmYymQyoCgK8Xhc9DE1MzPTTLTqmMX3rF1gzM3NwePxwO/3w2g0wul0Yv369bL2h+h0OlVf\nzNQkn88jmUzSZVodHR1wOp0YHBzU/E3HFANMMWn3idI0i0+yJKVZCiVLK8FiJj5qlNNpDZ/cMAch\nm81muFwuzcca1GqPFnnuS0WpcMmRLCZyhUuJZDEREy41JYuJN74E4XgrHJ0xTY7PliwmRLgA6dKl\nlWQJoTTlktsAQ46MMbvfCckYOxVTImOLNdFS+zVQTLbYqZYakL1eFosFXV1dnLdhzxqLxWKgKAoT\nExNls8YoisL+/fvhdrsxMDCAmZkZdHR0VDwKpIk2LL5n7QKD7Llavny54nidXD2plwVxLpdDMBik\nm1ksWbIEJpMJo6OjVZPBznMFJB3avWBVlGIBMAcNyNiFZYwtWVoIFlNKSMIoVk5XL28G5BxIgxhy\nwYIMQr700ktVKVmRQq0SLaWJthzhUipZTKQIV6WSxYRLuLSSLCb+2eLPoKZwCUkWm1qXFbLTLD7U\nTLmUwJQxPoRkjGtIL9e+MaZYLUbRIknPYoA9a2xubg7d3d2w2eYvMJDHVCQSwSWXXILJyUm89tpr\nOH/+PP70T/8UkUjxeWuz2WA0GvHqq6/C7Xajo6MDN998Mx544AEAwJEjR3DfffeBoijs2rULe/fu\nBQCcO3cOY2NjCAaD2LJlC5555hmYzWak02nceuutOHnyJGw2Gw4cOIAVK1aU/Qxyj7tY0Ml8k63v\nGrNFSC6Xq7hz2nvvvYeBgQHeKy3VgDlzKJ1Ow263w+VyobOzExRF4eTJk7j88ssr+h5SywblSpbU\nNEuflXZMsTRLrmSpKVikfNPj8SAQCNBS4nA4JEnJ0aNHsWXLlpovGMjPceLECVgsFrS2ttI/Ry3O\nLZFI4KOPPsLmzZtLPk9KTrS6wHD69Gm43W7eK/dS4RMuNSSLC7ZwqSlZbEJzbbAu0bYFfTjOnRCp\nIVxyRIsNl3BpmWZJFS02fMJV7+3cmSkGs6yMKWOkpXk2m0VrayusViuvjC000uk0PvzwQ2zatEn1\nYwulWmonWkqQ+tpcKBRwzTXX4O2336ZLwUOhEJ5//nn86Ec/wqOPPoorrriCvj1FURgeHsZLL70E\nt9uN0dFR/OQnP8G6deuwY8cO3HzzzRgbG8Odd96JTZs2Yffu3di3bx/effddPPHEE9i/fz+ef/55\nHDhwoOQ8lBx3ASBpUbdwn6FNJEOGFldbtJilZ
gDgdDqxdu1atLeXLs60HixL6Dyn3XWEtmkdUg7x\n2wlJljlY3DArJlkAkAtbcHDLVixfvlzyOfJRKBQQiUTg9XoRDAbR3t6O3t5erFy5UvabPJmlVYvF\nQaFQwOzsLC2J7e3tMBgMuOyyy2q+WKn3PVpicCVcWkkWUJpwaSlZ9PfTULb4JAuoPOGqRLKA8oSr\nHiUL4C4rrHfJAkpTDD6IjJ09exatra2gKAqhUIiWMiJjzP097ISsUbsVqtUIgwuhEkItygflkk6n\nJTXDSCQSaG1tpZM/nU4Hm82GoaEhOJ3OEskCgGPHjmFoaAgrV64EAIyNjeHQoUMYGRnBK6+8gmef\nfRYAsHPnTjz44IPYvXs3Dh06hAcffBAAcMstt+Duu+8uq0xRctzFQlO0Ghw1YnUyp0pryEKXzPBq\naWmB0+kULTXTunSgEsGSkmZJlSw+iGDJ4aPb7sHp06d5BwSLQUYCeL1ehEIhdHV1weVyYfXq1RW9\naQsNLdYCtiQuWbIELpcLQ0NDMBgMeOONN2ouWQC/aBUKBU2b1aglWgQiXF/47UOqHVOIj/3FJ1ZP\ne1KT44fm2so+VlO4hCSLiRLhqlSymBDhWmLR5n2iEsliw5SuhQCRMYPBALvdjs7OzrLbsPf3pNNp\nURlj7xurRxnTUrQAbZpjqIXUn11ohtZvfvMbbNq0CUuXLsX3vvc9rF+/HlNTUxgYGKBv43a7cfTo\nUQSDQXR3d9Pvh263G1NTUwBQ8jVGoxFdXV0IBoOw2+30cZQcd7FQ+xVGk4pQS7RIba/aMBfs4XAY\nHR0dcLlcsptZVNqwg6tskE+w1Nqb1TYt7zhcaRZbssTSrI9uu4f+mJSbSCWfz9N74yKRCHp6euBy\nubB27VrVSteqkU4WCgX6MRcMBmlJHB4e5vw56mHPGFO02Mkb+RyZ10I2VbOHZyr5GbRK0o5ccb/m\nshWYm0/MwvFWzWSLjRbCJRX/bIck2VJTsph4A8VFvss+q8nx1eLY5/+81qegOkKVAOz9PVxwyRjp\nfCcmY+TvasuY1qIF8MtWrVMtqe9L09PT6OvrK/v8JZdcgk8//RQdHR04fPgwvvSlL+HMmTNanGoT\nEZqi1QQWi0XVRCuXyyEQCMDn82Fubo6e4VXJgp3M0lKrYYcaZYJ8aRZbsJSUDHKlWEKSxRQsgpT0\niKIo+P1+eL1exGIxWK1W9Pf3Y/369ZrIh1aJFjuB6+7uhsvlKpsbx4aIX61TLZ1Oh2w2i48++giB\nQAAdHR10eSbZEM4enhmPx+mr1tlsFoVCAUajsWxxRMSM62dUO9FicuSK+wFok24xJYtA0iG1hIuZ\nZgn9v1LhkppmsdGiYYZc1BQuNdOshUylr1NyZIy5V0xIxri6KaopY5lMpipNE+ot2ZJzUdnj8dCJ\n1uOPP44nn3wSAHD48GH68zfccAPuuusuBAIB9Pf348KFC/TXT05Oor+/HzabDZFIhBZ68nkA9Ne4\n3W7kcjlEo9GSJh3M28g57mKhKVoNTr2UDqbTaXrBTppZLF++XHCGl5JzVCJazDRLTLAqTbPkplhs\nlJYJcsGXaGWzWfp3lUwmJc1bUws1RSufzyMUCsHj8dAJXG9vryyhr6VosZOrVCqF7u5uuqyR3CaV\nStEz88SGZ+ZyOaRSKXqhFI1G4fP5kEqlSlpOk0VRPB6HyWTS9Iq12sLFJVlM1BAuMcli31aubCmV\nLCZ8wqVVmpWIlZd3VypcWkjWQkyzgOLrttaJkpw9Y1JkjEvE5LzOZLNZdHRovwcT4JatWqVacgRz\nZmaGLtnbs2cP9uzZA6AoYCQVO3bsGPL5PGw2G7q7u3HmzBmcO3cO/f392L9/P5599lnodDpce+21\nOHjwIMbGxjA+Po6bbroJAHDjjTdifHwcV155JQ4ePIjrrruubK0wOjoq+7iLhaZoNThqLIxJWiSX\neDxOD0gG+
JtZqIHScwS0aXLBTrP4BEvS3qyCuGBxpVl8gkUwmUxIJIoLQGZXx0wmA4fDgaGhIXR0\ndFS1bK5S0SLljR6PB9FoFFarFUuXLlWcwFWr0QqBLVckuRoaGsLRo0fhcrlQKBSQz+fL/gZQcq6k\nnJC5CdpoNKKjo4N3ccIuH4pEIohGo5idnUU6nabTM75UTGmJIqBOOaGYZDFRWk4oR7LYXyNFuNSQ\nLCZM4aqmZDFplJLCRqZQKNTFPiopMpbP50ta25O5UFwDeoVkrBqlg0zqJdmS2ggDKAoVV0fmgwcP\n4p/+6Z9gNBrR2tqK/fv30+8Rjz32GLZt2waKonD77bdj/fr1AIBHHnkEY2NjuP/++7F582bccccd\nAIA77rgDX/3qVzE0NASr1Yr9+/cDKJYt7tq1C4cPH1Z03MVCs737AiCTyVRc/vPGG2/gqquuErwN\nVzMLl8sFh8Mh+UVBKRMTEzAajXC73bK/9uo9j0u+rdREiylaQimWVNESQolkAcUX4PPnz9NlZy6X\nC06nUxMRlsqFCxdQKBSwbNkyyV9DURQtV3Nzc7Barejt7UV3d3fFkvjuu+9i5cqVml415ZMru91e\nkly9/vrr2Lx5M4xGI53IkSQLQMkeLvIx83Pkb/Z9Qo5BPs9M+86dO4eOjg44HPMP1Hw+X7KPg/k3\nea0xGAyCJYpivxclwiVHsthIFS4lksUFn3CpLVlMjAbtulaKiRYbKcLVTLPkcfz4cYyOjtb6NFSD\njK5gt7Ynf+fzeaRSKXR0dKC9vb2iZEwuTNmqRaLl9/sRi8UwODgoetudO3fioYcewrp166pwZk1Y\nNNu7LxbU2GfB12yClGf5fD6EQiF0dnbC6XTKbmZRKS0tLUgm5V+d1kKyDJ+NqBIrE9RCssQEi9ky\nn6QhmzdvRmurdgs8ORiNRkm/R/beMbvdjmXLlqGrq0vVBE6v12uSaAklV0y5YiZWDocDp0+fpmVG\nr9fTM76YImOxWGAymcruB6Z0Mf8wvxcwn4qRhYzFYkEulyuRsdbWVsHHTC6XK1kUzc3Nwe/3I51O\n0+WqzJJEdgMPueWElUgWIK2cUC3JIseqdrOMeKgN7Vb1v6dcyQKaCVcTccjrm9B2gDfffBPDw8Og\nKKqkgQdTxpgJG9e+MSX7wpnJVi3KB+UmWkouQDepHk3RagJgfpZWW1sb3czC6/XSCYLa3efkYjab\nEY1Ga/K9uah0L1arp/h30iX9a/gkKxaL0XJlMpngcrlw8cUXI5/P4/Tp03UjWYBw6WAul6PlKpFI\nwG63Y3BwEEuWLNGsvFHN0kElckXQ6XQYGhoqOR5ZXJA/RGZI8wtgXma4/nBdCMnlcohEIvB4PJid\nnUV3dzd6enoAoOR8xEoU9Xo92tvbedPRQqGAbDZLy1gqleJsN/33bTfim4n/I3i/VipZTPjKCdWU\nLPYxiXBpmWalo8XFajxU/J5aCJcS+ISrmWbJQ8sRD/UMRVFob2+HTqfjnfPJTMaYF36kyBj5u1br\nGj7S6bSkuaaFQgGJRKLigfNNtKUpWgsANRahRqMRFy5cQCwWQyaTqWqDBCloPetLbpolhlCaJVey\n2IJVKBQwNzcHr9cLv98Pi8UCl8uFSy+9tKSePZfLyWrvXg3YYkMac3g8HqRSKTgcDqxatapqe8cq\nFS0uuerr65MsV0I/o8FgkCQzTBkLBoP0x8wFBgC6RKerqwu9vb0YGRnhLL3hKlFkJ2QAyu43domi\n2WyG2WzmXQSQBdKB9D1IpVLYee7JstuoKVkEtbsTihGaa4NOr91CmUgWE7XSLSVpFhdM4dJCsh7r\n/B28+eabnOWsLS0tkspZ65laDXmvB8R+b8xkTEzGmA08yL5ULhmzWCzY/83fx9jf/18A1U+1MpmM\npESLvCY38mN7MbA4n7kLDKVPMmYzi1QqhZ6eHoyMjAh2NasVcpthJBIJXP8
X/1vVc+icyCPeW9mV\nL6WSxR68297ejt7eXsESzmo3epCC0WhEJpPB1NQUPB4P3ZhjeHi4at2lmCi5j9hytWTJElnJlVpv\nimQzudlsLmvXTB4vMzMzCIfDaG9vh81mg8FgQDqdxvT0NCYmJkr2WzHL+0i5oholiszzZd8HzAXS\nEVdpOaEWksWEpFtapFlM0hELLNbqSB2TStMttSSLiTfQicKcCa2uuKrHHR0dLStn5eu4ySVi9Tqw\nl7AYRUvNFE9KmSLZm8r886M7P4c/eeJXAIBjx47RMsYsT9QiGZNaOhiJRNDd3a3K92yiHYvrmbtA\nkbpwIwtEr9eLQCBAN7O4+OKL4ff7kcvl6lKyAPHhu4VCgS6h8/v9sjoViaVZnRPyNplzpVlEsKRy\n6ht3lQwQDoVC9ODd1atXS1oU1NNVLtL1cGpqCrFYDEuWLNGsQ6UcDAaDpIG95LkzMzODYDBYM7kS\nO8doNAqPx4NwOEw/XsRKfrUoURRKxcRKFP999Fu4/JVHVblPxJiZLJZNtnRpk5anI8WFXSpUTNHU\nFi6uNItNPZUTFuaKr8tJb/F5r4ZwkZJBk8kEk8kk2HGT3QmPq5yVT8ZqMbCXsBhFi6Koqpb06fV6\nzr2pr35/mP6YLWOpVIo3GePrpijlZ5LabXF6enrRzaRqRBbXM3cRwtfMYuXKlSUv3BaLBcFgsIZn\nKgzXQpUr5XG5XFixYkXxZ/vfRyv+vkzJkpJmSZEssTTrlZ3/Fe+99x49G6rW++OUkk6n4fV64fV6\n6a6Ha9euxZkzZ7By5cpanx6Aomjx7RljigtTroaHh+nfRb3IFZHxzs5OuFyuknMUQ80SRS4Ra2lp\n4bwvhGTsN1vvBgBc+epjSu4WSaTC85KSjrZoJlsl3zPUqppsSZEsJnKES4s0iws1hUsMZgLMB3tG\nFGlLzrXfp5KFtFwWo2hls9mqDCuWA5+MMWHKGHnczM7O0l1bSdMxZiLGfgwB0i6UzszMoK+vT7Wf\nr4k2LK5n7gKF/YRkNrOIxWKSmllovQdKLSiKQjgchtfrLblqz0555HQb5EJuisWF3BQLAJ64chM9\nvV3pbCg21azhTqVStFwVCgW4XC5s2LCBfmOiKEq1gcVqYDAYSkpSG0WuSDJNzpE8B7RY5AmVKBKY\nLeFTqRSSySTC4TC90ABQVqLITsW47ruTv/fn2PLi91X/mZiSRUhHiwsctYSLpFll31ujdEsqYvu3\ntJIskmZxkfS2K5IttRtgSJ0RxbeQTqfTdDLLV6KoZBYdRVF1XdqoBdWeoaUWlcpYOp1GPB7HiRMn\nOEUsFovRI3U8Hk8z0WoAmqK1ANDpdEin0/R+K9LMQk7HNtJ1sB4h4phOp/HGG2/AbrfTm/krXVhy\nlQ1ySZbcNItPsvjSrJ/e+Hm4XC7Vm4+Qkkstrwwmk0larnQ6HVwuFzZu3MhZD6/X6yWV6lULvV5f\n0omvnuWKNEAh+8JcLheGhobqIumUsrjI5XL0QiKZTNIz+ZgliswBpmSf2H9csxu/8+t/Uu1cuSSL\niRrpFp9klZxHBemW3DSLTT2VExKqmW5VgpTHOkVRJQvpRCKBUCiEdDrNOYuOLWPsvZGLNdFqRNGS\ngtBjKJVK4eOPP8aGDRvK5otFo1F873vfw1tvvYVcLodCoQC73Y7z58/D7XZjYGAAbrcbk5OT+Lu/\n+zvkcjnY7Xa89tprAIAjR47gvvvuA0VR2LVrF/bu3QugOE9xbGwMwWAQW7ZswTPPPAOz2Yx0Oo1b\nb70VJ0+ehM1mw4EDB7BixYqyc5Z73MVGc2DxAuH48eN0WaCSfVaFQgG/+c1vRIcWV4tMJkO3+k6l\nUrDb7ZidncXw8DDvVXUmUhMtpmgJpVhSRUssxeISrXf/+27NFutvvfUW1qxZo/reOzKvy+v1wmAw\nwOVyweVySdrAK2U4ttaQ5OrcuXMIh8O
0vNvt9hK54tpXVE25isVitACS0ljS1GKhwVWiyPyTz+ex\nO/IfFX8fMdFiolS4pIgWEznCValkcUGEqxZpFh9iwvXCujEsXbpU6SnVHHbzDubf5MKD0WiExWJB\nJpOhR3cslE6KYszMzCCbzcoabr8QIHuB16xZI3rb++67D9u2bUNfXx8uXLiAyclJnD17Fvv378fV\nV1+NI0eOwOfzwel0gqIoDA8P46WXXoLb7cbo6Ch+8pOfYN26ddixYwduvvlmjI2N4c4778SmTZuw\ne/du7Nu3D++++y6eeOIJ7N+/H88//zwOHDhQcg5KjruAaA4sXkyQuUlKqYcXbFJ25vP5QFFUWTe6\nDz/8UFK78lpIlo5SJlmnvnGX6LErQayJiBzi8Tg8Hg89r6u3txeXXHJJw1yh4ioL7OzsREtLC9at\nW0ffptbJFbmfA4EA2tra4HK5sHLlygUpV0yklCiezF9WUSmhHMkClKVbciULUHfvlhLioTbozNp0\nKFUiWYB4wtXozwej0Qij0Si4N5LI2Pnz56HX6zk7KbIHg7Pb2jcqCznREiKdTssvU5oAACAASURB\nVEt+T/V6vbjsssswODhIf27fvn1wuVx46KFi91an0wmg2DVxaGiI3h89NjaGQ4cOYWRkBK+88gqe\nffZZAMDOnTvx4IMPYvfu3Th06BAefPBBAMAtt9yCu+++u2wrgpLjLjYa91nYRHVIY4BqvjjH43Fa\nrvR6fdmeHiZa7CNTYy+WFNiSpbVgEYQGBEuBJCp+vx9msxm9vb1l87rqGbE9V9FoFBcuXKipXAEo\n6ZjZ2toKl8uFwcHBhl9Mqo1er8dbX/gGCoUCLvn5/5T1tXIliyBn75YSySJI2bulRZpF0PmKP2fB\nWV97dbmE68CqL9dFyayW6HQ6upOi2WyG1WqF1WotuQ17MHg6nUY4HKb/zddJkflxvb7GZLPZmnel\nrQVSW7sDRdFiN8P4+OOPkc1msXXrVszNzeG+++7DrbfeiqmpKQwMDNC3c7vdOHr0KILBILq7u+l1\nn9vtxtTUFACUfI3RaERXVxeCwSDsdjt9HCXHXWw0RWuBoMaCkIiMlqJFNvL7fD74/X66xbyUZETK\nLC05TTCkSJaUNKvzUwpzbulvVtWSLEB+osUsVyPDkNWWK51OR3de0gI5DS30ej38fj/eeecdujFD\na2uraJMGNSAXGZjPg0svvbShr0JrDVNIx5f+HnZOvyjp65RKFhOxdKsSyWLCl25pKVn6yPxjTudr\nUU22lKZZXBDhOvXf/hRnz56tW0HQAr4LoMwUmG8wuJxOinxt7WshtYs10cpkMpK2R5DEky1luVwO\nJ0+exMsvv4xkMokrr7wSV1xxhVan20QCzXf0BYJaopVKpVS/ilQoFOhOgaFQCB0dHfQVezmLSrPZ\njFgspso5meKVbzfs/LRYaiMmWcw0q5qSBUhLtEijBXa5mtzfj9xzUrPkkMjVzMwMQqGQ5IYWFosF\nV111Fb0ASSaTdLt09qBTtoQpGVJJ9rb5fD46IdyyZUtTrgRIJpN0yWpLSwt6e3uxYsUKGAwGvLVx\nIzYf+Z7g16shWYRqtoEHateZsF7TLUI+n2+KlkTqtZOiGItVtKQmWmTOmE6nw+OPP44nn3wSALBj\nxw5s27aNHtnxuc99Du+88w7cbjcuXLhAf/3k5CT6+/ths9kQiUToxxj5PAD09/fjwoULcLvdyOVy\niEajsNlsJedBbiPnuIuN5rt7ExqLxaJaaV4+n0cgEIDP56Onl7tcLqxZs0bx1bGWlhbRREsKPR8k\nEFvO3zFKCkSypFJtwSKYTCbO3ylz/lIgEKDld9WqVZovYNQSLTJHzePx0HLV19dX8hiTsudKr9eL\n7pUgV4TJH7IISaVS9CKE3bKcSFk2m6U7gpIN7ZdccsmiXERIhTmDjTRb4bvP3vrCNwCAU7jUlCz6\n3Dh
KCdVKs9gQ4dIZtOtDxUyz2FQiXGqmWYRT/+1PAVR/mG2t0bqkX2knRTLCgdlJkU/G2J0UxSAN\nQBYbmUxG0nujz+eDw1Fsdbxnzx7s2bMHAHD69GncfffdyOVyyGQyOHr0KL7+9a/TMyzPnTuH/v5+\n7N+/H88++yx0Oh2uvfZaHDx4EGNjYxgfH8dNN90EALjxxhsxPj6OK6+8EgcPHsR1111X9jscHR2V\nfdzFRlO0Fghqlg4qJZfL0Z0C4/E4bDabqvOgxEoHxcoGez6Q18qYr2yQKVlSSgZrJVlAUbTi8eLe\nBi4x6e3txdDQUFWvDhuNRlCUso337J+hs7MTvb29suVKDswrwl1dXZy3oSiqRMRCoRCi0Sh935tM\nJrpdeSqVossyyZ/FtGjkI5vN0nKVz+fR29uLjRs3St6v8NYXviGabqkJSbe0kiyCKVR8m8451Glq\nw0RIspioWU6oBottrlQul6v5z2swGNDW1ibYwZY072CWKJLRLOS922g08soYs1R7saWWBKmiNTMz\nw9l1c2RkBF/4whewceNG6PV67Nq1Cxs2bAAAPPbYY9i2bRsoisLtt9+O9evXAwAeeeQRjI2N4f77\n78fmzZtxxx13AADuuOMOfPWrX8XQ0BCsViv2798PAJiensauXbtw+PBhGI1G2cddbDTbuy8QyFX3\nSggGg/D5fBgZGZH8NZlMBj6fD16vl57f5XK5JM/vkkM+n8fRo0dx5ZVXcv6/kGgxJUtqmsUlWuwk\nS0y0jv1D7SQLAEKhECYmJmCxWBAKhdDV1YXe3l7YbLaaLe4/+OAD9PX1oaenR9Lt+eSK+TPUuhU7\nUNo1kzR2cTqdMJvN9Kb1ZDJZ1rac7JPgSsXIH7lXgxsFcnHG4/Egm83C6XTC5XIJXlmXwuYj39Mk\nzSqjoP3vhIgWoL5sSRUtJlKES8s0CwDeffddDA8Pc87qW4gcP34co6OjtT6NimF2UuRqa8/spDg3\nNwe3272gOilKQerv+t/+7d/w/vvv42//9m+rcFZNeGi2d28iD6mlg2RArc/nQ6FQgNPpxNq1azXv\nEKTX6yHzwgAA+UkWUC5ZXKWC9SpZ+Xwe4XCYbgYBAMuWLcPatWvrIjmRum+s2smVEpglbnq9Hk6n\nE5s2bSq7IsnctC6UipEFRyqVwtzcHPx+f8lcHdLKmb1frJ67h7GhKArBYBAejweJRIIe46Dm68db\nX/gGRn7ymGrH48MUMiJrVd7VU8rxmRj9JtVkS4lkAeLplhaSxWaxJVoLBWYnRTK2hQ25aPzWW29h\nyZIlop0UudKxRn1skMYkUvB4PIt2z1Oj0RStBQJZXCoREQJf6SCZ7UPkymg0wuVyYePGjTW5osie\n4wBwp1lcgqVkb5bc/VhA9SUrn88jGAzC6/UiEonAarWit7cXg4ODOH36dEk71lrDJ1pMuQoGg3T6\nVo9yRVJcAPRzQWqJGx9ipTlcA30DgUDJQF+dTseZiLW2ttY0Fcvn8wiFQvB4PJibm4Pdbsfg4CA6\nOjo0O6fTf3S3prJlChaFQivZYksWwegvfl8tSgmlUu1mGU+v3oy33nqLfjynUilEo1H6YkOjLqyb\nlKPT6ejGG2QPEhsiY8xELBaL0f9md1LkkrF6uOjIRmrZIFAs37vmmms0PqMmatAUrSY0zAUws1lC\nMBikZ/vUeoYSOUexc1CSYhGYaRafZAmlWdWSLNJwxOv10t2Ali5dWrInjqIo1QYWq4XBYKD3aDVK\ncsUskc3n8/S8t2peaJAy0Defz5eIWDweRzAYpDesA6WpGPuPmgtW5u82Eomgp6cHbrcbXV1dVfu9\naSVbRLLof2ucbHFRSbqlNM1iw063tEqzRkdH6YsMpNSWpPbpdJpOObgW1aQrXj0urKXAfM1bLIgJ\nB/N3LfRayG5rz9dJka+tfbUvTMkRLa/Xy7lHq0n90RStBUSliVY+n
0cul8P777+PcDiMzs5OuFyu\nqjdLEII0xBASrUoki4mSJOvXD2m72ZOiKFqu5ubmYLPZ4Ha7sWHDBs43Bb1eX3dv1AaDAbOzszh9\n+nRDyZXT6cT69evrel+IXq8XTcVyuRzdyj6VStEiJpaKSWnjTObkEXHu6uqCy+XC2rVra5amaZ1s\nEUgCpYZw8aVZbJSkW2pJFoGkW/lWbV5n3vmjr6FQKNAXGQBgYmICq1evLrkdc2FNFtPRaJRuzqB2\nV7xqQVHUgt+XxEaN1u7MPa98kAtTzPEekUhEs06KYsgZVjwzMwO3263a926iHYvr2bvAUfKEZy7c\nZ2dnkc/nYbfbMTIyUpdXAEl5I3M/B7NsUEiypJYNigkWV5p17B/uwjvvvINsNltxCRmbXC6HQCAA\nj8eDeDwOu92O5cuXo7OzU/R3Xi8LB2a64fV6YTKZMDw8XCZXRKxqJVekFbvX60Uul4PT6cS6desq\nbs5QLzD3SPANOCUzdciCNZFIlKViRqOxRMAAYHZ2FtFoFB0dHejt7cXq1avr5jXk9B/dDQCqCBc7\nzSr7/wrTLamSxUTNvVtKaZ0yINmvrJuoECT9Jn9nMhlQFIVcLke/LpByM7GFNekOyu6Kx94HyZWK\n1aoRg9at3euRas3QErswBfA/ZtLpNLLZLAqFguROimLISbSCwSBvaWWT+mJxPXubACi+iJE27Mlk\nEjabDcuWLUNXVxdOnTqFtra2ulkgseFq8a5WggUAhrT8RJCUCoq1n5cDsxtbMpmE3W7HypUrNenm\nqBV8e65sNhvC/z97bx7fRn3n/78syfJt67A1Gsmxc5OEhCsJFEK5Cm2htCkUknAlNCd8y27766+0\ndGn3R/vbluPb325h6Zb9lcVJQ0IIlGPZ0lDuLYSjhKuBAIHYcSzNSLIk27rP+f6R/Qyj8UgaSaPT\n83w89LDj2PJIHo0+r8/79X69/X709fXVjLgir4dEIoG+vj4sWrQo55tvI5Nvpg6pik1MTMDlcoFh\nGN7WqNVqEQqFcOTIETgcDsmqWDUsOYRSq1v5RBb/fTVqJVS6miWmzXF8E0opwfXh+uPXVvIaZVkW\nyWQSAwMDAD7fnAEgOTKCXEfI+abVavlBrlIIK75kYZ0tiEFqUV2O3h9VaFWXfOcMgGnnjNCimEvA\nC8UYcLyilc0KKYSc97XiNFLJzcx69TY4uRYvJB3N7XbzMcrz58+f1oxOKkbZdrurjTCwg+M4BAIB\nxe6758MJBOdJJ8IRxNUsYT9Wc3NzSf1QwsVENBpFX1+f5N+oUJqamvj48HIjp+fK7/cjHo8jmUxm\nWF0rKa6EM99isRgsFoviyXeNCImwd7lc0Ol0/N9WvCgSV8UikQg/3JS8fsVVMeGtnAvLYsWWXJHF\nf38RYquYapaQagVltPgyX7dKVbc++OADhMNhxGIxmM1mLFiwQPK9iWzSCMc8kGuLcANHLMaE1xzy\nMV/FVzjAXLiwJhZFcq3NVhXLZ78VMxOFVjwer6uNLp1Oh87OzpxJiolEIiO8Q0rAx+NxhEIhBAKB\nnEmK0Wi04PNIpXrMrFdvgyN+0YXDYV5cNTU18TaoXBewUocWl5vm5mb4/X589NFH8Hq9uP0n+2X9\nXD7boByRJUYcelFMRUs8h4ws+LNdsIuhubkZyWRStiWhUMSVK4PBAIqistoCydDeN954AwD43WFx\nSp7Su8PEgulyuXghu2DBAlVc5UF4jnIcB4qicMopp+Q8n/JVxQDwCw/SK0aEGFl4AMevR8IYe6Wq\nYvXYt1UIUtWtclezxJRa3fr/556EaDSKzs5OPub70KFD04SM3D5CoRiTEmYEoRgjgQliiyIJS8gG\nGdVAFtKhUAg+n48f3Cu2m0nNiiK/sxaGFVeaWqpoKYEwyCiXgH/33XdB0zS/UUWSFGOxGJ577jk8\n8sgj6Ovrg9lsRjwex65duzBr1
izMmjULdrsdd999N3bt2gXg+Hlz6NAheDwemEwm7Nu3D9/97neR\nSqWwefNm3HLLLQCA4eFhrFu3Dl6vF8uXL8fOnTuh1+sRi8Wwfv16HDhwAGazGQ8//DBmz5497bgL\nvd+ZiDqwuIEQRiiPj49Dr9fDYrHAYrHI7hsifUDz5s0r89HKhyRMuVwujI+PAwAWLVoEs9mM1Rf+\ns6z7yCW0ej6cOP49BVSzpJIFGYZBJBLB3Llzc95PPB6Hy+UCy7JIpVL8kNZyLfjfffddxQUFEVcM\nw8Dn88FgMMBqtcJkMmWIKwA5bYG5BvlGo1F+oSOeG0X+nW+nl/QgEgtmX18fKIpSVMg2IslkkhdX\npAJutVorGgQiDDcQnx9kM0ir1UqK9EKqYnIEV6HVLCnyia1Sq1nZSPYlyiqyxNUsKYoRW3+7ZltW\ngSFO1xTesvURyqmYSokx4dfIR7GQE1sU5WwQie1mUoN79Xo9LywpimqIWVFy+PDDDzEwMDDjrtN/\n/etfsWLFiqybSPF4HMeOHcNzzz2H5557DhdeeCGOHTuGY8eOweFwIJFIQK/XY+/evXjvvffwL//y\nL3jhhReQSqWwcOFCPPvss+jv78fKlSvx0EMPYcmSJVizZg0uv/xyrFu3DjfccANOPvlk3Hjjjfi3\nf/s3vP/++7jvvvuwZ88ePP7443j44YczjqeY+20w1IHFM41UKoUjR47AYrFgzpw5RdkNWlpa4PP5\nynB0hSGeC2U0GkFRFAYHB/Hxxx/DYrEo8nuIyCqEbPHter0ek5OTkv8nHGybSqVAURROPPHEitgj\nSEWrVDiO4yOVheJKOAi50J4ruYN8hRY0MnZAuCARVj70ej0/Z4dUrubOnVvWmU2NgFiUVttOKQw3\nMBgMkt9DFqvkRs6NSCSSURXL1ium0WjyVreUEFlAbithuUQWcLy6lW6u7h5poVZC0puVjULSNcmN\nWLXknhtSFZVcFkWhOJOyKJKP5HO5drPR0VE+/CPbrCipqlg1eyFLpdEqWoWQ62+m1+sxb948vPPO\nO1i1ahV+8IMfTPueeDwOrVaLW265BVdddRUA4M0338T8+fP5TeB169bhySefxOLFi/HCCy9g9+7d\nAIANGzbgtttuw4033ognn3wSt912GwDgiiuuwE033TRtk6GY+52JqEKrgWhubsbJJ59c0n1U0zoo\nji43mUySc6EKtedlq2YJRZZc22CuGVniHi1hPwuxXC1durTiCXY6na7o3rFyiKtCkdPAHg6H4Xa7\n4Xa7EYlE+B3fpqYmjI+PY2JiQrLqMdOHnUoNEq4nUSpnsSqsGAgFeDQaBXD8/Nq79FysOfhy2Y+3\nGlbC1nEAaEKYVl5syalmEZQOysiFnHRNcm4Izw8SYiCsphPxQuzMpVgUs/WLSVkUgeMLa51Oh+7u\nbsnNRWIxyxdPLhXAUGgiXiWZiUKrkDEsuaLd9Xo9wuEw9u3bh3vvPb6B5HA4MGvWLP57+vv78cYb\nb/BWf7Ip39/fD4fDMe1ndDodenp64PV60dvby99PMfc7E1GFVoNR6iwt0j9TKYQWpXA4nDe6XKvV\n8hekS8//30X/XiUrWQS9Xo9IJILh4WG+L85qteKkk06q6uylQkM6akFcyUFY9SQzxRYtWiSZzEiG\nnZKbz+fj7WjZZkcRMVar83WKRfj3nZychMlkwqxZs2SNC6g3hH/XbJDKx19oGl98dk/G/ylVzRIj\nrG6Vs5olpJ0pj9gqlHzVrXzVLKUQnhu5qulCISY1ADyXRTGbWJDbLxYOhzExMYGOjg4kEglJi2K+\nXshkMpkRwhAIBCQj7bOJsWpsRBHhOZMgtj85MAyD5cuXZ/3/p556CqtWrYLJZFLq8FRKQBVaDUap\nQksoZMqFOABCqXQ9KaSqWWKRJaea9fxDf5f1/8LhMD8fKhQKgaZpnHLKKYrP0yoWnU6X1zpYT+L
K\n5/Pxc9/kigQ5s6OEQowsqCKRCL8YqVRoRzmQGiRstVqxePHihhNXhSKsiglthOUSWYRKCKzj1azP\naWeO/62VEFyFVLPEVLK6VQparbYoi6JUqItci2IqleLHJqTTaVAUBaPRmNHXWqhFUafT5XQESCXi\nkc9JIh6xI4qtiuW4/pWyhqlXChlWzLIs7HY7AOA3v/kNfve73wEAnn76adhsNuzZs4e3DQKA3W7H\nsWPH+H+PjY3BbrfDbDZjYmKCT7YkXxf+TH9/P5LJJCYnJ2E2mzOOo5j7nYmoQqvBUGLRRMSakguw\naDTKiysSALFo0aKi+j80Go3kzBQ5FFPJkhJZoVAILMvC7XajubkZVqsVy5cvx1tvvZVRSq8Fmpub\nEYlEpn29nsQVOU4irux2O5YsWaLYMcjp+RCHdgQCAVmhHbl2tstNIBCAy+WCx+NBV1cXKIqqqUHC\ntcihq25CLBbDyt9tL/vvanMBEarsv2YatVrdqlQ1SykKsSgS4UL6TMnXyLVDo9EgkUggHo/DaDSi\nv78fRqMxa0U9W79YsRbFXIl4JJhGymYZi8Uyrn9SVbFCXAGVGkVSa8RisYIqWsQ6+J3vfAff+c53\n+P+bnJzEyy+/jAcffJD/2sqVK3H48GEMDw/Dbrdjz5492L17N5qamnD++efj0Ucfxbp167Bjxw6s\nXr0aAPCNb3wDO3bswJlnnolHH30UF1xwwbS/YTH3OxNRhZbKNEhMeakVGdI343K5+Hh5JXqUWlpa\nZKcNCilVZAWDQV5ctbS0wGq1YsWKFTXvJRdWtOpJXJHI+MnJSRiNRthsNkXFVSEoGdoh1SemZOM6\nqbC63W60tbXBarVizpw5M7oXTQ7ExkzSQP/rGxfj0v/8U9l+X5vr84/lEFviapaYUsRWKdUsMfVS\n3SqWXPbVaDQKhmHAsiz0ej16e3vR3NyMWCwGr9cLh8PBWxSFA29LSVHMF2lPjllsUSzEZhmLxXhX\nAIm0Bz63WUoFeJDHMBP7swDIXnOR+aHZBhs//vjj+PKXv5yxia3T6XDvvffiK1/5ClKpFDZu3IgT\nTzwRAHDnnXdi3bp1+MlPfoJTTz0VmzZtAgBs2rQJ1113HebPnw+TyYQ9e47bqp1OJzZv3oynn366\nqPudiajx7g0GSScqhb/97W98n1ShBINBfnZXc3MzKIoqKF5eDh988AF+dNPT+Y/lf2yDuQRWLtvg\nc7tvyqgItLW18Y8n2xvb/v37ceaZZ9aUHcvv9+PIkSNoa2srKYq93BARSJImyTwuoW2mnpEKZiAV\nsmxDfOWGdkgNEs51nqoch/T5MQyDcDjMx9gLK5sn3fvvZfndRGgRlBZb+YQWoRixpaTQEnLgx9vK\ncr+1RCqVgtvt5q2BNE2Doqicr1Upi6LwJjdhU4p0Op01RTHb+lDKopivCkUeg1ScfSwW49ctWq0W\n8XgcNE1Pq441cqXryJEj6O7uzgibkCKdTuPcc8/Fu+++W6EjU8mBGu8+E1FiQVpI8iDZXSFipLW1\nFRRFYfny5WUbTlfI/RZTxQKAf7vty9i/fz86Ojr4ioCcRSsJnqj2YD5h5Wp8fBwcx2FwcLDmKldk\nHpfL5YLf7+d7h4TDjhsFOcEMhYR2NDc38xYejUbD9wZW+9yrdYQDticmJmAymTB79mzJEBUAeP+m\nbYqLLbHIEn5NCcElV2QBhfdtqSKrcDiOw+TkJJxOJyYnJ3nrvNzxHoVaFMUJm2J7X7bgjnJbFMlj\nyJUS6na7MT4+jvb2dkSjUQSDQf4xCIdVS1XFsiVB1gPxeFzWtXtqaqqoTXCV6qEKLZVptLa25kwe\nJG8aZIBwR0cHKIoqenZXofxfW5+Q9X35RFa2atY/33IODAYD5s+fX7DdqppCi+M4PiiCVK5omsac\nOXPwwQcfoLe3t2bEFTl/fD4furu7YbVasXDhwoYTV4WSbzE
Vi8XAMAw/SLi9vZ1PJHM4HHA4HHyC\nmFSv2Ex+foPBIBiGwfj4OH/OLVq0SNZ5r6TYkhJZ4v+fyX1bjUQkEgHDMHC73ejs7ITNZitbAI2c\njRyhvVmcQCi0KAoFjDDWXimLIvkaEWPiqlhnZ2fWWZkk0p48jnA4DJ/Px1sUhZH2Uj1jtRppLzcM\ng2EY2Gy2ChyRilKoQqvBUKqiFQgEMr4mXsSThUoxYqQSaEPFzQL784P/q6THQ/rbKjXkVfx3MRqN\n03quyOyxeDyecX5UWlwJU++6u7vVYAaZCAcJkwHMy5Ytk9wNJ6EdQmtiPYR2lItoNMongra2tsJq\ntWLu3LlFvcbLUdnKRiliq5Bqlph8YkutZuWHWAOdTicAgKZprFixoiZsvHJmEootij6fr6QURQKx\nKIo/F4qzUCgEt9sNo9HIx9kD0y2KciLthdZE4Xw0YlHU6/WSwR1kBmOlicfjsq6/TqcTNE1X4IhU\nlKL6r3wVRVFKaHk8now5RRMTEzAajaAoKmMRX5O8/h6wbFHBP5Yrwl0uhc6sKgYirliWhd/vlxRX\n4spVV1cXDhw4gFQqlWEfIYtt8lHJ3T5iK2VZFl6vV029KwDy2mNZFqFQCL29vZg3b15Wyw1BGNqR\nzV5SaGiH8PxQMrSjXCQSCbhcLrAsi6amJlAUhdNOO00REVmq2MpXzRJ/b6FiqxSRRcgmtsolshoB\nYkd1Op2YmpqCxWLBkiVLKj6cvlQqZVEUCplUKgWPxwOHwwGO4/j+UpJ+XIxFUc4gc7IhRR4LCe4Q\n2rTFQkxoUSzHe5ic+2RZVq1o1RlqGEYDQsrnxZBKpeB0OvHpp5+iubkZZrO5pgIJ8g4pfv09AIA2\nj9AS2waVEFkAcPToUWi12qxT24slm7gyGo0FB1oIZ0YJ48ojkci0hbaw6tHW1pbXA89xHJ/OOD4+\njs7OTlAUBbPZXJOVz1pCapCw1Wqt+CBhqYUUOT+yhXYIz5Vq/J1J1Y9hGMRiMVAUBavVmtNGVQrF\niq1ChBahELGlhNASIhRcajVrOkJrYFdXF2w2GwwGQ028V1YT4WaO+EY2Ipuamni3RU9PDywWCwwG\nQ85riFyLIiGfRTEXxKIoDOwgH4mg1Gq1WatihUbaHzhwACtXrsz7vb/61a+waNGijDlZKlVDDcOY\nqRQ6tDiZTMLj8YBlWYTDYZjNZuh0Oqxataq+3jBkiiwxSoksAHw0rxJkE1eLFy8uKdBCzswo8qYS\niUQQiUTg9/sRiUQkffytra189WpiYoLv2SvWojWTIP1q5G9MEiGrOUhYbmgHmQlEhrQyDMPbi4T3\nIRZjhSxAciF8fUxNTWUMPi83xVS2ihFZ5OfkiC2lRRbweXWrnNWsTz/9VHZceS0gHCgM1JY1sFbI\nZlFMJBJgWRZOpxN6vR5msxmtra2IxWJ8r5XwGqLX6zN6xJSyKMoZ9CzHokgEJXm/FPa8EUEpjuUX\nCjNhpH0hM7QuuOACWd+rUhuoV4YZSjwe52dcxePxjEVKU1MTfD5fXYosOQirWUqKLOD4RVXc31YI\n5RJXhSBcJBsMBsljTCaTfG+Yw+GARqNBc3MzNBoNwuEwRkdH4Xa7JSseM31BIq76dXV11V0YSL70\nMLIbTIQYmamTrele3C+W7XkQ9/oZDAbFh1fLpRCxVazIEv58NQIygONiK6XcdI4MXrrpar7S4ff7\nM3qBhNYtqV6gSld5G8EaWA3Ie5rT6UQ4HIbVasWpp56aV1iQIclKWRSFTmAYHAAAIABJREFU90uO\nS1wNK9SiWGjPWywWg9/v54UZOc/JsOqRkZFpYkx8LWRZVnHHjEp5mdkrngYl2xtQLBbjZ+2kUik+\nYjbbRaLWJrRntQ0WILKEKC2ygOMNtoX2aKXTad4yJhRXwsVjLaQFAseblcVR/kuWLJkmnlKpVIYt\nkcRpixdRYmtirpjheic
UCoFlWX4mWynBDLVOvt1gcWhHNBqFx+ORDO0gVqJQKIRAIIDOzk7QNF0T\nvX6VDsgApAVXOapZhA72+CJ0alD512RXV1fWXiBxuhyprAvnzmm1WslQF6U2dCKRCJxOJzweD28N\nrNbQ9HqDPHdutxs9PT0YGBgoyAYtfP1nQ2xRDAQC/HVEWFHKJsYqkaIop+eNvPe3tbUhFovx4R2x\nWAzBYBA//vGPQVEUbDYbjh07hjfffBM+nw+zZs2CxWJBIBDAtddei9HRUSSTSfzgBz/At7/9bQDA\njh078E//9E8AgJ/85CfYsGEDAODAgQO4/vrrEYlEcMkll+Duu+/mN9nXrl2LkZERzJ49G3v37oXR\naJx23IXe70xG7dFqQIRDiyORCC+uAICiKFAUlXcn7sCBAzjxxBPL1uNQDJJCSySy5NgGg/N6yiKy\nACAcDuPjjz/GqaeemvP7sokrk8mUIa7I91ZTXIXDYX4IdUtLCyiKQl9fX0kLGXHFQ9gvRt4g9Xq9\nZJ9YPQQyEEjqHRngbbVaS37uZgrhcJhfqAFAe3s7tFptxnDTWgntyCW2Sq1mSSEWW5UQWoCyYkuJ\n3iyxhVXOEF9yjmQLNJCyBlIU1ZAbIkqTTqf5xMV0Og2bzVbV505qQ4ecL7FYLG/lNFfohViMiYc9\nS5HNouhwOAAAdrtd8udCoRBGRkYwPDyMX/ziF1i7di2OHTuGY8eO8cOvdTod7rnnHqxatQonnHAC\nWJZFMBjEihUr8NZbb6GpqQnLly/HgQMHYDQacfrpp+Oee+7BGWecgUsuuQR///d/j4svvhg//OEP\nYTKZcMstt+COO+6A3+/HnXfemXE8Pp+v4PttUNQerZlKPB7H8PAw3G43dDodKIrCKaecImtGA6Gl\npQXRaLQmhFYymeQXWxnUUCWLQOLdpainyhURVx6PB83NzYomtwHyKh5C2whJx8sWyCDe1a7moige\nj/ObGyT1Th0kLA/yWmdZFqlUChRFYcWKFZLPXbb0M+E5kq3i0dbWpug58v5Nx0VDNeLfKyWyAKD7\naPmqW8UgZwCunHOEvM+RPlSz2YyFCxdmrUKoZBIIBOBwOOD3+9Hb24sTTjihYiNOciEnhVXKojgx\nMSFpUZTqFyvVohgKheDxeEDTNB9pL7YodnR04MQTT8SiRYtw11134ac//WnG77r99tvx2Wef4dxz\nz+WHsOt0OjzzzDO46KKLYDKZAAAXXXQR9u3bh/POOw9TU1P4whe+AABYv349nnjiCVx88cV48skn\n8dJLLwEANmzYgPPOO2+a0CrmfmcyqtBqQNLpNFpbW7F8+fKiF3ekQbVaJBIJfsEVi8WmDy8sUmQ9\n+eZPFDi67Gi12gxvt1hckSS5WhRXpPopFOinnnpqVWYrkV3GlpYW9PRID5YWz4siPUAknlfs3xcu\nuJV+TOR8JbZciqKwbNmygjY3Ziokyp5hGITDYd7SnC2shSAntIP0RwhDO7L1AYkFWb6ETSnEVsJy\nVLPE981VYU+h+yhXktiqVNJgvnMkEolgbGyM7ycliYGxWAwff/xx1sqpsFes2vbVakGCLRiGQUtL\nC2w2W131mRKqYVFMp9N8pH06nQZN0+jt7c2ItJeyKO7fv1/y+G666SZ84xvfwJIlSxAIBPDwww/z\nlbJZs2bx39ff388Pthf2eZGvA4DL5eLndFmtVt4NJaSY+53JqEKrAWlvb89agpZLS0tLxYUWCehg\nWRaJRAIWiwUnnHACOjs7M22DWURWPttguUUW8HniI1k4kt2lWhVX0WiUF1darRYWi6Vuqi/5vO/i\nN8ds86LEs8TkLrLJ/BfhZsDixYvVBnkZkHABlmX518js2bPR1dWl6Pmfb56OuA9IidAOIrbKKbII\nnc40ArOqs7AtVWxVC1I1JdZAm82Ws1eykLlRUtVTJWcTVhsygsLhcCAUCoGm6bp5vygFO
aEXYoui\neOOvqakJOp2Ot7t2d3eDpmkYjcacFsVQKITHH38cO3bsQFtbm2Ss+zPPPINTTjkFL7zwAj777DNc\ndNFF+OIXv1jy4670eqRRUYWWiiStra3w+/1l/z2xWCzDKkTSnLLuZtdoJSudTvNpgST0wGaz4cQT\nT6xJcUWqLxqNBhRF4eSTT264N0s5b47C/o5wOJx1kS3s/YlEIvD5fAiHw+jt7a1YpHgjEAwGwTAM\nxsfH0d3dzQ/artabeSmhHbFYTLJy2trailc3XI0L79pdkcfQdaw8YktsG5SiXsQWEQgMwyAQCBSU\nGiincipnCDjpORVX2euhKhaNRuF0OuFyudDT04NZs2ahp6dHXYT/D7ksiqRvbWxsDOl0GhaLhXcM\nkdEeZPbpY489hk8++QR2ux3d3d345JNP8Mknn+Cb3/wmdu7ciTlz5vDP+W9+8xv87ne/AwAYjUb8\n/Oc/R1NTE+bPn485c+bgo48+gt1u522AADA2NobzzjsPdrsdY2NjGV8nm/MURYFhGNA0DYZhpruJ\ngKLudyajhmE0KKUMLQaAqakpHD16FMuWLVPwqI5Dqigulwscx8FiscBqteZ807v0/P+dV2TlqmiV\nQ2gJxZWwcnX48GG+n6lWAi2IoBWGolgsFtXalgOyyI5EIvB4PPB6vYhEItDpdNBoNNBoNHx/h3gn\nW+keoHqGBIK4XC60tLTwNplGeX6yDWeNRCL4zpMHyvZ7O53pjH8rLbbkCC1CIWKrkgOKw+EwP1C4\np6cHNputKgJB3HMqvsmJKq/0MYvtbdUOtqg3QqEQHA4HvF4v+vr6YLPZ8tqhWZbFgw8+iJdeegla\nrRYmkwmtra1wOBzw+XwAgL6+PixYsAD33nsv/3M33ngjKIrCbbfdBpfLhdNOOw3vvfceNBoNli9f\njrfffhsAcNppp+HAgQMwmUzTQiv+7u/+DpdccgluvvlmmM1mPgzD5/PhrrvuyjhOn89X8P02KLJe\nlKrQalASiUTGwr5QYrEY/va3v2HFihWKHI8w/ZAEBFAUJSts49K2a/N+T6VEVjZxZTQa+TfCd955\nBwsXLkRra2tVxRUJZXC73Uin07y4qoWAk1on2yBh0sNBEPcACT8ndhEp21lbW1tDWYrEJBIJuFwu\nsCyb8XqvRr9ftTnjp+UJyBALLUA5sVWIyCLIFVvlFlrEGuh0OqHRaEDTNCwWS80LhGyCXSqJVeqm\nVFUsEAjA6XTC5/Oht7cXNputJoIt6oFUKgW32w2HwwGtVgu73Y7e3t6cfxuO4/C3v/0NQ0ND2L9/\nPy677DJs3rwZAwMDkt87Pj4Ol8uFpUuX8l93Op24/vrrwTAMOI7DLbfcgmuvPb5ueuCBB/DLX/4S\nAHDrrbfyse9vvfUWH8N+8cUX41//9V/R1NQEr9eLNWvWYHR0FIODg9i7dy9MJhPeeust3Hfffbj/\n/vuLut8GRRVaM5lShRbHcXjttddw1llnFX0fJLnO5XJBq9Xyi61CqyilCC0lRJYccSWsXA0PD/MN\nwmRRLewDKmf0tHAQNbEpyBW0Mx2O4xAIBOByuTKsbUajsehFTDqdnlblEC+eaiWivFRSqRTGx8fB\nMAxisRgoioLValXPPSgvtqREFkEJsVWM0ALyi61yiSxiDXQ6nQgGg6AoCjRNN9S5J6yw56qKZQt3\nyVUVEwZb6PV62O12mM3mmrc01grBYBBjY2Pw+/2wWCyw2Wx5banBYBCPPvoodu7cid7eXmzduhUX\nX3yxOvqjvlCF1kwmmUxmpN8Vw6uvvopVq1YV9DNkoK3L5YJOp4PVagVFUSX1/xQrtEoRWYWKK3Hl\nCkDGm6L4zRH4PJ5cKoyhkDc4YeJdMpnkxZUayiAP4SDh9vZ2WK1WmM3miuyASzXak3NFGGMvNdhZ\nyV3sUo6fvE6mpqbQ19cHq9Wq9qxJoKTYyiW0gNLEV
rEiS0g2waW00KoVa2CtkEqlpg15Fm/sCMNd\nUqkUAoEAYrEYrFYr7Ha7aieXSSqVAsuycDqdaG5u5qtXuc49juPw3nvv4YEHHsAbb7yBK664Aps2\nbcpI6lOpK1ShNZMRDi0ultdeew2nn3563gVnMBjkLWp6vZ63qCkRrlBJkVWquCr0zZ002UuJMY7j\npvX/CBfYxD/vcrn4hEZVXMlHaGXV6/U1O0iY47hp9kTxeSKVeEY+L8fjIZU/hmHg8/lgMBhA0/SM\nXuDKRQmxlU9kEYoVW0oILWC62FJKZCWTSX6gcD1ZA2sBjuP46sv4+Dhf/SIbPmK7s7jSXo1esVpj\namoKDocDExMToCgKNpstb+U0EAjgkUcewc6dO0HTNLZu3Yovf/nLNfd+o1Iw6sBildIgyTjiBk5y\nsSZVgNbWVlitVqxYsaImejAKEVlS4qpSaYH54snFC+zx8XFMTk4iFAohnU7zgzoNBgO0Wi2CwSBS\nqVTZFtj1TiwW48WVRqOB1WpVdAhzOWhqaiooxl6YeBaJRHLOiip04USqB6TyR9M0FixYUPWqWj1A\nxGmpyBVZQHFphEqJLEDZREIpa+DSpUsbyhpYTqSCLRYsWCD5PiG2O+cbeSC+NaLgTSaTfPWqpaUF\n/f39edNSOY7DO++8g6GhIfz1r3/FlVdeiccee0xN4ZuBqBWtBiWdTvNWgWL58MMP+TkPZKHAsizG\nx8fR1tZWkSpAoRUtOSKr0pWrUkgmk3zlKhaLoa+vDxRFoaOjY9oCW/g5WWCLKxzCSsdM2JkUDr4m\ngSDF9AnWM8JZUeLzRKrJXmxNFPZZ0jRdk5W/WiUajYJhGLhcLl6cXnrPY0XfXyFCi1CI2FJSaBGm\nBpuKrmaFw2E4nU54PB7VGlgEwWCQT6xTKthCqsouvOWqismdUVgLcBzHV6+mpqb46lW+946pqSns\n3bsXDz74IPr7+7F161ZcdNFFDSlAVVTr4IyGxMmWwqeffgqNRoNEIoHx8XF0dHTAarWit7e3Ygut\nfEJLbBvMJrTqSVyRQbikKkHEVaF9L2SBLbacRSKRnAN7690iQpKfhIOvVVtldsRN9qFQCH6/H4FA\nAOl0mu8lbG9vlxRj6gIiE+FAXI7jeGubsHJajIWwGJFFkCO2yiGyCPvu/rbsBTaxBjqdTmi1Wths\nNvT19annmUxI4qfT6axasIV4ELjw/YesS8h1pdauKSQYxOl0oq2tDf39/RnrBCnS6TQOHDiAoaEh\nvP3221i7di02btwImqYreOQqVUAVWjOZYoUWx3GYmJjgo5n1ej3mzZtXlbk3pVaz0uk0vF4vXC4X\nL65oms6I6K4lcUViW8kgXKvVio6OjrIdh3hgr3B3kpw7pNIhFmO1loiXTqcxPj4OlmURDof5UAY1\nllge5LXCMAzC4TA/247YhpPJpGSEPdnBJmlnUn1itWzNVAqykcMwDEKhECwWC2iazinuCxVbpQgt\nIL/YKqfQ+v+2np7XdkZSahs1NbCciK2VVqsVNE3XbOVeWBWTEmOkKiZOZCU3pd9/yDiPsbExBINB\n0DQNmqbz9plPTEzg4Ycfxu7duzE4OIitW7fiS1/6UkXXStFoFOeccw5isRiSySSuuOIK/OxnP8Pw\n8DDWrVsHr9eL5cuXY+fOnYr0zatkoAqtmY7cocXkIu1yueD1etHT0wOKogAAfr8fJ5xwQrkPVZJC\nhBYRWWJxZTabp80/qiVxRY41FArxx9rZ2VkTIkY4ZFMsxkgiHlk0SQmxcu+gptNp+P1+PvGut7cX\nFEWhq6urJp6/WodsqgirvDRNF/X8kb4OKTEmVT0VirF6sRKJIb2qZOaQ0WgETdPo7u6W/Xjkiq1S\nRRYhm9gqp8h68b4b+M/FtrOpqSl4vV6Ew2FoNBrodLqsQqzRZ88Vg9Ca2tXVBbvd3jDWSqmqGLnF\nYjFwHJezV0yO6
yaRSIBhGDidTnR2dsJut0+blSh1XH/9618xNDSE9957D+vWrcO3v/1tWK1WJR++\nbDiOQygUQmdnJxKJBM4++2zcfffd+Od//mdcfvnlWLduHW644QacfPLJuPHGG6tyjA2MKrRmOrmE\nFlmkulwuPjmMoqgMi0EgEMDw8DBOOumkSh42j1zb4OOv/wO8Xi+/4Ca2wFoUV0IhGAgE6locEMtZ\nNiFGkhOlrInFRpOLxYHRaITVam2YxUUlCAaDYBgmY1aYyWQq6/MnjLEXizGpSod47lwtBW5Eo1Gw\nLAuXy4XW1lbYbLaSrFlyxJZSQosgFlyVElrA59Y2hmEkrYHCiHJxD5C4p1As3GvtXCkHJNjC6XQi\nmUzCZrOBoqgZ2TdJ3n+kbmS8jXiDp6WlBbFYDG63G5FIhK9e5au8+/1+7NmzBw899BDmzp2Lbdu2\n4fzzz6+p8y0cDuPss8/Gb3/7W3zta18Dy7LQ6XR47bXXcNttt+GZZ56p9iE2GqrQmumIhxaL+5TI\nIjXbQNZEIoF33nkHp59+eiUPm0dORevWF76DdDpdV+LKbDaDoqiCdr7rFeEboViMkWjybEKMLLyE\nQSxer1eRQcIzDaE4aGlpAU3TVbEDZ0Mo2qUW2MJxB1JirNyPQ9x3ReYDKmWLzCW2lBZZBCK2KiGy\nyLw1p9OJUChUkjVQWGkXnyvCDZ5s/T/1KkjKEWzR6KTTaf5cCQaDcLvdmJqagkajgVar5auownPl\nvffeA0VRmDdvHgwGA9544w0MDQ3h4MGDuPrqq3H99dfDYrFU+6FlkEqlsHz5cnz66af4zne+g5tv\nvhlf+MIX8OmnnwIAjh07hosvvhgHDx6s8pE2HGq8+0ynqamJX+SzLIvJyUnJ+PJs6HS6kpMLi0WO\nyPr5q9+HTqfDsmXLyh7FXihiW5vJZEJ/f/+Mq7zIiSYXCjDSZ0AsZ2Twtl6vh9FoxLx589DW1oa2\ntjZVZOWBVA5YlgWAmo6zb2pqgl6vh16vR3d3t+T3CM+VaDQKv9+fsXstTjoTLq6LCXch4oBhGASD\nQVgsFixZsqQsoSpv/L/bFB1oLIdi4t8LJRQK8SMBDAYDBgYGSt5gIr07LS0t6Onpkfwecf/PxMQE\nf+6IRx6Iz5dasrKSWHGGYdDc3MzHsqvXPnk0NTUhFArB4XAgGo2CpmmcdNJJGWJbfK785S9/wSef\nfILR0VFMTEwgFovhhBNOwJlnnol0Oo3nnnsOg4ODmDt3bs2EXWi1Wrz77ruYmJjAZZddho8++qja\nh6QiQBVaDYzT6cSnn37KL/KXLl1a0BtIrbzZSPFUeCcCgQBGR0cBoKbElcvlwuTkJIxGI+x2O5Ys\nWVLTz2U10Wq16Ozs5BMVI5EIWJZFJBJBe3s7LBYLurq6+F3Jqakp3vIhXDBJVcVmYj8HCVVhGAax\nWKyh5g2JzxUx4pTNQCAAj8eT03ImFe5ChjF7vV4YDIaKbZBIia1yVbMItv8OYHJhYWmmcrlr03J8\n8sknoGkac+bMqWj1VKfT5T1X4vH4tE2ebFbWSs6KIvZoh8PBB1ucdNJJNRtsUYvEYjE4nU6wLIue\nnh7Mnj076wYOOVfa29uxf/9+jI+Pw+12Y9OmTdiwYQNMJhNYlsXo6Ch/e/3110FRFG699dYKP7Lc\nGAwGnH/++XjttdcwMTGBZDIJnU6HsbExdX5XFVGFVgNjtVphsVhKWiDodDr+xVophGIpF83NzZic\nnATLslWbDyUMEpmYmIDBYABN01i8ePGMW+QXi3CQsFarBUVRsisvchbX4mG9tbhzXQqk8iIMBZk/\nf37B4wDqHWJDzVZxkgp3mZyc5D9PJBJIJpNobm5GT08PZs2axY864DiuIudKNSpb5WLZsmU1K/A1\nGg0vmqSQmhVFhvaKkzalesWKqaCKgy1mogOiFDiOg9frhcPhQDweh81mw8qVK/O
uXbxeL3bv3o09\ne/ZgyZIl2LZtG84555yMqqHdbofdbseZZ55Z7odRMB6PB83NzTAYDIhEInj22Wfxox/9COeffz4e\nffRRrFu3Djt27MDq1aurfagzFrVHq4FRYmjxu+++iwULFpTdCy60OP749Dtzfu+TwR1Ip9PgOI6P\n8yZviCQWVqrCoVQvhzAC3+/3o6enZ9pMLpXcEFuby+UCx3H8IGGl42elQhjE81zqJcJeCOlbYxiG\nD7OhaVpdmBUAmbfGMAxSqRRomobJZJpmUYxEIojFYvziWsqaSCqoSnHGT/+97NWsruEQ/3k5qlri\nEIxGgyRtSvWKyQ3tIGMpHA7HjA+2KJZoNAqHwwG32827SLJZ1QnpdBqvvPIKhoaG8Omnn+Laa6/F\n+vXrYTabK3TUyvH+++9jw4YNSKVSSKfTWLNmDf7xH/8RR44cwbp16+Dz+XDqqafiwQcfVKuiyqOG\nYcx0lBhafOjQIVgslrJcgITiSpgWeJ3t77L+zONTQwBy2wLT6bTkkF7iz5daLAkrYlKQORssy/Li\niqIoNZChAJLJJDweT00NEhYP6xV+FEfY10IaXjgc5nte2tvbQdN0xYeR1jPivqu+vj7QNM3PC8tH\nKpWSXFiLraxSYqzQCuqXNv222IcpC6HQApQVW40usuQgDu0QnjPhcJivoLa2tsJgMKC7u1vWe5EK\npglUu90OiqLybqR6PB7s3r0be/fuxdKlS7F161asWrVKvX6qFIsqtGY6Sgit4eFhtLS0wGazKXJM\n4nAOqTlX2YIwnghsV2S3XjjzR0qIkaZ6IgDC4TCCwSB6enr4XW/1wiwPMiusXgcJEwuR1FBnYRqe\n1KBepQI74vE4nxio1Wp5S7C6EJOPMNK+nNU/qdk/4gpqc3Oz5PkiJdzLKbbEQgtQTmypQms6wmAL\nnU7Hz2wSizGhcAcab/5cKUQiETgcDng8HphMJtjt9rwW6XQ6jb/85S944IEHMDw8jOuuuw7XXXcd\nTCZThY5apYFRhZYK+F35YnE6nYjFYpgzZ07R90HEFcMwCAQCeaPYV3dukLyfJ4M7ij4GuRBbIBlC\nSnanm5qaJIWY0Jao7kIeRzhGgMwKq6VBzEoj7uUQijGh3UzKzpptB5ZU/4itjVgrVeuHfGKxGFiW\nBcuyfKR9X19fVTdJxBXUbLPnhOfJhv/nKcWPQ0pkEUoVW6rI+hzh+0kgECg41j7b/Dnh0HidTjdN\niFUitKNSkLlhDocDHMfBZrPBYrHkfWxutxu7du3CI488gpNPPhlbt27FmWeeqW6SqiiJKrRUcg8t\nloPX64Xb7cbixYsL+jlS2ieLbZPJNG0XWWrO1WXd35a8v3KKLNLv4nK5MD4+js7OTlAUhd7eXsmL\nstCbL66KiWOmhSJM6T6OWkIdJJwdYjeTEmLi8yWVSiEYDPLWSrvdLtvWpjK974rMu1K696+ciIV7\nNBrFDXc8p+jvyCW0gNLEliq0Pk+9I8EWNpstY2NRKYShHVJijPQsS1lZiw3tqBThcBgOhwPj4+Mw\nm82w2+15nRCpVAovv/wyhoaGMDo6ivXr1+Paa6+F0Wis0FGrzDBUoaUyfWhxoYRCIRw+fBinnHJK\n3u8ViytiC8wnroDPe66kqlnlEFkcxyEYDPLiqr29HVarFWazueRdwJkixDiOw9TUFFiWhc/nU0NB\nioCkVjocDkxMTKC9vR1tbW38OZRMJgEga09hLS+UKgV5DhmGwdTUFCwWS13ZU+WgpIUwn8giFCO2\nZrLIEvcN0TQNq9Va9Wt6IaEdcuys5T5Wt9sNh8OBpqYm2O12WVVolmXx4IMP4g9/+AOWL1+Obdu2\nYeXKlWr1SqXcqEJLBfzA11J+/sCBAzjjjDMk/18JcSVELLSUFlnBYBAsy/LiilSuKmmxEEaSC0VY\nNiEmXlhXG+Fz2NHRwQtU9U1NPsKeoe7ubli
tVphMJknRJOz7EVfFxBH24nOmkfs4hM8h6Z8sR9Wg\nVlBKbMkVWkDhYmsmCq1gMAin0wmv1wuz2QybzVZXoxWk7KzCQCChnVVKjCkhJEOhEMbGxuDz+dDX\n1webzZa3kp9KpfDiiy9iaGgITqcTGzZswDXXXJN1iHU5OXbsGNavXw+Xy4WmpiZs3boV3/3ud+Hz\n+bB27VqMjIxg9uzZ2Lt3r1pdayxUoaVSutACgP379+Oss87i/620uCKUq5oVCoXAsiw8Hg/a2tpA\nURT6+vpq1r8ung0lFGTJZDJjSG+lhFg4HIbL5YLb7UZrayusVmvFBWq9E41G+VAL0jOkxHNIQm+k\nkhOlAhjEyYn1JExI35XL5YJer1fsOawXShVbhYgsQiFia6YIrWQyCZfLBafTyQdbZLOaNwLikQdy\nQztyjckgNl+HwwGtViv7OWRZFr///e/x2GOP4YwzzsCWLVuwcuXKql7HGIYBwzA47bTTEAgEsHz5\ncjzxxBPYvn07TCYTbrnlFtxxxx3w+/24887c42tU6gpVaKkcv5gR+1Gx7N+/H2eccQafHidHXHEc\nx/87l7gSomQ1KxQKweVywePxoKWlhRdX1bZxKIGUECMfxUJMbE8sRIhFo1FeXJG0u76+vpqoqtUL\nZF4Yy7IAwPcMVfI5lBNhL26oF35e7cVjKpXig0GSySQoioLVaq2rvislKUVsFSO0AHli64k7r+IH\nPNeTeJdLqcEWjYzc0A5yPQmHw4hEIujt7cWsWbPyVgBTqRSef/55DA0Nwe12Y8OGDbj66qvR3d1d\niYdXMKtXr8ZNN92Em266CS+99BJomgbDMDjvvPPw8ccfV/vwVJRDFVoqpQ0tJpWrgwcPQq/X8+lx\nSoorIUKhVYzIIlUXMimdiKuZJgzyCTEAWVMTOY6D2+3mhUG5Bgk3MqlUCuPj42AYBrFYjBcGtbwg\nSyQSWQM7pJLwhEKsHNUkcd8VmXfVSH1XpVCM2CpWZBHyia3f/P3h3auwAAAgAElEQVQ5iEQiOYf1\n1msVlQRbdHZ28rHs9fQYqk0qlYLD4YDT6URTUxO6urqg0+kkQzt27NgBk8mEwcFBGI1GvPnmm9i3\nbx9WrVqFLVu2YPny5TX93I+MjOCcc87BwYMHMTAwgImJCQDHr2lGo5H/t0pDoAotlcKFlpQtMBgM\nYuHChfzukZLiSggRWoWIrEgkwldddDqdWnWRgbjnJxQKYWJiAsFgEOl0Gs3Nzejo6EBHR4dkWEct\nv8lVCzIIlwzf7u3tBU3TddWrkQupJDzyuVSEfbE9HKFQiB/IPBP6rkqhULFVqtACsostsWVQPKw3\nWxVVquenFmLJyfug0+lEIpGomWCLemNqaooP+qEoCjabLeuGEwnteP755/HKK6/g/fffh9frRTqd\nhl6vh0ajQV9fHwYHBzEwMICTTz4ZX//61yv8iHITDAZx7rnn4tZbb8Xll18Og8GQIayMRiP8fn8V\nj1BFYWS9MalXjQZHzgJFSlwNDAzwlauPP/6Yb4qVEldKWIsKEVnE0uZyuaDT6UBRFE499VRVXMlE\no9FAr9djamoKHo8HkUgEFosFJ554Itrb26cJsWAwCI/HkxG+IBW8MNOEGBkLwDAMfD4fDAYDbDYb\nlixZ0nDPgU6nQ2dnZ1bhSCLsyTkzOTkJlmUlI+zFVTGO4/h5V83NzaBpGnPmzKn6YrvWef4/bizr\nQGMpej4JyrIREvtyS0tL1nACUkUlN6/XKzl/TkqIletaHwqF4HQ6MT4+DpPJhPnz5zfMZkmlIIOZ\nnU4nWlpa0N/fj0WLFuW9JjqdTuzYsQP/+Z//ibPPPhu//vWvccopp2S4ZzweD0ZHR3H06FG+/7RW\nSCQS+Na3voVrrrkGl19+OYDjjhCGYXjroMViqfJRqlQDtaI1A5AaWpxKpab1XNE0je7u7mm2wKNH\nj2J8fBx
GoxGtra18BLWSb3b5hFY0GoXb7YbL5YJGowFFUbBYLKqlrQDI4GiXy4VgMIje3l5QFIWu\nrq6C7of48cWpieIUvGxhHfUuQsLhMFiWhdvtRnt7O2iaVlMX8yC2s4bDYb6Kmkql0NzcjM7OTrS3\nt08TY41wzpSLYDCI1d/dmff7lKhmCRGKrXIFYAjFu7iaSizQ4gAG8nkhaZviYAubzVb1wdb1Bhn1\n4XA4MDU1xVev8g1YTyQS+POf/4yhoSFMTk5i48aNWLduXd1ZhDmOw4YNG2AymfDrX/+a//rNN98M\ns9nMh2H4fD7cddddVTxSFYVRrYMqxyFDi8XiivRcSYkrYeUqnU5jamoqa/BCuWZCxWIx3hYIgBdX\n+S7eKp9Del1YlsXk5CRMJtO0v3k5fme21ESxEJMK66jFRXU8HufT7kgwiMViUa1EBUDCBBiGweTk\nZIa9UuqcIR+z9fwIZ/3U4jlTLsi5yLIs9Ho9/u9/fS3vzygttIDPxVa1kgalAhjIR2HappSlVa/X\nIxAIwOFwqMEWJZBIJPjqVVtbG/r7+/POUeQ4DseOHcOOHTvw1FNP4dxzz8XWrVtx0kkn1e3r+JVX\nXsEXv/hFLFu2jBfov/zlL3HGGWdgzZo1GB0dxeDgIPbu3QuTyVTlo1VREFVoqRyHYRiMjY0VJK4A\neT1X4plQ2YbzikVYW1ubpDUoFovxlSsAsFgsoChKFVcFUOuDhMULJKlFtViIVaO6kUwm+bS7VCrF\nB4Oo52JhCPuuuru7QdN0weeisOdHvKgmFXvxorpaQ1fLAalGO51ORKNRvmeIVPRzWQjLIbKEPPHC\nD8p6/8VC0jaF5wrpR41EItBoNGhpaUFXV5ekGFOtq9JwHIfJyUmMjY0hGAyCpmnQNJ3XXZJIJLBv\n3z4MDQ0hFAph48aNWLt2bd55WSoqNYwqtFSOMzY2Br1er5i4KgTS4Co1nJf48PV6PdLpNCKRCJqa\nmmCxWGQNLFTJJBAI8IOEu7q6QFFUXVraqinEyIKWZVmEQiE+7U49FwsjHo/D5XKBYRi+76qcs+uk\nFtVCIcZxXEb4gvjcqdXXCOkB9Hq9MJlMsNlsWa2+2cRWOYVWrYosIeQ17XA4EI/HYbPZ+BELJORF\nKpY8W2+hsE+sFjauKkU8HgfDMGBZFh0dHbLSFzmOw+joKLZv344//vGPuOCCC7BlyxYsXbp0Rj13\nKg2LKrRUjkOGFldKXMkhkUjwMeKJRALd3d1obW3NGIyYTqczYqXrZXFUSYT9Qm1tbTNikHA2y5BY\niEmdN1KLI7JDyzAMJiYmKmKvbESEsfbxeJyPta+VCqAwfEEsxoQR9lJCrJKvJ6E1UDjYWs71Tiy2\nZmo1C5gebJFLpGaDbBRKiTGhpVWqT6wRLK3Eeu5wOBAOh/nqVb7+7Hg8jqeffho7duxANBrFpk2b\ncOWVV6Ktra1CR66iUhFUoaVynMOHD6O1tTVj96la4srj8YBlWSSTSd4WmOviS4SXVPACWRyRxbRw\ncdQIdqFsCFMXybwwtV/oc4jNTOqcIb0ber0eOp0O8Xgc4XAYXV1doGkaFoulYc+bcpCr76reEG7y\niD/mSsFToh9VaA2MxWKwWq1FD2UWiq2ZVs0iwRYMw0Cj0cBut5c12EJ4rRGLsWzDwOthszAej8Pp\ndIJlWXR1dcFut2fMz5SC4zgMDw9jx44d+NOf/oQLL7wQW7ZsacgUVhWV/0EVWirH+fnPf47nn38e\nfr8fWq0Wdrudn0Uxe/ZszJ49G4ODg+jq6lL8gkjElcvlQiKRQF9fHyiKUsyKJWUXIrNaiF0omxCr\np4s/sWK5XC40NTWpqYtFEo1GefuLTqdDV1cXmpub+QqZUIhJhXUUkmbWyITDYTAMA7fbja6uLths\ntprpASwXQhu0+COxmWWztGYbexAIBOB0OuHz+YquukjxpU2/nTHVLFKRF
ife1UKwBcdxWWfQxWIx\nRWfQKXGsPp8PDoeD7wOkaTrvMcTjcfzxj3/E9u3bkUwmsWnTJlxxxRUVf/43btyI//qv/4LFYsHB\ngwcBAD6fD2vXrsXIyAhmz56NvXv3wmg0VvS4VBoaVWipZEIu+qOjoxgeHuZvR48exfDwMILBIPR6\nPWbNmpUhxAYHBzE4OIi2tjZZCykSIuByuRCLxXhxVenIVvJ4pfrDhA30UkEdtbCgFlYAU6kUXwGs\nhQVEPZFIJHiRynEcrFYr36MhhXiXWnj+zGQhRhLGWJaFVqvlK4CNbFMtBHGEvZTNrKWlBXq9HolE\nAqFQCC0tLXzPkNLP4zcv+JWi9yem2kIrFovxmyadnZ11K/bFM+iEn6dSKQDIWklVok8sFovx1aue\nnh709/eju7s7589wHIfPPvsMO3bswDPPPIMvf/nL2LJli6x5WeXiv//7v9HZ2Yn169fzQuuHP/wh\nTCYTH6/u9/tx5513VuX4VBoSVWipFAbHcYhGo7zwIreRkRGMjo4iGo2itbV1mgAbHBxET08Pnnji\nCTz99NP4/ve/zy8eatlCRBroxQvqSCQybUEtXlSXqxE6lUrxIjUajfLiSg1jKAxhv1AsFuP7hZQQ\nqeIEPOH5IzxvpHrE6k2IkWHmTqezJvuu6oV0Og2Px8NXC0hPKjmPpM6bUvt9yim0qiWyxMEWJH2x\nkYfVCwfIi8WY+LwRV8ayWeg5joPX68XY2BgSiQT/fp2vehWLxfDUU09h+/btAIDNmzfj8ssvr5nN\nv5GREVx66aW80DrhhBPw0ksv8QODzzvvPHz88cdVPkqVBkIVWirKwnEcgsEgRkZGMDw8jI8++ggv\nv/wy3n33XUSjUcyfPx/t7e2w2WwYGBjA4OAgb0202WzQarV1tcjM1uuTK/2O3LJZhaQQJ92RCP5a\nFqm1CLG+sCyLqampqvULZYsizybEaq2SKg4HIcPMlbC0zTSE1sB8z6PUxo/QZgZ8HmEvPneyLajL\nJbYqLbSUCLZoVLKdN8LETdLLrNPpEIlEEAwGYTAYMDAwgJ6enrz3f/jwYezYsQPPPvssvvrVr2LL\nli1YuHBh1a9VYsRCy2AwYGJiAsDxx2E0Gvl/q6gogCq0VMpDKpXCddddhw8//BBf+9rXsHbtWixb\ntgwAMDExwVfCjhw5gpGREYyMjMDpdCKVSsFsNvNVMGGPGEVRVQnoKIVsg3nJMGcAOWeIVXqQcCPC\ncRwfge3z+WAwGGC1WvPGDlcT4cIo20yoagznJX1XHo8HnZ2doGkaJpOpZp/HWoWkBjIMg9bWVths\nNkXGLAit0FJCTLigFoqxTVcOKfTIjlMpkZVKpeByueB0OisSbNGopNNpuFwuvnrV3d2d0ZdK+sRa\nWlrwwgsvIBKJYHBwEP39/fjss8/wyCOPQK/XY9OmTbjssstqupqdS2gBgNFohN/vr9bhqTQeqtBS\nKR+ffPIJFixYUNAijNiQjhw5kmFLHBkZgdvtBsdxoCgqw5JIqmImk6nu3mDFPRvhcBhTU1MIBoNI\nJBLQarXo6OhAd3d3RjVMHZaZH2GsfXt7O2iarsuZYVJks7SKh/NKjTwoVIiR/jWGYdS+qxIQWyxL\nSQ0sBXHwAvn4j9/7s2K/o5xCi1RTnU4nJicnayrYot6IRCJwOBzweDwwmUyw2+1Zq/sk6OX111/H\nK6+8gnfeeQculwt+vx+dnZ1obW0FTdMZ78urV6+uub+Lah1UqTCq0FKpHziOQzqdBsuy+Oyzz3h7\nIgnr8Pl80Gg00y72pCJWq5UgYrcUDhK2Wq0wmUwAIGkvI8MyxWlUwoX1TFwIk0qBy+WCVquF1Wqd\nkbH2ci1mUudNS0sLOI7j+9ei0aii/WszDbE10Gaz1azlVykbYTmElnAYbnt7O+x2e10GW1QbYS8g\nx3Gw2WyyNk4ikQieeOIJ/P73v0dLS
wu2bNmC1atX8xsFqVQKLMvi6NGj/O173/tezV0zxELr5ptv\nhtls5sMwfD4f7rrrriofpUoDoQotlcaB2GbGxsam2RKHh4cRCASg1+vR39+f0R9GBFl7e3tF37RD\noRBYloXH40F7ezusVivMZnNBAkmcRiUUYsTuIWVLrOX5LIVCEiwZhkEymeQTA2vZvlJtslXEAoEA\nwuEwUqkU9Ho9uru70dXVVddjD6qBUBQoaQ0sN0oIrR/f/sWMYeDCj4UGBIlnh82EYItyEQ6H4XA4\nMD4+DrPZDLvdnjfll+M4HDp0CNu3b8dLL72Er3/969i8eTPmzp1bl9eAq666Ci+99BLGx8dBURR+\n9rOf4Zvf/CbWrFmD0dFRDA4OYu/evfwmp4qKAqhCS2XmQPqljh49ipGREV6IDQ8PY3R0FJFIBC0t\nLbwII0KM/FuJEIJoNMrb2Zqbm2G1WtHX11e2iotwmLO4Iibu1xDaEmt9mLM4HKSvrw80TavJi0UQ\niUT4eVcdHR2gaRpGo5EX8eLzRxy6UO/z55RCbA2sV1FQqth67Lnv87094oqqMCBIai4UucYKRYHR\naITdbleDLYognU7D7XbD4XCgqalJdg9bOBzGY489hp07d6KjowNbtmzBN77xjbo7l1VUagBVaKmo\nEDiOQzgc5vvCSJ/Y0aNHcezYMcRiMXR2dk6zJQ4ODsJut2dNEfT7/bw1UKPRgKKonDOaKom4cV6Y\nRiUc5iwlxCq9mBYn3anhIMVD+q5YlkVTUxPfdyVX8MudPydla20kIUaCVpxOJ/x+f81bA+VSrNiS\nYxmUmkFH+lPD4TASiQQ0Gg06OzthNBrR3t4+40V8oYRCIYyNjcHn86Gvrw82my3vJhTHcfjwww/x\nwAMP4C9/+QtWr16NzZs3Y/bs2epzrqJSPKrQUlGRC1noSwkxh8OBZDIJk8mEgYEBWK1W+Hw+vP32\n29BoNNixYwdomq45v3o+EonENFtirqpGOZLviEj1eDzo7u7m+9fUN//CIFVAhmEQiUTK2neVTYgR\nEQ9ICzFS3aj1v20sFuMHM7e1tTVU0AqhGLFVaG8Wx3GYmpqCw+HA5OQkLBYLaJrmI8az9ReKN4CE\nw3ob6W9QCMIERq1WC7vdjt7e3rzPRygUwh/+8Afs3LkTBoMBW7Zswde+9rWa2AhUUWkAVKGloqIU\nwWAQDz30EHbt2oWxsTGccMIJoCgKDocDLpcLHMfBYrFk9IeR+Ho5b4i1BunzkeoPk4ogF97y9WoI\nLZZ6vR40TaO3t3dGBnyUAlnIMgwDv98Pk8nEz2mqppiRiiEXnjvZqqm5BqyWGxIi4HQ6kUgk6tYa\nWAiFii25QkscbGGz2QraPBFed8Qinliis4n4RruGBAIBOBwO+P1+WCwW2Gw2tLW15fwZjuNw8OBB\nPPDAA9i/fz8uu+wybN68GQMDAxU6ahWVGYMqtFRUlCCVSuGCCy7Aueeei3Xr1mHJkiXTvockJpJK\nmDCow+v18hYuYV8YsSbW8synbIhnQQkX02Qor7BXo7m5GeFwmJ9pQtN0zVgs6w2pvqt6G38gFvHC\nBTXHcdNSE/MN5i0GsTWwWgOuq0UhQiufyCJJlpUIthBH2As/JyFBUj1ira2tdZFQShL+nE4nmpub\n+epVvveIYDDIV6/MZjO2bNmCSy65pC4es4pKnaIKLRXl+OlPf4onn3wSGo0GFosF27dvh81mq/Zh\n1QUcxyGVSmFsbCzDlkjE2NTUFHQ6Hex2+7RBzoODg+jo6KhLIRaJRHhbYCwW4y2H6XQawHEhJq6G\nkfQylUwSiQTcbjcYhimq76rekKpqCAfzStnL5AqxWCwGhmHgcrnQ1tbGV1zqSagqhVyxlU1ohcNh\nOJ1OeDyemgm2EKe1Cj+mUik0NTVNE2C1cO0hNsuJiQnZ88M4jsP777+PBx54AK+//jq+9a1vYdOm\nT
Zg1a1aFjlpFZUajCi0V5ZiamkJ3dzcA4J577sGHH36I++67r8pH1RiQBvLR0dFp/WFHjx5FKBSC\nXq/PSEwU3mqpiZzjOPh8PrAsi6mpqaxVApISKa6GRSIRJJPJaYsh4YKoUcWFGGHfVTgcBkVRddkL\nWA7y2ct0Ot20kI5wOAyPx4NUKsUPFFZFvTyxJRRapF+IiH6bzYa+vr66se2JB8mLrz3A55tAYjFW\naIR9PpLJJF+9amlpQX9/vyybZSAQwKOPPoqdO3eCoihs3boVX/nKV2rm2rhv3z5897vfRSqVwubN\nm3HLLbdU+5BUVMqBKrRUysPtt9+O0dFR/Pa3v632ocwISHVIOMSZVMRGR0cRj8fR3t4+rRo2MDCA\nWbNmZU1MVPL4AoEAGIaBz+eDwWCA1WotyRIpXAyJhZhwV1pqjli9LPikENvZaqXvqt4gtlafzweP\nx4NQKMQvkjUaDT/6QCo1Ua1sZfLECz+QDLaQ0y9Ujwg3gcRCjETY6/V6SWuinE0v8lyOjY0hEAjw\n1at8swE5jsM777yD7du3480338SVV16JjRs3wm63K/bYlSCVSmHhwoV49tln0d/fj5UrV+Khhx6S\ntNyrqNQ5qtBSUZZbb70Vv//979HT04MXX3wRfX191T4kFXz+xk2EmHCYs8PhQCKRgMFg4CtgQjFG\n0zQ0Gk1Ri/hwOMyHWrS3t1c0nS2dTkv2+EQikWl9GuIFdS0upInN0uVyVfy5bDSkrIFmsznjHM83\n+kBqBl2jJ99lE1t3D13JJzDa7fYZnwpKHAhSFdVc4w90Oh38fj8YhkFbWxv6+/thNBrzPpdTU1PY\nu3cvHnzwQfT392Pr1q246KKLanZD6bXXXsNtt92GZ555BsDxjVkA+PGPf1zNw1JRKQeq0FIpjAsv\nvBAsy077+i9+8QusXr2a//ftt9+OaDSKn/3sZ5U8PJUiIXY+IsKE/WEMw4DjOPT29koKMfEATKfT\niXfffZe3Clmt1prsFco2kJc0zAuTy8QWoUotpJPJJD/viuM4vu9KtbMVjjA1MJlMlhy2IhZi4rAO\nsRCrxvmjNFJC6x/uOAdWqxU0TavnpUzEia1+vx9erxfxeBxarRbNzc3ThFgqlUIgEMC8efPQ2tqK\ndDqNt99+G0NDQ3j77bexZs0abNy4ETRNV/vh5eXRRx/Fvn37cP/99wMAdu7ciTfeeAP33ntvlY9M\nRUVxZAmt2lodqVSV5557Ttb3XXPNNbjkkktUoVUnNDU1wWw2w2w2Y8WKFdP+P51Ow+128yJseHgY\nr776KkZGRuDxeJBOp9HU1IRkMolkMomvfvWruOKKKzB79mwYDIaaXFhqtVp0dHSgo6ND8v9TqVSG\nLdHr9UoupMVCrFRrWTqdhs/nA8MwCIVCoCgKS5YsaUgLVrkhlVyn04mJiQn09vZi4cKFWf/mhaDT\n6dDV1ZU12EGYfJfr/KknIbb7v/4Xrr703zK+dvrpp1fpaOoXUqEi1avOzk4sWbIkw0pNhBg5hz74\n4APce++9YBiG/1pLSwvOOuss3HDDDZg7dy4mJydhMBjUa4WKSp2hCi0VWRw+fBgLFiwAADz55JNY\ntGhRlY9IRSk0Gg0fDnDWWWcBOL4QePbZZ7Fr1y4cOnQI5513Hk499VSkUikMDw/j/vvvx9GjRzEx\nMcEP0BTH1s+ePRudnZ01aTPSarXo7OzMGuUtDlvweDySqXdSQkz8eMU9bEajEQMDA+ju7q7J56bW\nIdZAlmXR0dEBm82GRYsWVfS51Ol0Oc8fcQS5lBATi7BqCLFUKgW32w2n0wkA+O2ua3DjNbsq9vsb\nCY7j4Pf74XA4EA6HQdM0li9fLlkJJFWtrq4ufjTIwMAAfD4fNmzYgDVr1iAWi+Ho0aMYGRnBiy++\nyPfkXnrppfiHf/iHKjxCedjtdhw7doz/99jYWM31kamoVBLVOqg
ii29961v4+OOPodFoMDg4iPvu\nu0+9eDYwLMvijjvuwDXXXIMVK1ZkXcSS4bSjo6PT+sNGRkb4xMRZs2Zl2BLJx9bW1roTG8KBvFLx\n48DxhZRer0c8Hkc4HOZ7hSiKqtneilomlUrx1sBUKlX3c9jEQkzYIya2too/lirExJVAqWCLb17w\nK9kDimc68XgcTqcTLMuiq6sLdrsdPT09ea9rExMT2LNnDx566CHMmTMH27Ztw/nnn1+zFU+5JJNJ\nLFy4EM8//zzsdjtWrlyJ3bt348QTT6z2oamoKI3ao6WiUiw333wznnrqKej1esybNw9DQ0MwGAzV\nPqy6g+M4RKNRHD16dJoQGx0dRTQaRVtbW0Z0PamIzZo1S/E45XJD+q6IIOju7kZLSwvi8TgikQg/\nzJmklokrGvX2eMuJWBD09fWBpmlFrIG1jtDaKiXENBqNZH9hLiEWj8fBsiwfxqAGWxQP6Xt1OByI\nRqOgaRo0TeftVU2n03jjjTcwNDSEgwcP4uqrr8b1118Pi8VSoSOvDE8//TS+973vIZVKYePGjbj1\n1lurfUgqKuVAFVoqKsXy5z//GRdccAF0Oh1+9KMfAQDuvPPOKh9V48FxHILBIB/QIZwhNjY2hkQi\nge7u7mkzxObMmQOapqHVaqu+UOQ4jp93FQqFYLFYQNN01l4KklomVREj8dHiOT7kVu6o/logGo2C\nZdkMa6AqCDIhQkyqKiYUYsKghUQiAZvNBpvNBr1eX+2HUJfEYjG+etXT04P+/n5+vmQufD4fHnro\nIezZswcLFizAtm3bcO6559Z99UpFZYajCi0VFSV4/PHH8eijj2LXLrV3odKQvgcpIeZ0OpFOp2E2\nmyWFmMViKetChvRdeb1eGI1G0DStSN+V1BwfciMDVbPNEKu19Ee5NJo1sNoEg0GMjo7C6/Xy5wcZ\niSA1/kBYFVOtrZlwHIfx8XF+VAaxAMupXr322msYGhrCoUOHcM0112DDhg3qWBQVlcZBFVoqKkrw\n9a9/HWvXrsW1115b7UNREZFOpzE+Po4jR45k2BKHh4fhdrvR1NQEiqIy+sOIGDOZTAULMVJtcblc\naG1tBU3T6O3trejOtHCYs1iIiYc5iytitbSI5jgOk5OTYBhmxlkDy4E42MJms8FisUj+zfONP1CF\n2PHXusPhgNvthtFohN1uz5pCKcTr9fLVq8WLF2PLli0455xz1OqVikrjoQotFZVcyJkb9otf/AJv\nvfUWHnvsMdW6VGdwHMcnen322Wd8VYx89Pv90Gg0sNls0+aHDQ4O8tWpyclJ7N69G4ODg7wYqOVq\ni3CYs3gxnUqlMhbR4sV0JRbR0WiUHyisWgNLgyRaOhwOXqza7faSI8CFQkx8DomFWDXOoXJBNm4c\nDgeSySTsdrusAJt0Oo1XX30VQ0NDOHz4MK699lqsX78eZrO5QkeuoqJSBVShpaJSCtu3b8e///u/\n4/nnn0d7e3u1D0dFYUh64LFjx6bZEo8cOQKWZREOh6HX67FkyRKcffbZWLx4MR/Y0dbWVpfigCyi\nxdWwXEELpUaPC62B6XSaF6v1anWsNiTYgmVZtLa2wmazwWw2V+x8lDqHyOfZhBj5WItCLBKJwOFw\nwOPxwGQywW63Z43uFzI+Po7du3fj4YcfxtKlS7F161asWrVKrV6pqMwMVKGlolIs+/btw/e//328\n/PLLqqd+BnH48GHcf//9+NOf/oRVq1Zh7dq1oCgKo6OjvBAjiYmRSAStra3T+sNIdUyv19etEJMK\n6sg2jJfcxMOciTXQ6XRicnISfX19sNls6qZFkQiT7iKRCGiahtVqrclgC7EQE55HUkJMeC5VSoil\n02l4PB44HA5wHJfTain+uVdeeQUPPPAAjhw5guuuuw7XXXcdTCZTRY47F4888ghuu+02HDp0CG++\n+WbGgPrbb78d//Ef/wGtVot
77rkHX/nKV6p4pCoqDYEqtFRUimX+/PmIxWK89eMLX/gC7rvvviof\nlUq5efHFFzExMYFLLrkELS0tOb+X4ziEQiHejkhuIyMjOHbsGBKJBDo7O6fZEgcGBtDf318TiYnF\nIJwBJRRhRIhpNBq+j6ytrQ0WiwUURdXlzLRaIBKJwOl0wu12w2AwwGaz1f2w62xCTGxvlbInlirE\nQqEQnE4nxsfHYTabYbfbZfUFut1u7Nq1C4888ghOPvlkbFRITxsAABN8SURBVN26FWeeeWZNVa8O\nHToEjUaDbdu24Ve/+hUvtD788ENcddVVePPNN+F0OnHhhf+nvTuPieps2wB+HRhmENnJDDIzCFiq\nYhUVN1xqNEWNaDVWU7cWjbikSVtTU1NN00RNLWmaNNbYxJgooLY2jUuxLtSlaqNttaZ1j5VURpgF\nGBEQkGG28/7hd843A6OgIgPM9UuMwgF9jqKea+7nue8s3Llzp0tWF4m6EQYtImq/p70aSs9Oquh4\nhjApiEnd9WJiYuStiImJiUhOTvbqmNhdHqalRgwWiwVOpxNxcXEICwvzamPvOcy5ZTUsNDQUKpWq\n29zvy/YsjS16Is9zhr6CmGfDl5bbE31tR3W73aiqqoLJZIIgCNDpdFCr1W2GJJfLhd9++w27du1C\nWVkZcnJy8M477yAmJuZl3XqHmDRpklfQysvLAwCsX78eADBt2jRs2LABY8eO9dsaiXqAdv2HxQ3y\nRAQAGDx4MA4ePIhVq1b5eyk9giAIiI6OxvDhwzF8+PBW191uN6qrq+VBzqWlpbh8+TJKS0tRWVkJ\nURSh0WhabUtMTk5GXFyc319Jb7k1UKPRYODAgU/dGiiKIhwOh9dDdG1tLWw2mxzElEplqyAWKMOc\npQHNNTU1UKvVSEtLC8itlkFBQQgLC3vivbds+FJXV4fKyspWnTcVCoX8dRYXF4f+/fu3q3NgZWUl\n9u7di/379yMjIwNr167F6NGj/f537nmZTCZkZmbKb+v1ephMJj+uiChwMGgREQAgLS3N30sIKEFB\nQVCr1VCr1Rg9enSr61LHRCmElZaW4ty5cygtLUV1dTWCgoKQkJDQqnV9cnIyoqKiXloo8ewaGB4e\nDq1Wi7S0tHb9eoIgQKlUQqlU+hz0KgUxz22JDx48gM1mg91uB/B4mLOvilh3DWIOhwMWiwUVFRVQ\nqVTQ6XQYMGBAt7yXzvK0IOZyuWCxWOQgERkZiaioKDQ3N+P27dteQezChQuoq6tDSkoKUlJSUFlZ\niR9//BEmkwk5OTk4d+4coqOjO/v2nqo93XKJqOtg0CIi6oKk1vNarRYTJkzwuiaKIlwuF4xGoxzC\nbt26hWPHjsFgMODhw4dQKBTQ6/U+Z4j17t37mR7kPbeyiaKIhIQEjBw5ssO7BnoGsaioqFbXpWHO\nUjWjsbER1dXVaGpqgsPhAPA4iLWshklBrKto2diiT58+GDZsWJdsbNFdSG3ua2pqoNFokJ6e/sQ2\n91JFrK6uDr///jsOHDgAo9EIq9WKyMhIREZG4syZMygtLZX/3owbN65LtGs/derUM3+OTqdDeXm5\n/LbRaIROp+vIZRHREzBoEQUQvhraMwiCAIVCITfYmDx5std1URRht9tRVlYmb028fPky9u/fj3v3\n7slt66UA5hnE+vbtC5VKBVEUcfr0aVy9ehXjx4+HRqPx+1Y2qRIRGhrqs9IgBTGpGlZfXw+r1Yqm\npiY4nU6vz295vqczWs23bGzhOa+Nnp3L5UJFRQXMZjNCQkLaXQ0URRHnz59Hfn4+qqqqsGTJEixa\ntEiustrtdhiNRnkA+l9//YV+/fp1iaD1PGbNmoVFixZhzZo1MJvNKCkp8VlFJ6KOx2YYROSl5UFq\n6nlEUcSjR49w7949OYhJD5UlJSWoqqoCAPTt2xdDhw5FRkaGXA3T6/VQKBTdMhxI3RBbdkxse
bbH\n1xyx521E4TlDTKoGtmcILj3Zw4cP5SHN8fHx0Gq1CA0NbfPzzGYz9uzZg0OHDmHs2LFYsWIFRowY\n0S2/ln05dOgQPvjgA1itVkRHR2PYsGH45ZdfADx+MW3Xrl1QKBTYsmULpk+f7ufVEnV77DpIRM+O\nQSvwuN1u7Nu3D4WFhXC5XHj33XcxZcoUWK1WeZizFMRMJhOcTieio6O9KmJJSUlISUlBnz59ulXH\nRE9P6nbna/5TyzDWslGC51Y2zhB7cU6nU65eqVQq6PV6xMbGtvl15nQ6cerUKeTn56O6uhpLly7F\nwoUL29UUg4joKRi0iKj9nvZqKPVsoihi69atmDVrFlJSUtr18Q8ePPBq1FFaWop79+6hoqICbrcb\narVa3oooNelISkpqV1vtrkqa/+Sr7bjb7ZYf+pubm6FUKhEfHw+NRuMziFHbRFHEw4cPYTQaUV9f\nL1ev2ppxBzzutFdYWIiioiKMHz8eK1euxPDhw7vlCwBE1CUxaBER+VJcXIzVq1fD5XJh+fLlWLdu\nnb+X1KNIc4vu3r3rVQ0zGAy4f/8+BEFAfHy8VzVMCmIxMTHd6mFYCp1msxmNjY2IjY1FRESEVxt7\naZhzcHCwz2qYSqViEPMgdWK0WCzo1asX9Hp9u74unE4nTpw4gfz8fNTW1mLZsmVYsGBBuwYSExE9\nIwYtIqKWXC4X+vfvj5MnT0Kv12PUqFHYt28fBg0a5O+lBQRRFOF2u2EymeSKmMFgkL+vq6uDQqGA\nVqttNT8sKSkJ4eHhXSKIeTa2iIqKgk6na7OxRcvw5TnMWRRFKBSKJwaxrnDPL5M0l81oNKKhoQEJ\nCQlISEhosxOjKIowGo0oKCjAkSNHMHHiRKxcuRLp6ek9/veMiPyKQYuIqKU//vgDGzZskLdF5uXl\nAQDWr1/vz2XR/5FmaZWVlcnnw6RtiQaDAY2NjXLHRM9qmPTj0NDQl/aALVXqzGYz3G43tFpthzW2\nEEURTqfT5/kwaZhzSEiIz0YdSqWy24YKu90uV6/Cw8Oh0+kQHR3d5v04HA4UFxcjPz8fjY2NWLZs\nGebPn89zcETUWdr1jy7buxNRQDGZTEhMTJTf1uv1uHjxoh9XRJ6kWVqpqalITU1tdV0URdhsNnkr\n4t27d3H+/Hncu3cPZWVlsNlsCAsLk4OY1AK/b9++SExMfK7Bxi0bWwwcOLDDH+gFQUBISAhCQkLa\nHOYshbDa2lo0NTXJw5yVSqXPilhXG+YsiiJqampgMpnw6NEjJCQkYMSIEW3OOhNFEWVlZSgoKMDR\no0cxefJkfPXVVxg8eHCXuj8iIgmDFhERdRuCIKBXr15IS0tDWlpaq+uiKKKhocGrSceJEydgMBhg\nNBrhdDoRGRkpV8ISExORnJwsd0wMDg6GIAiwWq3Ys2cPxo4di7CwMGi1WvTv399vZ6naM8zZbrfL\nVbBHjx49dZizZ0Wss9r12+12mM1mVFRUICIiAomJiYiKimrz17bb7Th+/DgKCgrQ1NSE3NxcbNiw\n4YkDiTvT2rVr8fPPP0OpVOKVV15Bfn6+POMtLy8PO3fuRHBwMLZu3Ypp06b5ebVE1NkYtIgooOh0\nOpSXl8tvG41G6HQ6P66IOpIgCIiIiEB6ejrS09NbXZeqKZ5BrKioCAaDAWazGfX19XC73QCA4cOH\nQ6PRIDU1FS6Xq7Nv5ZkIggCVSgWVStXmMGebzYbGxkbcv38fNptNDmJPmiH2IsOcpWYhRqMRzc3N\nSEhIwMiRI9v8OUVRhMFgQEFBAY4fP46srCx8/fXXGDRoUJeqXk2ZMgV5eXlQKBT45JNPkJeXhy+/\n/BK3bt3CDz/8gJs3b8JsNiMrKwt37tzh/DSiAMMzWkQUUJxOJ/r374/Tp09Dp9Nh1KhR+P777/Ha\na6/5e2nkJ1IzhQMHDmDkyJGYM2cOoqOj5SYd0mBnq9UKA
IiPj/dq0iFtUYyJiem23QM9hzl7ng/z\nNcy55SwxX+GhublZrl5FRUVBr9f73BLZkt1ux9GjR1FYWAiHw4Hc3FzMmzevXQOJ/e3QoUPYv38/\nvvvuu1ZnP6dNm4YNGzZg7Nix/lwiEXUcntEiImpJoVBg27ZtmDZtGlwuF5YtW8aQFeAuXLiAxMRE\nnD9/3qsV+Lhx47w+TuqYaLFY5CYdJSUlOHnyJAwGA2pqahAcHCx3TJQCmBTIIiIiulQ1xlNQUJAc\nnHzxHOZss9lQV1eHiooK2Gw2uFwueZizKIp49OgRRFFEnz59kJGR0ebcK1EUcffuXRQWFqK4uBhT\np07FN998g4EDB3bZ3y9fdu3ahfnz5wN4fBY0MzNTvqbX62Eymfy1NCLyEwYtIgo42dnZyM7O9vcy\nOt2yZctw5MgRaDQa3Lhxw9/L6TKkh+O2CIKA4OBg6PV66PV6TJw40eu61DmwvLxc3pZ47do1HD58\nGAaDAQ0NDVAqldDr9a2CWFJSEnr16tVlg0VQUBDCwsJ8NgGx2WwoLy9HVVUVevfuDbVaLZ+Vu3r1\nKtxutxzkDh48iIiICKSkpCA5ORl37tzB3r17IYoili9fjs8//7zLVa+ysrJQUVHR6v2bN2/G7Nmz\n5R8rFAosXry4s5dHRF0YgxYRUYBYunQp3n//feTk5Ph7KT2S1DmwX79+6NevX6vrUsfEsrIyOYj9\n+eef2LdvH8rLy2Gz2aBSqVoFMCmUdaXugW63G/fv34fJZILT6YROp0NmZuYTzyC5XC40NTVh6NCh\nuHz5Ms6ePQuz2YyamhrExMQgLi4OxcXFuH37ttycZMyYMV2iXfupU6eeel2a4XX69Gn5z4dnQYkI\n4BktIqKAYjAYMHPmTFa0uiBRFNHY2CifDbt7967cxr68vBwOhwMRERHyzDBpS2JycjK0Wq3cMfFl\nampqgslkgtVqRWxsLHQ6HcLDw9v8PJvNhsOHD6OwsBAKhQK5ubmYM2eOvK2wvr5evlfp/j/++GNo\ntdqXej8vqri4GGvWrMG5c+egVqvl99+8eROLFi3CpUuXYDab8cYbb6CkpITNMIh6Dg4sJiIibwxa\n3Zcoiqirq5MDmGcQM5vNcLlciI2N9aqCSUEsPj4egiA8VxBzu92wWq0wmUwQRRFarRYajabN0CCK\nIu7cuYOCggKcPn0aM2bMwPLly5GamtplKnMvKjU1Fc3NzYiLiwMAZGZmYvv27QAebyfctWsXFAoF\ntmzZgunTp/tzqUTUsRi0iIjIG4NWz+V2u1FdXS036igtLZWDWFVVFURRhEaj8Xk+LC4urlXHxOvX\nr8Nut8NutyMuLg46nc6rWciTNDU1oaioCLt374ZSqcSKFSswe/ZsKJXKl3XrRESdjV0HiYiIAkVQ\nUBDUajXUajXGjBnT6rrb7UZFRQX+++8/eXvemTNnYDAY8ODBAwiCAI1GA+Bx1zyFQoGPPvoIWVlZ\niI6OfmoVShRF3L59GwUFBThz5gzefPNN7Ny5E/369esx1SsiomfFihYRUQBhRYt8uXXrFnbs2IHi\n4mKMHDkSQ4YMQUNDgxzI6uvrERISAr1e73VGrE+fPrh27Rr27duH3r17Y8WKFZg1axZCQkL8fUtE\nRC8Ttw4SEdH/W7hwIc6ePYv79+8jPj4eGzduRG5urr+XRX52/fp1rFu3DitWrMCMGTN8hiRRFNHc\n3IyysjKv82Hnzp1DamoqNm3ahOTkZFaviChQMGgREREBQHl5OXJyclBZWQlBELBy5UqsXr3a38si\nIqLuiUGLiIgIACwWCywWCzIyMlBfX48RI0bgp59+wqBBg/y9NCIi6n7aFbSC2v4QIiKi7i0hIQEZ\nGRkAgIiICKSlpcFkMvl5VfS8PvvsM6Snp2PYsGGYOnUqzGYzgMdbHD/88EOkpqYiPT0df//9t59X\nSkSBjEGLiIgCisFgw
D///OOzMx91D2vXrsW1a9dw5coVzJw5E5s2bQIAHD9+HCUlJSgpKcGOHTvw\n3nvv+XmlRBTIGLSIiChgNDQ0YO7cudiyZQsiIyP9vRx6Tp5/do2NjXITjqKiIuTk5EAQBGRmZqK2\nthYWi8VfyySiAMc5WkREFBAcDgfmzp2LxYsX46233vL3cugFffrpp9i9ezeioqJw5swZAI/nfyUm\nJsofo9frYTKZkJCQ4K9lElEAY0WLiIh6PFEUkZubi7S0NKxZs8bfy6F2yMrKwuDBg1t9KyoqAgBs\n3rwZ5eXlWLx4MbZt2+bn1RIRtcaug0RE1OOdP38er7/+OoYMGYKgoMevMX7xxRfIzs7288roRZWV\nlSE7Oxs3btzAqlWrMGnSJCxcuBAAMGDAAJw9e5YVLSLqaO3qOsitg0RE1ONNmDABz/jCYo9is9kw\nceJENDc3w+l0Yt68edi4caO/l/XcSkpK8OqrrwJ4fC5r4MCBAIBZs2Zh27ZtWLBgAS5evIioqCiG\nLCLyGwYtIiKiHk6lUuHXX39FeHg4HA4HJkyYgOnTpyMzM9PfS3su69atw7///ougoCAkJSVh+/bt\nAIDs7GwcO3YMqampCAsLQ35+vp9XSkSBjEGLiIiohxMEAeHh4QAeNwVxOBxyp77u6MCBAz7fLwgC\nvv32205eDRGRb2yGQUREFABcLheGDRsGjUaDKVOmcI4YEdFLxqBFREQUAIKDg3HlyhUYjUZcunQJ\nN27c8PeSiIh6NAYtIiKiABIdHY3JkyejuLjY30shIurRGLSIiIh6OKvVitraWgBAU1MTTp48KXfq\nIyKil4PNMIiIiHo4i8WCJUuWwOVywe124+2338bMmTP9vSwioh6NA4uJiIiIiIjar11tW7l1kIiI\niIiIqIMxaBEREREREXWwZz2j1X2nGxIREREREXUSVrSIiIiIiIg6GIMWERERERFRB2PQIiIiIiIi\n6mAMWkRERERERB2MQYuIiIiIiKiDMWgRERERERF1MAYtIiIiIiKiDsagRURERERE1MEYtIiIiIiI\niDoYgxYREREREVEH+x9VYm9TgFriOAAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "'''\n", + "======================\n", + "3D surface (color map)\n", + "======================\n", + "\n", + "Demonstrates plotting a 3D surface colored with the coolwarm color map.\n", + "The surface is made opaque by using antialiased=False.\n", + "\n", + "Also demonstrates using the LinearLocator and custom formatting for the\n", + "z axis tick labels.\n", + "\n", + "https://matplotlib.org/mpl_toolkits/mplot3d/tutorial.html#d-plots-in-3d\n", + "\n", + "'''\n", + "\n", + "from mpl_toolkits.mplot3d import Axes3D\n", + "import matplotlib.pyplot as plt\n", + "from matplotlib import cm\n", + "from matplotlib.ticker import LinearLocator, FormatStrFormatter\n", + "import numpy as np\n", + "\n", + "\n", + "fig = plt.figure(figsize=(15,6))\n", + "ax = 
fig.gca(projection='3d')\n", + "\n", + "# Make data.\n", + "x_lim = [-3.0, 3.0]\n", + "y_lim = [i * 10.0 for i in x_lim]\n", + "x = np.arange(x_lim[0], x_lim[1], 0.25)\n", + "y = np.arange(y_lim[0], y_lim[1], 0.25 * 10.0)\n", + "X, Y = np.meshgrid(x, y)\n", + "Z = (1 + X**2) * (100 - Y**2)\n", + "\n", + "# Plot the surface.\n", + "cmap=[cm.plasma, cm.viridis]\n", + "surf = ax.plot_surface(X, Y, Z, cmap=cmap[1],\n", + " linewidth=0, antialiased=False)\n", + "\n", + "# Lines\n", + "# Saddle point at 0,0,100 for f = (1 + X**2) * (100 - Y**2)\n", + "x_zeros = np.zeros(len(x))\n", + "y_zeros = np.zeros(len(y))\n", + "zx_saddle = np.ones(len(x)) * 100\n", + "zy_saddle = np.ones(len(y)) * 100\n", + "\n", + "ax.plot(x, y_zeros, zx_saddle, color=\"red\")\n", + "ax.plot(x_zeros, y, zy_saddle, color=\"red\")\n", + "\n", + "# Customize the z axis.\n", + "#ax.set_zlim(-5000.01, 500.01)\n", + "#ax.zaxis.set_major_locator(LinearLocator(10))\n", + "ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))\n", + "# Add a color bar which maps values to colors.\n", + "#fig.colorbar(surf, shrink=0.5, aspect=5)\n", + "\n", + "fig = plt.gcf()\n", + "plt.show()\n", + "fig.savefig(\"toyfunc3d.pdf\", bbox_inches='tight')" + ] + }, + { + "cell_type": "code", + "execution_count": 89, + "metadata": { + "ExecuteTime": { + "end_time": "2017-07-06T09:29:25.598315Z", + "start_time": "2017-07-06T09:29:25.515302Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "import numpy as np\n", + "\n", + "def run(lr_x, lr_y, n_iter, sigma):\n", + " x = 0.5 #.509124898\n", + " y = -0.4 #-.402918798624\n", + " lrx_hist = []\n", + " lry_hist = []\n", + " x_hist = [x]\n", + " y_hist = [y]\n", + " obj_list = []\n", + " norm_list = []\n", + " for i in range(n_iter):\n", + " x -= lr_x * (2*x*(100-y*y) + np.random.normal(scale=sigma))\n", + " y += lr_y * (-2*y*(1+x*x) + np.random.normal(scale=sigma))\n", + "\n", + " norm = np.sqrt(x ** 2 + y ** 2)\n", + " obj = (1+x*x)*(100-y*y)\n", + " 
obj_list.append(obj)\n", + " norm_list.append(norm)\n", + " x_hist.append(x)\n", + " y_hist.append(y)\n", + " print (x, y, obj, norm)\n", + " return x_hist, y_hist, obj_list, norm_list" + ] + }, + { + "cell_type": "code", + "execution_count": 90, + "metadata": { + "ExecuteTime": { + "end_time": "2017-07-06T09:38:22.518019Z", + "start_time": "2017-07-06T09:38:22.285704Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.7935282626580147 0.012823230456653944 162.96844238571578 0.793631866092\n", + "-0.0031984340496097412 -0.009700826780655883 100.00092889103405 0.0102145005065\n", + "-0.0005298009068054984 -0.0958138941740459 99.99084776400649 0.0958153589243\n", + "0.023539809577689468 -0.14057804533175197 100.03563912599105 0.142535291996\n" + ] + } + ], + "source": [ + "n_iter = 5000\n", + "sigma = 1\n", + "base_lr = 0.01\n", + "res_otur_1 = run(base_lr, base_lr, n_iter, sigma)\n", + "res_otur_2 = run(base_lr/10, base_lr/10, n_iter, sigma)\n", + "res_ttur_1 = run(base_lr / 100, base_lr, n_iter, sigma)\n", + "res_ttur_2 = run(base_lr, base_lr / 100, n_iter, sigma)\n", + "\n", + "res = [res_otur_1, res_otur_2, res_ttur_1, res_ttur_2]" + ] + }, + { + "cell_type": "code", + "execution_count": 91, + "metadata": { + "ExecuteTime": { + "end_time": "2017-07-06T09:38:24.855680Z", + "start_time": "2017-07-06T09:38:22.520660Z" + } + }, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA34AAAF1CAYAAAC3cTj+AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXecHVW9wL+/mbn3bm/ZzSabSgKp9C4dBKUooCiCioIU\nxadPsYHPJ3ZFFAQeRVERRRBRfMKjt5DQAiQhpPdedzfbd2+bmd/7Y+a2zaZANv18P5+7O/fMmTPn\nzsw99/zOr4mqYjAYDAaDwWAwGAyGfRdrd3fAYDAYDAaDwWAwGAw7FyP4GQwGg8FgMBgMBsM+jhH8\nDAaDwWAwGAwGg2Efxwh+BoPBYDAYDAaDwbCPYwQ/g8FgMBgMBoPBYNjHMYKfwWAwGAwGg8FgMOzj\nGMHPsNMQkftF5Kdb2d8lIqN2wnl3SrsGg8FgMBgMBsPeihH8DLsNVS1T1WU70oaIvCwiV/V3uwaD\nwWAwGAwGw76EEfwMBoPBYHifiIizu/tgMBgMBsP2YAQ/ww4jIuNDzVubiMwVkfPzdteKyPMi0iki\nk0VkRN5xKiIHhtsxEfm1iKwSkY0i8lsRKc6re4GIzBSRDhFZKiJni8jPgJOBO0Pzzjvz2xWR40Rk\ng4jYee18TERmhduWiNwQtrdJRB4RkZqdfLkMBsMegoisEJFvicgsEWkXkb+LSFG472oRWSIiLSLy\nuIg05B2nIvIfIrIYWJxX9mURWRyOdz8RkdEi8no4bj0iItGwbq2IPBGOmS0i8oqImN9jg8EAQDh2\ntIjIkeH7BhFpEpHT+qh7vYj8s1fZ7SJyR7h9uYgsC8el5SLymT7aGCQiPSIyIK/syPCckX7/gIbd\nhvmhMewQ4YDwf8BzwEDgq8CDIjI2rPIZ4CdALTATeHALTd0EjAEOBw4EhgA3huc4FvgL8G2gCjgF\nWKGq3wNeAb4Smnd+Jb9BVX0T6AbOyCv+NPBQuP1V4ELgVKABaAXues8XwWAw7M1cDJwNHAAcClwu\nImcAvwj3DQZWAg/3Ou5C4DhgQl7Zh4GjgOOB7wD3Ap8FhgEHA5eG9b4JrAHqgHrgvwDt589lMBj2\nUlR1KXA98FcRKQH+BPxZVV/uo/rDwLkiUg4QLnZfDDwkIqXAHcA5qloOnEAwF+t9vg3Ay+FxGS4D\nHlbVdH99LsPuxwh+hh3leKAMuElVU6r6EvAEuQnOk6o6RVWTwPeAD4jIsPwGRESAa4DrVLVFVTuB\nnwOXhFWuBO5T1edV1VfVtaq6YDv797dMX8JB8dywDOBLwPdUdU3Yvx8CnzCmWwbDfsUdqrpOVVsI\nFrEOJ1iwuk9VZ4Rjw3cJxq6Recf9Ihyv4nllN6tqh6rOBeYAz6nqMlVtB54GjgjrpQkEyhGqmlbV\nV1TVCH4GgyGLqv4eWAK8STBefG8L9VYCM4CPhUVnAD2qOjV87wMHi0ixqq4Px6e++DPBQlVGeLwU\neKA/Pothz8EIfoYdpQFYrap+XtlKAo0dwOpMoap2AS3hMfnUASXA9ND0qQ14JiyHYLV86fvs30PA\nx0UkBnwcmBEOkgAjgP/NO+d8wCNYgTcYDPsHG/K2ewgWshoIxjEgO3ZtIjeuQd7YlsfGvO14H+/L\nwu1fEUzongtNsG543703GAz7Mr8nsBb4n3ARaks8RG7BPWvZpKrdwKcIFrrXi8iTIjJuC208BkwQ\nkQOAs4B2VX2rHz6DYQ/CCH6GHWUdMKyXf8pwYG24ndXuiUgZUBMek08zwaRooqpWha9KVc1MklYD\no7dw/q2ukqvqPIIJ3DkUmnlm2j0n75xVqlqkqmv7astgMOw3rCNYGAIgNJcaQG5cgx0wzVTVTlX9\npqqOAs4HviEiH3y/7RkMhn2PcM50G/BH4IfbiEHwD+A0ERlKoPnLznVU9VlVPYtAa7iAQJjcDFVN\nAI8QaP0uw2j79kmM4GfYUd4kWCX/johEQsfjj5LzhzlXRE4Kg
xr8BJiqqgUr5aG28PfAb0RkIICI\nDBGRD4dV/ghcISIfDAOyDMlbsdoIbCtn30PA1wh8A/+RV/5b4GeZgDMiUiciF7zXC2AwGPY5/kYw\n5hweWgv8HHhTVVf0R+Mi8pEwAJUA7QSWBv42DjMYDPsXtwPTVPUq4EmCOUufqGoTgY/en4Dlqjof\nQETqw+B4pUAS6GLrY81fgMsJFqSM4LcPYgQ/ww6hqikCQe8cAs3d3cDn8nzwHgJ+QGDieRSh/Xgf\nXE9g+jRVRDqAF4Cx4TneAq4AfkMwSZpMbjX+dgK/vNZMBKs++BtBAJeXVLU5r/x24HECc6tOYCpB\nsAaDwbAfo6ovAN8HHgXWE1gcXLLVg94bBxGMcV3AG8DdqjqpH9s3GAx7MeEi9NnAtWHRN4Aj+4rI\nmcdDwJkUWjZZ4bHrCOZhp+a1uRmq+hqBYJjvFmPYhxDjT27YHYSmoR5BcINVu7s/BoPBYDAYDPs7\nIvIS8JCq/mF398XQ/5johYbdxcFAgsLACgaDwWAwGAyG3YCIHAMcCRi3l30UY+pp2OWIyEXAJOD6\n0FTUYDAYDAaDwbCbEJE/E5igfz1Mq2XYBzGmngaDwWAwGAwGg8Gwj2M0fgaDwWAwGAwGg8Gwj2ME\nP4PBYDAYDAaDwWDYx9mrg7vU1tbqyJEjd3c3DAZDPzJ9+vRmVa3b3f3YUcz4ZDDse+wL45MZmwyG\nfY/tHZv2asFv5MiRTJs2bXd3w2Aw9CMisk/kDjLjk8Gw77EvjE9mbDIY9j22d2wypp4Gg2Gb3PT0\nAm5+ZsHu7sZOQUTuE5FGEZmzhf0iIneIyBIRmSUiR+7qPhoMhr6Zs7adi+55ndbuvgNEN3clWbyx\nk0Ta28U9MxgMhj0PI/gZDIZt8tvJS7n75aV97mvvSXP4j59j/PefoSvp7uKe9Qv3A2dvZf85wEHh\n6xrgnl3QJ4PBsB3cO2UZ01e2MnlRU5/7j/7pC5z1myl84f63d3HPDPm47kbWbTiV1WtH0th0KaqJ\n3d2lLM9PW8ivHp7E6sa23d0Vw3aQTLv86em3+Ovz00m7W1/QmT1jJbOnr9g1HdtL2GmmniIyDPgL\nUA8ocK+q3i4iNcDfgZHACuBiVW0VEQFuB84FeoDLVXXGzuqfwWDoH/4xfTVtPWkA3l3dxokH1u7m\nHr03VHWKiIzcSpULgL9okPtmqohUichgVV2/SzpoMBi2yOPvrtvivpTrZ7dfX7ppV3THEJJITqcn\n/hSlJRfg2EPY2HQOvh8MmcnUFOKJFygp/shO74fv+6xcs4GSGpv6svps+ZR3l7JodROxiMPtj76C\nAi/NWMLTN1+9Q+dLeR62CLZl9Cq9aYsneHHhUg4bMogD6wa873Zu/tsk/u/1udiWhev6XH7OMQX7\nO9p6uP/O52lc38GsGStQhS9982zO++QxW2jxvaOqpFJvourR0XkLIkVEYyeQiD9HUexcysuvpLP7\nz6ApKsqvRCTWb+feUXamj58LfFNVZ4hIOTBdRJ4HLgdeVNWbROQG4AbgegpX1Y8jWFU/bif2z2Aw\nbAfbMpFasGGfz/M6BFid935NWGYEP4NhD2F1S89mZfPXd+yGnuwfuF4LIhFsq7ygfFOyE9drJ95y\nEZCko+tuHKsa1da8Wj62NRTP20Ay+Rax2AnYdm7BMOV7TGtaQZeb5NRBY4jZ2z9V7e5K8NAfppCM\np1i2aAPLm1cy4DurkbXKcfbpXHXs5UyZtYwb7n2KZNpFCDQTAJs6ut/39djY0cXHfv9XNvXEsUS4\n7OjD+e6HTyXQaey/tHT38On7H2F1azt+eKVjjs2zX76C+oqyzerPXrqOWCTCmOF9xyhJpV0ef20O\nvoLnezzz1gLOP3EiTzw5iRfjT1JaVELzQ5W0rwkWowmv/z/+/Op7EvzaUj2kPJeBxRV97m/v+Dld\nXX9ESQLBAlMiOSns49t0d
v8B198I+KTdpdTW3LLd597Z7DTBL1wNXx9ud4rIfILJ0gXAaWG1PwMv\nEwh+ZlXdYNgD2Zb55j+nr8lu798/cSAi1xCYgzJ8+PDd3BuDYd/n8GFVzFzdxi3PL+Kqk0dRHLWz\n+56cbaYPO4P27sdZ1/I1EJvB1b8kmV5IefGHmNdVzden/5kyO8HdY9NZXyLfb0UkELAyvxE98Sfp\n6XkQJYklFQweNA0foSOV4LRnbiHle4hAsRXh8NoiJgxowt1Qjc45lCsvOYm6AeV99u22Hz/GlOfn\ngQZCRtnJXWApEoE32qcwevk4ljUJrrq0j/LxSgAfKpYKE4fV88vnXub4ocM4dcLorV4D31fmLlvP\noLoKvvavJ5m+Zl32A/q+8uD0dzlz3IEcO3Jof1zyvZYXFi5lbWs7rh9q3wVSrseNT77AlScczbEj\nctfn4edncNc/X8VX5bMXH8sFx06koSIQvJav3cQb7y5n5LAB+Jprf+n6Tfzk8YfpGDKFaJlPmm5K\nv9RC+3+H7YbPwfoN7cx9dxUTD+v7d/m3f5nMsy/P4xPnHcmE0wZz7dTfE7FTXHHA+TSnuqmJlvKp\nUUdRHiliTXcbXR2PU27FETJiX+7ZVsD312IBNpCMP0w6/SWmtrpMaVzAJ4Yfx4iidjy/hZLYCbt8\ncWCXRPUMzaiOAN4E6vOEuQ0EpqCwnavqZmJlMOxa8jV+nq/Y1pYHqb3Ux29brAWG5b0fGpZthqre\nC9wLcPTRR2tfdQwGw47T0p3itF9NKii74v63ePiaD2TfTxhcuFqvqvu9BqY/aOt+GCUFCutavgG4\nbOr8A99f9ClSvtDiO7zRcRInVkwBQo2ahkElQgGwq/uuzB48TXDn3Ke5Z9HM4AQSCH2qIFY3lcWz\nWJ8Ar2QFi5Z08Y/vz6LrSOH0UQfy/eMnELNiDIiNRERYNHddcA7bQjwlPruYqk8KqopVkebhTXdB\nJUTPGESDb5NyI2xsriQ5AN6ONfLq/PX8bv50HtCPc+LEA7Kf+b7HpzJ5xhJKimOs39RBMuXS3N5N\nsgKSteEzJYSChuD5PqXRyK64HXs0hw8djKs+oGRuqqfw8pLlTF6ynCO9Wj589FguO/cYpi9YQ3vM\npacBfvPOVO6ZM40nPn8ZqY40V//oIVKuR7xBiNf5+JGgyRHVzfSMfQdHMvpEwanzAuGrWNhwepRk\njUX9qym+dc39/GvS9by2aRUiwhlDRiMiNDZ38vfHp+G6Pn945CVG1QvHNywG4MmNDzBv0yAA7pg/\niWOqmym1N3F2jcdhFSAiSObM4S++ho+DRbBfFRa3PMl3320h5busan+Grw9/DsWnNHYKlaUfpbz4\nnM205zuLnS74iUgZ8CjwdVXtyB90VVVF5D1NjszEymDYtcRTOcHvJ0/M44fnT9xi3T+8upwPTRy0\nK7q1K3kc+IqIPExgft5uLBEMht3Lp373Bh2JwoWmqctaCt5bvRapDvjuU9x/xTGcNnbgTu/fvoiq\nz8a2H5JMLwccRKKgSSwgpWmq7E10uVUk1cFzNxIJDSldAs1H0EjG+k7xCeQkF1jc8WcmVlYyu30I\nkpEOgfEDNuaEQFtxk0LFGU2URSEdmcNDy3+HJRZPL51IZ2Ik9vgkHYfE6BpnYXf6DJxsUQ/YEshl\nmSnoxJEbcH0bNGi7MV0FlhdU8mDqklUMH1LDzHXr+e9nnyfemSbSA5qC4laQNCCQLianylTI6H1s\nsSiNRXF9H2c/9vcbM7CW8YMGMnd9Y3gTC2QAFq5sYsHqJv7w+OscO2EEPUMAC3yUpOvy5b/+m5aZ\nraTwaRsLXplmVWvlpXFGHLoWRINmFRQFS2i9xKJpQBFEABHWXBBj4KsuX3/jCaZsXMG42uXM7FiM\n61tMXXk66ahSMbSLsZ9ZgkQ8LII2a4u6QsFOGVO+kouHvQpAa7wMwj1CDaot2WfAUggNTRG
FFreI\n25bPJUIV1dEeTq58NzQRFbqTL9GTfJ3W6IMcUP/YLrknO1XwE5EIgdD3oKr+KyzemDHhFJHBQGNY\nvt2r6gaDYdfx4oLG7Pb9r6/YquD3kUMH74ou9Ssi8jcC8/NaEVkD/IDg5wJV/S3wFEHQqSUEgaeu\n2D09NRgMGRY3dm2zzm/7iER8+Z/eZsVN5+2MLu3zdCdfo6XrISAOWAwb8Dv8tv9EtZPGVBG3H/gs\nCd/h2sXncVzlyqzA1nuiKQQauMxMWVHOHzwLH+Gd7hG80zqMWc0NHFK7nrqiznB6bdG6qpyjrpxF\nVXE8KFNwLFD1GFHZyOT2KvxDBF8dEPDKbdZ9xOYAcXA0nSecBVii+CpEIy5Eg7JMt+5cMo17Fk7P\nHVMEyaKgTroELJ/Axs+CUOLIarRAKIlGuPC3DwDCHz77MY4esX+afE5ZvIx56xsL7XwzuNA1EtSG\njqjP4z3LgxWCsK6nyoKeFqwDlHQpuQcpvIe11Tkf3tCiEwlEP+SgFHQWY9k+USdNIuXQeEqEZ9cu\nZmRNEycNXYQlELE8Thj5Avy30O4WE/djiCiiwT21xePskQsQfA4rWYktPgKkLeG2lVfx7XFNdCVm\n49CCrRkNX+4hc1WZ01XPj0Y+TUSEqEBSlTbNPPmgJIinZtLR8zQVJef0/03oxc6M6inAH4H5qnpr\n3q7Hgc8DN4X/H8srN6vqBsMexE+fmMcfXl1eUNbbXGpMfRmLNgaTsLVt8V3av/5AVS/dxn4F/mMX\ndcdg2CF8X7ntxcUMqy7mk0cP2/YB+xC9F57mmeAu/UrErgcy+RI9NrV+hyqSCMrASBwVJSIpfjfm\n32xMxSh3Aq2GCPiqocYtmBj7QI+vuOLQ4saCfUCpnaKhrJ0BRd2URNIIwXG++NSMbKM62oNIoFXJ\nFyQqi3s4Z/xcPLWYvmYYVkRJpKK0dJQyc80wThi9lHwFsCqkPaFxUyXrNtaAD2qFbQpoDDShiMpm\nAqM6QDrsRFbYI+8/dMST2e49+Pa7+6Xg98j0Wdz4xIuheBOSldAkK4H4DsE17y2RKPi24sZAMkrT\njFOdQmNbCaOH5drOaoZRxPKoq26nurIbAWpjXVRG4qxsGcCoAc14aiGhd54IIEqFE8dLC56CE85x\nyiJpEKHS7mZQtANLAoEw5TtcNuxfdPYE0YJdyawBBP3JGvkKnF61kliokhQRYkAk1HSHHwVwWbPp\nKg6MTifq7FyrqZ2pfz4RuAw4Q0Rmhq9zCQS+s0RkMXBm+B6CVfVlBKvqvwe+vBP7ZjAYtoPeQh/A\nrDXtBe8riiIcPCTwpfnd5GU8bQIqGAy7jf/639nc8eJivv3PWYy84UlmrGrd9kF7Gfkr6gB3f+ZI\nAJ6Y1ffYM25Qoe9MXxFADdsmFhmDSBGggf+Stx40Ed6PQECzgFLLJ2bZWcs+VUhpOMlVxQ1N4WyB\nJq+IVGgI2uHFWJOsRoAiJ01EfIZHuhke6SaGR0xcbEKhLyRUzKAOWJaPY/kcPnQNlWVx6ms6GD6o\nmdKyBN3p0CksRAQitsfcDUPwMsJdxvMolNi8aLCQkl+WwXMKxJkwwkfGRDXvPMBx+2mAl1tefC3r\nd7eZti/8DmetY3s7bqni24pfBDiB31z+pY1sgsqKRNZyNN91V4HqmjgDqrqxBBzbY0Csh4itjKnb\nSG20k27fxg+fHYvgmbLwGR5rYWxxEw2RVuqdOI54qCpp384+IhYwuqSZmLQgkjt3MivTBqsHmf92\nXt+C44Vay2aQZVPS68IkUnNo6fo7yfRKVNPsDHZmVM9X2XKQvw/2Ud+sqhsMexBbSuPQnSr0q1nc\n2FUwsbr2wRn868sncOTw6p3aP4PBsDkPv7264P3H7359nzNt/PY/Z2W3Lzy8gXMOzq2QL2vqYlRd\nGa6Xy+F33+XHcMJNL2Xfz1jVyrCakl3T2b2YtLuO5U2
fw9duhg+4l+LouILE6xWWVWDaJqGhnYUQ\n7TWTT6iNkvebErpqDbG7WOuV4atwaDTOITULuWf9eMZUbuSQWDvHFQXnW5AS1no2K1MD6NEiNIyg\n4QuoCo4E+hsXJWp5lEVSdLsxSovSDK1ooyqaDE6rioZayKzmw9IwIkeedi8jxTqgbp5gIWTNEdUH\n8ULVjU1Om5WHKjz01iwuOfqw93kX9l6OGtbAi4uWFV5Tsmq54KJmpYT8axcI1X7GMTS/jgpqKala\nKIklcpE0Q6lQBNxQSMu0WOXk0nQMiHQRszwkXLwg7IaiVEkPgyPt2OJzQrlHkSjNnsNT3dWsSVax\nNlXJ0EgbjigWPgnfwrFygYg8gpcdmvv6CknfptTOfXQJe5yxnKq0LOKeF64ZlLFm0zfw6Ql9AG2G\nD7ibypJzd/RWFLBLonoaDIa9j+4tROisLM5FKkt7Pu3xNG8uLwyqMGNlqxH8DAbDTiE/hcxtlxxR\nsO83LyzmqOFVjB4Y5Aj7wokH0FBVXFAnYu+/wTbeC81dfyHpLgSUpY3nITjEnHGk3DkQTnQlz/Q/\niNoZTHgHRuMk1MJTH49ANlIV0r5Fme1lzS4thYFWDxbFDI+AkOTrDXMosZSaPNVeja1s8JU6p5MN\nrkVCY6QBS4W0CjErSamdxhGflG/RlChFNcqAaDflodCXEd40DOiS8uzs+6xgp1v4D31rprLl4c5w\nhp8fttD1tp4Ld18i5bpc/dd/M3vdBi44bHxQmFHr+RpqVTORWMLtrCCYVzdfA5s9RFBRiATecd3p\nokBjJ7l7KECR7VEV7aEtXUJUXA6u2IAtPu3p3DhQcBtVKbfj1NjdRMSjRJRiUWyBetslZrmMLmpi\nTKSL8TGPJg98usOonUFba9KldKnDELuHqKWoesQEOt0otXYKK7NAUmBuHC6YhG34JPHJt0Zwaeq8\np98FPzP6GQzvk40dCXqbHO1L9KQKf6w+c1yQPiXl5lbSe5t9Zog6ZmgxGPYU1u2Fvrd90dKdYtR3\nn+xz34cnBpmhPN/nh/83j8v++BYATmhnddGRQ7Pj0qauJEl3/5mMv196km+TmyL7KCk8jRN1xuMB\nnWFuNs1IUgRmbpaAI1BsCT0aGIHWWVAkUGz7YUALwvpQZQmjIrkJaRSPGksLTOhWpC18hbjGiEpw\n7zzfYVOqDAsot1M44iMCUcvn8MrVDClppSKazAqZqoHs4Sks3lDH5LljwAeJhjqm/J9zHyRB4OOX\nQQtf4gcz9tI1LuIrEgp8vWPV3/SxD+/YjdiLeHvlWmav20BPKs3f3p4VXIvM9cq/PuHXT1SJNOcJ\neOQHasmVZf/b4YYF7T0l+L7g+rKZPF4VTTC0pIVhxS044mMJVEbiOOLT7UVpd4tpcUtpcUtpTMbC\nyLQOCvQotPk5Ld64SA8NTjtHFqUosWC4owy0IlmBDYUqO84IJ8HoqDDMEYY4NoMch4OK0tgS6Pms\nUDWoed8VAYpCadAns9iee+ZkM8fHHWebszMROWB7ygyG/Ymzb5vCcT9/kev+PpN39kEfGtjcpPPc\nQ4LACWtac5PIv05d2eex767uWyA0GAy7nnwzxz0Z31dufW4hTZ3JPvdPX9lakLx5WE1uBf9XnwxM\n6Z6avWGzYwBuufgwXr3+dAC+/9hcvnD/2/3Z9Z2GiJwtIgtFZImI3NDH/piI/D3c/2aYN7lfCLR9\nhbheM2l3ASKQUMULA1ZkXvnYCFWWRY1lU25Z1NsO5WJlNTSZV4UVxQp9pQIfwYw5XNDO6wmh1bdY\nn64i4Udws2Z9iopPJoBM5hiRwH9wQKS7oJ0MilBX1YWngQ2eiqK2H2iTIKt9ssIJeMPIZg4/YTEj\nx6zPaqWczsDMUzzwHRsrkZFuJNcGgTZqVG3N+70Fex0H1g3AEqHIyQksGavODHa3h/iAJ5CGWLdk\nBcHBw5uZePRyquv
aAwndDU1pw4bUB7UCzVlXsog3Fh1IMh3BU8n6e2aeDUd8qqKFz4AgpFyblmQJ\nbekYc9rqqYgEJ+jRIlrcUvI1to7AkbEEpxTnFoosLMqsGDG1gnMKjIzYjIz4CIotFlHLLvg+ZLZT\nqsTVI50RAAXKLMGnipzAl3tg4+mZpL3CMW1H2Z5l+Uf7KPtnv/bCYNjLWLChE4B/z1zHx+5+nXSe\nP8m+wnNzNxa8d8MZ11f/9k62LKP9u/TYwuiBj85Yg8Fg2HW0dKcYeUPf2rC9hcffXccdLy3hmJ+9\n0Od+v9cMPj9Ze0VRhBEDNvfbO+nA2oI6GV5bsmlHu7vTEREbuAs4B5gAXCoiE3pVuxJoVdUDgd8A\nv+yv89dVfI2so1uI0oUi2ECFFcpB4SRWe2kzAIrFxsbKCoZVlsNgK0al5RBXcCgOc+eQ1QqVBen1\naPJgbgoOiCgVlk9tJPjdtQWKrRRz2wezsL2elO/gq2QXBTKPSaXTgyWBEGCJksnHJghF0TSlxcEC\nQ3VlF4Pq27CigQAQi6SZOHYVYw5ZSfWIFgYPa8a2PaprO4nF0kgKStcTCC8iJOoFP6tWJPdfgpgv\nixr3/Getv6ivKOPpr1zOry86J0/Yy9Om+uDHggdHwkivGqrWSkrjDB62ieKSJKPHrmfoQY1oaSDc\nq/hYdhqifoF85KlFJOJlnzcbxQZi4jKiqIVKJ5UNwJIRQCujSRKuw+y2IaR8Bw8rm1MyQRQXKLeC\nZyXfHFMk0ABnzyU2PoKjFjaCJRYWFpk8lH1ZhKkE/ospfHw0G+22TDZfLBdi2PYAbKt/Fw62qEMU\nkXHARKBSRD6et6sCKOrXXhgMezkPv72ay44fsbu70W+oKrc+vwiAGz8ygfMOHcyG9sRm9V6YHwiH\nx48aQH1FEbe9sHiX9tNgMARc/+isbVfaw8mPQOp6Pk7oi9fYkeCmZxbwrxmFqX2H9PLdKy/afEpz\n7Wmjs9uxvc8E/VhgiaouAwjTXV0AzMurcwHww3D7n8CdIiLaD34IdeVXU1t2FeCyvPHzpFJTkDBE\nS4VARDImaoHWz1MfnyAUfjhvxspoAjWsR1BWhk3agkoLHLELzqvhZHiADbV2YA432PZ5OZGbhDv4\ndIQ+XhtSFVQ7XVRFEgWaP1sCs8Ko5VIX6QSEDclyOv0Inm8xtK6FIselpCgVpKSo6mDO/JEMa2ii\nvDywbKmsyFm42LbPkBEbWL2wgc6RoXSqgdZPMmsKEp7cByyhMhZj4uCBO3or9irqyks5c/yBjKmq\nYWFrvtAtBcECAAAgAElEQVSriAtqg4qgdvA+PkSwgDGHrsbK8+scUtvGuqYavGIYe8AGyksS9CSi\nzFvegO0o5UVxhtS24YZBViKWZmXCUjuXTqPwmxAkXxxV3sLGVCU9bpTWZAl1TieOuAxy2hkb8XHy\nfA6VQODzMgFhwgYrLBvPVxQ/FPeC4Ea5r55Fq5+m2rLDaLY+DkEgoqT6pNTHEsFHKRahRKBdo/So\nT0XR2VSXXUJp7HAsifbr/dnaKDgW+AhQBXw073UkcHW/9sJg2ItY2rR54uDOxM4Ju7u7yA/Wctyo\nGuorijh0aOVm9f7zgwcBcNrYgYH9usFg2C08Py+nob/65MAb4/efOzpbNm/dnp/Tzsuz4+zO8zH+\n2VPzNxP6DgqDt+RTFttc8CuK5ISK3qaIe4GlxhAgP0zrmrCszzqq6gLtwIDeDYnINSIyTUSmNTU1\nbXcHAk1dhCrbZYAFAywYaIMjgRbPyxjXaaDJ6PB9/HCS2+zl9kEg8FmhNkREqLGc3CRUcufLEKTV\n06zp5iavLJS1hG4/Rn2kk+aeUirsOCWOm5HDcikeEGqjHdQ4Xdii2OJT5iRI+zaqQlVJD0WxdKgV\nhPKSJOPHrCIRj+ZFicy9AGrqu5n4gWVYjgYdtEEz8fpTgKsUb4DK5TCus5TJ37iaW
GT/iKOYTrk8\ncs8L/P2u50kl0tz6ifOIdUmYsE6xeqB4PYHJpg1YoFECAXlgJ47jb5aeYWhdM0dPXEl5SYKo7VFV\nGucDE5dw3NilHDxyHZWlPfhqkfYd0n7wNAk+JVYKkAKhLxdENCgstlK4vsWCTQNBIGoFY06Do9nF\nCiWXf9LOMyn2UWwRBlgOtVY0EPy08Pm1RaiwHNLq4+aZAYsI0fD74xNoqyVsu0zSgEsy9QblsWOx\nrap+v09bFPxU9TFVvQL4iKpekff6T1V9vd97YjDsJfyxj9x2z8zpXxvs3UnK9elK5Pz7MpOp/AGt\ntTvF3S8vyZp6FkfsPidiBoNh59BboXP+YQ3Z7VcWN7PipvM4a0J9tuyOF/dcbXzK9bn8T2/x4Jur\nsmWH/eg5OsIFte5kYSCWJ756Eo988QP0Zuqyls3Ktsa/9iOTdFW9V1WPVtWj6+rq3nsD7sKsHx7k\n/3cCs70wUEWnOix1LZa6Nk1qs9TN1Asmtxn/uUxYe0vAVz8vsEdmqp0j7StTEw4JP4qPkPAdOt0Y\nKztqiOJRYgfPiasWnoZpHcLcfMWWS9QKg9AAnW4YCdKCqO0j+KFwGWpxyuJEi5LYlgafl5wSL2Pu\nZzsekVj+Ym9wbPk6qFyuRLsD3631TV20bArMUzeuaaGlcc9ffNkR/n73i/z1tmd58I7nePCOZxk9\npJZJN17DuO5yKldA+XrF8SBrVwlkonqOHL+OwnWZwDx3cG17eN1z2l7LIucTKpD2g6fJ10CkKbLc\nrDCf32jmlG5oelxqpdjQWkncjZL0HLq9CB4Wq9O5FCWZRzHb3VC4s8iYFueEucx3ILMI7qnS6UM6\nNPt0s7kLA8HSEQtHvKzmXIEkNlWWTY200tr4AXyv/82Et8fu4WMiUiEiERF5UUSaROSz/d4Tg2Ev\noS+Tx1lr2nOJXvdyrnlgGlf9ZVr2fUl089XKU26exM3PLOT2cDIZsYVzDhnM0187mTPGBWYty/rQ\njBoMhvdOvu8UBP58B//gWZ6a3XfC8l98/JDNyp6Zu+cuTi1u7OTlhZtroaatCAS53pq5g4dUUl26\nZfOnUXWlW9x368WHUVsWA2DPV/ixFsh3oB4alvVZR0QcoBLo99liUdlXASeMYpgvmgVCeSadeZUV\nGHR6oQYjkT9hzvPjy+QxUw2idnqhCNbiB/5WEqpnNNS2tflWMDH2bUBIJQ9iZWst69sriXuRcGIu\n+Fj4hOqkEBHwVFgVr6bdLc0KckDWPFA0J0jUDujOHpffRvY7qJCMR8lKBWmfspU+lquFB6ny8y/d\nzx9/8ThXnf5zrjjlp8x+c+kO3Yc9mWQ8hfqK7/msXd7EuhVNLJq+gk2rW3JRX31lwIweyhf25EJi\niuCl7TztXEbLRhgh1g2iumT2hv6TqsF9dcNonDErWGVI+k62XqaOj+AppPzg2VCFeU2DcdMWo8s3\nISIkNcqqVA3vJMvx0MAfT8FDafYgroIi2XgzG/IWNYIFC83697X5sNKFdZ4wubuW+emwT30ZRmV9\nEIWq8v+mxCpC8FGN46Zn9us9gu0T/D6kqh0EZp8rgAOBb/d7TwyGvYR4qu8w4HPW7R2RLFe39PDc\nViaBvSdgRZHcMHFTOKHs7JXjL6MNHD+4gkOGBCahZ9wymVlr2vqlzwbD/ko85XHAd5/i7pdzE8bl\nzV10pzzumrQECHzgHn93XXb/xIacWfYXTx0FbB6AaU8i6fYtgX3h/mmkXJ/Ji7bPNPGA2kDgu+1T\nh/P5D4zg+rPHbVbn40cO5YmvnvT+O7treRs4SEQOEJEocAnweK86jwOfD7c/AbzUH/59GTx3Ncme\nJ0l0/Q9IBJFBEIZJQSpwoicT6M2CZOpFYrG2ZxRvtx8MxLCtCuJalBUUM8KeIqQVOn3h9WSUd1MO\n89M2M1M2PX7hYocCzW4Fac1ESlQGl83j/NHvk
vYjvLh6DHEvLwCNBtoWwoiLngor4gPo8opDuUxw\n1SKVMQ3sNRkPPk2hb1igyQnKmtdX5QVeFIhYlC1uRRJuUMENcgEUrWxn6ew1/PN3k3DTHqlEmumT\n5/fXrdnjOOsTxxCJOniuz2vPzOLK03/BN3/zGKni8N74SsmydioWd5CsL1y4WT5vUFZQ641lQaF7\nbqBtS3kWSd9GgbjnZOchfhiPNaPxU8ktCHgEaUHe2jCC1kQ5llgMLyvNMwMV2v0ikhqk/vAVJsej\nTEtFmZKM8lIiwrsph0k9MeanYlnz4oxGW1Vp94TVHvQQ4ZW2s3i06VKmJ27GwwmUnYBlj8KOngI4\nCDZKBE+VRNePcSLHABaWVY8TPbYf71DA9hgeZ1xWzwP+oartve3kDYb9hanLNvHGsr4XU2OO3Wf5\nnsbJN08CYMVN521X/fK8SHj1lduO61QSzV2H5c3dHDq0/23UDYb9hf94aAYAv35uIf9x+oEAXHTP\nGwDMXdfByBue5PhRuahv1542uiCP5nfPGc8zczZslpezP1nS2EVRxGJo9eZRNbfF1GWbuOTeqVvc\n39vC4r7Lj95CTXjpm6eypjXOsJqSrY47VSXBmPa9f8/mpqfn88uLDuWcMF3NnoSquiLyFeBZwAbu\nU9W5IvJjYJqqPg78EXhARJYALQTCYb/guStpaToT1MPCI9B9xEGgpOLnFJdeBiheeiHdHT/CTU3F\nFouzGk6jtPIH2Xb89GIWr7uK4ZHVeCosTUeZny5GEIqtJBbQmPd4vpaMYqvDIdEERZYyNV5BiiBv\nWhBpw8LHZVCJi4VFe3c5rakSSpxg8TXp2RTbHlYYmaPNjdGeLiZme0Hidt/CVQcLJe2DJYrvCyKK\npxaO7Ycavl73A9i4qoq1i+sz2etBoKgxRdGmNOk6H7eYrJSivecEAsefdXB/3Z49jleemkm8J0zF\nEmrz3IpYRl2K3ZUi2hSHtEu6yiF7Q30lsbqYxroa6oe3FAToyfzPmNn6CsvX1eC6ESKRNIMHdobl\nFl1ulFI7hacWPV4kawJsETy57ekiGlOVLNtUw1fGfI6Gg6sYX1XH2y23sLRzMRmdnac2/+qsZ4Cd\nosmLMKlpDBc3TAMFV4RGT4j7NimiPNcTQdLlHFbSQpnl8lDzGP7dfBAfHTibjw3/GN8Y9mVEgufB\nTR1OvOtOIkVnUFRyIapKOjkJKKa95TosViP4uKnJFJXfSEnZVZv5JfcH2yP4/Z+ILADiwLUiUgds\nbutmMOwHbG2CsrclBL53ylKuOWX0Vus8cGXhalNpH2afW6O5K/We+2XYe/n726u4/tHZnHfoYO76\n9JG7uzv7BC8taARyk6B3V2+uRc/3betLy1UadejupaXvT868dTK2JSz9+bnv+djeY+q1p41m5qq2\n7ALbL59ZULD/jHH1bAkRYVjNtoXPTMAXVehIuJT1EQ10T0FVnwKe6lV2Y952Avjkzji36y4KH7xk\n1u8uI3sluu+juPSTiBTjRMdTUXMfPZ2/BnxKyr9V0E5ShvPpeSdRF+nhkoZplES7URVWJxoYGltH\nuZMMP0vg/pXwHJ5pOpR5Netw8IiGidlFFRWbIquKmF3JacOu4+oxBzG9aQ3/9WYZTVVzOLx+LUW2\nGwSGCYW3xZ0D8bADjSEUqPg8tfEUlq0ZQNqNUFfVzqABnUFffEimbWLR4Ld9zZI6GlfWUrJeibS4\nlK6NE9vYgyTTpAaV4juSTeOACMmGMkqXF1oCPfPwm4w7YmS/36vdje/7PHzXizlNqGUF1znlQSz4\nvnllUToPrqFydgu1b7TRcmQF0Taf0rU+m46IsnZJPcWlScqqe0B9HCd3Dz3fwrZ8VAXEYmhDK1aY\nGoFQg9zpxuh2o3hq0ZYu5qjKVVlhcUFHPT1ulEWNdXQmSnl1/UpuPfFg/rXyazQm5oWCpYXn+1ii\nxNVhjesQV
4ehpa14GgQYQiGtQpHtEUXp9GK82TmUFzqHMr9rEO3pYtR3WON+gRHVnym4Rk50DOU1\nd2TfiwiWM5H2pnMRNmafS8Wnp/OHWHYNxSUX9fu92uZop6o3iMjNQLuqeiLSTRA+2GAw5NEe37si\ne/78qQUFgt9dk5ZsZpppW4WrTfnavC3xyLRcELqfPDGPK086YAd7athbuP7R2QA8OWs93/lwNyMG\nbNnXyrBtunoJa+3xNBfc9doW6/cl9AGUxuzN2upvPF9p7U6hwIL1HSQ9n9PH9h3G/pXFTRwypJKq\nks399DKfIZOTcP76nR8QY/B2WDLsj0RjpyBWJerHsz58hD5MKXcZHe0/prLqF0GpVUJp5Y19thP3\ngt/GxlQJS5NVjI8EugO1juD11iMZWzqJocWtKMLizoE8O+dgzjtiDrYo0dDsMiNwKi4Jv42xlR+i\nofQQZres50uv/JO079PcOJzDBq7DsoKcj75vsS5RScp36HEjqCpVRemsGWBGaEi5Fps6KkCEuurO\nrIapaW0lK5cMpqIsjiJ0d5QAit0jSMSmZ0QZbmWMcWMHMbsxWHyR7jQatcC2iG6KU6C+UvC8vWuB\neHtZu7wJN+XmJLVQRVc5dxPdIypwa4rAtvDKonSPrCDammLwS92kq6Ik66LE2pRELSyZNZTisiTp\nlMMhxy9FLMV1beYsa2BgTSdt3aUMqOrEtgL/u6wJcfg3kXKIRHzSarMpVUptrJumZBlvrR1JKh3J\nugq+uXEVbanVNCWDtFWqUF90MBviM7NtegR+d4OKO5FAf5wN6GKJh6jP2kQ1x1cvw0IZX7aBJfGr\nOWvwIZw5aPs0u+2tX0P9jeHzqJl0lgD0dP569wh+IQ3AmSKSPzr+pd97YzDsRTx67Qd4cX4jIweU\n8p1HZ3HZH9/abvPJPZFfPbtws7KB5bGC98V9CH69LRHOPWQw//PSkn7tm2HPp3dwoy8/OIMn//Pk\n3dSbfYMv3P92wfvDfvTcVuvn56zLJ+n6zFrTzpLGLg7s5+i7qTz/vCN+8nzBvvzxsCvp8r3/nc31\nZ4/jsj++BcD8H59dUH/xz87Jbn/x1FH8bvIyljUHgTbu/PQRnHzQ+4hGuQV+dP5EfvD4XACq+xBA\nDSASw7Jq8PzAJzwIqqF4WICifutWj89QFS1hXPkQlnWvYcqmsTSmKujxogyLzeGDtUtxLIvVPTUk\n/XIuPfBujoq+w2ydD6TDfuTasongo7zb+k/aUut5ZMkGYs5A0ukiPLV5aeUYDq5bz5K2OpziUFMo\ngmX5RERJexCxM/JJ4Cu2eGk9GYF25dpaokmfSDsMWTaO9X47XVqajRRiJcDyMkFcFD9qsbqzO6+D\nQvXbG/FjNlbSDzRfvk9GSkl0JXnn1UUccdKY/rhFewyDhg0gEnVIpwoXmOykT/nCVjoOqcUrjwSa\n0MElMKiEaGsarzjQDBa1gNOldI0Q4m1FYMPcN0dTXtlNanYR8REOK1NFIJD2LKoqenJ+fCGq0B2P\nUhVJoCos6q5nWkuEtZtqSLlOQf3GVCfvNicAN2txOr7ywzQnCoOpKBaeB81+GSWkmdXdwPjy9dRE\ne4Jk8HagrbYtZVCRw5fGf+Y9mWeKFAdLG+Hzkf8rqn5yu9t5L2wzuIuI/AD4n/B1OnAzcP5O6Y3B\nsAeT6hWA4KgRNXzn7HEk9hITz1Wbevj3O4UB4bYU5GViQwXPXXcKBw4sLyjvS+N3VS+N3kfzwsob\n9h8u7yWkzN0L8sbt6by1/L2lJ9gSs9YE5mZn3jq5X9rLZ952auQefmsVj81cx7f+8W627MsPTs9u\nH3tADRG70DcxH0uEyuII/UXGzw+goh/b3dcoq/xJdjszn41GTyFWdDbllT/arjYssfjzCV+krrgI\nD4u5nUNY3lPHiOImIpaHoHxo6OV87ZAnGFo2nFPHn8eIihPwtQg3DMaRMXV
Oq4uvPr66LO+azNGD\nFvKpg6Zl1IGs6KjjiaWHsmBTPZ5aqAqeCsW2j2NpbwVcmEPNDnLwpcBLODQ9Ppj4n2pZN7mR6vlp\nrKQSbfMZ9EaSuqmJQPDzFTvpE2tzOX3CqKx1TMnydkQDgSc7/c84qClMfW4OP/rC71k4c+WO3po9\nikjU4aG3f8SP/3Q1Yw8fgZWJhEOgjStd1kb2RmYitjoWkQ4PCYXqSLcy6NUkde+kqJuWwl5twXPF\nVMz2Kd1oZe+RLYrnh2aRmvcCKsoTuL5kA7OkvAiu74SBXvJewMNL5waBVcJjJ238VYHg1Z4uZm38\nDAZ2ncey575AdfJPvNp6HFM2TUAkimATsytZ1D2OlA6nuuQr79knr6r6FiznMDQU+vID3JRX3/5+\nb8dW2R6N3yeAw4B3VPUKEakH/rpTemMw7EEsb+7m3dVtXHhEkC93Sz4ynzhqKDc+Njdbp7SPJMJ7\nAp/83ets7ChcQbrmgel9aim//5EJjKkv36y8JC8R7YkHDuC1JZsHuumtJTTsH0zpI/JiRyJNRZGZ\nVL9fjh5RzbSV26dV2V3M3krk3llr2rJBVn76ZBDN8PWluTFjUl4E4T9fsfXodeMGbT4e7Qj5aWry\nBU5DIZHIYYhUoprzVausvg3beW/aV8eyOa/hGB5YMSlbNqVlDB8fNIPa2BAaSj+SLbfF4ZkNNSzo\nOJijqlYyvLQJu2BKnsMSKHIycfXDfwJg0RYvIhpxidkutuTKXTdIwG2FwtqgNUm6FkTRIsFJQmlC\nsq56xc0+Q19OYNkWvhdoZaJrc77rliWMGVrL61+7jhUL13HtObfm7A/zo5I4NrUDSmle14YTsfe5\nnH5N69v49kV30N7Sxce/eAZL5q8PUjCEUpXT5VI5o4mLvvVhZjY246U9Vk9dQyqllKxN4juCnVRs\nAQ1zgBS3BNdZRBixwmZji9JyqJLoClJpeBpEf7Ug1JYFUp1IEOAn6To0tlUUdjRzY4FPjmpgcbeX\nLQbBw8HCw1ObxT0n862xlzKuchj2ccEYcaZO4K3m+azpeoCF7XOY3DKKuB9lUrOFY00jYo/lxLoJ\n233dLLuGqtrf077pMjx3EZBLYh+Nnfjeb8T2nHM76sRV1QdcEakAGinMK2Mw7PX4vtLTy0Thyw/O\n4Ot/n0kiHQwM3am+Bb/8CURL954bzKS30Jehr6A0W5qs55t6njom+OHvHfmsqiTK8l+cy4WHG83f\n/s7Db63adiXDFqkrj/VpmnnsATW8fsMZ/Ps/chOD687ctunYmeP79rnbEb4fLnr1xfl3btkfMZ9X\nvnN6n2bkFx05NLs9qq5/TVQ/OK7/r8W+iOsuQjX3u2Y747Hsmq0csWW+eOA53HTY5QyKVQPKykQN\nd6w8i+qy/2HqpvU8uvpVnlz3Fpe+fhMbEsGCx+yO0RxVcwvnD72ZUaUnk00lAYwuO5W0N4LX1p5A\nRSS34Kih9m9DSxUdXUWk3MJnSz2he1INuIGQUHpkB9GUEOsAOwkpSwITxJC6gRXc9dvLufmWS7ng\nY0cVtCUitLQEOWtHjm3gN49+hUjUzqmgwh/IMYcM479/9wXGHzWSsy4+jmM/OPF9XcM9lcmPz6B5\nYxuJnhRP/PV1vPwkmaqgPlXFEa783Knc892LuffGS7nzjs8BYPngpJRx4wfz69s+w3XfzJmAiwj/\n+V/nkrSV4laoWiDQ5rB2bRW+J6AWvm/h9tioG0hMloCtsG5tFa5nh+1kGoRDKwfx8keupST6bnZf\nZvcRNRdz4fC7uHrM//Lp4efw1Rl3c+ErP2J9PLC+WNq1nhvnPMhdy5Rnm8cT9yOA4uGjKBsT7z2F\nlW03UDPwRarr3kSkBLAoLrs+Gw20v9ke1cQ0EakCfg9MB7qAN3ZKbwyG3cTNzy7kt5OXsuAnZ2cj\nvmWCCmzqTjGkqphP3LPlx74oYpFI+zs
9gMLO4JJ7p/K9cwvNqrYUxCUTJn5UXSlnjKvn9hcWc34f\nAp6IZAN7eL5uFiTGsG+xrKmrz/J3V+8duS33VLpTHqVRm0e++AEu/l1u/Pn7NccjIjRUFVNR5NCR\ncPn4kUO22M4vLzqE6x+dzQvzG3dFtzdjW2nlthSJ85aLD+PRGWt2RpewLGHejz+MZdJTbRUnMhbb\nGYHnLiIaO4uKmnsQef+pi06qm8C9S56GZCZgi/KDOX+l243jq5JWD0WJisMpdYdwWPUoTqw7HBFh\neNkxtCbXMLft/2goOZRR5SdyNvC1CXD+s/cxtzXPdUEDH7627nK64sUMLW+jtCyFH7foeGwgfsJG\n/TZwIbGgDETChOGCY1tUVUfpSnYhInz+Cydz0JhBAPzsp48BYNvC6AMHMWhQJZdc8oHsaTesbsFN\neTlnLdcjEnP40vc+ytjDhnPr/379fV+7PZmDjx2FY9s4ts0JH5rIM/+YVlhBobOlm7XLGhkxNkid\nMvrAeo49bjTT3l6GbVtc/92PMmJELYcdPgJf4aUX5nLxJcejxTau54MIpY1QulFhWjXxY6Dk2LbA\n7295Mbo4QdF5Pj5Cz8pSPAK/vszwU+pEmHnRN7GtUKCSo3m39REAHCvKJ4ffS03RiGyXH141GVd9\netwkrzbN5ZPDT6Yt1YUgBGfJMay4joMqBnNuw5bTzWwLJ9JAXcPi9338dp9nSztE5ERVfQ24TlWT\nwG9F5BmgQlVn7fSeGQy7kN9ODpIjdyTSFEXsgonKdQ/PxLaEDR1BJLILD2/gK2ccWHj8Z4/i8j+9\nvVNzZe0ID0zdsj/BO6vamLGq0JxsZO2WozHmm4bO7RWcIZ/yMET6NX+ZxvJN3bz4jVN3Sk4aw+7n\nukdyflufPX44Y+vL+f5jc3ly9nru2o392hu486XFnDGungkNhSZJ7fE0UxY1cdSIao49oIYVN53H\nObe/wqlj6gq+R29894Os3NSz1TQGnzpmeDbialfSpayfzNFfnL9xu+od8N2ntl1pCzx33Skk030n\neN9RSt5jepr9EZEiquueR7UTy6rslzY/PuxEbl3wr2yk0CI7QqfbQ1pdolZgbWKJ8PWxF1ITKzTx\nrY4N5aT6azdrc0LVQBa2NeJmwjbmmfS56rCitYaBj9pYbU7W/LL1vuFIzGNApBrX6WH08DrO+MAY\n6gaUc9ZJ4/A9Je16FOVZwIwaPZB5cwNf+e/feCENQ6oL+vHrr/8V9RQsi7KqEkaPG8Qxp09g/FH7\ndnTrcUeM5N5J/0V3R5wDxjdw5kXHcvM3HqJxbTi38H2isQiDRxaaCP/8potZsbyJmpoyKqtyY9hH\nLziSj14QpARSVQ6fMIx35q3BtiwsS0imXOIzKrCKXSSqdE+pgbhF6p8ptCrNuqERqAIExFeOHTic\nXx3/0ZzQBwwvO4aPDvkVq3umMaHqI1RFhxb07ezBR7NiyZM4ls3ossH8YPYDVEXKOH/IcSzoWMOy\nrg10eXGKrChfHnMuJ9XtHVrcrY16dwBHEWj3jgRQ1RW7oE+GbfDItNWsaY3zqWOGMaSqeHd3Z6/n\nzbyE7Gkv+CHqSOQ0d2+tKAywcPbBg/sIehJ8leI7SfBb0tjJmbdO4YmvnsTBQ97bj+/M1W18/99z\nCsr+8oVj+dx9b2Xfd+Z93v5SzrX1BBHZXlzQyNDqYiP07cPkR/T86YWHAFs3ATQExFMev35uEb9+\nblG2LLOwcsGdrwIwPc/H7+mvbR4ltTTmbCY09kXMsUi6Pj39JPg1diS48s+Fq/qv33AGJ9z00nYd\nH7UtUp7PW9/74Fbr9eVrbNi1yP+zd97hUVVbH35XJp3eQVroSi8BRAQFURAU7GDHXj8LKoLeiw0R\n9VquFb32XrCAgo0mokiVXqRFeu+QPvv745yZTE0mMMnMkPU+z3nm1DlrJmd29tp7rd+SOETC4/QB\nXFD
vVM6q1Zafty9kR9Z+LqrXnfc2/MLO7P3c1PhcMo7uoFXFBn5OX2GM7tyfU6rU4sl5v5AvhqQN\n2WQ3LigenrA1l7g9CVaCk0vgJVswOQnsMof4+cO7KJfire7qiBcc8d7hdqPHXMrcOetIS6vh5/S5\nMQby87n9kQvodUGnwOecgNSsWwXs76RVeiPen/kw075dwPPDPoF4Bw+PG0qiT9sjIjRqXHjYtYjw\n9IMX8tG3c0hMiCc1JZEX350O+Q6O/FodXGUdBPK2JbKnqeCsbMf72v+aFu3ezFsr5/Bo575e712/\nfDr1yweepbu0QQ/OqNmG1Pgknlz+GbN3ryQ+zsFtTc/jzuYDmbr9L178ewLNyp9E56qxo9JaWOuf\nKyJvAvVE5CXfg8aYu0rOLCUYWbn5DB9vTbi+NHVNTJcPiBZmrd3tXnfl8xUmmx7I2XaFRvrmCYaL\niYu2ApYKZ3Edv92HvHP7Tm9anZ7Na1ApJcFde9BTsdRZeFRWyHiq5tWqqHWyTmSWbrFCOp+5uG2E\nLcgkr4cAACAASURBVIkt9h4NnhOcsedoWO819uI23Pv5Yo6EaXBq64Es93qfU2ox9uI2VC+fRIOq\nqdStnEL7BpV567f1Xtfc2aspr0y3Sr3M/3cfDhzNpWYFbRvKIhUSUrm4/unu7REtL3Ovn1Kp+DIS\n8XFxrN+yg3wxECdIZi7JK/PIapxChT8OUPvTneQ7HMS3PIm8RjVJNIb9BzMhOQHyndzz+Jc89/DF\nVCxf+POYlJRAj56B62UCPDzuOsY9+g3tTmvKmfaMVVmm9wWd6NijhVX6oOqx5+mmJCdw0xDreTly\nMJMX35nmkbhniIuLw+k0VIgT9h/MgtoeInNOQ06c4Zs/5/JA8x6UqxQ8OsKXmsmWOFX5+BQcEocg\nlIu33vus2h04q3aHY/5MkaKwzMHzgGlAJlZun++iRICDWbFVJDwW8Kw5d9278/hx2bZCz29Tz9/x\ncgkT7CkhcZc82xsTEYaPX8yG3Uc4/+VZzMsILPf+3eKtpI2YRFZuvrtWlYtXrrAaqpcuL2iwFm7c\nR3JCHJd2qsechwofgQ+Va09Lc69XTNaQqhONuz/7i0Gv/s4+j2e+X5va7vWbejTyq/GoeLPdw3ly\nMWP1TkZ+vZTOaUFmE46ReDvEKWPPkSLODI3/TimYpZyycgfVy1udoZnDe/HpzaeSHO8gN9+Q7zGS\n5Go7OzWsQsXkhELDUxWluOz9eg2SZ5AcJxXmHKTuM//Q5NbV1PpwB/HxDhINXNanHa+OvpyjBkhJ\ntGbn4h2sydjJ1N9XHbcN3c5pw/t/jGLYf67QKBebytXKH5fT58u6RRtI2Ljb+ts5DVVmr+GFW89h\n1lf3M+yCrtR9fQuJG7OIO5hH5W93krgpm7iDeVR8bxN3dHv4mO55b4sLGdrobO5uPohzase2Qx+0\nN2aM2Q18JiIrjTGLg52nlC6nPeUdRpOT53QLbijHz8a9R7n1o4VBjwfrjLlm/EZ+vZTLuzQIu10u\nx2/G37tYvGk/X8y3BA9GTVjuFf419odVzN2wh4UbLWWpD2ZneAmreIrXnNG8BiPPPZmnfljFvAwr\nnOzZS9uFzWZPifS/dwQW/1Bilwn2LLRn0e4Ej/yJ8kkJVsSTivt4sedwNp1GT2H2yN7MWrPb7/jQ\nd73rIT59cZuw3PdXu9zGqAnL+G147+N6L2OMVymGQCQnWM+CZyh9rxY1Wfjvs4OKRynK8VB5cx4N\nv1iDSRASdufZ8v4Wpw/qzE1jLqfaSVXI2LwHcdWZyzfgzMcJNGukSq+xQNMOjai/9zA7lm3C5OaT\nAzx+yXNM2PceJ3dpSnymkwaPZlhaDU4n1b7d5XbCt8uxRVKkxidxdaPwDIpHmiI9BnX6ooftB7Lc\nDoCL/YWECilFU5TanC99W9UOuN+zvl1ufviFCN6caYVMLd7kLRXs2
50e9+s6t9MHlhz8xr0FDZ3L\n6Qu2XVIU93suS0xfvZPV2w8BVqhx2ohJPP3j8Y88lzb1qqR4SfK7Ov6u8OmSwhjDT8u3e+UZerL9\nQBZpIybx3yklr5YWCv+datlx9dtzWbCx6Bp9gzuHZyDp+u6WuMTg9OKH0X0wO4MpKwqEXGav867f\nOfqC1n7XrN5hPdNXvDUHsMpNJMbHUbVcYqm1O0rZomaD6sQfyCNhtxUZJVjRgNXqVOZG2+kD2DJ/\nHZWWb6bm4Swq/Pk3Ff7K4M6+7WjdXEsQxQKpFVJ4d+WLjPl2OIlJ8cTFxZFa0UrBqd+8Dv2GngFY\nEVISF+c183r6xZ0jYnM0UWJTRSLyjojsFJFlHvseFZEtIrLIXvp7HBspImtFZLWI9A38rmUbzw68\niy5jpkbAkhOH7LzCnbR/DfAuc9ArSO0nzw5vsELvJcGKbQc5VEj4772fF4zbjDzXPy+hXf3KJWKX\nL1sDhLQpFte9O4//+9SaZXY5fK/PWFei93Q6DcYY9hy28j/fnrWBtBGTSBsxid2HA9d79CSQMzfq\nPO+itR/NsZRk35i53u/ccPLNX1u45cMFdBkzJWAofI9nrCiJFzxCEyPJSXaO8NqdhwMWvffkkk71\nCj1eHFy5ySm2EJUxhp0HQ/tdjpqwnBs/KBBy2bwv072+aNTZXHVqQ79rNvrkKJogBbgVJVzc/MxV\nJKW4cssLFD8O7TvEMo8wzhdvf4sjyzeR/+ca7nx8MK9/P5LLbuwTCZOVEMnLzWPJzBXs3W4NbO/a\nvJcnhrwACI3aNuD56Y+6z+3Qy6WuaajloyJ64+grSsfgKKYkYwTfAwJpvb9gjGlvL5MBRKQlMARo\nZV/zmhxPoZgTlMPZmt8XblzlF4adHViRaehpaXxwfRf3dpMgRYQ9w20PZpZuLb8HvrTEfjYFGBjw\n5JYzmvjta1/Cjt+4qyxFs3ev01G2wnCFwv5tz5KUND2fnU6jkZPpNHoKizbt54nvV7iPvTy16Jmx\nmz6Y77evcmpigDMtEaqS5PlfLIdu9+Ec2j5qiTI9/eMqdx1Ol1IvwI4QHZ3S5ud7ewbcP35B+GrY\npSZZ/1Jdf+tGIyfTZcxUL9XQQHg60zsPZrFmxyGGf1VQ0SnY3/1fPgMBL0bJjKty4pKUlMDEfe/x\n5ZZxvLX0P7Tq1swq927gwO6CtrVRq/okpyaRl5PH/l2HaHCKVQPzyIGjPHbJc9zTYxTbMyJT81IJ\nzKMXP8fD543l+lPuYffWvfz4zjTy85zkZueyf+cB6jSu5T736MGjJCQ5wBhE4Ponh9C8U2P+/fk9\n1PZxBMsiRTp+IjIswHKDiLQv7DpjzEwgsPKEP4OAz4wx2caYDcBaoEsR15Q5PpgduBZbfrhkGMsg\nLhXO2hWT6d60mtexjLEDiHfE0bN58RqKsT+uDJt9AJv3Fe7QuVRJPUfhfRner0XQY74zNeGkX+va\nrB7dj14tNHciFH5fWxBC9+1fW0rsPp7PytwN3mF7cSHk4/3mkZv21W3duKt3U7/810fOK6hplOcR\n/rx531Eue2O2e7YxFIyxZii/mLfJb4Cj9UneYkujJizj9RnrOPe/v/m9TyCHtTTIys13h6L+tsZ/\nli/REfhf8X1BBqSOhYQg9/hgdgYAB47mBpzJ/eCPDPf6C1PWeP0f+uq2bn7nu6hRIclr+/Yz/Qee\nlOCISFUR+UVE1tivfgnmItJeRGaLyHIRWSIigyNha7RRsVoF6jerw/B3bif9nLb0HtKdc6870338\n8QnDadPDiub54NEvGFjpGmZ/t4D3H/2SWd/MZfkfq7mt0wiyMzWVJlpYNmsVWUeyyc938sTgF/h0\n7LfkZOYQn+Bg6OPej33r7icTn+AgMTmBMy89lcH3nc/Lvz/B6YN0ABpCm/FLB24F6trLLVizcv8T\nkeHHcM877QbqHY+GrC6wyeOcz
fY+P0TkZhGZLyLzd+0qPEzmRGOGnUzftGZ5rzwNV/FxJTD5TsO2\nA4GdoqWbLRn61CQHH994Ku/bs3u+I/AT7+zO7JGFCyI8er7lQE1euv14TfYiq4jixZel1+fA0Vwu\n/9+fQc+5vJA8oeu6p3Fnr6bMDZOapy9J8Tp5fyzc8/miUrnPt39t9dqOD8Hxu6ij1Tz/PqI3nRpW\nZdg5LfwU7M46pcDZd9WJzMrN5/SnpzN3w16uf89bxKQw7vtyMY1GTmb4V0u4+u057v0zVu/kx+Xe\nvzdPx+TpH1fRyqPG3Ybd4VG09CQzJ7/Q2facPCcn//tHxv64CmOMl3PvomG1VO7q3dRv/50B9oWD\nRyYU1PWcsGgr3/61hXaP/8zJ//7R71zPGoNb92cyYVHBgERFj8LWvlQvXzATmFYtlQf6Bh98UgIy\nAphqjGkGTLW3fTkKXGOMcUVLvSgipRO/HwPUTqvB6AnDuXfcTSQmFzyPyalJVKxWnjx7oCM3O4+n\nrn6JOEdBG3Z4/xEuqn49y/9YXep2KwXk5uTxzr8+pVEba5a2RecmbFi60X08MSWRPld61zc9qUkt\nPvz7JcbNH8t1j+tYiC+hOH71gI7GmPuMMfdhFXWvCfQEhhbzfq8DTYD2wDbguWJejzHmTWNMujEm\nvUaNsjll+/M9PXn6krY8NtAaUX/2J22YCuO5n1fT7alpbpEHz7ya2z62cqtcKnNnNK9BxtgBfkWD\n29arTJ1K/vX7PAmXCIMnD3+zlD7P/+q1z7c0QvnkeL5csAlfLupQMHZSpVzgcCywEqDv79uCmlpr\nL6J8Nnej376iZnuPBd9BkBV2SKSL+CAzQ57UqJBEYnxcwJqWLkSEZy+x6vrtsmf3+r440318sT3o\nEgpfLyxwNjzr2/kqYPry+ox1LN9a8PlcDmg42HYgk8/nbeSJSSvo8cx0r1lNF69MW8Pv9oz8mzPX\n+9XQu/usZsx9+CxEhGHntGDuw2fx4Q1dOLd1ba7rnlZicvDv+0SPBBtk8K3D9+vfuzjo8R1WKMTx\nS4p3cGVXq01sXquCStsXn0HA+/b6+8AFvicYY/42xqyx17cCO4Gy2TEqJjeMsXK9XMJjicmJ3PDU\nFbTs1ow4O3UjJyuXKR/7Rw4opcfnz0xg/POTWPnnWroNSuf8284h60hBtEhOVi7b1u/wu65ClXLU\nbRpYjK+sE4rjVxPwjMnJBWoZYzJ99heJMWaHMSbfGOME/kdBOOcWwFNqrJ69T7HxVEV0hWI1rlEu\nUubEFL/YanSnPmUJ4Vzzzly/c8LRKUlJdNC6bkV6NKt+3O/l4uM5Bc7AfWc358d7evDiECvK+olB\nrUh0xJGT56ReFf96WF0bV+W1KzsWOVOpRJb6VS3nacTXS/2O7T8a/rzebj4lYVz0sxVrU0NQXMzO\ndZIcQhkZlwrxfV9YIkP/+Ah+fL9kq981JUlxw7YLo9tT03jwq6V8Yv9Gmz78gzs8N99pSBsxif/8\n/DfXecxsegqezH3oLO49u7lXAfOaFZLp0awGr1/ViUfOLwiVDRePnF+8sO7RkwoPWy9fRH3OPqdY\neTdZRYhoKQGpZYxxFZXdDtQq7GQR6QIkAhoCFAJValcmuVwSGEDgye8fJCEhnv/OGs09r91InCMO\nR3wcXfvHds22WGbWN3P54LEvyc3OxZnvxOGI47vXfsY4rbxNiRManlKP2lqGo1iE4vh9DMwRkUdE\n5BHgd+ATESkHrCj8Um9EpI7H5oWAK95kIjBERJJEpBHQDPDvnZdBPpmzkcPZefzfp3/5HTu9qeVg\nDPUolK34E0pR9dwwdUyO5uTzxzr/UK5wcFrT6pxcuyK9T67F+Fu7cdWpDUmMjyM33+lVQqJ5LUuA\nJjnBQf82dYqcqVQii2fHH2DKsDPc65klWArBN6/svnOsfLLnfvm7SIfsvT8yvGZ+guGwB1Q27D4
S\nsKTHr0XUggP/8iiBQlFnj+xN/zaFj+52alglaMmHcOGaOVu2JfBspmvQ6aIOdSMyw35GCI5vIGXU\nYJRPKtzx69q4Kp3TqgRUFFZARKaIyLIAyyDP84z14wn68Np9qw+B6+yB9UDnlNk0mUA4HHE8P+Mx\nLrq7P8/89C9apFth1dszdvLSne/gzHeSn+fkx7cDD5QpJc9zN42znDysyY8W6U3of9NZJCTGk5iS\nwJhJI3llzhjiEwpvhxRvQqnj9wRwM7DfXm41xjxujDlijLky2HUi8ikwG2ghIptF5AbgGRFZKiJL\ngF7AvfY9lgNfYDmSPwJ3GGNKtvhTDLBo034e+mYprR/5ie+XbPM7LiJUK5dInlNHU335Z88RFvxj\naQvtDeD4pY2YxDkvFIRQ1gpTJ2z9riNhE9vx7fBm5xX8JNLTqiIixDuEnDynW6QG4IXB7albOYUe\nzTTiJxbwzX2qXzWFFwdbs7pHc8LfDF5ohwCvesJbdLla+QIxjjs/+Yu0EZOOO3/YVY6gXpUUGo2c\n7He8W5Nqfvt82edTqzTFY0aycfVyDGhrDW50aljVvX9Qe+96XMP7tSAlwRG2moJF1aUc9OrvAfe7\nSmWc3bLQyZsSwzMy4M5eTVnxuH/lJJcyqicXdvBPuc8YO6DI+6UmxvPlradxSp2KRZ5bFjHG9DHG\ntA6wTAB2uAbL7deAMpMiUhGYBDxsjAma6K1pMv40bZ/Gbc9fS/teBXUo50z+izyP/6dHDwUXTVNK\nFs+QTkeCg7j4OHpffjqfbXmDL7f9j0592uIIITVB8SboN2Y3JohIVWA91mjSh8B6e1+hGGMuN8bU\nMcYkGGPqGWPeNsZcbYxpY4xpa4wZ6BHGgDHmSWNME2NMC2PMD8f/0WKfzACdPt8OQ3KCg8ycyDt+\nG/ccZdKSbVFTqPuMZ2dw8euzCz3HJaHfsUHlUqtnVxjLthzw+v6ufGuO1/FTG/l3kvcfzeXDP//h\nwa+sMMGvbz+NVidV4vcRvalaSF6fUoCI9LPrh64VET8BBREZKiK7POqP3hjO+5/a2PvvmhTvoEVt\nK8f0aAnUhMzMyad5rfJe6p09m9egSqp/vtbYHwIXko+PEy4Nocac6x6rtnuXqehmf+aiZjR3Hcqm\ny5PetUoPZeeRY8/Q7z2aQzX7Ob+2W0Etuf8O6eAWWwK4/cymzNmwh/lFlC4oiu0Hsujz/K9+n6e4\nZOVFZlzTs+zM/X1bkJpY+Eh5rYpJDE6vzwuD23OBhzP9yhUdSsxGxc1E4Fp7/Vpggu8JIpIIfAN8\nYIwZX4q2nbDk5ebZ9f8gKTWR+9++LbIGlWGGv3c7tRpWp/4pdbn47v6ce4MlQFexanlSyqsmwbFS\nmKv8if26AJjvsbi2Y5Yj2Xks3xq6sEC4+GvjPoZ9sYhpq/wTUQMRKFzof9eke21v2Z/JVwvDV+sp\nGNe+M5e0EZOCHu/57HTu+GQhU1ZGV+2by8YV7vwBPHtpu7Dd754+zQACCj0Uxhu/ruO8l2fxyrS1\n7n1zNxRUQ3lhcLuQZPab1QxcZ1AJjF0v9FXgXKAlcLldV9SXzz3qj75V0naVszvkXy7YHPbBlMPZ\neX4hei8NaR80zzVQeGS8QwoVDCqMREccb15j1XcMNLjlSecnp3htd2lkjTkezbGcv/1Hc6lWLsm2\nKY5ZD/Zi5gO9ABjavZHXta56fsX9Pj+du9EdovnVws2s3Xk4YKmIgvsU/duvEqT2XWnRJa1g7DZQ\nKQnX32X/0Vwq2QMCLw4pcPb6t67jd40SdsYCZ4vIGqCPvY2IpIuIqw26DFtoz2NgqtBSW0pglv+x\nmo9Gf8XJXZpRt0ltkssn8+/P7qVmg/Dl7CvFo9fg7ny0/lVemPEo839ewuX1b2XhVP9ceKV4BHX8\njDHnidUTOMMY09hjaWSMaVyKNoadVo/8xICXZhVZuPZ4MMb
w0/LtLN18gIUb95GT5+TC1/7g64Vb\nuP690PzmJyeHXg9u+4GSLUz8q62EmTZiElNW7CAnz8mc9VYum2fnbdqq6HL85mYUXUoyKQSRilBx\nzUTsPJSN0xZ4uCtAfqYvT9kzK8/ZxahdgjQAt/RszIUdip5dAWsGWCkWXYC1xpj1xpgc4DMsNb2I\n8PnNpwIFxbanrdrJzDW7Wbhxn7v0yPHgdBp2H86mvI8ao+u5cQ1ceNL4ocn8ub4gb/VAZi5Zuc5j\nDmm+omsDd7hmYaGsvoqmDaulcmYLK0Rt496jtH3sJwCqeZQNqFcllQbVCsIZx9/ajV8fOBOAW8+w\n6sh9uyg03bA3Z64jbcQkRn69lJl/7+JQVm7QHMFhHvX2mj1cELCS4PB3ppvXKs+ZEaxrufDfZ/PB\nDQVlcifddToAN5zeiHpVrHzgvzbtIys3n+w8J5VSCp6VVU/04+/R54Y0CKUcH8aYPcaYs4wxzeyQ\n0L32/vnGmBvt9Y/sqKr2Hkvp1IE5gdizbR/DzxnNh499ySMXPsu7q17kuwPv03WACrtEA9M/+51N\nq7dyYPch3h75SdEXKIVSaI/XTigOPs0T43h2ZsLNT8t3cMuHCzj/lVlc9NofjPKom1Rcfn3gTFIS\nHDxxQeug57gUK13M/HsXExcXXzHvaE6eW348GDd+MJ/m//qBwW/+yW9rdnEgs0AM4NO5GzlSAuFp\nx8tdvZsGLZJ8UhjFT1y5mC9PW8P4BdZM7MTFW4stKuFZaHpEMYQRghVpVoISag3Ri+36o+NFpH6A\n48DxCyjUrmSFr3jWR7v2nblc9NofnP/KrGK/ny9XvT2HVdsPuZU7XbXWXIMf9/Rpzt1n+Tt/Q94s\nSB16aeoaIPQC856CIg/2O5lHzm9JvCOOREdcoY7fC7+s8dp+9YqObNtvDXD9e8Jyd31LT8fEl/S0\nqjSsZqkfN7FVkO/9fDHzQxgQGjPZO8z1vd8z3AMzvjSomspNPRr57Z85vJffvsJKIJQGVcsleg0Q\nNatVgYyxA/j3eS156qI2ADiduNv1yh4hwMkJDq9wUUU5ETh6MBNnXj5OpyHzUGbUpKwoFs07NSEu\nTkgul8Qp3Zoz+7sF7N91sOgLlYCE0oIvFJETptz9ao/cjGd/Wu0lmBFOJi31FmP5fZ23M1WchqVh\ntXKsfKIfV5/a0O9YsKK417wzl7s+/StkMQNjDF2enELLUT9x5VtzvGp9tRzlX9TXxdVvz2WpT0jq\n/szwS9Dn5DkL/c4WbdrPHR8vJN9pWL/rsN/xCskJvD003S2d70k4R69ddcs+nbuJ4V8tce9v/NBk\nXp2+lrQRk9iy3ztZ3PdzeW43qVGu0FITLTzqDb58uebdlBDfAWnGmLbALxTU1vLjWAUUbulpBVE0\nqGrNVgXrXB9Ph+RoTp5bcXbBRiva4YXB7Xmgr3fx9XvPbs6yx/q6c+cAr/y/t2dtAEL/3bzk8Vym\nJMS571U+OZ7D2cHbCt8Q9rqVU9xiMYs37Xfvr1iI4+eJp4N4SQgh4L4UVrpgYLuTWLbFuyNyauOq\nARV1H+ofvQqXrhBU61mx/mftOFisqk2KEnN8N+5nEKs8wNWjLiEuTgc3oomW3ZozbuHTjP5uBLMn\nzuOpq17iprb3k5NVtGK74k8oT3dXYLaIrLNHvF2qnDHJsC+8oyDOeWFmkDOPj8o+nZFNe707+zkh\n5IGUT4rn+u7+o8ieuDqMJ9tiENl5+V65eKEWLD6cncfOQwX/4A9mWtdZipGFO4+es1MA+0IonxAq\nc9bvYcPuI/T6zwxGTVjud3zbgUwenbicwW/MZtLSbazbdZjez/3qd971pzeiR7Ma/Da8N+3qV3bX\nQHR9f+GicY3gOXbP/rQagC/meRdb91U7nJdREIK8bteRQu/3w9093OvnttZipcdAkTVE7ZAr14/j\nLaBTuI0Y2f8UMsYOKLK
e5ItT1hR6vDC27Ctog3bZv/UezWpwR6+mfueWT4pnssezdZpdOsZT6j8U\ncRewHC5X+3SRxzWVUxJCrlP4UP+TqVIukbpV/B2pGh5qpIVxvDNVH87+hzqVAgsKxMWJe7bWxRtX\nWfnYC/7Vh0WjznbvP7l29CpcpiYWhOA67M5vn1O0RpZyYvP3gvXk5eQTn+AgMcIz8kpgFvyymHHD\n3mPnpj1kHs7iyP4jHNh9fCJbZZVQ/hP2BZoAvYHzgfPs15hk+VbvUVnfgsLh4sM//yn0+L4jhXd4\njDFk5uaTklj4nyjeDu1zqcwd8Jlt8xVHCIbvqO5zP1tOikt+HCCtmn+R8ED42nCsZOXmM/jNP+n1\nnxls2Z/Jh3/+Q9qISW7xhE/nbqTbU9N4748Msu3cusPZedStbHUOW9SqwLLH+pIxdgAOj9mJCXd0\nZ/JdPRh6Whp39Pbv9B4PNSoU3Qn979SCzvvW/f5S0Ze9EfpshOesS7yGeR4L84BmItLIVsgbgqWm\n58an/uhAIPTk2zDz6dyNx3zteI8ZtApFFN6GAicAYJIdwjzgpQJRk/vPCRxtEIgf7+lJxtgBXiGs\nlVITgrYVvgIpN/e08vOqB3DyfB2uYPgK2hQmVvXHOv9w93pVU9lWSC71QA/Vy+//73S3KEq18klU\n9hBzieY83HL2d7Ro0353bnIobZqixDK3/ucaajWsTlJqEp89/S1v3P++hntGEavnr+ON+z9k7V8Z\n7mqWfa/rRY16RZcDUvwJpY7fP0BlLGfvfKCyvS+m6dWi5OrYeDYYLifExTN2KOCpT01lxuqd/LYm\ncB7Qj8u2k+80hXY0fDmSnceSTccmAPHQN95KSa48P5fj16RGOabdd2ah7/GWrTgaqG7esfDMj6sD\n7l+x9SD5TsPIr/3VnTbvy3SHUn58U9egBYaTExw8OrCVV0c0XJRLDL1jF8jx8+Q+D9GIYHx35+mM\nOi+QEKVSFMaYPOBO4Ccsh+4LY8xyEXlcRAbap90lIstFZDFwFzA0MtYWr7i2L9m5Bc7U+9d3KeRM\niwrJCbx2ZYG4QV6+0yty4XhDpAub8fMUN/rl3p5ex3xrwvlGVwSjU8MqtA+hbMsfa3cHzF+c+bd3\nW31X76ZMGdaTL2/tBsCZHrmMLQupW+eIYmEUl7P/3h8Z7n1FFWlXlFinRecmHN5/lMN7D7Nv+wG+\nfeVHlv8RuP+hlD7xCQ5yfdICzrysW4SsiX2KdPxE5G7gY6CmvXwkIv9X0oaVBC6J/fPa1uGdoSWX\ntnjEDo3s2qiql1BCWrVUr47+0HfncfXbc9l/1N9RcjlcvU8uOszmwX5WzsiG3Ue48YPiV9pwOo1f\nAfOL7ZAslx3/udS/nMDlXbw1LtrWrwQcv+O3btdh5mXspVbFwCPNTmOCOkyeCpqBZgdKg34eUucz\n7j+Tr28/ze+c12dYhbFdz0qF5HieGNTK65yVj/fj/wIIbfjSpl4lrj+98JBgJTjGmMnGmOZ2HdEn\n7X2jjDET7fWRxphWxph2xphexpjAxe3CzC1nNPYL383K9Z4JO5CZS9qISdzx8cKA7+FZVsQ107R4\n1Dl0bFAlJBv6t6nDqY0t6X9PZ+Atn7Iyx0KV1ES/4uxgDZzdbn+esRe1oZlHHivANx6/p0Wjzg7Z\nARURvr2ju5eKr68y6Xu/b+CKt+bwxfzCS+RkjB3AsHNa0LRmBTrbpRFEhA1P9WfDU/1jVvUyWS8O\neQAAIABJREFUUF2/SIvRKEpJc/RQJkcOFKRV5OXk8cRlz3F4f+GpFkrJc/RQJhNf9daZqFKrEs06\nxXRxgYgSSmzYDUBXuyM0CjgVuKlkzSoZXp1udbZnrN6FiDD0tDTiBA4dxyh6IFzhSxd1rOsuxAzw\n/V09yMv3Dx/4vwBy/66Qm1YnVSryfp3TrE7ceS8XqP69fHkH96hzUTXlGj80me88FEBPq
pTszg10\n5QK5nChXrsrU+87gqYvaugufj72ojbue1ux1xVdLff7n1e66hWc99yuXjpsdtDTEp3M3eon0RBsu\npcR3hqaTVr1cwE720z9avsMe27H+7s7TuerUhl7iMynFmDlUTjxGnnsKr1/VifG3dmO0h6Jvxu6C\nzshEuzSBr5gUwKa9R2n68A/8vHw7OXlOxv1qtX+VAhRqL4yLOlqDQKMnFUS4hkPAqVJqAgd8Zvw+\nnJ3hlfOalOD/L8ozVLLyMdTDG9y5YMCqyUPe+bWPfreiyOvrBcgzdCEiQfM0h56WFtXCLhDds5GK\nUlKkVkihWYdGSJzgiiU8ejCLVXPXFn6hUuJ8PPorfv7gV0SEOIfQpF0DPlr/CqkVwqfGXtYIxfET\nwFPdI9/eFzNk5uTz945DvDDFkuJuU9dypnYczMJpoM2jP5OX7+SNX9dxOAylCFyOT6WURK/8k/JJ\n8dx2ZhO/839bs5vJPh23L+0R51DCBtvU83cOzzqlpnvW7kh2cHGWD2ZneG1/fGNXth7I4pu/trBw\n4z4e/MoKp3TleVROTSRj7ACa2CIm7w3tzLXdGjKgbR13p+HH5dtDzvO7bNxsnvt5NS9NW+vluALM\n2RBYcv2L+ZuLnNm85YzIjQbd3acZT1/chl4BanW5Zk9cuGZHq5VPRET45vbutKtXiT9HnlUqtirR\nT3paVa7yUPTd5ZF3e1Ll4P/85tklC75fso2r3p5zzPdfsdVfNjtcgh+HsvPIzXeSlZvPiK+W8G8f\nAaek+MDt35gL27gVdIvLQ/1P8dr2nPW76tQGXsdc5Q08+eo2/xn8UHh0YCt3rmI0M7BdQa7iybUr\nFHKmopwYZB3JIr1fezr0bmU7fxDnEE7pGl4NAKX4JKUmInGCweDMd7L57638MWFepM2KaUJx/N4F\n5ojIoyLyKPAn8HaJWhVmvluy1Uu9841rLFG+RR6S4E0f/oGnfljF8z8HrtMUKut3Heb+LxcDVv0j\nVw5Z05qWo1QuKZ55D/fxu+72jxe6Z7wAZts1BkOZ9fHtHM24/0xSE+MpbxeBPpwT2Jk9nJ3np5TZ\n3VbvA7jotT/c68EECaqUS+SxQa39woHaPfZzkaUk9hzOZm7GXl6eVjCqVli9u4yxA/z2LXn0HHdR\nZ09G9IvcyHpqYjyDOzfwGvlfPbofk+46nc9u7uYlKz9lpZXL5MqjqV4+iQl3nh6yYIVSduhp55C5\nIhR+XLaNG94vGADZc9hboMmVH7x+92HmBhlECYWODb1nrEdf0PqYZtp8mbrSmtF/6OuldB0zlc98\n1G6BoCqaV3RtwKXpQcspFkpygoNxVxUIs3oO9lVO8f5cl3dpwIan+nvt8w2LP9F4+uICh3r8MTq5\nihJLjLv/A778z0QWTl2KcRoQOHVgZ8pVKhdp08o8Q0ZcyJDhg9yzTdmZOVStE1qqghKYUMRdngeu\nA/bay3XGmBdL2rBw4pn8Hx8nbmfs2Uva+Z37zu8bil1s2xPPUgIVkxNISXTw4uD2vHddQU5hjQpJ\nZIwdwNyHvWd1XGUTpq0qEDYIlHMRiAoeCfhp1a3GylWTybdD6GLNDu9wyXCH+dzz2aJCjy/Z4i9E\nEyyErHKQ8LSKyQm8cFl70n06p0XJ4pc2SfEOd9juoPYnuZ2/P9dbHfJos1eJPh4baOWAumbTb/3I\nO6/Pt+6dq3yIb3254uKbZ5gaphBk18zblws2B40Q6BBiLmJx6duqFt2bWopwR2zHLys3n1em+4d2\niQgP+8wSnsikJDqYMqwnb1+brsIuSpkgJysXZ77TrRiZUi6Zm5++KrJGKQAkJiXQ+4oexHv0hVt2\nK1r0TglOSPrvxpiFxpiX7MU/IS3K8QyH8izCW7Vc4FHrPWFSpXTl0lzQoS71qviXQqhZIdlLvOVQ\nVi5Zuflc/17BKH6oztich8+idd2K/GtAQQfF9bmDC
aF45rNc3LEesx7sBVi5KOHgx+XbAxZTd3Hd\nu/7T9S9P865T5hKQaXWSv0re1PvOAKxZx/G3ncYLgy1HvmODopX7IklSfBwHMnP5c33xcyGVsotr\nAOvlqWt5arJ/RYkxk4vWnHn6Yv/QxaJI8CkTEi7HL5LpZCLCkM5WWKcrn3nj3uClfW7s0Yjfhvdi\n2WN9S8W+SNO0ZgXOOqVWpM1QlFLhtueupXPf9sTFx4FAnca1qKazSlFD3WZ16HlJN0QEhyOOpb9F\nrKLSCUGZKPzVum4lHuhr1Zy69YyCHIvkAMIBYOX+ebJy20GufnsOR4OETAaiQlK8XymHQIy+oDU3\n9bAUGcf9uo7HvisIvSxOLklqYjzf/18PbuxRkNvmcvyClYRYbIe6DmhTh/9c2pY6lazzHx3orS55\nTbeGftcGY8b9Z3pte4aiuRj9/QoGvjLLbz/Au79nAHCtfc9/n9eS16/syKtXWLLyX91mSfieXLuC\nO8/QhWvGdOHG/UQz01dbsvBD3vwzwpYosURF2/Fbv/sIb8xcX+i5vmqVLgZ3bhBwf1H8+sCZ7vVw\nlbfq2ti7BtPJtSswoI2liFu/aopfiGW4yXNaolePTlyOMYZHJ3qHvU8ZVlBGQkSoXzVVZ8AU5QSk\nYrUKjP5+JHe9eiMDbjqbR795INImKR6ICDUaVMdgyM9z8uV/JhZ9kRKUMuH4AdzRqymrnujHLT0L\nHKPGNcrz4uD2/Da8l5f0v6/IyLn//Y3f1ux2S/B78ubMdVzxP6sD/9oMK0worVoqS0McGT6pcgoj\nzrVm6eZl7OPTuQV5Lp0aHt+IU+WUBOLjhJ2HCkI9F2/az4ivljB+QUFY2KtXdvQLNXxxcHvAUiZ9\nfFBrQiWtejmvXLwNu4/w5Xzv3J23Zm1gyeYDXrLqvvRtVZuMsQNITYzn3DZ13DlFnRpWJWPsAH68\np6ffNa3tUMphIdS+iyT929Tx2h7kUfhZUYIRbPbfs53Ysj8Tp9OEXam4YbVy7pm+7LzCVYJDpVJK\nAn+PPte9ve1AFq9e2ZGMsQP4bXjvEg9/TrZzo2ev38NNH8znD1uUq/fJNfnohq40ranCJopSloiP\nj2fO5AV8OuZrnM7wtHPK8XH0UCb3njGKSW/8QnxCPInJCfS8VGv4HQ9lxvEDK6nftzNxQYe61K+a\nyvx/9eHjG7t6HXM6De971K7ydMpcjJm8ij/W7eFAZq674HhugJINhRGoQxdI/bO4xMUJeU7D6zPW\ncSQ7j/TRUxj06u98Nm+TW4AmGP1a1+aSTvWOWSTluztPd68/MH5JwHOy85w8NrAVLWr5d7C6NKoa\n4IrCaVe/MuvH9OeuEGrfRZK7fewbc2Hxw+8UxYVnKHv3sdN4ZOJyltr5s67yIGnVUpn5QK/jus8v\nw87g/HYn0adl+EIAEz0Gf0JVAg4X/TxyF6esLCgdM+6qTpzerHqgSxRFOUE5eiiT528ex+7Ne5n2\nySzWLCg8qkIpHWZ9PYc1C9ZxaO9hatavxlvLX6Dfdb0jbVZMU6Ycv6Lo3rS6W30zL9/JmMkrecQj\n/Ge3j0jKBo96Wu0e+9m9PiaABHhxeTDMqpStHvnJz/7CSE5w8J9L21HzGBXs2tSr5J5588yl9J39\nq1s5hZ/u7ekV1vVA3xbEO47t0YyFwsmOOHHnCj0xqJW7ZqOiFIXnb2lQ+5NoXqs8D/c/xUv86MM/\n/+Hqt+cC8OQFbVg86hxmPNCLBtX884yLQ93KKbx8eYcSC3cs7dIBwWYUEwuJRFAU5cTkm5cmWwIv\nQE5WDs5wxbQrx0Xjtg3BQHK5JFr3aEmdRpp7fLzofzgfXMIgTR/+gbdmbfA7/vGcf9zrd36y0O94\n4+rlOL1p8UeLfxt+fKPxx8qCf/mXlggXrpm3vUdyOGiHnvnO/nVOs2b2RIQ/R57FuKs6eeVhnqiU\nT4onY+wAru6WF
mlTlBhiup1D+8QFrfnvkA78fO8Z1K6UzDe3dw94foXk+GIXbI8Uq7YfKvqkEuZ4\nZ0UVJVyISFUR+UVE1tivQXM/RKSiiGwWkVdK08YThYVTlrBp1RYcdtkqY+DN+z+IsFUKQNMOjXht\nwTOMGn8/F93dn0tr38jFNa5nzUKdkT1W1PHz4YbTCy/8/fA3y7jhvXnsOpTtrqvlydtDOx9TWYT6\nVVP59o7AnbfjYdUT/by2h/drQY9m1enbqhYrH+9HNY/cxpLAJaDT9tGfSRsxyb2/S1pVvr79NK9O\nae1KyfRrXTvsZSUU5UShUkqCNWBwqrfgUq2KgX/HjauXD7g/mmhSwyo/0/UYwrvDSfem1Y57VlRR\nwsgIYKoxphkw1d4OxhPAzEKOK0FY/OtyRl3wNDPHz6Zm/erEJ8QTFx9HaoWixfmU0qHByXVpd0ZL\nHr/0OfbvPMDBPYf45KmvI21WzKIxZj60CBBuNPS0NIb3a0HLUT8BMHXVTjo/OcV9/KvburFi60GG\ndGngJ31eHNrXr8ych84q+sRikJzgYO5DZ9FlzFTA6gjefmbTsN6jMMZc2IZhX/jnE35xqybnKkq4\nCNbuVEyJ/ib+y1tPY17GXvq2ql30yWGmbb1KLNls5UO+fHnHUr+/ohTCIOBMe/19YAbwoO9JItIJ\nqAX8CKSXkm0nDLs37wWE3Ow8kssnMex/t7J13XYuvLtkVYWV4jHt09/ZuXG3e/uPb+cx+7v5dDtf\nH/niEv29ggjw9e2n8dPy7Yw817tob93KKWwJUBOvU8OqdGoYntHqWseYU1cYnnl6vU72n6UsSQa0\nrePl+J1UKZlZD2pirqKEk3ifWfIhneszsP1JJa6MGQ6qlkuMiNMHcF7bOizZfIAXB7cPWtdVUSJE\nLWPMNnt9O5Zz54WIxAHPAVcBheZtiMjNwM0ADRocW1mXE5Gel57KwqlL2bRyM3e9dhNNOzSKtElK\nAGrWr4YjPg5nfhzOfCfOfCfTPpmljt8xoI5fADo2qELHBv7h9LUqJvk5fq76gNHOujH9EUpf/CQp\n3kH18onsPpzDRzd0VbU8RSkBfB28sRe3jZAlscX13RtRp1IK57WtU/TJihJmRGQKEGjU42HPDWOM\nEZFAaiO3A5ONMZuLGuQxxrwJvAmQnp6uyiU2CYkJPPDO7ZE2QymCjn3a8tg3w1k0fTnf/HcSxhjO\nu/XsSJsVk6jjVwyu696IhRv/ol39yrxzbXqJ58eFk0jmzc3/l/44FaW0+Oq20yJtQswQ74jj/HZa\nR1OJDMaYoLN0IrJDROoYY7aJSB1gZ4DTugE9ROR2oDyQKCKHjTGF5QMqSkzS6ex2dDq7Hdc+dhnG\nGBISY0O4LNpQx68YnN/uJO0kKIoSlWSMHRBpExRFCR8TgWuBsfbrBN8TjDFXutZFZCiQrk6fcqIT\nn6Cuy/Ggqp6KoiiKoijRxVjgbBFZg5W/NxZARNJF5K2IWqYoSsyibrOiKIqiKEoUYYzZA/jJfBtj\n5gM3Btj/HvBeiRumKEpMI8bEbo6viOwC/inyRIvqwO4iz4ou1ObSIdZsjjV7oXg2NzTGlK78bAmg\n7VPUEWv2gtpcWpSp9qmYbZMvsfj3DQX9XLGFfi5/QmqbYtrxKw4iMt8YE1O6r2pz6RBrNseavRCb\nNpcmsfj9xJrNsWYvqM2lRSzaHClO1O9KP1dsoZ/r2NEcP0VRFEVRFEVRlBMcdfwURVEURVEURVFO\ncMqS4/dmpA04BtTm0iHWbI41eyE2bS5NYvH7iTWbY81eUJtLi1i0OVKcqN+Vfq7YQj/XMVJmcvwU\nRVEURVEURVHKKmVpxk9RFEVRFEVRFKVMUiYcPxHpJyKrRWStiIyIsC3viMhOEVnmsa+qiPwiImvs\n1yr2fhGRl2y7l4hIR49rrrXPXyMi15agvfVFZLqIrBCR5SJydwzYnCwic0VksW3
zY/b+RiIyx7bt\ncxFJtPcn2dtr7eNpHu810t6/WkT6lpTN9r0cIvKXiHwfI/ZmiMhSEVkkIvPtfVH7XEQj2jYdt80x\n1T7Fattk30/bpzLWPgGIyKX2s+oUkaBqg9HUloVCsGchwHn59jO0SEQmlradoVLU91/Y7zKaCeFz\nDRWRXR5/I78al9GGBPhf63M8aHsUFowxJ/QCOIB1QGMgEVgMtIygPT2BjsAyj33PACPs9RHA0/Z6\nf+AHQIBTgTn2/qrAevu1ir1epYTsrQN0tNcrAH8DLaPcZgHK2+sJwBzbli+AIfb+ccBt9vrtwDh7\nfQjwub3e0n5ekoBG9nPkKMFnYxjwCfC9vR3t9mYA1X32Re1zEW2Ltk1hsTmm2qdYbZvse2r7VIba\nJ4/v7BSgBTADSA9yTlS1ZSF+roDPQoDzDkfa1hA+S5Hff7DfZTQvIX6uocArkba1mJ/L73+tz/GA\n7VG4lrIw49cFWGuMWW+MyQE+AwZFyhhjzExgr8/uQcD79vr7wAUe+z8wFn8ClUWkDtAX+MUYs9cY\nsw/4BehXQvZuM8YstNcPASuBulFuszHGHLY3E+zFAL2B8UFsdn2W8cBZIiL2/s+MMdnGmA3AWqzn\nKeyISD1gAPCWvS3RbG8hRO1zEYVo23T8NsdU+xSLbRNo+1RG2ycAjDErjTGrizgtqtqyEAn2LMQi\noXz/wX6X0UwsPldFEuR/rSfB2qOwUBYcv7rAJo/tzfa+aKKWMWabvb4dqGWvB7M9Ip/JDg3ogDVK\nHdU222FJi4CdWP+s1wH7jTF5Ae7vts0+fgCoVso2vwgMB5z2drUotxesDuvPIrJARG6290X1cxFl\nxMJnj5m/Z6y0TzHYNoG2T777FW9i8XsK9iz4kiwi80XkTxGJVucwlO8/2O8ymgn1ubrYDokcLyL1\nS8e0EqVEf0/x4XojJTwYY4yIRJ3UqoiUB74C7jHGHPQcKIpGm40x+UB7EakMfAOcHGGTgiIi5wE7\njTELROTMSNtTDE43xmwRkZrALyKyyvNgND4XyrETzX/PWGqfYqltAm2fygIiMgWoHeDQw8aYCaVt\nT7go7HN5bhTxLDS0n6PGwDQRWWqMWRduW5Vj5jvgU2NMtojcgjWr2TvCNkU1ZWHGbwvgOQJQz94X\nTexwTeParzvt/cFsL9XPJCIJWJ2qj40xX8eCzS6MMfuB6UA3rOly12CH5/3dttnHKwF7StHm7sBA\nEcnACmXoDfw3iu0FwBizxX7didWB7UKMPBdRQix89qj/e8Zq+xQjbRNo+xTtv9HjxhjTxxjTOsAS\nqtMXld9TEZ8r2LPg+x6u52g9Vp5jh1IyvziE8v0H+11GM0V+LmPMHmNMtr35FtCplGwrSUr091QW\nHL95QDOxFMgSsZJao02ZaSLgUgu7Fpjgsf8aW+HnVOCAHZrwE3COiFQRS4nqHHtf2LFjwN8GVhpj\nno8Rm2vYo+mISApwNlbuz3TgkiA2uz7LJcA0Y4yx9w8RSw2rEdAMmBtue40xI40x9YwxaVjP5zRj\nzJXRai+AiJQTkQquday/5zKi+LmIQrRtOk5irX2KtbYJtH2i7LZPxSEW2jJfgj0Lbuy/e5K9Xh1r\nEGRFqVkYOqF8/8F+l9FMkZ9LvHPfBmK1p7FOsPYoLMR0Affq1aubtLS0SJuhKEoYWbBgwW5jTI1I\n23G8aPukKCceJ0L7pG2Topx4hNo2xXSOX1paGvPnz4+0GYqihBER+SfSNoQDbZ8U5cTjRGiftG1S\nlBOPUNumshDqybRVOxjw0m9s3Z8ZaVMURVG8uOT1P3jrt/WRNkNRFEVRlBOcMuH47T+ay/KtB8nJ\ncxZ9sqIoSimyevshtu7PirQZiqJEISLST0RWi8haERkR4HiSiHxuH58jVlmTsJPvzGbdrudY+M9V\nbNz7UUncQlGUUiCmQz1DxaXsHbvZjIqinKj
ExQn5Th2UUhTFGxFxAK9iiQBtBuaJyERjjKfAyA3A\nPmNMUxEZAjwNDA63LbM3noLgJB7YdnAW/xz4F90ariA+LjXct1IUpQQpEzN+guX5xbKQjaIoJyaO\nOCFf2yZFUfzpAqw1xqw3xuRgldMY5HPOIKzaZQDjgbNstduwkbH/E4R8kiWXVMklNS6XSo485vzT\nPJy3URSlFCgbjp/O+CmKEqU44oR8nfBTFMWfusAmj+3N9r6A5xhj8oADQLVwGrFi3+tUknxaJSTT\nMjGVlgmpnByfSFUHLN3ydzhvpShKCVMmHD8XOqiuKGWXEHJlhorILhFZZC83ehy7VkTW2Mu1vtce\nDw7RUE9FUUoWEblZROaLyPxdu3aFfN3qA/PYmu2kWUIyCVLQZXSIg2YJyXy59l4OZWUX8g6KokQT\nZcLxC3PUg6IoMYZHrsy5QEvgchFpGeDUz40x7e3lLfvaqsAjQFes0KtH7GLOYUFn/BRFCcIWoL7H\ndj17X8BzRCQeqATs8X0jY8ybxph0Y0x6jRqhlyHck7WL7XlVcNj9KBEpWBCa1NnN37tUlVhRYoUy\n4fgVoFN+ilJGCSVXJhh9gV+MMXuNMfuAX4B+4TLMESc4NRxBURR/5gHNRKSRiCQCQ4CJPudMBFxR\nCJcA00wYBQ261jib7DzIMTmApZVgMBhjcOIEhDZ10sJ1O0VRSpgy4fi55vu0b6UoZZZQcmUALhaR\nJSIyXkRcI+2hXntM4VSOOCHPqY2Toije2Dl7dwI/ASuBL4wxy0XkcREZaJ/2NlBNRNYCwwC/MPbj\nwRGXQKZJZsrBZLJNLnkmH6dxkk8+OSaPNUdrkeBIDuctFUUpQbScg6IoisV3wKfGmGwRuQVLKa93\ncd7AGPMm8CZAenp6SE2OI05wquOnKEoAjDGTgck++0Z5rGcBl5akDQ4q8nt2S+TIcnqkHEVEMAjv\n7W3OtkNJmk6jKDFE2XD83OUcImyIoiiRoshcGWOMZ17MW8AzHtee6XPtjHAZZom7aOOkKEp0MvCk\ne/ls8ximHGrHr0cMBjBGcBLHP9mVI22eoijFoGyEerpn/LRzpShllCJzZUSkjsfmQKzQKrDCrM4R\nkSq2qMs59r6woKGeiqJEM6nxFcjMd5BrEsnKjycnP55cp4Mcp7Ans1KkzVMUpRgUOeMnImcBfxhj\nMkvBnhJBc/wUpWxjjMkTEVeujAN4x5UrA8w3xkwE7rLzZvKAvcBQ+9q9IvIElvMI8LgxZm+4bFNx\nF0VRopkO1dLYtqwi1RIPkpIQjzGQle/gYF4qlY6GtWSgoiglTCihntcAr4vIXuA3YCYwy1a3iwk0\n/FxRlBByZUYCI4Nc+w7wTknYFRenoZ6KokQvubl5ZC6swP504WC+1acyBnKy4fk2QyJtnqIoxaDI\nUE9jzLXGmObARVjKdq8CoVf/jCJ0UF1RlGgjXh0/RVGimGeen0zez1XJWpaE04DTQPY24chzDViz\nZGukzVMUpRiEEup5FdADaAPsBl7BmvmLIWxxF83xUxQlylBxF0VRopl8p0GckD2xFtmemdFOw8xf\nVzN4SLeI2aYoSvEIJdTzRWAdMA6YbozJKFGLSgC3uIv2rRRFiTIccUK+Nk6KokQpI+4bwK+/rSI/\n3x4+N/Zweo4Tk58XWeMURSkWoYR6VgeuB5KBJ0Vkroh8WOKWhRFN8VMUJVpxxAl5+c5Im6EoihKQ\nxMR4JnxxN5KZT1y2tTiy83E4nezbfTTS5imKUgyKdPxEpCLQAGgIpAGVgJjqpbiKi+qguqIo0UaF\n5HgOZumouaIo0cuIez7GkWdw5BscBsRpEANNm9aMtGmKohSDUEI9Z3ksrxhjNpesSeHHXc5Bc/wU\nRYkyqpdPYvb6PUWfqCiKEiE2b9xt9aXyAbH7UsawdXPYKtsoilIKFOn4GWPaAohI+ZI3p2TQHD9F\nUaKVkyq
nsP9oLn/vOETzWhUibY6iKIof+bnOgk6UR1/q2hvPiIxBiqIcE6GEerYWkb+A5cAKEVkg\nIq1DuO4dEdkpIss89l0qIstFxCki6T7njxSRtSKyWkT6HsuHCW5LON9NURQlfJzTqpb1+sLMCFui\nKIoSmOzMXO/Rc2PAGJYt2hQ5oxRFKTZFOn7Am8AwY0xDY0wD4D57X1G8B/Tz2bcMqx6gVw9HRFoC\nQ4BW9jWviYgjhHsUC53wUxQl2mhSozxd0qoC8M1fMRdJryhKWcCAGFOwYG3n5Gh+sqLEEqE4fuWM\nMdNdG8aYGUC5oi4yxswE9vrsW2mMWR3g9EHAZ8aYbGPMBmAt0CUE20JCXHX8NNZTUZQoZNzVnQC4\n9/PF7D2SE2FrFEVRfDBOS9bPadyzfRjDzk2a46cosUQojt96Efm3iKTZy7+A9WG2oy7gGS+w2d7n\nh4jcLCLzRWT+rl27Qnt3V47fcZmoKIpSMlQtl8hzl7YD4KnJKyNsjaIoijcJToM4nUi+E8krWPbs\nOhhp0xRFKQahOH7XAzWAr+2lhr0vIhhj3jTGpBtj0mvUqBHSNW5VT/X8FEWJUi7uVI+k+Di+XLCZ\n696dG2lzFEVR3Fx+Q09w1Rt1zfgBL7xzQwStUhSluISi6rkPuKuE7dgC1PfYrmfvCwsiBQUdFEVR\nopUnL2zD/V8uZvrqXeTmO0lwhDI2pyiKUrJkHc1B8vILnD8ApyG1fErkjFIUpdgE7VWIyHciMjHY\nEmY7JgJDRCRJRBoBzYCwDXnrjJ+iKLHAJZ3q8aad7zd15c4IW6MoimIx/u3frLFzVz/KnvW7dcBz\nkTRLUZRiUtiM33+O541F5FPgTKC6iGwGHsESe3kZK1x0kogsMsb0NcYsF5EvgBVAHnCHMSb/eO7v\nbUu43klRFKVk6XVyTWpWSOLWjxYAkDF2QIQtUhRFwSNoyridwH/+3oHT6SQuTqMTFCWOGWSYAAAg\nAElEQVQWCOr4GWN+9d0nIlWA+saYJUW9sTHm8iCHvgly/pPAk0W97/GgE36KokQ7CY44Rl/Qmps/\ntBy/rxZs5uJO9SJslaIoZZrsHEhMsNYNYAySm0dqhSR1+hQlhgilgPsMEakoIlWBhcD/ROT5kjct\nfBSUc4iwIYqiKCFwTqvafHVbNwDu+3Ixizbtj7BFiqKUJiJSVUR+EZE19muVAOe0F5HZIrJcRJaI\nyOASs8dpkJxcywHMzUNy88Dp5K2fh5fULRVFKQFCGaapZIw5iFV4/QNjTFegT8maFV5coZ5ax09R\nlFihU8OqvHZlRwBueG8e2Xlhi35XFCX6GQFMNcY0A6ba274cBa4xxrQC+gEvikjlcBsyY8J8cDrB\naSwH0CXykpdPuYqp4b6doiglSCiOX7yI1AEuA74vYXtKBNX0VBQlFunfpg7PX9aOPUdymLJCxV4U\npQwxCHjfXn8fuMD3BGPM38aYNfb6VmAnloZCWHnnqe8txy8/317yIC+P6rUrkphUpDi8oihRRCiO\n3+PAT8BaY8w8EWkMrClZs8KMe8YvsmYoihI5RKSfiKwWkbUi4jd6LiLDRGSFHTI1VUQaehzLF5FF\n9hJuVeNCGdjuJADu+GQh2w5kluatFUWJHLWMMdvs9e1ArcJOFpEuQCKwLtyG9L6os7Xiqt9n5/jt\n3qoh6IoSaxTp+BljvjTGtDXG3G5vrzfGXFzypoUPd46fzvkpSplERBzAq8C5QEvgchFp6XPaX0C6\nMaYtMB54xuNYpjGmvb0MLBWjbeIdcVyWbom73GILviiKEvuIyBQRWRZgGeR5nrHyVIJ2YOyorA+B\n64wxziDn3Cwi80Vk/q5du4pl59DhAyBOPBw/oyPpihKjlAkpJi3noChlni5YUQvrjTE5wGdYoVRu\njDHTjTFH7c0/gaiR0nzmknakJDhYsvkAuw5lR9ocRVHCgDGmjzGmdYBlA
rDDduhcjl3AWG8RqQhM\nAh42xvxZyL3eNMakG2PSa9QofjRoj3PbqNOnKCcAZcLxc6NtlaKUVeoCmzy2N9v7gnED8IPHdrI9\nWv6niPjl2pQG3/1fdwA6PzklErdXFKV0mQhca69fC0zwPUFEErFKZH1gjBlfksbc/sSl/jN+6gAq\nSsxRJhw/FXdRFCVUROQqIB141mN3Q2NMOnAFlnJekyDXHnM4VVE0rVmB+lVTALj380VhfW9FUaKO\nscDZIrIGS0l9LICIpIvIW/Y5lwE9gaEeOcjtS8KYLet3WgIvHk5fufKJJXErRVFKkCLlmERkWIDd\nB4AFxpiY6H2IaB0/RSnjbAHqe2zXs/d5ISJ9gIeBM4wx7phKY8wW+3W9iMwAOhBARMEY8ybwJkB6\nenrYW5yvb+tO5yen8M1fW/jmry1MGXYGTWuWD/dtFEWJMMaYPcBZAfbPB2601z8CPioNe8pVSnEZ\n4N535KCKTSlKrBHKjF86cCtWWFRd4BasejH/E5GYqNzpruOnc36KUlaZBzQTkUZ2eNQQrFAqNyLS\nAXgDGGiM2emxv4qIJNnr1YHuwIpSs9yDGhWSmHxXD/d2n+d/5WhOXiRMURSlDPHzJ39YM37gnvHr\ndWF6ZI1SFKXYhOL41QM6GmPuM8bcB3QCamKHF5SgbWHDHeqpfp+ilEmMMXnAnVilaVYCXxhjlovI\n4yLiUul8FigPfOlTtuEUYL6ILAamA2ONMRFx/ABanlSRDU/158IOVori5/M2FXGFoijK8bFk1mpr\nxQ73NE7Dedf3jKxRiqIUm1Acv5qAp4xcLlZ9mUyf/VFLwYyfoihlFWPMZGNMc2NME2PMk/a+UcaY\nifZ6H2NMLd+yDcaYP4wxbYwx7ezXtyP5OcAKX3/64rYAPPbdClZsPRhhixRFOZE5d2hPjNO4F4yT\n7/43PdJmKYpSTEJx/D4G5ojIIyLyCPA78ImIlCNC4U7Fx5Xjp66foignBonxcTxxQWsA+r/0G09O\nipHmWFGUmKPLWa0By+GzXg2r/8qIsFWKohSXUAq4PwHcDOy3l1uNMY8bY44YY64saQPDgdbxUxTl\nROTqUxvy/GXtAPjfbxuYunJHhC1SFOVEIz8vnxu7/Auc9uC5PYi+de1OMg9nRdAyRVGKS1BVTxGp\naIw5KCJVgfX24jpW1RiztzQMDCc636coyonGRR3rUb18Ete8M5cb3p8PwAuD23Fhh6ipP68oSgyT\nm51HdmauteHqSBkDIjgSHBGzS1GU4lPYjN8n9usCYL7H4tqOGdwTfur5KYpyAtKzeQ2v7Xs/X0za\niEm8+/sG9h3JiZBViqKcCCSXS6LPkG7+hduNITEpIbLGKYpSLILO+BljzhOrAN4ZxpiNpWhT2HHX\n8VPPT1GUE5T1Y/rz/dJtDB+/mKxcS3b9se9W8Nh33rl/f48+l8T4UNK7FUVRLO5/7XrWLf+HDYs3\nW/kztvO35PfVtO3eIsLWKYoSKoX+9zeWGsqkUrKlxNByDoqinOjExQkD253EqifOZd2Y/nRtVDXg\neaeNnVbKlimKEuuMHPQMG/6y5wA8ZvxysjSiQFFiiVCGfReKSOcSt6QEcZdzUMdPUZQygCNO+PyW\nbmSMHeAWf3mw38kA7D6czefzYjqIQ1GUUmTLuh38NX2ltWHX8XN1qOo3qxNByxRFKS5BQz096Apc\nKSL/AEewJtCMMaZtiVoWRsRVziHCdiiKopQ2F3Wsx0Ud69nrdek6ZioPfrWUB79aSoJDyM033H5m\nEx7o28IdFq8oiuJi56bd3juMlThTrmIK1esGjixQFCU6CWXGry/QBOgNnA+cZ7/GDNqXURRFgVoV\nk7mzV1P3dm6+NRz22ox19HhmOtsOZGq9U0VRvGjVrRnGGK8Fp5Mj+4/gcGi+sKLEEqHU8fsHqIzl\n7J0PVLb3xRzaoVEUpaxzf98W7rBPg
McG/j979x0eRfU1cPx7drNpEBJ6x6CggBUpdsQKInbsvffy\nw1fFhoiiKPYGKopdxI7SexFBeu8QIPQUIH3bff+YSbLpAZLshpyPTx6m3Nk9u1lv9sxtxwOQmJrF\nGa9P5fKP/sHn17pSKWUJjwi3unjmdvMM6O7p9XiDHZ5S6iCUmfiJyOPA90Aj++c7EXm0sgOrDPpV\nRiml4MHux5Aw+FISBl/K7WfG8/ejZ+edW759Pzd89i+7D+jCzEopm98USfoAxKEtfkpVJ+X5P/Zu\n4DRjTH9jTH/gdODeyg2rYunkLkopVbITmseSMPhSNr3WC4D5Camc9toUVmzfH+TIlFLBlrrbrgdM\n7o8BA+9Ne1G7eipVzZTn/1gBfAH7PgLWRK8OHLnr+Gnmp5RSJXI4hBF35k/i3PvD2XQfMo0st6+U\nq5RSR7L0/ZmF7pxb6/g1atEgaDEppQ5NeRK/EcA8ERkgIgOAucAXlRpVBXM5rcTPq+NWlFKqVOcd\n14jNr/fK209IzqR9//F8OmOjJoBK1UDNjmkMmALr9xnjZ/HU5UGNSyl18Mozucs7wJ1Aiv1zpzHm\nvcoOrCK57K4IHp8/yJEopVToExESBl/K4KtPzDv2+rg1tO8/npf/Wsm0tXuI7zeG+H5jOPb5cRzI\n9gQxWqVUZXI6Hdz8/FUY48f4fRjjB79h2DPfak8qpaqZcnXONsYsMsZ8YP8sruygKpomfkopdfBu\n6NqKhMGX8sSFbfOOjfgngTtHzM/bd/v8nDRgIvH9xrBjXxZJ6TnBCFUdJv0Cr0pz6wvXBIzxs44d\n2JNGwsrEYIallDpINWJUbm7i5/bpHzallDpYT1x4LAmDL2Xc4+fkHXv0/DZM7nsudaNdecfOHDyV\nzq9O5rEfq939wZCxcEsKD32/kOlr95CcnlNiQpbl9pHpPvyp9I0xxPcbQ+tnx/LbIv0Sr4q36t91\nmEIzegI0PaZxEKNSSh2ssGAHUBVyx/h5vNrip5RSh6p90zrMevo8pq3dw21nxAOwuP/FAHQfMo2E\n5EwARi/dwYod+3ns/Lb8vng7n9x8KtkeH2EOB7EBiaKyGGNYtzud96esY+zyXQB5/xYWExFGWk5+\nwte2UW3W70kvUKZlvSgGXHY88xNSGTZjY4FzxzWOYe3uNH554AzSsr3c+VV+623fUUvZn+XhzrNa\nlxjrssR9NIyJoGls1EG/TlV9ffK/rwFr8XYRsW5IGLis9q1ceMvZnNStAy2ObUZk7QjCXGE0almf\nAynpNG3dGL/fj0OXfVAqJEh17t7RuXNns2DBgjLLZeR4Of6lCTx7STvuP/eYKohMKXWoRGShMaZz\nsOM4XOWtn44kyek5rN+Tzg2fzS213PpBl+T1xChOtsfH7PVJXNC+ESLFTyLt8xucjmo1wTRgvban\nf1nGVR2bF0i6gu3W04/i27lbihxf2v9iYqNdLEhIoc+wf/OOu5zClL7dyfR4adekTlWGGlRVVT+J\nSD3gJyAeSACuM8akllC2DrAK+MMY80hZj30oddPQJ7/mt/fG2k9I/sLIJr/vpzPMYf/rzDt1UvcO\nLJq0jPrN6lGnfm3Ov/Fsrn/6SjYuTeDL53+k/WltuPmFPgCMeOFHls9azd2v38wJZ7UDwOfz8c2A\nUWxdvZ1737iFZsc0KRBXxv4M/hu3hOO6HFPkXK6UXaksnb6Kk87tQP2mdct8rVnpWSSsTOSYk48i\nPDK83O/RpmVbSFixlTOv7EpkdES5ryssOzOH9H0ZNGhW75AfQ9U85a2bKq3FT0S+BHoDe4wxJ9jH\niq3IxPrL/j7QC8gE7jDGLKqoWHSMn1JKVb76tSOoXzuCkfedXmry1/b5ccRGuVj60sUFjmd7fGTk\neDnj9am4y1FfX9yhMZ/d1pmFW1KJjXLRplHtAuf9foPfGL6du4X3p6xnX6Y1Cc3b157MNZ1alPn4\no
5fu4LEfFzP7mfOIi7a+ANaOKP+fTb/f8MiPi7iqYwta1Yvm1i/msSctJ++xi/PZrZ24+HjrC2xa\ntoe7vprP/IRUru3UgsXb9nFUvWga1YmgT6eWzN2UzJAJazmheR3OOLo+Hp9hb1oOq3ceYFNSRt5j\nnt+uEZuTMri6Y3Mmr97N0Q1r8/vi7QCc2iqOt687hdYNalErIqxIC+HJAycWG6fHZ+g2ZFrefptG\ntZn4RDcch5iMe31+wipxTTi/37DrQDbN4qpNS2U/YIoxZrCI9LP3nymh7CvAzMoM5oG3biN5Rwoz\nRs0tlPTl89m9qnxePw6nA7/Pz+JJyzF+Q1JiMkmJySSu3cFZV3Zl4LVvs2PDLpbNWEn7048lzBXG\n7x+MJTsjh1evf4eRiZ8BMPvXefz67hjcWW6Sd6TwwZzXCjznE+f0Z9fm3YhD+Hrdh9RtHFfgvNfj\n5YGOT5GVnk1EdAQ/bB1GeETJvQ7cOR7uObEvB5LSaN62KUMXvlnizadAW1Yn8tiZzwHC+BHTeHNS\n/zKvKc6ebUk8cMpTZGVkc+OzV3HbS9cd0uMU9ut7f/PHB+Poced53PJinwp5zNIsn7Wa3z8YQ7dr\nz6D7dWdV+vOVJCsjmz8+HEds/Rh63n1+yLc8+3w+Jnw5DXeOh0vvuxBXeMX3kKnMrp5fAR8B3wQc\nK6kiuwRoa/+cBgy1/60QuV09dYyfUjWXiPTEusHkBIYbYwYXOh+BVV91ApKB640xCfa5Z4G7sdYx\nfcwYM6EKQ692Tj+6Pptf74Ux1tqAadke1uxKIzrcyaUfzAZgf5aH+H5jDut5Jq7aXexjvH71iTz7\nW8lTzT/581Ke/Hkp0/+vO+k5Xo5vVifvy12W28f387bwz4Ykpq3dC8DZb+QnOI+e34ZLT2pKXFQ4\nTWIj844vT9zPZR/NLvb5Suq2CdDz+Ca8e/0pRLocRb5gxkS6+PmBM0u8ttNRdXn4vDYlni/OoxdY\nE/W8e/0pRc490/M46tVy0aFpLHvTs3nutxVkefKX8Lixa0tev/ok3F4/x74wrsC1G/akc/RzY7mh\nS0u6H9eIP5dst1+Xs9R4nv1tGT/+tw2AJnUimfvcBQf1enK9M3EtH0zdAMAb15zIM78u58Hux5Dj\n8fPlP5sLlr3uZJYl7ufWM47imIa1i3u4UHAF0N3e/hqYTjGJn4h0AhoD44FKa4kUEV748X+88CP8\n8s7f/DTkT7xuL7ENY/D7/ezfcwCny4kguCJdeN0e0lMzaH96W9Yt3Iwn24MjzIE4HNSKq0WtOtE4\nnA6M3xBdJ5rIWhEYvyEsPKxA8hZZy2o5czgdRMUUTdq3r9+JJ8dDRHQESdtTiiR+mWlZHEhJx+fx\n4XV7yTyQSXjD2BJfZ/L2FFJ37ceT42HT0i1kZ+YQVSuyxPK5dm3ajYiQnZHD1tWHPlZ26fSVuHM8\neN1eJn41vUISv8y0LD5/+lt8Xj/fD/qVnnefX6mtiT6fj2d7vkpOlpt5YxbR4YzjaNQyOGs+fvz4\nl0z9bhYOpwOH00HPu84PShzl9ccHY/nyhZFgDHu3JXHvG7dW+HNUWuJnjJkpIvGFDpdUkV0BfGOs\nfqdzRSRORJoaY3ZWRCwigsspeLXFT6kaSUScwMfARUAiMF9ERhtjVgUUuxtINca0EZEbgDeA60Wk\nA3ADcDzQDJgsIscaY3RRu1KICLl5TEykiy7x1heNhMGXsmrHAXp9MKvU63976EwOZHmoHRFGs7go\nBoxeyR1nxjN66Q5uPeMoYiJcBVqcApWU9A29+VSGTFib1xrW/a3pZb6O+PrReWMXAT6cuoEP7QTj\nyYuOpVX9aPYcyGHQ2NVlPlbdaBcnt4zj/y4+jhOal/zlM1hEhPu65Q+HuKpjC1Iy3OzP8tA0NjIv\niQsPc5Aw+FLAaknrP3oF383dCsDI+dsYOd9K5MatGE+/S9oxZ2M
yn9x8KnM3JrNoaypev+HUVnE8\n8F3Bjj27DmRz/7cLeO/6jkSFF0wYfX7D2OU7cYhw6UlNAVi3Ow2HwF9Ld+YlfQDP/Gr9/odOL9h6\nmavvqKUAfDUnAYAZT3XnqPq1Dv4Nq1yNA74D7cJK7goQEQfwNnALcGFpDyYi9wH3AbRq1eqwAuvT\ntzd9+vYutYzf78fr8eEKD2PLqkSMMSyespyTzu1A3UaxvPJXP8Z8Nok2p7Sm/WnWzYhBY55j3cJN\nXHhrt7zH6drrVJ4Ydh/b1+/kykcvKfI8jw+9l69fGsVpvTrSpmPRsal16sVw20vX8tfQifS690Li\nSkn6AJq0bsRpl57Kv6Pn0+veC8uV9AF0uvhkzri8C2v/W88jH95drmuKfZyLTiK6ThRet4erHu9V\n9gXlEBEVTp36MWSmZeOKCKN2XOV+1kUEp8sJWYBIXtffYMjYl4nP5wMRMg9kBS2O8tqfnI7f68dv\n/BxISquU56jUMX524vd3QFfPfcaYOHtbsL5kxYnI38BgY8xs+9wU4BljTKmd0A+mn3rHgRNpXjeK\n67u0Isfjw+3zk+3xk+PxkeP1k+P14/b6yfH6cHv9eHx+vHY3IZ/f4PeDz1j7fr/BZww+P3nbfmMw\nhrx/DdY1uYwx1izIuefyJsbKLV+wbKCC5wq+rsCyRX6TptjNEgV226/uDqmz0cFeVM3erMMN93Dq\niphIV7nv5FfGGBoROQMYYIzpYe8/C2CMeT2gzAS7zL8iEob1ZashVs+EvLKB5Up7zpo4xu9geHx+\nUjPcVtfQ+Vs5v10jvD6D2+endf1aB91lMCXDzaz1e/lj8fa8lrrJfc+lRd2oIq1OHp+fmz6fy/yE\nYodM5Rl2y6n0PKFp3v4b49eUmEwA3HlWPM/3ak9SupuGMRF5YxCNser86jgm8WBMWb2bu78++M/8\nFac047WrTuT4lw6vIf2VK09g4570vIQOoHWDWnQ+qi4z1+9lypPdGTF7M29PWlfgunevP5mrOpbd\n9Rcqtn4SkclAcQPTnge+zv2+ZJdNNcYUGKAmIo8A0caYN0XkDqBzZY3xU1XH5/XhznYTVbviuiUn\n70xl0aRlnNy9A41aNaywxy3JhiWbmfDVdM7o3YlTLzyp0p+vJMk7Uxn6vxHENqjD/W/ddlBjNoMh\n40AmHz3yBe5sN498dA91G5X/BmF566agJX72fqoxpu7BJH6F7lp12rKl6GD04vzvpyV5YxoCRYQ5\niAhzEB7mDNh24HI6cDrE+hHB4bD+aDvE+snddtrHRaxyIlb+IHnbYsedexwceXfCxd63X1tA5lG4\nS3ngbmn9zYteV/JjBgr8GJSjO3tIO5SPtDnEtEgOLcUMmsP93R7q5REuB0/1aFe+56icxK8P0NMY\nc4+9fytwWuCXJBFZYZdJtPc3YnU5HwDMNcZ8Zx//AhhnjPmlmOc5pPpJBUdatocsj4/o8DASkjLY\nkpzJee0aEh0eVuaYs81JGdz6xTwSU627yIOuOoGbTzuqqkIPeT6/ocd7M9lQaMbRQPd3O5pne7XP\n2/9nQxI3D59XbNm4aBeNYyJZu7v4u+APnHsM/S4pXx1zOKpwcpe1QHdjzE4RaQpMN8YcV6jM98A5\ngB+oDYQDnxhj+pX22Jr4KXXkCfrkLiXYnduF067I9tjHtwMtA8q1sI8VYYz5DPgMrMqrvE/89rUn\n0/eiY63kzpWf5JVn0K5SSpXHodZPKjhiIl3ERFqD509oHlug+2VZE420blCL2c+E9niRYHI6hMl9\nz83b9/kNDin9xuVZbRrkdSHN8Vo9qVMzPDSuE5F3nbF71ezL8hAb5TqSW1FHA7cDg+1//yxcwBhz\nc+52QItfqUmfUqpmq+rpbXIrMihYkY0GbhPL6cD+ihrfl8vhEFrWi6ZRnUhio1xEupya9ClVc5Tn\n5lJeGburZyzWJC/lvjGllCp
ebs+Y8ooIcxIR5qRJbGSB60QEh0OoVyv8SE76wEr4LhKR9Vjj9wYD\niEhnERke1MiUUtVWZS7n8CPWRC4NRCQReAmr4holIncDW4Dc6YrGYi3lsAFrOYc7KysupVSNNB9o\nKyKtsZK2G4CbCpXJvTH1L9AHmGqMMSIyGvhBRN7BmtylLfBflUWulKpxjDHJQJGB0fYQmHuKOf4V\n1mzqSilVomq9gLuI7MVKIMujAZBUieFUBo25alS3mKtbvHBwMR9ljKnw0eci0gt4D2s5hy+NMYNE\nZCCwwBgzWkQigW+BjkAKcIMxZpN97fPAXYAXeMIYM67YJyn4fFo/hZbqFi9ozFUl6PVTVTrIuqk4\n1e13XJ3i1VgrT3WK91BiLVfdVK0Tv4MhIguqYkB2RdKYq0Z1i7m6xQvVM+aqVB3fn+oWc3WLFzTm\nqlIdYw6m6vZ+Vad4NdbKU53ircxYQ3sJe6WUUkoppZRSh00TP6WUUkoppZQ6wtWkxO+zYAdwCDTm\nqlHdYq5u8UL1jLkqVcf3p7rFXN3iBY25qlTHmIOpur1f1SlejbXyVKd4Ky3WGjPGTymllFJKKaVq\nqprU4qeUUkoppZRSNVKNSPxEpKeIrBWRDSLSL8ixfCkie0RkRcCxeiIySUTW2//WtY+LiHxgx71M\nRE4NuOZ2u/x6Ebm9EuNtKSLTRGSViKwUkcerQcyRIvKfiCy1Y37ZPt5aRObZsf0kIuH28Qh7f4N9\nPj7gsZ61j68VkR6VFbP9XE4RWSwif1eTeBNEZLmILBGRBfaxkP1chCKtmw475mpVP1XXusl+Pq2f\ntH4qfL7E33VVK0es3URkkYh4RaRPMGIsFE9Z8fa167VlIjJFRI4KRpx2LGXF+kDA/2uzRaRDMOK0\nYynX31QRuUZEjIgEdZbPcry3d4jIXvu9XSIiRdbwPGjGmCP6B2vNro3A0UA4sBToEMR4ugGnAisC\njr0J9LO3+wFv2Nu9gHGAAKcD8+zj9YBN9r917e26lRRvU+BUezsGWAd0CPGYBahtb7uAeXYso7DW\nZgMYBjxobz8EDLO3bwB+src72J+XCKC1/TlyVuJnoy/wA/C3vR/q8SYADQodC9nPRaj9aN1UITFX\nq/qputZN9nNq/aT1U4dCZYr9XYdorPHAScA3QJ9q8N6eB0Tb2w+G+HtbJ2D7cmB8qMZql4sBZgJz\ngc4h/jm4A/ioIp+3JrT4dQU2GGM2GWPcwEjgimAFY4yZibU4dKArgK/t7a+BKwOOf2Msc4E4EWkK\n9AAmGWNSjDGpwCSgZyXFu9MYs8jeTgNWA81DPGZjjEm3d132jwHOB34pIebc1/ILcIGIiH18pDEm\nxxizGdiA9XmqcCLSArgUGG7vSyjHW4qQ/VyEIK2bDj/malU/Vce6CbR+0vqpxPqppN91VSszVmNM\ngjFmGeAPQnyFlSfeacaYTHt3LtCiimPMVZ5YDwTs1sKq04KhvH9TXwHeALKrMrhiBOU7QE1I/JoD\n2wL2E+1joaSxMWanvb0LaGxvlxR7UF6T3Y2jI9Zd6pCO2e6WtATYg/XHeiOwzxjjLeb582Kzz+8H\n6ldxzO8BT5P/R6l+iMcLVuU+UUQWish99rGQ/lyEmOrw2qvN77O61E/VsG4CrZ8KH68JyvPaS/pd\nV7Xq9ns62HjvxmqRDoZyxSoiD4vIRqxW9ceqKLbCyozV7sbd0hgzpioDK0F5PwfX2F1+fxGRlof7\npDUh8atWjNW2G3JTrYpIbeBX4IlCd3dCMmZjjM8YcwrWXbKuQLsgh1QiEekN7DHGLAx2LAfpbGPM\nqcAlwMMi0i3wZCh+LtShC+XfZ3Wqn6pT3QRaPykVTCJyC9AZGBLsWEpjjPnYGHMM8AzwQrDjKY6I\nOIB3gCeDHctB+AuIN8achHWj8OsyypepJiR+24HADLmFfSyU7La7lWD/u8c+XlLsVfqaRMSF9
aXq\ne2PMb9Uh5lzGmH3ANOAMrO47YcU8f15s9vlYILkKYz4LuFxEErCa+s8H3g/heAEwxmy3/90D/I71\nJbZafC5CRHV47SH/+6yu9VM1qZtA66dQ/3+0spTntZf0u65q1e33VK54ReRC4HngcmNMThXFVtjB\nvrcjye9CXdXKijUGOAGYbtdnpwOjgzjBS5nvrTEmOeB3PxzodNjPaoI0qLGqfrfoOe8AACAASURB\nVIAwrAHZrckfPHl8kGOKp+AECkMoOOD8TXv7UgoOOP/PPl4P2Iw12LyuvV2vkmIVrMHQ7xU6Hsox\nNwTi7O0oYBbQG/iZgpMRPGRvP0zBAeqj7O3jKTgZwSYqfwKF7uRPnhCy8WL1448J2J6DNfYlZD8X\nofajdVOFxFut6qfqXDfZz6v1k9ZPgWWK/V2HYqwBZb8i+JO7lOe97YjVDbxtNYi1bcD2ZcCCUI21\nUPnpBHdyl/K8t00Dtq8C5h7u81brBdwbNGhg4uPjgx2GUqoCLVy4MMkY0zDYcRwurZ+UOvIcCfWT\n1k1KHXnKWzeFlVUglMXHx7NgwYJgh6GUqkAisiXYMVQErZ+UOvIcCfWT1k1KHXnKWzfVhDF+/LMh\niVu/mMeeA8GeuVUppQq675sF/Pjf1mCHoZRSSqkjXI1I/JLSc5i1Pom0HG/ZhZVSqgr9uymZtbvS\ngh2GUkqVKCfLzf96vsGNHZ5i+Zx1wQ5HKXWIakTi53JaL9Prq77jGZVSR6ZIl5Nsjy/YYSilVImG\nPDmC1Ys3s29vGk9f+U6ww1FKHaIakfiFOQQAj89fRkmllKpaUZr4KaVC2NC1Exl92WZShsfhbejQ\n1Q+VqsZqROLnCrNepiZ+SqlQE+lykO3RukkpFZpGbJpK7bAc6kdnkPpynWCHo5Q6DNV6Vs/ycjly\nEz+9TaWUCi2RLifZXm3xU0qFHr/fcFRUEi8fPQaXwIx9rfnFeVKww1JKHaKa0eLn1K6eSqnQFBmm\nXT2VUqHp341beLD5DCIEnALnxW0G/SqlVLVVIxK/MKd29VRKhaYIl4Ms7eqplApBCUmptIhIL3As\n5fToIEWjlDpclZb4iciXIrJHRFYEHBsiImtEZJmI/C4icQHnnhWRDSKyVkR6VGQs4U7t6qmUCk1R\nLic52uKnlApBy7fvQgody4irEW0GSh2RKvP/3q+AnoWOTQJOMMacBKwDngUQkQ7ADcDx9jWfiIiz\nogJxhVnVlldb/JRSIUaXc1BKharYiEgAnIBgfWnMbK8tfkpVV5WW+BljZgIphY5NNMbkrqI+F2hh\nb18BjDTG5BhjNgMbgK4VFUuYPbmLWxM/pWosEelp9yjYICL9ijl/h4jsFZEl9s89AeduF5H19s/t\nFRlXpMtBliZ+SqkQtOvAAcIFIgQisb40mrgwjNEeVEpVR8Fsr78LGGdvNwe2BZxLtI8VISL3icgC\nEVmwd+/ecj1RuC7grlSNZvcg+Bi4BOgA3Gj3NCjsJ2PMKfbPcPvaesBLwGlYN6ReEpG6FRVbpMtJ\njldvSimlQs/irYm4BESsnlNhAmT4WDxzDb4yZiPOOJDJzs27qyBKpVR5BSXxE5HnAS/w/cFea4z5\nzBjT2RjTuWHDhuW6Jkxn9VSqpusKbDDGbDLGuIGRWD0NyqMHMMkYk2KMScXqsl64G/shiwhz4NbE\nTykVgpK87rwxfiKCA2ho0njuyrfp0/px/P7i667Hu7/M1c0e5I4Tn+a5K9+ssniVUqWr8sRPRO4A\negM3m/y+AtuBlgHFWtjHKoRLZ/VUqqYrb6+Ca+zJp34Rkdw6qdw9Eg5FeJhDW/yUUiHJFbB2Q+4X\ntuioLACy0rN5uvebLJ21psA1CasTWbNgU97+wikrKz1OpVT5VGniJyI9gaeBy40xmQGnRgM3iEiE\niLQG2gL/VdTz6qyeSqly+AuItyefmgR8fbAPcChd0SPCn
Pj8RiefUkqFnJhagfPsWd+hMr35k7us\n+Gcdz/R+k52b9+Qdm/HrvAKP0bbjUXz18i98O+g3vB4vxpgSWwqVUpWrMpdz+BH4FzhORBJF5G7g\nIyAGmGRPnjAMwBizEhgFrALGAw8bYypstgPt6qlUjVdmrwJjTLIxJsfeHQ50Ku+1AY9x0F3RI8Ks\nalhb/ZSqucox+VSEiPxkn58nIvEB5yptOSynePO2xe70mZ5axzoQMMHLvPFLAMhMy2L6qLkFHiN1\n9wF+fHM03732B5fG3cUVje6lV507+bTfQY/2UUodprDKemBjzI3FHP6ilPKDgEGVEUtuV0+vX1v8\nlKqh5gNt7R4F27GWj7kpsICINDXG7LR3LwdW29sTgNcCJnS5GHspmoqQm/i5vX5qRVTUoyqlqouA\nyacuwupKPl9ERhtjVgUUuxtINca0EZEbgDeA6wsth9UMmCwix1bUzfNMv7fowQPZRQ4tnLKSv4dP\nYdvaXUXOJW1PLbCfk+kG4LcPJ+BwOrh3UHFfF5VSlaHMFj8ReUVEwgL264jIiMoNq2K57BY/nUBB\nqZrJXkbmEawkbjUwyhizUkQGisjldrHHRGSliCwFHgPusK9NAV7BSh7nAwPtYxUiPMzqSqUtfkrV\nWOWZfOoK8ruf/wJcINZUm5W6HFbHJk0wgDHG+gFymkWCPctn7r8r5qxj27rd1r4UWvK9lKUffnlv\nHAmrEisqXKVUGcrT1TMMmCciJ4nIRVhffBZWblgVS0QIcwhe7VOuVI1ljBlrjDnWGHOM3cMAY0x/\nY8xoe/tZY8zxxpiTjTHnGWPWBFz7pTGmjf1ToTe+Il1WNaxr+SlVY5VnAqm8MvaNrP1A/XJee8ha\nRNVDgBhHODGOcDDgT7K/SwUkeFkZOfYhKeZRSmAMGMPg2z+pqHCVUmUos6unMeZZEZkMzANSgW7G\nmA2VHlkFC3OKTu6ilAo50eFWNZyRU0yXKqWUqgAich9wH0CrVq3Kfd2yhCRiTo8gQqyeCTFOILPQ\nTSq7RU9ErIXd7X2HU/AH9GSIrB1BdnpOwHXWPweS0zDGHFzSqJQ6JOXp6tkN+AAYCEwHPhSRZpUc\nV4VzOXWtLKVU6KkdoYmfUjVceSaQyitjD7+JBZLLee0hTTwFkJnuJUKciP1fOE6IseqsBs3rgt9f\noOVPhLz99qe1LXDu5Z/78vw3D3P9U70Z+GvfvOPJO/cx87cKm8hdKVWK8kzu8hZwbe4gYxG5GpgK\ntKvMwCpauNOhXT2VUiGnVoR1Jz3DrYmfUjVUmZNPYS17dTvWbOl9gKnGGCMio4EfROQdrMldKnQ5\nLJfD4MABAhhwioNj2zXil81Ps3bBJp6/+u1CVwhhLqFZ60YMGPk4Po+XueOWcvzpbWjVzuqB2u2a\n0/D5/ETWiiDb7iL62i0fUTuuFp0uOKGiQldKFaM8Y/zOCJxZyhjzG3BW5YVUOcKcgserXT2VUqEl\nt8UvPUfH+ClVE5Vz8qkvgPoisgHoC/Szr63U5bD8MdYNc0HyumI6/OHUjo3mlHPb06p98yKTt7z8\n0//4fOFg6tSrTd3GcVxyx7l5SV8up9NBlx4nFzg2oM87FRW2UqoEZSZ+xVUgxpjkygmn8ricDjza\n4qeUCjG1I+3EL1tb/JSqqcox+VS2MeZae4KprsaYTQHXDrKvO84YM64i49rt2Z0fIwZBuPEoq8OX\nM8zJp/++UuSa+BNalOuxn/z0Xuo1icvbd2d7GDdi+uEFrJQqVaUt4B5qXE6HTu6ilAo5MZEuANKy\nPUGORCmlCsrxOfN3jJX8Hdssf4ygiND06EYFromKLt+CpFG1I7n6sZ4FVn94/9EvmfLjP8wbt5j9\nSWmHFbtSqqgalPgJbq92pVJKhZZa4U6cDuGAJn5KqRATYWrn79gJWrMGBSeHueT2bnnnWrVrRq3Y\n6HI/fp8nenHlo5fk7
Ruf4c27htH/6ne466SnyMlyH3LsSqmiDjrxE5GLRGRSZQRTmSLCnLpAslIq\n5IgIdSLD2J+liZ9SKrSc3/Y4wFrAHawWv6ZxsQXKXHJHdzqddzxtTj6K57956KAeX0TodVd3wlzO\nIufSUzN496HhPHJWf2b9obN+KlURSkz8ROR8EVknIuki8p2InCgiC4DBwNCqC7FiRLocZOsCyUqp\nEBQb5eJAlo7xU0qFljs6dQTshdmNlQA6C623F1O3FoP++D8+mjWAo9od/NrxrY5rxosjHyeyVtEu\notNG/sv6RZt59cYPWTZztbYAKnWYSmvxextrsc/6wC9YUwh/ZYzpZM/sWa1Eupxke7TFTykVeupE\nubSrp1Iq9PgKL6ouOCphofXTe3Xk3D6nIY5iHtteFP6pi1/lmqb3sW7hpqJlqsjOTbt5+95P+f2j\n8XmtoAATv53B+w8PZ/uGXUGLTanyKG0dP2OMmW5v/yEi240xH1VBTJUiIszJXk9OsMNQSqkiYqNc\n7MvUxE8pFVraHdME3878fT9+/H6D01nxyd+9r9+EKzyMqJhIVv67jlX/brBOCHlLRniyPcz8dS7H\ndjq6wp+/JFnp2Qx98huy0rLZuDSBxPW7cEW42Ls1ibsG3cD6hZv58OEvyMl2M/bzybQ4rhkDf3uK\n5m2bMuX7WayZv4ErHu5J/WZ12b5uJy3bNSciKrzK4lcqUGmJX5y9WHte2cD96tbqFxWuY/yUUqEp\nNsrF9tSsYIehlFJFpPm9xDisr4vZxktEca1yFSCmbi0e/eBOANYu2MSTF76Cz+vH7/MD+a1rZ1/Z\ntVKev7BlM1fz4WMj2LI6MfDpAfDkePjj4/EsmLSUxHU78Xq89qynsG3NDu46sS89buvO1JGz8WR7\n+POTCThEcIY7adSiAZ8ve4swV2lfwZWqHKV96mYAlwXszwzYN0C1Svwiw3SMn1IqNMVFu9ink7so\npULQH2mN6V07ERFYllWXbpXU4hfouM5H89GcV9i7LZktaxL5+qWfiY6J4vgzjiUrPbvUayd+M4P3\nHhqOMXDm5Z14/vvHcDhKn8swKz2bWb/9R90msQzv9wPb1u7AW9p3RgGv20vCim15+7EN65C+LwOf\nx4fxGcaPmGa3VlpF/Mbgz/aye8te/vlzAedc3bXMuJSqaCUmfsaYO6sykMpmjfHTxE8pFXrqRoez\nL9ON329wVNLddKWUOhRZJoJf0o9BxOpxeZ6zapKV+A4tiO/Qgi49TubKh3pwdeP7+Gf0AuZPXMp3\nGz4ktkFMsdd9NeBnfHYPr9m/z+e+jk/z1uQXiWsYS/KOVL4b9BvLZ60mbX8m7kw3Nz5zObN++4/N\nK7fh91pdWa1WxnxOl5OYurXYn5TGmZd1wp2Vw4KJS3N7oBIe4WLAr0+SnpLB4Ds+ImNfJgAuVxhN\njm7Ezo178Put1ku/z8+bd37MJ/8bwakXnMT/ht1LeGTpXT/d2W4cToe2EqrDVuInSET6FjpkgCRg\ntjFmc6VGVQmsWT21q6dSKvTERrnwG0jL8RIb5Qp2OEoplS8E7kUZA8bvL7C9YNIytq7ZzsW3dqN2\nXC2Wz17D4qkrcOUmUcZggK2rd3D3iU/x9FcP8XKfd/C6C86gPPy5kcU+pyPMQZP4Rlz3ZG9O69WR\nuo1jSd+XSUzdWogIv743honfzKD7dWdy4S3n0LBFfQB+SvyUjx4bwaRvZ1C3SRwfzH6V7IxsXBEu\nZv/+H0P7fk1OlpvUXfuZMWoOCyYtpfe9F3L7gOsA2J90gIHXvUvKrlTc2R6M37Bv737CI8J5d8bL\ntD6xVeW8yapGKO3WQXG3UuKB50VkgDGm+P9TQlSky0m214cxxpqWWCmlQkRctPVFZV+mWxM/pVRI\nyfI4iXb5MAYycoLT4uQKD+OVP5/i/Ye+wJPjYczwqfw0ZDQ+r4/J38/imsd78c79n+F
1Wz27xCFE\n1ookc781djotJZ0XL3+zXM8lDujasyNXP34JJ5zVDld4/muuUy9/QftrnriUa564tMj14REu+n56\nH3e+cj0xdWsR5grLW9T+nGtOY8znk9m8YhsYg9fjY/+eA4x6azT1msTRrE0T/vx4AivnrC3S6uh1\nZzHwuneIP6El/xt2H3XqF9/iqVRpSuvq+XJxx0WkHjAZKDXxE5Evgd7AHmPMCfaxa4EBQHugqzFm\nQUD5Z4G7AR/wmDFmwkG9kjJEupwYA26fn4iwoguFKqVUsOQme7qWn1Iq1KzdcB+xDX/B63MQ570n\neIH4YW9iCp4cD98M/CXv8MYlW3jzrqEFJ2AxcNWjl7BtdSIzfy26+Hvbzq2Jq1+HBZOWYfzWhU1a\nNyR1134Q6NLzFDqed8JhhVu3UWyRYzF1a/PxvNfJyXIz6dsZjHjxJ9xZbnKy3Hz61Le4czw4nQ6M\nMYSFh+H3+3E4HPh9PkDYsWk3uxL28NhZL9D+tLY0aFmfqOgI+jx5GeERetNQle2gb90YY1KkfE1m\nXwEfAd8EHFsBXA18GlhQRDoANwDHA82AySJyrDGmwgblRYRZfdKzPZr4KVUTiUhP4H3ACQw3xgwu\ndL4vcA/gBfYCdxljttjnfMByu+hWY8zlFRlbnUirKt6vE7wopULM0CuvZ/y6jtSJiODs+PigxVG/\neV183hK+FhaadfPsq7ty49OX44pwkXXFEOaPX5J37snh99PjtnPJyXJzU+tH8GR7iKwdyfBlQ5j7\n1yIcYU7OuqJzJb4SiIgKp/d9F3Hm5V0YP2Ia37/6K257LVef10/tuFr83xcPUqd+bZxhThof1ZDf\nPxjLHx+Nx+P2smPjbnZu3A0CYa4w9mxL4rwbzuakbu21V5sq1UEnfiJyHpBaVjljzEwRiS90bLX9\nGIWLXwGMNMbkAJtFZAPQFWvR+AoR6bKSvRyPD7QrlVI1iog4gY+Bi4BEYL6IjDbGrAoothjobIzJ\nFJEHgTeB6+1zWcaYUyorvthou8VPF3FXSoUYhwi9jjsu2GHQ8thmPP7RXbz70PAiiV6uiKhwft4x\njIioiLxjg0Y/zY+D/2DiNzO56tGe9Ljt3Lyywxa8ztIZqzn1ghMIjwinW5/Tq+Kl5KnXJI4b+11J\n8o5UZv02jxbHNiErPYf73riZjuefWKDsHQOvp3nbpkz/aQ7LZq7C5/Nj/Aaf18+4L6Yx+bvZ3PLC\n1dzwzJVV+hpU9VLa5C7LKfq/Vj1gB3BbBcfRHJgbsJ9oHysurvuA+wBatSr/ANfcxE8neFGqRuoK\nbDDGbAIQkZFYN5zyEj9jzLSA8nOBW6oquFr2GJJ3Jq2j14lNq+pplVKqWul553l06dmRDx/7gh0b\n93Dxbecy9cfZ1IqrRdPWjeh19/kFkr5cN/a7khv7FU2IGraoz4U3n10VoZdIRHj0w7t49MO7Si3n\nDHPS887zOP+ms/n700k4HMKW1dtZv3AT6xZuJCczh5Vz1lZR1Kq6Kq3F72rAHbBvgGRjTEblhlQ6\nY8xnwGcAnTt3LuGeT1GRLrurZ0ndBJRSR7LmwLaA/UTgtFLK3w2MC9iPFJEFWN1ABxtj/qjI4JrE\nRgIQHa7d0JVSqjT1m8Yx4Ocn8/b7PNEriNFUvfAIF1c/lv+a9yYm83zvwWSmZXHpfRexau462p/W\nFmMMnhwvG5ck0PrEVkTVjsSd7ebjx0eQsmsfF916LstmrebEc9rz23tjqB0XzYPv3kHy9hTand6W\nxLU7qFM/Jm+2UmMMy2asIiI6gnZd2wTr5avDVFri95Mx5tQqimM70DJgv4V9rMJE2uP6stya+Cml\nSiYitwCdgXMDDh9ljNkuIkcDU0VkuTFmYzHXHlKPBJfTQaOYCDo0rXN4wSullKpRGraoz2dLhrB2\nwUaePG8AIETXiWLf7v3UaRhDTkYOtevW4qwrurB
v7wHm/Dkfr8fHf2MX4/cb/ho6Eb/PjzPMyf2n\nPIXD6aBO/Rj27z0AAt2vP4s189bTpmNrZv/+H2B45utHOOfq0u6dqlBVWuJXlaNDRwM/iMg7WJO7\ntAWKTsN0GGpFWC81w62z5ilVA5Xr5pKIXAg8D5xrjzkGwBiz3f53k4hMBzoCRRK/Q+2RABATGUZa\nttZPSimlDt7a+RswfoM72407240xhv17DgCQk+lm9NCJhNkz3Ie5nHg9+Q0h4VHh1rqHfkN2dg45\nmTl55SZ/NxOfx8fWNdsxfoM4hDX/baD1CS1pdFRDVs1Zy6q56zmj96l5s63e9+YtTPp2JnUbxXLe\njWcx5fvZNG/ThM4XnxyU90blKy3xa1jMIu55jDHvlPbAIvIj0B1oICKJwEtACvAh0BAYIyJLjDE9\njDErRWQU1ngbL/BwRc7oCdaXKkC/WClVM80H2opIa6yE7wbgpsACItIRa8bhnsaYPQHH6wKZxpgc\nEWkAnIU18UuFSs30MHdTckU/rFJKqRqgW58z+GvoJPYmJmMAn9tL3aZxJG1LJjo2mrTkNBxOJ73u\nPp/G8Q1p1KoB0378h553nUdaSjpRtSMZM3wKy2asotu1ZzBj1BxiG9Rh/94DOJ1O6tSvjSvCRWSt\nCNbM28AfH46zzicdwOvxMWrIaLIzrPulm5dvZXdCEs4wB399Oont63YiDuHewTfz798L7XUSi++i\na4xh95a91GtaV5eoqASlJX5OoDaH2PJnjLmxhFO/l1B+EDDoUJ6rPHITv3RN/JSqcYwxXhF5BJiA\nVbd9ad9wGggsMMaMBoZg1Xk/2zMP5y7b0B74VET8gANrjN+qYp/oMKRkuMsupJQ6othrI/8ExAMJ\nwHXGmNRCZU4BhgJ1sNY6HmSM+ck+9xVWt/T9dvE7jDFLUDVOXMM6fL7sLQAyDmSyZ0sSRx3fAofD\nwZ6tSXz10k+0bNec65+6HIfDmveicHfNM6/okrf91BcPApCwchsrZq/hrCu7ULdxHH6/nx7hN4KB\nfXsPgBj8Pj9ej48weyJFh8OB3+/DYRwcSErDne0hIjqC4c/+QHZGDitmrSG2QW0WTVnOWVd25czL\n85/3tZve558/5lG3cRy3D7yeYX2/Jv6EVrw29jnS92XgdXtpEt+oUt/LI1lpiV+KMWZglUVSyWpH\n5Lb46XTpStVExpixwNhCx/oHbF9YwnVzgBOLO1cZpq3dw3nH6R81pWqIfsAUY8xgEeln7z9TqEwm\ncJsxZr2INAMWisgEY8w++/xTxphfUMpWq040rU/MH2feqFUDnh7x8CE9VvzxLYk/Pn+khMPh4JK7\nL2D8F1Noc0o8p/c+lcVTV3BL/z7sWL8LYwyde5zMF8/9SL0mcZx73RkM6/s1Rx3fkgUTl+LOsm5y\nvvvAZ+Rk5DB95D+88tezLJ+5is49TmH27/Pwur0cSE7js6e+IS0lnfULNzJqyJ/89OafYAxPfHo/\nF916bkkhq1KUlvjllHKu2omJtJqL03O0xU8pFbruHDGfxS9eRN1a4cEORSlV+a7AGhYD8DUwnUKJ\nnzFmXcD2DhHZgzVkZh9KBUHfT+/jkffvwBXhQkS46bmrATi5W4e8Ms9++2je9nuzXgFgV8IeJn49\ng+PPPI6X+7yVd/7la4aQmZbFz2+N5oJbujHpq2k0ad2Ilsc1Y/54qwF777bkvKRxxqg51KlXmyXT\nV9L7/oto3kaXQSqvsrp6HjHCwxxEhDl0jJ9SKiQN6XMST/2yDICOr0wiYfClQY5IKVUFGhtjdtrb\nu4DGpRUWka5AOAUnlxokIv2BKUC/wImplKos4ZEHf3OySXwjbnvpWgCGTO7PX8MmcnrvTrx20/tg\nwO833Na/Dw+/dweRtSLx+/wsnbGKZsc0Jjsjh9m/zcOT46HbtWcw8Lp38GR7mPnzv/R58jI+f/pb\n2nQ8miFT+he
7lqOylJb41T+cyV1CUUxkGGna4qeUCkF9OrXIS/yUUkcOEZkMNCnm1POBO8YYIyIl\nzgYsIk2Bb4HbjTF++/CzWAljONaMws8ARYbpHOpSM0pVluO6tOG4LtZ6gC//8TS/vP0X5990No1a\nNcwr4wxzcuoF+SMtfksegTGGTcu2IFgTwbizPXwzYBSeHC+bV2xl2YxVdOnZsapfTrVRaZO7hKKY\nSJe2+CmlQpKI0K5JDGt2pRHmOGKqXaVqvJLGDwOIyG4RaWqM2WkndntKKFcHGAM8b4yZG/DYua2F\nOSIyAvi/EmI45KVmlKpsXXqcQpcep5RZTkQQEdqc0pr7376NhROXceOzVzFqyJ/8N24xTqeDVu2b\n883Lo0hPzeC2AddRO65WFbyC6qO0xG/nkTS5C1gTvKTr5C5KqRD18c2ncsHbMzj32IZlF1ZKHQlG\nA7cDg+1//yxcQETCsWZE/6bwJC4BSaMAVwIrKj9kpYLvsgd6cNkDPQB47scnWL9wE02PbsyUH2bx\n0xt/4Pf5SduXwTNfPRLkSEOLo5RzR9wt59goF/uyNPFTSoWmYxrWpmOrONw+f7Hn/X5DfL8xPPDt\nwiqOTG1LySS+3xieHLU02KFUS2nZHnYfyAZgb1oOPr82OtkGAxeJyHrgQnsfEeksIsPtMtcB3YA7\nRGSJ/ZPbPPK9iCwHlgMNgFerNnylgs/pdNKua1tiG9QJdighr7QWvwuqLIoq0qB2OFu2ZgQ7DKWU\nKlGUy0mW21fsueXbraW6xq/cVZUh1Wg+vyEpPYcFW1IA+HVRIk9efCzN4qKCHFnJ/tucwqmt4ghz\nlnZvt2pdO+xf1uxKY+ELF9Jl0GTuP/donr2kfbDDCjpjTDLFfN8yxiwA7rG3vwO+K+H68ys1QKWq\nmcsf7EHGvkzS92Vwa/9ref/Bzxg7fApnXdWVF3/qi71Ob41V4l8FY0xKVQZSFRrUjmBvWg7G6J1G\npVRoSslws2BLarHnPCW0BBZmjGHGur14y1m+qpXW2pOYmon/IFuDVmzfz75Md4nP9dHU9SWeL8ub\n49dw2mtT2Lk/O+/YmYOnsmFP2iE9XmVbsX0/1336L1cPncO/G5NDJs41u6w4Jq/eDcCnMzYFMxyl\n1BEqzBXGrf2v5cF37sDhdDDm88n4fX7m/rWQxHU7+ODhz3m5z1skbU8GIHX3Ptw5+b0B/X7r76Yx\nhlFvjeb1Wz5g+wZrKK3P58PjLr7nYFZGNkP/9xUfP/4lmWlZlfwqD13o3A6sAg1jIsj2+Mko4W66\nUkoFW+4X5OKSn/JOTjVh5W5u//I/vpu7pUJjOxhur5/k9KKzyr/810qOeW4sK+zWy0CTV+3m7Dem\ncfRzY8t8/JQMN9keH9tSMun94WyuGTqn2HKzNyTx1sR1nDJwUt6x539fLZ/zowAAIABJREFUTkJS\n+Xp/jFthta6OXrKjwPEL35lZruur2sj5WwFYlrifGz+fG3JxNom1WkqjXIe+YpTfb/h42gb2Zxb9\nArYtJZOkYj53SqmaJ6p2JK1PbEVkrQjqN63LvDGLmDBiGnP+mM8HDw/n82e+5aZWD3BL64dI3bOP\n/le8QQ/X9Qy56xPmj1/CNwNGMfXHWQy89m22rNrGNQ3u4vI6tzFv7CKSdqTw9EUD6dfzVVL37OeH\nQb/y17AJ/P3pJL7u/1OwX3qJalTi16C2ta7H3jT9o6CUCm3b9xW9Y/jMr/nLPSxPzE+cvp27hWuG\nziHH68MYQ2JqJgAJyZmVH2gxfH7Dwz8sotOrk4v0sBjxTwIA8xOKdirJbQ0qi99vOPWVSbR7cTzn\nvDkNgI17iyZy8zYlc/uX/xU4tjkpg+/nbaX7W9PLfA6ArSnWe5ibkB+qTXvTefiHRbi9RVthD2R7\nyp2IlqVLfL0ix8Yt38m3VXwTYM+BbJ7/fTlur5+Z6/YWiAUgwpX/9WPgX6uI7
zem2MdZsX1/kZsg\nvyxKZMiEtbz4Z9F5TM55cxqdX51c4NjkVbuJ7zeGNbsOFDie7fGxZNu+Q24NVkqFNhHhw39fY8iU\nl/hs+dvUqR+DOARnmIOYerWZMGI6Xo+PrPRs/vljPgsmLgEDk76Zjs9rNRI5HIIr3MX0UXPIPJCF\n1+3l13f/5psBo1g6fQWLpyznx9d+JcwVBggiEBYeukuh16jEr2GMlfjp3UClVKhq3cCaevqL2ZuL\nnNsTcNPqso9ms81OSl78YwULt6Ry3Avjaf3sWL6akwCAs4RlIfx+w8It+YnXq39bX7wXby2+i2lh\ne9NyeOnPFcUmMQCPj1zMpFVWEpdewtqpxV17ZpsGBWIszv4sD2e/MbVccV7/2dwC+9keX7m6v17+\n0WyOfm5smWUPZoKSfr8tZ8yynSwq5j1+YuQSur81vUK65hbXKvzg94t48Y8VRRKfiuL3G/5csr1A\n/C//vYrv523l/35eyhvj1+QdHzl/GwD7AlrrvvzH+qz7/Ial2/ax356EbcX2/fT+cDafzyrYLfRp\ne73LWeuthHJBQgpDJqyhJAP+WglAz/dmFWhpXrQ1lSs//ocJOmZWqSNWeGQ47bq2JapWJBfddi6P\nfHA3tw+8nkc+vJtL778QZ5iT2rHRnHFZJxq1bEBEdDhtOrbm9N6dePSju7nqsUsZ8Nv/0aVnR8Ij\nXbgiXFxwyzk0bd0IV0Q4rggXTVo35sZnr+LGZ6/i+meu5LYB1+H3+xn/5VR+e28M7uzQublU2uQu\nR5ymsZGANYakuLuiSikVbC9d1oE7RsznxOaxRc5d17kFoxYk5u1PX7eXW08/qki5xFSrtbCkxK99\n//HkeP1c0K4RX9zRheF2knnVJ3NIGHxpmTG+OmYVfy7ZwRnH1KfnCU2LnP972c687f1ZHmIiXQAF\nWnU+nbmJ+889psB1mQFJYo7XT1Qxd01nrd/LjoDxdoG6vTmNmU+fV2Lcn0zfSK8Ti1tHu6Bldmtq\nbgJdkp8XbOOGruVbDPu/zVai7Sg0sUByeg5T11hLt+3L8uT1TDlU+0uZuTohKYN2TYqf9c4YQ47X\nT+QhdMH8du4WXhq9ku37sniou7Ugc+5ncPTSHaVdWsB93yxgypo9dGwVx+8PncXX9vs/efXuIp8V\ngE5HWX/H+wz7F4BbT48v9nFzYwHo/eFs+nRqwbjlO4m3b7LMXJfE9V10UXOljnQiQs+78udDuvOV\nG+nT9zKiY6Jwhjn5dOlbbFu7g6M6tEBE6HFH/t+TBs3r88PWYbiz3TRoXh+/30/j+EY4nQ7O6XM6\nDoeDW/tfm1d+7OeT+eR/I/D7DIkbdvLYR/dU6WstSY1q8WtVPxqHwOak4HR/UkqpsuR+Md9TTJf0\nwuOi9mWUfhexpImscuzWtilr9hRpect0e3F7/WxLyWT1zuJbiOZusgbF/7JwO9cN+5f3Jq8rMYYD\nWcW3+KVkuPl0xkZ2BSRxBwLWWc10e/PK/b44P9kdMmFtic+1NSV/YpjiYt+0N531u9Pz9nfuz8p7\n/eNX7OTPJdsLlB8+q2Cr6wPnHsOsp8/j4g6NAasV72AVnlCuU0C3xC6DCnaNzfb4uOfrBazfXf5u\npqmlfCYiXU4WbknNe28DtX52LO1eHF/g3I59Wbw+bjX/bkwusXV30qrdvDTaalGbuDK/q+7SbfvK\njNXvN4z8b2ve/hQ7AV681br254XW731+gtVKOnXN7gI3D+pGuwo83nO/5/8+SpsI6ZeFiWS4fazc\nYX1G7jq7dZmxKqWOTDF1a+MMs/62RkRF0OaU1rjCXcWWrVM/hgbN6wPgcDg4/8azOfe6M3E4iqZT\n+5PT8Hv9+Lw+DuytnN4Wh6JGJX4RYU5a1I1mcwWNpVBKqYpWJ8rqiPHG+DWs2L6feXaSBfD1vwXH\nab09qeSEC8o3GczUNflf1hvFRNCh/wSuH
voP57w5jUven1Wk/NbkTHYfsJLSyat3819CCu9NXg/A\n2l1pRcZq7U7LZn5CSrFJ6Ovj1nDG4CmA1SV02pr8sWBZHmt8xamvTOJ/Py3NG7e4pYxxiz/ak5t8\nOmNjkXN/L9vJoz8uzts/4/Wp3PT5XPakZfPAd4t4fOSSAvHvOlCwZXHDnnRa1oum5wn5rYbbUjKZ\nsno3D/+wqMBkI16fnxyvj4F/rSowhuyJkUvytguPNzcG0gJaPRdtTWXy6t1c9O5Mcrzlm5RseDFd\nhHPNT0jhmqFz6NB/QollOvSfwDsTreR60NjVfDpjEzd+PpfB4wp2pdyWksnbE9dy7zcL8o7ljpcr\nTxdYl1P4eNqGcifPCxJSGDa9YJfPnxcmFvj/I7flFOCF31dw8bszOPP1KWU+9gnNde0vpVTFuvLR\nS7jglm6ceUUXHnzvTvx+P+898Cm3H/sos36bF7S4alRXT7DGzxzM3VOllKpK0eH51XLvD2cDsGpg\nD3I8Bz/+a9eBbL79N4EX/1zJm31O4oyj6xdpMVuQkD/mLLeVccX2oncnV+7Yz7Q1e4okn7n6DJ1T\n7DIUd46YD8D85y8s9jpj4ONpG4rEtTkpg+yA17w1OZP1e9ILX17E87+v4Pnfi076UZIFW1LpOqjs\n5AAgNsq6CxwRlt/ymju5TG6Mfz16Nq/8vYovZm/m2k4t+HlhYoFWtMBJe7oMKjgJCcBJAyay5pWe\nRLqcTF2dn8hs3JNBh2ZFE5QPpqznnUnrWPDChWV2E126LX9823O/LycuysXTPdsVWTfyg6kb6Hvx\ncewMiPXLfzbT/7IOefs3fDa32AmIThk4iXvPKbsFzeMzpd64ePiHRQX2c7tzFnb9Z3OttS89BV/D\nTwu2lRlDrvAQWu9QKXVkiKoVyZPDH8zbXzF7NVO+n0V2Rg5v3/0J51x9GgAet4cwV1iVrS9Y4xK/\nk1vG8dHU9aRl5487UUqpUNbjvZlsS8n/kv33o2fnJYW5syQWZ/ravUxfa7Wi5U6IUVhpLUSQP4HJ\npR/MLrVcSWsP5jorYEKWe85uXeB5i+u+eesXBWfjvGl4yXdIT24Ry9LEostDAJzWuh6PnN+myOMd\nivPaNQTA7Su+9W359v3c8/V8JtsJW25Xxd8WF+xC6vX5C6wLWNjpr09hSf+LC7xHhb8TvPzXSlrU\njeYdO3kqPJNlrqE3n8qD31tJVHTAmMkf5lkto+2b1inSmgdWN9hFWwt210zJcFOvVjhbkzOLTfpy\nfT6r9M9UeYxZVvRz3TQ2stj3LS7aRdb+Q1+mqaYv6KyUqnwNWzYAAxHRETRrY/Uaef+hz/l72ETa\nn96Wd2YMtGcGrVw1LvHrGl8Pv7Hucp/XrlGww1FKqSLOaduAWeuT8vYDkz6AEwImfsn9Up+rXq1w\nUsoY+3cwhs3YyLtldCktj8AxYi/07lBmwnkwrjileYmJ32e3dc5rqTtU/z13Ad7/Z++84+SmrsX/\nPZJmZqvXZW2z7gZsbGNswMb0HgjdL0B4pEESCCG9vRD4JXkvISFAIKQ8SCVAeCQ0k4TewRAgBmzA\ngDE27r3uevvOjKTz+0OaGc3u7Hptttr3y2cY6erq6qj47hyd5isjBgY16DqyrD0TsdJlaB0f97k7\nXs+7v63Z0ZTmO/ctzGu75I7XOX/maH7z7Ac7lXfUoGJGDizm1ZXVnBLGIwJtrGJAnutrlCOvbZs5\n9at/e4NXlm8v0LtnaE9Z/jA1AQ0Gg6EnGD52KDe/di1LF6zgqHNmoqo8+oegvuyKt1ez6t217H9I\n98cbd5t/g4jcJiJbROTdSNtgEXlaRD4IvweF7SIivxGRZSLytogc2l1yzRw3iIriWJs3sAaDYc9G\nRE4TkSXhPHNlge0JEbk33P6qiIyLbLsqbF8iIh/tblnv/PysdrdVlsU73LcrlT4IrHFugZitYydU\nFujdO
3RUh+3DKn0AwwYUZZU+gKP2q+TEA4bu9ngdKX0ZHnhjXd76htqWTil9EGSxvPsLR7DsmtNx\nbCubqbUzxz1geHm723ZV6dt3aCn3X34kN10wnRs/Pp1HvnYMlWVxHvnaMbs0zs5YYeL2DQZDP2Ds\nlNGc8pnjKa0oRUSY+dHpxItiDBxawagDRrBxxWYuOfBbfG7S11nzfvfoKd3p2H4HcFqrtiuBZ1V1\nAvBsuA5wOjAh/FwG/K67hCqK2Zw/YxSPvbMxm17bYDDs2YiIDdxCMNdMAT4hIlNadbsEqFHV/YFf\nAteH+04BLgQOJJjTfhuO153ysvC/Ty247dcXHgLAl09om94e4MLDRnfqGJP2yf3Av7xAqvydkZFj\nV/nuRw/Yrf2ifOPkCTz69ZzycM7BI7j9c4d1uM/iq1v/Odp9bEu47bOH7XJSkI4sU3//8lEfVqw8\nLEtwdiN2LZNcaFf48gn7FYzhnDisnMPGDebcQ0dx/oxRTB1ZwfwfnJJnsd5dfvmf0wu2F8UKn/N/\nHDwCgKtnH8hXT9w/2/7+T7ruuTAYDIZd4aePXMXv37yB2xb/iqKSBPff9DBrl6xn3Qcbufvav3fL\nMbvN1VNVX4y+MQ+ZDZwQLv8FmAt8L2y/U4O0b/NEZKCIVKlq+8ErH4KvnzSBZxZv5lO3zuP0qVVM\nHTmAQSVxYraFSFBnKfMdlMGSNvEV7ZHpJiII4KvS+RK/PUtPRzX0RhxFb0Ru9PRpZmKwMtdXQhls\nS1CClP6+D0rwPGblzHxHntXoOJa0fy6qwXGjz7YUkIHwO2ZbHDth9y0kXcAsYJmqrghkknsI5p33\nIn1mAz8Kl+cAN0twQrOBe1Q1CawUkWXheIWzTXQRFSUxVl57BuOveiyvPRWmqf/OqQfw27lB5sob\nPz6dax59j6kjK3hmcZClc+rIAQWTtGT45kcmcvldCwC48vRJHDSygrvmrebfKzpn1YnZbR+O/zl7\nCp87ejwLVtdw3u9eKbjf7PAHeCHOmT6CkyYN45v3vtVuHwji0g4cUcGjXz+GO19ZzfjKMvYf1r6l\nCqA4bnP58fvx+wLZPltz3MShvLh0a4d9RIQrPjqJi27rfOzgbz99aDbZTWsOHTOo0+PsjE8dvns1\n6W66YDo/e6z9QujtccVpkwBYdd2ZeVlRz5zWtsZjIe7+whF84k/zCm6b/4OPFIxfPGR04eu1+OrT\n2vybAfj5+dO5+KhxHBJe55XbGnn0nY27VbfQYDAYugLLshh9wMjs+uTDJ/DUHc8DwpQjJ3bLMXs6\nxm94RJnbBGSCD0YC0RRc68K2NoqfiFxGYBVkzJjd++NWURLjwa8czQ1PLuGJdzftUoFZg8Gw6wwo\ncnj7R93uIdkRheaYw9vro6quiNQCQ8L2ea32HUkBumJ+ajUeq647k/vnr+W7YXKWY/cPXCyjxdnP\nOGgfzpk+Akvg5ueX8atnPuCLx+2Xjd/6wZmT+emji7P9CxVpP3NaFWdOq+Lwnz2TLdfQHl87af+8\n7KNDSuNsb0xxTCjbjLGDWHXdmTQkXRKOxYTvP57tO2pQCRBYHN/flMuw/LFDRvLL/zyYpOvxzXvz\nj3f+jFHMCROlfPaocXxkchCffeCICq4/f1q23+2fPYy7X1vDU+8Fyu9P/2Nq3jj7Di3NLj/xzWN5\n/v2tXP9ETtG565LD+fSfX+Xy4/alKekyf3UNnzlibLvXYdqofMvVVadP4trH3+fMaVXZ5CSfmDWG\nu8NadQcWyMoJ8PEZowD4w2dm8MX/W9Du8Vrz7VMmctPTSzlm/0rW1jRlS138+JwDO9zv5EnD2NqQ\nzBaqBxg3pIRzDx3FhGHlnH1zx4l8ohw+fnDe+g/PmsJPHgnep5w9vX0l/yezD+SHDwb1/47cb0i2\n/fbPHca85dv5w4sreOBLR1FZluDV/3cyh/8sl3n1yW8ex7jKUh740pG
c97v89y8iwpKfnkbS9Tn/\nd68wY+xgfnDmZOKOlVX6AH7ziUP4xQWFrYYGg8HQG5zymeMZuf8+eK7PQcdO7pZjSHsFfrtk8MDi\n94iqTg3Xd6jqwMj2GlUdJCKPANep6kth+7PA91R1foFhs8ycOVPnz++wy05RVRqSLjWNaTzVwEKn\niq+BVcMP2zo3Vv6yolnrYUf79EZCsW687X2G3jjHnrbvqpL3jGWeO1Vwfc2zXFsZa5y0/6xG23b2\n3GcUj/bGyy2DLcJBozrn3iUiC1R1Zqc6dxIROR84TVUvDdc/Axyuql+N9Hk37LMuXF9OoBz+CJin\nqneF7X8GHlfVOR0dsyvmp53h+0p9i0tFpJC1qlLXnGtrSLqUJRzqWtIkHCuvFIHr+SiBRTa6f3Vj\niiFlCVKujwg0tLhUFMewLGFHU4qK4hgiwbJtCWUJB8/Xdl0LVZXtjam8pCiZeda2hPqWNGWJXDpr\n1/PZVNdCeVEsG6NX25RmQPGHT3ndlHIpjtk7HSfl+ixct4PDxg3usF+mr2MJliU0pdw8pRiCGMTy\nohi2JSRdD0uEmG3RlHJJOHZoXQ/k2daQpK45uB71SZeRA4spitk0pVxa0j4taY/BpfGspaol7VEU\ns3E9n7oWl8GlhWNAVZV1Nc2UFzkMLMnvs6WuhaHliawMvq9srGuhKekyenAJdc1pqptSDCsvoihm\nkfaCuaU8Ufh+tKQ9VAMra0esq2liUEmc0sTO30E3pVze31TP+CGlDIqcY01jiuK4jWMJrq/dbsHr\njvmpp+mJuclgMPQsnZ2betritznjwikiVUAm/dl6IBqYMips63ZEhPKimCntYDDs2XRmjsn0WSci\nDlABbO/kvr2CZUme0gfBnBZtKwt/VA8oMMcVUtREhCGhghZ3gu3RH9pRpSG67BRw/YyO2ToTpoiQ\n2aX1/OvYVtYymKH1ee4urZWy9og7VqeUvkzfjsaPXqeo4l2ob2VZInutonmnS+IOJQV0uoyi49hW\nu0ofBNd79OCSgtuGDSjKW7csYWQkmU1RzG7TpyM6q3y1vscdURJ3CrrDRp9Nx3htGgwGQ4f0dNXS\nh4CLw+WLgQcj7ReF2T2PAGq7K77PYDDslbwOTBCR8SISJ0jW8lCrPtH56XzguTDu+CHgwjDr53iC\nJFQfviicwWAwGAwGQw/Sba6eInI3QSKXSmAz8D/AP4H7gDHAauACVa0OEyjcTJAxrwn43M7cPMNj\nbA3H6QyVwM5zWfctjMw9Q3+Tub/JC7sm81hV7fJMMCJyBvArwAZuU9VrRORqYL6qPiQiRcD/AYcA\n1cCFkWQw3wc+D7jAN1X18YIHyT+emZ/6Fv1NXjAy9xS9Pj/1JLs4N0Xpb/fWyNt99CdZYe+Qt1Nz\nU7fG+PUlRGR+f/PLNzL3DP1N5v4mL/RPmXuS/nh9+pvM/U1eMDL3FP1R5t6gv10nI2/30Z9kBSNv\nlJ529TQYDAaDwWAwGAwGQw9jFD+DwWAwGAwGg8Fg2MPZmxS/P/a2ALuBkbln6G8y9zd5oX/K3JP0\nx+vT32Tub/KCkbmn6I8y9wb97ToZebuP/iQrGHmz7DUxfgaDwWAwGAwGg8Gwt7I3WfwMBoPBYDAY\nDAaDYa9kr1D8ROQ0EVkiIstE5MpeluU2EdkiIu9G2gaLyNMi8kH4PShsFxH5TSj32yJyaGSfi8P+\nH4jIxYWO1UXyjhaR50XkPRFZJCLf6AcyF4nIayKyMJT5x2H7eBF5NZTt3rCeG2F9tnvD9ldFZFxk\nrKvC9iUi8tHukjk8li0ib4rII/1E3lUi8o6IvCUi88O2Pvtc9EXM3PShZe5X81N/nZvC45n5aS+Z\nn3Y2L3V0j3uDTsh7nIi8ISKuiJzfGzJGZNmZrN8O57O3ReRZERnbG3JG5NmZvJdH/p29JCJTekPO\niDyd+psqIueJiIpIr2b67MT1/ay
IbA2v71sicumHPqiq7tEfgppdy4F9gTiwEJjSi/IcBxwKvBtp\n+zlwZbh8JXB9uHwG8DggwBHAq2H7YGBF+D0oXB7UTfJWAYeGy+XAUmBKH5dZgLJwOQa8GspyH0Ft\nNoDfA18Kl78M/D5cvhC4N1yeEj4vCWB8+BzZ3fhsfBv4G/BIuN7X5V0FVLZq67PPRV/7mLmpS2Tu\nV/NTf52bwmOa+WkvmJ86My+1d4/7sLzjgGnAncD5fVzWE4GScPlL/eDaDogsnwM80ZflDfuVAy8C\n84CZfVle4LPAzV153L3B4jcLWKaqK1Q1BdwDzO4tYVT1RYLi0FFmA38Jl/8C/Eek/U4NmAcMFJEq\n4KPA06parao1wNPAad0k70ZVfSNcrgcWAyP7uMyqqg3haiz8KHASMKcdmTPnMgc4WUQkbL9HVZOq\nuhJYRvA8dTkiMgo4E7g1XJe+LG8H9Nnnog9i5qYPL3O/mp/649wEZn7ay+anzsxL7d3j3mCn8qrq\nKlV9G/B7Q8AInZH1eVVtClfnAaN6WMYonZG3LrJaSjCf9Rad/Zv6E+B6oKUnhStAr/wG2BsUv5HA\n2sj6urCtLzFcVTeGy5uA4eFye7L3yjmF7hyHELyl7tMyh25JbwFbCP5YLwd2qKpb4PhZ2cLttcCQ\nHpb5V8AV5P4wDenj8kIwwT8lIgtE5LKwrU8/F32M/nDu/eZ+9pf5qR/OTWDmp9btezKdOef27nFv\n0J/u0a7KegmBJbq36JS8IvIVEVlOYFH/eg/JVoidyhu6cY9W1Ud7UrB26OzzcF7o+jtHREZ/2IPu\nDYpfv0ID226fS7UqImXAA8A3W73h6ZMyq6qnqgcTvC2bBUzqZZHaRUTOArao6oLelmUXOUZVDwVO\nB74iIsdFN/bF58Kw+/Tl+9mf5qf+NDeBmZ8Mht5ARD4NzARu6G1Zdoaq3qKq+wHfA37Q2/K0h4hY\nwE3Ad3pbll3gYWCcqk4jeFH4l5303yl7g+K3HohqyKPCtr7E5tCthPB7S9jenuw9ek4iEiP4UfVX\nVf17f5A5g6ruAJ4HjiRw33EKHD8rW7i9AtjegzIfDZwjIqsITP0nAb/uw/ICoKrrw+8twD8IfsT2\ni+eij9Afzr3P38/+Oj/1k7kJzPzU1/+NdjWdOef27nFv0J/uUadkFZGPAN8HzlHVZA/JVohdvbb3\nkHOf7g12Jm85MBWYG85nRwAP9WKCl51eX1XdHnkGbgVmfOijai8FNfbUB3AIArLHkwuePLCXZRpH\nfgKFG8gPOP95uHwm+QHnr4Xtg4GVBMHmg8Llwd0kqxAERP+qVXtflnkoMDBcLgb+BZwF3E9+MoIv\nh8tfIT9Q/b5w+UDykxGsoPsTKJxALnlCn5WXwJe/PLL8CkHsS599Lvrax8xNXSJvv5qf+vPcFB7X\nzE97+PzUmXmpvXvcV+WN9L2D3k3u0plrewiB+/eEfvIsTIgsnw3M78vytuo/l95N7tKZ61sVWf4Y\nMO/DHrdfF3CvrKzUcePG9bYYBoOhC1mwYME2VR3a23J8WMz8ZDDseewJ85OZmwyGPY/Ozk3Ozjr0\nZcaNG8f8+fN7WwyDwdCFiMjq3pahKzDzk8Gw57EnzE9mbjIY9jw6OzftDTF+vLaymnFXPsrmut7O\n3GowGAw5Uq7PKTe9wI8fXtTbohgMBoPBYNjD2SsUv3teWwPAFXPe7mVJDAaDIUfcsfhgSwO3v7yK\n/ux2bzAYDAaDoe+zVyh++w4tBWDDjuZelsRgMBgK8+rK1rXTDQaDwWDoGszLRQP08xi/znLejFHc\n+NRSPtjS0NuiGAyGPoaInEaQnt4GblXV6wr0uQD4EUHdr4Wq+smuliPl+jvvZDAYDIZuQ/0mUjv+\nCz/5LwLbiIvSglKGxKZh+RsQbyPg4ZNGwv2EGNgTkNiBkH4DpATxFgM+YCHFn8cqORe/5quovxm1\
nKtDiy3CKjibdNIdU86OgdRTZQxCrDKn4NU5sDL7fBOk38Bt+A/GjoPlJxF+ePS6A4oBzJOIuAJoI\nEgZ7MOBqrJJzgz7JuWjN5Sh+uE+GIiAIgxJrDDiTID0fSi7FT88DKcUq/y7plrmkW54CFDt+GKRe\nxPZWQvxYPHs/RIZC+jXs1HNAA2CjJLDKvgXNc1BvBRRdgJ9+C9xFoQRWeH1sQFHi+PYU1HsPSGA5\nI8FdHIhpVWEljiY+4ErEGti1N30vY69Q/Iocu7dFMBgMfRARsYFbgFOAdcDrIvKQqr4X6TMBuAo4\nWlVrRGRYd8iS9oziZzAYDD2BqoeqICLU1P2OZGohjnModvIeinRVtCMAIrVo+kUCZSVQu0QVkWBZ\nSSPee/jeewhgIdlt4KNNfyLZ+AccCRztLL8Rv+G/aahPZw8VFxvxasGD1LbjqFUXRRkgRVgiaPoN\nLKw8pS8gjbovIVl1LvRua/gNZBS/+l8Dub8xuTFyuS/UWw3eGkRAG67PKonNzQ/i4mX7eam5lEgc\nEUFbHiataRTNtgWXzUVw8et/iiDBmM13oWjk2F7YN3MNXMSdj4oPNKBuNRL2Vn8dXtO91DY/R0vi\nFBLOgVSUzUakBEgjEm9zVQyF2SsUv0Rsr/BoNRgMu84sYJmqrgBjT7raAAAgAElEQVQQkXuA2cB7\nkT5fAG5R1RrIFoTuMv7fGZP42WPvd+WQBoPB0K/Y3jCH2uanqCy7iKL4RNLuRjbW3kxdy0tAI1CG\njeCTRBlEWWI6zamFoA0oTYBGFB+hNHE04BKzhjJqyI3UNT/LuupvYpMmsDb5KODgZ38IKw9RbjkU\niZ1T2kTyXSQ1sE2Fh8nbptHv1vsBKpG+qhFpM8oSOY1Mg3EEQVE01zl/XJFwJyFqx4NiSByfWy06\nDRpyScTypM4TMziWn9eoWRnz9s8oxeF6tC0yWrA9c90i512Y/Gudux5Bu+9vorHxNhqB7bXfIZU9\nd3A1zsjB1zKk7EIak2+wausXAB/PT6FkPP5iQCq8hxVUFM8m7S2jOTUfHw+fGIqNRRrwKXGOpXLA\np/G9jTSkXmH4gC9Tmpje4Rn0dfYOxc9Y/AwGQ2FGAmsj6+uAw1v1mQggIi8T+KT8SFWfKDSYiFwG\nXAYwZsyYTgkwc9xgAC75y3xWXXfmLohuMBgM/QMNrWOu30hD6gPW1d6O69dT7Exhc+NfKZXtiEBN\n0xO0qE1CvJwbpQBan9UDlE3Ut2zGEkUiykFuUWlMvhTsS5yShkPZ1nA7lrZETV0IwYQuEWWrXl1i\nCIlsCoxgVF990upjiYdoYHezATRQzDyUFD4eioOFhZLAydgGSWoaJXAQtRFUhLS6edcojZdVKlsy\n1jCUBk0Sw8GVGOWl3yPd+Cs8bSEmNnFrX0gcg1V8GmoNh/QCcGYg7ABnSnZsq+yL+PEjwV0JCNLy\nAuCTcleD+zaE6rOQxkJI4xHHDs8tkDlyA2jRFI7YuOoHiimabQuUVR8QmhFUIS4eTeG1LRInUHwF\nLBX8jBVQIK0eDlao7AoxrKyanlKfenVBwtvYSoO0SLG59iaGlF3Iph0/x/ULvaNNRpZrqW66M/sM\nBGN6EUstNKbnUrt9Lk7Yp7rpcRo0jmDhyDAqS89kaOmJ1CeXMzAxg4riSdlnva+yVyh+ttV3b4DB\nYOjzOMAE4ARgFPCiiBykqjtad1TVPwJ/BJg5c2anIulL4rkXU339D4bBYDB0htrkIl7b9EVUXVzN\nWFuEGA4WTdjhr/YaXgp+2kvOWNR6CtRMW8Tq1coglaf85boIvrpsbHqTtB8PlTk/b/c0EA9XfEBV\nqNZ01o4k4bEtAvkC2bzswaxwrdm3qNPhVDkNpLSRbW4JzX6c4U4LG9wyBlktVFj1+GFMm2PFsWUf\nbEmQ1mbAxrImgTQSj02gKD6ZFr8ckvfh6hhKK86nyKrEtwbjF
M8m2fIyxYnjsWK5eDcBcEaGayNp\njRWfBvFpwbkWn4mnSSxfwFuKbQ8l5q4ibo3ATb9HSWwEbup9XG8VvkxiW8M9DIzF8RmI5z6L4mMx\nDt+K42k1vjYQ082oetT7CZKaIKU269zBDHWaKbfqKBUXW2y2cgBF+j4xTYVKXeQ+AqiHr+H9aHMW\nua5eq+0+FlijUFXKio6iMbUA1Zb2x9BWy1J4W+s2S0EF0v4WNtTfzur6O7PPoFKKR4qEPZSktxVb\nEog4zBx+M4OLZrQjSc+yVyh+BoPB0A7rgdGR9VFhW5R1wKsaBCKsFJGlBIrg610hwKR9BmSXF66r\n5eDRJnDdYDD0b1bW/oW0n/9uTPDxaWmTTl5FaPJjxCwXXy0UIYWFI4GSJhr8yM9Et/kaWJEsAouf\nCLh+qISphYpNI/ugfg0eQlPjywCUyFAmD7yAIUVHUd34V9R3iTkHsLTuNloUYppJ16L4ODRqjGJJ\n42oMCx9bPBQhSTxQMlAs1SB+TcEXYWW6HKE827YsXYaIsFKLgUGZCxF4WEqCUruKOnddcB34ILR9\nbQb+FSqeCqzGr/lXXnRcwI14wJjiE6lOLaPZW5u31UdQFazwOnoIFjFsiQMt+BGLY5CCJhRPwFeL\nmFWGp2ksHsnq1oEdb1AoWxOW1IVuoAlgTBtNTcSjPuWQsCoQqUAV0lpNTCqzxxIUC58ySZJUm5TG\nUAIroaUeTcSJkWaINKJikVZwBJIaZ4CdZGjiENamNtHo1uG5G6hqWciwAV8jLQNZtPUmiq0G4pJG\nUNJYOKrErEAF99XCQ7FFQ3dUK1DwwxNRBFcluD6ipNQhY/wMtgf/E8k4EDcCNklvMwCeNoPCsprf\nM6vqT/QFjOJnMBj2Zl4HJojIeAKF70KgdcbOfwKfAG4XkUoC188V3SFMU9LdeSeDwWDo4wwtPpZN\nTc+i6qJZt0XBogiPXGktTxUb8LFxfQvFC1wosXDVwkfxVFAEO9zaTCIbmacKjpThauBoCWBLnCJn\nKI1etISXkCRBafEplBXPoKzkuHB/j9dq/46vSVQhhZ0Xz9akMWw8PCzSGhzTDxO0ZH/8a/53m+Xg\n8FmrUGbR0xS16UDpkzxrU2gDE80qFdndW1mlBFjV9CKOeFitrKaWBlc+Y7W0VfE0jWo6cJONjGVH\nxgzE9Ul6DSAesXB/bSWLqhcoSOJnFXCNnGfmArU2nhVaV4R6vyhiuRWaNJHtk8Jiq8ZysY/h8Zr9\nARwy9I+s3PAFUrRgI1hWkGSmLDGLZopo9Nt60RTLPhxV9b/MXf8pEloPouHz6RML40DtcLds3tHw\nOcwo4D6BlVEj1mbFwZJEmLDGQnGxJM7QkuPbyNBbdJviJyK3AWcBW1R1ath2A3A2kAKWA5/LuEuJ\nyFXAJQQvdr6uqk92l2wGg8EAoKquiHwVeJJgbr9NVReJyNXAfFV9KNx2qoi8RzA/fVdVt3eHPCmT\n2dNgMOwBjCw/m4rEFMAi6W2n2d1MeXxfymMTaHLXsiP5Hp7XyIDEQTS6S4hZAym2h1Mam8iGhsfw\nNUnKa6Q2tRhbSkn6W2lKb2JgYn/2L56BY5WzrXk+SX87U4d8lzX1DyLYeJqk2BlGVcmJLNnxZ5rd\njVSVnkRdahkDE5OoLM53txOxOa7qVt7Ydj0N6e046uJrA8FP+DJcduCTwBEHiwQxewAlseHErDLS\nfhPVLYtQmhFKGRzfn4Z0iiZvGSIuaR88lZwljZxi5WmgNLT4DsWWGyZUCWjj0apBf4kodhk0VIR8\nzfNUzLZH1zODFlRUyc+jkt2kEiSlybjDas4y6JPR9ARflXg0a6hmlEMCdUoT2JKmLL4/gxPTSKbX\nsy25gLhVjhDD01qEGIhPRdEEymMHsKxuDkoasLGljJgU4Wodxc4YSp3hFNkJxg2YTZEzhKOrbmZF\n3f0MSkxlUCKIbRyYmMQRV
b9gQ+NcbBxKYiOoaVlM3K5g/4GfpCw2mqOqbqEm+R4OJXyw43YqYhNI\nOBWk3UZiTgkjy0+jyBrOmoYHGJQ4hCHFB7Ol6V+owqDEVMri+9HgrmB93YOUxyczrPQYmt11FNlV\nNLlrcKQUFZ8B8QPoK0h3FXQUkeMIinncGVH8TgWeC39sXQ+gqt8TkSnA3QQZ9kYAzwATVdUrPHrA\nzJkzdf78+Z2S5/gbnmfDjmY+uOaM3T4ng8HQ/YjIAlWd2dtyfFh2ZX568K31fOOetwBMgheDoQ+z\nJ8xPuzI3GXafO1f+lKX1C7LrGZfLuLg4kv+SL0whgyOKjYslkNZAxbLFI4afVcAUzaWeEQvPtxAB\nR7ysRSzjrKgQpkcR/NCW6YfbBT9rJcz08xFctYgJuAqWBEqnYgftloenNp7mJ01M+TZuGLmZcZfM\nWMZGl0zk8v3blMc1dDGdnZu6zeKnqi+KyLhWbU9FVucB54fLs4F7VDVJEEOzjEAJ/HdXyTNjzCA8\nv3uUXIPBYPgwzBg7qLdFMBgMBkMB6lMN3LfuIRrTzZQ5ZcRsh5kDp7OwdhF1bgPbWrbT7Cf52MjT\n2LdsLHetvo9VDWvYntpKVZGFLUrKB19timyPJDZJ347E1WmolmVKxjsEilngXuqpT5Hl4vqCZQWl\nFVyVMA7Pw8nkAlWwLcVTn2JnAPsk9qPYKqYuXc3almWhKiih66eLiOL6Nil1sPERgZTGcPCwJXAS\nzaTD8dQnYfk0+w6eb+GLIAqOBMqihuqlpxK4P4axj57GWd4IP3z7OiaVTyRuxxhdMpI6t4EDyvZl\nQfXbzN/xNgeU70dM4lTEyqhO1zB94IEcVDGJxfXLKLGLGFc6utCtMewGvRnj93ng3nB5JIEimGEd\nhVISfQheWLqV7Y0pWtIeRTFT3sFgMPQd9hlQlF0+6EdP8s6PPtqL0hgMBsPex6vb3uaBdc/iqktD\nupnhRYNZ27SRZt3exsXykY1PR6xrAde9fwuOBb7mrHkrmoZg4VPspCmx07S4Stq3idsuuUhBzcbP\nZY7jIziEcXs4pLxEoN55gS3PtgI1Lu2BbTlhCQWLcmsAY4vHcd7oyxgQZvz01eeBdbeycMe/GRyr\nQsSnya2n3q3F1WSrmoKQVjtQ2gCy1juLei+QVwislxpuz7WGsXqpOL7kYjBT/nagmqWNK9DQipm5\nQhnL5bKGVXnX9+ENz1BildHsN6BATMoZWzKaj406icOGTO3kHTUUolcUPxH5PuACf92NfXe5ThbA\n9sYUAMu3NnDgiIpdPazBYDB0G45tccVpB/DzJ5ZQ3+KScn3iTuvcdwaDYW9BRAYTvBwfB6wCLlDV\nmlZ9DgZ+Bwwg+J1+jarei2GXaUg38dPFuayLqrChZSuW+BTZuaQqrRW01slWfPXbthFYwpq8eLY9\n5TpZVQkg7UNMQkubhJklxQ7rFAoapNMEhLQW0ZAcQbPfRExSjCk9gIQV493alVTEynB0NFe+eSf1\n6SYOGrQvzW6SJXXbqE5XMWvwRNY3b6PeLaEuXYZtNzKiaChnVh3EY5seyMqXyWoZFKIPkvA4FqT9\nQMG0xM6LC8yeL0H20EI1FDK1+jJJabLtBa6nKjR4DdhB8lSavEbeqfuARe8t5+YZVzG6ZJ8Cd9HQ\nGXpc8RORzxIkfTlZcwGGnUmpDrBbdbKi3PjkEm7/3Kxd3c1g2CNZsLqadTXNzD64Sw3sht0gbucU\nvdrmNEPLEx30NhgMezhXAs+q6nUicmW4/r1WfZqAi1T1AxEZASwQkScL1Rg1dEzKT7ezRXB9iL6H\ny8TERZO1QNDmqmRrFGaUG9e3aNQYFi62paQ9KLKDwu+KkHRtEjYETptBTJ3rx7DEBwn29zRwGfUR\n6pMJVBpDWWLU7AiSTItAU0s1G5qrs0df07wt72xe3PZO/un5CT5I1/F/LYtoYRDlTpJmL4a
rNnHL\nRVVJ2B5WxhXUF2JWUH7BAjQsg+ARxgP6FmBjiQaZMtUiG2+oOVU3rwxCNPtnZDntCyJBg6fZqEaa\nvWgRdsOu0qOKn4icBlwBHK+qTZFNDwF/E5GbCJK7TABe6w4Znl+ytTuGNRj6Jef9LgijNYpf7/PM\n4s3Z5YakaxQ/g2HvZjZwQrj8F2AurRQ/VV0aWd4gIluAoYBR/HaRwYkKzq46nic3v4zn+3ji4+Dg\n4+JrnKTnU2QlSNhxXN/nvNGn8uD651Df5aPDj2RTahv7lY1j5pADWbjjfW5f+Q/SvoctFq7vhSUp\nihAEwSPlh4qiD7YFyTSk/EBJsoWsAphzhgzIZAONWtRyheXziZZg0J303ZKqRSgi7efUgrQfhEU1\nuGHiGSusuSd+tqwDRLN35tY9VWJWRtmzOGLwNDYkt5L2XS7f93w8TfPIxhdYXL8iuAZic9G4s4hZ\nMeasfQoFqr06WrzA4VSAUruYM0ccy8TysZ2+r4a2dGc5h7sJJq1KEVkH/A9wFUGVx6clePLmqerl\nYfr0+4D3CFxAv7KzjJ67ylnTqnjk7Y1dOaTBYDB0GfNWVGeXT7xxrsnuaTDs3QxX1cyPlk3A8I46\ni8gsIE5QKsuwG1y2//lctv/5O+8Yct7okwu2jy6p4tR9jmVHqh4Lhyvf/Cvv1S8P4uBUQBSbZKiY\nCR42TmhRUyxQH9sKVDWfbFQdGdVK0DyXSBBUg0yffrgetUZGrZKZsg+Z/SVs83ywLaHZtZFs3k/F\n8yWoCSg2KVdDeWxQRcXG9x0scbPxfkUSp9l3sbCYWDaGC8cfy6zKcQyKl7e5TodVTi94/U6rOman\n196w++xU8RORBcBtwN9a+5d3hKp+okDznzvofw1wTWfH31U+efgYo/gZDIY+y0VHjuXOf6/ubTEM\nBkMPISLPAIWClb4fXVFVlYzPW+FxqoD/Ay5W1YLFQHc3P4Jh56gqf1s+n3o3yYaGWv659m1GFldw\n0sgDWFC9jEW16ylJ2Lm4NhVcivOsdmlAQrOZKqRcQfFxrKAAgxdmBVUVbMsNkruESp7n+1iWkHYd\nfARLfCzxs5bDwKqYOZgNCpalYebNQLFDBdcLijq4vgNhPs+YDWhQ2iE414zVMGdG9DSe3VbtWiQc\nG09h/rZNvLH9AU7ZZxotaZcWTbOirpqjh4/nBwefRsLuzfySey+duer/CXwOeF1E5gO3A09F4vP6\nBSVx84AZDIa+ywUzR+cpfjuaUrywdCv/eHM9d5i4ZINhj0NVP9LeNhHZLCJVqroxVOy2tNNvAPAo\n8H1VnVeoT3isD5UfYU/FVw0tYGH8WfjTViSwojW5aVSV0lgckaBQuRXxldyRbObnbz/NnNUL88Zd\n3rCd5UteIea4iFg0tiTIRAbmdlfCMugogoWHZYW1+gQsy8HzwMfOum1aAmk/hhup9q7YqA++WlkL\nYvYIGlj0ciHkPo6lGVNicJxQh/P9YJmMG6faNLjgWIqvGRkVXwVVK8wGKhCRD8D1rOx5eqo8tOZd\noulc7l35Jot3bGbOyZfgq9LsBvGVMcsmbtvZe+Cqj4VgiWTvD0Da83AsK6/N0Hl2qg2p6jLg+yLy\nQ4KkLLcBnojcDvxaVas7HKCPMHyAiZcxGAx9lyFl8bz1g69+OrvckHQpS5iXVwbDXsRDwMXAdeH3\ng607iEgc+Adwp6rO6Vnx+ie1qRb+9v6bbGyuY/LgYfxowZOUOHG+OuUYNjfXc9vS1/BVmTRgKAkn\nxsLqDbmdM+UbQksboRHWjnlBzFuoQEVj69KuheP4YUbL/GyXrhdkdPbDvh4Orgu+htpXdizNO75l\nZdOjZAmsdrni7UFbMI4quJ5PTIJSDW4YSCVh/T/NxhJKKHfgSuqHbp6uBGY+VbBCj9PArixYVqQE\nRWgNtKxcFlLfz51LqOECsHD7Bsbf9bO2aUGj5xPukrA
dzhp1IF+ddjSXzr2XZfXbccTiZ4edwaRB\nw1hZV01NshlflZnDRjF1iMn42RGd+iUhItMIrH5nAA8QlGE4BngOOLjbpOtCqiqKe1sEg6HPcuKN\nc7nr0sMZOdD8O+ktqiqK+dcVJ3Lt44t57J1NedtWb2/kwBEVLN1cz6m/fJGzp4/g26dMZHxlaS9J\nazAYupnrgPtE5BJgNXABgIjMBC5X1UvDtuOAIWHGdIDPqupbvSBvn8f1fU7+5x/Z1hLkFrTsIGtm\nbbKFnyx4NqvwIPB+bZgIsFUZh4juEmwW8F3Bjml+dspwH/Ut0m7Uykdep3SoSRYqZ5BZ9zwJLZAZ\nhc6LWNgsJg/Yh98e83G2tNTT5KYYlihjZOkgfPVZ3bidDY07qCwawL4DKvnj4n9zx7JXGF5UwYbm\nakqcOA1uCrDzjhm4gWqg6GUVQghyrUg2I6cb1HnHESHtKWIF8YLZ02yto2bcQ91cdpjW5S9aJ6Vp\ndl3uW/E2Dyx/B7UzmU59vjfvUdS3spZFgLhlc/epn2DGsFEYCtPZGL8dBPF5V6pqJo/qqyJydHcK\nZzAYuo+ot/bKbY0cfd1zfHDN6Xi+UhSzO9jT0F2MHlzCN06e2Ebxu+zOBTz7neM59ZcvAvDwwg08\nvHCDSQBjMOyhqOp2oE32EFWdD1waLt8F3NXDovVbGtMptrfkEsr7fmDByljxVAE7MORlLVcRpSyv\ndEOewc3CTUVW27FiqQvihMfyIstpwYp5kLGeSc6Cl7d/OLT6DuqBLQ7njJxKXVOKSx78J/XpFJOG\nVPKZgw5hxfY6nl29gkOGV3HOhMn8ev4rvLHpJU4auy/Hl07jna2bGR8by6GVI5k0bAj//caTiPhI\n1iU0UMw8N3e+eUqphhcuCAUkmRasRGEPYnWtwKcUD4loHa1rIra+tnn1/hS8TJn4jIXRVwQNE9qE\nbaos3bHNKH4d0BmL38dVdUWhDap6bhfLYzAYegjPbztJf+y3L/Pu+jqjUPQig0pjbdrW72hm0Yba\nXpDGYOgd1lY3cfwNz/PQV49h6siK3hbHsAdQkSji0smH8efFr+MDZ489kLhl8/iaJZQVxYnZNnHL\n4oiqMaxvqGfiwEou2G8apz74p2xVhVYOlqGzJKFGlnPllJwjZdDuBpk5Na2BZimCepodx0tFyzZo\nzrKoko25U0Lv0lAb8oB/vL84VKoCNjTW89yaldgieKrc//67XP2v52jxA//OBZvyXVff374dFisU\n2UGsYG4TqEa8U6PabAEFzxP85nCbBRDWq7CtyL426nqRoSLXL9Iro2B+fvIspgwaxs/feIG45XDw\n0BG8sH4FccdiaFE521saqSwqpS6VZFNTParK9Moqzh4/ua18hiydifErqPQZDIb+jVtA8Xt3fV0v\nSGKIMqy8qGD7og1t780tzy/jKyfu390iGQw9zjOLN+Mr3Pv6WqP4GbqM7x92Mt8/LN+Q+gvO6nCf\nj1ZNYe6alaTVw8PLldYL49iCSuaRHRQGJ0qpcGKsaN4RbMsqZxLsZ7VeJqLdSb5qpVmDGZlyEAiQ\nljylL6qbRV/stngRZSuiOGaPpwIpIHznqBKeG1agmGX7kesQHS8qg2ZMeEH2UMJ4wvJ4jPp0GqSA\nN5EHEo4xunwgcy+8FFf9bNbP8/ab1nYfw26zV2UL+PQRY7hr3hqTKMFgANJewazfhj7A/B98hJk/\nfYY7Pz+Li257DYD/fnBRm343PLnEKH6GPZLMD1fHNpn7DL3L706dzQc12xhROoD1DXXYIoypGIjr\neby7dQvXzpvLW5s3YSkMjZexrbGZ2roUtZIKKvSJDzbELYfzJkzh/kWL8FA0FrUWht8ZpSujH+WM\ngvko+UpctG9GIc2MW2i81uNgh4olgbtn67i8jFwZ904AN7RsipL1txSwfJti28H1PJKWj/jQrIFr\nKo5y+MhRjKscyPr
6Bm464XTWN9byztZNDCsZwIljxmNbFnarwvWGrmOv0n7umrcGgKn/86RxZTPs\n9aS91n8B2uL5SkOLS0VJW/dDQ/dRWZbIzlEXHzmWv0TKPLz2/ZP5zbMfZOczgC31Lby9tpaPTOmw\nxrPB0G+46emlANz+8ipGDizm80ePx7KMEmjoeSwRDhg8FIBJiaHZ9oTtcPjI0Vx//Omc+7e/0eK6\nbE825ylNgmCrDS7EYw7/XrsuWwdP0sHzrBn3yKjhzM8OkHMfzbhdZtYjcXd5pAl+3Ue3ubRVFAl1\nRAliDiNCt1EQ83Zzc30k/C+jQErYucX1QMH2rVwSF18gLRQT59rjTssOV1layvRhIwqciKE72GWV\nWkROEZGnd97TYDD0ZdwOLH6rtjUCcN3ji5l+9VPUt6R7SixDK1pb9IaVF/GT2VOB3B/9T9/6Kpfe\nOZ+WtNd69w/FlvoW1lYHyRBa0p6xEht6jKZU7ln+6aOL+d0Ly/n5E+/TnOraZ9xg+LBMHDKEsydN\nYnBxMd899lhGDhgA5CtLZfE4M6qquPK4Y3GyGWXIKU++IJ4gbvCx0oLlhh8v+JaUYCUFKwVWWpCk\nQEqgBSQZfKykYPmClekb/bRqI5kbx0qFx0y3Wg5liMomfvhRoTQWI25ZxC0LO0xKU+Q4fGzSZIoc\nB9uSrPFRBIaVlvKdY47p4TtkiNKuxU9ETgJ+D4wA/glcT1C8XYBrekS6LmZy1QAWbwziZHY0pRhY\nEt/JHgbDnsvq6qZ2t51w41ye/tZxPLxwIxDUkSsvMla/3qDQPCUiXHTkWB5aGATqL93cAEBNU6pL\nS9fMuuZZAFZddyaTfvgE+1aW8tx/ndBl4xsMneWGJ5cAELMtvnXKxF6WxmDIISJcd+qp2fUvHnZY\nh/2XTJzIG2vX89m75pD0PdzQBCNKtkRgplxCdD0bSxhuECS0vmXNbzk0sl/Ughex+lkqObfQ8EDZ\nmL5Me/iuLxrSB2ArxLC4+eyzOH7C+ILneSOnd3gdDL1DRxa/XwCXAUOAOcC/gTtUdYaq/r0nhOtq\n/vHlo7LL353zdi9KYjD0Ph///b873P6ZPweFbCFwddlTEZHTRGSJiCwTkSs76HeeiGhYR6vHiDsW\npx0YFKT9f2dMyrZXliXY0ZQm5eascM8s3tItMryxpgaAFdsaO7T61TaleXhhLmtcyvX5wT/fYUt9\nS7fIZdhzOWtaVcH27Y3Jgu0GQ3/iuSUrSKZ98IQKK44VWvxsX7C9wGonGlrv0hGrW8by5kmYFIUg\nNs8N3ChJCSQD62D0m1S43Y18PAE/XE4F41leaGH08y2OoqE8GsgjnuB6yj8Xvtfbl9Kwi3Sk+Kmq\nzlXVpKr+E1ivqjf3lGDdQbQ22dPvbe5FSQyGvs+mupZcXPkeqveJiA3cApwOTAE+ISJTCvQrB74B\nvNqzEgb8/jMzWHXdmVx23H7ZtmHlCQDmr6rOttU1p/PqM3YV5/72lezyPa+vbbff9Kuf4mt3v8nV\nDwc/Bp57fwt3zVvD1+9+s8tlMuy5+L4yb0V1wW13zVvDxtpmUq7PuCsf5X+f/aCHpTMYPjynTp5A\nccwhbtscPWYsMV+wFMQDccHyQNLBx/LDTwosN/x4YZ/McuY77Cutvi0/st2LfML9bMB2w+Ong3HF\nA0vDfVyw0uE3YIuQcGw+fujU3r6Uhl2kI8VvoIicm/kATqt1g8HQT/ELlHIoxNb6Pf7t+ixgmaqu\nUNUUcA8wu0C/nxC4u/cZ01VxPHiR9clbc7roDU8u4bdzl3frcX/ySNs3vEnX45bnl2XXb3t5JZtq\nW7j8rgUAzFtRzVn/+69ulcuw5/D3N9ezraH9uefIa5+jKcu9p7AAACAASURBVBVUlr71pZU9JZbB\n0GVMG7kPr3zncq457SO88s4q7HrFbgCrKVC8iCh0uAUUvKgilmlLgtMCThKcVOGPn
Qz2k+j+GUXP\njYwXfvAISj00gaQCWZwkFDcKn5g8lSPHj+nNy2jYDTrK6vkCcHZk/cXIugL90t3TYDDAB1sa2rTd\n+PHp/Nf9Cwv2X7CqhtMPKux61c8ZCURNWOuAw6MdRORQYLSqPioi3+1oMBG5jMBFnjFjuvcP4qzx\ngwu23/DkEr543L449q7l7rpv/lqumPM2V50+idOnVjFmSEnBfp86fAxrq5sYPTi3/fL/W8DzS7bm\n9XvsnY1566ZGpKGztDcPRbnygXcAMIk+DX2JhuYk67bVsl/VEN5dvYmSRBzHtliwbB0jhwxg1gFj\nEIQv/nYO85evxxfQ0BktU9qPyDsPj7C8QiYzZqaYe/TdreS2Z/ELtGW6euTH/WVK77XO+ukGlsBs\nk0+gBCqk8Pnbiwu5+4WF/OaSczjigDGs3rKDuG2xZMNWSovijB06iJFDKnjl/dUMKE4wfV+TubMv\n0K7ip6qf60lBDAZDz3HOzS+1aTt7ehUrtjYUtBjFdlGJ2FMQEQu4CfhsZ/qr6h+BPwLMnDmz630u\nI1RVFHPomIG8sWZHm22vr6rhyP2G7NJ4V4Rxz9c+/j7XPv4+i378UQCmj6pg4brabL/bX17F7S+v\nyiuJ01rpA7i6gGWwKeVSEt+rqggZdpFCWTvHV5ayMsw0nOGJRZsAqGkyGYcNfYP65iSzr76dmsbm\nnGIWVaY08pXRqDJ19jKJVKLF3AmSqGRLLUTGyQu/KHAMrNy4xTGbZs/LJmopVP8vW/av9TGidQQz\nsrbiG7c+1FaZbI3AiIHlDB9Yzo8+cypjhw1qp6Ohu2n315yIfLvV51si8hkRKZy+x2Aw9BuSbtvZ\nO25bnNJOHbg9uIjyemB0ZH1U2JahHJgKzBWRVcARwEM9neClPf7+5aO55ZOH8tL3Tsxr/8Sf5jHu\nykd5dvHuxzJnsih67cQM/uuDrdmyH51lyn8/yVV/N4m1DO2zrqZttuFBJTEWX31agd4GQ99h1eZq\n6pqSOetZawoUYZdMDF0YTydeLs4vG2fXOi6v9ccNXTgzLp9psJoDt89YM3g1HrF6sEPXTicVuIXm\nxQV6YTxgZNw2srj5MYSSOU+/1TlGP5Hz3lBTz8IVG7hxztwuvOqGXaWj1/jlrT4DgJnA4yJyYQ/I\nZjAYeoB/fuVovn3KRESk3RInbieKvfdTXgcmiMh4EYkDFwIPZTaqaq2qVqrqOFUdB8wDzlHV+b0j\nblvOnFbFqEElrLruTC48bHTetkv+Mp/bX+5cDJTdymfujldWAbB0U1u3YAiyvp5w41xeXratw3ET\nTv6fmbtfaz85jMHQGLH4/euKE7noyLH88j8Pzsa0Ggx9lUmjhjF8UFm2mHleCYXIJ5uJ088pV4Tf\nlqdYvuYlcLFSipUGp1mJtZD7NCuxZoglFceNtKUg5oEdKnUigVLpJCGWDBTAWDpYjjVHxmsKxnNa\nwE5lYgBDefxANjwNZM/I7wZKYOtzLHTeAIm4w+jKgT1wNwzt0ZGr548LtYvIYOAZgiQI7SIitwFn\nAVtUdWrY9nHgR8BkYFb0x5OIXAVcQvA4fV1Vn9ylMzEYDLvMBTNHcfDogRw8OpiI407hd0F7auFu\nVXVF5KvAkwTON7ep6iIRuRqYr6oPdTxC3+K686a1ybr544ffY+rICkYPKuH0X7/I/Zcfyf7Dynni\n3Y3UNKXZZ0ARn7vj9XbHfOGKE/j2vQv52CEjueKBtta6T93acaLTV648iWseW8zf31jfYT+DAaC2\nOXDdHF9ZyujBJVw9O5c1sCzh0JB0e0s0w17Mgy+8w2MvvceU8cN54Lm3KU7E+NMPL2TooDJ+eddc\n1m6uYcPWWiaPGcqh40bwyuLV1DU0k/YAO3DZ9DRXI088cr/ANZd9EwJ3S7EVcSNuoZBzpcwqVpHA\nv2y7ZMfM+874d0Zj+TLWOivcHvYV1WCUUKELL
H2KL+DFiBQcDGULlUAAYpkTyB3HESANAxNxvvAf\nRzP7iCn84JZHWbRiE44lbK5p4JLZRzBmn0Hc9tCrfGTWBC4+Oy/U3tCF7HKwhapWi3QqufsdwM3A\nnZG2d4FzgT9EO4bp0y8EDiQoGP+MiExUVY9uZOnmeiYOL+/OQxgMfZITDxjK80u2ct250/LaRw4s\n5hsnT+DXrVKkp/ZQxQ9AVR8DHmvV9t/t9D2hJ2T6MLz2/07mF08tZcLwMn766GIgv2bjR256kZXX\nnsHld73RZt9Z4wZz8ycPYdbPns22VVUUc/dlRwRjr6pmzoJ1nZbl3R9/lLKEw9nTR+Qpfp6veRbG\ne15bw4ThZcwYWzhhjWHvoaYxBcCfL27rTb3ghx/h/Y31zL7l5Z4Wy7CXoKrUN7ZQXBTHsgRLhK01\nDdzwl+dIux5vLQnmsWTK5fo7nmHSuOE89vIi0mH4RG1DCz/8wqnUbGzg9Y1rSWTHBTtM0pKNo2v1\nDiOqt+EqggTJXyCnbEnE+6a9mDoRykvjTB07nK07Glm2sTo4aDZeT/PdMzOKmxCYBrNWOg11wkAo\nS4OSEm0vWii2AunwZDUianhO9c1J3nl3PaXi8NJbK2hO5uJzf3v/Szi2hev5rNqwnRNmTmBsVfD3\noK6hhUTCxhIL3/dJxGMFhDB0ll1W/ETkRKBmZ/1U9UURGdeqbXE4Ruvus4F7VDUJrBSRZQRp1juu\nMP0hOfWXL+YlKDAY9hYaki5H7jsEq0BKvG+dMpHV2xv551u5QtzpPdfVc49j2IAirj8/UOgvnDWG\nz9/+Oq+tyq+JNv6qxwrtyv+cM4WyotyfhQe+dGTe9hs/Pp1rPjaVA37wRJt9Dx49kLfWBolm7rns\nCA4fPzg71594wDBeufIkbnp6KXMWrKMp5VJelPvjfeXfgwyNe+N8fMEf/s2pU4Zz6bH79rYofYLq\nUPEbXNrW7Tzh2EwfPZC/fH4WF9/2WrbdvMQ1dJZV67axbNUWykoSOI5DQ1MLtmVx8IGjefCZhfz2\nry+1SX6igJ/R4CK/Xxct2cCb764FRwJ3SrHwVXn25SXMf2dNmMCl1d/YrMVNIxY7QuVJgyaVcDdF\nfEFal1+KyBezLO792cUk4g6u56MKo4blu1KqKptrGhhUVkza87njwX/zt8cW4HpKPC60eJoL/CqQ\nKEZaa3CZuO+MrPkHy+/bipfmL+eFV5bgxYR4zCaVDuw7tiW4brCcSrlc+I3b2xkBJo6t5IdfPZ2W\npEdZSYxlq7fi+T77jRnGvmMqC+kYhgjtKn4i8g5t3yUMBjYAF3WxHCMJYmcyrAvbDAZDN1Df4jJq\nUOF0/RD8wL/oqHHZwt3/df9Czp8xqqfEM3QRZQmH+y4PlLdtDUl8XznpFy+0cZcbVBLjH18+mnGV\npQA8/o1jeea9zQUtcAnH5oEvHcVf563m72/mrHi//M+DOfHGufzuU4dyxL5tM4qOGFjMoWMGMWfB\nOhqS+Yrf3kptU5rXVlbz2spqo/iF1DSlsAQGdPB8HD9xaODdFv5C+fHDi/jrpUf0kISG/oTvK9f8\n7+PMe3MlDU0tHcarazYpSb7iIKGlSy3AU8QRjjpkPG++swZNK0WOw2fPP4J9hpSz76hKvvajexEP\n1AccOGhCFXWNLWytrqexOZ3Llpk9TE4myRwwtMiJaC6RSgF9ZvK+QxmzT8eeEiLCPoODFyMJ4Ixj\nDuSBpxZii095Io5b15x/HbKCBCsqBIphNNFX1O3UArFB0xFXVtFsn7glzDhoLCXFCea9vpyUqxTH\nHL73hVNpaEry2qI1aIvHi68vR50w7jErTOsUprB01VYu+vadBS2eiYTDXb/8LA899TZPvvgeF5w1\ng0/MPqzD67O30ZHF71yCih0ZFNiuqruWxq2L6co6WaMHFwNw9cPvcfLkYRy9f+WHls9g6A80plzK\ni9r/5+/YF
oeOMemW9yQqy4JX1u+GZRoAWtIeIoEyF2Vy1QAmVw1od6wZYwcxY+wgvn3qRI65/nkg\niMnamcUuE0O6YUcLVRXB/Ou1fpu9F9E6Y+qba2qYv6qGLxy39yqBtc1pyotiBb0RonztpAn8JnRJ\n70hJNOzdvL98Ey/MW0pL0s0pNIUeLY1oVm0yGQdWt0wSE0kpr768DD8uiAgfO/EgPnPmYSRTaWKO\nzQVnHMqt9/8bW+HwKWP5xZXn8v/bO+/4KKq1AT9nS3onCQQICb33oqIoSBXUa7v2rlexXa9er2K5\nNuxdPxtX7L0rRaUpRar03lIJCSSk923n+2NmN7ub3SRAKpzn91uYOXN29s2UM/OetxkMAovVxlX/\n/piDh4swm4yMHdWD4KAAdu0/RFhYIClZR5h0el8ysgvYtucgFfoEXWxUMNUWB0YD9EyKp6C4AoQk\nMiyEx++YetTHpFvnWOa9NZ1qi428glLufe57CssqGdqnM6Vl1aQdzKdLQjTnnT2A979fTWm5BSEh\nwCRoFxmGQQpiY8OorLDQLjqMx++aRllFNfe/8CMZBwuwOyT9eybQu2s8PyzcQr/eCbzwr79hsdm5\nbk8Oh/NLuWTiEMaP7MUfK3ezavFubAYQZoGw6dqcdDtJ3ufDWwl2W66utvHy/xazcdsBrDY773y6\nnAunDCEoUI0RTupS/L6WUg5rJjnqS6nuojHrZB0qrmL2ilQ+WJnGByvTTko3I8XJSVmVjbBAVU/t\nZCfIfHyZEjtFBR9Vf7NeFiT9SDnDk7SJhU9Xpx/VPsqrbWTkV9Cvo3/FtK0gvV5oLtQt7DeP6XrS\nuiul5pVTZa0/vP+eCTWKX/uIoKYWS9FGSYiLcNMb3NwQPVwStWUhpVtfd7OclyVLt8AZqzWTl8kB\nl9zwDvkFml3EIAQmKZHAqX0TSUvPo1OnaFJS8/j4+WsICQ7A4ZBYLDYqKi2kpecRHh5Ix4Roiosr\niY0NJ7CJ652GBgcQGhxATGQI82fd7rff5ed4xtpmZReSc6iIdjFhJCW2o6rKitlsxGw08Nht55Dc\nJZacw0UYhKBjQjS3Xno6Dz71IxMvfhUAYQCjFMz5eRPffrMOm57wxuAAh80tWNC7LqG3VdR7zXni\nJKzbmE5YeBBGm4Hw0EACzOpdx526jkZzPnXmAF8IIV5BS+7SE1hX91eOH6tdupIfuPPlukw+XZ3B\nmF6x3D+5T6005wpFW6e82k5IYP0v/VeMSnSl33c4ZL2z8IqTCyEE2x6fREONdk4r8vcbszh/SEc+\nXpXuMQYnz5jP2ofG+32Rf/633byzNAWAq07pQseoYO4Y1+P4/ogWxObnwFVY7ISepBMzf9ZTHsSJ\nEIKtj09i0OMLleKn8Et0VCjXXjiKj79ejcUqNXdNPWZN0zHclAyBHk8n3OLa3O5Rqf8jPV+Qv/ru\nL80bUZ+scehZMQXw9vtLETb99wyC2HbhvP3q1dxxz6fkHnErlaPLYTIZiYkO46NZNxIS7Lu8Ukux\nZOkuZj4/17UeHhZEWVkVUkqEQWAwGLDrieBMJgM2m13Tx0zCdWykA0BSUVGt7UT3exVo5Sdwrxwv\nJVLoGULtEvT3D+nWzWjSy03pbqfOc/rxK9exdfdBhg/sot5bvKirjl+cjyLurk99OxZCfImWnKW3\nECJLCHGTEOJCIUQWcBowXwixAEBKuQP4BtgJ/Abc0VQZPW+px4WmwmLjwR+2sTOnhFnLUlmblt8U\nYjQq1TY7xRXW+jsqFIDN7sBidxDSgFmwZ92yflodJ25mT8WxEx5kJjK4YW40iTEhRIWYWZWST8+H\nf/U58XbKM0u45+vNPl1AnUofwOdrM11F5lsrdodkya7DtSx7TvyVSXGWNFDUTahuFZm1PIWd2SUt\nLI2itXLxtGEM7tOJLglRtAsPJshgoH1UKJedO5xO7cK02nl2iaFa6rXzJNi
kpmzYJFjc1q0SYZfg\n0OPwbNQkX5Gy9sdRE6MmJRQWlbN85R5KS6t8ujDabA5KSitJz2jYBEhzsnDJdo/1Ul3pA5AOqSl9\n+rrN5vCMv/M+LjpCSq2GoUM/xm7HXSsaL/VagnqNQ5vEaJMY7RKDVXLrpaczILk9AQjMQmCUcN1l\npxEfG86EM/oQHek/l8HJSl2KnxEIo3Yhd+enTqSUV0gpE6SUZillZynl+1LKH/XlQClleynlZLf+\nT0spu0spe0spfz2+P8s/D03tS9qzUzm7T7zP7fd8vdljvdravC+7x6LAXffBOgY/ubAJpFGciFTq\nblTBAXXd/jVce1oSoDJ7KhqHhoypP246yHcbPOsR+lOeqm1NWvXnuPhwZRo3fbyerg/+4lP+ZXvz\nXMvJM+a7lptT8Xt98T6SZ8z3e3ybmy4xIZzZK65BfY0GgdEgKKqwMvWNFThO4nhRhW8cDskD93/N\nrvWZhEkDPeOiodBCcUYxP32+liNpRYhqO8KmKXTC4qhRNmy6Qmh3YLDqy1Jq8X42tP7O+8YmfXwc\nelxgzXU5fEgyZ53eG3OACaNReOYt0Y1bnTtG072b73fUluTSi0d5rEdFBrusacLdquah2OF5TOy6\nIm3Vl+0SYXMgrG6F4vXjLhz6Ma9yIKwO17kRNv1/q4PZ7yxl/+aDGIqtGIutBJTbWf7bdqqr1eSZ\nP+qa8i+QUj7ZbJI0I0IIn7PJUkoW7Djs0VZhadqXCikl7/+Zxt+GdGLf4VKunL2WD28Yybje/m/6\nF37bTUJUMNecqr2Qr0kt8NtXofBmdYpmxf5xUza3nNm93v7d9EyPVpsDV1EiheIYeeaiAdzz9RaP\ntmFdotiSVewxLj/w/TYe+H4bb1wxlOFJ0X5f6p2lJSb0bc9sH7XfGpvsokoy8is4rXvtzKXepOfX\n5EJbtjePe77ezLfTR9MjPgyAh3/c7vN7Rc3kweFwSF5dvBeA+dtyOHdQx2b5XW+yiyr5dE0G90/u\njckgGmxBBs/kQOn55XSLC2sKERVtlNLSSvbuycHhkKTsO+yZIVLPGGmyAlaHWwZJB860sVpCmJo4\nQAlgFJ5FzL2yS2olGWTNNiAiLJA3Xr6KzRsyePuVBVw4cRClZZVkZxZw+bWnk9w1DpPZhMloIDDQ\n1CpjfIcPSWLx3H9TUlqJ1WLnYGY+2dnFlJSUcyAjn/SUXI6UVWGTks5xEVRbbezNOFJzyJ2umEJo\nllDdPVPbpid0kc4YSq/x3mkVdKbz9XN8HA5J/pEy0tOP0Lt3QtMciDZOXYpfdbNJ0QIcKKio1Vbq\nleIc4D/fbWHaoKa7eHYfKuWp+btYuiePUV21lLxrUwvqVPze1t2dnIqfk/Pf/JM5d57RZLK2VTZk\nFHLxO6uY/88z6N8xsqXFaXEy9Ws/r7Rht7jJqFkGh85cxKb/TiTaR30thaKhXDi0MxcO7UyV1c6y\nvXlM6tfe9ZLz9tL9vPCbp/vmP7/c5LE+uns7VqXUdsFfvOswxRVWzCZBoMnIop2HmNSvQ6PHd4x+\n7ncA0p6dWu/L2WdrMl3Lj/68g8IKK8v25pHcLoQeD/t3bGkui9/+vJoYo61ZxZw7qCNbs4o4/82V\nrLh/HIkxzeMm5TymWw4UUW1zuJIAHS1nv7wMgMX3nuVSrhUnNxERwQwalMjmzZl6QT6pKW6uZCDe\n7pa6duJUQARItzSSAqkpiQY9ik/WbJU2zWolhNASxYia9DDVVQ5uufp/NT+3tOYnN/6Vxnfz7yUs\ntPXPrDpjEOf9tJF3XluIxVL7vRlgd47meu2cwrEbAJNB0/NsDoTJUKO8OWRNHB/goUk7D2CtWoZ+\nrPtC0CEhkq5dG+Y1cDJSl+J3fOneWjmpR2pXpXjxt9rxIhUWOxn55SS1C20SOZyzlRsyCukcrWXI\ne3dZCjPO6VPvd6WU3PHFRtf61qxivtu
QpeqteXHxO1qmvM/WZPLsRQNbWJqWZ0iiVtz1qQsGNKh/\ngLHGJXTozEUq+62iUQgyG5ncv4NH2+1je3DLmG4UVli5/H+rScmrPU4/+bf+dIkJpazaxrCZizy2\nebu8j0qOcdUxrIs1qfnEhAYcVRHwHzYe5OKjGGudEy4COFBYWWffg0V1b28szG739v+Wp/LQ1L5c\nqxdG/3xtZoOeQ43JqpR8QgKMHmPOsbAho0ApfgpA8/B66pm/c8MV71BaUkXvvglMmjqYF5+f595J\nU9TcvmcyCbr3SWD4iGS++HiVlpXEqYA4E7l45oWpQVdKhK4EAkiHw7euIrT4uEsmvKi5eXZph8EA\nxYUVXHzNaE4f24fY9hHMfm0hAYEmbrxrIkajASEEVquNtH2HMBgNxLWPJCIypN7JqOoqKyazkfLS\nSuw2iUM6CAkNxGgykrrnEPEJkXz5wQqys/I55YzeDBqWRFL3eD56+3eW/LoVcFBUUEm13eFKtuJh\nhXO3fjrbJBjtEux2V39pcbj6SrdjBoABzAYjE6YM4OzJg/jis5Vs2pQBjhrXWmeCF/cDbzAIXn7t\nKgYMTFQJXeqgLsWvXV1JXKSUrzSBPC3Kp2syiAw215pt/eqvAzwwpWkegM5rvdJqZ8dRBqd/uyGL\nX7Yd8mibuyVbKX5+qNZj21alHKFHfBjx4SdnJjhnrF5EcMOyBppNagBVNB8mo4G48ECW/Husq83h\nkHywMo2yahs94jXlLMYUQPpz03hl4R5mLU+l2lY7dnBdegHTP93AbzsOcd+kXtx5dk+klPzzq83M\n3ZLNe9eO4B+frHf1d5/UkFJisTtq1Th0siGzsE7Fz59rakSwmXIv75Kl941l7EtLXesz5+3kpjO6\nAlBSZWXQ4wu5Y1x3/jO5cZ9DvjxfnG6mDZ2AbAwm92/vCrOosNg9FNJj4YHvt3HZSN91ftOOlDPu\npaXNatFUtCzBwQF8+MVtHMwqILlbHCaTkdFn9GL7tkz6DUgkMNBEWkoecfHhZB3IJyk5jph22sRB\n7uFivv9olZbczE3RkzV+i7hSTErwdvt0fuGGW8bywbtLfRdBd4ufz8o44rI6fvDGIj54w3Ny6/tP\nV3u6mLrQlMy4uDCGjupOUo94OifFEhQcwO5tB+jdvxMrft/FvG9rxjuPkgnOZafCJCXrV+4HKTGZ\njNjclTk0hVq69a35X2Awaq6cDpsD3EsGuZVccHrUOsXwcO20A8JORGgww4YnM2x4MlJKtm3OJCQ0\niNj4MFYu20NS11gSOsYQFGRm+9ZMevTsQLu4hk/enazUZ/ELo3nLOjQb907sxSuL9tZq9+Vi887S\nlCZT/A6XVLmWtx0srrPvvd9s9shc9uW6zFp9jtVF5mQgUB+ArnxvLQmRQax+cHwLS9S8zFqWQmSw\nmU66Zbmhs+obMgo91pNnzOeCIR157fKhjS6jQuELg0Fw8xjfGZnvndSbf5zZjQmvLONwSW335d92\naJNjLy3cy0sLPcd8d6UPtEQnceGBxIUHurYtuudMeuqWwPlbc1x9v1ibyfWjk/1aCUurNOVuRFI0\n693uoR82ZnH3+J4efZNjQ9n434mEB5no6eUCulxPAPPWHyncO7F3o5YX+mR1use6973eXMR4ubg5\nGiHRzPcbsnwq5uN0Bfu/P2/noxtG1dquODEJDgmgR68aD4OIyGBGn9Hbtd5vQCcA4uI964PGt4/k\ngktH8eucjZRXWFxWLOGQREaF8vRrV/L4/V+Rl1sKuCpFuJSbmc9fxrBTu2MyGdm+IZ31q1L8uyl6\n4K1B+sIt1k0KpF2Sm1PCwp82+u5tMNRY5wyi9tu9R2xijTuszWoHr/cFISXCavfUQYXgvW9uJyw8\nmNuufJey0kr+fvVoEjpF8/Wnq8g6kO8y1UkBQUEm4ttHEhMXzpb16eCAoBAzYaGBhEWEcMFlNfen\nEIJ
BQ2vCm6b9zbPM+CmjPcdUhX/qUvxyTtTkLgB3juvhUvymDUrweKADfPGPU7jyvbWAljQA4Jr3\n1zK2d7xrJrYxWJ3a8HIRP2z0rGm/KbOoVp/Fu3L5fG0Gl4/souoP6lx7WhKfrM6ga2yIK3NdTnFV\nPd9qXB75aRuXj+zCgE4tF2P47K+7Afjw+pFATexefQzqHAV4TjL8tDlbKX6KVkN4kJm1D03waCss\nt3DXl5saXBcOcCU6cWfiq8vZ8ugkIkPMHq71AJNeXc6+p8/BZBC8/2caXWJCmKS7rzrdNYckRnko\nfqtS8rl5jPYMGZkczfv6/RjjJ3bW5DaOXzZrNd/dNtq1XlBuITrEfMyJIKYMSGDxrlzXutMtvrmp\n9irY3hgK6L+/3cLAzpF+FfMze6oYIEXDuOXuidxy90S/27+Yc0+tNpvNjsMhCXArxP7kK1fy3usL\nWLNsL6XF5ZSVWTytZW7Klgup+0I6xwGHt8XQT+yb+76cy+59HbImr78DT+ufwau/Q4LDqfy5K5sS\nYdB2EhBo4uXZN9BFj637dN6/qCivJipaC5OafH7N+4KqCdyytJYC7s2OwSB49+phpB2pICrEXEvx\nG9091rU8LEmLiVqx7wgr9h1pVMVvYBMoAg//uJ38Mgv/HK9mQADCg7TLvNLiaJGSBGtS8/lsTSaf\nrclsFfFx7y7TkgOZGjjw9kuI8NlutTv8umQ5U9O3hr9XcXISHRrAZzef4tFmsTnYmFlIx8hgrA4H\nLy3Yw/Wjk9mQWVgrqYw7g59cyOk9fGfxHPviUo+YvCn9O/DuNcP5Zr1WjmJol2ggzbV9QKcI1qVp\nis2zFw0kIsgzg2W3uFBS88pdL0f55RbXtvUZheSWVBEfEcT+3DImvLKMZy8ayBWjfLs11keVrnCN\n7xPP5gNFHr81qV/7Y9rnMcnhVZLjspGJR72P+6f05sOV6R5Jqya9utxjDHJ3bW2FSRMVJxAmHy7i\nRqOB6feew/R7z8FSbeWFB77h8IECysuryE7LB2fCEwkYnGXNdT9M90LzTmzOuLqaDa4lb0UPEA6H\nVhDdibt3vLsCKtGsmm4KnpZ8xQFIIsKDsNkcBIcEs4IrBQAAIABJREFU0SkpBofFziW3jKVX306u\n3QUEmDyUXneU0tey1KX4nfB+cFMGaNk6HQ7J/y3ZR7ZuBbpvUi8Atjw6icFPLsRsMGDzU2j3eLH4\n2e/Vs9fy2Hn9XC5G/gr9OjlvcEfmbsl2rS/ceUgpfjrOCbJyi83v8W5KLv/fmlpts1ekUlRh5b7J\nvX18o2lZm6aV/2hoHE1IgO8Yp398sp53rhpOsNd27/glhaK1EGAycGq3GgXunauHA3BKt3bcPrYH\nlRY7H61KJybUzN7DZezMLnF5Zazcr/3/n8m9CTIbmTlvJ1A7EctvOw551OQbmRztsX37wRK2H9Qz\n3vm4B685NYkn5u5k16ES+neMJL/M4rF91DNL+PH20WQXac+rh3/c5lPxe/P3fby0cC9/PTyBuHBP\nV8rkGfMZ1DnSlT06p7jKQ+kDWLjzMLmlVc0SC11ldTCgU4TruGzLqjvswZ3vbzuNNakF3D62B7ed\n1Z2uD/7it29Gfo3i98TcnVw6IpHQwIbFOisU3tjtDuZ+soLy4iqi48PZvi6F0qIKUnZk0z4xhjPP\nHcJX/7eI0Mhgzjp3CFlpeaz8dSsAXXp3JGOfXj5MV7Kk06VSCK2AuXsMnTMOz2nlc+pOdt1tU6KX\nmHAvd+BWcsL1O65sKq4213ediqZN37/BfXzS92mXlB7RsgFXFlVQcFB7n9ixPg2jkAQEmhk1ri9Z\naXkYjQbCIoLJSs1l+qMXkJdTjMEoiOsYzYKv1xDXMZor7ppEVDuViKk58TviSSlPmuJwBoNg2qAE\n3luhzcqW6LEZIYHaC22V1e6K1zheyqttmIzClTDAplugjAbP2oJ/7
j/CM7/s4kM9BuGjlel17vfW\nM7t5KH4jk2MaRd62QEmVFZtd+nWV2ntI872fvzWHuLCWTZecXVRJQmQQT83fBdBsip+vRBOmBsaD\nhvl5MVq6J49Rzyxm2+OTPdq3HKhxQZZSHrMbmkLR3AQHGLltrGdtSykluaXV/LDxIO1CA7h0ZCJS\nSoorLLzx+/569xkXHsi6h8ZzzzebXcqjk8To2slFnM+EaW/86WoLDzLRq324ywXyvRWpDOykeaI4\npO/7zBnPOPLpxS6rV1m1zbWPrVnFbNUVrJ05vhOLfbE2k/F92jN3azYPTe1b7996tFRZ7UgJv+/O\n9WgfmhTt5xu1GZ4Uw/Ak7XlX31gTFuQ5li3dk9ek5ZoUbYOiI6XcPvlFyksquPv5yzllQn+env4h\nORlHMBgNlBdXUFxYgdFk4IYZ57Fi3iZGnd2Xnz/6k5LCcs9wPP0aLDxSyu6N6SAEJYXlfPXmYo/t\nGXsO1bhvguY2CWCz11jwbG7PbXfrm9HgmdZSf75ffttYeg5IZMvaFHoN6Eyn5Fi+/3AFIImJiyQi\nKphlv2zhQOqRmhlxIWvq6tntevyfvn+HVx3remIT7TYHlbZqls3dVGvbk7d+WHs/QjDnw+UIIRj7\nt+Esn7uRoJBATGYDNquDkeP6csujF/Divz5nx18p2G12rr7nHC6/a1KdcijqRk116bgrdu0jtBlO\ns9FAgNFAucXeaHWV+j+2gCCzgTN6xPLqZUNcljxfBeXXZxSSmldG19jQemPSvGPH5mzO5rHz+rvW\nc4oreWreLp69uLZrUVtn+MxFWO2SVTPOpmNUcK3tS/SXioNFlTz9y67mFs+D//60nbN618SWHCqu\nokNk08+o+8p42NDkLiF1zIiXVtlqvXReOXuta7murIgKRVtACEH7iCAPhVAIwb2TenPvpN7szy2j\noNzCqK4xSCldFqe+CRF8cP0IhBDERwTx+c2nujJ0OvHl8jSkS1StttIqG4+e24+/vbUSgF+2HeKI\nmyVw9oo0/nFmN3YfKiE8yEyQyfe9fc/Xm1m083Ct9kCTwTVGrHt4PBe/s4oDBZVsyizitcX7AK3c\nw/6nz6kzNviOzzdyRs/YBrue9vnvbz7lOO84lLHdM6fU2q8T7+fsG0v2KcXvBKW8pJLgsEAMBgMO\nh4OqimrSdmqWuIBAM4u+XUNggJnUXQfZuiaFwjxt8uOVf3/BxdPHsX1dKlaLzUNJsdsczH7qZ5CS\nlB1ZWtITp8XMm7rqBIJeHsKAXs+hdmF59/91i+DZfxvC1jWpgKashkWE8OxntxLfMZpgtwRJoyfU\nvPs9/NpVHj9/1R0TkFJitdrJyy4ioUsMNquDx2/5gP07D1JRUoXdZveM9XPPWOrr7xQCHA7fiqG/\n2EW3NumQ/PGjlkyrvLjCtX3F/M1Yqq1s/nMPUr93P35xPtHtQhk1YSDfvr2I7av3ccmdk0jZdoDO\n3eMZdlZfouLCMejWysK8EiLbhWM8zkzBJxJK8dP56q8DruVBnWuUqPAgE6VVVkqqjl/xc1pdqqwO\nFu/KZeDjC3lkmv9Z1NIqm6sg7dMX+q+5tvnR2kHH3rff+yvSmL8th5HJ0Vx/euPFKLYGnHF7o5/7\nnSfO70+5xcbtY3u0sFS+Ka2ysdnNIlZUaWkWxa/KK3kCQCcfSrIvQvRsqDee3pV/ju/BX+mFHtkQ\nS6psRAb7nkzo/chvrT7OTwgxBXgdLZPxbCnlc17b7wVuBmxAHnCjlDKj2QVVtErc68UJIUh/bppf\nS3dEkJn056ZRYbH5dbUemRzD0C5RtZJ3DU6M4vHz+vH4XM3FdLtbFuh523Lo0i6EWz/dUKesW7Nq\nJwQDrcSEMzYuPjyI+f8cw6DHF7JMzyjqZOobK1h4z1mAVhbnyvfW8sLFg1xW0Pnbcpi/LeeYYw6f\n/Ft/nv1193FNTgaZjfz18ARGP
q1ZWKqsdoL0Mcxb8RvTM7bW9xVtn9fu/YyFX67SlAWX2yM+Si3o\n7QYDGPVrxGrjuzcXYgoO0twvvZUwKTGYjPr97a70uVnO3AuOe1n1ahQ8ySlj+5K+9xCxCVHcOONc\nLNVWivJLCQ4JorignPzDJZw6vi+BQYHEtI8gKFjzarJZ7aTtzqFT11hCwo7+/UEIQUCAiU7J2vUf\nEGjgmY9vAaCqopr1y/YSEh5Av+HJrPx1Gzs2pHPpbWcTExfBd7OWUHSklEGn9aRDYjs+f30Bm/7c\nTXVFPdZBh1PR1Y+V8z/3BDRe37db7WxavrvmHCDBAa/d+zkGk0ErFwE8+4/Znr+rJ50JDgukoqyK\ngAAT4y85hRv+eyHh0U1Tk7stoRQ/nQl927N4lzYTanC7ACOCzZRW2Viwo6ZeXmG5hWg/boV1kV1c\nuyiv0+WvPgq94i+cfDf9NKJCNFnWPzKB8mobl85a7ZGcBmruKV+Wn7aM9BpcHpuzA9AKQW85UOSa\nIW8tlFtsHtlZLc10PryTJ0DDA6wNBsHmRycSGmjCbDQw0Svpw7vLPMudXDCkIz9tzvbeTatECGEE\n3gImAlnAX0KIOVLKnW7dNgEjpJQVQojbgBeAy5pfWkVboT6XwxA/SQ+c/Hj76ezMLmHmvJ2sTs1n\n1jVaLOL1p3clo6CCD1emU2Gpuae3HCiqU+n7cl0mb/6+32e5C4CHp/blX19vdq37U7z2Hi5zLd/0\nkTb5c//3W7l0ZCIvLaxJjpM8Y75rwqeg3EJ4kKnemOJLhnfmspFd/NbfOxriwgOZ1K89C3ceZkd2\nscsV9GCRZk149+phTP9sI7P/TOORc/sd9+8pWheLvlpdo/R5IJB6yhSE0HOY6JYnZ3FxKbFLib2y\nhMCIYCxWu/aeYTAgHNrzWjocWMut9BreDbtBkLL9oKeLpr5/TQlxi7XT6/4ZDILw6FBuffRCEpKO\nfvLBZDbSc2DT1GsOCgnkjHMGutbHXzSC8ReNcK1f+U/P0I7H3ruJrav38cjV7yAdktCIYEoKy10W\nOs9zYEcYDVw/4zzSdmUT3zmab95arB07/RiZzQasNqcl0EFFkRVhMNQcPv0cOKx+jre+LB0OKko1\nTzlLlZVfP/uT0uIKHn7/lkY4Sm0bpfjpvHP1MFan5NMhMsgj/XN4kImSKitv/VHzIjt05iK2PzHZ\nb+yTP3xZXZycM6ADv24/RIeIIA6V1Hbr9K4/9eA5fQgwGRjhFssXGxZIbFggUcEBVFo8f8vpbtdW\nFL+V+4/Qs339RdbdLbXeuCt93p4GBuGZUnj3oRIe+3kHX/zj1EYvgxEbFsiRMu2Fa0e2ZyxNWTMl\nQjlSqk0cXD4ysc5j5g/n5IIvPluT4aH4eT9q7Q7ZmkuLjAL2SylTAYQQXwF/A1yKn5TyD7f+a4Cr\nm1VCxUlJv44RfHnLqbXaHz23H3O3ZHOkzMKtZ3bj583ZPp8ZoGUY/W3HIR78YZvf37l0RGcuGNqJ\nIYlRDZrQfGPJPj5ZnUGl2/NMSslbf6R49MsuqiQi2MywmYu48pQuPHPhQI/t3nHH/9aTqjUW5w3u\nyMKdh7n4ndWkPzeNAwUVvLRAe44WlDdO6Iai9VBSUMadYx6nKK+EgOBAqmx2N2OcZmkTsiZPprPN\nw6Ln7rbocFBdVA4mk/YNm10rWG53IHXlY8+6/by26CEs1VY+f/kX8rKLCAwJYMCpPUjfnU1+dhGD\nTu/JwNN60KVHAlkphwmPDmXomX0wGES9E0RthUGn9eSnfS8hdJdUu81O2q5sPntpPns2Z2C12rBU\n2ejUNZanvriDdu01d3YpJVuX7WLXX6lgtwECS5WerMZs1s6HzY40GcGh1f7ziHV0X9b/F/p+hXs7\ngEOyd0Ma1w+6n5ETB3L7S1efMMf/aFGKn47ZaODMXrXr+mz1k10sI7+c/h2PrhRDSR0JYt6+ahg
O\nqRX3/c93W+vd1+T+HUiO9W2yDgowejyUQYudgLqVTyf7Dpcy8dXlDE+KZkRyNDef0a1WVrimxGZ3\ncNXstSS3C2Hpf8bV2XdTZk29p/jwQHJLfc9oP3vhQA6VVLniVRwSiiqtroQwU15bAUD3h35pNNdE\np7tXdIiZEUnRrkLS7rz9R0ot62xjUl5tY9ATC12lG7IbqX7h7plTWLzrMHd+salW4qMyr/XUvDJX\ndtpWSCfAXRPOAk7x0xfgJuBXfxuFELcAtwB06XL8lguFwhshBOsfqXHvn3FOH/bnlmE0CG1cq7Bw\nybur2fLYJOwO6XvcuWoY9327hZUPnO1S9ryfJ3PuPJ3z39Qmz3rEh7E/V7P2OevfuvPb9tq/seVA\nEbd9rtU9/GJtpkvxK6+2cbikCpuX4pcQ2TDX82NhfXoBl7y72rV+0bBOPPSjf2VY0TJYLTbWL9hC\np54dSN2ayV8LNtNrWFeGTRzEqp/X43BIug9OorKsitXz1rPp9x26VUdglzWTCbLSoikFZnONRU+3\n8LlcMqWsccmWUitV4LRKuzwLJVjrmCQQguDQAPoM78qg0fVPXPQcfOI+EwxuGUCNJiM9Biby+MfT\n6/yOEILodqFgc74z6AfeIaHazcvN6qacC7fC83Z7TUJSo0E7p/pGCeBweJzjQyk5CCGYN/t35v1v\nEUIIHHZJbMdoZm18joN7c0jfmcXudfsoPFzCOTeNo/+pvZj33hLyDxZw6nnDGXa2/7CrtoJS/I6R\naW/8edQKQkkdCWKEEBgFXDi0E7ml1by4wH9NKYCkdrWzwTlxz6roxGnlsdeTlanKamfiq8sBrYju\nhoxCUvPKee/aEXV+rzH5YKWWXTXdLfW2P9xrzHkrfe4uh+FBZi4f1YW7x/dkzpZs7v5qM7/vzuWS\n4bXdJdwtgal5ZezKKT3qBABLdh3mpo/X8+plgymrtrlqCXrjLC792/YceneIoKsfZd5J2pFyxr20\nlM2PTqzTCufkm/UHsDukK7bllUsHM+KpxVw0tFM936ybILORaQMTuBMte9fhkipXUiRnMp1Hz+3H\nk/N2csV7azxeVNsqQoirgRHAWf76SCn/B/wPYMSIEXXfbApFIyCEqDWx4v5smnfXGbz1x34MQrB8\nXx7L/jOOmNAApg6se0wb1DmKK0/pwhdrM3ni/P5c5Za0yRungueO1UfCMoDpn21gxb4jrvWX/z6Y\ni32Mw8eL+69/6JUVO8hsZFRyDOvSC1pt5mEhRAzwNZAMpAOXSil9VrYXQkSgeSn8JKW8s7lk9Efa\ntkzysvLIP1TMgV3ZICChe3uKDhVz6f3nk7Unm9kPfoHVYsNgNLBl6Q7d8iYQBgNGkwG7TXtpX/TZ\nCsyBZuxWm2aUc7iVOQA09020QuLu7sRSgsVScx34OsdSetS1Ew4HhgATvUZ0o6qimsJDxVqiEyRB\noYEcyS4mLDKYoNAgKiuqmHbdWST1Ob5n6cnO3a9dS+aeHPKyCwiLCKGkoAwhBIEhZqoqLBhNRton\nxTJifH++f32BS2F3t+ZJ0JRDf+fYY1VqX7XaAa0GYu6BfC6Kvxlpl3qTtp9Vc9bTuVcCB3Zr75E/\nvbVA245EOiTtk+KYdst4zps+iZDwYNbO38gfX68iOj6Si+89l+9fmUtAcADxSXEk9k4gvnMsUe0j\nCQ5t+rwOdaEUv3roFhtK6pHyRtmXUzF5ZFpf0o6U8/naTAAm9I139TEZDdwxrgdrUvM9Ho4AI5Ki\nuXdiL9akFRz1g8pZa2rWslS6xIRw1SlJPvu9szSlVluOj9jEpsTP+4JP/JXZOFBQQZW1xq116sAO\ngHZDm/SZqYd+2OZS/HrGh7FPn9Hu9tAvrux1U15bgcXu4JwBU2vFxB0oqGDMC5oXYOozNdu3ZhW5\nZsXv+XoLQK1MeDuemEz/xxYAWgzM9M+0F6ezesXx8Y2jXP2
W783jz/1HeGBKH4wGwfn/p6V4H/Lk\nonoz7EHtUgyxYYGNZtF0vwYvfGslt4/rwewVqa42Z9KaI2W+41NbCQcB92rRnfU2D4QQE4CHgbOk\nlL7NygpFK2RAp0hXvcKj5ZkLB7osdbuenMJri/cya3nNPe5UDJ28fvkQIoPNXP/hX/zzS8+U7j0e\n+oVbz+pW67lm9pOB9Hjp5jaJNn9bTq3tY/vEsS69gCqro1Yt0lbCDGCJlPI5IcQMff0BP31nAsub\nQog37pjN6rl/0WdUD+54/UY+fuxrFn2yDGEwENc5hqT+XSg6XMTZV5/JZ098izAaKM7TSih5JjPR\nWPDJMgoPFWLRE+Z5v8tIh3QpfU5sFpu+O6m/+Nd8T+rRX9Lh0Gw97s9ph0SYDFq8mcMrzMUVg6fJ\naA408fd7pnHtIxe1yomAE5XIduHMXvdUg/pOuXoM9095hsLcktoxhM5C9u7rLguh7tJrt2vn1u1a\ncF5n0u77xTPvgFsJHr2eolOVPJyRx4ePfs2+DWl06duJz5763jXjNHfWIiyVzncf56QGhEeHM+m6\nM/nx9fnEdIjmzXXP8so/3mX3uv2MvXw0B3YdZOvyXQQGmxk8dgAPffkvAoOOPqdIXSjFrx4+vnGU\n6+Xemy/WZnKgsILpZ3X3m9Vw3+FSEqKCGaC/5AP8fUQig5+oSen9ro+H8kc3jCIlr4z3V6Tx9XrN\nWjcsKZrRPWIZ3aNhroEWm4NDxVWc+aKn/A//uB0pNSVgyoAOHtt8xWLlllSzMbOQJbsO85/JfWpt\nbwhSSlbsO8KYnrH1DqpOlyKAX7blMLFfe7+JAWb/meaz/UhZNeUWG8O6RPHD7ad7bDtH/5svGNrR\n1ebtdtTj4V9Jf26aq+D7ByvTuHlMN48+S3bVpEXPLq6ks16Ty+ke5c6X6zI91kMDTXSPCyUlr5xh\nMxe52pftzfPIQnftB+sASIgM4oIhnSh1iwkc/8oylv1nHFuzinhjyT7evHKY63tOGuLa2xhkF1fx\nyE/bXeu3ntWNKf314zyko7+vtQb+AnoKIbqiKXyXA1e6dxBCDAVmAVOklLm1d6FQnPgEBxh5cGpf\n7ji7ByFmIyajgQ0ZhR6K37mDOlJu8T0ZZ3PUjgMEGNPA59nRMqBTJPdP6c0Lv/n2ngnXJ8XKqm2t\nVfH7GzBWX/4YWIoPxU8IMRxoD/yG5pHQaMydtZC572jvLn9mrSNr7yHStzvPt4Oc1Fxy0nJBQsqW\ndKzVNrfMjfi0wORm5CH0+Dbv5GzOBB8x7aMIjQrhwJ4cAoLMXHjnFFb8uI7s/YeQehxZcFgglWXV\nNVkcjQZw2AkKCSQ8Koy8A/nEJbbjo92vIB2wb2MauQcL6NyjPZHtwtmxeh/R7SOJ79KO4JBAouKP\nLnRH0fwk9kzgi/1vIKWktLCc7JTDGAxQUlhOQtf2VJVXY62yYA4OpOhQEb2Gd+WpK99gy9IdGExG\nHHYHA87ojdVmJ3VrJtZqO0IvpeGenDU4IojqCgvJAxK55pGLePLS13DYHTUd3JB2SWlBGdv/3O2x\nyVLlPuGtT1I4wFJt5btX5oGU5GXl8+xVr7P5Dy0p4Zy3FrgUWpvFxqqf/2Lm31/mqbkPNupxbDLF\nTwjxAXAukCulHKC3+XRdEJom8DowFagArpdS1vYdaQESY0L4/OZT+GnTQZ65aCA9H64J73HGCLyz\nNMVlRamy2jEZBCajgd2HSlyxY+5EBJk4b3BHV8F1X1Ybo0HQq304z18yiAqrnblbsslsgOsjwO1j\nu/P20hT++9N2l9LojfMlPeWZqR7KXqCP2dfc0mouensVAHO35PD7v8+iwmrn5QV72J9Xxuc3105A\nAJpFLFf3v7969loqrXZX6m+Aapud3o/8xuDEKH6+Q1POrHYH323IqvlbPt/IJcM789LfB3PZrNWs\nTSvgoqGdeOGSQRiEcNV
XfOnvg7nv2y2u7y3ZlVtrZtmJwSBoFxrAN+uzWLk/nytP6cKh4ipXIoSa\nv7smHu6p+bu4eUw37A7JnC0HOX9wJ1dadYDMggqX4ueLgZ0i2aanX++ru6dePLyzz5eSx37ewYR+\n7bn105qSCfllllquVhn5Fa6U6gC7D5UyJFELmv5uQxbd4kL57887XP3XPDjer3zHyuDEKJ+uxav2\n52M4R9A9LtRVbqM1IqW0CSHuBBaglXP4QEq5QwjxJLBeSjkHeBEIA77VJy0ypZTnt5jQCkUL4p7x\nc3hSdC0PgoggMyaDcE2mbfzvRI/JLW+OJUN2Q5l+Zne/il+om+LXnDHsR0F7KaXTVHkITbnzQAhh\nAF5GSzg1oa6dHUv8cVWZt3OD51huMGgvzOZAE1HxkZQcKcVmtWPzSiJnDjRhCjBRXWHhmkcvIbZT\nNG/d/RESyejzRrB9lVanzWgyMnhsP257+TrCojzDHm586nJK8kvZuzGV/qf1JiA4gLStmQQEmyk4\nVET/0b0oyS8jMjYck9lEZXkVQSGBronmfqf2xD1/69i/t2vQMVC0LpwJZCLbhRPZro7cAYO0a/z5\n3x6iutJCYHAADoesVc/P4XBwcN8hLJXV7NuUTv/Te5PYy3Oyek7hB/y1YAtlhWV8+fzPlBWVYa22\n0bF7e+ITY5n+8nUU55Xw4LRnKC+qIDo+kvNum8SnT37npjBqRMdHUmyQVOiedEn9EtmybGeNBdOL\nsqLG8Th0R9SecWmkHQtxJlAGfOKm+L0AFLi5LkRLKR8QQkwF7kJT/E4BXpdS1pVgAdBiaNavX19f\nt0bFvUCvO04XveQZ8wH45Z9j2HO4xOXq547zQbl452H6d4qoN6j9q3WZzPhhG3eM694gi9vGzEKX\nolYfHSKCuHZ0EtedlsziXYf54M80tmQVc+uZ3Txceurit3+NoXN0CEt2Heb8wR0RQjB3SzZ3ebn6\nAAzuHElOcRUlVVYPV0ynAvrRyjQPhcrJ9LO68+6ymtni60cn89GqdNd6+nPTXMfeG1+ujb76eu/T\nm89vPqXOOJewQBNDEqNccXvuPH/xQLrGhnHvN5uZf9cYIkPMFFdYGfzkQh97ajhRIWaKKjTlNyY0\ngA2PTGB1ar5LGXTSVLX0Kiw2+j26oFb7UxcM4OpTk7j03dXsyilhzUPjXS9a9SGE2CClbL6g0iai\nJcYnhaK1YrM7sNgd7M8to2NUMD9uPMg1pyXV8lJobD5bk+Ga6Fzy77NIbheK0SBccdiT+7dn1jUN\nH24ac3wSQiwGOvjY9DDwsZQyyq1voZQy2uv7dwIhUsoXhBDXo5WdqTfGr6Fjk8Ph4LELnmfbn7vp\nObw7/35vOpt/3873r86l+5CuDDl7ADEdoshJzWX8VWeQsfMgkXERdOgaT352AVXlVXTu1RGjsVVa\nVBWKJqe0qIy961PpPbI7AYFmzIFminKL+Wzmdww5ewCnXzCKn9/8lQ2LtjL5hnFYqizMuu8TygrL\nad8ljhf/eJzYjjH1/xANH5uaTPHThUgG5rkpfnuAsVLKHCFEArBUStlbCDFLX/7Su19d+2+pF6vs\nokpGP/f7MX13xxOTG/wC7E5mfgWJMcEN8j3PKqzgjOdru6c+Mq0vGfkVfLqm7trTsWGBrH9kAlmF\nFeQUV/F3t2xozcULFw/i/u/rz27qxJmu22QUnPZszbm5f0pvn8Xcz3l9BbtyPEsrPHF+f649LYkz\nX/yDAwUNi2sc0CmC7QdL6uzz0Q0jOatXnM9z9+maDP6rv5RseXRSgxTB3TOn0P+xBbWKEddFUxZR\nt9kd9HCzhM+76wwGdNLcZoY+uZBCXTHdPXNKg17ylOKnUCgai+IKK+e9+Sf3Te7N+YNrZvI3Hyji\nAr3kz9E8l5trfPL3vuTV53NgDOBA80oIAN6WUs6oa99qbFIoTjwaOjY1TVS1f/y5LvhKq
e4zVZIQ\n4hYhxHohxPq8vLymk7QOOkYFc8PpycSEBrhcFBvC9mNU+gC6tAtpcMBx5+gQokPM9IwPY9qgBC4b\nkci+p8/h5jHdmHnBAIYnRdf5/cGdI137GZkcQ/pz0/jjvrH06eBpVq9vP8fDpSMT+eLm2kbfdQ+P\n50K3jJQT+rZnxxNaQdHEmBASIoNJf24ad4/vCcBNZ3T1uf9f7x7DsxfV1JXqEhPCNacmIYRgxf1n\nM6przQyLrxhMJ99NH+3zN/Y8NYUNj0xg2+OTGNs73u+5u+bUJD69aRRz7jydyBAzy/8zjhtP1/YX\nHWLm17vHEOIWf3KtPkP+6mVDXG0jk6NrnRtmx6v1AAAKNUlEQVR3Vs042++2xsBkNLDonjOZecEA\n0p6d6lL6AJ69aJBr2aAC5hUKRTMTGWJm+f3jPJQ+gN5umVDr8vRoQeYA1+nL1wE/e3eQUl4lpewi\npUwG7kPzsKpT6VMoFCc3zW3xK/LluiCEmAc8J6X8U29fAjwgpaxzSqq1zFrZHZJdOSUUVVgJMBkY\n1DmSp+fvItBk4M6ze1BYYSX5KBS35qLaZqfSYsdoEJRUaSUHtmcVMywp2qdlxlkWoMpmJ9hsxGw0\nUGW1U211kJZfTnK7EPbllhFsNtIuLKCWC6vN7iC/3EKQ2YjN7iA6JECLEZCSI2UWCiu0YNgecWG1\nMmgWV1gJDjASoMcgllZZMRkMTRqUX1xhxWwShARoynpdab8rXFnHOGbl/mix2BwIASa3QPkKi51A\nkwEhhCu5WUtfd1JKKq1213GsD2XxUygUzYEzpvzykYkNHieb0eLXDvgG6AJkoOVEKBBCjACmSylv\n9up/PY3s6qlQKNoODR2bmjur52EhRIKb64IzQ16DUqq3VowG4WHlAJh5QU2Rx4bUW2sJAk1GAk2a\n4hSuB+zXlTHUaBAYDcKlfIFWDynIbGRIiKbPj0z274tsMhpctd7cEUIQFx5YZ4B9ZIhn1tTwIN9Z\nVBsT79+s68WgoUpNYxLglYhHCNFsSufRIIRokeOjUCgUdWE2GrhiVOssqi2lzAdqZeTSJ8Rv9tH+\nEfBRkwumUCjaNM3t6unPdWEOcK3QOBUori++T6FQKBQKhUKhUCgUDaMps3p+iVaDJhY4DDwG/IRv\n1wUBvAlMQSvncEN9bp76b+Tp+2kIsYDv/P6tFyVz89DWZG5r8sLRyZwkpYxrSmGaAzU+tTramryg\nZG4uTqrx6SjHpqamrV0vSt6mp63J3FrkbdDY1KQxfq0JIcT6thY3pGRuHtqazG1NXmibMjcnbfH4\ntDWZ25q8oGRuLtqizCcKbe3YK3mbnrYmc1uTt7ldPRUKhUKhUCgUCoVC0cwoxU+hUCgUCoVCoVAo\nTnBOJsXvfy0twDGgZG4e2prMbU1eaJsyNydt8fi0NZnbmrygZG4u2qLMJwpt7dgreZuetiZzm5L3\npInxUygUCoVCoVAoFIqTlZPJ4qdQKBQKhUKhUCgUJyUnheInhJgihNgjhNgvhJjRwrJ8IITIFUJs\nd2uLEUIsEkLs0/+P1tuFEOINXe6tQohhbt+5Tu+/Twhxna/faiR5E4UQfwghdgohdggh7m4DMgcJ\nIdYJIbboMj+ht3cVQqzVZftaCBGgtwfq6/v17clu+3pQb98jhJjcVDLrv2UUQmwSQsxrI/KmCyG2\nCSE2CyHW622t9rpojaix6bhlblPjU1sdm/TfU+PTSTY+NQX+zoGfvhFCiCwhxJvNKaOXDPXKK4QY\nIoRYrd/TW4UQl7WAnHU+S+q6L1uCBsh7rz6ubxVCLBFCJLWEnF4yNeh5LYS4WAghhRCtM9OnlPKE\n/gBGIAXoBgQAW4B+LSjPmcAwYLtb2wvADH15BvC8vjwV+BUQwKnAWr09BkjV/4/Wl6ObSN4EYJi+\nHA7sBfq1cpkFEKYvm4G1uizfAJfr7e8Ct+nLtwPv6
suXA1/ry/306yUQ6KpfR8YmvDbuBb4A5unr\nrV3edCDWq63VXhet7aPGpkaRuU2NT211bNJ/U41PJ9H41ITnxec58NP3df2ae7M1ywv0Anrqyx2B\nHCCqGWWs91ni775soWPaEHnHASH68m0tKW9DZdb7hQPLgTXAiJaU2e/f0tICNMPJOg1Y4Lb+IPBg\nC8uUjOfL1R4gQV9OAPboy7OAK7z7AVcAs9zaPfo1sew/AxPbisxACLAROAWtwKbJ+7oAFgCn6csm\nvZ/wvlbc+zWBnJ2BJcDZwDz991utvPr+06n9YtUmrovW8FFjU5PI32bGp7YyNun7V+NTC1zPJ+LH\n3znw0W848BVwPS2r+DVIXq/vbEFXBJtJxnqfJf7uyxY6pkf17AOGAitb6ho4GpmB14BpwFJaqeJ3\nMrh6dgIOuK1n6W2tifZSyhx9+RDQXl/2J3uL/E26a8BQtFnqVi2z7pa0GcgFFqHN1BRJKW0+ft8l\nm769GGjXzDK/BtwPOPT1dq1cXgAJLBRCbBBC3KK3terropXRFv72NnM+28r41AbHJlDjk3e74tjx\ndw5cCCEMwMvAfc0pmB/qldcdIcQoNItQSlML5kZDrlN/92VLcLT31U1oFvmWpF6ZdRfxRCnl/OYU\n7GgxtbQACk+klFIIIVtaDm+EEGHA98C/pJQlQgjXttYos5TSDgwRQkQBPwJ9WlgkvwghzgVypZQb\nhBBjW1qeo+AMKeVBIUQ8sEgIsdt9Y2u8LhTHTms+n21pfGpLYxOo8Ulx9AghFgMdfGx62H2ljnNw\nO/CLlDLL/V5uKhpBXud+EoBPgeuklA5//RQNRwhxNTACOKulZakLfbLiFTQLdavmZFD8DgKJbuud\n9bbWxGEhRIKUMkcfOHL1dn+yHwTGerUvbSrhhBBmtJeqz6WUP7QFmZ1IKYuEEH+gmemjhBAmfbbL\n/TpwypwlhDABkUA+zXftnA6cL4SYCgQBEWixDa1VXgCklAf1/3OFED8Co2gj10UrQY1NjUBbHZ/a\nyNgEanwa69W+tKlkPlGQUk7wt00I4e8cuHMaMEYIcTsQBgQIIcqklE2SAKsR5EUIEQHMBx6WUq5p\nCjnroCH3lr/7siVo0FgghJiApnyfJaWsbibZ/FGfzOHAAGCpPlnRAZgjhDhfSrm+2aRsCC3ta9rU\nHzTlNhUtmNwZkNm/hWVKxjOO5kU8g4df0Jen4Rlwvk5vjwHS0ILNo/XlmCaSVQCfAK95tbdmmePQ\nA6uBYGAFcC7wLZ7JCG7Xl+/AM+j5G325P57JCFJp+gQKY6lJntBq5QVCgXC35VXAlNZ8XbS2jxqb\nGkXeNjU+teWxSf9dNT6dJONTE15DPs9BHf2vp2Vj/OqVF238XoLmcdASMtb7LPF3X7ZieYeiucs2\nW6zk8crs1X8prTTGr8UFaKYTNhUt21sK2mxMS8ryJVrGJyuaj/BNaH7WS4B9wGLng0V/CL2ly73N\n/SICbgT2658bmlDeM9BiJbYCm/XP1FYu8yBgky7zduBRvb0bsE7//W+BQL09SF/fr2/v5ravh/W/\nZQ9wTjNcH2OpebFqtfLqsm3RPzuc91Vrvi5a40eNTcctc5san9ry2KT/phqfmuE4n8ifOs7BCGC2\nj/7X07KKX73yAlejjZub3T5DmlnOWs8S4EngfH3Z733ZQse1PnkXA4fdjueclpS3ITJ79V1KK1X8\nhC6gQqFQKBQKhUKhUChOUE6GrJ4KhUKhUCgUCoVCcVKjFD+FQqFQKBQKhUKhOMFRip9CoVAoFAqF\nQqFQnOAoxU+hUCgUCoVCoVAoTnCU4qdQKBQKhUKhUCgUJzhK8VMoFAqFQqFQKBSKExyl+CkUCoVC\noVAoFArFCY5S/BQKhUKhUCgUCoXiBOf/ASAV6SQFnxL3AAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + 
] + }, + "execution_count": 91, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import matplotlib.pyplot as plt\n", + "\n", + "fig, ax = plt.subplots(4, 3, figsize=(15, 6))\n", + "for i, r in enumerate(res):\n", + " ax[i, 0].plot(r[2])\n", + " ax[i, 1].plot(r[3])\n", + " #ax[i, 2].plot(r[0], r[1], '->', alpha=0.1)\n", + " ax[i, 2].scatter(r[0], r[1], s=5.0, alpha=1.0, c=np.arange(0, len(r[0]), 1), cmap=cm.viridis)\n", + "\n", + "ax[0, 0].set_title(\"objective\")\n", + "ax[0, 1].set_title(\"norms\")\n", + "ax[0, 2].set_title(\"x vs y\")\n", + "ax[0, 0].set_ylabel(\"orig fast\")\n", + "ax[1, 0].set_ylabel(\"orig slow\")\n", + "ax[2, 0].set_ylabel(\"TTUR x\")\n", + "ax[3, 0].set_ylabel(\"TTUR y\")\n", + "\n", + "fig.savefig(\"toyfunc3d_ttur.pdf\", bbox_inches='tight')\n", + "\n", + "fig\n" + ] + }, + { + "cell_type": "code", + "execution_count": 92, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "plt.close('all')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.1" + }, + "toc": { + "navigate_menu": false, + "number_sections": false, + "sideBar": false, + "threshold": 6, + "toc_cell": false, + "toc_section_display": "block", + "toc_window_display": false + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/Wav2Lip-master/evaluation/TTUR-master/WGAN_GP/README.md b/Wav2Lip-master/evaluation/TTUR-master/WGAN_GP/README.md new file mode 100644 index 00000000..4789a851 --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/WGAN_GP/README.md @@ -0,0 +1,10 @@ 
+This is a fork of the Improved Wasserstein Implementation + +https://github.com/igul222/improved_wgan_training + +We ported the implementation to Python 3.x and added a FID +evaluation to the image model (gan_64x64_FID.py) which is +logged and trackable with Tensorboard. + +The language model is altered to also log Tensorboard events +i.e. the JSD. diff --git a/Wav2Lip-master/evaluation/TTUR-master/WGAN_GP/data/README.md b/Wav2Lip-master/evaluation/TTUR-master/WGAN_GP/data/README.md new file mode 100644 index 00000000..a7193d51 --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/WGAN_GP/data/README.md @@ -0,0 +1 @@ +This directory holds the data directories for the training and validation datasets. diff --git a/Wav2Lip-master/evaluation/TTUR-master/WGAN_GP/gan_64x64_FID.py b/Wav2Lip-master/evaluation/TTUR-master/WGAN_GP/gan_64x64_FID.py new file mode 100644 index 00000000..fde520eb --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/WGAN_GP/gan_64x64_FID.py @@ -0,0 +1,798 @@ +import os, sys +sys.path.append(os.getcwd()) + +import time +import functools + +import numpy as np +import tensorflow as tf + +import tflib as lib +import tflib.ops.linear +import tflib.ops.conv2d +import tflib.ops.batchnorm +import tflib.ops.deconv2d +import tflib.save_images +import tflib.data_loader +import tflib.ops.layernorm +import tflib.plot + +import fid + +''' +Works also with 32x32x3 images, just set DIM to 32 +''' + +DATA_DIR = 'data/lsun' +DATASET = "lsun" # celeba, cifar10, svhn, lsun +if len(DATA_DIR) == 0: + raise Exception('Please specify path to data directory in gan_64x64.py!') + +# Download the Inception model from here +# http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz +# And set the path to the extracted model here: +INCEPTION_DIR = "inception-2015-12-05" + +# Path to the real world statistics file. 
+STAT_FILE = "stats/fid_stats_lsun.npz"
+
+MODE = 'wgan-gp' # dcgan, wgan, wgan-gp, lsgan
+DIM = 64 # Model dimensionality
+
+# Settings for TTUR and orig
+TTUR = True
+if TTUR:
+    CRITIC_ITERS = 1 # How many iterations to train the critic for
+    D_LR = 0.0003
+    G_LR = 0.0001
+    BETA1_D = 0.0
+    BETA1_G = 0.0
+    FID_STEP = 1000 # FID evaluation every FID_STEP
+    ITERS = 100000 # How many iterations to train for
+else:
+    CRITIC_ITERS = 5 # How many iterations to train the critic for
+    D_LR = 0.0005
+    G_LR = 0.0005
+    BETA1_D = 0.0
+    BETA1_G = 0.0
+    FID_STEP = 333 # FID evaluation every FID_STEP
+    ITERS = 25009 # How many iterations to train for
+
+OUTPUT_STEP = 200 # Print output every OUTPUT_STEP
+SAVE_SAMPLES_STEP = 200 # Generate and save samples every SAVE_SAMPLES_STEP
+
+# Fixed: `FALSE` is not defined in Python (NameError at import time); the
+# boolean constant is `False`.
+LOAD_CHECKPOINT = False
+DIR = "mmdd_hhmmss_lrd_lrg"
+ITER_START = 0
+
+# Switch on and off batchnormalization for the discriminator
+# and the generator. Default is on for both.
+BN_D = True
+BN_G = True
+
+# Log subdirectories are automatically created from
+# the above settings and the current timestamp.
+CHECKPOINT_STEP = FID_STEP
+LOG_DIR = "logs" # Directory for Tensorboard events, checkpoints and samples
+N_GPUS = 1 # Number of GPUs
+BATCH_SIZE = 64 # Batch size.
Must be a multiple of N_GPUS +LAMBDA = 10 # Gradient penalty lambda hyperparameter +OUTPUT_DIM = DIM * DIM * 3 # Number of pixels in each iamge + +if not LOAD_CHECKPOINT: + timestamp = time.strftime("%m%d_%H%M%S") + DIR = "%s_%6f_%.6f" % (timestamp, D_LR, G_LR) + +LOG_DIR = os.path.join(LOG_DIR, DIR) +SAMPLES_DIR = os.path.join(LOG_DIR, "samples") +CHECKPOINT_DIR = os.path.join(LOG_DIR, "checkpoints") +TBOARD_DIR = os.path.join(LOG_DIR, "logs") + +# Create directories if necessary +if not os.path.exists(SAMPLES_DIR): + print("*** create sample dir %s" % SAMPLES_DIR) + os.makedirs(SAMPLES_DIR) +if not os.path.exists(CHECKPOINT_DIR): + print("*** create checkpoint dir %s" % CHECKPOINT_DIR) + os.makedirs(CHECKPOINT_DIR) +if not os.path.exists(TBOARD_DIR): + print("*** create tboard dir %s" % TBOARD_DIR) + os.makedirs(TBOARD_DIR) + +# FID evaluation. +FID_EVAL_SIZE = 50000 # Number of samples for evaluation +FID_SAMPLE_BATCH_SIZE = 1000 # Batch size of generating samples, lower to save GPU memory +FID_BATCH_SIZE = 200 # Batch size for final FID calculation i.e. inception propagation etc. + +# Load checkpoint +# from https://github.com/carpedm20/DCGAN-tensorflow/blob/master/model.py +def load_checkpoint(session, saver, checkpoint_dir): + print(" [*] Reading checkpoints...") + ckpt = tf.train.get_checkpoint_state(checkpoint_dir) + if ckpt and ckpt.model_checkpoint_path: + ckpt_name = os.path.basename(ckpt.model_checkpoint_path) + saver.restore(session, os.path.join(checkpoint_dir, ckpt_name)) + print(" [*] Success to read {}".format(ckpt_name)) + return True + else: + print(" [*] Failed to find a checkpoint") + return False + +lib.print_model_settings(locals().copy(), LOG_DIR) + +def GeneratorAndDiscriminator(): + """ + Choose which generator and discriminator architecture to use by + uncommenting one of these lines. 
+ """ + + # For actually generating decent samples, use this one + return GoodGenerator, GoodDiscriminator + + # Baseline (G: DCGAN, D: DCGAN) + #return DCGANGenerator, DCGANDiscriminator + + # No BN and constant number of filts in G + # return WGANPaper_CrippledDCGANGenerator, DCGANDiscriminator + + # 512-dim 4-layer ReLU MLP G + # return FCGenerator, DCGANDiscriminator + + # No normalization anywhere + # return functools.partial(DCGANGenerator, bn=False), functools.partial(DCGANDiscriminator, bn=False) + + # Gated multiplicative nonlinearities everywhere + # return MultiplicativeDCGANGenerator, MultiplicativeDCGANDiscriminator + + # tanh nonlinearities everywhere + # return functools.partial(DCGANGenerator, bn=True, nonlinearity=tf.tanh), \ + # functools.partial(DCGANDiscriminator, bn=True, nonlinearity=tf.tanh) + + # 101-layer ResNet G and D + #return ResnetGenerator, ResnetDiscriminator + + raise Exception('You must choose an architecture!') + +DEVICES = ['/gpu:{}'.format(i) for i in range(N_GPUS)] + +def LeakyReLU(x, alpha=0.2): + return tf.maximum(alpha*x, x) + +def ReLULayer(name, n_in, n_out, inputs): + output = lib.ops.linear.Linear(name+'.Linear', n_in, n_out, inputs, initialization='he') + return tf.nn.relu(output) + +def LeakyReLULayer(name, n_in, n_out, inputs): + output = lib.ops.linear.Linear(name+'.Linear', n_in, n_out, inputs, initialization='he') + return LeakyReLU(output) + +def Normalize(name, axes, inputs): + if ('Discriminator' in name) and (MODE == 'wgan-gp'): + if axes != [0,2,3]: + raise Exception('Layernorm over non-standard axes is unsupported') + return lib.ops.layernorm.Layernorm(name,[1,2,3],inputs) + else: + #return lib.ops.layernorm.Layernorm(name,[1,2,3],inputs) + return lib.ops.batchnorm.Batchnorm(name,axes,inputs,fused=True) + +def pixcnn_gated_nonlinearity(a, b): + return tf.sigmoid(a) * tf.tanh(b) + +def SubpixelConv2D(*args, **kwargs): + kwargs['output_dim'] = 4*kwargs['output_dim'] + output = lib.ops.conv2d.Conv2D(*args, 
**kwargs) + output = tf.transpose(output, [0,2,3,1]) + output = tf.depth_to_space(output, 2) + output = tf.transpose(output, [0,3,1,2]) + return output + +def ConvMeanPool(name, input_dim, output_dim, filter_size, inputs, he_init=True, biases=True): + output = lib.ops.conv2d.Conv2D(name, input_dim, output_dim, filter_size, inputs, he_init=he_init, biases=biases) + output = tf.add_n([output[:,:,::2,::2], output[:,:,1::2,::2], output[:,:,::2,1::2], output[:,:,1::2,1::2]]) / 4. + return output + +def MeanPoolConv(name, input_dim, output_dim, filter_size, inputs, he_init=True, biases=True): + output = inputs + output = tf.add_n([output[:,:,::2,::2], output[:,:,1::2,::2], output[:,:,::2,1::2], output[:,:,1::2,1::2]]) / 4. + output = lib.ops.conv2d.Conv2D(name, input_dim, output_dim, filter_size, output, he_init=he_init, biases=biases) + return output + +def UpsampleConv(name, input_dim, output_dim, filter_size, inputs, he_init=True, biases=True): + output = inputs + output = tf.concat([output, output, output, output], axis=1) + output = tf.transpose(output, [0,2,3,1]) + output = tf.depth_to_space(output, 2) + output = tf.transpose(output, [0,3,1,2]) + output = lib.ops.conv2d.Conv2D(name, input_dim, output_dim, filter_size, output, he_init=he_init, biases=biases) + return output + +def BottleneckResidualBlock(name, input_dim, output_dim, filter_size, inputs, resample=None, he_init=True): + """ + resample: None, 'down', or 'up' + """ + if resample=='down': + conv_shortcut = functools.partial(lib.ops.conv2d.Conv2D, stride=2) + conv_1 = functools.partial(lib.ops.conv2d.Conv2D, input_dim=input_dim, output_dim=input_dim//2) + conv_1b = functools.partial(lib.ops.conv2d.Conv2D, input_dim=input_dim//2, output_dim=output_dim//2, stride=2) + conv_2 = functools.partial(lib.ops.conv2d.Conv2D, input_dim=output_dim//2, output_dim=output_dim) + elif resample=='up': + conv_shortcut = SubpixelConv2D + conv_1 = functools.partial(lib.ops.conv2d.Conv2D, input_dim=input_dim, 
# --- tail of BottleneckResidualBlock (its def line is above this chunk) ---
# 'up' branch (continued): 1x1 stage halves channels before the 3x3 conv.
                                         output_dim=input_dim//2)
        conv_1b        = functools.partial(lib.ops.deconv2d.Deconv2D, input_dim=input_dim//2, output_dim=output_dim//2)
        conv_2         = functools.partial(lib.ops.conv2d.Conv2D, input_dim=output_dim//2, output_dim=output_dim)
    elif resample==None:
        conv_shortcut = lib.ops.conv2d.Conv2D
        conv_1        = functools.partial(lib.ops.conv2d.Conv2D, input_dim=input_dim, output_dim=input_dim//2)
        conv_1b       = functools.partial(lib.ops.conv2d.Conv2D, input_dim=input_dim//2, output_dim=output_dim//2)
        # NOTE(review): conv_1b emits output_dim//2 channels but conv_2 is
        # declared with input_dim=input_dim//2; these disagree whenever
        # input_dim != output_dim.  Mirrors the upstream igul222 code --
        # confirm before changing.
        conv_2        = functools.partial(lib.ops.conv2d.Conv2D, input_dim=input_dim//2, output_dim=output_dim)

    else:
        raise Exception('invalid resample value')

    if output_dim==input_dim and resample==None:
        shortcut = inputs # Identity skip-connection
    else:
        # Project the skip path with a 1x1 conv when shape or resolution changes.
        shortcut = conv_shortcut(name+'.Shortcut', input_dim=input_dim, output_dim=output_dim, filter_size=1,
                                 he_init=False, biases=True, inputs=inputs)

    # Bottleneck body: 1x1 reduce -> filter_size conv -> 1x1 expand + BN.
    output = inputs
    output = tf.nn.relu(output)
    output = conv_1(name+'.Conv1', filter_size=1, inputs=output, he_init=he_init)
    output = tf.nn.relu(output)
    output = conv_1b(name+'.Conv1B', filter_size=filter_size, inputs=output, he_init=he_init)
    output = tf.nn.relu(output)
    output = conv_2(name+'.Conv2', filter_size=1, inputs=output, he_init=he_init, biases=False)
    output = Normalize(name+'.BN', [0,2,3], output)

    # Residual branch damped by 0.3, as in the upstream implementation.
    return shortcut + (0.3*output)

def ResidualBlock(name, input_dim, output_dim, filter_size, inputs, resample=None, he_init=True, bn=False):
    """
    resample: None, 'down', or 'up'
    """
    # Plain (non-bottleneck) residual block with optional batch norm,
    # used by GoodGenerator / GoodDiscriminator.
    if resample=='down':
        conv_shortcut = MeanPoolConv
        conv_1        = functools.partial(lib.ops.conv2d.Conv2D, input_dim=input_dim, output_dim=input_dim)
        conv_2        = functools.partial(ConvMeanPool, input_dim=input_dim, output_dim=output_dim)
    elif resample=='up':
        conv_shortcut = UpsampleConv
        conv_1        = functools.partial(UpsampleConv, input_dim=input_dim, output_dim=output_dim)
        conv_2        = functools.partial(lib.ops.conv2d.Conv2D, input_dim=output_dim, output_dim=output_dim)
    elif resample==None:
        conv_shortcut = lib.ops.conv2d.Conv2D
        conv_1        = functools.partial(lib.ops.conv2d.Conv2D, input_dim=input_dim, output_dim=input_dim)
        conv_2        = functools.partial(lib.ops.conv2d.Conv2D, input_dim=input_dim, output_dim=output_dim)
    else:
        raise Exception('invalid resample value')

    if output_dim==input_dim and resample==None:
        shortcut = inputs # Identity skip-connection
    else:
        shortcut = conv_shortcut(name+'.Shortcut', input_dim=input_dim, output_dim=output_dim, filter_size=1,
                                 he_init=False, biases=True, inputs=inputs)

    output = inputs
    if bn:
        output = Normalize(name+'.BN1', [0,2,3], output)
    output = tf.nn.relu(output)
    output = conv_1(name+'.Conv1', filter_size=filter_size, inputs=output, he_init=he_init, biases=False)
    if bn:
        output = Normalize(name+'.BN2', [0,2,3], output)
    output = tf.nn.relu(output)
    output = conv_2(name+'.Conv2', filter_size=filter_size, inputs=output, he_init=he_init)

    return shortcut + output


# ! Generators

def GoodGenerator(n_samples, noise=None, dim=DIM, nonlinearity=tf.nn.relu, bn=BN_G):
    # ResNet generator: linear seed map -> 4 upsampling residual blocks -> tanh.
    if noise is None:
        noise = tf.random_normal([n_samples, 128])

    ## supports 32x32 images
    # NOTE(review): `fact` ties the initial spatial size to DIM; with 4
    # 2x upsamples the output is (fact*16) x (fact*16) = DIM x DIM --
    # the "32x32" comment above may be stale, confirm.
    fact = DIM // 16

    output = lib.ops.linear.Linear('Generator.Input', 128, fact*fact*8*dim, noise)
    output = tf.reshape(output, [-1, 8*dim, fact, fact])
    output = ResidualBlock('Generator.Res1', 8*dim, 8*dim, 3, output, resample='up', bn=bn)
    output = ResidualBlock('Generator.Res2', 8*dim, 4*dim, 3, output, resample='up', bn=bn)
    output = ResidualBlock('Generator.Res3', 4*dim, 2*dim, 3, output, resample='up', bn=bn)
    output = ResidualBlock('Generator.Res4', 2*dim, 1*dim, 3, output, resample='up', bn=bn)
    if bn:
        output = Normalize('Generator.OutputN', [0,2,3], output)
    output = tf.nn.relu(output)
    output = lib.ops.conv2d.Conv2D('Generator.Output', 1*dim, 3, 3, output)
    output = tf.tanh(output)

    return tf.reshape(output, [-1, OUTPUT_DIM])

def FCGenerator(n_samples, noise=None, FC_DIM=512):
    # Fully-connected baseline generator: 4 ReLU layers + linear + tanh.
    if noise is None:
        noise = tf.random_normal([n_samples, 128])

    output = ReLULayer('Generator.1', 128, FC_DIM, noise)
    output = ReLULayer('Generator.2', FC_DIM, FC_DIM, output)
    output = ReLULayer('Generator.3', FC_DIM, FC_DIM, output)
    output = ReLULayer('Generator.4', FC_DIM, FC_DIM, output)
    output = lib.ops.linear.Linear('Generator.Out', FC_DIM, OUTPUT_DIM, output)

    output = tf.tanh(output)

    return output

def DCGANGenerator(n_samples, noise=None, dim=DIM, bn=True, nonlinearity=tf.nn.relu):
    # Standard DCGAN generator; layer weights are created with stddev 0.02
    # (set/unset brackets the layer construction).
    lib.ops.conv2d.set_weights_stdev(0.02)
    lib.ops.deconv2d.set_weights_stdev(0.02)
    lib.ops.linear.set_weights_stdev(0.02)

    if noise is None:
        noise = tf.random_normal([n_samples, 128])

    output = lib.ops.linear.Linear('Generator.Input', 128, 4*4*8*dim, noise)
    output = tf.reshape(output, [-1, 8*dim, 4, 4])
    if bn:
        output = Normalize('Generator.BN1', [0,2,3], output)
    output = nonlinearity(output)

    output = lib.ops.deconv2d.Deconv2D('Generator.2', 8*dim, 4*dim, 5, output)
    if bn:
        output = Normalize('Generator.BN2', [0,2,3], output)
    output = nonlinearity(output)

    output = lib.ops.deconv2d.Deconv2D('Generator.3', 4*dim, 2*dim, 5, output)
    if bn:
        output = Normalize('Generator.BN3', [0,2,3], output)
    output = nonlinearity(output)

    output = lib.ops.deconv2d.Deconv2D('Generator.4', 2*dim, dim, 5, output)
    if bn:
        output = Normalize('Generator.BN4', [0,2,3], output)
    output = nonlinearity(output)

    output = lib.ops.deconv2d.Deconv2D('Generator.5', dim, 3, 5, output)
    output = tf.tanh(output)

    lib.ops.conv2d.unset_weights_stdev()
    lib.ops.deconv2d.unset_weights_stdev()
    lib.ops.linear.unset_weights_stdev()

    return tf.reshape(output, [-1, OUTPUT_DIM])

def WGANPaper_CrippledDCGANGenerator(n_samples, noise=None, dim=DIM):
    # Deliberately weak DCGAN generator (constant width, no batch norm),
    # matching the "crippled" baseline from the WGAN paper experiments.
    if noise is None:
        noise = tf.random_normal([n_samples, 128])

    output = lib.ops.linear.Linear('Generator.Input', 128, 4*4*dim, noise)
    output = tf.nn.relu(output)
    output = tf.reshape(output, [-1, dim, 4, 4])

    output = lib.ops.deconv2d.Deconv2D('Generator.2', dim, dim, 5, output)
    output = tf.nn.relu(output)

    output = lib.ops.deconv2d.Deconv2D('Generator.3', dim, dim, 5, output)
    output = tf.nn.relu(output)

    output = lib.ops.deconv2d.Deconv2D('Generator.4', dim, dim, 5, output)
    output = tf.nn.relu(output)

    output = lib.ops.deconv2d.Deconv2D('Generator.5', dim, 3, 5, output)
    output = tf.tanh(output)

    return tf.reshape(output, [-1, OUTPUT_DIM])

def ResnetGenerator(n_samples, noise=None, dim=DIM):
    # Deep bottleneck-ResNet generator: 6 blocks per resolution, 4 upsample
    # stages (4x4 seed map up to 64x64).
    if noise is None:
        noise = tf.random_normal([n_samples, 128])

    output = lib.ops.linear.Linear('Generator.Input', 128, 4*4*8*dim, noise)
    output = tf.reshape(output, [-1, 8*dim, 4, 4])

    for i in range(6):
        output = BottleneckResidualBlock('Generator.4x4_{}'.format(i), 8*dim, 8*dim, 3, output, resample=None)
    output = BottleneckResidualBlock('Generator.Up1', 8*dim, 4*dim, 3, output, resample='up')
    for i in range(6):
        output = BottleneckResidualBlock('Generator.8x8_{}'.format(i), 4*dim, 4*dim, 3, output, resample=None)
    output = BottleneckResidualBlock('Generator.Up2', 4*dim, 2*dim, 3, output, resample='up')
    for i in range(6):
        output = BottleneckResidualBlock('Generator.16x16_{}'.format(i), 2*dim, 2*dim, 3, output, resample=None)
    output = BottleneckResidualBlock('Generator.Up3', 2*dim, 1*dim, 3, output, resample='up')
    for i in range(6):
        output = BottleneckResidualBlock('Generator.32x32_{}'.format(i), 1*dim, 1*dim, 3, output, resample=None)
    output = BottleneckResidualBlock('Generator.Up4', 1*dim, dim//2, 3, output, resample='up')
    for i in range(5):
        output = BottleneckResidualBlock('Generator.64x64_{}'.format(i), dim//2, dim//2, 3, output, resample=None)

    output = lib.ops.conv2d.Conv2D('Generator.Out', dim//2, 3, 1, output, he_init=False)
    # /5. softens the pre-tanh scale (kept from the upstream implementation).
    output = tf.tanh(output / 5.)

    return tf.reshape(output, [-1, OUTPUT_DIM])


def MultiplicativeDCGANGenerator(n_samples, noise=None, dim=DIM, bn=True):
    # DCGAN generator with PixelCNN-style gated nonlinearities: each layer
    # emits 2x channels which are split into even/odd halves and combined
    # multiplicatively by pixcnn_gated_nonlinearity.
    if noise is None:
        noise = tf.random_normal([n_samples, 128])

    output = lib.ops.linear.Linear('Generator.Input', 128, 4*4*8*dim*2, noise)
    output = tf.reshape(output, [-1, 8*dim*2, 4, 4])
    if bn:
        output = Normalize('Generator.BN1', [0,2,3], output)
    output = pixcnn_gated_nonlinearity(output[:,::2], output[:,1::2])

    output = lib.ops.deconv2d.Deconv2D('Generator.2', 8*dim, 4*dim*2, 5, output)
    if bn:
        output = Normalize('Generator.BN2', [0,2,3], output)
    output = pixcnn_gated_nonlinearity(output[:,::2], output[:,1::2])

    output = lib.ops.deconv2d.Deconv2D('Generator.3', 4*dim, 2*dim*2, 5, output)
    if bn:
        output = Normalize('Generator.BN3', [0,2,3], output)
    output = pixcnn_gated_nonlinearity(output[:,::2], output[:,1::2])

    output = lib.ops.deconv2d.Deconv2D('Generator.4', 2*dim, dim*2, 5, output)
    if bn:
        output = Normalize('Generator.BN4', [0,2,3], output)
    output = pixcnn_gated_nonlinearity(output[:,::2], output[:,1::2])

    output = lib.ops.deconv2d.Deconv2D('Generator.5', dim, 3, 5, output)
    output = tf.tanh(output)

    return tf.reshape(output, [-1, OUTPUT_DIM])

# !
# Discriminators

def GoodDiscriminator(inputs, dim=DIM, bn=BN_D):
    # ResNet critic matching GoodGenerator: 4 downsampling residual blocks.
    output = tf.reshape(inputs, [-1, 3, DIM, DIM])
    output = lib.ops.conv2d.Conv2D('Discriminator.Input', 3, dim, 3, output, he_init=False)

    output = ResidualBlock('Discriminator.Res1', dim, 2*dim, 3, output, resample='down', bn=bn)
    output = ResidualBlock('Discriminator.Res2', 2*dim, 4*dim, 3, output, resample='down', bn=bn)
    output = ResidualBlock('Discriminator.Res3', 4*dim, 8*dim, 3, output, resample='down', bn=bn)
    output = ResidualBlock('Discriminator.Res4', 8*dim, 8*dim, 3, output, resample='down', bn=bn)

    # NOTE(review): the 4*4*8*dim flatten assumes 4 halvings of DIM end at
    # a 4x4 map, i.e. DIM == 64 -- confirm for other DIM values.
    output = tf.reshape(output, [-1, 4*4*8*dim])
    output = lib.ops.linear.Linear('Discriminator.Output', 4*4*8*dim, 1, output)

    return tf.reshape(output, [-1])

def MultiplicativeDCGANDiscriminator(inputs, dim=DIM, bn=True):
    # DCGAN critic with gated nonlinearities; each conv emits 2x channels
    # which pixcnn_gated_nonlinearity halves again.
    output = tf.reshape(inputs, [-1, 3, DIM, DIM])

    output = lib.ops.conv2d.Conv2D('Discriminator.1', 3, dim*2, 5, output, stride=2)
    output = pixcnn_gated_nonlinearity(output[:,::2], output[:,1::2])

    output = lib.ops.conv2d.Conv2D('Discriminator.2', dim, 2*dim*2, 5, output, stride=2)
    if bn:
        output = Normalize('Discriminator.BN2', [0,2,3], output)
    output = pixcnn_gated_nonlinearity(output[:,::2], output[:,1::2])

    output = lib.ops.conv2d.Conv2D('Discriminator.3', 2*dim, 4*dim*2, 5, output, stride=2)
    if bn:
        output = Normalize('Discriminator.BN3', [0,2,3], output)
    output = pixcnn_gated_nonlinearity(output[:,::2], output[:,1::2])

    output = lib.ops.conv2d.Conv2D('Discriminator.4', 4*dim, 8*dim*2, 5, output, stride=2)
    if bn:
        output = Normalize('Discriminator.BN4', [0,2,3], output)
    output = pixcnn_gated_nonlinearity(output[:,::2], output[:,1::2])

    output = tf.reshape(output, [-1, 4*4*8*dim])
    output = lib.ops.linear.Linear('Discriminator.Output', 4*4*8*dim, 1, output)

    return tf.reshape(output, [-1])


def ResnetDiscriminator(inputs, dim=DIM):
    # Deep bottleneck-ResNet critic, mirror of ResnetGenerator
    # (64x64 down to 4x4 with 6 blocks per resolution).
    output = tf.reshape(inputs, [-1, 3, DIM, DIM])
    output = lib.ops.conv2d.Conv2D('Discriminator.In', 3, dim//2, 1, output, he_init=False)

    for i in range(5):
        output = BottleneckResidualBlock('Discriminator.64x64_{}'.format(i), dim//2, dim//2, 3, output, resample=None)
    output = BottleneckResidualBlock('Discriminator.Down1', dim//2, dim*1, 3, output, resample='down')
    for i in range(6):
        output = BottleneckResidualBlock('Discriminator.32x32_{}'.format(i), dim*1, dim*1, 3, output, resample=None)
    output = BottleneckResidualBlock('Discriminator.Down2', dim*1, dim*2, 3, output, resample='down')
    for i in range(6):
        output = BottleneckResidualBlock('Discriminator.16x16_{}'.format(i), dim*2, dim*2, 3, output, resample=None)
    output = BottleneckResidualBlock('Discriminator.Down3', dim*2, dim*4, 3, output, resample='down')
    for i in range(6):
        output = BottleneckResidualBlock('Discriminator.8x8_{}'.format(i), dim*4, dim*4, 3, output, resample=None)
    output = BottleneckResidualBlock('Discriminator.Down4', dim*4, dim*8, 3, output, resample='down')
    for i in range(6):
        output = BottleneckResidualBlock('Discriminator.4x4_{}'.format(i), dim*8, dim*8, 3, output, resample=None)

    output = tf.reshape(output, [-1, 4*4*8*dim])
    output = lib.ops.linear.Linear('Discriminator.Output', 4*4*8*dim, 1, output)

    # /5. matches the damping used on the generator output side.
    return tf.reshape(output / 5., [-1])


def FCDiscriminator(inputs, FC_DIM=512, n_layers=3):
    # Fully-connected critic: LeakyReLU input layer + n_layers hidden layers.
    output = LeakyReLULayer('Discriminator.Input', OUTPUT_DIM, FC_DIM, inputs)
    for i in range(n_layers):
        output = LeakyReLULayer('Discriminator.{}'.format(i), FC_DIM, FC_DIM, output)
    output = lib.ops.linear.Linear('Discriminator.Out', FC_DIM, 1, output)

    return tf.reshape(output, [-1])

def DCGANDiscriminator(inputs, dim=DIM, bn=True, nonlinearity=LeakyReLU):
    # Standard DCGAN critic; stddev-0.02 init around layer construction.
    # (Function body continues on the next original line of this chunk.)
    output = tf.reshape(inputs, [-1, 3, DIM, DIM])

    lib.ops.conv2d.set_weights_stdev(0.02)
    lib.ops.deconv2d.set_weights_stdev(0.02)
    lib.ops.linear.set_weights_stdev(0.02)

    output = lib.ops.conv2d.Conv2D('Discriminator.1', 3, dim, 5, output, stride=2)
    # (continuation of DCGANDiscriminator from the previous original line)
    output = nonlinearity(output)

    output = lib.ops.conv2d.Conv2D('Discriminator.2', dim, 2*dim, 5, output, stride=2)
    if bn:
        output = Normalize('Discriminator.BN2', [0,2,3], output)
    output = nonlinearity(output)

    output = lib.ops.conv2d.Conv2D('Discriminator.3', 2*dim, 4*dim, 5, output, stride=2)
    if bn:
        output = Normalize('Discriminator.BN3', [0,2,3], output)
    output = nonlinearity(output)

    output = lib.ops.conv2d.Conv2D('Discriminator.4', 4*dim, 8*dim, 5, output, stride=2)
    if bn:
        output = Normalize('Discriminator.BN4', [0,2,3], output)
    output = nonlinearity(output)

    output = tf.reshape(output, [-1, 4*4*8*dim])
    output = lib.ops.linear.Linear('Discriminator.Output', 4*4*8*dim, 1, output)

    lib.ops.conv2d.unset_weights_stdev()
    lib.ops.deconv2d.unset_weights_stdev()
    lib.ops.linear.unset_weights_stdev()

    return tf.reshape(output, [-1])

# Select the architecture pair chosen by the config.
Generator, Discriminator = GeneratorAndDiscriminator()

with tf.Session(config=tf.ConfigProto(allow_soft_placement=True)) as session:

    # Raw uint8-range image batch, split across the available devices.
    all_real_data_conv = tf.placeholder(tf.int32, shape=[BATCH_SIZE, 3, DIM, DIM])
    # tf.split swapped its argument order in TF 1.0; support both.
    if tf.__version__.startswith('1.'):
        split_real_data_conv = tf.split(all_real_data_conv, len(DEVICES))
    else:
        split_real_data_conv = tf.split(0, len(DEVICES), all_real_data_conv)
    gen_costs, disc_costs = [],[]

    # Build one replica of the model per device; costs are averaged below.
    for device_index, (device, real_data_conv) in enumerate(zip(DEVICES, split_real_data_conv)):
        with tf.device(device):

            # Scale int [0,255] pixels to floats in [-1,1] and flatten.
            real_data = tf.reshape(2*((tf.cast(real_data_conv, tf.float32)/255.)-.5), [BATCH_SIZE//len(DEVICES), OUTPUT_DIM])
            fake_data = Generator(BATCH_SIZE//len(DEVICES), bn=BN_G)

            disc_real = Discriminator(real_data)
            disc_fake = Discriminator(fake_data)

            if MODE == 'wgan':
                gen_cost = -tf.reduce_mean(disc_fake)
                disc_cost = tf.reduce_mean(disc_fake) - tf.reduce_mean(disc_real)

            elif MODE == 'wgan-gp':
                gen_cost = -tf.reduce_mean(disc_fake)
                disc_cost = tf.reduce_mean(disc_fake) - tf.reduce_mean(disc_real)

                # Gradient penalty on random interpolates between real and
                # fake samples (WGAN-GP, Gulrajani et al. 2017).
                alpha = tf.random_uniform(
                    shape=[BATCH_SIZE//len(DEVICES),1],
                    minval=0.,
                    maxval=1.
                )
                differences = fake_data - real_data
                interpolates = real_data + (alpha*differences)
                gradients = tf.gradients(Discriminator(interpolates, bn=BN_D), interpolates)[0]
                slopes = tf.sqrt(tf.reduce_sum(tf.square(gradients), reduction_indices=[1]))
                gradient_penalty = tf.reduce_mean((slopes-1.)**2)
                disc_cost += LAMBDA*gradient_penalty

            elif MODE == 'dcgan':
                try: # tf pre-1.0 (bottom) vs 1.0 (top)
                    gen_cost = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=disc_fake,
                                                                                      labels=tf.ones_like(disc_fake)))
                    disc_cost =  tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=disc_fake,
                                                                                        labels=tf.zeros_like(disc_fake)))
                    disc_cost += tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=disc_real,
                                                                                        labels=tf.ones_like(disc_real)))
                except Exception as e:
                    # Pre-1.0 API took positional (logits, labels).
                    gen_cost = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(disc_fake, tf.ones_like(disc_fake)))
                    disc_cost =  tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(disc_fake, tf.zeros_like(disc_fake)))
                    disc_cost += tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(disc_real, tf.ones_like(disc_real)))
                disc_cost /= 2.

            elif MODE == 'lsgan':
                # Least-squares GAN objective.
                gen_cost = tf.reduce_mean((disc_fake - 1)**2)
                disc_cost = (tf.reduce_mean((disc_real - 1)**2) + tf.reduce_mean((disc_fake - 0)**2))/2.

            else:
                raise Exception()

            gen_costs.append(gen_cost)
            disc_costs.append(disc_cost)

    # Average the per-device costs.
    gen_cost = tf.add_n(gen_costs) / len(DEVICES)
    disc_cost = tf.add_n(disc_costs) / len(DEVICES)

    if MODE == 'wgan':
        gen_train_op = tf.train.RMSPropOptimizer(learning_rate=G_LR).minimize(gen_cost,
                                                                              var_list=lib.params_with_name('Generator'), colocate_gradients_with_ops=True)
        disc_train_op = tf.train.RMSPropOptimizer(learning_rate=D_LR).minimize(disc_cost,
                                                                               var_list=lib.params_with_name('Discriminator.'), colocate_gradients_with_ops=True)

        # Original WGAN enforces the Lipschitz constraint by weight clipping.
        clip_ops = []
        for var in lib.params_with_name('Discriminator'):
            clip_bounds = [-.01, .01]
            clip_ops.append(tf.assign(var, tf.clip_by_value(var, clip_bounds[0], clip_bounds[1])))
        clip_disc_weights = tf.group(*clip_ops)

    elif MODE == 'wgan-gp':
        # Separate G/D learning rates and beta1 values (TTUR).
        gen_train_op = tf.train.AdamOptimizer(learning_rate=G_LR, beta1=BETA1_G, beta2=0.9).minimize(gen_cost,
                                                                                                     var_list=lib.params_with_name('Generator'), colocate_gradients_with_ops=True)
        disc_train_op = tf.train.AdamOptimizer(learning_rate=D_LR, beta1=BETA1_D, beta2=0.9).minimize(disc_cost,
                                                                                                      var_list=lib.params_with_name('Discriminator.'), colocate_gradients_with_ops=True)

    elif MODE == 'dcgan':
        gen_train_op = tf.train.AdamOptimizer(learning_rate=2e-4, beta1=0.5).minimize(gen_cost,
                                                                                      var_list=lib.params_with_name('Generator'), colocate_gradients_with_ops=True)
        disc_train_op = tf.train.AdamOptimizer(learning_rate=2e-4, beta1=0.5).minimize(disc_cost,
                                                                                       var_list=lib.params_with_name('Discriminator.'), colocate_gradients_with_ops=True)

    elif MODE == 'lsgan':
        gen_train_op = tf.train.RMSPropOptimizer(learning_rate=1e-4).minimize(gen_cost,
                                                                              var_list=lib.params_with_name('Generator'), colocate_gradients_with_ops=True)
        disc_train_op = tf.train.RMSPropOptimizer(learning_rate=1e-4).minimize(disc_cost,
                                                                               var_list=lib.params_with_name('Discriminator.'), colocate_gradients_with_ops=True)

    else:
        raise Exception()

    # For generating samples
    # (statement continues on the next original line of this chunk)
    fixed_noise =
    # (continuation: fixed latent vectors so samples are comparable over time)
                  tf.constant(np.random.normal(size=(BATCH_SIZE, 128)).astype('float32'))
    all_fixed_noise_samples = []
    for device_index, device in enumerate(DEVICES):
        n_samples = BATCH_SIZE // len(DEVICES)
        all_fixed_noise_samples.append(Generator(n_samples,
                                                 noise=fixed_noise[device_index*n_samples:(device_index+1)*n_samples]))
    # tf.concat swapped its argument order in TF 1.0; support both.
    if tf.__version__.startswith('1.'):
        all_fixed_noise_samples = tf.concat(all_fixed_noise_samples, axis=0)
    else:
        all_fixed_noise_samples = tf.concat(0, all_fixed_noise_samples)

    def generate_image(iteration):
        # Render the fixed-noise samples to a PNG grid for this iteration.
        # NOTE(review): 255.99//2 floors to 127.0 (max pixel 254) -- upstream
        # variants use /2; confirm intended.
        samples = session.run(all_fixed_noise_samples)
        samples = ((samples+1.)*(255.99//2)).astype('int32')
        lib.save_images.save_images(samples.reshape((BATCH_SIZE, 3, DIM, DIM)), '%s/samples_%d.png' % (SAMPLES_DIR, iteration))

    # FID is computed outside the graph and fed into this variable so it can
    # be logged through the regular summary writer.
    fid_tfvar = tf.Variable(0.0, trainable=False)
    fid_sum = tf.summary.scalar("FID", fid_tfvar)
    writer = tf.summary.FileWriter(TBOARD_DIR, session.graph)

    # Dataset iterator
    train_gen, dev_gen = lib.data_loader.load(BATCH_SIZE, DATA_DIR, DATASET)

    def inf_train_gen():
        # Endless stream of training batches (re-runs the epoch generator).
        while True:
            for (images,) in train_gen():
                yield images

    # Save a batch of ground-truth samples
    # NOTE(review): `real_data_conv` here is the loop variable left over from
    # the per-device graph construction (the last device's slice) -- works
    # for N_GPUS == 1, confirm for multi-GPU runs.
    _x = inf_train_gen().__next__()
    _x_r = session.run(real_data, feed_dict={real_data_conv: _x[:BATCH_SIZE//N_GPUS]})
    _x_r = ((_x_r+1.)*(255.99//2)).astype('int32')
    lib.save_images.save_images(_x_r.reshape((BATCH_SIZE//N_GPUS, 3, DIM, DIM)), '%s/samples_groundtruth.png' % SAMPLES_DIR)

    session.run(tf.global_variables_initializer())

    # Checkpoint saver
    ckpt_saver = tf.train.Saver()

    if LOAD_CHECKPOINT:
        if load_checkpoint(session, ckpt_saver, CHECKPOINT_DIR):
            print(" [*] Load SUCCESS")
        else:
            print(" [!] Load failed...")

    gen = inf_train_gen()

    # load model
    print("load inception model..", end=" ", flush=True)
    fid.create_inception_graph(os.path.join(INCEPTION_DIR, "classify_image_graph_def.pb"))
    print("ok")

    print("load train stats.. ", end="", flush=True)
    # load precalculated training set statistics
    f = np.load(STAT_FILE)
    mu_real, sigma_real = f['mu'][:], f['sigma'][:]
    f.close()
    print("ok")

    # Train loop

    for it in range(ITERS):

        iteration = it + ITER_START

        start_time = time.time()

        # Train generator
        if iteration > 0:
            _ = session.run(gen_train_op)

        # Train critic
        if (MODE == 'dcgan') or (MODE == 'lsgan'):
            disc_iters = 1
        else:
            disc_iters = CRITIC_ITERS
        for i in range(disc_iters):
            _data = gen.__next__()
            _disc_cost, _ = session.run([disc_cost, disc_train_op], feed_dict={all_real_data_conv: _data})
            if MODE == 'wgan':
                # Enforce the Lipschitz constraint after each critic step.
                _ = session.run([clip_disc_weights])

        lib.plot.plot('train disc cost', _disc_cost)
        lib.plot.plot('time', time.time() - start_time)

        if iteration % SAVE_SAMPLES_STEP == 0:
            # t = time.time()
            # dev_disc_costs = []
            # for (images,) in dev_gen():
            #     _dev_disc_cost = session.run(disc_cost, feed_dict={all_real_data_conv: images})
            #     dev_disc_costs.append(_dev_disc_cost)
            # lib.plot.plot('dev disc cost', np.mean(dev_disc_costs))

            generate_image(iteration)

        if iteration % OUTPUT_STEP == 0:
            lib.plot.flush()

        if (iteration % FID_STEP == 0):

            # FID
            samples = np.zeros((FID_EVAL_SIZE, OUTPUT_DIM))

            n_fid_batches = FID_EVAL_SIZE // FID_SAMPLE_BATCH_SIZE

            for i in range(n_fid_batches):

                print("\rgenerate fid sample batch %d/%d " % (i + 1, n_fid_batches), end="", flush=True)

                frm = i * FID_SAMPLE_BATCH_SIZE
                to = frm + FID_SAMPLE_BATCH_SIZE

                # NOTE(review): Generator(...) here adds new ops to the graph
                # on every FID batch of every evaluation -- the graph grows
                # over training.  Consider building this op once outside the
                # loop.
                samples[frm:to] = session.run(Generator(FID_SAMPLE_BATCH_SIZE))

            # Cast, reshape and transpose (BCHW -> BHWC)
            samples = ((samples + 1.0) * 127.5).astype('uint8')
            samples = samples.reshape(FID_EVAL_SIZE, 3, DIM, DIM)
            samples = samples.transpose(0,2,3,1)

            print("ok")

            mu_gen, sigma_gen = fid.calculate_activation_statistics(samples,
                                                                    session,
                                                                    batch_size=FID_BATCH_SIZE,
                                                                    verbose=True)

            print("calculate FID:", end=" ", flush=True)
            try:
                # (statement continues on the next original line of this chunk)
                FID =
                      fid.calculate_frechet_distance(mu_gen, sigma_gen, mu_real, sigma_real)
            except Exception as e:
                # A singular covariance can make the Frechet distance fail;
                # log the error and record a sentinel value instead of dying.
                print(e)
                FID=500

            print(FID)

            session.run(tf.assign(fid_tfvar, FID))
            summary_str = session.run(fid_sum)
            writer.add_summary(summary_str, iteration)

        # Save checkpoint
        if (iteration != 0) and (iteration % CHECKPOINT_STEP == 0):
            ckpt_saver.save(session,
                            os.path.join(CHECKPOINT_DIR, "WGAN_GP.model"),
                            iteration)

        lib.plot.tick()
diff --git a/Wav2Lip-master/evaluation/TTUR-master/WGAN_GP/gan_language_JSD.py b/Wav2Lip-master/evaluation/TTUR-master/WGAN_GP/gan_language_JSD.py
new file mode 100644
index 00000000..48076964
--- /dev/null
+++ b/Wav2Lip-master/evaluation/TTUR-master/WGAN_GP/gan_language_JSD.py
@@ -0,0 +1,306 @@
import os, sys
sys.path.append(os.getcwd())

import time

import numpy as np
import tensorflow as tf

import language_helpers
import tflib as lib
import tflib.ops.linear
import tflib.ops.conv1d
import tflib.plot

# Download Google Billion Word at http://www.statmt.org/lm-benchmark/ and
# fill in the path to the extracted files here!
DATA_DIR = 'data'
if len(DATA_DIR) == 0:
    raise Exception('Please specify path to data directory in gan_language.py!')

BATCH_SIZE = 64 # Batch size
ITERS = 200000 # How many iterations to train for
SEQ_LEN = 32 # Sequence length in characters
DIM = 512 # Model dimensionality. This is fairly slow and overfits, even on
          # Billion Word. Consider decreasing for smaller datasets.

N_NGRAMS = 6 # NGRAM statistics for 1 - N_NGRAMS

# TTUR: two time-scale update rule -- one critic step per generator step but
# a faster discriminator learning rate; otherwise classic 10:1 updates.
TTUR = True
if TTUR:
    CRITIC_ITERS = 1 # How many critic iterations per generator iteration. We
                     # use 10 for the results in the paper, but 5 should work fine
                     # as well.
    LR_DISC = 0.0003 # Learning rate discriminator
    LR_GEN = 0.0001  # learning_rate generator
else:
    CRITIC_ITERS = 10 # How many critic iterations per generator iteration. We
                      # use 10 for the results in the paper, but 5 should work fine
                      # as well.
+ LR_DISC = 0.0001 # Learning rate discriminator + LR_GEN = 0.0001 # learning_rate generator + +timestamp = time.strftime("%m%d_%H%M%S") +DIR = "%s_%6f_%.6f" % (timestamp, D_LR, G_LR) + +TBOARD_DIR = "logs/" + DIR # Tensorboard log directory +SAMPLES_DIR = TBOARD_DIR # Samples directory + +LAMBDA = 10 # Gradient penalty lambda hyperparameter. +MAX_N_EXAMPLES = 10000000 # Max number of data examples to load. If data loading + # is too slow or takes too much RAM, you can decrease + # this (at the expense of having less training data). + +if not os.path.exists(TBOARD_DIR): + print("*** create log dir %s" % TBOARD_DIR) + os.makedirs(TBOARD_DIR) +if not os.path.exists(SAMPLES_DIR): + print("*** create sample dir %s" % SAMPLES_DIR) + os.makedirs(SAMPLES_DIR) + +lib.print_model_settings(locals().copy(), TBOARD_DIR) + +# Load data +lines, charmap, inv_charmap = language_helpers.load_dataset( + max_length=SEQ_LEN, + max_n_examples=MAX_N_EXAMPLES, + data_dir=DATA_DIR +) + +print("build model...") + +def softmax(logits): + return tf.reshape( + tf.nn.softmax( + tf.reshape(logits, [-1, len(charmap)]) + ), + tf.shape(logits) + ) + +def make_noise(shape): + return tf.random_normal(shape) + +def ResBlock(name, inputs): + output = inputs + output = tf.nn.relu(output) + output = lib.ops.conv1d.Conv1D(name+'.1', DIM, DIM, 5, output) + output = tf.nn.relu(output) + output = lib.ops.conv1d.Conv1D(name+'.2', DIM, DIM, 5, output) + return inputs + (0.3 * output) + +def Generator(n_samples, prev_outputs=None): + output = make_noise(shape=[n_samples, 128]) + output = lib.ops.linear.Linear('Generator.Input', 128, SEQ_LEN*DIM, output) + output = tf.reshape(output, [-1, DIM, SEQ_LEN]) + output = ResBlock('Generator.1', output) + output = ResBlock('Generator.2', output) + output = ResBlock('Generator.3', output) + output = ResBlock('Generator.4', output) + output = ResBlock('Generator.5', output) + output = lib.ops.conv1d.Conv1D('Generator.Output', DIM, len(charmap), 1, output) + output = 
             tf.transpose(output, [0, 2, 1])  # (N, vocab, SEQ_LEN) -> (N, SEQ_LEN, vocab)
    output = softmax(output)
    return output

def Discriminator(inputs):
    # 1D-conv critic over (one-hot or softmaxed) character sequences.
    output = tf.transpose(inputs, [0,2,1])  # (N, SEQ_LEN, vocab) -> (N, vocab, SEQ_LEN)
    output = lib.ops.conv1d.Conv1D('Discriminator.Input', len(charmap), DIM, 1, output)
    output = ResBlock('Discriminator.1', output)
    output = ResBlock('Discriminator.2', output)
    output = ResBlock('Discriminator.3', output)
    output = ResBlock('Discriminator.4', output)
    output = ResBlock('Discriminator.5', output)
    output = tf.reshape(output, [-1, SEQ_LEN * DIM])
    output = lib.ops.linear.Linear('Discriminator.Output', SEQ_LEN * DIM, 1, output)
    return output

# Inputs
real_inputs_discrete = tf.placeholder(tf.int32, shape=[BATCH_SIZE, SEQ_LEN])
real_inputs = tf.one_hot(real_inputs_discrete, len(charmap))
fake_inputs = Generator(BATCH_SIZE)
fake_inputs_discrete = tf.argmax(fake_inputs, fake_inputs.get_shape().ndims-1)

# Disc prop
disc_real = Discriminator(real_inputs)
disc_fake = Discriminator(fake_inputs)

# Costs & summaries (Wasserstein estimate)
disc_cost = tf.reduce_mean(disc_fake) - tf.reduce_mean(disc_real)
gen_cost = -tf.reduce_mean(disc_fake)

disc_cost_sum = tf.summary.scalar("bill disc cost ws", disc_cost)
gen_cost_sum = tf.summary.scalar("bill gen cost", gen_cost)

# JSD summaries: one scalar placeholder per n-gram order; values are
# computed outside the graph during training and fed in.
js_ph = []
for i in range(N_NGRAMS):
    js_ph.append(tf.placeholder(tf.float32, shape=()))

js_sums = []
for i in range(N_NGRAMS):
    js_sums.append(tf.summary.scalar("bill js%d" % (i + 1), js_ph[i]))

js_sum_op = tf.summary.merge(js_sums)

# WGAN lipschitz-penalty
alpha = tf.random_uniform(
    shape=[BATCH_SIZE,1,1],
    minval=0.,
    maxval=1.
)
differences = fake_inputs - real_inputs
interpolates = real_inputs + (alpha*differences)
gradients = tf.gradients(Discriminator(interpolates), [interpolates])[0]
slopes = tf.sqrt(tf.reduce_sum(tf.square(gradients), reduction_indices=[1,2]))
gradient_penalty = tf.reduce_mean((slopes-1.)**2)
disc_cost += LAMBDA*gradient_penalty

disc_cost_opt_sum = tf.summary.scalar("bill disc cost opt", disc_cost)

disc_cost_sum_op = tf.summary.merge([disc_cost_sum, disc_cost_opt_sum])
gen_cost_sum_op = gen_cost_sum

# Params
gen_params = lib.params_with_name('Generator')
disc_params = lib.params_with_name('Discriminator')

# Optimizers
# (kept: disabled variant that also recorded per-variable gradient-norm
# summaries)
#opt_d = tf.train.AdamOptimizer(LR_D, beta1=0.5, beta2=0.9)
#opt_g = tf.train.AdamOptimizer(LR_G, beta1=0.5, beta2=0.9)

# Discriminator
#grads_and_vars = opt_d.compute_gradients(disc_cost, disc_params)
#disc_train_op = opt_d.apply_gradients(grads_and_vars)

# Gradient summaries discriminator
#grad_d_sum = []
#for i, (grad, vars_) in enumerate(grads_and_vars):
#    grad_l2 = tf.sqrt(tf.reduce_sum(tf.square(grad)))
#    grad_d_sum.append(tf.summary.scalar("grad_l2_%s" % (vars_.name), grad_l2))

# Generator
#grads_and_vars = opt_d.compute_gradients(gen_cost, gen_params)
#gen_train_op = opt_d.apply_gradients(grads_and_vars)

# Gradient summaries generator
#grad_g_sum = []
#for i, (grad, vars_) in enumerate(grads_and_vars):
#    grad_l2 = tf.sqrt(tf.reduce_sum(tf.square(grad)))
#    grad_g_sum.append(tf.summary.scalar("grad_l2_%s" % (vars_.name), grad_l2))

# Merge disc summaries
#disc_sums_op = tf.summary.merge([disc_cost_sum_op, grad_d_sum])
disc_sums_op = disc_cost_sum_op

# Merge gen summaries
#gen_sums_op = tf.summary.merge([gen_cost_sum_op, grad_g_sum])
gen_sums_op = gen_cost_sum_op

# TTUR: distinct learning rates for generator and discriminator.
gen_train_op = tf.train.AdamOptimizer(learning_rate=LR_GEN, beta1=0.5, beta2=0.9).minimize(gen_cost, var_list=gen_params)
# (statement continues on the next original line of this chunk)
disc_train_op = tf.train.AdamOptimizer(learning_rate=LR_DISC, beta1=0.5, beta2=0.9).minimize(disc_cost,
                                                                                            var_list=disc_params)

# Dataset iterator
def inf_train_gen():
    # Endless stream of int32 (BATCH_SIZE, SEQ_LEN) batches; reshuffles the
    # corpus at every pass and drops the final partial batch.
    while True:
        np.random.shuffle(lines)
        for i in range(0, len(lines)-BATCH_SIZE+1, BATCH_SIZE):
            yield np.array(
                [[charmap[c] for c in l] for l in lines[i:i+BATCH_SIZE]],
                dtype='int32'
            )

# During training we monitor JS divergence between the true & generated ngram
# distributions for n=1,2,3,4. To get an idea of the optimal values, we
# evaluate these statistics on a held-out set first.
#print("true char ngram lms ", end=" ", flush=True)
#true_char_ngram_lms = []
#for i in range(N_NGRAMS):
#    print(i, end=" ", flush=True)
#    true_char_ngram_lms.append(language_helpers.NgramLanguageModel(i+1, lines[10*BATCH_SIZE:], tokenize=False))
#print()
#print("val char ngram lms")
#validation_char_ngram_lms = [language_helpers.NgramLanguageModel(i+1, lines[:10*BATCH_SIZE], tokenize=False) for i in range(N_NGRAMS)]
#for i in range(N_NGRAMS):
#    print("validation set JSD for n=%d: %d" % (i+1, true_char_ngram_lms[i].js_with(validation_char_ngram_lms[i])))
print("true char ngram lms:", end=" ", flush=True)
true_char_ngram_lms = []
for i in range(N_NGRAMS):
    print(i, end=" ", flush=True)
    true_char_ngram_lms.append(language_helpers.NgramLanguageModel(i+1, lines, tokenize=False))
print()

print("start session")
run_config = tf.ConfigProto()
run_config.gpu_options.allow_growth=True

with tf.Session(config=run_config) as session:

    session.run(tf.global_variables_initializer())

    sum_writer = tf.summary.FileWriter(TBOARD_DIR, session.graph)

    def generate_samples():
        # Run the generator and decode argmax characters back to tuples of
        # symbols via inv_charmap.
        samples = session.run(fake_inputs)
        samples = np.argmax(samples, axis=2)
        decoded_samples = []
        for i in range(len(samples)):
            decoded = []
            for j in range(len(samples[i])):
                decoded.append(inv_charmap[samples[i][j]])
            decoded_samples.append(tuple(decoded))
        return decoded_samples

    gen = inf_train_gen()

    # Run
    for iteration in range(ITERS):

        start_time = time.time()

        # Generate samples and eval JSDs
        if iteration % 100 == 0:
            samples = []
            for i in range(10):
                samples.extend(generate_samples())

            # JS divergence between generated and true n-gram statistics,
            # for each n-gram order; logged to plot and tensorboard.
            js = []
            for i in range(N_NGRAMS):
                lm = language_helpers.NgramLanguageModel(i+1, samples, tokenize=False)
                js.append(lm.js_with(true_char_ngram_lms[i]))
                lib.plot.plot('js%d' % (i+1), js[i])
            feed_dict = {k: v for k, v in zip(js_ph, js)}
            js_sum = session.run(js_sum_op, feed_dict=feed_dict)
            sum_writer.add_summary(js_sum, iteration)

            with open('%s/samples_%d.txt' % (SAMPLES_DIR, iteration + 1), 'w') as f:
                for s in samples:
                    s = "".join(s)
                    f.write(s + "\n")


        # Train generator
        if iteration > 0:
            summary_string, _ = session.run([gen_sums_op, gen_train_op])
            sum_writer.add_summary(summary_string, iteration)

        # Train critic: CRITIC_ITERS-1 plain steps, then one final step that
        # also evaluates the summary ops.
        for i in range(CRITIC_ITERS - 1):
            _data = gen.__next__()
            _disc_cost, _ = session.run(
                [disc_cost, disc_train_op],
                feed_dict={real_inputs_discrete:_data}
            )
        _data = gen.__next__()
        _disc_cost, summary_string, _ = session.run(
            [disc_cost, disc_sums_op, disc_train_op],
            feed_dict={real_inputs_discrete:_data}
        )
        sum_writer.add_summary(summary_string, iteration)

        lib.plot.plot('time', time.time() - start_time)
        lib.plot.plot('train disc cost', _disc_cost)

        if iteration % 100 == 0:
            lib.plot.flush()

        lib.plot.tick()
diff --git a/Wav2Lip-master/evaluation/TTUR-master/WGAN_GP/inception-2015-12-05/README.md b/Wav2Lip-master/evaluation/TTUR-master/WGAN_GP/inception-2015-12-05/README.md
new file mode 100644
index 00000000..c513cbca
--- /dev/null
+++ b/Wav2Lip-master/evaluation/TTUR-master/WGAN_GP/inception-2015-12-05/README.md
@@ -0,0 +1,7 @@
Contents of the Inception-v3 model.

Get it from here:

http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz

and unpack it in the projects root directory (WGAN_GP).
#
# Taken from: https://github.com/igul222/improved_wgan_training
#

import collections
import numpy as np
import re


def tokenize_string(sample):
    """Lower-case *sample* and split it on single spaces into a word tuple."""
    return tuple(sample.lower().split(' '))


class NgramLanguageModel(object):
    """Empirical n-gram model over a corpus of token tuples.

    `samples` is an iterable of sequences (tuples of characters or words);
    with `tokenize=True` each sample is first lower-cased and split on
    spaces.  The model stores raw n-gram counts and exposes empirical
    likelihoods plus several divergence / overlap measures against another
    NgramLanguageModel.
    """

    def __init__(self, n, samples, tokenize=False):
        if tokenize:
            tokenized_samples = []
            for sample in samples:
                tokenized_samples.append(tokenize_string(sample))
            samples = tokenized_samples

        self._n = n                                        # n-gram order
        self._samples = samples                            # corpus of tuples
        self._ngram_counts = collections.defaultdict(int)  # ngram -> count
        self._total_ngrams = 0                             # sum of all counts
        for ngram in self.ngrams():
            self._ngram_counts[ngram] += 1
            self._total_ngrams += 1

    def ngrams(self):
        """Yield every length-n slice of every sample (as a tuple)."""
        n = self._n
        for sample in self._samples:
            for i in range(len(sample)-n+1):
                yield sample[i:i+n]

    def unique_ngrams(self):
        """Return the set of distinct n-grams observed in the corpus."""
        return set(self._ngram_counts.keys())

    def log_likelihood(self, ngram):
        """Natural-log empirical probability of *ngram*; -inf if unseen."""
        if ngram not in self._ngram_counts:
            return -np.inf
        else:
            return np.log(self._ngram_counts[ngram]) - np.log(self._total_ngrams)

    def kl_to(self, p):
        """Mean log-likelihood ratio of p's n-gram stream vs this model.

        *p* is another NgramLanguageModel.  N-grams of p unseen by self
        contribute +inf, making the result infinite.
        """
        # p is another NgramLanguageModel
        log_likelihood_ratios = []
        for ngram in p.ngrams():
            log_likelihood_ratios.append(p.log_likelihood(ngram) - self.log_likelihood(ngram))
        return np.mean(log_likelihood_ratios)

    def cosine_sim_with(self, p):
        """Cosine similarity between the two empirical probability vectors."""
        # p is another NgramLanguageModel
        p_dot_q = 0.
        p_norm = 0.
        q_norm = 0.
        for ngram in p.unique_ngrams():
            p_i = np.exp(p.log_likelihood(ngram))
            q_i = np.exp(self.log_likelihood(ngram))
            p_dot_q += p_i * q_i
            p_norm += p_i**2
        for ngram in self.unique_ngrams():
            q_i = np.exp(self.log_likelihood(ngram))
            q_norm += q_i**2
        return p_dot_q / (np.sqrt(p_norm) * np.sqrt(q_norm))

    def precision_wrt(self, p):
        """Fraction of this model's n-gram mass whose n-grams also occur in p."""
        # p is another NgramLanguageModel
        num = 0.
        denom = 0
        p_ngrams = p.unique_ngrams()
        for ngram in self.unique_ngrams():
            if ngram in p_ngrams:
                num += self._ngram_counts[ngram]
            denom += self._ngram_counts[ngram]
        return float(num) / denom

    def recall_wrt(self, p):
        """Fraction of p's n-gram mass covered by this model."""
        return p.precision_wrt(self)

    def js_with(self, p):
        """Jensen-Shannon divergence between self and p over their n-grams.

        Computed in log-space (nats) and converted to base 2 by the final
        division by log(2).
        """
        # KL(p || m) over p's support, where m = (p + q) / 2.
        log_p = np.array([p.log_likelihood(ngram) for ngram in p.unique_ngrams()])
        log_q = np.array([self.log_likelihood(ngram) for ngram in p.unique_ngrams()])
        log_m = np.logaddexp(log_p - np.log(2), log_q - np.log(2))
        kl_p_m = np.sum(np.exp(log_p) * (log_p - log_m))

        # KL(q || m) over self's support.
        log_p = np.array([p.log_likelihood(ngram) for ngram in self.unique_ngrams()])
        log_q = np.array([self.log_likelihood(ngram) for ngram in self.unique_ngrams()])
        log_m = np.logaddexp(log_p - np.log(2), log_q - np.log(2))
        kl_q_m = np.sum(np.exp(log_q) * (log_q - log_m))

        return 0.5*(kl_p_m + kl_q_m) / np.log(2)


def load_dataset(max_length, max_n_examples, tokenize=False, max_vocab_size=2048, data_dir='/home/ishaan/data/1-billion-word-language-modeling-benchmark-r13output'):
    """Load Billion Word lines as fixed-length tuples plus a vocab mapping.

    Reads up to `max_n_examples` lines from the shuffled training shards,
    truncates/pads each to `max_length` symbols (padding with '`'), builds a
    frequency-ranked vocabulary capped at `max_vocab_size` (index 0 is
    'unk'), and maps out-of-vocabulary symbols to 'unk'.

    Returns (filtered_lines, charmap, inv_charmap).
    """
    print("loading dataset...")

    lines = []

    finished = False

    for i in range(99):
        path = data_dir+("/training-monolingual.tokenized.shuffled/news.en-{}-of-00100".format(str(i+1).zfill(5)))
        with open(path, 'r') as f:
            for line in f:
                line = line[:-1]  # strip the trailing newline
                if tokenize:
                    line = tokenize_string(line)
                else:
                    line = tuple(line)

                if len(line) > max_length:
                    line = line[:max_length]

                lines.append(line + ( ("`",)*(max_length-len(line)) ) )

                if len(lines) == max_n_examples:
                    finished = True
                    break
        if finished:
            break

    np.random.shuffle(lines)

    # cleanup: `collections` is already imported at module level; the
    # original redundantly re-imported it here.
    counts = collections.Counter(char for line in lines for char in line)

    charmap = {'unk':0}
    inv_charmap = ['unk']

    for char,count in counts.most_common(max_vocab_size-1):
        if char not in charmap:
            charmap[char] = len(inv_charmap)
            inv_charmap.append(char)

    filtered_lines = []
    for line in lines:
        filtered_line = []
        for char in line:
            if char in charmap:
                filtered_line.append(char)
            else:
                filtered_line.append('unk')
        filtered_lines.append(tuple(filtered_line))

    #for i in range(100):
    #    print(filtered_lines[i])

    print("loaded %s lines in dataset" % (len(lines)))
    return filtered_lines, charmap, inv_charmap
import numpy as np
import time
import os
from glob import glob


def make_generator(path, batch_size, dataset):
    """Build an epoch-generator factory over the image files of *dataset*.

    path: directory holding the images (for 'lsun', subdirectories 0..303).
    batch_size: number of images per yielded batch.
    dataset: one of 'celeba', 'svhn', 'cifar10', 'lsun'.

    Returns get_epoch, a zero-argument generator function yielding
    (images,) tuples of shape (batch_size, 3, dim, dim), dtype int32.
    """
    print("scan files", end=" ", flush=True)
    if dataset == "celeba":
        files = glob(os.path.join(path, "*.jpg"))
        dim = 64
    elif dataset == "svhn" or dataset == "cifar10":
        files = glob(os.path.join(path, "*.png"))
        dim = 32
    elif dataset == "lsun":
        # It's assumed the lsun images are splitted
        # into subdirectories named 0, 1, .., 304
        files = []
        for i in range(304):
            print("\rscan files %d" % i, end="", flush=True)
            files += glob(os.path.join(path, str(i), "*.jpg"))
        dim = 64
    else:
        # fixed: an unrecognized dataset name previously crashed later with
        # UnboundLocalError on `files`; fail fast with a clear message instead
        raise ValueError("unknown dataset: %r" % (dataset,))
    n_files = len(files)
    print()
    print("%d images found" % n_files)

    def get_epoch():
        images = np.zeros((batch_size, 3, dim, dim), dtype='int32')
        files_idx = list(range(n_files))
        random_state = np.random.RandomState()
        random_state.shuffle(files_idx)
        for n, i in enumerate(files_idx):
            # Deferred import: scipy.misc (and its imread) was removed from
            # modern SciPy; importing lazily keeps this module importable.
            import scipy.misc
            image = scipy.misc.imread(files[i])
            # HWC -> CHW to match the NCHW layout used downstream.
            images[n % batch_size] = image.transpose(2, 0, 1)
            # NOTE(review): the very first image of each epoch (n == 0) is
            # never yielded and the final partial batch is dropped -- this
            # reproduces the original logic; confirm it is intentional.
            if n > 0 and n % batch_size == 0:
                yield (images,)
    return get_epoch


def load(batch_size, data_dir, dataset):
    """Return identical (train, valid) epoch-generator factories over data_dir."""
    return (
        make_generator(data_dir, batch_size, dataset),
        make_generator(data_dir, batch_size, dataset)
    )


if __name__ == '__main__':
    import sys
    # fixed: load() was called as load(64), omitting its two required
    # arguments (TypeError); read data_dir and dataset from the command line.
    train_gen, valid_gen = load(64, sys.argv[1], sys.argv[2])
    t0 = time.time()
    for i, batch in enumerate(train_gen(), start=1):
        # fixed: the format string was "s\t%d" with one placeholder but two
        # substituted values, which raised TypeError on the first iteration
        print("%s\t%d" % (str(time.time() - t0), batch[0][0, 0, 0, 0]))
        if i == 1000:
            break
        t0 = time.time()
import tflib as lib

import numpy as np
import tensorflow as tf

def Batchnorm(name, axes, inputs, is_training=None, stats_iter=None, update_moving_stats=True, fused=True):
    # Graph-mode (TF1) batch normalization with optional fused NCHW fast path.
    #
    # name: prefix for the lib.param variables created (offset/scale/moving stats).
    # axes: reduction axes; [0,2,3] or [0,2] selects the fused NCHW path.
    # is_training: optional boolean tensor; None means "always training stats".
    # stats_iter: iteration counter tensor used to average the moving stats.
    # update_moving_stats: if True, training passes also update moving mean/var.
    # fused: allow tf.nn.fused_batch_norm for the NCHW cases.
    if ((axes == [0,2,3]) or (axes == [0,2])) and fused==True:
        if axes==[0,2]:
            # Promote NCW input to NCHW so the fused kernel can be used;
            # the extra dim is stripped again before returning.
            inputs = tf.expand_dims(inputs, 3)
        # Old (working but pretty slow) implementation:
        ##########

        # inputs = tf.transpose(inputs, [0,2,3,1])

        # mean, var = tf.nn.moments(inputs, [0,1,2], keep_dims=False)
        # offset = lib.param(name+'.offset', np.zeros(mean.get_shape()[-1], dtype='float32'))
        # scale = lib.param(name+'.scale', np.ones(var.get_shape()[-1], dtype='float32'))
        # result = tf.nn.batch_normalization(inputs, mean, var, offset, scale, 1e-4)

        # return tf.transpose(result, [0,3,1,2])

        # New (super fast but untested) implementation:
        offset = lib.param(name+'.offset', np.zeros(inputs.get_shape()[1], dtype='float32'))
        scale = lib.param(name+'.scale', np.ones(inputs.get_shape()[1], dtype='float32'))

        # Non-trainable running statistics, updated manually below.
        moving_mean = lib.param(name+'.moving_mean', np.zeros(inputs.get_shape()[1], dtype='float32'), trainable=False)
        moving_variance = lib.param(name+'.moving_variance', np.ones(inputs.get_shape()[1], dtype='float32'), trainable=False)

        def _fused_batch_norm_training():
            return tf.nn.fused_batch_norm(inputs, scale, offset, epsilon=1e-5, data_format='NCHW')
        def _fused_batch_norm_inference():
            # Version which blends in the current item's statistics
            batch_size = tf.cast(tf.shape(inputs)[0], 'float32')
            mean, var = tf.nn.moments(inputs, [2,3], keep_dims=True)
            mean = ((1./batch_size)*mean) + (((batch_size-1.)/batch_size)*moving_mean)[None,:,None,None]
            var = ((1./batch_size)*var) + (((batch_size-1.)/batch_size)*moving_variance)[None,:,None,None]
            return tf.nn.batch_normalization(inputs, mean, var, offset[None,:,None,None], scale[None,:,None,None], 1e-5), mean, var

        # Standard version
        # return tf.nn.fused_batch_norm(
        #     inputs,
        #     scale,
        #     offset,
        #     epsilon=1e-2,
        #     mean=moving_mean,
        #     variance=moving_variance,
        #     is_training=False,
        #     data_format='NCHW'
        # )

        if is_training is None:
            outputs, batch_mean, batch_var = _fused_batch_norm_training()
        else:
            # Choose training vs. blended-inference stats at graph run time.
            outputs, batch_mean, batch_var = tf.cond(is_training,
                                                     _fused_batch_norm_training,
                                                     _fused_batch_norm_inference)
            if update_moving_stats:
                no_updates = lambda: outputs
                def _force_updates():
                    """Internal function forces updates moving_vars if is_training."""
                    # Running average with weight 1/(stats_iter+1) for the new
                    # batch, i.e. an equally-weighted mean over iterations.
                    float_stats_iter = tf.cast(stats_iter, tf.float32)

                    update_moving_mean = tf.assign(moving_mean, ((float_stats_iter/(float_stats_iter+1))*moving_mean) + ((1/(float_stats_iter+1))*batch_mean))
                    update_moving_variance = tf.assign(moving_variance, ((float_stats_iter/(float_stats_iter+1))*moving_variance) + ((1/(float_stats_iter+1))*batch_var))

                    # Tie the assignments to the output so they run whenever
                    # the normalized activations are evaluated.
                    with tf.control_dependencies([update_moving_mean, update_moving_variance]):
                        return tf.identity(outputs)
                outputs = tf.cond(is_training, _force_updates, no_updates)

        if axes == [0,2]:
            return outputs[:,:,:,0] # collapse last dim
        else:
            return outputs
    else:
        # raise Exception('old BN')
        # TODO we can probably use nn.fused_batch_norm here too for speedup
        mean, var = tf.nn.moments(inputs, axes, keep_dims=True)
        shape = mean.get_shape().as_list()
        if 0 not in axes:
            print("WARNING (%s): didn't find 0 in axes, but not using separate BN params for each item in batch" % name)
            shape[0] = 1
        offset = lib.param(name+'.offset', np.zeros(shape, dtype='float32'))
        scale = lib.param(name+'.scale', np.ones(shape, dtype='float32'))
        result = tf.nn.batch_normalization(inputs, mean, var, offset, scale, 1e-5)


        return result
# ---- tflib/ops/conv1d.py ----
import tflib as lib

import numpy as np
import tensorflow as tf

_default_weightnorm = False
def enable_default_weightnorm():
    global _default_weightnorm
    _default_weightnorm = True

def Conv1D(name, input_dim, output_dim, filter_size, inputs, he_init=True, mask_type=None, stride=1, weightnorm=None, biases=True, gain=1.):
    """
    inputs: tensor of shape (batch size, num channels, width)
    mask_type: one of None, ('a', n_channels), ('b', n_channels)

    returns: tensor of shape (batch size, num channels, width)
    """
    with tf.name_scope(name) as scope:

        if mask_type is not None:
            mask_type, mask_n_channels = mask_type

            mask = np.ones(
                (filter_size, input_dim, output_dim),
                dtype='float32'
            )
            center = filter_size // 2

            # Mask out future locations
            # filter shape is (width, input channels, output channels)
            mask[center+1:, :, :] = 0.

            # Mask out future channels
            # fixed: `xrange` is Python 2 only and raises NameError on the
            # Python 3 interpreter this project targets
            for i in range(mask_n_channels):
                for j in range(mask_n_channels):
                    if (mask_type == 'a' and i >= j) or (mask_type == 'b' and i > j):
                        mask[
                            center,
                            i::mask_n_channels,
                            j::mask_n_channels
                        ] = 0.

        def uniform(stdev, size):
            # Uniform init with the same variance as a Gaussian of std `stdev`.
            return np.random.uniform(
                low=-stdev * np.sqrt(3),
                high=stdev * np.sqrt(3),
                size=size
            ).astype('float32')

        fan_in = input_dim * filter_size
        fan_out = output_dim * filter_size / stride

        if mask_type is not None:  # only approximately correct
            fan_in /= 2.
            fan_out /= 2.

        if he_init:
            filters_stdev = np.sqrt(4. / (fan_in + fan_out))
        else:  # Normalized init (Glorot & Bengio)
            filters_stdev = np.sqrt(2. / (fan_in + fan_out))

        filter_values = uniform(
            filters_stdev,
            (filter_size, input_dim, output_dim)
        )
        filter_values *= gain

        filters = lib.param(name+'.Filters', filter_values)

        if weightnorm is None:
            weightnorm = _default_weightnorm
        if weightnorm:
            # Learnable per-output-channel gain, initialized to the filters' norms.
            norm_values = np.sqrt(np.sum(np.square(filter_values), axis=(0, 1)))
            target_norms = lib.param(
                name + '.g',
                norm_values
            )
            with tf.name_scope('weightnorm') as scope:
                norms = tf.sqrt(tf.reduce_sum(tf.square(filters), reduction_indices=[0, 1]))
                filters = filters * (target_norms / norms)

        if mask_type is not None:
            with tf.name_scope('filter_mask'):
                filters = filters * mask

        result = tf.nn.conv1d(
            value=inputs,
            filters=filters,
            stride=stride,
            padding='SAME',
            data_format='NCHW'
        )

        if biases:
            _biases = lib.param(
                name+'.Biases',
                np.zeros([output_dim], dtype='float32')
            )
            # conv1d has no NCHW bias support, so temporarily go through 4-D
            # bias_add and squeeze back. NOTE(review): tf.squeeze() without an
            # axis also drops a batch dimension of size 1 -- confirm callers
            # never use batch size 1 here.
            result = tf.expand_dims(result, 3)
            result = tf.nn.bias_add(result, _biases, data_format='NCHW')
            result = tf.squeeze(result)

        return result

# ---- tflib/ops/conv2d.py ----

_weights_stdev = None
def set_weights_stdev(weights_stdev):
    global _weights_stdev
    _weights_stdev = weights_stdev

def unset_weights_stdev():
    global _weights_stdev
    _weights_stdev = None

def Conv2D(name, input_dim, output_dim, filter_size, inputs, he_init=True, mask_type=None, stride=1, weightnorm=None, biases=True, gain=1.):
    """
    inputs: tensor of shape (batch size, num channels, height, width)
    mask_type: one of None, ('a', n_channels), ('b', n_channels)

    returns: tensor of shape (batch size, num channels, height, width)
    """
    with tf.name_scope(name) as scope:

        if mask_type is not None:
            mask_type, mask_n_channels = mask_type

            mask = np.ones(
                (filter_size, filter_size, input_dim, output_dim),
                dtype='float32'
            )
            center = filter_size // 2

            # Mask out future locations
            # filter shape is (height, width, input channels, output channels)
            mask[center+1:, :, :, :] = 0.
            mask[center, center+1:, :, :] = 0.

            # Mask out future channels
            # fixed: `xrange` is Python 2 only (NameError on Python 3)
            for i in range(mask_n_channels):
                for j in range(mask_n_channels):
                    if (mask_type == 'a' and i >= j) or (mask_type == 'b' and i > j):
                        mask[
                            center,
                            center,
                            i::mask_n_channels,
                            j::mask_n_channels
                        ] = 0.

        def uniform(stdev, size):
            return np.random.uniform(
                low=-stdev * np.sqrt(3),
                high=stdev * np.sqrt(3),
                size=size
            ).astype('float32')

        fan_in = input_dim * filter_size**2
        fan_out = output_dim * filter_size**2 // (stride**2)

        if mask_type is not None:  # only approximately correct
            # fixed: `//= 2.` floor-divided the fan counts, inconsistent with
            # the plain halving used by Conv1D for the same correction
            fan_in /= 2.
            fan_out /= 2.

        if he_init:
            filters_stdev = np.sqrt(4. / (fan_in + fan_out))
        else:  # Normalized init (Glorot & Bengio)
            filters_stdev = np.sqrt(2. / (fan_in + fan_out))

        if _weights_stdev is not None:
            # Global override installed via set_weights_stdev().
            filter_values = uniform(
                _weights_stdev,
                (filter_size, filter_size, input_dim, output_dim)
            )
        else:
            filter_values = uniform(
                filters_stdev,
                (filter_size, filter_size, input_dim, output_dim)
            )

        filter_values *= gain

        filters = lib.param(name+'.Filters', filter_values)

        if weightnorm is None:
            weightnorm = _default_weightnorm
        if weightnorm:
            norm_values = np.sqrt(np.sum(np.square(filter_values), axis=(0, 1, 2)))
            # fixed: the initial-value argument had been commented out, so
            # lib.param() was called with only a name (runtime TypeError);
            # restored to match Conv1D/Deconv2D/Linear
            target_norms = lib.param(
                name + '.g',
                norm_values
            )
            with tf.name_scope('weightnorm') as scope:
                norms = tf.sqrt(tf.reduce_sum(tf.square(filters), reduction_indices=[0, 1, 2]))
                filters = filters * (target_norms / norms)

        if mask_type is not None:
            with tf.name_scope('filter_mask'):
                filters = filters * mask

        result = tf.nn.conv2d(
            input=inputs,
            filter=filters,
            strides=[1, 1, stride, stride],
            padding='SAME',
            data_format='NCHW'
        )

        if biases:
            _biases = lib.param(
                name+'.Biases',
                np.zeros(output_dim, dtype='float32')
            )
            result = tf.nn.bias_add(result, _biases, data_format='NCHW')

        return result

# ---- tflib/ops/deconv2d.py (module prologue; Deconv2D continues below) ----

import tflib as lib

import numpy as np
import tensorflow as tf

_default_weightnorm = False
def enable_default_weightnorm():
    global _default_weightnorm
    _default_weightnorm = True

_weights_stdev = None
def set_weights_stdev(weights_stdev):
    global _weights_stdev
    _weights_stdev = weights_stdev
# ---- tflib/ops/deconv2d.py (continued) ----

def unset_weights_stdev():
    global _weights_stdev
    _weights_stdev = None

def Deconv2D(
    name,
    input_dim,
    output_dim,
    filter_size,
    inputs,
    he_init=True,
    weightnorm=None,
    biases=True,
    gain=1.,
    mask_type=None,
    ):
    """
    Fractionally-strided (transposed) convolution that doubles H and W.

    NOTE(review): the code transposes NCHW -> NHWC internally, so inputs are
    expected as (batch size, input_dim, height, width) despite the original
    comment claiming NHWC -- confirm against callers.
    returns: tensor of shape (batch size, output_dim, 2*height, 2*width)
    """
    with tf.name_scope(name) as scope:

        if mask_type != None:
            raise Exception('Unsupported configuration')

        def uniform(stdev, size):
            return np.random.uniform(
                low=-stdev * np.sqrt(3),
                high=stdev * np.sqrt(3),
                size=size
            ).astype('float32')

        stride = 2
        fan_in = input_dim * filter_size**2 // (stride**2)
        fan_out = output_dim * filter_size**2

        if he_init:
            filters_stdev = np.sqrt(4. / (fan_in + fan_out))
        else:  # Normalized init (Glorot & Bengio)
            filters_stdev = np.sqrt(2. / (fan_in + fan_out))

        if _weights_stdev is not None:
            filter_values = uniform(
                _weights_stdev,
                (filter_size, filter_size, output_dim, input_dim)
            )
        else:
            filter_values = uniform(
                filters_stdev,
                (filter_size, filter_size, output_dim, input_dim)
            )

        filter_values *= gain

        filters = lib.param(
            name+'.Filters',
            filter_values
        )

        if weightnorm is None:
            weightnorm = _default_weightnorm
        if weightnorm:
            norm_values = np.sqrt(np.sum(np.square(filter_values), axis=(0, 1, 3)))
            target_norms = lib.param(
                name + '.g',
                norm_values
            )
            # fixed: a stray `target_norms = norm_values` immediately
            # overwrote the learnable gain with a constant numpy array,
            # unlike the weightnorm handling in Conv1D/Conv2D/Linear
            with tf.name_scope('weightnorm') as scope:
                norms = tf.sqrt(tf.reduce_sum(tf.square(filters), reduction_indices=[0, 1, 3]))
                filters = filters * tf.expand_dims(target_norms / norms, 1)

        inputs = tf.transpose(inputs, [0, 2, 3, 1], name='NCHW_to_NHWC')

        input_shape = tf.shape(inputs)
        try:  # tf pre-1.0 (top) vs 1.0 (bottom)
            output_shape = tf.pack([input_shape[0], 2*input_shape[1], 2*input_shape[2], output_dim])
        except Exception as e:
            output_shape = tf.stack([input_shape[0], 2*input_shape[1], 2*input_shape[2], output_dim])

        result = tf.nn.conv2d_transpose(
            value=inputs,
            filter=filters,
            output_shape=output_shape,
            strides=[1, 2, 2, 1],
            padding='SAME'
        )

        if biases:
            _biases = lib.param(
                name+'.Biases',
                np.zeros(output_dim, dtype='float32')
            )
            result = tf.nn.bias_add(result, _biases)

        result = tf.transpose(result, [0, 3, 1, 2], name='NHWC_to_NCHW')

        return result

# ---- tflib/ops/layernorm.py ----
import tflib as lib

import numpy as np
import tensorflow as tf

def Layernorm(name, norm_axes, inputs):
    """Layer normalization over *norm_axes* with learnable offset/scale."""
    mean, var = tf.nn.moments(inputs, norm_axes, keep_dims=True)

    # Assume the 'neurons' axis is the first of norm_axes. This is the case for fully-connected and BCHW conv layers.
    n_neurons = inputs.get_shape().as_list()[norm_axes[0]]

    offset = lib.param(name+'.offset', np.zeros(n_neurons, dtype='float32'))
    scale = lib.param(name+'.scale', np.ones(n_neurons, dtype='float32'))

    # Add broadcasting dims to offset and scale (e.g. BCHW conv data)
    offset = tf.reshape(offset, [-1] + [1 for i in range(len(norm_axes)-1)])
    scale = tf.reshape(scale, [-1] + [1 for i in range(len(norm_axes)-1)])

    result = tf.nn.batch_normalization(inputs, mean, var, offset, scale, 1e-5)

    return result

# ---- tflib/ops/linear.py ----
import tflib as lib

import numpy as np
import tensorflow as tf

_default_weightnorm = False
def enable_default_weightnorm():
    global _default_weightnorm
    _default_weightnorm = True

def disable_default_weightnorm():
    global _default_weightnorm
    _default_weightnorm = False

_weights_stdev = None
def set_weights_stdev(weights_stdev):
    global _weights_stdev
    _weights_stdev = weights_stdev

def unset_weights_stdev():
    global _weights_stdev
    _weights_stdev = None

def Linear(
        name,
        input_dim,
        output_dim,
        inputs,
        biases=True,
        initialization=None,
        weightnorm=None,
        gain=1.
        ):
    """
    Fully-connected layer: inputs @ W (+ b), applied over the last dimension.

    initialization: None, `lecun`, 'glorot', `he`, 'glorot_he', `orthogonal`, `("uniform", range)`
    """
    with tf.name_scope(name) as scope:

        def uniform(stdev, size):
            # A globally-set stdev (set_weights_stdev) overrides the computed one.
            if _weights_stdev is not None:
                stdev = _weights_stdev
            return np.random.uniform(
                low=-stdev * np.sqrt(3),
                high=stdev * np.sqrt(3),
                size=size
            ).astype('float32')

        if initialization == 'lecun':  # and input_dim != output_dim):
            # disabling orth. init for now because it's too slow
            weight_values = uniform(
                np.sqrt(1./input_dim),
                (input_dim, output_dim)
            )

        elif initialization == 'glorot' or (initialization == None):

            weight_values = uniform(
                np.sqrt(2./(input_dim+output_dim)),
                (input_dim, output_dim)
            )

        elif initialization == 'he':

            weight_values = uniform(
                np.sqrt(2./input_dim),
                (input_dim, output_dim)
            )

        elif initialization == 'glorot_he':

            weight_values = uniform(
                np.sqrt(4./(input_dim+output_dim)),
                (input_dim, output_dim)
            )

        elif initialization == 'orthogonal' or \
            (initialization == None and input_dim == output_dim):

            # From lasagne
            def sample(shape):
                if len(shape) < 2:
                    raise RuntimeError("Only shapes of length 2 or more are "
                                       "supported.")
                flat_shape = (shape[0], np.prod(shape[1:]))
                # TODO: why normal and not uniform?
                a = np.random.normal(0.0, 1.0, flat_shape)
                u, _, v = np.linalg.svd(a, full_matrices=False)
                # pick the one with the correct shape
                q = u if u.shape == flat_shape else v
                q = q.reshape(shape)
                return q.astype('float32')
            weight_values = sample((input_dim, output_dim))

        elif initialization[0] == 'uniform':

            weight_values = np.random.uniform(
                low=-initialization[1],
                high=initialization[1],
                size=(input_dim, output_dim)
            ).astype('float32')

        else:

            raise Exception('Invalid initialization!')

        weight_values *= gain

        weight = lib.param(
            name + '.W',
            weight_values
        )

        if weightnorm is None:
            weightnorm = _default_weightnorm
        if weightnorm:
            norm_values = np.sqrt(np.sum(np.square(weight_values), axis=0))

            target_norms = lib.param(
                name + '.g',
                norm_values
            )

            with tf.name_scope('weightnorm') as scope:
                norms = tf.sqrt(tf.reduce_sum(tf.square(weight), reduction_indices=[0]))
                weight = weight * (target_norms / norms)

        if inputs.get_shape().ndims == 2:
            result = tf.matmul(inputs, weight)
        else:
            # Fold all leading dims into one, multiply, then restore them.
            reshaped_inputs = tf.reshape(inputs, [-1, input_dim])
            result = tf.matmul(reshaped_inputs, weight)
            # fixed: tf.pack/tf.unpack were renamed to tf.stack/tf.unstack in
            # TensorFlow 1.0 and later removed; the old names raise
            # AttributeError on any modern TF (the rest of this repo already
            # targets TF 1.x+, cf. the tf.stack fallback in deconv2d)
            result = tf.reshape(result, tf.stack(tf.unstack(tf.shape(inputs))[:-1] + [output_dim]))

        if biases:
            result = tf.nn.bias_add(
                result,
                lib.param(
                    name + '.b',
                    np.zeros((output_dim,), dtype='float32')
                )
            )

        return result

# ---- tflib/plot.py ----
import numpy as np

import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt

import collections
import time
import pickle
import math

_since_beginning = collections.defaultdict(lambda: {})
_since_last_flush = collections.defaultdict(lambda: {})

_iter = [0]
def tick():
    """Advance the global iteration counter."""
    _iter[0] += 1

def plot(name, value):
    """Record *value* for series *name* at the current iteration."""
    _since_last_flush[name][_iter[0]] = value

def flush():
    """Print the mean of each series recorded since the previous flush."""
    prints = []

    for name, vals in _since_last_flush.items():
        v = vals.values()
        sv = sum(v)
        prints.append("%s\t%f" % (name, sv / len(v)))
        # Full history is still accumulated even though plotting is disabled.
        _since_beginning[name].update(vals)

        # Image/pickle output intentionally disabled in this copy:
        #x_vals = sorted(_since_beginning[name].keys())
        #y_vals = [_since_beginning[name][x] for x in x_vals]
        #plt.clf()
        #plt.plot(x_vals, y_vals)
        #plt.xlabel('iteration')
        #plt.ylabel(name)
        #plt.savefig(name.replace(' ', '_')+'.jpg')

    print("iter %d" % (_iter[0]))
    for p in prints:
        print(p)
    _since_last_flush.clear()

    #with open('log.pkl', 'wb') as f:
    #    pickle.dump(dict(_since_beginning), f, pickle.HIGHEST_PROTOCOL)
# ---- tflib/save_images.py ----
"""
Image grid saver, based on color_grid_vis from github.com/Newmu
"""

import numpy as np

def save_images(X, save_path):
    """Save a batch of images X as a single grid image at save_path.

    X: (B, C, H, W) or (B, H, W) array, or (B, H*W) flattened squares;
    float values in [0, 1] are rescaled to uint8 [0, 255].
    """
    # Deferred import: scipy.misc.imsave was removed from modern SciPy;
    # importing lazily keeps this module importable for its other users.
    from scipy.misc import imsave

    # [0, 1] -> [0,255]
    if isinstance(X.flatten()[0], np.floating):
        X = (255.99 * X).astype('uint8')

    # Choose the most-square grid whose row count divides the batch size.
    n_samples = X.shape[0]
    rows = int(np.sqrt(n_samples))
    while n_samples % rows != 0:
        rows -= 1

    nh, nw = rows, n_samples // rows

    if X.ndim == 2:
        # Flattened square images -> (B, H, W)
        X = np.reshape(X, (X.shape[0], int(np.sqrt(X.shape[1])), int(np.sqrt(X.shape[1]))))

    if X.ndim == 4:
        # BCHW -> BHWC
        X = X.transpose(0, 2, 3, 1)
        h, w = X[0].shape[:2]
        img = np.zeros((h * nh, w * nw, 3))
    elif X.ndim == 3:
        h, w = X[0].shape[:2]
        img = np.zeros((h * nh, w * nw))

    for n, x in enumerate(X):
        j = n // nw
        i = n % nw
        img[j*h:j*h+h, i*w:i*w+w] = x

    imsave(save_path, img)

# ---- tflib/small_imagenet.py (load() continues in the next section) ----
import numpy as np
import time

def make_generator(path, n_files, batch_size):
    """Return an epoch-generator over images {path}/{00001..n_files}.png.

    Each epoch reshuffles with a seed derived from the epoch counter, so
    epochs are deterministic given their index.
    """
    epoch_count = [1]
    def get_epoch():
        images = np.zeros((batch_size, 3, 64, 64), dtype='int32')
        files = list(range(n_files))
        random_state = np.random.RandomState(epoch_count[0])
        random_state.shuffle(files)
        epoch_count[0] += 1
        for n, i in enumerate(files):
            # Deferred import: scipy.misc was removed from modern SciPy;
            # importing here keeps the module importable without it.
            import scipy.misc
            image = scipy.misc.imread("{}/{}.png".format(path, str(i + 1).zfill(len(str(n_files)))))
            images[n % batch_size] = image.transpose(2, 0, 1)
            # NOTE(review): as in data_loader.py, image 0 of each epoch is
            # never yielded and the final partial batch is dropped.
            if n > 0 and n % batch_size == 0:
                yield (images,)
    return get_epoch
data_dir='/home/ishaan/data/imagenet64'): + return ( + make_generator(data_dir+'/train_64x64', 1281149, batch_size), + make_generator(data_dir+'/valid_64x64', 49999, batch_size) + ) + +if __name__ == '__main__': + train_gen, valid_gen = load(64) + t0 = time.time() + for i, batch in enumerate(train_gen(), start=1): + print("s\t%d" % (str(time.time() - t0), batch[0][0,0,0,0])) + if i == 1000: + break + t0 = time.time() diff --git a/Wav2Lip-master/evaluation/TTUR-master/fid.py b/Wav2Lip-master/evaluation/TTUR-master/fid.py new file mode 100644 index 00000000..d57a50a9 --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/fid.py @@ -0,0 +1,334 @@ +#!/usr/bin/env python3 +''' Calculates the Frechet Inception Distance (FID) to evalulate GANs. + +The FID metric calculates the distance between two distributions of images. +Typically, we have summary statistics (mean & covariance matrix) of one +of these distributions, while the 2nd distribution is given by a GAN. + +When run as a stand-alone program, it compares the distribution of +images that are stored as PNG/JPEG at a specified location with a +distribution given by summary statistics (in pickle format). + +The FID is calculated by assuming that X_1 and X_2 are the activations of +the pool_3 layer of the inception net for generated samples and real world +samples respectivly. + +See --help to see further details. +''' + +from __future__ import absolute_import, division, print_function +import numpy as np +import os +import gzip, pickle +import tensorflow as tf +from imageio import imread +from scipy import linalg +import pathlib +import urllib +import warnings + +class InvalidFIDException(Exception): + pass + + +def create_inception_graph(pth): + """Creates a graph from saved GraphDef file.""" + # Creates graph from saved graph_def.pb. 
+ with tf.io.gfile.GFile( pth, 'rb') as f: + graph_def = tf.compat.v1.GraphDef() + graph_def.ParseFromString( f.read()) + _ = tf.import_graph_def( graph_def, name='FID_Inception_Net') +#------------------------------------------------------------------------------- + + +# code for handling inception net derived from +# https://github.com/openai/improved-gan/blob/master/inception_score/model.py +def _get_inception_layer(sess): + """Prepares inception net for batched usage and returns pool_3 layer. """ + layername = 'FID_Inception_Net/pool_3:0' + pool3 = sess.graph.get_tensor_by_name(layername) + ops = pool3.graph.get_operations() + for op_idx, op in enumerate(ops): + for o in op.outputs: + shape = o.get_shape() + if shape._dims is not None: + #shape = [s.value for s in shape] TF 1.x + shape = [s for s in shape] #TF 2.x + new_shape = [] + for j, s in enumerate(shape): + if s == 1 and j == 0: + new_shape.append(None) + else: + new_shape.append(s) + o.__dict__['_shape_val'] = tf.TensorShape(new_shape) + return pool3 +#------------------------------------------------------------------------------- + + +def get_activations(images, sess, batch_size=50, verbose=False): + """Calculates the activations of the pool_3 layer for all images. + + Params: + -- images : Numpy array of dimension (n_images, hi, wi, 3). The values + must lie between 0 and 256. + -- sess : current session + -- batch_size : the images numpy array is split into batches with batch size + batch_size. A reasonable batch size depends on the disposable hardware. + -- verbose : If set to True and parameter out_step is given, the number of calculated + batches is reported. + Returns: + -- A numpy array of dimension (num images, 2048) that contains the + activations of the given tensor when feeding inception with the query tensor. + """ + inception_layer = _get_inception_layer(sess) + n_images = images.shape[0] + if batch_size > n_images: + print("warning: batch size is bigger than the data size. 
setting batch size to data size") + batch_size = n_images + n_batches = n_images//batch_size # drops the last batch if < batch_size + pred_arr = np.empty((n_batches * batch_size,2048)) + for i in range(n_batches): + if verbose: + print("\rPropagating batch %d/%d" % (i+1, n_batches), end="", flush=True) + start = i*batch_size + + if start+batch_size < n_images: + end = start+batch_size + else: + end = n_images + + batch = images[start:end] + pred = sess.run(inception_layer, {'FID_Inception_Net/ExpandDims:0': batch}) + pred_arr[start:end] = pred.reshape(batch.shape[0],-1) + if verbose: + print(" done") + return pred_arr +#------------------------------------------------------------------------------- + + +def calculate_frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6): + """Numpy implementation of the Frechet Distance. + The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1) + and X_2 ~ N(mu_2, C_2) is + d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)). + + Stable version by Dougal J. Sutherland. + + Params: + -- mu1 : Numpy array containing the activations of the pool_3 layer of the + inception net ( like returned by the function 'get_predictions') + for generated samples. + -- mu2 : The sample mean over activations of the pool_3 layer, precalcualted + on an representive data set. + -- sigma1: The covariance matrix over activations of the pool_3 layer for + generated samples. + -- sigma2: The covariance matrix over activations of the pool_3 layer, + precalcualted on an representive data set. + + Returns: + -- : The Frechet Distance. 
+ """ + + mu1 = np.atleast_1d(mu1) + mu2 = np.atleast_1d(mu2) + + sigma1 = np.atleast_2d(sigma1) + sigma2 = np.atleast_2d(sigma2) + + assert mu1.shape == mu2.shape, "Training and test mean vectors have different lengths" + assert sigma1.shape == sigma2.shape, "Training and test covariances have different dimensions" + + diff = mu1 - mu2 + + # product might be almost singular + covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False) + if not np.isfinite(covmean).all(): + msg = "fid calculation produces singular product; adding %s to diagonal of cov estimates" % eps + warnings.warn(msg) + offset = np.eye(sigma1.shape[0]) * eps + covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset)) + + # numerical error might give slight imaginary component + if np.iscomplexobj(covmean): + if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3): + m = np.max(np.abs(covmean.imag)) + raise ValueError("Imaginary component {}".format(m)) + covmean = covmean.real + + tr_covmean = np.trace(covmean) + + return diff.dot(diff) + np.trace(sigma1) + np.trace(sigma2) - 2 * tr_covmean +#------------------------------------------------------------------------------- + + +def calculate_activation_statistics(images, sess, batch_size=50, verbose=False): + """Calculation of the statistics used by the FID. + Params: + -- images : Numpy array of dimension (n_images, hi, wi, 3). The values + must lie between 0 and 255. + -- sess : current session + -- batch_size : the images numpy array is split into batches with batch size + batch_size. A reasonable batch size depends on the available hardware. + -- verbose : If set to True and parameter out_step is given, the number of calculated + batches is reported. + Returns: + -- mu : The mean over samples of the activations of the pool_3 layer of + the incption model. + -- sigma : The covariance matrix of the activations of the pool_3 layer of + the incption model. 
+ """ + act = get_activations(images, sess, batch_size, verbose) + mu = np.mean(act, axis=0) + sigma = np.cov(act, rowvar=False) + return mu, sigma + + +#------------------ +# The following methods are implemented to obtain a batched version of the activations. +# This has the advantage to reduce memory requirements, at the cost of slightly reduced efficiency. +# - Pyrestone +#------------------ + + +def load_image_batch(files): + """Convenience method for batch-loading images + Params: + -- files : list of paths to image files. Images need to have same dimensions for all files. + Returns: + -- A numpy array of dimensions (num_images,hi, wi, 3) representing the image pixel values. + """ + return np.array([imread(str(fn)).astype(np.float32) for fn in files]) + +def get_activations_from_files(files, sess, batch_size=50, verbose=False): + """Calculates the activations of the pool_3 layer for all images. + + Params: + -- files : list of paths to image files. Images need to have same dimensions for all files. + -- sess : current session + -- batch_size : the images numpy array is split into batches with batch size + batch_size. A reasonable batch size depends on the disposable hardware. + -- verbose : If set to True and parameter out_step is given, the number of calculated + batches is reported. + Returns: + -- A numpy array of dimension (num images, 2048) that contains the + activations of the given tensor when feeding inception with the query tensor. + """ + inception_layer = _get_inception_layer(sess) + n_imgs = len(files) + if batch_size > n_imgs: + print("warning: batch size is bigger than the data size. 
setting batch size to data size")
+        batch_size = n_imgs
+    n_batches = (n_imgs + batch_size - 1)//batch_size  # fix: ceil division; `n_imgs//batch_size + 1` produced a spurious empty batch whenever n_imgs was divisible by batch_size
+    pred_arr = np.empty((n_imgs,2048))
+    for i in range(n_batches):
+        if verbose:
+            print("\rPropagating batch %d/%d" % (i+1, n_batches), end="", flush=True)
+        start = i*batch_size
+        if start+batch_size < n_imgs:
+            end = start+batch_size
+        else:
+            end = n_imgs
+
+        batch = load_image_batch(files[start:end])
+        pred = sess.run(inception_layer, {'FID_Inception_Net/ExpandDims:0': batch})
+        pred_arr[start:end] = pred.reshape(end-start,-1)  # fix: final batch may hold fewer than batch_size images; reshape(batch_size,-1) raised on it
+        del batch #clean up memory
+    if verbose:
+        print(" done")
+    return pred_arr
+
+def calculate_activation_statistics_from_files(files, sess, batch_size=50, verbose=False):
+    """Calculation of the statistics used by the FID.
+    Params:
+    -- files      : list of paths to image files. Images need to have same dimensions for all files.
+    -- sess        : current session
+    -- batch_size  : the images numpy array is split into batches with batch size
+                     batch_size. A reasonable batch size depends on the available hardware.
+    -- verbose    : If set to True and parameter out_step is given, the number of calculated
+                     batches is reported.
+    Returns:
+    -- mu    : The mean over samples of the activations of the pool_3 layer of
+               the inception model.
+    -- sigma : The covariance matrix of the activations of the pool_3 layer of
+               the inception model.
+ """ + act = get_activations_from_files(files, sess, batch_size, verbose) + mu = np.mean(act, axis=0) + sigma = np.cov(act, rowvar=False) + return mu, sigma + +#------------------------------------------------------------------------------- + + +#------------------------------------------------------------------------------- +# The following functions aren't needed for calculating the FID +# they're just here to make this module work as a stand-alone script +# for calculating FID scores +#------------------------------------------------------------------------------- +def check_or_download_inception(inception_path): + ''' Checks if the path to the inception file is valid, or downloads + the file if it is not present. ''' + INCEPTION_URL = 'http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz' + if inception_path is None: + inception_path = '/tmp' + inception_path = pathlib.Path(inception_path) + model_file = inception_path / 'classify_image_graph_def.pb' + if not model_file.exists(): + print("Downloading Inception model") + from urllib import request + import tarfile + fn, _ = request.urlretrieve(INCEPTION_URL) + with tarfile.open(fn, mode='r') as f: + f.extract('classify_image_graph_def.pb', str(model_file.parent)) + return str(model_file) + + +def _handle_path(path, sess, low_profile=False): + if path.endswith('.npz'): + f = np.load(path) + m, s = f['mu'][:], f['sigma'][:] + f.close() + else: + path = pathlib.Path(path) + files = list(path.glob('*.jpg')) + list(path.glob('*.png')) + if low_profile: + m, s = calculate_activation_statistics_from_files(files, sess) + else: + x = np.array([imread(str(fn)).astype(np.float32) for fn in files]) + m, s = calculate_activation_statistics(x, sess) + del x #clean up memory + return m, s + + +def calculate_fid_given_paths(paths, inception_path, low_profile=False): + ''' Calculates the FID of two paths. 
''' + inception_path = check_or_download_inception(inception_path) + + for p in paths: + if not os.path.exists(p): + raise RuntimeError("Invalid path: %s" % p) + + create_inception_graph(str(inception_path)) + with tf.Session() as sess: + sess.run(tf.global_variables_initializer()) + m1, s1 = _handle_path(paths[0], sess, low_profile=low_profile) + m2, s2 = _handle_path(paths[1], sess, low_profile=low_profile) + fid_value = calculate_frechet_distance(m1, s1, m2, s2) + return fid_value + + +if __name__ == "__main__": + from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter + parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter) + parser.add_argument("path", type=str, nargs=2, + help='Path to the generated images or to .npz statistic files') + parser.add_argument("-i", "--inception", type=str, default=None, + help='Path to Inception model (will be downloaded if not provided)') + parser.add_argument("--gpu", default="", type=str, + help='GPU to use (leave blank for CPU only)') + parser.add_argument("--lowprofile", action="store_true", + help='Keep only one batch of images in memory at a time. 
This reduces memory footprint, but may decrease speed slightly.') + args = parser.parse_args() + os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu + fid_value = calculate_fid_given_paths(args.path, args.inception, low_profile=args.lowprofile) + print("FID: ", fid_value) diff --git a/Wav2Lip-master/evaluation/TTUR-master/fid_example.py b/Wav2Lip-master/evaluation/TTUR-master/fid_example.py new file mode 100644 index 00000000..6991fcf7 --- /dev/null +++ b/Wav2Lip-master/evaluation/TTUR-master/fid_example.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python3 +from __future__ import absolute_import, division, print_function +import os +import glob +#os.environ['CUDA_VISIBLE_DEVICES'] = '0' +import numpy as np +import fid +from scipy.misc import imread +import tensorflow as tf + +# Paths +image_path = '/tmp/images' # set path to some generated images +stats_path = 'fid_stats.npz' # training set statistics +inception_path = fid.check_or_download_inception(None) # download inception network + +# loads all images into memory (this might require a lot of RAM!) 
+image_list = glob.glob(os.path.join(image_path, '*.jpg'))
+images = np.array([imread(str(fn)).astype(np.float32) for fn in image_list])  # fix: original iterated undefined name `files` (NameError); the paths collected above are `image_list`
+
+# load precalculated training set statistics
+f = np.load(stats_path)
+mu_real, sigma_real = f['mu'][:], f['sigma'][:]
+f.close()
+
+fid.create_inception_graph(inception_path)  # load the graph into the current TF graph
+with tf.Session() as sess:
+    sess.run(tf.global_variables_initializer())
+    mu_gen, sigma_gen = fid.calculate_activation_statistics(images, sess, batch_size=100)
+
+fid_value = fid.calculate_frechet_distance(mu_gen, sigma_gen, mu_real, sigma_real)
+print("FID: %s" % fid_value)
diff --git a/Wav2Lip-master/evaluation/TTUR-master/precalc_stats_example.py b/Wav2Lip-master/evaluation/TTUR-master/precalc_stats_example.py
new file mode 100644
index 00000000..062a4644
--- /dev/null
+++ b/Wav2Lip-master/evaluation/TTUR-master/precalc_stats_example.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python3
+
+import os
+import glob
+#os.environ['CUDA_VISIBLE_DEVICES'] = '2'
+import numpy as np
+import fid
+from scipy.misc import imread
+import tensorflow as tf
+
+########
+# PATHS
+########
+data_path = 'data' # set path to training set images
+output_path = 'fid_stats.npz' # path for where to store the statistics
+# if you have downloaded and extracted
+# http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz
+# set this path to the directory where the extracted files are, otherwise
+# just set it to None and the script will later download the files for you
+inception_path = None
+print("check for inception model..", end=" ", flush=True)
+inception_path = fid.check_or_download_inception(inception_path) # download inception if necessary
+print("ok")
+
+# loads all images into memory (this might require a lot of RAM!)
+print("load images..", end=" " , flush=True) +image_list = glob.glob(os.path.join(data_path, '*.jpg')) +images = np.array([imread(str(fn)).astype(np.float32) for fn in image_list]) +print("%d images found and loaded" % len(images)) + +print("create inception graph..", end=" ", flush=True) +fid.create_inception_graph(inception_path) # load the graph into the current TF graph +print("ok") + +print("calculte FID stats..", end=" ", flush=True) +with tf.Session() as sess: + sess.run(tf.global_variables_initializer()) + mu, sigma = fid.calculate_activation_statistics(images, sess, batch_size=100) + np.savez_compressed(output_path, mu=mu, sigma=sigma) +print("finished") diff --git a/Wav2Lip-master/evaluation/gen_videos_from_filelist.py b/Wav2Lip-master/evaluation/gen_videos_from_filelist.py new file mode 100644 index 00000000..bd666b93 --- /dev/null +++ b/Wav2Lip-master/evaluation/gen_videos_from_filelist.py @@ -0,0 +1,238 @@ +from os import listdir, path +import numpy as np +import scipy, cv2, os, sys, argparse +import dlib, json, subprocess +from tqdm import tqdm +from glob import glob +import torch + +sys.path.append('../') +import audio +import face_detection +from models import Wav2Lip + +parser = argparse.ArgumentParser(description='Code to generate results for test filelists') + +parser.add_argument('--filelist', type=str, + help='Filepath of filelist file to read', required=True) +parser.add_argument('--results_dir', type=str, help='Folder to save all results into', + required=True) +parser.add_argument('--data_root', type=str, required=True) +parser.add_argument('--checkpoint_path', type=str, + help='Name of saved checkpoint to load weights from', required=True) + +parser.add_argument('--pads', nargs='+', type=int, default=[0, 0, 0, 0], + help='Padding (top, bottom, left, right)') +parser.add_argument('--face_det_batch_size', type=int, + help='Single GPU batch size for face detection', default=64) +parser.add_argument('--wav2lip_batch_size', type=int, 
help='Batch size for Wav2Lip', default=128) + +# parser.add_argument('--resize_factor', default=1, type=int) + +args = parser.parse_args() +args.img_size = 96 + +def get_smoothened_boxes(boxes, T): + for i in range(len(boxes)): + if i + T > len(boxes): + window = boxes[len(boxes) - T:] + else: + window = boxes[i : i + T] + boxes[i] = np.mean(window, axis=0) + return boxes + +def face_detect(images): + batch_size = args.face_det_batch_size + + while 1: + predictions = [] + try: + for i in range(0, len(images), batch_size): + predictions.extend(detector.get_detections_for_batch(np.array(images[i:i + batch_size]))) + except RuntimeError: + if batch_size == 1: + raise RuntimeError('Image too big to run face detection on GPU') + batch_size //= 2 + args.face_det_batch_size = batch_size + print('Recovering from OOM error; New batch size: {}'.format(batch_size)) + continue + break + + results = [] + pady1, pady2, padx1, padx2 = args.pads + for rect, image in zip(predictions, images): + if rect is None: + raise ValueError('Face not detected!') + + y1 = max(0, rect[1] - pady1) + y2 = min(image.shape[0], rect[3] + pady2) + x1 = max(0, rect[0] - padx1) + x2 = min(image.shape[1], rect[2] + padx2) + + results.append([x1, y1, x2, y2]) + + boxes = get_smoothened_boxes(np.array(results), T=5) + results = [[image[y1: y2, x1:x2], (y1, y2, x1, x2), True] for image, (x1, y1, x2, y2) in zip(images, boxes)] + + return results + +def datagen(frames, face_det_results, mels): + img_batch, mel_batch, frame_batch, coords_batch = [], [], [], [] + + for i, m in enumerate(mels): + if i >= len(frames): raise ValueError('Equal or less lengths only') + + frame_to_save = frames[i].copy() + face, coords, valid_frame = face_det_results[i].copy() + if not valid_frame: + continue + + face = cv2.resize(face, (args.img_size, args.img_size)) + + img_batch.append(face) + mel_batch.append(m) + frame_batch.append(frame_to_save) + coords_batch.append(coords) + + if len(img_batch) >= args.wav2lip_batch_size: + 
img_batch, mel_batch = np.asarray(img_batch), np.asarray(mel_batch) + + img_masked = img_batch.copy() + img_masked[:, args.img_size//2:] = 0 + + img_batch = np.concatenate((img_masked, img_batch), axis=3) / 255. + mel_batch = np.reshape(mel_batch, [len(mel_batch), mel_batch.shape[1], mel_batch.shape[2], 1]) + + yield img_batch, mel_batch, frame_batch, coords_batch + img_batch, mel_batch, frame_batch, coords_batch = [], [], [], [] + + if len(img_batch) > 0: + img_batch, mel_batch = np.asarray(img_batch), np.asarray(mel_batch) + + img_masked = img_batch.copy() + img_masked[:, args.img_size//2:] = 0 + + img_batch = np.concatenate((img_masked, img_batch), axis=3) / 255. + mel_batch = np.reshape(mel_batch, [len(mel_batch), mel_batch.shape[1], mel_batch.shape[2], 1]) + + yield img_batch, mel_batch, frame_batch, coords_batch + +fps = 25 +mel_step_size = 16 +mel_idx_multiplier = 80./fps +device = 'cuda' if torch.cuda.is_available() else 'cpu' +print('Using {} for inference.'.format(device)) + +detector = face_detection.FaceAlignment(face_detection.LandmarksType._2D, + flip_input=False, device=device) + +def _load(checkpoint_path): + if device == 'cuda': + checkpoint = torch.load(checkpoint_path) + else: + checkpoint = torch.load(checkpoint_path, + map_location=lambda storage, loc: storage) + return checkpoint + +def load_model(path): + model = Wav2Lip() + print("Load checkpoint from: {}".format(path)) + checkpoint = _load(path) + s = checkpoint["state_dict"] + new_s = {} + for k, v in s.items(): + new_s[k.replace('module.', '')] = v + model.load_state_dict(new_s) + + model = model.to(device) + return model.eval() + +model = load_model(args.checkpoint_path) + +def main(): + assert args.data_root is not None + data_root = args.data_root + + if not os.path.isdir(args.results_dir): os.makedirs(args.results_dir) + + with open(args.filelist, 'r') as filelist: + lines = filelist.readlines() + + for idx, line in enumerate(tqdm(lines)): + audio_src, video = line.strip().split() + + 
audio_src = os.path.join(data_root, audio_src) + '.mp4' + video = os.path.join(data_root, video) + '.mp4' + + command = 'ffmpeg -loglevel panic -y -i {} -strict -2 {}'.format(audio_src, '../temp/temp.wav') + subprocess.call(command, shell=True) + temp_audio = '../temp/temp.wav' + + wav = audio.load_wav(temp_audio, 16000) + mel = audio.melspectrogram(wav) + if np.isnan(mel.reshape(-1)).sum() > 0: + continue + + mel_chunks = [] + i = 0 + while 1: + start_idx = int(i * mel_idx_multiplier) + if start_idx + mel_step_size > len(mel[0]): + break + mel_chunks.append(mel[:, start_idx : start_idx + mel_step_size]) + i += 1 + + video_stream = cv2.VideoCapture(video) + + full_frames = [] + while 1: + still_reading, frame = video_stream.read() + if not still_reading or len(full_frames) > len(mel_chunks): + video_stream.release() + break + full_frames.append(frame) + + if len(full_frames) < len(mel_chunks): + continue + + full_frames = full_frames[:len(mel_chunks)] + + try: + face_det_results = face_detect(full_frames.copy()) + except ValueError as e: + continue + + batch_size = args.wav2lip_batch_size + gen = datagen(full_frames.copy(), face_det_results, mel_chunks) + + for i, (img_batch, mel_batch, frames, coords) in enumerate(gen): + if i == 0: + frame_h, frame_w = full_frames[0].shape[:-1] + out = cv2.VideoWriter('../temp/result.avi', + cv2.VideoWriter_fourcc(*'DIVX'), fps, (frame_w, frame_h)) + + img_batch = torch.FloatTensor(np.transpose(img_batch, (0, 3, 1, 2))).to(device) + mel_batch = torch.FloatTensor(np.transpose(mel_batch, (0, 3, 1, 2))).to(device) + + with torch.no_grad(): + pred = model(mel_batch, img_batch) + + + pred = pred.cpu().numpy().transpose(0, 2, 3, 1) * 255. 
+ + for pl, f, c in zip(pred, frames, coords): + y1, y2, x1, x2 = c + pl = cv2.resize(pl.astype(np.uint8), (x2 - x1, y2 - y1)) + f[y1:y2, x1:x2] = pl + out.write(f) + + out.release() + + vid = os.path.join(args.results_dir, '{}.mp4'.format(idx)) + + command = 'ffmpeg -loglevel panic -y -i {} -i {} -strict -2 -q:v 1 {}'.format(temp_audio, + '../temp/result.avi', vid) + subprocess.call(command, shell=True) + +if __name__ == '__main__': + main() diff --git a/Wav2Lip-master/evaluation/pytorch-fid-master/.flake8 b/Wav2Lip-master/evaluation/pytorch-fid-master/.flake8 new file mode 100644 index 00000000..706e2b12 --- /dev/null +++ b/Wav2Lip-master/evaluation/pytorch-fid-master/.flake8 @@ -0,0 +1,4 @@ +[flake8] +select = F,W,E,I,B,B9 +ignore = W503,E203,B950 +max-line-length = 88 diff --git a/Wav2Lip-master/evaluation/pytorch-fid-master/.github/workflows/tests_full.yaml b/Wav2Lip-master/evaluation/pytorch-fid-master/.github/workflows/tests_full.yaml new file mode 100644 index 00000000..cf1949b8 --- /dev/null +++ b/Wav2Lip-master/evaluation/pytorch-fid-master/.github/workflows/tests_full.yaml @@ -0,0 +1,41 @@ +name: "Full set of tests for PRs and master branch" +on: + push: + branches: + - "master" + pull_request: + +concurrency: + group: ${{ github.ref }} + cancel-in-progress: true + +jobs: + tests: + runs-on: ubuntu-20.04 + strategy: + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + name: Python ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install Nox + run: pip install nox==2024.03.02 + - name: Run tests + run: nox --non-interactive --error-on-missing-interpreter --session "tests-${{ matrix.python-version }}" + + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.9 + uses: actions/setup-python@v5 + with: + python-version: 3.9 + - name: 
Install Nox + run: pip install nox==2024.03.02 + - name: Lint + run: nox --non-interactive --error-on-missing-interpreter --session "lint" diff --git a/Wav2Lip-master/evaluation/pytorch-fid-master/.github/workflows/tests_reduced.yaml b/Wav2Lip-master/evaluation/pytorch-fid-master/.github/workflows/tests_reduced.yaml new file mode 100644 index 00000000..a645b5aa --- /dev/null +++ b/Wav2Lip-master/evaluation/pytorch-fid-master/.github/workflows/tests_reduced.yaml @@ -0,0 +1,40 @@ +name: "Reduced set of tests for push events" +on: + push: + branches-ignore: + - master + +concurrency: + group: ${{ github.ref }} + cancel-in-progress: true + +jobs: + tests: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.11"] + name: Python ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install Nox + run: pip install nox==2024.03.02 + - name: Run tests + run: nox --non-interactive --error-on-missing-interpreter --session "tests-${{ matrix.python-version }}" + + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.9 + uses: actions/setup-python@v5 + with: + python-version: 3.9 + - name: Install Nox + run: pip install nox==2024.03.02 + - name: Lint + run: nox --non-interactive --error-on-missing-interpreter --session "lint" diff --git a/Wav2Lip-master/evaluation/pytorch-fid-master/.gitignore b/Wav2Lip-master/evaluation/pytorch-fid-master/.gitignore new file mode 100644 index 00000000..9bce9eb0 --- /dev/null +++ b/Wav2Lip-master/evaluation/pytorch-fid-master/.gitignore @@ -0,0 +1,116 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ 
+share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ \ No newline at end of file diff --git a/Wav2Lip-master/evaluation/pytorch-fid-master/CHANGELOG.md b/Wav2Lip-master/evaluation/pytorch-fid-master/CHANGELOG.md new file mode 100644 index 00000000..f43fc390 --- /dev/null +++ b/Wav2Lip-master/evaluation/pytorch-fid-master/CHANGELOG.md @@ -0,0 +1,41 @@ +# Changelog + +## [0.3.0] - 2023-01-05 + +### Added + +* Add argument `--save-stats` allowing to compute dataset statistics and save them as an `.npz` file ([#80](https://github.com/mseitzer/pytorch-fid/pull/80)). The `.npz` file can be used in subsequent FID computations instead of recomputing the dataset statistics. This option can be used in the following way: `python -m pytorch_fid --save-stats path/to/dataset path/to/outputfile`. 
+ +### Fixed + +* Do not use `os.sched_getaffinity` to get number of available CPUs on Windows, as it is not available there ([232b3b14](https://github.com/mseitzer/pytorch-fid/commit/232b3b1468800102fcceaf6f2bb8977811fc991a), [#84](https://github.com/mseitzer/pytorch-fid/issues/84)). +* Do not use Inception model argument `pretrained`, as it was deprecated in torchvision 0.13 ([#88](https://github.com/mseitzer/pytorch-fid/pull/88)). + +## [0.2.1] - 2021-10-10 + +### Added + +* Add argument `--num-workers` to select number of dataloader processes ([#66](https://github.com/mseitzer/pytorch-fid/pull/66)). Defaults to 8 or the number of available CPUs if less than 8 CPUs are available. + +### Fixed + +* Fixed package setup to work under Windows ([#55](https://github.com/mseitzer/pytorch-fid/pull/55), [#72](https://github.com/mseitzer/pytorch-fid/issues/72)) + +## [0.2.0] - 2020-11-30 + +### Added + +* Load images using a Pytorch dataloader, which should result in a speed-up. ([#47](https://github.com/mseitzer/pytorch-fid/pull/47)) +* Support more image extensions ([#53](https://github.com/mseitzer/pytorch-fid/pull/53)) +* Improve tooling by setting up Nox, add linting and test support ([#52](https://github.com/mseitzer/pytorch-fid/pull/52)) +* Add some unit tests + +## [0.1.1] - 2020-08-16 + +### Fixed + +* Fixed software license string in `setup.py` + +## [0.1.0] - 2020-08-16 + +Initial release as a pypi package. Use `pip install pytorch-fid` to install. diff --git a/Wav2Lip-master/evaluation/pytorch-fid-master/LICENSE b/Wav2Lip-master/evaluation/pytorch-fid-master/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/Wav2Lip-master/evaluation/pytorch-fid-master/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/Wav2Lip-master/evaluation/pytorch-fid-master/README.md b/Wav2Lip-master/evaluation/pytorch-fid-master/README.md new file mode 100644 index 00000000..607cbbe0 --- /dev/null +++ b/Wav2Lip-master/evaluation/pytorch-fid-master/README.md @@ -0,0 +1,88 @@ +[![PyPI](https://img.shields.io/pypi/v/pytorch-fid.svg)](https://pypi.org/project/pytorch-fid/) + +# FID score for PyTorch + +This is a port of the official implementation of [Fréchet Inception Distance](https://arxiv.org/abs/1706.08500) to PyTorch. +See [https://github.com/bioinf-jku/TTUR](https://github.com/bioinf-jku/TTUR) for the original implementation using Tensorflow. + +FID is a measure of similarity between two datasets of images. 
+It was shown to correlate well with human judgement of visual quality and is most often used to evaluate the quality of samples of Generative Adversarial Networks. +FID is calculated by computing the [Fréchet distance](https://en.wikipedia.org/wiki/Fr%C3%A9chet_distance) between two Gaussians fitted to feature representations of the Inception network. + +Further insights and an independent evaluation of the FID score can be found in [Are GANs Created Equal? A Large-Scale Study](https://arxiv.org/abs/1711.10337). + +The weights and the model are exactly the same as in [the official Tensorflow implementation](https://github.com/bioinf-jku/TTUR), and were tested to give very similar results (e.g. `.08` absolute error and `0.0009` relative error on LSUN, using ProGAN generated images). However, due to differences in the image interpolation implementation and library backends, FID results still differ slightly from the original implementation. So if you report FID scores in your paper, and you want them to be *exactly comparable* to FID scores reported in other papers, you should consider using [the official Tensorflow implementation](https://github.com/bioinf-jku/TTUR). + +## Installation + +Install from [pip](https://pypi.org/project/pytorch-fid/): + +``` +pip install pytorch-fid +``` + +Requirements: +- python3 +- pytorch +- torchvision +- pillow +- numpy +- scipy + +## Usage + +To compute the FID score between two datasets, where images of each dataset are contained in an individual folder: +``` +python -m pytorch_fid path/to/dataset1 path/to/dataset2 +``` + +To run the evaluation on GPU, use the flag `--device cuda:N`, where `N` is the index of the GPU to use. + +### Using different layers for feature maps + +In difference to the official implementation, you can choose to use a different feature layer of the Inception network instead of the default `pool3` layer. 
+As the lower layer features still have spatial extent, the features are first global average pooled to a vector before estimating mean and covariance. + +This might be useful if the datasets you want to compare have less than the otherwise required 2048 images. +Note that this changes the magnitude of the FID score and you can not compare them against scores calculated on another dimensionality. +The resulting scores might also no longer correlate with visual quality. + +You can select the dimensionality of features to use with the flag `--dims N`, where N is the dimensionality of features. +The choices are: +- 64: first max pooling features +- 192: second max pooling features +- 768: pre-aux classifier features +- 2048: final average pooling features (this is the default) + +## Generating a compatible `.npz` archive from a dataset +A frequent use case will be to compare multiple models against an original dataset. +To save training multiple times on the original dataset, there is also the ability to generate a compatible `.npz` archive from a dataset. This is done using any combination of the previously mentioned arguments with the addition of the `--save-stats` flag. For example: +``` +python -m pytorch_fid --save-stats path/to/dataset path/to/outputfile +``` + +The output file may then be used in place of the path to the original dataset for further comparisons. + +## Citing + +If you use this repository in your research, consider citing it using the following Bibtex entry: + +``` +@misc{Seitzer2020FID, + author={Maximilian Seitzer}, + title={{pytorch-fid: FID Score for PyTorch}}, + month={August}, + year={2020}, + note={Version 0.3.0}, + howpublished={\url{https://github.com/mseitzer/pytorch-fid}}, +} +``` + +## License + +This implementation is licensed under the Apache License 2.0. 
+ +FID was introduced by Martin Heusel, Hubert Ramsauer, Thomas Unterthiner, Bernhard Nessler and Sepp Hochreiter in "GANs Trained by a Two Time-Scale Update Rule Converge to a Local Nash Equilibrium", see [https://arxiv.org/abs/1706.08500](https://arxiv.org/abs/1706.08500) + +The original implementation is by the Institute of Bioinformatics, JKU Linz, licensed under the Apache License 2.0. +See [https://github.com/bioinf-jku/TTUR](https://github.com/bioinf-jku/TTUR). diff --git a/Wav2Lip-master/evaluation/pytorch-fid-master/noxfile.py b/Wav2Lip-master/evaluation/pytorch-fid-master/noxfile.py new file mode 100644 index 00000000..8dc5c828 --- /dev/null +++ b/Wav2Lip-master/evaluation/pytorch-fid-master/noxfile.py @@ -0,0 +1,29 @@ +import nox + +LOCATIONS = ("src/", "tests/", "noxfile.py", "setup.py") + + +@nox.session +def lint(session): + session.install("flake8") + session.install("flake8-bugbear") + session.install("flake8-isort") + session.install("black==24.3.0") + + args = session.posargs or LOCATIONS + session.run("flake8", *args) + session.run("black", "--check", "--diff", *args) + + +@nox.session(python=["3.8", "3.9", "3.10", "3.11", "3.12"]) +def tests(session): + session.install( + "torch==2.2.1", + "torchvision", + "--index-url", + "https://download.pytorch.org/whl/cpu", + ) + session.install(".") + session.install("pytest") + session.install("pytest-mock") + session.run("pytest", *session.posargs) diff --git a/Wav2Lip-master/evaluation/pytorch-fid-master/pyproject.toml b/Wav2Lip-master/evaluation/pytorch-fid-master/pyproject.toml new file mode 100644 index 00000000..fcffd43e --- /dev/null +++ b/Wav2Lip-master/evaluation/pytorch-fid-master/pyproject.toml @@ -0,0 +1,7 @@ +[tool.black] +target-version = ["py311"] + +[tool.isort] +profile = "black" +line_length = 88 +multi_line_output = 3 diff --git a/Wav2Lip-master/evaluation/pytorch-fid-master/setup.py b/Wav2Lip-master/evaluation/pytorch-fid-master/setup.py new file mode 100644 index 00000000..8db0f25d 
--- /dev/null +++ b/Wav2Lip-master/evaluation/pytorch-fid-master/setup.py @@ -0,0 +1,55 @@ +import os + +import setuptools + + +def read(rel_path): + base_path = os.path.abspath(os.path.dirname(__file__)) + with open(os.path.join(base_path, rel_path), "r") as f: + return f.read() + + +def get_version(rel_path): + for line in read(rel_path).splitlines(): + if line.startswith("__version__"): + # __version__ = "0.9" + delim = '"' if '"' in line else "'" + return line.split(delim)[1] + + raise RuntimeError("Unable to find version string.") + + +if __name__ == "__main__": + setuptools.setup( + name="pytorch-fid", + version=get_version(os.path.join("src", "pytorch_fid", "__init__.py")), + author="Max Seitzer", + description=( + "Package for calculating Frechet Inception Distance (FID)" " using PyTorch" + ), + long_description=read("README.md"), + long_description_content_type="text/markdown", + url="https://github.com/mseitzer/pytorch-fid", + package_dir={"": "src"}, + packages=setuptools.find_packages(where="src"), + classifiers=[ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: Apache Software License", + ], + python_requires=">=3.5", + entry_points={ + "console_scripts": [ + "pytorch-fid = pytorch_fid.fid_score:main", + ], + }, + install_requires=[ + "numpy", + "pillow", + "scipy", + "torch>=1.0.1", + "torchvision>=0.2.2", + ], + extras_require={ + "dev": ["flake8", "flake8-bugbear", "flake8-isort", "black==24.3.0", "nox"] + }, + ) diff --git a/Wav2Lip-master/evaluation/pytorch-fid-master/src/pytorch_fid/__init__.py b/Wav2Lip-master/evaluation/pytorch-fid-master/src/pytorch_fid/__init__.py new file mode 100644 index 00000000..493f7415 --- /dev/null +++ b/Wav2Lip-master/evaluation/pytorch-fid-master/src/pytorch_fid/__init__.py @@ -0,0 +1 @@ +__version__ = "0.3.0" diff --git a/Wav2Lip-master/evaluation/pytorch-fid-master/src/pytorch_fid/__main__.py b/Wav2Lip-master/evaluation/pytorch-fid-master/src/pytorch_fid/__main__.py new file mode 100644 index 
00000000..197ee40d --- /dev/null +++ b/Wav2Lip-master/evaluation/pytorch-fid-master/src/pytorch_fid/__main__.py @@ -0,0 +1,3 @@ +import pytorch_fid.fid_score + +pytorch_fid.fid_score.main() diff --git a/Wav2Lip-master/evaluation/pytorch-fid-master/src/pytorch_fid/fid_score.py b/Wav2Lip-master/evaluation/pytorch-fid-master/src/pytorch_fid/fid_score.py new file mode 100644 index 00000000..9c8acb23 --- /dev/null +++ b/Wav2Lip-master/evaluation/pytorch-fid-master/src/pytorch_fid/fid_score.py @@ -0,0 +1,355 @@ +"""Calculates the Frechet Inception Distance (FID) to evalulate GANs + +The FID metric calculates the distance between two distributions of images. +Typically, we have summary statistics (mean & covariance matrix) of one +of these distributions, while the 2nd distribution is given by a GAN. + +When run as a stand-alone program, it compares the distribution of +images that are stored as PNG/JPEG at a specified location with a +distribution given by summary statistics (in pickle format). + +The FID is calculated by assuming that X_1 and X_2 are the activations of +the pool_3 layer of the inception net for generated samples and real world +samples respectively. + +See --help to see further details. + +Code apapted from https://github.com/bioinf-jku/TTUR to use PyTorch instead +of Tensorflow + +Copyright 2018 Institute of Bioinformatics, JKU Linz + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + +import os +import pathlib +from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser + +import numpy as np +import torch +import torchvision.transforms as TF +from PIL import Image +from scipy import linalg +from torch.nn.functional import adaptive_avg_pool2d + +try: + from tqdm import tqdm +except ImportError: + # If tqdm is not available, provide a mock version of it + def tqdm(x): + return x + + +from pytorch_fid.inception import InceptionV3 + +parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter) +parser.add_argument("--batch-size", type=int, default=50, help="Batch size to use") +parser.add_argument( + "--num-workers", + type=int, + help=( + "Number of processes to use for data loading. " "Defaults to `min(8, num_cpus)`" + ), +) +parser.add_argument( + "--device", type=str, default=None, help="Device to use. Like cuda, cuda:0 or cpu" +) +parser.add_argument( + "--dims", + type=int, + default=2048, + choices=list(InceptionV3.BLOCK_INDEX_BY_DIM), + help=( + "Dimensionality of Inception features to use. " + "By default, uses pool3 features" + ), +) +parser.add_argument( + "--save-stats", + action="store_true", + help=( + "Generate an npz archive from a directory of " + "samples. The first path is used as input and the " + "second as output." 
+ ), +) +parser.add_argument( + "path", + type=str, + nargs=2, + help=("Paths to the generated images or " "to .npz statistic files"), +) + +IMAGE_EXTENSIONS = {"bmp", "jpg", "jpeg", "pgm", "png", "ppm", "tif", "tiff", "webp"} + + +class ImagePathDataset(torch.utils.data.Dataset): + def __init__(self, files, transforms=None): + self.files = files + self.transforms = transforms + + def __len__(self): + return len(self.files) + + def __getitem__(self, i): + path = self.files[i] + img = Image.open(path).convert("RGB") + if self.transforms is not None: + img = self.transforms(img) + return img + + +def get_activations( + files, model, batch_size=50, dims=2048, device="cpu", num_workers=1 +): + """Calculates the activations of the pool_3 layer for all images. + + Params: + -- files : List of image files paths + -- model : Instance of inception model + -- batch_size : Batch size of images for the model to process at once. + Make sure that the number of samples is a multiple of + the batch size, otherwise some samples are ignored. This + behavior is retained to match the original FID score + implementation. + -- dims : Dimensionality of features returned by Inception + -- device : Device to run calculations + -- num_workers : Number of parallel dataloader workers + + Returns: + -- A numpy array of dimension (num images, dims) that contains the + activations of the given tensor when feeding inception with the + query tensor. + """ + model.eval() + + if batch_size > len(files): + print( + ( + "Warning: batch size is bigger than the data size. 
" + "Setting batch size to data size" + ) + ) + batch_size = len(files) + + dataset = ImagePathDataset(files, transforms=TF.ToTensor()) + dataloader = torch.utils.data.DataLoader( + dataset, + batch_size=batch_size, + shuffle=False, + drop_last=False, + num_workers=num_workers, + ) + + pred_arr = np.empty((len(files), dims)) + + start_idx = 0 + + for batch in tqdm(dataloader): + batch = batch.to(device) + + with torch.no_grad(): + pred = model(batch)[0] + + # If model output is not scalar, apply global spatial average pooling. + # This happens if you choose a dimensionality not equal 2048. + if pred.size(2) != 1 or pred.size(3) != 1: + pred = adaptive_avg_pool2d(pred, output_size=(1, 1)) + + pred = pred.squeeze(3).squeeze(2).cpu().numpy() + + pred_arr[start_idx : start_idx + pred.shape[0]] = pred + + start_idx = start_idx + pred.shape[0] + + return pred_arr + + +def calculate_frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6): + """Numpy implementation of the Frechet Distance. + The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1) + and X_2 ~ N(mu_2, C_2) is + d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)). + + Stable version by Dougal J. Sutherland. + + Params: + -- mu1 : Numpy array containing the activations of a layer of the + inception net (like returned by the function 'get_predictions') + for generated samples. + -- mu2 : The sample mean over activations, precalculated on an + representative data set. + -- sigma1: The covariance matrix over activations for generated samples. + -- sigma2: The covariance matrix over activations, precalculated on an + representative data set. + + Returns: + -- : The Frechet Distance. 
+ """ + + mu1 = np.atleast_1d(mu1) + mu2 = np.atleast_1d(mu2) + + sigma1 = np.atleast_2d(sigma1) + sigma2 = np.atleast_2d(sigma2) + + assert ( + mu1.shape == mu2.shape + ), "Training and test mean vectors have different lengths" + assert ( + sigma1.shape == sigma2.shape + ), "Training and test covariances have different dimensions" + + diff = mu1 - mu2 + + # Product might be almost singular + covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False) + if not np.isfinite(covmean).all(): + msg = ( + "fid calculation produces singular product; " + "adding %s to diagonal of cov estimates" + ) % eps + print(msg) + offset = np.eye(sigma1.shape[0]) * eps + covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset)) + + # Numerical error might give slight imaginary component + if np.iscomplexobj(covmean): + if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3): + m = np.max(np.abs(covmean.imag)) + raise ValueError("Imaginary component {}".format(m)) + covmean = covmean.real + + tr_covmean = np.trace(covmean) + + return diff.dot(diff) + np.trace(sigma1) + np.trace(sigma2) - 2 * tr_covmean + + +def calculate_activation_statistics( + files, model, batch_size=50, dims=2048, device="cpu", num_workers=1 +): + """Calculation of the statistics used by the FID. + Params: + -- files : List of image files paths + -- model : Instance of inception model + -- batch_size : The images numpy array is split into batches with + batch size batch_size. A reasonable batch size + depends on the hardware. + -- dims : Dimensionality of features returned by Inception + -- device : Device to run calculations + -- num_workers : Number of parallel dataloader workers + + Returns: + -- mu : The mean over samples of the activations of the pool_3 layer of + the inception model. + -- sigma : The covariance matrix of the activations of the pool_3 layer of + the inception model. 
+ """ + act = get_activations(files, model, batch_size, dims, device, num_workers) + mu = np.mean(act, axis=0) + sigma = np.cov(act, rowvar=False) + return mu, sigma + + +def compute_statistics_of_path(path, model, batch_size, dims, device, num_workers=1): + if path.endswith(".npz"): + with np.load(path) as f: + m, s = f["mu"][:], f["sigma"][:] + else: + path = pathlib.Path(path) + files = sorted( + [file for ext in IMAGE_EXTENSIONS for file in path.glob("*.{}".format(ext))] + ) + m, s = calculate_activation_statistics( + files, model, batch_size, dims, device, num_workers + ) + + return m, s + + +def calculate_fid_given_paths(paths, batch_size, device, dims, num_workers=1): + """Calculates the FID of two paths""" + for p in paths: + if not os.path.exists(p): + raise RuntimeError("Invalid path: %s" % p) + + block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims] + + model = InceptionV3([block_idx]).to(device) + + m1, s1 = compute_statistics_of_path( + paths[0], model, batch_size, dims, device, num_workers + ) + m2, s2 = compute_statistics_of_path( + paths[1], model, batch_size, dims, device, num_workers + ) + fid_value = calculate_frechet_distance(m1, s1, m2, s2) + + return fid_value + + +def save_fid_stats(paths, batch_size, device, dims, num_workers=1): + """Saves FID statistics of one path""" + if not os.path.exists(paths[0]): + raise RuntimeError("Invalid path: %s" % paths[0]) + + if os.path.exists(paths[1]): + raise RuntimeError("Existing output file: %s" % paths[1]) + + block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims] + + model = InceptionV3([block_idx]).to(device) + + print(f"Saving statistics for {paths[0]}") + + m1, s1 = compute_statistics_of_path( + paths[0], model, batch_size, dims, device, num_workers + ) + + np.savez_compressed(paths[1], mu=m1, sigma=s1) + + +def main(): + args = parser.parse_args() + + if args.device is None: + device = torch.device("cuda" if (torch.cuda.is_available()) else "cpu") + else: + device = torch.device(args.device) + + if 
args.num_workers is None: + try: + num_cpus = len(os.sched_getaffinity(0)) + except AttributeError: + # os.sched_getaffinity is not available under Windows, use + # os.cpu_count instead (which may not return the *available* number + # of CPUs). + num_cpus = os.cpu_count() + + num_workers = min(num_cpus, 8) if num_cpus is not None else 0 + else: + num_workers = args.num_workers + + if args.save_stats: + save_fid_stats(args.path, args.batch_size, device, args.dims, num_workers) + return + + fid_value = calculate_fid_given_paths( + args.path, args.batch_size, device, args.dims, num_workers + ) + print("FID: ", fid_value) + + +if __name__ == "__main__": + main() diff --git a/Wav2Lip-master/evaluation/pytorch-fid-master/src/pytorch_fid/inception.py b/Wav2Lip-master/evaluation/pytorch-fid-master/src/pytorch_fid/inception.py new file mode 100644 index 00000000..a6fb4652 --- /dev/null +++ b/Wav2Lip-master/evaluation/pytorch-fid-master/src/pytorch_fid/inception.py @@ -0,0 +1,344 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +import torchvision + +try: + from torchvision.models.utils import load_state_dict_from_url +except ImportError: + from torch.utils.model_zoo import load_url as load_state_dict_from_url + +# Inception weights ported to Pytorch from +# http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz +FID_WEIGHTS_URL = "https://github.com/mseitzer/pytorch-fid/releases/download/fid_weights/pt_inception-2015-12-05-6726825d.pth" # noqa: E501 + + +class InceptionV3(nn.Module): + """Pretrained InceptionV3 network returning feature maps""" + + # Index of default block of inception to return, + # corresponds to output of final average pooling + DEFAULT_BLOCK_INDEX = 3 + + # Maps feature dimensionality to their output blocks indices + BLOCK_INDEX_BY_DIM = { + 64: 0, # First max pooling features + 192: 1, # Second max pooling featurs + 768: 2, # Pre-aux classifier features + 2048: 3, # Final average pooling features + } + + 
def __init__( + self, + output_blocks=(DEFAULT_BLOCK_INDEX,), + resize_input=True, + normalize_input=True, + requires_grad=False, + use_fid_inception=True, + ): + """Build pretrained InceptionV3 + + Parameters + ---------- + output_blocks : list of int + Indices of blocks to return features of. Possible values are: + - 0: corresponds to output of first max pooling + - 1: corresponds to output of second max pooling + - 2: corresponds to output which is fed to aux classifier + - 3: corresponds to output of final average pooling + resize_input : bool + If true, bilinearly resizes input to width and height 299 before + feeding input to model. As the network without fully connected + layers is fully convolutional, it should be able to handle inputs + of arbitrary size, so resizing might not be strictly needed + normalize_input : bool + If true, scales the input from range (0, 1) to the range the + pretrained Inception network expects, namely (-1, 1) + requires_grad : bool + If true, parameters of the model require gradients. Possibly useful + for finetuning the network + use_fid_inception : bool + If true, uses the pretrained Inception model used in Tensorflow's + FID implementation. If false, uses the pretrained Inception model + available in torchvision. The FID Inception model has different + weights and a slightly different structure from torchvision's + Inception model. If you want to compute FID scores, you are + strongly advised to set this parameter to true to get comparable + results. 
+ """ + super(InceptionV3, self).__init__() + + self.resize_input = resize_input + self.normalize_input = normalize_input + self.output_blocks = sorted(output_blocks) + self.last_needed_block = max(output_blocks) + + assert self.last_needed_block <= 3, "Last possible output block index is 3" + + self.blocks = nn.ModuleList() + + if use_fid_inception: + inception = fid_inception_v3() + else: + inception = _inception_v3(weights="DEFAULT") + + # Block 0: input to maxpool1 + block0 = [ + inception.Conv2d_1a_3x3, + inception.Conv2d_2a_3x3, + inception.Conv2d_2b_3x3, + nn.MaxPool2d(kernel_size=3, stride=2), + ] + self.blocks.append(nn.Sequential(*block0)) + + # Block 1: maxpool1 to maxpool2 + if self.last_needed_block >= 1: + block1 = [ + inception.Conv2d_3b_1x1, + inception.Conv2d_4a_3x3, + nn.MaxPool2d(kernel_size=3, stride=2), + ] + self.blocks.append(nn.Sequential(*block1)) + + # Block 2: maxpool2 to aux classifier + if self.last_needed_block >= 2: + block2 = [ + inception.Mixed_5b, + inception.Mixed_5c, + inception.Mixed_5d, + inception.Mixed_6a, + inception.Mixed_6b, + inception.Mixed_6c, + inception.Mixed_6d, + inception.Mixed_6e, + ] + self.blocks.append(nn.Sequential(*block2)) + + # Block 3: aux classifier to final avgpool + if self.last_needed_block >= 3: + block3 = [ + inception.Mixed_7a, + inception.Mixed_7b, + inception.Mixed_7c, + nn.AdaptiveAvgPool2d(output_size=(1, 1)), + ] + self.blocks.append(nn.Sequential(*block3)) + + for param in self.parameters(): + param.requires_grad = requires_grad + + def forward(self, inp): + """Get Inception feature maps + + Parameters + ---------- + inp : torch.autograd.Variable + Input tensor of shape Bx3xHxW. 
Values are expected to be in + range (0, 1) + + Returns + ------- + List of torch.autograd.Variable, corresponding to the selected output + block, sorted ascending by index + """ + outp = [] + x = inp + + if self.resize_input: + x = F.interpolate(x, size=(299, 299), mode="bilinear", align_corners=False) + + if self.normalize_input: + x = 2 * x - 1 # Scale from range (0, 1) to range (-1, 1) + + for idx, block in enumerate(self.blocks): + x = block(x) + if idx in self.output_blocks: + outp.append(x) + + if idx == self.last_needed_block: + break + + return outp + + +def _inception_v3(*args, **kwargs): + """Wraps `torchvision.models.inception_v3`""" + try: + version = tuple(map(int, torchvision.__version__.split(".")[:2])) + except ValueError: + # Just a caution against weird version strings + version = (0,) + + # Skips default weight inititialization if supported by torchvision + # version. See https://github.com/mseitzer/pytorch-fid/issues/28. + if version >= (0, 6): + kwargs["init_weights"] = False + + # Backwards compatibility: `weights` argument was handled by `pretrained` + # argument prior to version 0.13. + if version < (0, 13) and "weights" in kwargs: + if kwargs["weights"] == "DEFAULT": + kwargs["pretrained"] = True + elif kwargs["weights"] is None: + kwargs["pretrained"] = False + else: + raise ValueError( + "weights=={} not supported in torchvision {}".format( + kwargs["weights"], torchvision.__version__ + ) + ) + del kwargs["weights"] + + return torchvision.models.inception_v3(*args, **kwargs) + + +def fid_inception_v3(): + """Build pretrained Inception model for FID computation + + The Inception model for FID computation uses a different set of weights + and has a slightly different structure than torchvision's Inception. + + This method first constructs torchvision's Inception and then patches the + necessary parts that are different in the FID Inception model. 
+ """ + inception = _inception_v3(num_classes=1008, aux_logits=False, weights=None) + inception.Mixed_5b = FIDInceptionA(192, pool_features=32) + inception.Mixed_5c = FIDInceptionA(256, pool_features=64) + inception.Mixed_5d = FIDInceptionA(288, pool_features=64) + inception.Mixed_6b = FIDInceptionC(768, channels_7x7=128) + inception.Mixed_6c = FIDInceptionC(768, channels_7x7=160) + inception.Mixed_6d = FIDInceptionC(768, channels_7x7=160) + inception.Mixed_6e = FIDInceptionC(768, channels_7x7=192) + inception.Mixed_7b = FIDInceptionE_1(1280) + inception.Mixed_7c = FIDInceptionE_2(2048) + + state_dict = load_state_dict_from_url(FID_WEIGHTS_URL, progress=True) + inception.load_state_dict(state_dict) + return inception + + +class FIDInceptionA(torchvision.models.inception.InceptionA): + """InceptionA block patched for FID computation""" + + def __init__(self, in_channels, pool_features): + super(FIDInceptionA, self).__init__(in_channels, pool_features) + + def forward(self, x): + branch1x1 = self.branch1x1(x) + + branch5x5 = self.branch5x5_1(x) + branch5x5 = self.branch5x5_2(branch5x5) + + branch3x3dbl = self.branch3x3dbl_1(x) + branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl) + branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl) + + # Patch: Tensorflow's average pool does not use the padded zero's in + # its average calculation + branch_pool = F.avg_pool2d( + x, kernel_size=3, stride=1, padding=1, count_include_pad=False + ) + branch_pool = self.branch_pool(branch_pool) + + outputs = [branch1x1, branch5x5, branch3x3dbl, branch_pool] + return torch.cat(outputs, 1) + + +class FIDInceptionC(torchvision.models.inception.InceptionC): + """InceptionC block patched for FID computation""" + + def __init__(self, in_channels, channels_7x7): + super(FIDInceptionC, self).__init__(in_channels, channels_7x7) + + def forward(self, x): + branch1x1 = self.branch1x1(x) + + branch7x7 = self.branch7x7_1(x) + branch7x7 = self.branch7x7_2(branch7x7) + branch7x7 = self.branch7x7_3(branch7x7) 
+ + branch7x7dbl = self.branch7x7dbl_1(x) + branch7x7dbl = self.branch7x7dbl_2(branch7x7dbl) + branch7x7dbl = self.branch7x7dbl_3(branch7x7dbl) + branch7x7dbl = self.branch7x7dbl_4(branch7x7dbl) + branch7x7dbl = self.branch7x7dbl_5(branch7x7dbl) + + # Patch: Tensorflow's average pool does not use the padded zero's in + # its average calculation + branch_pool = F.avg_pool2d( + x, kernel_size=3, stride=1, padding=1, count_include_pad=False + ) + branch_pool = self.branch_pool(branch_pool) + + outputs = [branch1x1, branch7x7, branch7x7dbl, branch_pool] + return torch.cat(outputs, 1) + + +class FIDInceptionE_1(torchvision.models.inception.InceptionE): + """First InceptionE block patched for FID computation""" + + def __init__(self, in_channels): + super(FIDInceptionE_1, self).__init__(in_channels) + + def forward(self, x): + branch1x1 = self.branch1x1(x) + + branch3x3 = self.branch3x3_1(x) + branch3x3 = [ + self.branch3x3_2a(branch3x3), + self.branch3x3_2b(branch3x3), + ] + branch3x3 = torch.cat(branch3x3, 1) + + branch3x3dbl = self.branch3x3dbl_1(x) + branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl) + branch3x3dbl = [ + self.branch3x3dbl_3a(branch3x3dbl), + self.branch3x3dbl_3b(branch3x3dbl), + ] + branch3x3dbl = torch.cat(branch3x3dbl, 1) + + # Patch: Tensorflow's average pool does not use the padded zero's in + # its average calculation + branch_pool = F.avg_pool2d( + x, kernel_size=3, stride=1, padding=1, count_include_pad=False + ) + branch_pool = self.branch_pool(branch_pool) + + outputs = [branch1x1, branch3x3, branch3x3dbl, branch_pool] + return torch.cat(outputs, 1) + + +class FIDInceptionE_2(torchvision.models.inception.InceptionE): + """Second InceptionE block patched for FID computation""" + + def __init__(self, in_channels): + super(FIDInceptionE_2, self).__init__(in_channels) + + def forward(self, x): + branch1x1 = self.branch1x1(x) + + branch3x3 = self.branch3x3_1(x) + branch3x3 = [ + self.branch3x3_2a(branch3x3), + self.branch3x3_2b(branch3x3), + ] + 
branch3x3 = torch.cat(branch3x3, 1) + + branch3x3dbl = self.branch3x3dbl_1(x) + branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl) + branch3x3dbl = [ + self.branch3x3dbl_3a(branch3x3dbl), + self.branch3x3dbl_3b(branch3x3dbl), + ] + branch3x3dbl = torch.cat(branch3x3dbl, 1) + + # Patch: The FID Inception model uses max pooling instead of average + # pooling. This is likely an error in this specific Inception + # implementation, as other Inception models use average pooling here + # (which matches the description in the paper). + branch_pool = F.max_pool2d(x, kernel_size=3, stride=1, padding=1) + branch_pool = self.branch_pool(branch_pool) + + outputs = [branch1x1, branch3x3, branch3x3dbl, branch_pool] + return torch.cat(outputs, 1) diff --git a/Wav2Lip-master/evaluation/pytorch-fid-master/tests/test_fid_score.py b/Wav2Lip-master/evaluation/pytorch-fid-master/tests/test_fid_score.py new file mode 100644 index 00000000..4e4b6a15 --- /dev/null +++ b/Wav2Lip-master/evaluation/pytorch-fid-master/tests/test_fid_score.py @@ -0,0 +1,102 @@ +import numpy as np +import pytest +import torch +from PIL import Image + +from pytorch_fid import fid_score, inception + + +@pytest.fixture +def device(): + return torch.device("cpu") + + +def test_calculate_fid_given_statistics(mocker, tmp_path, device): + dim = 2048 + m1, m2 = np.zeros((dim,)), np.ones((dim,)) + sigma = np.eye(dim) + + def dummy_statistics(path, model, batch_size, dims, device, num_workers): + if path.endswith("1"): + return m1, sigma + elif path.endswith("2"): + return m2, sigma + else: + raise ValueError + + mocker.patch( + "pytorch_fid.fid_score.compute_statistics_of_path", side_effect=dummy_statistics + ) + + dir_names = ["1", "2"] + paths = [] + for name in dir_names: + path = tmp_path / name + path.mkdir() + paths.append(str(path)) + + fid_value = fid_score.calculate_fid_given_paths( + paths, batch_size=dim, device=device, dims=dim, num_workers=0 + ) + + # Given equal covariance, FID is just the squared norm of 
difference + assert fid_value == np.sum((m1 - m2) ** 2) + + +def test_compute_statistics_of_path(mocker, tmp_path, device): + model = mocker.MagicMock(inception.InceptionV3)() + model.side_effect = lambda inp: [inp.mean(dim=(2, 3), keepdim=True)] + + size = (4, 4, 3) + arrays = [np.zeros(size), np.ones(size) * 0.5, np.ones(size)] + images = [(arr * 255).astype(np.uint8) for arr in arrays] + + paths = [] + for idx, image in enumerate(images): + paths.append(str(tmp_path / "{}.png".format(idx))) + Image.fromarray(image, mode="RGB").save(paths[-1]) + + stats = fid_score.compute_statistics_of_path( + str(tmp_path), + model, + batch_size=len(images), + dims=3, + device=device, + num_workers=0, + ) + + assert np.allclose(stats[0], np.ones((3,)) * 0.5, atol=1e-3) + assert np.allclose(stats[1], np.ones((3, 3)) * 0.25) + + +def test_compute_statistics_of_path_from_file(mocker, tmp_path, device): + model = mocker.MagicMock(inception.InceptionV3)() + + mu = np.random.randn(5) + sigma = np.random.randn(5, 5) + + path = tmp_path / "stats.npz" + with path.open("wb") as f: + np.savez(f, mu=mu, sigma=sigma) + + stats = fid_score.compute_statistics_of_path( + str(path), model, batch_size=1, dims=5, device=device, num_workers=0 + ) + + assert np.allclose(stats[0], mu) + assert np.allclose(stats[1], sigma) + + +def test_image_types(tmp_path): + in_arr = np.ones((24, 24, 3), dtype=np.uint8) * 255 + in_image = Image.fromarray(in_arr, mode="RGB") + + paths = [] + for ext in fid_score.IMAGE_EXTENSIONS: + paths.append(str(tmp_path / "img.{}".format(ext))) + in_image.save(paths[-1]) + + dataset = fid_score.ImagePathDataset(paths) + + for img in dataset: + assert np.allclose(np.array(img), in_arr) diff --git a/Wav2Lip-master/evaluation/real_videos_inference.py b/Wav2Lip-master/evaluation/real_videos_inference.py new file mode 100644 index 00000000..8c9fb15e --- /dev/null +++ b/Wav2Lip-master/evaluation/real_videos_inference.py @@ -0,0 +1,305 @@ +from os import listdir, path +import numpy as 
np +import scipy, cv2, os, sys, argparse +import dlib, json, subprocess +from tqdm import tqdm +from glob import glob +import torch + +sys.path.append('../') +import audio +import face_detection +from models import Wav2Lip + +parser = argparse.ArgumentParser(description='Code to generate results on ReSyncED evaluation set') + +parser.add_argument('--mode', type=str, + help='random | dubbed | tts', required=True) + +parser.add_argument('--filelist', type=str, + help='Filepath of filelist file to read', default=None) + +parser.add_argument('--results_dir', type=str, help='Folder to save all results into', + required=True) +parser.add_argument('--data_root', type=str, required=True) +parser.add_argument('--checkpoint_path', type=str, + help='Name of saved checkpoint to load weights from', required=True) +parser.add_argument('--pads', nargs='+', type=int, default=[0, 10, 0, 0], + help='Padding (top, bottom, left, right)') + +parser.add_argument('--face_det_batch_size', type=int, + help='Single GPU batch size for face detection', default=16) + +parser.add_argument('--wav2lip_batch_size', type=int, help='Batch size for Wav2Lip', default=128) +parser.add_argument('--face_res', help='Approximate resolution of the face at which to test', default=180) +parser.add_argument('--min_frame_res', help='Do not downsample further below this frame resolution', default=480) +parser.add_argument('--max_frame_res', help='Downsample to at least this frame resolution', default=720) +# parser.add_argument('--resize_factor', default=1, type=int) + +args = parser.parse_args() +args.img_size = 96 + +def get_smoothened_boxes(boxes, T): + for i in range(len(boxes)): + if i + T > len(boxes): + window = boxes[len(boxes) - T:] + else: + window = boxes[i : i + T] + boxes[i] = np.mean(window, axis=0) + return boxes + +def rescale_frames(images): + rect = detector.get_detections_for_batch(np.array([images[0]]))[0] + if rect is None: + raise ValueError('Face not detected!') + h, w = 
images[0].shape[:-1] + + x1, y1, x2, y2 = rect + + face_size = max(np.abs(y1 - y2), np.abs(x1 - x2)) + + diff = np.abs(face_size - args.face_res) + for factor in range(2, 16): + downsampled_res = face_size // factor + if min(h//factor, w//factor) < args.min_frame_res: break + if np.abs(downsampled_res - args.face_res) >= diff: break + + factor -= 1 + if factor == 1: return images + + return [cv2.resize(im, (im.shape[1]//(factor), im.shape[0]//(factor))) for im in images] + + +def face_detect(images): + batch_size = args.face_det_batch_size + images = rescale_frames(images) + + while 1: + predictions = [] + try: + for i in range(0, len(images), batch_size): + predictions.extend(detector.get_detections_for_batch(np.array(images[i:i + batch_size]))) + except RuntimeError: + if batch_size == 1: + raise RuntimeError('Image too big to run face detection on GPU') + batch_size //= 2 + print('Recovering from OOM error; New batch size: {}'.format(batch_size)) + continue + break + + results = [] + pady1, pady2, padx1, padx2 = args.pads + for rect, image in zip(predictions, images): + if rect is None: + raise ValueError('Face not detected!') + + y1 = max(0, rect[1] - pady1) + y2 = min(image.shape[0], rect[3] + pady2) + x1 = max(0, rect[0] - padx1) + x2 = min(image.shape[1], rect[2] + padx2) + + results.append([x1, y1, x2, y2]) + + boxes = get_smoothened_boxes(np.array(results), T=5) + results = [[image[y1: y2, x1:x2], (y1, y2, x1, x2), True] for image, (x1, y1, x2, y2) in zip(images, boxes)] + + return results, images + +def datagen(frames, face_det_results, mels): + img_batch, mel_batch, frame_batch, coords_batch = [], [], [], [] + + for i, m in enumerate(mels): + if i >= len(frames): raise ValueError('Equal or less lengths only') + + frame_to_save = frames[i].copy() + face, coords, valid_frame = face_det_results[i].copy() + if not valid_frame: + continue + + face = cv2.resize(face, (args.img_size, args.img_size)) + + img_batch.append(face) + mel_batch.append(m) + 
frame_batch.append(frame_to_save) + coords_batch.append(coords) + + if len(img_batch) >= args.wav2lip_batch_size: + img_batch, mel_batch = np.asarray(img_batch), np.asarray(mel_batch) + + img_masked = img_batch.copy() + img_masked[:, args.img_size//2:] = 0 + + img_batch = np.concatenate((img_masked, img_batch), axis=3) / 255. + mel_batch = np.reshape(mel_batch, [len(mel_batch), mel_batch.shape[1], mel_batch.shape[2], 1]) + + yield img_batch, mel_batch, frame_batch, coords_batch + img_batch, mel_batch, frame_batch, coords_batch = [], [], [], [] + + if len(img_batch) > 0: + img_batch, mel_batch = np.asarray(img_batch), np.asarray(mel_batch) + + img_masked = img_batch.copy() + img_masked[:, args.img_size//2:] = 0 + + img_batch = np.concatenate((img_masked, img_batch), axis=3) / 255. + mel_batch = np.reshape(mel_batch, [len(mel_batch), mel_batch.shape[1], mel_batch.shape[2], 1]) + + yield img_batch, mel_batch, frame_batch, coords_batch + +def increase_frames(frames, l): + ## evenly duplicating frames to increase length of video + while len(frames) < l: + dup_every = float(l) / len(frames) + + final_frames = [] + next_duplicate = 0. 
+ + for i, f in enumerate(frames): + final_frames.append(f) + + if int(np.ceil(next_duplicate)) == i: + final_frames.append(f) + + next_duplicate += dup_every + + frames = final_frames + + return frames[:l] + +mel_step_size = 16 +device = 'cuda' if torch.cuda.is_available() else 'cpu' +print('Using {} for inference.'.format(device)) + +detector = face_detection.FaceAlignment(face_detection.LandmarksType._2D, + flip_input=False, device=device) + +def _load(checkpoint_path): + if device == 'cuda': + checkpoint = torch.load(checkpoint_path) + else: + checkpoint = torch.load(checkpoint_path, + map_location=lambda storage, loc: storage) + return checkpoint + +def load_model(path): + model = Wav2Lip() + print("Load checkpoint from: {}".format(path)) + checkpoint = _load(path) + s = checkpoint["state_dict"] + new_s = {} + for k, v in s.items(): + new_s[k.replace('module.', '')] = v + model.load_state_dict(new_s) + + model = model.to(device) + return model.eval() + +model = load_model(args.checkpoint_path) + +def main(): + if not os.path.isdir(args.results_dir): os.makedirs(args.results_dir) + + if args.mode == 'dubbed': + files = listdir(args.data_root) + lines = ['{} {}'.format(f, f) for f in files] + + else: + assert args.filelist is not None + with open(args.filelist, 'r') as filelist: + lines = filelist.readlines() + + for idx, line in enumerate(tqdm(lines)): + video, audio_src = line.strip().split() + + audio_src = os.path.join(args.data_root, audio_src) + video = os.path.join(args.data_root, video) + + command = 'ffmpeg -loglevel panic -y -i {} -strict -2 {}'.format(audio_src, '../temp/temp.wav') + subprocess.call(command, shell=True) + temp_audio = '../temp/temp.wav' + + wav = audio.load_wav(temp_audio, 16000) + mel = audio.melspectrogram(wav) + + if np.isnan(mel.reshape(-1)).sum() > 0: + raise ValueError('Mel contains nan!') + + video_stream = cv2.VideoCapture(video) + + fps = video_stream.get(cv2.CAP_PROP_FPS) + mel_idx_multiplier = 80./fps + + full_frames = [] + 
while 1: + still_reading, frame = video_stream.read() + if not still_reading: + video_stream.release() + break + + if min(frame.shape[:-1]) > args.max_frame_res: + h, w = frame.shape[:-1] + scale_factor = min(h, w) / float(args.max_frame_res) + h = int(h/scale_factor) + w = int(w/scale_factor) + + frame = cv2.resize(frame, (w, h)) + full_frames.append(frame) + + mel_chunks = [] + i = 0 + while 1: + start_idx = int(i * mel_idx_multiplier) + if start_idx + mel_step_size > len(mel[0]): + break + mel_chunks.append(mel[:, start_idx : start_idx + mel_step_size]) + i += 1 + + if len(full_frames) < len(mel_chunks): + if args.mode == 'tts': + full_frames = increase_frames(full_frames, len(mel_chunks)) + else: + raise ValueError('#Frames, audio length mismatch') + + else: + full_frames = full_frames[:len(mel_chunks)] + + try: + face_det_results, full_frames = face_detect(full_frames.copy()) + except ValueError as e: + continue + + batch_size = args.wav2lip_batch_size + gen = datagen(full_frames.copy(), face_det_results, mel_chunks) + + for i, (img_batch, mel_batch, frames, coords) in enumerate(gen): + if i == 0: + frame_h, frame_w = full_frames[0].shape[:-1] + + out = cv2.VideoWriter('../temp/result.avi', + cv2.VideoWriter_fourcc(*'DIVX'), fps, (frame_w, frame_h)) + + img_batch = torch.FloatTensor(np.transpose(img_batch, (0, 3, 1, 2))).to(device) + mel_batch = torch.FloatTensor(np.transpose(mel_batch, (0, 3, 1, 2))).to(device) + + with torch.no_grad(): + pred = model(mel_batch, img_batch) + + + pred = pred.cpu().numpy().transpose(0, 2, 3, 1) * 255. 
+ + for pl, f, c in zip(pred, frames, coords): + y1, y2, x1, x2 = c + pl = cv2.resize(pl.astype(np.uint8), (x2 - x1, y2 - y1)) + f[y1:y2, x1:x2] = pl + out.write(f) + + out.release() + + vid = os.path.join(args.results_dir, '{}.mp4'.format(idx)) + command = 'ffmpeg -loglevel panic -y -i {} -i {} -strict -2 -q:v 1 {}'.format('../temp/temp.wav', + '../temp/result.avi', vid) + subprocess.call(command, shell=True) + + +if __name__ == '__main__': + main() diff --git a/Wav2Lip-master/evaluation/scores_LSE/SyncNetInstance_calc_scores.py b/Wav2Lip-master/evaluation/scores_LSE/SyncNetInstance_calc_scores.py new file mode 100644 index 00000000..64906e25 --- /dev/null +++ b/Wav2Lip-master/evaluation/scores_LSE/SyncNetInstance_calc_scores.py @@ -0,0 +1,210 @@ +#!/usr/bin/python +#-*- coding: utf-8 -*- +# Video 25 FPS, Audio 16000HZ + +import torch +import numpy +import time, pdb, argparse, subprocess, os, math, glob +import cv2 +import python_speech_features + +from scipy import signal +from scipy.io import wavfile +from SyncNetModel import * +from shutil import rmtree + + +# ==================== Get OFFSET ==================== + +def calc_pdist(feat1, feat2, vshift=10): + + win_size = vshift*2+1 + + feat2p = torch.nn.functional.pad(feat2,(0,0,vshift,vshift)) + + dists = [] + + for i in range(0,len(feat1)): + + dists.append(torch.nn.functional.pairwise_distance(feat1[[i],:].repeat(win_size, 1), feat2p[i:i+win_size,:])) + + return dists + +# ==================== MAIN DEF ==================== + +class SyncNetInstance(torch.nn.Module): + + def __init__(self, dropout = 0, num_layers_in_fc_layers = 1024): + super(SyncNetInstance, self).__init__(); + + self.__S__ = S(num_layers_in_fc_layers = num_layers_in_fc_layers).cuda(); + + def evaluate(self, opt, videofile): + + self.__S__.eval(); + + # ========== ========== + # Convert files + # ========== ========== + + if os.path.exists(os.path.join(opt.tmp_dir,opt.reference)): + rmtree(os.path.join(opt.tmp_dir,opt.reference)) + + 
os.makedirs(os.path.join(opt.tmp_dir,opt.reference)) + + command = ("ffmpeg -loglevel error -y -i %s -threads 1 -f image2 %s" % (videofile,os.path.join(opt.tmp_dir,opt.reference,'%06d.jpg'))) + output = subprocess.call(command, shell=True, stdout=None) + + command = ("ffmpeg -loglevel error -y -i %s -async 1 -ac 1 -vn -acodec pcm_s16le -ar 16000 %s" % (videofile,os.path.join(opt.tmp_dir,opt.reference,'audio.wav'))) + output = subprocess.call(command, shell=True, stdout=None) + + # ========== ========== + # Load video + # ========== ========== + + images = [] + + flist = glob.glob(os.path.join(opt.tmp_dir,opt.reference,'*.jpg')) + flist.sort() + + for fname in flist: + img_input = cv2.imread(fname) + img_input = cv2.resize(img_input, (224,224)) #HARD CODED, CHANGE BEFORE RELEASE + images.append(img_input) + + im = numpy.stack(images,axis=3) + im = numpy.expand_dims(im,axis=0) + im = numpy.transpose(im,(0,3,4,1,2)) + + imtv = torch.autograd.Variable(torch.from_numpy(im.astype(float)).float()) + + # ========== ========== + # Load audio + # ========== ========== + + sample_rate, audio = wavfile.read(os.path.join(opt.tmp_dir,opt.reference,'audio.wav')) + mfcc = zip(*python_speech_features.mfcc(audio,sample_rate)) + mfcc = numpy.stack([numpy.array(i) for i in mfcc]) + + cc = numpy.expand_dims(numpy.expand_dims(mfcc,axis=0),axis=0) + cct = torch.autograd.Variable(torch.from_numpy(cc.astype(float)).float()) + + # ========== ========== + # Check audio and video input length + # ========== ========== + + #if (float(len(audio))/16000) != (float(len(images))/25) : + # print("WARNING: Audio (%.4fs) and video (%.4fs) lengths are different."%(float(len(audio))/16000,float(len(images))/25)) + + min_length = min(len(images),math.floor(len(audio)/640)) + + # ========== ========== + # Generate video and audio feats + # ========== ========== + + lastframe = min_length-5 + im_feat = [] + cc_feat = [] + + tS = time.time() + for i in range(0,lastframe,opt.batch_size): + + im_batch = [ 
imtv[:,:,vframe:vframe+5,:,:] for vframe in range(i,min(lastframe,i+opt.batch_size)) ] + im_in = torch.cat(im_batch,0) + im_out = self.__S__.forward_lip(im_in.cuda()); + im_feat.append(im_out.data.cpu()) + + cc_batch = [ cct[:,:,:,vframe*4:vframe*4+20] for vframe in range(i,min(lastframe,i+opt.batch_size)) ] + cc_in = torch.cat(cc_batch,0) + cc_out = self.__S__.forward_aud(cc_in.cuda()) + cc_feat.append(cc_out.data.cpu()) + + im_feat = torch.cat(im_feat,0) + cc_feat = torch.cat(cc_feat,0) + + # ========== ========== + # Compute offset + # ========== ========== + + #print('Compute time %.3f sec.' % (time.time()-tS)) + + dists = calc_pdist(im_feat,cc_feat,vshift=opt.vshift) + mdist = torch.mean(torch.stack(dists,1),1) + + minval, minidx = torch.min(mdist,0) + + offset = opt.vshift-minidx + conf = torch.median(mdist) - minval + + fdist = numpy.stack([dist[minidx].numpy() for dist in dists]) + # fdist = numpy.pad(fdist, (3,3), 'constant', constant_values=15) + fconf = torch.median(mdist).numpy() - fdist + fconfm = signal.medfilt(fconf,kernel_size=9) + + numpy.set_printoptions(formatter={'float': '{: 0.3f}'.format}) + #print('Framewise conf: ') + #print(fconfm) + #print('AV offset: \t%d \nMin dist: \t%.3f\nConfidence: \t%.3f' % (offset,minval,conf)) + + dists_npy = numpy.array([ dist.numpy() for dist in dists ]) + return offset.numpy(), conf.numpy(), minval.numpy() + + def extract_feature(self, opt, videofile): + + self.__S__.eval(); + + # ========== ========== + # Load video + # ========== ========== + cap = cv2.VideoCapture(videofile) + + frame_num = 1; + images = [] + while frame_num: + frame_num += 1 + ret, image = cap.read() + if ret == 0: + break + + images.append(image) + + im = numpy.stack(images,axis=3) + im = numpy.expand_dims(im,axis=0) + im = numpy.transpose(im,(0,3,4,1,2)) + + imtv = torch.autograd.Variable(torch.from_numpy(im.astype(float)).float()) + + # ========== ========== + # Generate video feats + # ========== ========== + + lastframe = len(images)-4 
+ im_feat = [] + + tS = time.time() + for i in range(0,lastframe,opt.batch_size): + + im_batch = [ imtv[:,:,vframe:vframe+5,:,:] for vframe in range(i,min(lastframe,i+opt.batch_size)) ] + im_in = torch.cat(im_batch,0) + im_out = self.__S__.forward_lipfeat(im_in.cuda()); + im_feat.append(im_out.data.cpu()) + + im_feat = torch.cat(im_feat,0) + + # ========== ========== + # Compute offset + # ========== ========== + + print('Compute time %.3f sec.' % (time.time()-tS)) + + return im_feat + + + def loadParameters(self, path): + loaded_state = torch.load(path, map_location=lambda storage, loc: storage); + + self_state = self.__S__.state_dict(); + + for name, param in loaded_state.items(): + + self_state[name].copy_(param); diff --git a/Wav2Lip-master/evaluation/scores_LSE/calculate_scores_LRS.py b/Wav2Lip-master/evaluation/scores_LSE/calculate_scores_LRS.py new file mode 100644 index 00000000..eda02b8f --- /dev/null +++ b/Wav2Lip-master/evaluation/scores_LSE/calculate_scores_LRS.py @@ -0,0 +1,53 @@ +#!/usr/bin/python +#-*- coding: utf-8 -*- + +import time, pdb, argparse, subprocess +import glob +import os +from tqdm import tqdm + +from SyncNetInstance_calc_scores import * + +# ==================== LOAD PARAMS ==================== + + +parser = argparse.ArgumentParser(description = "SyncNet"); + +parser.add_argument('--initial_model', type=str, default="data/syncnet_v2.model", help=''); +parser.add_argument('--batch_size', type=int, default='20', help=''); +parser.add_argument('--vshift', type=int, default='15', help=''); +parser.add_argument('--data_root', type=str, required=True, help=''); +parser.add_argument('--tmp_dir', type=str, default="data/work/pytmp", help=''); +parser.add_argument('--reference', type=str, default="demo", help=''); + +opt = parser.parse_args(); + + +# ==================== RUN EVALUATION ==================== + +s = SyncNetInstance(); + +s.loadParameters(opt.initial_model); +#print("Model %s loaded."%opt.initial_model); +path = 
os.path.join(opt.data_root, "*.mp4") + +all_videos = glob.glob(path) + +prog_bar = tqdm(range(len(all_videos))) +avg_confidence = 0. +avg_min_distance = 0. + + +for videofile_idx in prog_bar: + videofile = all_videos[videofile_idx] + offset, confidence, min_distance = s.evaluate(opt, videofile=videofile) + avg_confidence += confidence + avg_min_distance += min_distance + prog_bar.set_description('Avg Confidence: {}, Avg Minimum Dist: {}'.format(round(avg_confidence / (videofile_idx + 1), 3), round(avg_min_distance / (videofile_idx + 1), 3))) + prog_bar.refresh() + +print ('Average Confidence: {}'.format(avg_confidence/len(all_videos))) +print ('Average Minimum Distance: {}'.format(avg_min_distance/len(all_videos))) + + + diff --git a/Wav2Lip-master/evaluation/scores_LSE/calculate_scores_real_videos.py b/Wav2Lip-master/evaluation/scores_LSE/calculate_scores_real_videos.py new file mode 100644 index 00000000..09622584 --- /dev/null +++ b/Wav2Lip-master/evaluation/scores_LSE/calculate_scores_real_videos.py @@ -0,0 +1,45 @@ +#!/usr/bin/python +#-*- coding: utf-8 -*- + +import time, pdb, argparse, subprocess, pickle, os, gzip, glob + +from SyncNetInstance_calc_scores import * + +# ==================== PARSE ARGUMENT ==================== + +parser = argparse.ArgumentParser(description = "SyncNet"); +parser.add_argument('--initial_model', type=str, default="data/syncnet_v2.model", help=''); +parser.add_argument('--batch_size', type=int, default='20', help=''); +parser.add_argument('--vshift', type=int, default='15', help=''); +parser.add_argument('--data_dir', type=str, default='data/work', help=''); +parser.add_argument('--videofile', type=str, default='', help=''); +parser.add_argument('--reference', type=str, default='', help=''); +opt = parser.parse_args(); + +setattr(opt,'avi_dir',os.path.join(opt.data_dir,'pyavi')) +setattr(opt,'tmp_dir',os.path.join(opt.data_dir,'pytmp')) +setattr(opt,'work_dir',os.path.join(opt.data_dir,'pywork')) 
+setattr(opt,'crop_dir',os.path.join(opt.data_dir,'pycrop')) + + +# ==================== LOAD MODEL AND FILE LIST ==================== + +s = SyncNetInstance(); + +s.loadParameters(opt.initial_model); +#print("Model %s loaded."%opt.initial_model); + +flist = glob.glob(os.path.join(opt.crop_dir,opt.reference,'0*.avi')) +flist.sort() + +# ==================== GET OFFSETS ==================== + +dists = [] +for idx, fname in enumerate(flist): + offset, conf, dist = s.evaluate(opt,videofile=fname) + print (str(dist)+" "+str(conf)) + +# ==================== PRINT RESULTS TO FILE ==================== + +#with open(os.path.join(opt.work_dir,opt.reference,'activesd.pckl'), 'wb') as fil: +# pickle.dump(dists, fil) diff --git a/Wav2Lip-master/evaluation/scores_LSE/calculate_scores_real_videos.sh b/Wav2Lip-master/evaluation/scores_LSE/calculate_scores_real_videos.sh new file mode 100644 index 00000000..4a45cd56 --- /dev/null +++ b/Wav2Lip-master/evaluation/scores_LSE/calculate_scores_real_videos.sh @@ -0,0 +1,8 @@ +rm all_scores.txt +yourfilenames=`ls $1` + +for eachfile in $yourfilenames +do + python run_pipeline.py --videofile $1/$eachfile --reference wav2lip --data_dir tmp_dir + python calculate_scores_real_videos.py --videofile $1/$eachfile --reference wav2lip --data_dir tmp_dir >> all_scores.txt +done diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/.gitignore b/Wav2Lip-master/evaluation/syncnet_python-master/.gitignore new file mode 100644 index 00000000..350ada00 --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/.gitignore @@ -0,0 +1,45 @@ +# Compiled source # +################### +*.com +*.class +*.dll +*.exe +*.o +*.so +*.pyc + +# Packages # +############ +# it's better to unpack these files and commit the raw source +# git has its own built in compression methods +*.7z +*.dmg +*.gz +*.iso +*.jar +*.rar +*.tar +*.zip + +# Logs and databases # +###################### +*.log +*.sql +*.sqlite + +# OS generated files # 
+###################### +.DS_Store +.DS_Store? +._* +.Spotlight-V100 +.Trashes +ehthumbs.db +Thumbs.db + +# Specific to this demo # +######################### +data/ +protos/ +utils/ +*.pth diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/LICENSE.md b/Wav2Lip-master/evaluation/syncnet_python-master/LICENSE.md new file mode 100644 index 00000000..de4a5458 --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/LICENSE.md @@ -0,0 +1,19 @@ +Copyright (c) 2016-present Joon Son Chung. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/README.md b/Wav2Lip-master/evaluation/syncnet_python-master/README.md new file mode 100644 index 00000000..7da53541 --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/README.md @@ -0,0 +1,59 @@ +# SyncNet + +This repository contains the demo for the audio-to-video synchronisation network (SyncNet). 
This network can be used for audio-visual synchronisation tasks including: +1. Removing temporal lags between the audio and visual streams in a video; +2. Determining who is speaking amongst multiple faces in a video. + +Please cite the paper below if you make use of the software. + +## Dependencies +``` +pip install -r requirements.txt +``` + +In addition, `ffmpeg` is required. + + +## Demo + +SyncNet demo: +``` +python demo_syncnet.py --videofile data/example.avi --tmp_dir /path/to/temp/directory +``` + +Check that this script returns: +``` +AV offset: 3 +Min dist: 5.353 +Confidence: 10.021 +``` + +Full pipeline: +``` +sh download_model.sh +python run_pipeline.py --videofile /path/to/video.mp4 --reference name_of_video --data_dir /path/to/output +python run_syncnet.py --videofile /path/to/video.mp4 --reference name_of_video --data_dir /path/to/output +python run_visualise.py --videofile /path/to/video.mp4 --reference name_of_video --data_dir /path/to/output +``` + +Outputs: +``` +$DATA_DIR/pycrop/$REFERENCE/*.avi - cropped face tracks +$DATA_DIR/pywork/$REFERENCE/offsets.txt - audio-video offset values +$DATA_DIR/pyavi/$REFERENCE/video_out.avi - output video (as shown below) +``` +

+ + +

+ +## Publications + +``` +@InProceedings{Chung16a, + author = "Chung, J.~S. and Zisserman, A.", + title = "Out of time: automated lip sync in the wild", + booktitle = "Workshop on Multi-view Lip-reading, ACCV", + year = "2016", +} +``` diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/SyncNetInstance.py b/Wav2Lip-master/evaluation/syncnet_python-master/SyncNetInstance.py new file mode 100644 index 00000000..497d44fc --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/SyncNetInstance.py @@ -0,0 +1,208 @@ +#!/usr/bin/python +#-*- coding: utf-8 -*- +# Video 25 FPS, Audio 16000HZ + +import torch +import numpy +import time, pdb, argparse, subprocess, os, math, glob +import cv2 +import python_speech_features + +from scipy import signal +from scipy.io import wavfile +from SyncNetModel import * +from shutil import rmtree + + +# ==================== Get OFFSET ==================== + +def calc_pdist(feat1, feat2, vshift=10): + + win_size = vshift*2+1 + + feat2p = torch.nn.functional.pad(feat2,(0,0,vshift,vshift)) + + dists = [] + + for i in range(0,len(feat1)): + + dists.append(torch.nn.functional.pairwise_distance(feat1[[i],:].repeat(win_size, 1), feat2p[i:i+win_size,:])) + + return dists + +# ==================== MAIN DEF ==================== + +class SyncNetInstance(torch.nn.Module): + + def __init__(self, dropout = 0, num_layers_in_fc_layers = 1024): + super(SyncNetInstance, self).__init__(); + + self.__S__ = S(num_layers_in_fc_layers = num_layers_in_fc_layers).cuda(); + + def evaluate(self, opt, videofile): + + self.__S__.eval(); + + # ========== ========== + # Convert files + # ========== ========== + + if os.path.exists(os.path.join(opt.tmp_dir,opt.reference)): + rmtree(os.path.join(opt.tmp_dir,opt.reference)) + + os.makedirs(os.path.join(opt.tmp_dir,opt.reference)) + + command = ("ffmpeg -y -i %s -threads 1 -f image2 %s" % (videofile,os.path.join(opt.tmp_dir,opt.reference,'%06d.jpg'))) + output = subprocess.call(command, shell=True, 
stdout=None) + + command = ("ffmpeg -y -i %s -async 1 -ac 1 -vn -acodec pcm_s16le -ar 16000 %s" % (videofile,os.path.join(opt.tmp_dir,opt.reference,'audio.wav'))) + output = subprocess.call(command, shell=True, stdout=None) + + # ========== ========== + # Load video + # ========== ========== + + images = [] + + flist = glob.glob(os.path.join(opt.tmp_dir,opt.reference,'*.jpg')) + flist.sort() + + for fname in flist: + images.append(cv2.imread(fname)) + + im = numpy.stack(images,axis=3) + im = numpy.expand_dims(im,axis=0) + im = numpy.transpose(im,(0,3,4,1,2)) + + imtv = torch.autograd.Variable(torch.from_numpy(im.astype(float)).float()) + + # ========== ========== + # Load audio + # ========== ========== + + sample_rate, audio = wavfile.read(os.path.join(opt.tmp_dir,opt.reference,'audio.wav')) + mfcc = zip(*python_speech_features.mfcc(audio,sample_rate)) + mfcc = numpy.stack([numpy.array(i) for i in mfcc]) + + cc = numpy.expand_dims(numpy.expand_dims(mfcc,axis=0),axis=0) + cct = torch.autograd.Variable(torch.from_numpy(cc.astype(float)).float()) + + # ========== ========== + # Check audio and video input length + # ========== ========== + + if (float(len(audio))/16000) != (float(len(images))/25) : + print("WARNING: Audio (%.4fs) and video (%.4fs) lengths are different."%(float(len(audio))/16000,float(len(images))/25)) + + min_length = min(len(images),math.floor(len(audio)/640)) + + # ========== ========== + # Generate video and audio feats + # ========== ========== + + lastframe = min_length-5 + im_feat = [] + cc_feat = [] + + tS = time.time() + for i in range(0,lastframe,opt.batch_size): + + im_batch = [ imtv[:,:,vframe:vframe+5,:,:] for vframe in range(i,min(lastframe,i+opt.batch_size)) ] + im_in = torch.cat(im_batch,0) + im_out = self.__S__.forward_lip(im_in.cuda()); + im_feat.append(im_out.data.cpu()) + + cc_batch = [ cct[:,:,:,vframe*4:vframe*4+20] for vframe in range(i,min(lastframe,i+opt.batch_size)) ] + cc_in = torch.cat(cc_batch,0) + cc_out = 
self.__S__.forward_aud(cc_in.cuda()) + cc_feat.append(cc_out.data.cpu()) + + im_feat = torch.cat(im_feat,0) + cc_feat = torch.cat(cc_feat,0) + + # ========== ========== + # Compute offset + # ========== ========== + + print('Compute time %.3f sec.' % (time.time()-tS)) + + dists = calc_pdist(im_feat,cc_feat,vshift=opt.vshift) + mdist = torch.mean(torch.stack(dists,1),1) + + minval, minidx = torch.min(mdist,0) + + offset = opt.vshift-minidx + conf = torch.median(mdist) - minval + + fdist = numpy.stack([dist[minidx].numpy() for dist in dists]) + # fdist = numpy.pad(fdist, (3,3), 'constant', constant_values=15) + fconf = torch.median(mdist).numpy() - fdist + fconfm = signal.medfilt(fconf,kernel_size=9) + + numpy.set_printoptions(formatter={'float': '{: 0.3f}'.format}) + print('Framewise conf: ') + print(fconfm) + print('AV offset: \t%d \nMin dist: \t%.3f\nConfidence: \t%.3f' % (offset,minval,conf)) + + dists_npy = numpy.array([ dist.numpy() for dist in dists ]) + return offset.numpy(), conf.numpy(), dists_npy + + def extract_feature(self, opt, videofile): + + self.__S__.eval(); + + # ========== ========== + # Load video + # ========== ========== + cap = cv2.VideoCapture(videofile) + + frame_num = 1; + images = [] + while frame_num: + frame_num += 1 + ret, image = cap.read() + if ret == 0: + break + + images.append(image) + + im = numpy.stack(images,axis=3) + im = numpy.expand_dims(im,axis=0) + im = numpy.transpose(im,(0,3,4,1,2)) + + imtv = torch.autograd.Variable(torch.from_numpy(im.astype(float)).float()) + + # ========== ========== + # Generate video feats + # ========== ========== + + lastframe = len(images)-4 + im_feat = [] + + tS = time.time() + for i in range(0,lastframe,opt.batch_size): + + im_batch = [ imtv[:,:,vframe:vframe+5,:,:] for vframe in range(i,min(lastframe,i+opt.batch_size)) ] + im_in = torch.cat(im_batch,0) + im_out = self.__S__.forward_lipfeat(im_in.cuda()); + im_feat.append(im_out.data.cpu()) + + im_feat = torch.cat(im_feat,0) + + # ========== 
========== + # Compute offset + # ========== ========== + + print('Compute time %.3f sec.' % (time.time()-tS)) + + return im_feat + + + def loadParameters(self, path): + loaded_state = torch.load(path, map_location=lambda storage, loc: storage); + + self_state = self.__S__.state_dict(); + + for name, param in loaded_state.items(): + + self_state[name].copy_(param); diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/SyncNetInstance_calc_scores.py b/Wav2Lip-master/evaluation/syncnet_python-master/SyncNetInstance_calc_scores.py new file mode 100644 index 00000000..480fbfcc --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/SyncNetInstance_calc_scores.py @@ -0,0 +1,210 @@ +#!/usr/bin/python +#-*- coding: utf-8 -*- +# Video 25 FPS, Audio 16000HZ + +import torch +import numpy +import time, pdb, argparse, subprocess, os, math, glob +import cv2 +import python_speech_features + +from scipy import signal +from scipy.io import wavfile +from SyncNetModel import * +from shutil import rmtree + + +# ==================== Get OFFSET ==================== + +def calc_pdist(feat1, feat2, vshift=10): + + win_size = vshift*2+1 + + feat2p = torch.nn.functional.pad(feat2,(0,0,vshift,vshift)) + + dists = [] + + for i in range(0,len(feat1)): + + dists.append(torch.nn.functional.pairwise_distance(feat1[[i],:].repeat(win_size, 1), feat2p[i:i+win_size,:])) + + return dists + +# ==================== MAIN DEF ==================== + +class SyncNetInstance(torch.nn.Module): + + def __init__(self, dropout = 0, num_layers_in_fc_layers = 1024): + super(SyncNetInstance, self).__init__(); + + self.__S__ = S(num_layers_in_fc_layers = num_layers_in_fc_layers).cuda(); + + def evaluate(self, opt, videofile): + + self.__S__.eval(); + + # ========== ========== + # Convert files + # ========== ========== + + if os.path.exists(os.path.join(opt.tmp_dir,opt.reference)): + rmtree(os.path.join(opt.tmp_dir,opt.reference)) + + os.makedirs(os.path.join(opt.tmp_dir,opt.reference)) + + 
command = ("ffmpeg -loglevel error -y -i %s -threads 1 -f image2 %s" % (videofile,os.path.join(opt.tmp_dir,opt.reference,'%06d.jpg'))) + output = subprocess.call(command, shell=True, stdout=None) + + command = ("ffmpeg -loglevel error -y -i %s -async 1 -ac 1 -vn -acodec pcm_s16le -ar 16000 %s" % (videofile,os.path.join(opt.tmp_dir,opt.reference,'audio.wav'))) + output = subprocess.call(command, shell=True, stdout=None) + + # ========== ========== + # Load video + # ========== ========== + + images = [] + + flist = glob.glob(os.path.join(opt.tmp_dir,opt.reference,'*.jpg')) + flist.sort() + + for fname in flist: + img_input = cv2.imread(fname) + img_input = cv2.resize(img_input, (224,224)) #HARD CODED, CHANGE BEFORE RELEASE + images.append(img_input) + + im = numpy.stack(images,axis=3) + im = numpy.expand_dims(im,axis=0) + im = numpy.transpose(im,(0,3,4,1,2)) + + imtv = torch.autograd.Variable(torch.from_numpy(im.astype(float)).float()) + + # ========== ========== + # Load audio + # ========== ========== + + sample_rate, audio = wavfile.read(os.path.join(opt.tmp_dir,opt.reference,'audio.wav')) + mfcc = zip(*python_speech_features.mfcc(audio,sample_rate)) + mfcc = numpy.stack([numpy.array(i) for i in mfcc]) + + cc = numpy.expand_dims(numpy.expand_dims(mfcc,axis=0),axis=0) + cct = torch.autograd.Variable(torch.from_numpy(cc.astype(float)).float()) + + # ========== ========== + # Check audio and video input length + # ========== ========== + + #if (float(len(audio))/16000) != (float(len(images))/25) : + # print("WARNING: Audio (%.4fs) and video (%.4fs) lengths are different."%(float(len(audio))/16000,float(len(images))/25)) + + min_length = min(len(images),math.floor(len(audio)/640)) + + # ========== ========== + # Generate video and audio feats + # ========== ========== + + lastframe = min_length-5 + im_feat = [] + cc_feat = [] + + tS = time.time() + for i in range(0,lastframe,opt.batch_size): + + im_batch = [ imtv[:,:,vframe:vframe+5,:,:] for vframe in 
range(i,min(lastframe,i+opt.batch_size)) ] + im_in = torch.cat(im_batch,0) + im_out = self.__S__.forward_lip(im_in.cuda()); + im_feat.append(im_out.data.cpu()) + + cc_batch = [ cct[:,:,:,vframe*4:vframe*4+20] for vframe in range(i,min(lastframe,i+opt.batch_size)) ] + cc_in = torch.cat(cc_batch,0) + cc_out = self.__S__.forward_aud(cc_in.cuda()) + cc_feat.append(cc_out.data.cpu()) + + im_feat = torch.cat(im_feat,0) + cc_feat = torch.cat(cc_feat,0) + + # ========== ========== + # Compute offset + # ========== ========== + + #print('Compute time %.3f sec.' % (time.time()-tS)) + + dists = calc_pdist(im_feat,cc_feat,vshift=opt.vshift) + mdist = torch.mean(torch.stack(dists,1),1) + + minval, minidx = torch.min(mdist,0) + + offset = opt.vshift-minidx + conf = torch.median(mdist) - minval + + fdist = numpy.stack([dist[minidx].numpy() for dist in dists]) + # fdist = numpy.pad(fdist, (3,3), 'constant', constant_values=15) + fconf = torch.median(mdist).numpy() - fdist + fconfm = signal.medfilt(fconf,kernel_size=9) + + numpy.set_printoptions(formatter={'float': '{: 0.3f}'.format}) + #print('Framewise conf: ') + #print(fconfm) + #print('AV offset: \t%d \nMin dist: \t%.3f\nConfidence: \t%.3f' % (offset,minval,conf)) + + dists_npy = numpy.array([ dist.numpy() for dist in dists ]) + return offset.numpy(), conf.numpy(), minval.numpy() + + def extract_feature(self, opt, videofile): + + self.__S__.eval(); + + # ========== ========== + # Load video + # ========== ========== + cap = cv2.VideoCapture(videofile) + + frame_num = 1; + images = [] + while frame_num: + frame_num += 1 + ret, image = cap.read() + if ret == 0: + break + + images.append(image) + + im = numpy.stack(images,axis=3) + im = numpy.expand_dims(im,axis=0) + im = numpy.transpose(im,(0,3,4,1,2)) + + imtv = torch.autograd.Variable(torch.from_numpy(im.astype(float)).float()) + + # ========== ========== + # Generate video feats + # ========== ========== + + lastframe = len(images)-4 + im_feat = [] + + tS = time.time() + for i 
in range(0,lastframe,opt.batch_size): + + im_batch = [ imtv[:,:,vframe:vframe+5,:,:] for vframe in range(i,min(lastframe,i+opt.batch_size)) ] + im_in = torch.cat(im_batch,0) + im_out = self.__S__.forward_lipfeat(im_in.cuda()); + im_feat.append(im_out.data.cpu()) + + im_feat = torch.cat(im_feat,0) + + # ========== ========== + # Compute offset + # ========== ========== + + print('Compute time %.3f sec.' % (time.time()-tS)) + + return im_feat + + + def loadParameters(self, path): + loaded_state = torch.load(path, map_location=lambda storage, loc: storage, weights_only=True); + + self_state = self.__S__.state_dict(); + + for name, param in loaded_state.items(): + + self_state[name].copy_(param); diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/SyncNetModel.py b/Wav2Lip-master/evaluation/syncnet_python-master/SyncNetModel.py new file mode 100644 index 00000000..c21ce25c --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/SyncNetModel.py @@ -0,0 +1,117 @@ +#!/usr/bin/python +#-*- coding: utf-8 -*- + +import torch +import torch.nn as nn + +def save(model, filename): + with open(filename, "wb") as f: + torch.save(model, f); + print("%s saved."%filename); + +def load(filename): + net = torch.load(filename) + return net; + +class S(nn.Module): + def __init__(self, num_layers_in_fc_layers = 1024): + super(S, self).__init__(); + + self.__nFeatures__ = 24; + self.__nChs__ = 32; + self.__midChs__ = 32; + + self.netcnnaud = nn.Sequential( + nn.Conv2d(1, 64, kernel_size=(3,3), stride=(1,1), padding=(1,1)), + nn.BatchNorm2d(64), + nn.ReLU(inplace=True), + nn.MaxPool2d(kernel_size=(1,1), stride=(1,1)), + + nn.Conv2d(64, 192, kernel_size=(3,3), stride=(1,1), padding=(1,1)), + nn.BatchNorm2d(192), + nn.ReLU(inplace=True), + nn.MaxPool2d(kernel_size=(3,3), stride=(1,2)), + + nn.Conv2d(192, 384, kernel_size=(3,3), padding=(1,1)), + nn.BatchNorm2d(384), + nn.ReLU(inplace=True), + + nn.Conv2d(384, 256, kernel_size=(3,3), padding=(1,1)), + nn.BatchNorm2d(256), 
+ nn.ReLU(inplace=True), + + nn.Conv2d(256, 256, kernel_size=(3,3), padding=(1,1)), + nn.BatchNorm2d(256), + nn.ReLU(inplace=True), + nn.MaxPool2d(kernel_size=(3,3), stride=(2,2)), + + nn.Conv2d(256, 512, kernel_size=(5,4), padding=(0,0)), + nn.BatchNorm2d(512), + nn.ReLU(), + ); + + self.netfcaud = nn.Sequential( + nn.Linear(512, 512), + nn.BatchNorm1d(512), + nn.ReLU(), + nn.Linear(512, num_layers_in_fc_layers), + ); + + self.netfclip = nn.Sequential( + nn.Linear(512, 512), + nn.BatchNorm1d(512), + nn.ReLU(), + nn.Linear(512, num_layers_in_fc_layers), + ); + + self.netcnnlip = nn.Sequential( + nn.Conv3d(3, 96, kernel_size=(5,7,7), stride=(1,2,2), padding=0), + nn.BatchNorm3d(96), + nn.ReLU(inplace=True), + nn.MaxPool3d(kernel_size=(1,3,3), stride=(1,2,2)), + + nn.Conv3d(96, 256, kernel_size=(1,5,5), stride=(1,2,2), padding=(0,1,1)), + nn.BatchNorm3d(256), + nn.ReLU(inplace=True), + nn.MaxPool3d(kernel_size=(1,3,3), stride=(1,2,2), padding=(0,1,1)), + + nn.Conv3d(256, 256, kernel_size=(1,3,3), padding=(0,1,1)), + nn.BatchNorm3d(256), + nn.ReLU(inplace=True), + + nn.Conv3d(256, 256, kernel_size=(1,3,3), padding=(0,1,1)), + nn.BatchNorm3d(256), + nn.ReLU(inplace=True), + + nn.Conv3d(256, 256, kernel_size=(1,3,3), padding=(0,1,1)), + nn.BatchNorm3d(256), + nn.ReLU(inplace=True), + nn.MaxPool3d(kernel_size=(1,3,3), stride=(1,2,2)), + + nn.Conv3d(256, 512, kernel_size=(1,6,6), padding=0), + nn.BatchNorm3d(512), + nn.ReLU(inplace=True), + ); + + def forward_aud(self, x): + + mid = self.netcnnaud(x); # N x ch x 24 x M + mid = mid.view((mid.size()[0], -1)); # N x (ch x 24) + out = self.netfcaud(mid); + + return out; + + def forward_lip(self, x): + + mid = self.netcnnlip(x); + mid = mid.view((mid.size()[0], -1)); # N x (ch x 24) + out = self.netfclip(mid); + + return out; + + def forward_lipfeat(self, x): + + mid = self.netcnnlip(x); + out = mid.view((mid.size()[0], -1)); # N x (ch x 24) + + return out; \ No newline at end of file diff --git 
a/Wav2Lip-master/evaluation/syncnet_python-master/all_scores.txt b/Wav2Lip-master/evaluation/syncnet_python-master/all_scores.txt new file mode 100644 index 00000000..64f715e7 --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/all_scores.txt @@ -0,0 +1,14 @@ +5.9223375 9.719323 +5.9223375 9.719323 +5.9223375 9.719323 +5.294093 10.546199 +5.697413 10.127726 +5.916816 8.754602 +6.2492943 7.7773724 +5.8220344 10.318459 +5.801061 8.821535 +5.4944816 10.029444 +5.498189 9.814372 +6.336245 9.415861 +5.6072683 8.995091 +5.914622 10.038904 diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/calculate_scores_LRS.py b/Wav2Lip-master/evaluation/syncnet_python-master/calculate_scores_LRS.py new file mode 100644 index 00000000..eda02b8f --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/calculate_scores_LRS.py @@ -0,0 +1,53 @@ +#!/usr/bin/python +#-*- coding: utf-8 -*- + +import time, pdb, argparse, subprocess +import glob +import os +from tqdm import tqdm + +from SyncNetInstance_calc_scores import * + +# ==================== LOAD PARAMS ==================== + + +parser = argparse.ArgumentParser(description = "SyncNet"); + +parser.add_argument('--initial_model', type=str, default="data/syncnet_v2.model", help=''); +parser.add_argument('--batch_size', type=int, default='20', help=''); +parser.add_argument('--vshift', type=int, default='15', help=''); +parser.add_argument('--data_root', type=str, required=True, help=''); +parser.add_argument('--tmp_dir', type=str, default="data/work/pytmp", help=''); +parser.add_argument('--reference', type=str, default="demo", help=''); + +opt = parser.parse_args(); + + +# ==================== RUN EVALUATION ==================== + +s = SyncNetInstance(); + +s.loadParameters(opt.initial_model); +#print("Model %s loaded."%opt.initial_model); +path = os.path.join(opt.data_root, "*.mp4") + +all_videos = glob.glob(path) + +prog_bar = tqdm(range(len(all_videos))) +avg_confidence = 0. +avg_min_distance = 0. 
+ + +for videofile_idx in prog_bar: + videofile = all_videos[videofile_idx] + offset, confidence, min_distance = s.evaluate(opt, videofile=videofile) + avg_confidence += confidence + avg_min_distance += min_distance + prog_bar.set_description('Avg Confidence: {}, Avg Minimum Dist: {}'.format(round(avg_confidence / (videofile_idx + 1), 3), round(avg_min_distance / (videofile_idx + 1), 3))) + prog_bar.refresh() + +print ('Average Confidence: {}'.format(avg_confidence/len(all_videos))) +print ('Average Minimum Distance: {}'.format(avg_min_distance/len(all_videos))) + + + diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/calculate_scores_real_videos.py b/Wav2Lip-master/evaluation/syncnet_python-master/calculate_scores_real_videos.py new file mode 100644 index 00000000..09622584 --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/calculate_scores_real_videos.py @@ -0,0 +1,45 @@ +#!/usr/bin/python +#-*- coding: utf-8 -*- + +import time, pdb, argparse, subprocess, pickle, os, gzip, glob + +from SyncNetInstance_calc_scores import * + +# ==================== PARSE ARGUMENT ==================== + +parser = argparse.ArgumentParser(description = "SyncNet"); +parser.add_argument('--initial_model', type=str, default="data/syncnet_v2.model", help=''); +parser.add_argument('--batch_size', type=int, default='20', help=''); +parser.add_argument('--vshift', type=int, default='15', help=''); +parser.add_argument('--data_dir', type=str, default='data/work', help=''); +parser.add_argument('--videofile', type=str, default='', help=''); +parser.add_argument('--reference', type=str, default='', help=''); +opt = parser.parse_args(); + +setattr(opt,'avi_dir',os.path.join(opt.data_dir,'pyavi')) +setattr(opt,'tmp_dir',os.path.join(opt.data_dir,'pytmp')) +setattr(opt,'work_dir',os.path.join(opt.data_dir,'pywork')) +setattr(opt,'crop_dir',os.path.join(opt.data_dir,'pycrop')) + + +# ==================== LOAD MODEL AND FILE LIST ==================== + +s = 
SyncNetInstance(); + +s.loadParameters(opt.initial_model); +#print("Model %s loaded."%opt.initial_model); + +flist = glob.glob(os.path.join(opt.crop_dir,opt.reference,'0*.avi')) +flist.sort() + +# ==================== GET OFFSETS ==================== + +dists = [] +for idx, fname in enumerate(flist): + offset, conf, dist = s.evaluate(opt,videofile=fname) + print (str(dist)+" "+str(conf)) + +# ==================== PRINT RESULTS TO FILE ==================== + +#with open(os.path.join(opt.work_dir,opt.reference,'activesd.pckl'), 'wb') as fil: +# pickle.dump(dists, fil) diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/calculate_scores_real_videos.sh b/Wav2Lip-master/evaluation/syncnet_python-master/calculate_scores_real_videos.sh new file mode 100644 index 00000000..4a45cd56 --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/calculate_scores_real_videos.sh @@ -0,0 +1,8 @@ +rm all_scores.txt +yourfilenames=`ls $1` + +for eachfile in $yourfilenames +do + python run_pipeline.py --videofile $1/$eachfile --reference wav2lip --data_dir tmp_dir + python calculate_scores_real_videos.py --videofile $1/$eachfile --reference wav2lip --data_dir tmp_dir >> all_scores.txt +done diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/demo_feature.py b/Wav2Lip-master/evaluation/syncnet_python-master/demo_feature.py new file mode 100644 index 00000000..e3bd290e --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/demo_feature.py @@ -0,0 +1,32 @@ +#!/usr/bin/python +#-*- coding: utf-8 -*- + +import time, pdb, argparse, subprocess + +from SyncNetInstance import * + +# ==================== LOAD PARAMS ==================== + + +parser = argparse.ArgumentParser(description = "SyncNet"); + +parser.add_argument('--initial_model', type=str, default="data/syncnet_v2.model", help=''); +parser.add_argument('--batch_size', type=int, default='20', help=''); +parser.add_argument('--vshift', type=int, default='15', help=''); 
+parser.add_argument('--videofile', type=str, default="data/example.avi", help=''); +parser.add_argument('--tmp_dir', type=str, default="data", help=''); +parser.add_argument('--save_as', type=str, default="data/features.pt", help=''); + +opt = parser.parse_args(); + + +# ==================== RUN EVALUATION ==================== + +s = SyncNetInstance(); + +s.loadParameters(opt.initial_model); +print("Model %s loaded."%opt.initial_model); + +feats = s.extract_feature(opt, videofile=opt.videofile) + +torch.save(feats, opt.save_as) diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/demo_syncnet.py b/Wav2Lip-master/evaluation/syncnet_python-master/demo_syncnet.py new file mode 100644 index 00000000..01c25a6f --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/demo_syncnet.py @@ -0,0 +1,30 @@ +#!/usr/bin/python +#-*- coding: utf-8 -*- + +import time, pdb, argparse, subprocess + +from SyncNetInstance import * + +# ==================== LOAD PARAMS ==================== + + +parser = argparse.ArgumentParser(description = "SyncNet"); + +parser.add_argument('--initial_model', type=str, default="data/syncnet_v2.model", help=''); +parser.add_argument('--batch_size', type=int, default='20', help=''); +parser.add_argument('--vshift', type=int, default='15', help=''); +parser.add_argument('--videofile', type=str, default="data/example.avi", help=''); +parser.add_argument('--tmp_dir', type=str, default="data/work/pytmp", help=''); +parser.add_argument('--reference', type=str, default="demo", help=''); + +opt = parser.parse_args(); + + +# ==================== RUN EVALUATION ==================== + +s = SyncNetInstance(); + +s.loadParameters(opt.initial_model); +print("Model %s loaded."%opt.initial_model); + +s.evaluate(opt, videofile=opt.videofile) diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/detectors/README.md b/Wav2Lip-master/evaluation/syncnet_python-master/detectors/README.md new file mode 100644 index 00000000..f5a8d4fe --- /dev/null 
+++ b/Wav2Lip-master/evaluation/syncnet_python-master/detectors/README.md @@ -0,0 +1,3 @@ +# Face detector + +This face detector is adapted from `https://github.com/cs-giung/face-detection-pytorch`. diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/detectors/__init__.py b/Wav2Lip-master/evaluation/syncnet_python-master/detectors/__init__.py new file mode 100644 index 00000000..059d49bf --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/detectors/__init__.py @@ -0,0 +1 @@ +from .s3fd import S3FD \ No newline at end of file diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/detectors/s3fd/__init__.py b/Wav2Lip-master/evaluation/syncnet_python-master/detectors/s3fd/__init__.py new file mode 100644 index 00000000..d7f35e05 --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/detectors/s3fd/__init__.py @@ -0,0 +1,61 @@ +import time +import numpy as np +import cv2 +import torch +from torchvision import transforms +from .nets import S3FDNet +from .box_utils import nms_ + +PATH_WEIGHT = './detectors/s3fd/weights/sfd_face.pth' +img_mean = np.array([104., 117., 123.])[:, np.newaxis, np.newaxis].astype('float32') + + +class S3FD(): + + def __init__(self, device='cuda'): + + tstamp = time.time() + self.device = device + + print('[S3FD] loading with', self.device) + self.net = S3FDNet(device=self.device).to(self.device) + state_dict = torch.load(PATH_WEIGHT, map_location=self.device) + self.net.load_state_dict(state_dict) + self.net.eval() + print('[S3FD] finished loading (%.4f sec)' % (time.time() - tstamp)) + + def detect_faces(self, image, conf_th=0.8, scales=[1]): + + w, h = image.shape[1], image.shape[0] + + bboxes = np.empty(shape=(0, 5)) + + with torch.no_grad(): + for s in scales: + scaled_img = cv2.resize(image, dsize=(0, 0), fx=s, fy=s, interpolation=cv2.INTER_LINEAR) + + scaled_img = np.swapaxes(scaled_img, 1, 2) + scaled_img = np.swapaxes(scaled_img, 1, 0) + scaled_img = scaled_img[[2, 1, 0], :, :] + scaled_img = 
scaled_img.astype('float32') + scaled_img -= img_mean + scaled_img = scaled_img[[2, 1, 0], :, :] + x = torch.from_numpy(scaled_img).unsqueeze(0).to(self.device) + y = self.net(x) + + detections = y.data + scale = torch.Tensor([w, h, w, h]) + + for i in range(detections.size(1)): + j = 0 + while detections[0, i, j, 0] > conf_th: + score = detections[0, i, j, 0] + pt = (detections[0, i, j, 1:] * scale).cpu().numpy() + bbox = (pt[0], pt[1], pt[2], pt[3], score) + bboxes = np.vstack((bboxes, bbox)) + j += 1 + + keep = nms_(bboxes, 0.1) + bboxes = bboxes[keep] + + return bboxes diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/detectors/s3fd/box_utils.py b/Wav2Lip-master/evaluation/syncnet_python-master/detectors/s3fd/box_utils.py new file mode 100644 index 00000000..1bf4be2c --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/detectors/s3fd/box_utils.py @@ -0,0 +1,217 @@ +import numpy as np +from itertools import product as product +import torch +from torch.autograd import Function + + +def nms_(dets, thresh): + """ + Courtesy of Ross Girshick + [https://github.com/rbgirshick/py-faster-rcnn/blob/master/lib/nms/py_cpu_nms.py] + """ + x1 = dets[:, 0] + y1 = dets[:, 1] + x2 = dets[:, 2] + y2 = dets[:, 3] + scores = dets[:, 4] + + areas = (x2 - x1) * (y2 - y1) + order = scores.argsort()[::-1] + + keep = [] + while order.size > 0: + i = order[0] + keep.append(int(i)) + xx1 = np.maximum(x1[i], x1[order[1:]]) + yy1 = np.maximum(y1[i], y1[order[1:]]) + xx2 = np.minimum(x2[i], x2[order[1:]]) + yy2 = np.minimum(y2[i], y2[order[1:]]) + + w = np.maximum(0.0, xx2 - xx1) + h = np.maximum(0.0, yy2 - yy1) + inter = w * h + ovr = inter / (areas[i] + areas[order[1:]] - inter) + + inds = np.where(ovr <= thresh)[0] + order = order[inds + 1] + + return np.array(keep).astype(int) + + +def decode(loc, priors, variances): + """Decode locations from predictions using priors to undo + the encoding we did for offset regression at train time. 
+ Args: + loc (tensor): location predictions for loc layers, + Shape: [num_priors,4] + priors (tensor): Prior boxes in center-offset form. + Shape: [num_priors,4]. + variances: (list[float]) Variances of priorboxes + Return: + decoded bounding box predictions + """ + + boxes = torch.cat(( + priors[:, :2] + loc[:, :2] * variances[0] * priors[:, 2:], + priors[:, 2:] * torch.exp(loc[:, 2:] * variances[1])), 1) + boxes[:, :2] -= boxes[:, 2:] / 2 + boxes[:, 2:] += boxes[:, :2] + return boxes + + +def nms(boxes, scores, overlap=0.5, top_k=200): + """Apply non-maximum suppression at test time to avoid detecting too many + overlapping bounding boxes for a given object. + Args: + boxes: (tensor) The location preds for the img, Shape: [num_priors,4]. + scores: (tensor) The class predscores for the img, Shape:[num_priors]. + overlap: (float) The overlap thresh for suppressing unnecessary boxes. + top_k: (int) The Maximum number of box preds to consider. + Return: + The indices of the kept boxes with respect to num_priors. 
+ """ + + keep = scores.new(scores.size(0)).zero_().long() + if boxes.numel() == 0: + return keep, 0 + x1 = boxes[:, 0] + y1 = boxes[:, 1] + x2 = boxes[:, 2] + y2 = boxes[:, 3] + area = torch.mul(x2 - x1, y2 - y1) + v, idx = scores.sort(0) # sort in ascending order + # I = I[v >= 0.01] + idx = idx[-top_k:] # indices of the top-k largest vals + xx1 = boxes.new() + yy1 = boxes.new() + xx2 = boxes.new() + yy2 = boxes.new() + w = boxes.new() + h = boxes.new() + + # keep = torch.Tensor() + count = 0 + while idx.numel() > 0: + i = idx[-1] # index of current largest val + # keep.append(i) + keep[count] = i + count += 1 + if idx.size(0) == 1: + break + idx = idx[:-1] # remove kept element from view + # load bboxes of next highest vals + torch.index_select(x1, 0, idx, out=xx1) + torch.index_select(y1, 0, idx, out=yy1) + torch.index_select(x2, 0, idx, out=xx2) + torch.index_select(y2, 0, idx, out=yy2) + # store element-wise max with next highest score + xx1 = torch.clamp(xx1, min=x1[i]) + yy1 = torch.clamp(yy1, min=y1[i]) + xx2 = torch.clamp(xx2, max=x2[i]) + yy2 = torch.clamp(yy2, max=y2[i]) + w.resize_as_(xx2) + h.resize_as_(yy2) + w = xx2 - xx1 + h = yy2 - yy1 + # check sizes of xx1 and xx2.. 
after each iteration + w = torch.clamp(w, min=0.0) + h = torch.clamp(h, min=0.0) + inter = w * h + # IoU = i / (area(a) + area(b) - i) + rem_areas = torch.index_select(area, 0, idx) # load remaining areas) + union = (rem_areas - inter) + area[i] + IoU = inter / union # store result in iou + # keep only elements with an IoU <= overlap + idx = idx[IoU.le(overlap)] + return keep, count + + +class Detect(object): + + def __init__(self, num_classes=2, + top_k=750, nms_thresh=0.3, conf_thresh=0.05, + variance=[0.1, 0.2], nms_top_k=5000): + + self.num_classes = num_classes + self.top_k = top_k + self.nms_thresh = nms_thresh + self.conf_thresh = conf_thresh + self.variance = variance + self.nms_top_k = nms_top_k + + def forward(self, loc_data, conf_data, prior_data): + + num = loc_data.size(0) + num_priors = prior_data.size(0) + + conf_preds = conf_data.view(num, num_priors, self.num_classes).transpose(2, 1) + batch_priors = prior_data.view(-1, num_priors, 4).expand(num, num_priors, 4) + batch_priors = batch_priors.contiguous().view(-1, 4) + + decoded_boxes = decode(loc_data.view(-1, 4), batch_priors, self.variance) + decoded_boxes = decoded_boxes.view(num, num_priors, 4) + + output = torch.zeros(num, self.num_classes, self.top_k, 5) + + for i in range(num): + boxes = decoded_boxes[i].clone() + conf_scores = conf_preds[i].clone() + + for cl in range(1, self.num_classes): + c_mask = conf_scores[cl].gt(self.conf_thresh) + scores = conf_scores[cl][c_mask] + + if scores.dim() == 0: + continue + l_mask = c_mask.unsqueeze(1).expand_as(boxes) + boxes_ = boxes[l_mask].view(-1, 4) + ids, count = nms(boxes_, scores, self.nms_thresh, self.nms_top_k) + count = count if count < self.top_k else self.top_k + + output[i, cl, :count] = torch.cat((scores[ids[:count]].unsqueeze(1), boxes_[ids[:count]]), 1) + + return output + + +class PriorBox(object): + + def __init__(self, input_size, feature_maps, + variance=[0.1, 0.2], + min_sizes=[16, 32, 64, 128, 256, 512], + steps=[4, 8, 16, 32, 64, 
128], + clip=False): + + super(PriorBox, self).__init__() + + self.imh = input_size[0] + self.imw = input_size[1] + self.feature_maps = feature_maps + + self.variance = variance + self.min_sizes = min_sizes + self.steps = steps + self.clip = clip + + def forward(self): + mean = [] + for k, fmap in enumerate(self.feature_maps): + feath = fmap[0] + featw = fmap[1] + for i, j in product(range(feath), range(featw)): + f_kw = self.imw / self.steps[k] + f_kh = self.imh / self.steps[k] + + cx = (j + 0.5) / f_kw + cy = (i + 0.5) / f_kh + + s_kw = self.min_sizes[k] / self.imw + s_kh = self.min_sizes[k] / self.imh + + mean += [cx, cy, s_kw, s_kh] + + output = torch.FloatTensor(mean).view(-1, 4) + + if self.clip: + output.clamp_(max=1, min=0) + + return output diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/detectors/s3fd/nets.py b/Wav2Lip-master/evaluation/syncnet_python-master/detectors/s3fd/nets.py new file mode 100644 index 00000000..85b5c82c --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/detectors/s3fd/nets.py @@ -0,0 +1,174 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.nn.init as init +from .box_utils import Detect, PriorBox + + +class L2Norm(nn.Module): + + def __init__(self, n_channels, scale): + super(L2Norm, self).__init__() + self.n_channels = n_channels + self.gamma = scale or None + self.eps = 1e-10 + self.weight = nn.Parameter(torch.Tensor(self.n_channels)) + self.reset_parameters() + + def reset_parameters(self): + init.constant_(self.weight, self.gamma) + + def forward(self, x): + norm = x.pow(2).sum(dim=1, keepdim=True).sqrt() + self.eps + x = torch.div(x, norm) + out = self.weight.unsqueeze(0).unsqueeze(2).unsqueeze(3).expand_as(x) * x + return out + + +class S3FDNet(nn.Module): + + def __init__(self, device='cuda'): + super(S3FDNet, self).__init__() + self.device = device + + self.vgg = nn.ModuleList([ + nn.Conv2d(3, 64, 3, 1, padding=1), + nn.ReLU(inplace=True), + nn.Conv2d(64, 64, 
3, 1, padding=1), + nn.ReLU(inplace=True), + nn.MaxPool2d(2, 2), + + nn.Conv2d(64, 128, 3, 1, padding=1), + nn.ReLU(inplace=True), + nn.Conv2d(128, 128, 3, 1, padding=1), + nn.ReLU(inplace=True), + nn.MaxPool2d(2, 2), + + nn.Conv2d(128, 256, 3, 1, padding=1), + nn.ReLU(inplace=True), + nn.Conv2d(256, 256, 3, 1, padding=1), + nn.ReLU(inplace=True), + nn.Conv2d(256, 256, 3, 1, padding=1), + nn.ReLU(inplace=True), + nn.MaxPool2d(2, 2, ceil_mode=True), + + nn.Conv2d(256, 512, 3, 1, padding=1), + nn.ReLU(inplace=True), + nn.Conv2d(512, 512, 3, 1, padding=1), + nn.ReLU(inplace=True), + nn.Conv2d(512, 512, 3, 1, padding=1), + nn.ReLU(inplace=True), + nn.MaxPool2d(2, 2), + + nn.Conv2d(512, 512, 3, 1, padding=1), + nn.ReLU(inplace=True), + nn.Conv2d(512, 512, 3, 1, padding=1), + nn.ReLU(inplace=True), + nn.Conv2d(512, 512, 3, 1, padding=1), + nn.ReLU(inplace=True), + nn.MaxPool2d(2, 2), + + nn.Conv2d(512, 1024, 3, 1, padding=6, dilation=6), + nn.ReLU(inplace=True), + nn.Conv2d(1024, 1024, 1, 1), + nn.ReLU(inplace=True), + ]) + + self.L2Norm3_3 = L2Norm(256, 10) + self.L2Norm4_3 = L2Norm(512, 8) + self.L2Norm5_3 = L2Norm(512, 5) + + self.extras = nn.ModuleList([ + nn.Conv2d(1024, 256, 1, 1), + nn.Conv2d(256, 512, 3, 2, padding=1), + nn.Conv2d(512, 128, 1, 1), + nn.Conv2d(128, 256, 3, 2, padding=1), + ]) + + self.loc = nn.ModuleList([ + nn.Conv2d(256, 4, 3, 1, padding=1), + nn.Conv2d(512, 4, 3, 1, padding=1), + nn.Conv2d(512, 4, 3, 1, padding=1), + nn.Conv2d(1024, 4, 3, 1, padding=1), + nn.Conv2d(512, 4, 3, 1, padding=1), + nn.Conv2d(256, 4, 3, 1, padding=1), + ]) + + self.conf = nn.ModuleList([ + nn.Conv2d(256, 4, 3, 1, padding=1), + nn.Conv2d(512, 2, 3, 1, padding=1), + nn.Conv2d(512, 2, 3, 1, padding=1), + nn.Conv2d(1024, 2, 3, 1, padding=1), + nn.Conv2d(512, 2, 3, 1, padding=1), + nn.Conv2d(256, 2, 3, 1, padding=1), + ]) + + self.softmax = nn.Softmax(dim=-1) + self.detect = Detect() + + def forward(self, x): + size = x.size()[2:] + sources = list() + loc = list() + conf = 
list() + + for k in range(16): + x = self.vgg[k](x) + s = self.L2Norm3_3(x) + sources.append(s) + + for k in range(16, 23): + x = self.vgg[k](x) + s = self.L2Norm4_3(x) + sources.append(s) + + for k in range(23, 30): + x = self.vgg[k](x) + s = self.L2Norm5_3(x) + sources.append(s) + + for k in range(30, len(self.vgg)): + x = self.vgg[k](x) + sources.append(x) + + # apply extra layers and cache source layer outputs + for k, v in enumerate(self.extras): + x = F.relu(v(x), inplace=True) + if k % 2 == 1: + sources.append(x) + + # apply multibox head to source layers + loc_x = self.loc[0](sources[0]) + conf_x = self.conf[0](sources[0]) + + max_conf, _ = torch.max(conf_x[:, 0:3, :, :], dim=1, keepdim=True) + conf_x = torch.cat((max_conf, conf_x[:, 3:, :, :]), dim=1) + + loc.append(loc_x.permute(0, 2, 3, 1).contiguous()) + conf.append(conf_x.permute(0, 2, 3, 1).contiguous()) + + for i in range(1, len(sources)): + x = sources[i] + conf.append(self.conf[i](x).permute(0, 2, 3, 1).contiguous()) + loc.append(self.loc[i](x).permute(0, 2, 3, 1).contiguous()) + + features_maps = [] + for i in range(len(loc)): + feat = [] + feat += [loc[i].size(1), loc[i].size(2)] + features_maps += [feat] + + loc = torch.cat([o.view(o.size(0), -1) for o in loc], 1) + conf = torch.cat([o.view(o.size(0), -1) for o in conf], 1) + + with torch.no_grad(): + self.priorbox = PriorBox(size, features_maps) + self.priors = self.priorbox.forward() + + output = self.detect.forward( + loc.view(loc.size(0), -1, 4), + self.softmax(conf.view(conf.size(0), -1, 2)), + self.priors.type(type(x.data)).to(self.device) + ) + + return output diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/download_model.sh b/Wav2Lip-master/evaluation/syncnet_python-master/download_model.sh new file mode 100644 index 00000000..3e3a9dc2 --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/download_model.sh @@ -0,0 +1,9 @@ +# SyncNet model + +mkdir data +wget 
http://www.robots.ox.ac.uk/~vgg/software/lipsync/data/syncnet_v2.model -O data/syncnet_v2.model +wget http://www.robots.ox.ac.uk/~vgg/software/lipsync/data/example.avi -O data/example.avi + +# For the pre-processing pipeline +mkdir detectors/s3fd/weights +wget https://www.robots.ox.ac.uk/~vgg/software/lipsync/data/sfd_face.pth -O detectors/s3fd/weights/sfd_face.pth \ No newline at end of file diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/requirememts.txt b/Wav2Lip-master/evaluation/syncnet_python-master/requirememts.txt new file mode 100644 index 00000000..ee3a418d --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/requirememts.txt @@ -0,0 +1,31 @@ +# packages in environment at H:\anaconda3\envs\syncnet: +# +# Name Version Build Channel +appdirs 1.4.4 pypi_0 pypi +ca-certificates 2024.11.26 haa95532_0 +certifi 2024.12.14 pypi_0 pypi +charset-normalizer 3.4.0 pypi_0 pypi +click 8.1.7 pypi_0 pypi +colorama 0.4.6 pypi_0 pypi +idna 3.10 pypi_0 pypi +numpy 1.22.4 pypi_0 pypi +opencv-contrib-python 4.10.0.84 pypi_0 pypi +openssl 1.1.1w h2bbff1b_0 +pillow 11.0.0 pypi_0 pypi +pip 24.2 py39haa95532_0 +python 3.9.0 h6244533_2 +python-speech-features 0.6 pypi_0 pypi +requests 2.32.3 pypi_0 pypi +scenedetect 0.6 pypi_0 pypi +scipy 1.13.1 pypi_0 pypi +setuptools 75.1.0 py39haa95532_0 +sqlite 3.45.3 h2bbff1b_0 +torch 1.11.0 pypi_0 pypi +torchvision 0.12.0 pypi_0 pypi +tqdm 4.67.1 pypi_0 pypi +typing-extensions 4.12.2 pypi_0 pypi +tzdata 2024b h04d1e81_0 +urllib3 2.2.3 pypi_0 pypi +vc 14.40 haa95532_2 +vs2015_runtime 14.42.34433 h9531ae6_2 +wheel 0.44.0 py39haa95532_0 diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/requirements.txt b/Wav2Lip-master/evaluation/syncnet_python-master/requirements.txt new file mode 100644 index 00000000..89197409 --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/requirements.txt @@ -0,0 +1,7 @@ +torch>=1.4.0 +torchvision>=0.5.0 +numpy>=1.18.1 +scipy>=1.2.1 +scenedetect==0.5.1 
+opencv-contrib-python +python_speech_features diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/run_pipeline.py b/Wav2Lip-master/evaluation/syncnet_python-master/run_pipeline.py new file mode 100644 index 00000000..f5fc22e0 --- /dev/null +++ b/Wav2Lip-master/evaluation/syncnet_python-master/run_pipeline.py @@ -0,0 +1,322 @@ +#!/usr/bin/python + +import sys, time, os, pdb, argparse, pickle, subprocess, glob, cv2 +import numpy as np +from shutil import rmtree + +import scenedetect +from scenedetect.video_manager import VideoManager +from scenedetect.scene_manager import SceneManager +from scenedetect.frame_timecode import FrameTimecode +from scenedetect.stats_manager import StatsManager +from scenedetect.detectors import ContentDetector + +from scipy.interpolate import interp1d +from scipy.io import wavfile +from scipy import signal + +from detectors import S3FD + +# ========== ========== ========== ========== +# # PARSE ARGS +# ========== ========== ========== ========== + +parser = argparse.ArgumentParser(description = "FaceTracker"); +parser.add_argument('--data_dir', type=str, default='data/work', help='Output direcotry'); +parser.add_argument('--videofile', type=str, default='', help='Input video file'); +parser.add_argument('--reference', type=str, default='', help='Video reference'); +parser.add_argument('--facedet_scale', type=float, default=0.25, help='Scale factor for face detection'); +parser.add_argument('--crop_scale', type=float, default=0.40, help='Scale bounding box'); +parser.add_argument('--min_track', type=int, default=100, help='Minimum facetrack duration'); +parser.add_argument('--frame_rate', type=int, default=25, help='Frame rate'); +parser.add_argument('--num_failed_det', type=int, default=25, help='Number of missed detections allowed before tracking is stopped'); +parser.add_argument('--min_face_size', type=int, default=100, help='Minimum face size in pixels'); +opt = parser.parse_args(); + 
# Derived working directories under --data_dir; each pipeline stage writes
# into its own sub-folder (attached onto the parsed options object).
setattr(opt,'avi_dir',os.path.join(opt.data_dir,'pyavi'))        # re-encoded 25 fps video + extracted audio
setattr(opt,'tmp_dir',os.path.join(opt.data_dir,'pytmp'))        # scratch space (cropped audio segments)
setattr(opt,'work_dir',os.path.join(opt.data_dir,'pywork'))      # pickled results (faces, scenes, tracks)
setattr(opt,'crop_dir',os.path.join(opt.data_dir,'pycrop'))      # cropped face-track AVI clips
setattr(opt,'frames_dir',os.path.join(opt.data_dir,'pyframes'))  # per-frame JPEGs extracted from the video

# ========== ========== ========== ==========
# # IOU FUNCTION
# ========== ========== ========== ==========

def bb_intersection_over_union(boxA, boxB):
    """Return the intersection-over-union of two axis-aligned boxes.

    Both boxes are (x1, y1, x2, y2) sequences. Result is in [0, 1];
    0 when the boxes do not overlap.
    """
    # Corners of the intersection rectangle.
    xA = max(boxA[0], boxB[0])
    yA = max(boxA[1], boxB[1])
    xB = min(boxA[2], boxB[2])
    yB = min(boxA[3], boxB[3])

    # max(0, .) clamps the area to zero when the boxes are disjoint.
    interArea = max(0, xB - xA) * max(0, yB - yA)

    boxAArea = (boxA[2] - boxA[0]) * (boxA[3] - boxA[1])
    boxBArea = (boxB[2] - boxB[0]) * (boxB[3] - boxB[1])

    # Union area = A + B - intersection.
    iou = interArea / float(boxAArea + boxBArea - interArea)

    return iou

# ========== ========== ========== ==========
# # FACE TRACKING
# ========== ========== ========== ==========

def track_shot(opt,scenefaces):
    """Greedily link per-frame face detections into face tracks.

    ``scenefaces`` is a list (one entry per frame) of face dicts holding
    'frame' and 'bbox'. Detections are consumed (removed from the input
    lists) as they are assigned to a track. Tracks shorter than
    ``opt.min_track`` frames or smaller than ``opt.min_face_size`` pixels
    are discarded; kept tracks have their bounding boxes linearly
    interpolated over frames with missed detections.

    Returns a list of ``{'frame': ndarray, 'bbox': ndarray[N,4]}`` dicts.
    """
    iouThres = 0.5  # Minimum IOU between consecutive face detections
    tracks = []

    while True:
        track = []
        for framefaces in scenefaces:
            for face in framefaces:
                if track == []:
                    # Seed a new track with the first unassigned detection.
                    track.append(face)
                    framefaces.remove(face)
                elif face['frame'] - track[-1]['frame'] <= opt.num_failed_det:
                    # Within the allowed detection gap: extend the track if
                    # this box overlaps the track's last box enough.
                    iou = bb_intersection_over_union(face['bbox'], track[-1]['bbox'])
                    if iou > iouThres:
                        track.append(face)
                        framefaces.remove(face)
                        continue
                else:
                    # Gap exceeds num_failed_det frames: this track cannot
                    # be extended any further.
                    break

        if track == []:
            # No unassigned detections left: all tracks have been built.
            break
        elif len(track) > opt.min_track:

            framenum = np.array([ f['frame'] for f in track ])
            bboxes = np.array([np.array(f['bbox']) for f in track])

            # Fill missed frames by linearly interpolating each of the four
            # bbox coordinates over the track's full frame range.
            frame_i = np.arange(framenum[0],framenum[-1]+1)

            bboxes_i = []
            for ij in range(0,4):
                interpfn = interp1d(framenum, bboxes[:,ij])
                bboxes_i.append(interpfn(frame_i))
            bboxes_i = np.stack(bboxes_i, axis=1)

            # Keep only tracks whose mean box side exceeds the minimum face size.
            if max(np.mean(bboxes_i[:,2]-bboxes_i[:,0]), np.mean(bboxes_i[:,3]-bboxes_i[:,1])) > opt.min_face_size:
                tracks.append({'frame':frame_i,'bbox':bboxes_i})

    return tracks

# ========== ========== ========== ==========
# # VIDEO CROP AND SAVE
# ========== ========== ========== ==========

def crop_video(opt,track,cropfile):
    """Crop one face track to a 224x224 AVI clip with its audio segment.

    Writes ``<cropfile>t.avi`` (video only), cuts the matching audio span
    out of the reference audio with ffmpeg, muxes both into
    ``<cropfile>.avi``, then removes the temporary video-only file.

    Returns ``{'track': track, 'proc_track': dets}`` where ``dets`` holds
    the median-filtered crop centre (x, y) and half-size (s) per frame.
    """
    flist = glob.glob(os.path.join(opt.frames_dir,opt.reference,'*.jpg'))
    flist.sort()

    fourcc = cv2.VideoWriter_fourcc(*'XVID')
    vOut = cv2.VideoWriter(cropfile+'t.avi', fourcc, opt.frame_rate, (224,224))

    dets = {'x':[], 'y':[], 's':[]}

    for det in track['bbox']:
        # Half of the larger box side is the crop "size".
        dets['s'].append(max((det[3]-det[1]),(det[2]-det[0]))/2)
        dets['y'].append((det[1]+det[3])/2)  # crop center y
        dets['x'].append((det[0]+det[2])/2)  # crop center x

    # Smooth detections (median filter suppresses jitter in the crop window).
    dets['s'] = signal.medfilt(dets['s'],kernel_size=13)
    dets['x'] = signal.medfilt(dets['x'],kernel_size=13)
    dets['y'] = signal.medfilt(dets['y'],kernel_size=13)

    for fidx, frame in enumerate(track['frame']):

        cs = opt.crop_scale

        bs = dets['s'][fidx]    # Detection box size
        bsi = int(bs*(1+2*cs))  # Pad videos by this amount

        image = cv2.imread(flist[frame])

        # Pad the whole frame so crops near the border stay inside bounds
        # (constant grey value 110).
        frame = np.pad(image,((bsi,bsi),(bsi,bsi),(0,0)), 'constant', constant_values=(110,110))
        my = dets['y'][fidx]+bsi  # BBox center Y (in padded coordinates)
        mx = dets['x'][fidx]+bsi  # BBox center X (in padded coordinates)

        face = frame[int(my-bs):int(my+bs*(1+2*cs)),int(mx-bs*(1+cs)):int(mx+bs*(1+cs))]

        vOut.write(cv2.resize(face,(224,224)))

    audiotmp = os.path.join(opt.tmp_dir,opt.reference,'audio.wav')
    # Track frame range converted to seconds at the pipeline frame rate.
    audiostart = (track['frame'][0])/opt.frame_rate
    audioend = (track['frame'][-1]+1)/opt.frame_rate

    vOut.release()

    # ========== CROP AUDIO FILE ==========

    command = ("ffmpeg -y -i %s -ss %.3f -to %.3f %s" % (os.path.join(opt.avi_dir,opt.reference,'audio.wav'),audiostart,audioend,audiotmp))
    output = subprocess.call(command, shell=True, stdout=None)

    if output != 0:
        # Debug-style error handling: drop into pdb on ffmpeg failure.
        pdb.set_trace()

    sample_rate, audio = wavfile.read(audiotmp)

    # ========== COMBINE AUDIO AND VIDEO FILES ==========

    command = ("ffmpeg -y -i %st.avi -i %s -c:v copy -c:a copy %s.avi" % (cropfile,audiotmp,cropfile))
    output = subprocess.call(command, shell=True, stdout=None)

    if output != 0:
        pdb.set_trace()

    print('Written %s'%cropfile)

    os.remove(cropfile+'t.avi')

    print('Mean pos: x %.2f y %.2f s %.2f'%(np.mean(dets['x']),np.mean(dets['y']),np.mean(dets['s'])))

    return {'track':track, 'proc_track':dets}

# ========== ========== ========== ==========
# # FACE DETECTION
# ========== ========== ========== ==========

def inference_video(opt):
    """Run the S3FD face detector over every extracted frame.

    Saves the per-frame detection list to ``pywork/<reference>/faces.pckl``
    and returns it. Each detection is ``{'frame', 'bbox', 'conf'}``.
    Requires a CUDA device (S3FD is constructed with device='cuda').
    """
    DET = S3FD(device='cuda')

    flist = glob.glob(os.path.join(opt.frames_dir,opt.reference,'*.jpg'))
    flist.sort()

    dets = []

    for fidx, fname in enumerate(flist):

        start_time = time.time()

        image = cv2.imread(fname)

        # S3FD expects RGB; OpenCV loads BGR.
        image_np = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        bboxes = DET.detect_faces(image_np, conf_th=0.9, scales=[opt.facedet_scale])

        dets.append([]);
        for bbox in bboxes:
            # bbox[:-1] is (x1, y1, x2, y2); bbox[-1] is the confidence score.
            dets[-1].append({'frame':fidx, 'bbox':(bbox[:-1]).tolist(), 'conf':bbox[-1]})

        elapsed_time = time.time() - start_time

        print('%s-%05d; %d dets; %.2f Hz' % (os.path.join(opt.avi_dir,opt.reference,'video.avi'),fidx,len(dets[-1]),(1/elapsed_time)))

    savepath = os.path.join(opt.work_dir,opt.reference,'faces.pckl')

    with open(savepath, 'wb') as fil:
        pickle.dump(dets, fil)

    return dets

# ========== ========== ========== ==========
# # SCENE DETECTION
# ========== ========== ========== ==========

def scene_detect(opt):
    """Detect shot boundaries with PySceneDetect's ContentDetector.

    Saves the scene list to ``pywork/<reference>/scene.pckl`` and returns
    it. If no cut is found, the whole video is treated as a single scene.
    """
    video_manager = VideoManager([os.path.join(opt.avi_dir,opt.reference,'video.avi')])
    stats_manager = StatsManager()
    scene_manager = SceneManager(stats_manager)
    # Add ContentDetector algorithm (constructor takes detector options like threshold).
    scene_manager.add_detector(ContentDetector())
    base_timecode = video_manager.get_base_timecode()

    # Downscale for speed (factor chosen automatically from the video size).
    video_manager.set_downscale_factor()

    video_manager.start()

    scene_manager.detect_scenes(frame_source=video_manager)

    scene_list = scene_manager.get_scene_list(base_timecode)

    savepath = os.path.join(opt.work_dir,opt.reference,'scene.pckl')

    if scene_list == []:
        # No cuts detected: fall back to one scene spanning the whole video.
        scene_list = [(video_manager.get_base_timecode(),video_manager.get_current_timecode())]

    with open(savepath, 'wb') as fil:
        pickle.dump(scene_list, fil)

    print('%s - scenes detected %d'%(os.path.join(opt.avi_dir,opt.reference,'video.avi'),len(scene_list)))

    return scene_list


# ========== ========== ========== ==========
# # EXECUTE DEMO
# ========== ========== ========== ==========

# ========== DELETE EXISTING DIRECTORIES ==========
# Start from a clean slate for this reference: remove any previous outputs.

if os.path.exists(os.path.join(opt.work_dir,opt.reference)):
    rmtree(os.path.join(opt.work_dir,opt.reference))

if os.path.exists(os.path.join(opt.crop_dir,opt.reference)):
    rmtree(os.path.join(opt.crop_dir,opt.reference))

if os.path.exists(os.path.join(opt.avi_dir,opt.reference)):
    rmtree(os.path.join(opt.avi_dir,opt.reference))

if os.path.exists(os.path.join(opt.frames_dir,opt.reference)):
    rmtree(os.path.join(opt.frames_dir,opt.reference))

if os.path.exists(os.path.join(opt.tmp_dir,opt.reference)):
    rmtree(os.path.join(opt.tmp_dir,opt.reference))

# ========== MAKE NEW DIRECTORIES ==========

os.makedirs(os.path.join(opt.work_dir,opt.reference))
os.makedirs(os.path.join(opt.crop_dir,opt.reference))
os.makedirs(os.path.join(opt.avi_dir,opt.reference))
os.makedirs(os.path.join(opt.frames_dir,opt.reference))
os.makedirs(os.path.join(opt.tmp_dir,opt.reference))

# ========== CONVERT VIDEO AND EXTRACT FRAMES ==========
# Re-encode the input to constant 25 fps AVI so frame indices map directly
# to timestamps for the later audio cropping.

command = ("ffmpeg -y -i %s -qscale:v 2 -async 1 -r 25 %s" % (opt.videofile,os.path.join(opt.avi_dir,opt.reference,'video.avi')))
output = subprocess.call(command, shell=True, stdout=None)
# Extract every frame of the 25 fps video as numbered JPEGs (%06d.jpg).
command = ("ffmpeg -y -i %s -qscale:v 2 -threads 1 -f image2 %s" % (os.path.join(opt.avi_dir,opt.reference,'video.avi'),os.path.join(opt.frames_dir,opt.reference,'%06d.jpg')))
output = subprocess.call(command, shell=True, stdout=None)

# Extract the audio track as 16 kHz mono 16-bit PCM WAV.
command = ("ffmpeg -y -i %s -ac 1 -vn -acodec pcm_s16le -ar 16000 %s" % (os.path.join(opt.avi_dir,opt.reference,'video.avi'),os.path.join(opt.avi_dir,opt.reference,'audio.wav')))
output = subprocess.call(command, shell=True, stdout=None)

# ========== FACE DETECTION ==========

faces = inference_video(opt)

# ========== SCENE DETECTION ==========

scene = scene_detect(opt)

# ========== FACE TRACKING ==========
# Track faces only within shots long enough to hold a minimum-length track.

alltracks = []
vidtracks = []

for shot in scene:

    if shot[1].frame_num - shot[0].frame_num >= opt.min_track :
        alltracks.extend(track_shot(opt,faces[shot[0].frame_num:shot[1].frame_num]))

# ========== FACE TRACK CROP ==========
# Write each track to pycrop/<reference>/00000.avi, 00001.avi, ...

for ii, track in enumerate(alltracks):
    vidtracks.append(crop_video(opt,track,os.path.join(opt.crop_dir,opt.reference,'%05d'%ii)))

# ========== SAVE RESULTS ==========

savepath = os.path.join(opt.work_dir,opt.reference,'tracks.pckl')

with open(savepath, 'wb') as fil:
    pickle.dump(vidtracks, fil)

rmtree(os.path.join(opt.tmp_dir,opt.reference))
diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/run_syncnet.py b/Wav2Lip-master/evaluation/syncnet_python-master/run_syncnet.py
new file mode 100644
index 00000000..45099fd6
--- /dev/null
+++ b/Wav2Lip-master/evaluation/syncnet_python-master/run_syncnet.py
@@ -0,0 +1,45 @@
#!/usr/bin/python
#-*- coding: utf-8 -*-

# Run the pretrained SyncNet model on every cropped face-track clip
# produced by run_pipeline.py and store the per-clip distance curves
# (used downstream to compute LSE-C / LSE-D scores).

import time, pdb, argparse, subprocess, pickle, os, gzip, glob

from SyncNetInstance import *

# ==================== PARSE ARGUMENT ====================

parser = argparse.ArgumentParser(description = "SyncNet");
parser.add_argument('--initial_model', type=str, default="data/syncnet_v2.model", help='');
# NOTE(review): the string defaults '20'/'15' below are converted through
# type=int by argparse (string defaults are parsed like command-line
# values), so they behave as the integers 20 and 15; plain int literals
# would be clearer.
parser.add_argument('--batch_size', type=int, default='20', help='');
parser.add_argument('--vshift', type=int, default='15', help='');
parser.add_argument('--data_dir', type=str, default='data/work', help='');
parser.add_argument('--videofile', type=str, default='', help='');
parser.add_argument('--reference', type=str, default='', help='');
opt = parser.parse_args();

# Same working-directory layout as run_pipeline.py.
setattr(opt,'avi_dir',os.path.join(opt.data_dir,'pyavi'))
setattr(opt,'tmp_dir',os.path.join(opt.data_dir,'pytmp'))
setattr(opt,'work_dir',os.path.join(opt.data_dir,'pywork'))
setattr(opt,'crop_dir',os.path.join(opt.data_dir,'pycrop'))


# ==================== LOAD MODEL AND FILE LIST ====================

s = SyncNetInstance();

s.loadParameters(opt.initial_model);
print("Model %s loaded."%opt.initial_model);

# All cropped track clips written by run_pipeline.py (00000.avi, 00001.avi, ...).
flist = glob.glob(os.path.join(opt.crop_dir,opt.reference,'0*.avi'))
flist.sort()

# ==================== GET OFFSETS ====================

dists = []
for idx, fname in enumerate(flist):
    offset, conf, dist = s.evaluate(opt,videofile=fname)
    dists.append(dist)

# ==================== PRINT RESULTS TO FILE ====================

# Distance curves are read back by run_visualise.py / the scoring scripts.
with open(os.path.join(opt.work_dir,opt.reference,'activesd.pckl'), 'wb') as fil:
    pickle.dump(dists, fil)
diff --git a/Wav2Lip-master/evaluation/syncnet_python-master/run_visualise.py b/Wav2Lip-master/evaluation/syncnet_python-master/run_visualise.py
new file mode 100644
index 00000000..85d89253
--- /dev/null
+++ b/Wav2Lip-master/evaluation/syncnet_python-master/run_visualise.py
@@ -0,0 +1,88 @@
#!/usr/bin/python
#-*- coding: utf-8 -*-

# Overlay the SyncNet confidence of each face track onto the original
# frames and mux the result with the extracted audio into video_out.avi.

import torch
import numpy
import time, pdb, argparse, subprocess, pickle, os, glob
import cv2

from scipy import signal

# ==================== PARSE ARGUMENT ====================

parser = argparse.ArgumentParser(description = "SyncNet");
parser.add_argument('--data_dir', type=str, default='data/work', help='');
parser.add_argument('--videofile', type=str, default='', help='');
parser.add_argument('--reference', type=str, default='', help='');
parser.add_argument('--frame_rate', type=int, default=25, help='Frame rate');
opt = parser.parse_args();

# Same working-directory layout as run_pipeline.py / run_syncnet.py.
setattr(opt,'avi_dir',os.path.join(opt.data_dir,'pyavi'))
setattr(opt,'tmp_dir',os.path.join(opt.data_dir,'pytmp'))
setattr(opt,'work_dir',os.path.join(opt.data_dir,'pywork'))
setattr(opt,'crop_dir',os.path.join(opt.data_dir,'pycrop'))
setattr(opt,'frames_dir',os.path.join(opt.data_dir,'pyframes'))

# ==================== LOAD FILES ====================

# encoding='latin1' keeps pickles written under Python 2 loadable here.
with open(os.path.join(opt.work_dir,opt.reference,'tracks.pckl'), 'rb') as fil:
    tracks = pickle.load(fil, encoding='latin1')

with open(os.path.join(opt.work_dir,opt.reference,'activesd.pckl'), 'rb') as fil:
    dists = pickle.load(fil, encoding='latin1')

flist = glob.glob(os.path.join(opt.frames_dir,opt.reference,'*.jpg'))
flist.sort()

# ==================== SMOOTH FACES ====================

# faces[f] collects, for frame f, every track visible in that frame along
# with its smoothed confidence and crop geometry.
faces = [[] for i in range(len(flist))]

for tidx, track in enumerate(tracks):

    # Pick the audio-video offset with the smallest mean distance, then use
    # the per-frame distances at that offset as the track's quality signal.
    mean_dists = numpy.mean(numpy.stack(dists[tidx],1),1)
    minidx = numpy.argmin(mean_dists,0)
    minval = mean_dists[minidx]

    fdist = numpy.stack([dist[minidx] for dist in dists[tidx]])
    fdist = numpy.pad(fdist, (3,3), 'constant', constant_values=10)

    # Confidence = median distance minus per-frame distance, median-filtered.
    fconf = numpy.median(mean_dists) - fdist
    fconfm = signal.medfilt(fconf,kernel_size=9)

    for fidx, frame in enumerate(track['track']['frame'].tolist()) :
        faces[frame].append({'track': tidx, 'conf':fconfm[fidx], 's':track['proc_track']['s'][fidx], 'x':track['proc_track']['x'][fidx], 'y':track['proc_track']['y'][fidx]})

# ==================== ADD DETECTIONS TO VIDEO ====================

first_image = cv2.imread(flist[0])

fw = first_image.shape[1]
fh = first_image.shape[0]

fourcc = cv2.VideoWriter_fourcc(*'XVID')
vOut = cv2.VideoWriter(os.path.join(opt.avi_dir,opt.reference,'video_only.avi'), fourcc, opt.frame_rate, (fw,fh))

for fidx, fname in enumerate(flist):

    image = cv2.imread(fname)

    for face in faces[fidx]:

        # Map confidence to a 0-255 colour channel value (clamped).
        clr = max(min(face['conf']*25,255),0)

        # Box colour shifts green (high conf) <-> red (low conf) in BGR.
        cv2.rectangle(image,(int(face['x']-face['s']),int(face['y']-face['s'])),(int(face['x']+face['s']),int(face['y']+face['s'])),(0,clr,255-clr),3)
        cv2.putText(image,'Track %d, Conf %.3f'%(face['track'],face['conf']), (int(face['x']-face['s']),int(face['y']-face['s'])),cv2.FONT_HERSHEY_SIMPLEX,0.5,(255,255,255),2)

    vOut.write(image)

    print('Frame %d'%fidx)

vOut.release()

# ========== COMBINE AUDIO AND VIDEO FILES ==========

command = ("ffmpeg -y -i %s -i %s -c:v copy -c:a copy %s" % (os.path.join(opt.avi_dir,opt.reference,'video_only.avi'),os.path.join(opt.avi_dir,opt.reference,'audio.wav'),os.path.join(opt.avi_dir,opt.reference,'video_out.avi'))) #-async 1
output = subprocess.call(command, shell=True, stdout=None)


diff --git a/Wav2Lip-master/evaluation/test_filelists/README.md b/Wav2Lip-master/evaluation/test_filelists/README.md
new file mode 100644
index 00000000..84c9acae
--- /dev/null
+++ b/Wav2Lip-master/evaluation/test_filelists/README.md
@@ -0,0 +1,13 @@
This folder contains the filelists for the new evaluation framework proposed in the paper.

## Test filelists for LRS2, LRS3, and LRW.

This folder contains three filelists, each containing a list of names of audio-video pairs from the test sets of LRS2, LRS3, and LRW. The LRS2 and LRW filelists are strictly "Copyright BBC" and can only be used for “non-commercial research by applicants who have an agreement with the BBC to access the Lip Reading in the Wild and/or Lip Reading Sentences in the Wild datasets”. Please follow this link for more details: [https://www.bbc.co.uk/rd/projects/lip-reading-datasets](https://www.bbc.co.uk/rd/projects/lip-reading-datasets).


## ReSynCED benchmark

The sub-folder `ReSynCED` contains filelists for our own Real-world lip-Sync Evaluation Dataset (ReSyncED).


#### Instructions on how to use the above two filelists are available in the README of the parent folder.
diff --git a/Wav2Lip-master/evaluation/test_filelists/ReSyncED/random_pairs.txt b/Wav2Lip-master/evaluation/test_filelists/ReSyncED/random_pairs.txt new file mode 100644 index 00000000..ffe2c40e --- /dev/null +++ b/Wav2Lip-master/evaluation/test_filelists/ReSyncED/random_pairs.txt @@ -0,0 +1,160 @@ +sachin.mp4 emma_cropped.mp4 +sachin.mp4 mourinho.mp4 +sachin.mp4 elon.mp4 +sachin.mp4 messi2.mp4 +sachin.mp4 cr1.mp4 +sachin.mp4 sachin.mp4 +sachin.mp4 sg.mp4 +sachin.mp4 fergi.mp4 +sachin.mp4 spanish_lec1.mp4 +sachin.mp4 bush_small.mp4 +sachin.mp4 macca_cut.mp4 +sachin.mp4 ca_cropped.mp4 +sachin.mp4 lecun.mp4 +sachin.mp4 spanish_lec0.mp4 +srk.mp4 emma_cropped.mp4 +srk.mp4 mourinho.mp4 +srk.mp4 elon.mp4 +srk.mp4 messi2.mp4 +srk.mp4 cr1.mp4 +srk.mp4 srk.mp4 +srk.mp4 sachin.mp4 +srk.mp4 sg.mp4 +srk.mp4 fergi.mp4 +srk.mp4 spanish_lec1.mp4 +srk.mp4 bush_small.mp4 +srk.mp4 macca_cut.mp4 +srk.mp4 ca_cropped.mp4 +srk.mp4 guardiola.mp4 +srk.mp4 lecun.mp4 +srk.mp4 spanish_lec0.mp4 +cr1.mp4 emma_cropped.mp4 +cr1.mp4 elon.mp4 +cr1.mp4 messi2.mp4 +cr1.mp4 cr1.mp4 +cr1.mp4 spanish_lec1.mp4 +cr1.mp4 bush_small.mp4 +cr1.mp4 macca_cut.mp4 +cr1.mp4 ca_cropped.mp4 +cr1.mp4 lecun.mp4 +cr1.mp4 spanish_lec0.mp4 +macca_cut.mp4 emma_cropped.mp4 +macca_cut.mp4 elon.mp4 +macca_cut.mp4 messi2.mp4 +macca_cut.mp4 spanish_lec1.mp4 +macca_cut.mp4 macca_cut.mp4 +macca_cut.mp4 ca_cropped.mp4 +macca_cut.mp4 spanish_lec0.mp4 +lecun.mp4 emma_cropped.mp4 +lecun.mp4 elon.mp4 +lecun.mp4 messi2.mp4 +lecun.mp4 spanish_lec1.mp4 +lecun.mp4 macca_cut.mp4 +lecun.mp4 ca_cropped.mp4 +lecun.mp4 lecun.mp4 +lecun.mp4 spanish_lec0.mp4 +messi2.mp4 emma_cropped.mp4 +messi2.mp4 elon.mp4 +messi2.mp4 messi2.mp4 +messi2.mp4 spanish_lec1.mp4 +messi2.mp4 macca_cut.mp4 +messi2.mp4 ca_cropped.mp4 +messi2.mp4 spanish_lec0.mp4 +ca_cropped.mp4 emma_cropped.mp4 +ca_cropped.mp4 elon.mp4 +ca_cropped.mp4 spanish_lec1.mp4 +ca_cropped.mp4 ca_cropped.mp4 +ca_cropped.mp4 spanish_lec0.mp4 +spanish_lec1.mp4 spanish_lec1.mp4 +spanish_lec1.mp4 
spanish_lec0.mp4 +elon.mp4 elon.mp4 +elon.mp4 spanish_lec1.mp4 +elon.mp4 spanish_lec0.mp4 +guardiola.mp4 emma_cropped.mp4 +guardiola.mp4 mourinho.mp4 +guardiola.mp4 elon.mp4 +guardiola.mp4 messi2.mp4 +guardiola.mp4 cr1.mp4 +guardiola.mp4 sachin.mp4 +guardiola.mp4 sg.mp4 +guardiola.mp4 fergi.mp4 +guardiola.mp4 spanish_lec1.mp4 +guardiola.mp4 bush_small.mp4 +guardiola.mp4 macca_cut.mp4 +guardiola.mp4 ca_cropped.mp4 +guardiola.mp4 guardiola.mp4 +guardiola.mp4 lecun.mp4 +guardiola.mp4 spanish_lec0.mp4 +fergi.mp4 emma_cropped.mp4 +fergi.mp4 mourinho.mp4 +fergi.mp4 elon.mp4 +fergi.mp4 messi2.mp4 +fergi.mp4 cr1.mp4 +fergi.mp4 sachin.mp4 +fergi.mp4 sg.mp4 +fergi.mp4 fergi.mp4 +fergi.mp4 spanish_lec1.mp4 +fergi.mp4 bush_small.mp4 +fergi.mp4 macca_cut.mp4 +fergi.mp4 ca_cropped.mp4 +fergi.mp4 lecun.mp4 +fergi.mp4 spanish_lec0.mp4 +spanish.mp4 emma_cropped.mp4 +spanish.mp4 spanish.mp4 +spanish.mp4 mourinho.mp4 +spanish.mp4 elon.mp4 +spanish.mp4 messi2.mp4 +spanish.mp4 cr1.mp4 +spanish.mp4 srk.mp4 +spanish.mp4 sachin.mp4 +spanish.mp4 sg.mp4 +spanish.mp4 fergi.mp4 +spanish.mp4 spanish_lec1.mp4 +spanish.mp4 bush_small.mp4 +spanish.mp4 macca_cut.mp4 +spanish.mp4 ca_cropped.mp4 +spanish.mp4 guardiola.mp4 +spanish.mp4 lecun.mp4 +spanish.mp4 spanish_lec0.mp4 +bush_small.mp4 emma_cropped.mp4 +bush_small.mp4 elon.mp4 +bush_small.mp4 messi2.mp4 +bush_small.mp4 spanish_lec1.mp4 +bush_small.mp4 bush_small.mp4 +bush_small.mp4 macca_cut.mp4 +bush_small.mp4 ca_cropped.mp4 +bush_small.mp4 lecun.mp4 +bush_small.mp4 spanish_lec0.mp4 +emma_cropped.mp4 emma_cropped.mp4 +emma_cropped.mp4 elon.mp4 +emma_cropped.mp4 spanish_lec1.mp4 +emma_cropped.mp4 spanish_lec0.mp4 +sg.mp4 emma_cropped.mp4 +sg.mp4 mourinho.mp4 +sg.mp4 elon.mp4 +sg.mp4 messi2.mp4 +sg.mp4 cr1.mp4 +sg.mp4 sachin.mp4 +sg.mp4 sg.mp4 +sg.mp4 fergi.mp4 +sg.mp4 spanish_lec1.mp4 +sg.mp4 bush_small.mp4 +sg.mp4 macca_cut.mp4 +sg.mp4 ca_cropped.mp4 +sg.mp4 lecun.mp4 +sg.mp4 spanish_lec0.mp4 +spanish_lec0.mp4 spanish_lec0.mp4 +mourinho.mp4 
emma_cropped.mp4 +mourinho.mp4 mourinho.mp4 +mourinho.mp4 elon.mp4 +mourinho.mp4 messi2.mp4 +mourinho.mp4 cr1.mp4 +mourinho.mp4 sachin.mp4 +mourinho.mp4 sg.mp4 +mourinho.mp4 fergi.mp4 +mourinho.mp4 spanish_lec1.mp4 +mourinho.mp4 bush_small.mp4 +mourinho.mp4 macca_cut.mp4 +mourinho.mp4 ca_cropped.mp4 +mourinho.mp4 lecun.mp4 +mourinho.mp4 spanish_lec0.mp4 diff --git a/Wav2Lip-master/evaluation/test_filelists/ReSyncED/tts_pairs.txt b/Wav2Lip-master/evaluation/test_filelists/ReSyncED/tts_pairs.txt new file mode 100644 index 00000000..b7dc1a8c --- /dev/null +++ b/Wav2Lip-master/evaluation/test_filelists/ReSyncED/tts_pairs.txt @@ -0,0 +1,18 @@ +adam_1.mp4 andreng_optimization.wav +agad_2.mp4 agad_2.wav +agad_1.mp4 agad_1.wav +agad_3.mp4 agad_3.wav +rms_prop_1.mp4 rms_prop_tts.wav +tf_1.mp4 tf_1.wav +tf_2.mp4 tf_2.wav +andrew_ng_ai_business.mp4 andrewng_business_tts.wav +covid_autopsy_1.mp4 autopsy_tts.wav +news_1.mp4 news_tts.wav +andrew_ng_fund_1.mp4 andrewng_ai_fund.wav +covid_treatments_1.mp4 covid_tts.wav +pytorch_v_tf.mp4 pytorch_vs_tf_eng.wav +pytorch_1.mp4 pytorch.wav +pkb_1.mp4 pkb_1.wav +ss_1.mp4 ss_1.wav +carlsen_1.mp4 carlsen_eng.wav +french.mp4 french.wav \ No newline at end of file diff --git a/Wav2Lip-master/face_detection/README.md b/Wav2Lip-master/face_detection/README.md new file mode 100644 index 00000000..c073376e --- /dev/null +++ b/Wav2Lip-master/face_detection/README.md @@ -0,0 +1 @@ +The code for Face Detection in this folder has been taken from the wonderful [face_alignment](https://github.com/1adrianb/face-alignment) repository. This has been modified to take batches of faces at a time. 
\ No newline at end of file diff --git a/Wav2Lip-master/face_detection/__init__.py b/Wav2Lip-master/face_detection/__init__.py new file mode 100644 index 00000000..4bae29fd --- /dev/null +++ b/Wav2Lip-master/face_detection/__init__.py @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- + +__author__ = """Adrian Bulat""" +__email__ = 'adrian.bulat@nottingham.ac.uk' +__version__ = '1.0.1' + +from .api import FaceAlignment, LandmarksType, NetworkSize diff --git a/Wav2Lip-master/face_detection/api.py b/Wav2Lip-master/face_detection/api.py new file mode 100644 index 00000000..cb02d525 --- /dev/null +++ b/Wav2Lip-master/face_detection/api.py @@ -0,0 +1,79 @@ +from __future__ import print_function +import os +import torch +from torch.utils.model_zoo import load_url +from enum import Enum +import numpy as np +import cv2 +try: + import urllib.request as request_file +except BaseException: + import urllib as request_file + +from .models import FAN, ResNetDepth +from .utils import * + + +class LandmarksType(Enum): + """Enum class defining the type of landmarks to detect. 
+ + ``_2D`` - the detected points ``(x,y)`` are detected in a 2D space and follow the visible contour of the face + ``_2halfD`` - this points represent the projection of the 3D points into 3D + ``_3D`` - detect the points ``(x,y,z)``` in a 3D space + + """ + _2D = 1 + _2halfD = 2 + _3D = 3 + + +class NetworkSize(Enum): + # TINY = 1 + # SMALL = 2 + # MEDIUM = 3 + LARGE = 4 + + def __new__(cls, value): + member = object.__new__(cls) + member._value_ = value + return member + + def __int__(self): + return self.value + +ROOT = os.path.dirname(os.path.abspath(__file__)) + +class FaceAlignment: + def __init__(self, landmarks_type, network_size=NetworkSize.LARGE, + device='cuda', flip_input=False, face_detector='sfd', verbose=False): + self.device = device + self.flip_input = flip_input + self.landmarks_type = landmarks_type + self.verbose = verbose + + network_size = int(network_size) + + if 'cuda' in device: + torch.backends.cudnn.benchmark = True + + # Get the face detector + face_detector_module = __import__('face_detection.detection.' 
+ face_detector, + globals(), locals(), [face_detector], 0) + self.face_detector = face_detector_module.FaceDetector(device=device, verbose=verbose) + + def get_detections_for_batch(self, images): + images = images[..., ::-1] + detected_faces = self.face_detector.detect_from_batch(images.copy()) + results = [] + + for i, d in enumerate(detected_faces): + if len(d) == 0: + results.append(None) + continue + d = d[0] + d = np.clip(d, 0, None) + + x1, y1, x2, y2 = map(int, d[:-1]) + results.append((x1, y1, x2, y2)) + + return results \ No newline at end of file diff --git a/Wav2Lip-master/face_detection/detection/__init__.py b/Wav2Lip-master/face_detection/detection/__init__.py new file mode 100644 index 00000000..1a6b0402 --- /dev/null +++ b/Wav2Lip-master/face_detection/detection/__init__.py @@ -0,0 +1 @@ +from .core import FaceDetector \ No newline at end of file diff --git a/Wav2Lip-master/face_detection/detection/core.py b/Wav2Lip-master/face_detection/detection/core.py new file mode 100644 index 00000000..0f8275e8 --- /dev/null +++ b/Wav2Lip-master/face_detection/detection/core.py @@ -0,0 +1,130 @@ +import logging +import glob +from tqdm import tqdm +import numpy as np +import torch +import cv2 + + +class FaceDetector(object): + """An abstract class representing a face detector. + + Any other face detection implementation must subclass it. All subclasses + must implement ``detect_from_image``, that return a list of detected + bounding boxes. Optionally, for speed considerations detect from path is + recommended. 
+ """ + + def __init__(self, device, verbose): + self.device = device + self.verbose = verbose + + if verbose: + if 'cpu' in device: + logger = logging.getLogger(__name__) + logger.warning("Detection running on CPU, this may be potentially slow.") + + if 'cpu' not in device and 'cuda' not in device: + if verbose: + logger.error("Expected values for device are: {cpu, cuda} but got: %s", device) + raise ValueError + + def detect_from_image(self, tensor_or_path): + """Detects faces in a given image. + + This function detects the faces present in a provided BGR(usually) + image. The input can be either the image itself or the path to it. + + Arguments: + tensor_or_path {numpy.ndarray, torch.tensor or string} -- the path + to an image or the image itself. + + Example:: + + >>> path_to_image = 'data/image_01.jpg' + ... detected_faces = detect_from_image(path_to_image) + [A list of bounding boxes (x1, y1, x2, y2)] + >>> image = cv2.imread(path_to_image) + ... detected_faces = detect_from_image(image) + [A list of bounding boxes (x1, y1, x2, y2)] + + """ + raise NotImplementedError + + def detect_from_directory(self, path, extensions=['.jpg', '.png'], recursive=False, show_progress_bar=True): + """Detects faces from all the images present in a given directory. + + Arguments: + path {string} -- a string containing a path that points to the folder containing the images + + Keyword Arguments: + extensions {list} -- list of string containing the extensions to be + consider in the following format: ``.extension_name`` (default: + {['.jpg', '.png']}) recursive {bool} -- option wherever to scan the + folder recursively (default: {False}) show_progress_bar {bool} -- + display a progressbar (default: {True}) + + Example: + >>> directory = 'data' + ... 
detected_faces = detect_from_directory(directory) + {A dictionary of [lists containing bounding boxes(x1, y1, x2, y2)]} + + """ + if self.verbose: + logger = logging.getLogger(__name__) + + if len(extensions) == 0: + if self.verbose: + logger.error("Expected at list one extension, but none was received.") + raise ValueError + + if self.verbose: + logger.info("Constructing the list of images.") + additional_pattern = '/**/*' if recursive else '/*' + files = [] + for extension in extensions: + files.extend(glob.glob(path + additional_pattern + extension, recursive=recursive)) + + if self.verbose: + logger.info("Finished searching for images. %s images found", len(files)) + logger.info("Preparing to run the detection.") + + predictions = {} + for image_path in tqdm(files, disable=not show_progress_bar): + if self.verbose: + logger.info("Running the face detector on image: %s", image_path) + predictions[image_path] = self.detect_from_image(image_path) + + if self.verbose: + logger.info("The detector was successfully run on all %s images", len(files)) + + return predictions + + @property + def reference_scale(self): + raise NotImplementedError + + @property + def reference_x_shift(self): + raise NotImplementedError + + @property + def reference_y_shift(self): + raise NotImplementedError + + @staticmethod + def tensor_or_path_to_ndarray(tensor_or_path, rgb=True): + """Convert path (represented as a string) or torch.tensor to a numpy.ndarray + + Arguments: + tensor_or_path {numpy.ndarray, torch.tensor or string} -- path to the image, or the image itself + """ + if isinstance(tensor_or_path, str): + return cv2.imread(tensor_or_path) if not rgb else cv2.imread(tensor_or_path)[..., ::-1] + elif torch.is_tensor(tensor_or_path): + # Call cpu in case its coming from cuda + return tensor_or_path.cpu().numpy()[..., ::-1].copy() if not rgb else tensor_or_path.cpu().numpy() + elif isinstance(tensor_or_path, np.ndarray): + return tensor_or_path[..., ::-1].copy() if not rgb else 
tensor_or_path + else: + raise TypeError diff --git a/Wav2Lip-master/face_detection/detection/sfd/__init__.py b/Wav2Lip-master/face_detection/detection/sfd/__init__.py new file mode 100644 index 00000000..5a63ecd4 --- /dev/null +++ b/Wav2Lip-master/face_detection/detection/sfd/__init__.py @@ -0,0 +1 @@ +from .sfd_detector import SFDDetector as FaceDetector \ No newline at end of file diff --git a/Wav2Lip-master/face_detection/detection/sfd/bbox.py b/Wav2Lip-master/face_detection/detection/sfd/bbox.py new file mode 100644 index 00000000..4bd7222e --- /dev/null +++ b/Wav2Lip-master/face_detection/detection/sfd/bbox.py @@ -0,0 +1,129 @@ +from __future__ import print_function +import os +import sys +import cv2 +import random +import datetime +import time +import math +import argparse +import numpy as np +import torch + +try: + from iou import IOU +except BaseException: + # IOU cython speedup 10x + def IOU(ax1, ay1, ax2, ay2, bx1, by1, bx2, by2): + sa = abs((ax2 - ax1) * (ay2 - ay1)) + sb = abs((bx2 - bx1) * (by2 - by1)) + x1, y1 = max(ax1, bx1), max(ay1, by1) + x2, y2 = min(ax2, bx2), min(ay2, by2) + w = x2 - x1 + h = y2 - y1 + if w < 0 or h < 0: + return 0.0 + else: + return 1.0 * w * h / (sa + sb - w * h) + + +def bboxlog(x1, y1, x2, y2, axc, ayc, aww, ahh): + xc, yc, ww, hh = (x2 + x1) / 2, (y2 + y1) / 2, x2 - x1, y2 - y1 + dx, dy = (xc - axc) / aww, (yc - ayc) / ahh + dw, dh = math.log(ww / aww), math.log(hh / ahh) + return dx, dy, dw, dh + + +def bboxloginv(dx, dy, dw, dh, axc, ayc, aww, ahh): + xc, yc = dx * aww + axc, dy * ahh + ayc + ww, hh = math.exp(dw) * aww, math.exp(dh) * ahh + x1, x2, y1, y2 = xc - ww / 2, xc + ww / 2, yc - hh / 2, yc + hh / 2 + return x1, y1, x2, y2 + + +def nms(dets, thresh): + if 0 == len(dets): + return [] + x1, y1, x2, y2, scores = dets[:, 0], dets[:, 1], dets[:, 2], dets[:, 3], dets[:, 4] + areas = (x2 - x1 + 1) * (y2 - y1 + 1) + order = scores.argsort()[::-1] + + keep = [] + while order.size > 0: + i = order[0] + keep.append(i) + 
xx1, yy1 = np.maximum(x1[i], x1[order[1:]]), np.maximum(y1[i], y1[order[1:]]) + xx2, yy2 = np.minimum(x2[i], x2[order[1:]]), np.minimum(y2[i], y2[order[1:]]) + + w, h = np.maximum(0.0, xx2 - xx1 + 1), np.maximum(0.0, yy2 - yy1 + 1) + ovr = w * h / (areas[i] + areas[order[1:]] - w * h) + + inds = np.where(ovr <= thresh)[0] + order = order[inds + 1] + + return keep + + +def encode(matched, priors, variances): + """Encode the variances from the priorbox layers into the ground truth boxes + we have matched (based on jaccard overlap) with the prior boxes. + Args: + matched: (tensor) Coords of ground truth for each prior in point-form + Shape: [num_priors, 4]. + priors: (tensor) Prior boxes in center-offset form + Shape: [num_priors,4]. + variances: (list[float]) Variances of priorboxes + Return: + encoded boxes (tensor), Shape: [num_priors, 4] + """ + + # dist b/t match center and prior's center + g_cxcy = (matched[:, :2] + matched[:, 2:]) / 2 - priors[:, :2] + # encode variance + g_cxcy /= (variances[0] * priors[:, 2:]) + # match wh / prior wh + g_wh = (matched[:, 2:] - matched[:, :2]) / priors[:, 2:] + g_wh = torch.log(g_wh) / variances[1] + # return target for smooth_l1_loss + return torch.cat([g_cxcy, g_wh], 1) # [num_priors,4] + + +def decode(loc, priors, variances): + """Decode locations from predictions using priors to undo + the encoding we did for offset regression at train time. + Args: + loc (tensor): location predictions for loc layers, + Shape: [num_priors,4] + priors (tensor): Prior boxes in center-offset form. + Shape: [num_priors,4]. 
+ variances: (list[float]) Variances of priorboxes + Return: + decoded bounding box predictions + """ + + boxes = torch.cat(( + priors[:, :2] + loc[:, :2] * variances[0] * priors[:, 2:], + priors[:, 2:] * torch.exp(loc[:, 2:] * variances[1])), 1) + boxes[:, :2] -= boxes[:, 2:] / 2 + boxes[:, 2:] += boxes[:, :2] + return boxes + +def batch_decode(loc, priors, variances): + """Decode locations from predictions using priors to undo + the encoding we did for offset regression at train time. + Args: + loc (tensor): location predictions for loc layers, + Shape: [num_priors,4] + priors (tensor): Prior boxes in center-offset form. + Shape: [num_priors,4]. + variances: (list[float]) Variances of priorboxes + Return: + decoded bounding box predictions + """ + + boxes = torch.cat(( + priors[:, :, :2] + loc[:, :, :2] * variances[0] * priors[:, :, 2:], + priors[:, :, 2:] * torch.exp(loc[:, :, 2:] * variances[1])), 2) + boxes[:, :, :2] -= boxes[:, :, 2:] / 2 + boxes[:, :, 2:] += boxes[:, :, :2] + return boxes diff --git a/Wav2Lip-master/face_detection/detection/sfd/detect.py b/Wav2Lip-master/face_detection/detection/sfd/detect.py new file mode 100644 index 00000000..efef6273 --- /dev/null +++ b/Wav2Lip-master/face_detection/detection/sfd/detect.py @@ -0,0 +1,112 @@ +import torch +import torch.nn.functional as F + +import os +import sys +import cv2 +import random +import datetime +import math +import argparse +import numpy as np + +import scipy.io as sio +import zipfile +from .net_s3fd import s3fd +from .bbox import * + + +def detect(net, img, device): + img = img - np.array([104, 117, 123]) + img = img.transpose(2, 0, 1) + img = img.reshape((1,) + img.shape) + + if 'cuda' in device: + torch.backends.cudnn.benchmark = True + + img = torch.from_numpy(img).float().to(device) + BB, CC, HH, WW = img.size() + with torch.no_grad(): + olist = net(img) + + bboxlist = [] + for i in range(len(olist) // 2): + olist[i * 2] = F.softmax(olist[i * 2], dim=1) + olist = [oelem.data.cpu() for 
oelem in olist] + for i in range(len(olist) // 2): + ocls, oreg = olist[i * 2], olist[i * 2 + 1] + FB, FC, FH, FW = ocls.size() # feature map size + stride = 2**(i + 2) # 4,8,16,32,64,128 + anchor = stride * 4 + poss = zip(*np.where(ocls[:, 1, :, :] > 0.05)) + for Iindex, hindex, windex in poss: + axc, ayc = stride / 2 + windex * stride, stride / 2 + hindex * stride + score = ocls[0, 1, hindex, windex] + loc = oreg[0, :, hindex, windex].contiguous().view(1, 4) + priors = torch.Tensor([[axc / 1.0, ayc / 1.0, stride * 4 / 1.0, stride * 4 / 1.0]]) + variances = [0.1, 0.2] + box = decode(loc, priors, variances) + x1, y1, x2, y2 = box[0] * 1.0 + # cv2.rectangle(imgshow,(int(x1),int(y1)),(int(x2),int(y2)),(0,0,255),1) + bboxlist.append([x1, y1, x2, y2, score]) + bboxlist = np.array(bboxlist) + if 0 == len(bboxlist): + bboxlist = np.zeros((1, 5)) + + return bboxlist + +def batch_detect(net, imgs, device): + imgs = imgs - np.array([104, 117, 123]) + imgs = imgs.transpose(0, 3, 1, 2) + + if 'cuda' in device: + torch.backends.cudnn.benchmark = True + + imgs = torch.from_numpy(imgs).float().to(device) + BB, CC, HH, WW = imgs.size() + with torch.no_grad(): + olist = net(imgs) + + bboxlist = [] + for i in range(len(olist) // 2): + olist[i * 2] = F.softmax(olist[i * 2], dim=1) + olist = [oelem.data.cpu() for oelem in olist] + for i in range(len(olist) // 2): + ocls, oreg = olist[i * 2], olist[i * 2 + 1] + FB, FC, FH, FW = ocls.size() # feature map size + stride = 2**(i + 2) # 4,8,16,32,64,128 + anchor = stride * 4 + poss = zip(*np.where(ocls[:, 1, :, :] > 0.05)) + for Iindex, hindex, windex in poss: + axc, ayc = stride / 2 + windex * stride, stride / 2 + hindex * stride + score = ocls[:, 1, hindex, windex] + loc = oreg[:, :, hindex, windex].contiguous().view(BB, 1, 4) + priors = torch.Tensor([[axc / 1.0, ayc / 1.0, stride * 4 / 1.0, stride * 4 / 1.0]]).view(1, 1, 4) + variances = [0.1, 0.2] + box = batch_decode(loc, priors, variances) + box = box[:, 0] * 1.0 + # 
cv2.rectangle(imgshow,(int(x1),int(y1)),(int(x2),int(y2)),(0,0,255),1) + bboxlist.append(torch.cat([box, score.unsqueeze(1)], 1).cpu().numpy()) + bboxlist = np.array(bboxlist) + if 0 == len(bboxlist): + bboxlist = np.zeros((1, BB, 5)) + + return bboxlist + +def flip_detect(net, img, device): + img = cv2.flip(img, 1) + b = detect(net, img, device) + + bboxlist = np.zeros(b.shape) + bboxlist[:, 0] = img.shape[1] - b[:, 2] + bboxlist[:, 1] = b[:, 1] + bboxlist[:, 2] = img.shape[1] - b[:, 0] + bboxlist[:, 3] = b[:, 3] + bboxlist[:, 4] = b[:, 4] + return bboxlist + + +def pts_to_bb(pts): + min_x, min_y = np.min(pts, axis=0) + max_x, max_y = np.max(pts, axis=0) + return np.array([min_x, min_y, max_x, max_y]) diff --git a/Wav2Lip-master/face_detection/detection/sfd/net_s3fd.py b/Wav2Lip-master/face_detection/detection/sfd/net_s3fd.py new file mode 100644 index 00000000..fc64313c --- /dev/null +++ b/Wav2Lip-master/face_detection/detection/sfd/net_s3fd.py @@ -0,0 +1,129 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + + +class L2Norm(nn.Module): + def __init__(self, n_channels, scale=1.0): + super(L2Norm, self).__init__() + self.n_channels = n_channels + self.scale = scale + self.eps = 1e-10 + self.weight = nn.Parameter(torch.Tensor(self.n_channels)) + self.weight.data *= 0.0 + self.weight.data += self.scale + + def forward(self, x): + norm = x.pow(2).sum(dim=1, keepdim=True).sqrt() + self.eps + x = x / norm * self.weight.view(1, -1, 1, 1) + return x + + +class s3fd(nn.Module): + def __init__(self): + super(s3fd, self).__init__() + self.conv1_1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1) + self.conv1_2 = nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1) + + self.conv2_1 = nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1) + self.conv2_2 = nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1) + + self.conv3_1 = nn.Conv2d(128, 256, kernel_size=3, stride=1, padding=1) + self.conv3_2 = nn.Conv2d(256, 256, kernel_size=3, stride=1, 
padding=1) + self.conv3_3 = nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1) + + self.conv4_1 = nn.Conv2d(256, 512, kernel_size=3, stride=1, padding=1) + self.conv4_2 = nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1) + self.conv4_3 = nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1) + + self.conv5_1 = nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1) + self.conv5_2 = nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1) + self.conv5_3 = nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1) + + self.fc6 = nn.Conv2d(512, 1024, kernel_size=3, stride=1, padding=3) + self.fc7 = nn.Conv2d(1024, 1024, kernel_size=1, stride=1, padding=0) + + self.conv6_1 = nn.Conv2d(1024, 256, kernel_size=1, stride=1, padding=0) + self.conv6_2 = nn.Conv2d(256, 512, kernel_size=3, stride=2, padding=1) + + self.conv7_1 = nn.Conv2d(512, 128, kernel_size=1, stride=1, padding=0) + self.conv7_2 = nn.Conv2d(128, 256, kernel_size=3, stride=2, padding=1) + + self.conv3_3_norm = L2Norm(256, scale=10) + self.conv4_3_norm = L2Norm(512, scale=8) + self.conv5_3_norm = L2Norm(512, scale=5) + + self.conv3_3_norm_mbox_conf = nn.Conv2d(256, 4, kernel_size=3, stride=1, padding=1) + self.conv3_3_norm_mbox_loc = nn.Conv2d(256, 4, kernel_size=3, stride=1, padding=1) + self.conv4_3_norm_mbox_conf = nn.Conv2d(512, 2, kernel_size=3, stride=1, padding=1) + self.conv4_3_norm_mbox_loc = nn.Conv2d(512, 4, kernel_size=3, stride=1, padding=1) + self.conv5_3_norm_mbox_conf = nn.Conv2d(512, 2, kernel_size=3, stride=1, padding=1) + self.conv5_3_norm_mbox_loc = nn.Conv2d(512, 4, kernel_size=3, stride=1, padding=1) + + self.fc7_mbox_conf = nn.Conv2d(1024, 2, kernel_size=3, stride=1, padding=1) + self.fc7_mbox_loc = nn.Conv2d(1024, 4, kernel_size=3, stride=1, padding=1) + self.conv6_2_mbox_conf = nn.Conv2d(512, 2, kernel_size=3, stride=1, padding=1) + self.conv6_2_mbox_loc = nn.Conv2d(512, 4, kernel_size=3, stride=1, padding=1) + self.conv7_2_mbox_conf = nn.Conv2d(256, 2, kernel_size=3, 
stride=1, padding=1) + self.conv7_2_mbox_loc = nn.Conv2d(256, 4, kernel_size=3, stride=1, padding=1) + + def forward(self, x): + h = F.relu(self.conv1_1(x)) + h = F.relu(self.conv1_2(h)) + h = F.max_pool2d(h, 2, 2) + + h = F.relu(self.conv2_1(h)) + h = F.relu(self.conv2_2(h)) + h = F.max_pool2d(h, 2, 2) + + h = F.relu(self.conv3_1(h)) + h = F.relu(self.conv3_2(h)) + h = F.relu(self.conv3_3(h)) + f3_3 = h + h = F.max_pool2d(h, 2, 2) + + h = F.relu(self.conv4_1(h)) + h = F.relu(self.conv4_2(h)) + h = F.relu(self.conv4_3(h)) + f4_3 = h + h = F.max_pool2d(h, 2, 2) + + h = F.relu(self.conv5_1(h)) + h = F.relu(self.conv5_2(h)) + h = F.relu(self.conv5_3(h)) + f5_3 = h + h = F.max_pool2d(h, 2, 2) + + h = F.relu(self.fc6(h)) + h = F.relu(self.fc7(h)) + ffc7 = h + h = F.relu(self.conv6_1(h)) + h = F.relu(self.conv6_2(h)) + f6_2 = h + h = F.relu(self.conv7_1(h)) + h = F.relu(self.conv7_2(h)) + f7_2 = h + + f3_3 = self.conv3_3_norm(f3_3) + f4_3 = self.conv4_3_norm(f4_3) + f5_3 = self.conv5_3_norm(f5_3) + + cls1 = self.conv3_3_norm_mbox_conf(f3_3) + reg1 = self.conv3_3_norm_mbox_loc(f3_3) + cls2 = self.conv4_3_norm_mbox_conf(f4_3) + reg2 = self.conv4_3_norm_mbox_loc(f4_3) + cls3 = self.conv5_3_norm_mbox_conf(f5_3) + reg3 = self.conv5_3_norm_mbox_loc(f5_3) + cls4 = self.fc7_mbox_conf(ffc7) + reg4 = self.fc7_mbox_loc(ffc7) + cls5 = self.conv6_2_mbox_conf(f6_2) + reg5 = self.conv6_2_mbox_loc(f6_2) + cls6 = self.conv7_2_mbox_conf(f7_2) + reg6 = self.conv7_2_mbox_loc(f7_2) + + # max-out background label + chunk = torch.chunk(cls1, 4, 1) + bmax = torch.max(torch.max(chunk[0], chunk[1]), chunk[2]) + cls1 = torch.cat([bmax, chunk[3]], dim=1) + + return [cls1, reg1, cls2, reg2, cls3, reg3, cls4, reg4, cls5, reg5, cls6, reg6] diff --git a/Wav2Lip-master/face_detection/detection/sfd/sfd_detector.py b/Wav2Lip-master/face_detection/detection/sfd/sfd_detector.py new file mode 100644 index 00000000..8fbce152 --- /dev/null +++ b/Wav2Lip-master/face_detection/detection/sfd/sfd_detector.py @@ 
-0,0 +1,59 @@ +import os +import cv2 +from torch.utils.model_zoo import load_url + +from ..core import FaceDetector + +from .net_s3fd import s3fd +from .bbox import * +from .detect import * + +models_urls = { + 's3fd': 'https://www.adrianbulat.com/downloads/python-fan/s3fd-619a316812.pth', +} + + +class SFDDetector(FaceDetector): + def __init__(self, device, path_to_detector=os.path.join(os.path.dirname(os.path.abspath(__file__)), 's3fd.pth'), verbose=False): + super(SFDDetector, self).__init__(device, verbose) + + # Initialise the face detector + if not os.path.isfile(path_to_detector): + model_weights = load_url(models_urls['s3fd']) + else: + model_weights = torch.load(path_to_detector) + + self.face_detector = s3fd() + self.face_detector.load_state_dict(model_weights) + self.face_detector.to(device) + self.face_detector.eval() + + def detect_from_image(self, tensor_or_path): + image = self.tensor_or_path_to_ndarray(tensor_or_path) + + bboxlist = detect(self.face_detector, image, device=self.device) + keep = nms(bboxlist, 0.3) + bboxlist = bboxlist[keep, :] + bboxlist = [x for x in bboxlist if x[-1] > 0.5] + + return bboxlist + + def detect_from_batch(self, images): + bboxlists = batch_detect(self.face_detector, images, device=self.device) + keeps = [nms(bboxlists[:, i, :], 0.3) for i in range(bboxlists.shape[1])] + bboxlists = [bboxlists[keep, i, :] for i, keep in enumerate(keeps)] + bboxlists = [[x for x in bboxlist if x[-1] > 0.5] for bboxlist in bboxlists] + + return bboxlists + + @property + def reference_scale(self): + return 195 + + @property + def reference_x_shift(self): + return 0 + + @property + def reference_y_shift(self): + return 0 diff --git a/Wav2Lip-master/face_detection/models.py b/Wav2Lip-master/face_detection/models.py new file mode 100644 index 00000000..ee2dde32 --- /dev/null +++ b/Wav2Lip-master/face_detection/models.py @@ -0,0 +1,261 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +import math + + +def 
conv3x3(in_planes, out_planes, strd=1, padding=1, bias=False): + "3x3 convolution with padding" + return nn.Conv2d(in_planes, out_planes, kernel_size=3, + stride=strd, padding=padding, bias=bias) + + +class ConvBlock(nn.Module): + def __init__(self, in_planes, out_planes): + super(ConvBlock, self).__init__() + self.bn1 = nn.BatchNorm2d(in_planes) + self.conv1 = conv3x3(in_planes, int(out_planes / 2)) + self.bn2 = nn.BatchNorm2d(int(out_planes / 2)) + self.conv2 = conv3x3(int(out_planes / 2), int(out_planes / 4)) + self.bn3 = nn.BatchNorm2d(int(out_planes / 4)) + self.conv3 = conv3x3(int(out_planes / 4), int(out_planes / 4)) + + if in_planes != out_planes: + self.downsample = nn.Sequential( + nn.BatchNorm2d(in_planes), + nn.ReLU(True), + nn.Conv2d(in_planes, out_planes, + kernel_size=1, stride=1, bias=False), + ) + else: + self.downsample = None + + def forward(self, x): + residual = x + + out1 = self.bn1(x) + out1 = F.relu(out1, True) + out1 = self.conv1(out1) + + out2 = self.bn2(out1) + out2 = F.relu(out2, True) + out2 = self.conv2(out2) + + out3 = self.bn3(out2) + out3 = F.relu(out3, True) + out3 = self.conv3(out3) + + out3 = torch.cat((out1, out2, out3), 1) + + if self.downsample is not None: + residual = self.downsample(residual) + + out3 += residual + + return out3 + + +class Bottleneck(nn.Module): + + expansion = 4 + + def __init__(self, inplanes, planes, stride=1, downsample=None): + super(Bottleneck, self).__init__() + self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) + self.bn1 = nn.BatchNorm2d(planes) + self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, + padding=1, bias=False) + self.bn2 = nn.BatchNorm2d(planes) + self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False) + self.bn3 = nn.BatchNorm2d(planes * 4) + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) 
+ + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + +class HourGlass(nn.Module): + def __init__(self, num_modules, depth, num_features): + super(HourGlass, self).__init__() + self.num_modules = num_modules + self.depth = depth + self.features = num_features + + self._generate_network(self.depth) + + def _generate_network(self, level): + self.add_module('b1_' + str(level), ConvBlock(self.features, self.features)) + + self.add_module('b2_' + str(level), ConvBlock(self.features, self.features)) + + if level > 1: + self._generate_network(level - 1) + else: + self.add_module('b2_plus_' + str(level), ConvBlock(self.features, self.features)) + + self.add_module('b3_' + str(level), ConvBlock(self.features, self.features)) + + def _forward(self, level, inp): + # Upper branch + up1 = inp + up1 = self._modules['b1_' + str(level)](up1) + + # Lower branch + low1 = F.avg_pool2d(inp, 2, stride=2) + low1 = self._modules['b2_' + str(level)](low1) + + if level > 1: + low2 = self._forward(level - 1, low1) + else: + low2 = low1 + low2 = self._modules['b2_plus_' + str(level)](low2) + + low3 = low2 + low3 = self._modules['b3_' + str(level)](low3) + + up2 = F.interpolate(low3, scale_factor=2, mode='nearest') + + return up1 + up2 + + def forward(self, x): + return self._forward(self.depth, x) + + +class FAN(nn.Module): + + def __init__(self, num_modules=1): + super(FAN, self).__init__() + self.num_modules = num_modules + + # Base part + self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3) + self.bn1 = nn.BatchNorm2d(64) + self.conv2 = ConvBlock(64, 128) + self.conv3 = ConvBlock(128, 128) + self.conv4 = ConvBlock(128, 256) + + # Stacking part + for hg_module in range(self.num_modules): + self.add_module('m' + str(hg_module), HourGlass(1, 4, 256)) + self.add_module('top_m_' + 
str(hg_module), ConvBlock(256, 256)) + self.add_module('conv_last' + str(hg_module), + nn.Conv2d(256, 256, kernel_size=1, stride=1, padding=0)) + self.add_module('bn_end' + str(hg_module), nn.BatchNorm2d(256)) + self.add_module('l' + str(hg_module), nn.Conv2d(256, + 68, kernel_size=1, stride=1, padding=0)) + + if hg_module < self.num_modules - 1: + self.add_module( + 'bl' + str(hg_module), nn.Conv2d(256, 256, kernel_size=1, stride=1, padding=0)) + self.add_module('al' + str(hg_module), nn.Conv2d(68, + 256, kernel_size=1, stride=1, padding=0)) + + def forward(self, x): + x = F.relu(self.bn1(self.conv1(x)), True) + x = F.avg_pool2d(self.conv2(x), 2, stride=2) + x = self.conv3(x) + x = self.conv4(x) + + previous = x + + outputs = [] + for i in range(self.num_modules): + hg = self._modules['m' + str(i)](previous) + + ll = hg + ll = self._modules['top_m_' + str(i)](ll) + + ll = F.relu(self._modules['bn_end' + str(i)] + (self._modules['conv_last' + str(i)](ll)), True) + + # Predict heatmaps + tmp_out = self._modules['l' + str(i)](ll) + outputs.append(tmp_out) + + if i < self.num_modules - 1: + ll = self._modules['bl' + str(i)](ll) + tmp_out_ = self._modules['al' + str(i)](tmp_out) + previous = previous + ll + tmp_out_ + + return outputs + + +class ResNetDepth(nn.Module): + + def __init__(self, block=Bottleneck, layers=[3, 8, 36, 3], num_classes=68): + self.inplanes = 64 + super(ResNetDepth, self).__init__() + self.conv1 = nn.Conv2d(3 + 68, 64, kernel_size=7, stride=2, padding=3, + bias=False) + self.bn1 = nn.BatchNorm2d(64) + self.relu = nn.ReLU(inplace=True) + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + self.layer1 = self._make_layer(block, 64, layers[0]) + self.layer2 = self._make_layer(block, 128, layers[1], stride=2) + self.layer3 = self._make_layer(block, 256, layers[2], stride=2) + self.layer4 = self._make_layer(block, 512, layers[3], stride=2) + self.avgpool = nn.AvgPool2d(7) + self.fc = nn.Linear(512 * block.expansion, num_classes) + + for m 
in self.modules(): + if isinstance(m, nn.Conv2d): + n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + m.weight.data.normal_(0, math.sqrt(2. / n)) + elif isinstance(m, nn.BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() + + def _make_layer(self, block, planes, blocks, stride=1): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.Conv2d(self.inplanes, planes * block.expansion, + kernel_size=1, stride=stride, bias=False), + nn.BatchNorm2d(planes * block.expansion), + ) + + layers = [] + layers.append(block(self.inplanes, planes, stride, downsample)) + self.inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append(block(self.inplanes, planes)) + + return nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + x = self.maxpool(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + + x = self.avgpool(x) + x = x.view(x.size(0), -1) + x = self.fc(x) + + return x diff --git a/Wav2Lip-master/face_detection/utils.py b/Wav2Lip-master/face_detection/utils.py new file mode 100644 index 00000000..3dc4cf3e --- /dev/null +++ b/Wav2Lip-master/face_detection/utils.py @@ -0,0 +1,313 @@ +from __future__ import print_function +import os +import sys +import time +import torch +import math +import numpy as np +import cv2 + + +def _gaussian( + size=3, sigma=0.25, amplitude=1, normalize=False, width=None, + height=None, sigma_horz=None, sigma_vert=None, mean_horz=0.5, + mean_vert=0.5): + # handle some defaults + if width is None: + width = size + if height is None: + height = size + if sigma_horz is None: + sigma_horz = sigma + if sigma_vert is None: + sigma_vert = sigma + center_x = mean_horz * width + 0.5 + center_y = mean_vert * height + 0.5 + gauss = np.empty((height, width), dtype=np.float32) + # generate kernel + for i in range(height): + for j in range(width): + gauss[i][j] = amplitude * 
math.exp(-(math.pow((j + 1 - center_x) / ( + sigma_horz * width), 2) / 2.0 + math.pow((i + 1 - center_y) / (sigma_vert * height), 2) / 2.0)) + if normalize: + gauss = gauss / np.sum(gauss) + return gauss + + +def draw_gaussian(image, point, sigma): + # Check if the gaussian is inside + ul = [math.floor(point[0] - 3 * sigma), math.floor(point[1] - 3 * sigma)] + br = [math.floor(point[0] + 3 * sigma), math.floor(point[1] + 3 * sigma)] + if (ul[0] > image.shape[1] or ul[1] > image.shape[0] or br[0] < 1 or br[1] < 1): + return image + size = 6 * sigma + 1 + g = _gaussian(size) + g_x = [int(max(1, -ul[0])), int(min(br[0], image.shape[1])) - int(max(1, ul[0])) + int(max(1, -ul[0]))] + g_y = [int(max(1, -ul[1])), int(min(br[1], image.shape[0])) - int(max(1, ul[1])) + int(max(1, -ul[1]))] + img_x = [int(max(1, ul[0])), int(min(br[0], image.shape[1]))] + img_y = [int(max(1, ul[1])), int(min(br[1], image.shape[0]))] + assert (g_x[0] > 0 and g_y[1] > 0) + image[img_y[0] - 1:img_y[1], img_x[0] - 1:img_x[1] + ] = image[img_y[0] - 1:img_y[1], img_x[0] - 1:img_x[1]] + g[g_y[0] - 1:g_y[1], g_x[0] - 1:g_x[1]] + image[image > 1] = 1 + return image + + +def transform(point, center, scale, resolution, invert=False): + """Generate and affine transformation matrix. + + Given a set of points, a center, a scale and a targer resolution, the + function generates and affine transformation matrix. If invert is ``True`` + it will produce the inverse transformation. 
+ + Arguments: + point {torch.tensor} -- the input 2D point + center {torch.tensor or numpy.array} -- the center around which to perform the transformations + scale {float} -- the scale of the face/object + resolution {float} -- the output resolution + + Keyword Arguments: + invert {bool} -- define wherever the function should produce the direct or the + inverse transformation matrix (default: {False}) + """ + _pt = torch.ones(3) + _pt[0] = point[0] + _pt[1] = point[1] + + h = 200.0 * scale + t = torch.eye(3) + t[0, 0] = resolution / h + t[1, 1] = resolution / h + t[0, 2] = resolution * (-center[0] / h + 0.5) + t[1, 2] = resolution * (-center[1] / h + 0.5) + + if invert: + t = torch.inverse(t) + + new_point = (torch.matmul(t, _pt))[0:2] + + return new_point.int() + + +def crop(image, center, scale, resolution=256.0): + """Center crops an image or set of heatmaps + + Arguments: + image {numpy.array} -- an rgb image + center {numpy.array} -- the center of the object, usually the same as of the bounding box + scale {float} -- scale of the face + + Keyword Arguments: + resolution {float} -- the size of the output cropped image (default: {256.0}) + + Returns: + [type] -- [description] + """ # Crop around the center point + """ Crops the image around the center. 
Input is expected to be an np.ndarray """
+    ul = transform([1, 1], center, scale, resolution, True)
+    br = transform([resolution, resolution], center, scale, resolution, True)
+    # pad = math.ceil(torch.norm((ul - br).float()) / 2.0 - (br[0] - ul[0]) / 2.0)
+    if image.ndim > 2:
+        newDim = np.array([br[1] - ul[1], br[0] - ul[0],
+                           image.shape[2]], dtype=np.int32)
+        newImg = np.zeros(newDim, dtype=np.uint8)
+    else:
+        newDim = np.array([br[1] - ul[1], br[0] - ul[0]], dtype=np.int32)  # np.int alias was removed in NumPy 1.24; use np.int32 like the branch above
+        newImg = np.zeros(newDim, dtype=np.uint8)
+    ht = image.shape[0]
+    wd = image.shape[1]
+    newX = np.array(
+        [max(1, -ul[0] + 1), min(br[0], wd) - ul[0]], dtype=np.int32)
+    newY = np.array(
+        [max(1, -ul[1] + 1), min(br[1], ht) - ul[1]], dtype=np.int32)
+    oldX = np.array([max(1, ul[0] + 1), min(br[0], wd)], dtype=np.int32)
+    oldY = np.array([max(1, ul[1] + 1), min(br[1], ht)], dtype=np.int32)
+    newImg[newY[0] - 1:newY[1], newX[0] - 1:newX[1]
+           ] = image[oldY[0] - 1:oldY[1], oldX[0] - 1:oldX[1], :]
+    newImg = cv2.resize(newImg, dsize=(int(resolution), int(resolution)),
+                        interpolation=cv2.INTER_LINEAR)
+    return newImg
+
+
+def get_preds_fromhm(hm, center=None, scale=None):
+    """Obtain (x,y) coordinates given a set of N heatmaps. If the center
+    and the scale is provided the function will return the points also in
+    the original coordinate frame. 
+ + Arguments: + hm {torch.tensor} -- the predicted heatmaps, of shape [B, N, W, H] + + Keyword Arguments: + center {torch.tensor} -- the center of the bounding box (default: {None}) + scale {float} -- face scale (default: {None}) + """ + max, idx = torch.max( + hm.view(hm.size(0), hm.size(1), hm.size(2) * hm.size(3)), 2) + idx += 1 + preds = idx.view(idx.size(0), idx.size(1), 1).repeat(1, 1, 2).float() + preds[..., 0].apply_(lambda x: (x - 1) % hm.size(3) + 1) + preds[..., 1].add_(-1).div_(hm.size(2)).floor_().add_(1) + + for i in range(preds.size(0)): + for j in range(preds.size(1)): + hm_ = hm[i, j, :] + pX, pY = int(preds[i, j, 0]) - 1, int(preds[i, j, 1]) - 1 + if pX > 0 and pX < 63 and pY > 0 and pY < 63: + diff = torch.FloatTensor( + [hm_[pY, pX + 1] - hm_[pY, pX - 1], + hm_[pY + 1, pX] - hm_[pY - 1, pX]]) + preds[i, j].add_(diff.sign_().mul_(.25)) + + preds.add_(-.5) + + preds_orig = torch.zeros(preds.size()) + if center is not None and scale is not None: + for i in range(hm.size(0)): + for j in range(hm.size(1)): + preds_orig[i, j] = transform( + preds[i, j], center, scale, hm.size(2), True) + + return preds, preds_orig + +def get_preds_fromhm_batch(hm, centers=None, scales=None): + """Obtain (x,y) coordinates given a set of N heatmaps. If the centers + and the scales is provided the function will return the points also in + the original coordinate frame. 
+ + Arguments: + hm {torch.tensor} -- the predicted heatmaps, of shape [B, N, W, H] + + Keyword Arguments: + centers {torch.tensor} -- the centers of the bounding box (default: {None}) + scales {float} -- face scales (default: {None}) + """ + max, idx = torch.max( + hm.view(hm.size(0), hm.size(1), hm.size(2) * hm.size(3)), 2) + idx += 1 + preds = idx.view(idx.size(0), idx.size(1), 1).repeat(1, 1, 2).float() + preds[..., 0].apply_(lambda x: (x - 1) % hm.size(3) + 1) + preds[..., 1].add_(-1).div_(hm.size(2)).floor_().add_(1) + + for i in range(preds.size(0)): + for j in range(preds.size(1)): + hm_ = hm[i, j, :] + pX, pY = int(preds[i, j, 0]) - 1, int(preds[i, j, 1]) - 1 + if pX > 0 and pX < 63 and pY > 0 and pY < 63: + diff = torch.FloatTensor( + [hm_[pY, pX + 1] - hm_[pY, pX - 1], + hm_[pY + 1, pX] - hm_[pY - 1, pX]]) + preds[i, j].add_(diff.sign_().mul_(.25)) + + preds.add_(-.5) + + preds_orig = torch.zeros(preds.size()) + if centers is not None and scales is not None: + for i in range(hm.size(0)): + for j in range(hm.size(1)): + preds_orig[i, j] = transform( + preds[i, j], centers[i], scales[i], hm.size(2), True) + + return preds, preds_orig + +def shuffle_lr(parts, pairs=None): + """Shuffle the points left-right according to the axis of symmetry + of the object. + + Arguments: + parts {torch.tensor} -- a 3D or 4D object containing the + heatmaps. + + Keyword Arguments: + pairs {list of integers} -- [order of the flipped points] (default: {None}) + """ + if pairs is None: + pairs = [16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, + 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 27, 28, 29, 30, 35, + 34, 33, 32, 31, 45, 44, 43, 42, 47, 46, 39, 38, 37, 36, 41, + 40, 54, 53, 52, 51, 50, 49, 48, 59, 58, 57, 56, 55, 64, 63, + 62, 61, 60, 67, 66, 65] + if parts.ndimension() == 3: + parts = parts[pairs, ...] + else: + parts = parts[:, pairs, ...] 
+ + return parts + + +def flip(tensor, is_label=False): + """Flip an image or a set of heatmaps left-right + + Arguments: + tensor {numpy.array or torch.tensor} -- [the input image or heatmaps] + + Keyword Arguments: + is_label {bool} -- [denote wherever the input is an image or a set of heatmaps ] (default: {False}) + """ + if not torch.is_tensor(tensor): + tensor = torch.from_numpy(tensor) + + if is_label: + tensor = shuffle_lr(tensor).flip(tensor.ndimension() - 1) + else: + tensor = tensor.flip(tensor.ndimension() - 1) + + return tensor + +# From pyzolib/paths.py (https://bitbucket.org/pyzo/pyzolib/src/tip/paths.py) + + +def appdata_dir(appname=None, roaming=False): + """ appdata_dir(appname=None, roaming=False) + + Get the path to the application directory, where applications are allowed + to write user specific files (e.g. configurations). For non-user specific + data, consider using common_appdata_dir(). + If appname is given, a subdir is appended (and created if necessary). + If roaming is True, will prefer a roaming directory (Windows Vista/7). 
+ """ + + # Define default user directory + userDir = os.getenv('FACEALIGNMENT_USERDIR', None) + if userDir is None: + userDir = os.path.expanduser('~') + if not os.path.isdir(userDir): # pragma: no cover + userDir = '/var/tmp' # issue #54 + + # Get system app data dir + path = None + if sys.platform.startswith('win'): + path1, path2 = os.getenv('LOCALAPPDATA'), os.getenv('APPDATA') + path = (path2 or path1) if roaming else (path1 or path2) + elif sys.platform.startswith('darwin'): + path = os.path.join(userDir, 'Library', 'Application Support') + # On Linux and as fallback + if not (path and os.path.isdir(path)): + path = userDir + + # Maybe we should store things local to the executable (in case of a + # portable distro or a frozen application that wants to be portable) + prefix = sys.prefix + if getattr(sys, 'frozen', None): + prefix = os.path.abspath(os.path.dirname(sys.executable)) + for reldir in ('settings', '../settings'): + localpath = os.path.abspath(os.path.join(prefix, reldir)) + if os.path.isdir(localpath): # pragma: no cover + try: + open(os.path.join(localpath, 'test.write'), 'wb').close() + os.remove(os.path.join(localpath, 'test.write')) + except IOError: + pass # We cannot write in this directory + else: + path = localpath + break + + # Get path specific for this app + if appname: + if path == userDir: + appname = '.' + appname.lstrip('.') # Make it a hidden directory + path = os.path.join(path, appname) + if not os.path.isdir(path): # pragma: no cover + os.mkdir(path) + + # Done + return path diff --git a/Wav2Lip-master/filelists/README.md b/Wav2Lip-master/filelists/README.md new file mode 100644 index 00000000..e7d7e7bb --- /dev/null +++ b/Wav2Lip-master/filelists/README.md @@ -0,0 +1 @@ +Place LRS2 (and any other) filelists here for training. 
\ No newline at end of file diff --git a/Wav2Lip-master/get_frames.py b/Wav2Lip-master/get_frames.py new file mode 100644 index 00000000..eb5b4998 --- /dev/null +++ b/Wav2Lip-master/get_frames.py @@ -0,0 +1,67 @@ +import cv2 # 导入OpenCV库,用于视频处理 +import os # 导入os库,用于文件和目录操作 +import concurrent.futures # 导入并发库,用于多线程处理 +import time # 导入时间库,用于计算时间 +import argparse # 导入argparse库,用于命令行参数解析 + + +# 定义保存帧的函数 +def save_frame(frame, frame_filename): + cv2.imwrite(frame_filename, frame) # 使用OpenCV保存帧到文件 + + +# 定义提取视频帧的函数 +def extract_frames(video_path, output_folder, frame_interval, max_threads): + cap = cv2.VideoCapture(video_path) # 打开视频文件 + frame_count = 0 # 初始化帧计数器 + futures = [] # 初始化futures列表,用于存储线程任务 + + start_time = time.time() # 记录开始时间 + + # 使用ThreadPoolExecutor创建一个线程池,最大线程数为max_threads + with concurrent.futures.ThreadPoolExecutor(max_workers=max_threads) as executor: + while cap.isOpened(): # 当视频文件打开时 + ret, frame = cap.read() # 读取一帧 + if not ret: # 如果读取失败,跳出循环 + break + + if frame_count % frame_interval == 0: # 每隔frame_interval帧处理一次 + seconds = frame_count // frame_interval # 计算当前帧对应的秒数 + frame_filename = os.path.join(output_folder, f'frame_{seconds:04d}.jpg') # 构建帧文件名 + futures.append(executor.submit(save_frame, frame, frame_filename)) # 提交保存帧的任务到线程池 + + frame_count += 1 # 增加帧计数器 + + cap.release() # 释放视频文件 + + for future in concurrent.futures.as_completed(futures): # 等待所有线程完成 + future.result() # 获取线程结果 + + end_time = time.time() # 记录结束时间 + elapsed_time = end_time - start_time # 计算执行时间 + + print(f'提取完成,共提取了 {frame_count // frame_interval} 帧') # 输出提取的帧数 + print(f'程序执行时间: {elapsed_time:.2f} 秒') # 输出程序执行时间 + + +# 创建文件夹保存帧 +output_folder = 'video_frames' +os.makedirs(output_folder, exist_ok=True) # 如果文件夹不存在,则创建 + +# 使用argparse解析命令行参数 +parser = argparse.ArgumentParser(description='Extract frames from a video file.') +parser.add_argument('video_path', type=str, help='Path to the video file.') +parser.add_argument('--output_folder', type=str, default='video_frames', 
class HParams:
    """Minimal hyperparameter container: stored keys read back as attributes."""

    def __init__(self, **kwargs):
        # Every hyperparameter lives in this dict; attribute access proxies into it.
        self.data = dict(kwargs)

    def __getattr__(self, key):
        # Fires only when normal attribute lookup fails, i.e. for hparam names.
        if key in self.data:
            return self.data[key]
        raise AttributeError("'HParams' object has no attribute %s" % key)

    def set_hparam(self, key, value):
        # Explicit mutator; training code flips values (e.g. syncnet_wt) mid-run.
        self.data[key] = value
+ use_lws=False, + + n_fft=800, # Extra window size is filled with 0 paddings to match this parameter + hop_size=200, # For 16000Hz, 200 = 12.5 ms (0.0125 * sample_rate) + win_size=800, # For 16000Hz, 800 = 50 ms (If None, win_size = n_fft) (0.05 * sample_rate) + sample_rate=16000, # 16000Hz (corresponding to librispeech) (sox --i ) + + frame_shift_ms=None, # Can replace hop_size parameter. (Recommended: 12.5) + + # Mel and Linear spectrograms normalization/scaling and clipping + signal_normalization=True, + # Whether to normalize mel spectrograms to some predefined range (following below parameters) + allow_clipping_in_normalization=True, # Only relevant if mel_normalization = True + symmetric_mels=True, + # Whether to scale the data to be symmetric around 0. (Also multiplies the output range by 2, + # faster and cleaner convergence) + max_abs_value=4., + # max absolute value of data. If symmetric, data will be [-max, max] else [0, max] (Must not + # be too big to avoid gradient explosion, + # not too small for fast convergence) + # Contribution by @begeekmyfriend + # Spectrogram Pre-Emphasis (Lfilter: Reduce spectrogram noise and helps model certitude + # levels. Also allows for better G&L phase reconstruction) + preemphasize=True, # whether to apply filter + preemphasis=0.97, # filter coefficient. + + # Limits + min_level_db=-100, + ref_level_db=20, + fmin=55, + # Set this to 55 if your speaker is male! if female, 95 should help taking off noise. (To + # test depending on dataset. Pitch info: male~[65, 260], female~[100, 525]) + fmax=7600, # To be increased/reduced depending on data. 
def hparams_debug_string():
    """Return a human-readable dump of all hyperparameters for logging.

    Bug fix: the original called ``hparams.values()``, but ``HParams`` defines
    no ``values()`` method — its ``__getattr__`` raises AttributeError for any
    key not stored in ``data`` — so this function always crashed. Read the
    backing ``data`` dict directly instead.
    """
    values = hparams.data
    # "sentences" can be a huge list; keep it out of the printed summary.
    hp = [" %s: %s" % (name, values[name]) for name in sorted(values) if name != "sentences"]
    return "Hyperparameters:\n" + "\n".join(hp)
discriminator', required=True, type=str) + +parser.add_argument('--checkpoint_path', help='Resume generator from this checkpoint', default=None, type=str) +parser.add_argument('--disc_checkpoint_path', help='Resume quality disc from this checkpoint', default=None, type=str) + +args = parser.parse_args() + + +global_step = 0 +global_epoch = 0 +use_cuda = torch.cuda.is_available() +print('use_cuda: {}'.format(use_cuda)) + +syncnet_T = 5 +syncnet_mel_step_size = 16 + +class Dataset(object): + def __init__(self, split): + self.all_videos = get_image_list(args.data_root, split) + + def get_frame_id(self, frame): + return int(basename(frame).split('.')[0]) + + def get_window(self, start_frame): + start_id = self.get_frame_id(start_frame) + vidname = dirname(start_frame) + + window_fnames = [] + for frame_id in range(start_id, start_id + syncnet_T): + frame = join(vidname, '{}.jpg'.format(frame_id)) + if not isfile(frame): + return None + window_fnames.append(frame) + return window_fnames + + def read_window(self, window_fnames): + if window_fnames is None: return None + window = [] + for fname in window_fnames: + img = cv2.imread(fname) + if img is None: + return None + try: + img = cv2.resize(img, (hparams.img_size, hparams.img_size)) + except Exception as e: + return None + + window.append(img) + + return window + + def crop_audio_window(self, spec, start_frame): + if type(start_frame) == int: + start_frame_num = start_frame + else: + start_frame_num = self.get_frame_id(start_frame) + start_idx = int(80. 
* (start_frame_num / float(hparams.fps))) + + end_idx = start_idx + syncnet_mel_step_size + + return spec[start_idx : end_idx, :] + + def get_segmented_mels(self, spec, start_frame): + mels = [] + assert syncnet_T == 5 + start_frame_num = self.get_frame_id(start_frame) + 1 # 0-indexing ---> 1-indexing + if start_frame_num - 2 < 0: return None + for i in range(start_frame_num, start_frame_num + syncnet_T): + m = self.crop_audio_window(spec, i - 2) + if m.shape[0] != syncnet_mel_step_size: + return None + mels.append(m.T) + + mels = np.asarray(mels) + + return mels + + def prepare_window(self, window): + # 3 x T x H x W + x = np.asarray(window) / 255. + x = np.transpose(x, (3, 0, 1, 2)) + + return x + + def __len__(self): + return len(self.all_videos) + + def __getitem__(self, idx): + while 1: + idx = random.randint(0, len(self.all_videos) - 1) + vidname = self.all_videos[idx] + img_names = list(glob(join(vidname, '*.jpg'))) + if len(img_names) <= 3 * syncnet_T: + continue + + img_name = random.choice(img_names) + wrong_img_name = random.choice(img_names) + while wrong_img_name == img_name: + wrong_img_name = random.choice(img_names) + + window_fnames = self.get_window(img_name) + wrong_window_fnames = self.get_window(wrong_img_name) + if window_fnames is None or wrong_window_fnames is None: + continue + + window = self.read_window(window_fnames) + if window is None: + continue + + wrong_window = self.read_window(wrong_window_fnames) + if wrong_window is None: + continue + + try: + wavpath = join(vidname, "audio.wav") + wav = audio.load_wav(wavpath, hparams.sample_rate) + + orig_mel = audio.melspectrogram(wav).T + except Exception as e: + continue + + mel = self.crop_audio_window(orig_mel.copy(), img_name) + + if (mel.shape[0] != syncnet_mel_step_size): + continue + + indiv_mels = self.get_segmented_mels(orig_mel.copy(), img_name) + if indiv_mels is None: continue + + window = self.prepare_window(window) + y = window.copy() + window[:, :, window.shape[2]//2:] = 0. 
def cosine_loss(a, v, y):
    """BCE between the audio/visual embedding cosine similarity and labels y.

    `y` is 1 for in-sync pairs, 0 for off-sync; similarity is treated as the
    predicted probability of being in sync.
    """
    sims = nn.functional.cosine_similarity(a, v).unsqueeze(1)  # (B, 1) to match y
    return logloss(sims, y)
+ prog_bar = tqdm(enumerate(train_data_loader)) + for step, (x, indiv_mels, mel, gt) in prog_bar: + disc.train() + model.train() + + x = x.to(device) + mel = mel.to(device) + indiv_mels = indiv_mels.to(device) + gt = gt.to(device) + + ### Train generator now. Remove ALL grads. + optimizer.zero_grad() + disc_optimizer.zero_grad() + + g = model(indiv_mels, x) + + if hparams.syncnet_wt > 0.: + sync_loss = get_sync_loss(mel, g) + else: + sync_loss = 0. + + if hparams.disc_wt > 0.: + perceptual_loss = disc.perceptual_forward(g) + else: + perceptual_loss = 0. + + l1loss = recon_loss(g, gt) + + loss = hparams.syncnet_wt * sync_loss + hparams.disc_wt * perceptual_loss + \ + (1. - hparams.syncnet_wt - hparams.disc_wt) * l1loss + + loss.backward() + optimizer.step() + + ### Remove all gradients before Training disc + disc_optimizer.zero_grad() + + pred = disc(gt) + disc_real_loss = F.binary_cross_entropy(pred, torch.ones((len(pred), 1)).to(device)) + disc_real_loss.backward() + + pred = disc(g.detach()) + disc_fake_loss = F.binary_cross_entropy(pred, torch.zeros((len(pred), 1)).to(device)) + disc_fake_loss.backward() + + disc_optimizer.step() + + running_disc_real_loss += disc_real_loss.item() + running_disc_fake_loss += disc_fake_loss.item() + + if global_step % checkpoint_interval == 0: + save_sample_images(x, g, gt, global_step, checkpoint_dir) + + # Logs + global_step += 1 + cur_session_steps = global_step - resumed_step + + running_l1_loss += l1loss.item() + if hparams.syncnet_wt > 0.: + running_sync_loss += sync_loss.item() + else: + running_sync_loss += 0. + + if hparams.disc_wt > 0.: + running_perceptual_loss += perceptual_loss.item() + else: + running_perceptual_loss += 0. 
+ + if global_step == 1 or global_step % checkpoint_interval == 0: + save_checkpoint( + model, optimizer, global_step, checkpoint_dir, global_epoch) + save_checkpoint(disc, disc_optimizer, global_step, checkpoint_dir, global_epoch, prefix='disc_') + + + if global_step % hparams.eval_interval == 0: + with torch.no_grad(): + average_sync_loss = eval_model(test_data_loader, global_step, device, model, disc) + + if average_sync_loss < .75: + hparams.set_hparam('syncnet_wt', 0.03) + + prog_bar.set_description('L1: {}, Sync: {}, Percep: {} | Fake: {}, Real: {}'.format(running_l1_loss / (step + 1), + running_sync_loss / (step + 1), + running_perceptual_loss / (step + 1), + running_disc_fake_loss / (step + 1), + running_disc_real_loss / (step + 1))) + + global_epoch += 1 + +def eval_model(test_data_loader, global_step, device, model, disc): + eval_steps = 300 + print('Evaluating for {} steps'.format(eval_steps)) + running_sync_loss, running_l1_loss, running_disc_real_loss, running_disc_fake_loss, running_perceptual_loss = [], [], [], [], [] + while 1: + for step, (x, indiv_mels, mel, gt) in enumerate((test_data_loader)): + model.eval() + disc.eval() + + x = x.to(device) + mel = mel.to(device) + indiv_mels = indiv_mels.to(device) + gt = gt.to(device) + + pred = disc(gt) + disc_real_loss = F.binary_cross_entropy(pred, torch.ones((len(pred), 1)).to(device)) + + g = model(indiv_mels, x) + pred = disc(g) + disc_fake_loss = F.binary_cross_entropy(pred, torch.zeros((len(pred), 1)).to(device)) + + running_disc_real_loss.append(disc_real_loss.item()) + running_disc_fake_loss.append(disc_fake_loss.item()) + + sync_loss = get_sync_loss(mel, g) + + if hparams.disc_wt > 0.: + perceptual_loss = disc.perceptual_forward(g) + else: + perceptual_loss = 0. + + l1loss = recon_loss(g, gt) + + loss = hparams.syncnet_wt * sync_loss + hparams.disc_wt * perceptual_loss + \ + (1. 
def save_checkpoint(model, optimizer, step, checkpoint_dir, epoch, prefix=''):
    """Serialize model weights, optimizer state and training counters.

    Fix: the filename previously interpolated the module-level ``global_step``
    instead of the ``step`` argument — harmless only because every caller
    happens to pass ``global_step``. Use the explicit parameter so the
    function does what its signature says.
    """
    checkpoint_path = join(
        checkpoint_dir, "{}checkpoint_step{:09d}.pth".format(prefix, step))
    # Optimizer state can be large; saving it is optional via hparams.
    optimizer_state = optimizer.state_dict() if hparams.save_optimizer_state else None
    torch.save({
        "state_dict": model.state_dict(),
        "optimizer": optimizer_state,
        "global_step": step,
        "global_epoch": epoch,
    }, checkpoint_path)
    print("Saved checkpoint:", checkpoint_path)
= checkpoint["global_step"] + global_epoch = checkpoint["global_epoch"] + + return model + +if __name__ == "__main__": + checkpoint_dir = args.checkpoint_dir + + # Dataset and Dataloader setup + train_dataset = Dataset('train') + test_dataset = Dataset('val') + + train_data_loader = data_utils.DataLoader( + train_dataset, batch_size=hparams.batch_size, shuffle=True, + num_workers=hparams.num_workers) + + test_data_loader = data_utils.DataLoader( + test_dataset, batch_size=hparams.batch_size, + num_workers=4) + + device = torch.device("cuda" if use_cuda else "cpu") + + # Model + model = Wav2Lip().to(device) + disc = Wav2Lip_disc_qual().to(device) + + print('total trainable params {}'.format(sum(p.numel() for p in model.parameters() if p.requires_grad))) + print('total DISC trainable params {}'.format(sum(p.numel() for p in disc.parameters() if p.requires_grad))) + + optimizer = optim.Adam([p for p in model.parameters() if p.requires_grad], + lr=hparams.initial_learning_rate, betas=(0.5, 0.999)) + disc_optimizer = optim.Adam([p for p in disc.parameters() if p.requires_grad], + lr=hparams.disc_initial_learning_rate, betas=(0.5, 0.999)) + + if args.checkpoint_path is not None: + load_checkpoint(args.checkpoint_path, model, optimizer, reset_optimizer=False) + + if args.disc_checkpoint_path is not None: + load_checkpoint(args.disc_checkpoint_path, disc, disc_optimizer, + reset_optimizer=False, overwrite_global_states=False) + + load_checkpoint(args.syncnet_checkpoint_path, syncnet, None, reset_optimizer=True, + overwrite_global_states=False) + + if not os.path.exists(checkpoint_dir): + os.mkdir(checkpoint_dir) + + # Train! 
+ train(device, model, disc, train_data_loader, test_data_loader, optimizer, disc_optimizer, + checkpoint_dir=checkpoint_dir, + checkpoint_interval=hparams.checkpoint_interval, + nepochs=hparams.nepochs) diff --git a/Wav2Lip-master/inference.py b/Wav2Lip-master/inference.py new file mode 100644 index 00000000..af68dd7b --- /dev/null +++ b/Wav2Lip-master/inference.py @@ -0,0 +1,280 @@ +from os import listdir, path +import numpy as np +import scipy, cv2, os, sys, argparse, audio +import json, subprocess, random, string +from tqdm import tqdm +from glob import glob +import torch, face_detection +from models import Wav2Lip +import platform + +parser = argparse.ArgumentParser(description='Inference code to lip-sync videos in the wild using Wav2Lip models') + +parser.add_argument('--checkpoint_path', type=str, + help='Name of saved checkpoint to load weights from', required=True) + +parser.add_argument('--face', type=str, + help='Filepath of video/image that contains faces to use', required=True) +parser.add_argument('--audio', type=str, + help='Filepath of video/audio file to use as raw audio source', required=True) +parser.add_argument('--outfile', type=str, help='Video path to save result. See default for an e.g.', + default='results/result_voice.mp4') + +parser.add_argument('--static', type=bool, + help='If True, then use only first video frame for inference', default=False) +parser.add_argument('--fps', type=float, help='Can be specified only if input is a static image (default: 25)', + default=25., required=False) + +parser.add_argument('--pads', nargs='+', type=int, default=[0, 10, 0, 0], + help='Padding (top, bottom, left, right). 
Please adjust to include chin at least') + +parser.add_argument('--face_det_batch_size', type=int, + help='Batch size for face detection', default=1) +parser.add_argument('--wav2lip_batch_size', type=int, help='Batch size for Wav2Lip model(s)', default=128) + +parser.add_argument('--resize_factor', default=1, type=int, + help='Reduce the resolution by this factor. Sometimes, best results are obtained at 480p or 720p') + +parser.add_argument('--crop', nargs='+', type=int, default=[0, -1, 0, -1], + help='Crop video to a smaller region (top, bottom, left, right). Applied after resize_factor and rotate arg. ' + 'Useful if multiple face present. -1 implies the value will be auto-inferred based on height, width') + +parser.add_argument('--box', nargs='+', type=int, default=[-1, -1, -1, -1], + help='Specify a constant bounding box for the face. Use only as a last resort if the face is not detected.' + 'Also, might work only if the face is not moving around much. Syntax: (top, bottom, left, right).') + +parser.add_argument('--rotate', default=False, action='store_true', + help='Sometimes videos taken from a phone can be flipped 90deg. If true, will flip video right by 90deg.' 
def get_smoothened_boxes(boxes, T):
    """Temporally smooth face boxes in place.

    Each box is replaced by the mean of a forward-looking window of up to T
    boxes; near the end of the sequence the window falls back to the last T
    entries so it keeps its size. Smoothing is progressive: already-smoothed
    boxes feed into later windows. Returns the (mutated) input array.
    """
    n = len(boxes)
    for i in range(n):
        window = boxes[n - T:] if i + T > n else boxes[i:i + T]
        boxes[i] = np.mean(window, axis=0)
    return boxes
Ensure the video contains a face in all the frames.') + + y1 = max(0, rect[1] - pady1) + y2 = min(image.shape[0], rect[3] + pady2) + x1 = max(0, rect[0] - padx1) + x2 = min(image.shape[1], rect[2] + padx2) + + results.append([x1, y1, x2, y2]) + + boxes = np.array(results) + if not args.nosmooth: boxes = get_smoothened_boxes(boxes, T=5) + results = [[image[y1: y2, x1:x2], (y1, y2, x1, x2)] for image, (x1, y1, x2, y2) in zip(images, boxes)] + + del detector + return results + +def datagen(frames, mels): + img_batch, mel_batch, frame_batch, coords_batch = [], [], [], [] + + if args.box[0] == -1: + if not args.static: + face_det_results = face_detect(frames) # BGR2RGB for CNN face detection + else: + face_det_results = face_detect([frames[0]]) + else: + print('Using the specified bounding box instead of face detection...') + y1, y2, x1, x2 = args.box + face_det_results = [[f[y1: y2, x1:x2], (y1, y2, x1, x2)] for f in frames] + + for i, m in enumerate(mels): + idx = 0 if args.static else i%len(frames) + frame_to_save = frames[idx].copy() + face, coords = face_det_results[idx].copy() + + face = cv2.resize(face, (args.img_size, args.img_size)) + + img_batch.append(face) + mel_batch.append(m) + frame_batch.append(frame_to_save) + coords_batch.append(coords) + + if len(img_batch) >= args.wav2lip_batch_size: + img_batch, mel_batch = np.asarray(img_batch), np.asarray(mel_batch) + + img_masked = img_batch.copy() + img_masked[:, args.img_size//2:] = 0 + + img_batch = np.concatenate((img_masked, img_batch), axis=3) / 255. + mel_batch = np.reshape(mel_batch, [len(mel_batch), mel_batch.shape[1], mel_batch.shape[2], 1]) + + yield img_batch, mel_batch, frame_batch, coords_batch + img_batch, mel_batch, frame_batch, coords_batch = [], [], [], [] + + if len(img_batch) > 0: + img_batch, mel_batch = np.asarray(img_batch), np.asarray(mel_batch) + + img_masked = img_batch.copy() + img_masked[:, args.img_size//2:] = 0 + + img_batch = np.concatenate((img_masked, img_batch), axis=3) / 255. 
def _load(checkpoint_path):
    """Deserialize a torch checkpoint, remapping storages to CPU when the
    module-level `device` is not 'cuda' (e.g. GPU-trained weights on a CPU box)."""
    if device == 'cuda':
        return torch.load(checkpoint_path)
    return torch.load(checkpoint_path,
                      map_location=lambda storage, loc: storage)
+ + wav = audio.load_wav(args.audio, 16000) + mel = audio.melspectrogram(wav) + print(mel.shape) + + if np.isnan(mel.reshape(-1)).sum() > 0: + raise ValueError('Mel contains nan! Using a TTS voice? Add a small epsilon noise to the wav file and try again') + + mel_chunks = [] + mel_idx_multiplier = 80./fps + i = 0 + while 1: + start_idx = int(i * mel_idx_multiplier) + if start_idx + mel_step_size > len(mel[0]): + mel_chunks.append(mel[:, len(mel[0]) - mel_step_size:]) + break + mel_chunks.append(mel[:, start_idx : start_idx + mel_step_size]) + i += 1 + + print("Length of mel chunks: {}".format(len(mel_chunks))) + + full_frames = full_frames[:len(mel_chunks)] + + batch_size = args.wav2lip_batch_size + gen = datagen(full_frames.copy(), mel_chunks) + + for i, (img_batch, mel_batch, frames, coords) in enumerate(tqdm(gen, + total=int(np.ceil(float(len(mel_chunks))/batch_size)))): + if i == 0: + model = load_model(args.checkpoint_path) + print ("Model loaded") + + frame_h, frame_w = full_frames[0].shape[:-1] + out = cv2.VideoWriter('temp/result.avi', + cv2.VideoWriter_fourcc(*'DIVX'), fps, (frame_w, frame_h)) + + img_batch = torch.FloatTensor(np.transpose(img_batch, (0, 3, 1, 2))).to(device) + mel_batch = torch.FloatTensor(np.transpose(mel_batch, (0, 3, 1, 2))).to(device) + + with torch.no_grad(): + pred = model(mel_batch, img_batch) + + pred = pred.cpu().numpy().transpose(0, 2, 3, 1) * 255. 
class Conv2d(nn.Module):
    """Conv2d -> BatchNorm2d -> ReLU block with an optional identity skip.

    Attribute names (`conv_block`, `act`) are load-bearing: pretrained
    checkpoints key their state_dict on them, so they must not change.
    """

    def __init__(self, cin, cout, kernel_size, stride, padding, residual=False, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.conv_block = nn.Sequential(
            nn.Conv2d(cin, cout, kernel_size, stride, padding),
            nn.BatchNorm2d(cout),
        )
        self.act = nn.ReLU()
        self.residual = residual

    def forward(self, x):
        y = self.conv_block(x)
        if self.residual:
            # Skip connection is applied before the activation.
            y = y + x
        return self.act(y)
nn.ConvTranspose2d(cin, cout, kernel_size, stride, padding, output_padding), + nn.BatchNorm2d(cout) + ) + self.act = nn.ReLU() + + def forward(self, x): + out = self.conv_block(x) + return self.act(out) diff --git a/Wav2Lip-master/models/syncnet.py b/Wav2Lip-master/models/syncnet.py new file mode 100644 index 00000000..e773cdca --- /dev/null +++ b/Wav2Lip-master/models/syncnet.py @@ -0,0 +1,66 @@ +import torch +from torch import nn +from torch.nn import functional as F + +from .conv import Conv2d + +class SyncNet_color(nn.Module): + def __init__(self): + super(SyncNet_color, self).__init__() + + self.face_encoder = nn.Sequential( + Conv2d(15, 32, kernel_size=(7, 7), stride=1, padding=3), + + Conv2d(32, 64, kernel_size=5, stride=(1, 2), padding=1), + Conv2d(64, 64, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(64, 64, kernel_size=3, stride=1, padding=1, residual=True), + + Conv2d(64, 128, kernel_size=3, stride=2, padding=1), + Conv2d(128, 128, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(128, 128, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(128, 128, kernel_size=3, stride=1, padding=1, residual=True), + + Conv2d(128, 256, kernel_size=3, stride=2, padding=1), + Conv2d(256, 256, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(256, 256, kernel_size=3, stride=1, padding=1, residual=True), + + Conv2d(256, 512, kernel_size=3, stride=2, padding=1), + Conv2d(512, 512, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(512, 512, kernel_size=3, stride=1, padding=1, residual=True), + + Conv2d(512, 512, kernel_size=3, stride=2, padding=1), + Conv2d(512, 512, kernel_size=3, stride=1, padding=0), + Conv2d(512, 512, kernel_size=1, stride=1, padding=0),) + + self.audio_encoder = nn.Sequential( + Conv2d(1, 32, kernel_size=3, stride=1, padding=1), + Conv2d(32, 32, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(32, 32, kernel_size=3, stride=1, padding=1, residual=True), + + Conv2d(32, 64, 
kernel_size=3, stride=(3, 1), padding=1), + Conv2d(64, 64, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(64, 64, kernel_size=3, stride=1, padding=1, residual=True), + + Conv2d(64, 128, kernel_size=3, stride=3, padding=1), + Conv2d(128, 128, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(128, 128, kernel_size=3, stride=1, padding=1, residual=True), + + Conv2d(128, 256, kernel_size=3, stride=(3, 2), padding=1), + Conv2d(256, 256, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(256, 256, kernel_size=3, stride=1, padding=1, residual=True), + + Conv2d(256, 512, kernel_size=3, stride=1, padding=0), + Conv2d(512, 512, kernel_size=1, stride=1, padding=0),) + + def forward(self, audio_sequences, face_sequences): # audio_sequences := (B, dim, T) + face_embedding = self.face_encoder(face_sequences) + audio_embedding = self.audio_encoder(audio_sequences) + + audio_embedding = audio_embedding.view(audio_embedding.size(0), -1) + face_embedding = face_embedding.view(face_embedding.size(0), -1) + + audio_embedding = F.normalize(audio_embedding, p=2, dim=1) + face_embedding = F.normalize(face_embedding, p=2, dim=1) + + + return audio_embedding, face_embedding diff --git a/Wav2Lip-master/models/wav2lip.py b/Wav2Lip-master/models/wav2lip.py new file mode 100644 index 00000000..ae5d6919 --- /dev/null +++ b/Wav2Lip-master/models/wav2lip.py @@ -0,0 +1,184 @@ +import torch +from torch import nn +from torch.nn import functional as F +import math + +from .conv import Conv2dTranspose, Conv2d, nonorm_Conv2d + +class Wav2Lip(nn.Module): + def __init__(self): + super(Wav2Lip, self).__init__() + + self.face_encoder_blocks = nn.ModuleList([ + nn.Sequential(Conv2d(6, 16, kernel_size=7, stride=1, padding=3)), # 96,96 + + nn.Sequential(Conv2d(16, 32, kernel_size=3, stride=2, padding=1), # 48,48 + Conv2d(32, 32, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(32, 32, kernel_size=3, stride=1, padding=1, residual=True)), + + 
nn.Sequential(Conv2d(32, 64, kernel_size=3, stride=2, padding=1), # 24,24 + Conv2d(64, 64, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(64, 64, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(64, 64, kernel_size=3, stride=1, padding=1, residual=True)), + + nn.Sequential(Conv2d(64, 128, kernel_size=3, stride=2, padding=1), # 12,12 + Conv2d(128, 128, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(128, 128, kernel_size=3, stride=1, padding=1, residual=True)), + + nn.Sequential(Conv2d(128, 256, kernel_size=3, stride=2, padding=1), # 6,6 + Conv2d(256, 256, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(256, 256, kernel_size=3, stride=1, padding=1, residual=True)), + + nn.Sequential(Conv2d(256, 512, kernel_size=3, stride=2, padding=1), # 3,3 + Conv2d(512, 512, kernel_size=3, stride=1, padding=1, residual=True),), + + nn.Sequential(Conv2d(512, 512, kernel_size=3, stride=1, padding=0), # 1, 1 + Conv2d(512, 512, kernel_size=1, stride=1, padding=0)),]) + + self.audio_encoder = nn.Sequential( + Conv2d(1, 32, kernel_size=3, stride=1, padding=1), + Conv2d(32, 32, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(32, 32, kernel_size=3, stride=1, padding=1, residual=True), + + Conv2d(32, 64, kernel_size=3, stride=(3, 1), padding=1), + Conv2d(64, 64, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(64, 64, kernel_size=3, stride=1, padding=1, residual=True), + + Conv2d(64, 128, kernel_size=3, stride=3, padding=1), + Conv2d(128, 128, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(128, 128, kernel_size=3, stride=1, padding=1, residual=True), + + Conv2d(128, 256, kernel_size=3, stride=(3, 2), padding=1), + Conv2d(256, 256, kernel_size=3, stride=1, padding=1, residual=True), + + Conv2d(256, 512, kernel_size=3, stride=1, padding=0), + Conv2d(512, 512, kernel_size=1, stride=1, padding=0),) + + self.face_decoder_blocks = nn.ModuleList([ + nn.Sequential(Conv2d(512, 512, kernel_size=1, stride=1, 
padding=0),), + + nn.Sequential(Conv2dTranspose(1024, 512, kernel_size=3, stride=1, padding=0), # 3,3 + Conv2d(512, 512, kernel_size=3, stride=1, padding=1, residual=True),), + + nn.Sequential(Conv2dTranspose(1024, 512, kernel_size=3, stride=2, padding=1, output_padding=1), + Conv2d(512, 512, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(512, 512, kernel_size=3, stride=1, padding=1, residual=True),), # 6, 6 + + nn.Sequential(Conv2dTranspose(768, 384, kernel_size=3, stride=2, padding=1, output_padding=1), + Conv2d(384, 384, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(384, 384, kernel_size=3, stride=1, padding=1, residual=True),), # 12, 12 + + nn.Sequential(Conv2dTranspose(512, 256, kernel_size=3, stride=2, padding=1, output_padding=1), + Conv2d(256, 256, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(256, 256, kernel_size=3, stride=1, padding=1, residual=True),), # 24, 24 + + nn.Sequential(Conv2dTranspose(320, 128, kernel_size=3, stride=2, padding=1, output_padding=1), + Conv2d(128, 128, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(128, 128, kernel_size=3, stride=1, padding=1, residual=True),), # 48, 48 + + nn.Sequential(Conv2dTranspose(160, 64, kernel_size=3, stride=2, padding=1, output_padding=1), + Conv2d(64, 64, kernel_size=3, stride=1, padding=1, residual=True), + Conv2d(64, 64, kernel_size=3, stride=1, padding=1, residual=True),),]) # 96,96 + + self.output_block = nn.Sequential(Conv2d(80, 32, kernel_size=3, stride=1, padding=1), + nn.Conv2d(32, 3, kernel_size=1, stride=1, padding=0), + nn.Sigmoid()) + + def forward(self, audio_sequences, face_sequences): + # audio_sequences = (B, T, 1, 80, 16) + B = audio_sequences.size(0) + + input_dim_size = len(face_sequences.size()) + if input_dim_size > 4: + audio_sequences = torch.cat([audio_sequences[:, i] for i in range(audio_sequences.size(1))], dim=0) + face_sequences = torch.cat([face_sequences[:, :, i] for i in range(face_sequences.size(2))], dim=0) + + 
audio_embedding = self.audio_encoder(audio_sequences) # B, 512, 1, 1 + + feats = [] + x = face_sequences + for f in self.face_encoder_blocks: + x = f(x) + feats.append(x) + + x = audio_embedding + for f in self.face_decoder_blocks: + x = f(x) + try: + x = torch.cat((x, feats[-1]), dim=1) + except Exception as e: + print(x.size()) + print(feats[-1].size()) + raise e + + feats.pop() + + x = self.output_block(x) + + if input_dim_size > 4: + x = torch.split(x, B, dim=0) # [(B, C, H, W)] + outputs = torch.stack(x, dim=2) # (B, C, T, H, W) + + else: + outputs = x + + return outputs + +class Wav2Lip_disc_qual(nn.Module): + def __init__(self): + super(Wav2Lip_disc_qual, self).__init__() + + self.face_encoder_blocks = nn.ModuleList([ + nn.Sequential(nonorm_Conv2d(3, 32, kernel_size=7, stride=1, padding=3)), # 48,96 + + nn.Sequential(nonorm_Conv2d(32, 64, kernel_size=5, stride=(1, 2), padding=2), # 48,48 + nonorm_Conv2d(64, 64, kernel_size=5, stride=1, padding=2)), + + nn.Sequential(nonorm_Conv2d(64, 128, kernel_size=5, stride=2, padding=2), # 24,24 + nonorm_Conv2d(128, 128, kernel_size=5, stride=1, padding=2)), + + nn.Sequential(nonorm_Conv2d(128, 256, kernel_size=5, stride=2, padding=2), # 12,12 + nonorm_Conv2d(256, 256, kernel_size=5, stride=1, padding=2)), + + nn.Sequential(nonorm_Conv2d(256, 512, kernel_size=3, stride=2, padding=1), # 6,6 + nonorm_Conv2d(512, 512, kernel_size=3, stride=1, padding=1)), + + nn.Sequential(nonorm_Conv2d(512, 512, kernel_size=3, stride=2, padding=1), # 3,3 + nonorm_Conv2d(512, 512, kernel_size=3, stride=1, padding=1),), + + nn.Sequential(nonorm_Conv2d(512, 512, kernel_size=3, stride=1, padding=0), # 1, 1 + nonorm_Conv2d(512, 512, kernel_size=1, stride=1, padding=0)),]) + + self.binary_pred = nn.Sequential(nn.Conv2d(512, 1, kernel_size=1, stride=1, padding=0), nn.Sigmoid()) + self.label_noise = .0 + + def get_lower_half(self, face_sequences): + return face_sequences[:, :, face_sequences.size(2)//2:] + + def to_2d(self, face_sequences): + B = 
face_sequences.size(0) + face_sequences = torch.cat([face_sequences[:, :, i] for i in range(face_sequences.size(2))], dim=0) + return face_sequences + + def perceptual_forward(self, false_face_sequences): + false_face_sequences = self.to_2d(false_face_sequences) + false_face_sequences = self.get_lower_half(false_face_sequences) + + false_feats = false_face_sequences + for f in self.face_encoder_blocks: + false_feats = f(false_feats) + + false_pred_loss = F.binary_cross_entropy(self.binary_pred(false_feats).view(len(false_feats), -1), + torch.ones((len(false_feats), 1)).cuda()) + + return false_pred_loss + + def forward(self, face_sequences): + face_sequences = self.to_2d(face_sequences) + face_sequences = self.get_lower_half(face_sequences) + + x = face_sequences + for f in self.face_encoder_blocks: + x = f(x) + + return self.binary_pred(x).view(len(x), -1) diff --git a/Wav2Lip-master/preprocess.py b/Wav2Lip-master/preprocess.py new file mode 100644 index 00000000..5322012a --- /dev/null +++ b/Wav2Lip-master/preprocess.py @@ -0,0 +1,113 @@ +import sys + +if sys.version_info[0] < 3 and sys.version_info[1] < 2: + raise Exception("Must be using >= Python 3.2") + +from os import listdir, path + +if not path.isfile('face_detection/detection/sfd/s3fd.pth'): + raise FileNotFoundError('Save the s3fd model to face_detection/detection/sfd/s3fd.pth \ + before running this script!') + +import multiprocessing as mp +from concurrent.futures import ThreadPoolExecutor, as_completed +import numpy as np +import argparse, os, cv2, traceback, subprocess +from tqdm import tqdm +from glob import glob +import audio +from hparams import hparams as hp + +import face_detection + +parser = argparse.ArgumentParser() + +parser.add_argument('--ngpu', help='Number of GPUs across which to run in parallel', default=1, type=int) +parser.add_argument('--batch_size', help='Single GPU Face detection batch size', default=32, type=int) +parser.add_argument("--data_root", help="Root folder of the LRS2 
dataset", required=True) +parser.add_argument("--preprocessed_root", help="Root folder of the preprocessed dataset", required=True) + +args = parser.parse_args() + +fa = [face_detection.FaceAlignment(face_detection.LandmarksType._2D, flip_input=False, + device='cuda:{}'.format(id)) for id in range(args.ngpu)] + +template = 'ffmpeg -loglevel panic -y -i {} -strict -2 {}' +# template2 = 'ffmpeg -hide_banner -loglevel panic -threads 1 -y -i {} -async 1 -ac 1 -vn -acodec pcm_s16le -ar 16000 {}' + +def process_video_file(vfile, args, gpu_id): + video_stream = cv2.VideoCapture(vfile) + + frames = [] + while 1: + still_reading, frame = video_stream.read() + if not still_reading: + video_stream.release() + break + frames.append(frame) + + vidname = os.path.basename(vfile).split('.')[0] + dirname = vfile.split('/')[-2] + + fulldir = path.join(args.preprocessed_root, dirname, vidname) + os.makedirs(fulldir, exist_ok=True) + + batches = [frames[i:i + args.batch_size] for i in range(0, len(frames), args.batch_size)] + + i = -1 + for fb in batches: + preds = fa[gpu_id].get_detections_for_batch(np.asarray(fb)) + + for j, f in enumerate(preds): + i += 1 + if f is None: + continue + + x1, y1, x2, y2 = f + cv2.imwrite(path.join(fulldir, '{}.jpg'.format(i)), fb[j][y1:y2, x1:x2]) + +def process_audio_file(vfile, args): + vidname = os.path.basename(vfile).split('.')[0] + dirname = vfile.split('/')[-2] + + fulldir = path.join(args.preprocessed_root, dirname, vidname) + os.makedirs(fulldir, exist_ok=True) + + wavpath = path.join(fulldir, 'audio.wav') + + command = template.format(vfile, wavpath) + subprocess.call(command, shell=True) + + +def mp_handler(job): + vfile, args, gpu_id = job + try: + process_video_file(vfile, args, gpu_id) + except KeyboardInterrupt: + exit(0) + except: + traceback.print_exc() + +def main(args): + print('Started processing for {} with {} GPUs'.format(args.data_root, args.ngpu)) + + filelist = glob(path.join(args.data_root, '*/*.mp4')) + + jobs = [(vfile, 
args, i%args.ngpu) for i, vfile in enumerate(filelist)] + p = ThreadPoolExecutor(args.ngpu) + futures = [p.submit(mp_handler, j) for j in jobs] + _ = [r.result() for r in tqdm(as_completed(futures), total=len(futures))] + + print('Dumping audios...') + + for vfile in tqdm(filelist): + try: + process_audio_file(vfile, args) + except KeyboardInterrupt: + exit(0) + except: + traceback.print_exc() + continue + +if __name__ == '__main__': + main(args) \ No newline at end of file diff --git a/Wav2Lip-master/requirements.txt b/Wav2Lip-master/requirements.txt new file mode 100644 index 00000000..bc839f29 --- /dev/null +++ b/Wav2Lip-master/requirements.txt @@ -0,0 +1,14 @@ +librosa==0.8.0 +numpy==1.23.3 +opencv-contrib-python +opencv-python +tqdm +numba +pytorch_fid +ffmpeg +torch==2.4.1 +torchvision==0.20.0 +scipy==1.10.1 +pytorch==2.4.1 +pillow +scenedetect==0.6 \ No newline at end of file diff --git a/Wav2Lip-master/results/README.md b/Wav2Lip-master/results/README.md new file mode 100644 index 00000000..b1bbfd53 --- /dev/null +++ b/Wav2Lip-master/results/README.md @@ -0,0 +1 @@ +Generated results will be placed in this folder by default. \ No newline at end of file diff --git a/Wav2Lip-master/temp/README.md b/Wav2Lip-master/temp/README.md new file mode 100644 index 00000000..04c91049 --- /dev/null +++ b/Wav2Lip-master/temp/README.md @@ -0,0 +1 @@ +Temporary files at the time of inference/testing will be saved here. You can ignore them. 
\ No newline at end of file diff --git a/Wav2Lip-master/wav2lip_train.py b/Wav2Lip-master/wav2lip_train.py new file mode 100644 index 00000000..6e081180 --- /dev/null +++ b/Wav2Lip-master/wav2lip_train.py @@ -0,0 +1,374 @@ +from os.path import dirname, join, basename, isfile +from tqdm import tqdm + +from models import SyncNet_color as SyncNet +from models import Wav2Lip as Wav2Lip +import audio + +import torch +from torch import nn +from torch import optim +import torch.backends.cudnn as cudnn +from torch.utils import data as data_utils +import numpy as np + +from glob import glob + +import os, random, cv2, argparse +from hparams import hparams, get_image_list + +parser = argparse.ArgumentParser(description='Code to train the Wav2Lip model without the visual quality discriminator') + +parser.add_argument("--data_root", help="Root folder of the preprocessed LRS2 dataset", required=True, type=str) + +parser.add_argument('--checkpoint_dir', help='Save checkpoints to this directory', required=True, type=str) +parser.add_argument('--syncnet_checkpoint_path', help='Load the pre-trained Expert discriminator', required=True, type=str) + +parser.add_argument('--checkpoint_path', help='Resume from this checkpoint', default=None, type=str) + +args = parser.parse_args() + + +global_step = 0 +global_epoch = 0 +use_cuda = torch.cuda.is_available() +print('use_cuda: {}'.format(use_cuda)) + +syncnet_T = 5 +syncnet_mel_step_size = 16 + +class Dataset(object): + def __init__(self, split): + self.all_videos = get_image_list(args.data_root, split) + + def get_frame_id(self, frame): + return int(basename(frame).split('.')[0]) + + def get_window(self, start_frame): + start_id = self.get_frame_id(start_frame) + vidname = dirname(start_frame) + + window_fnames = [] + for frame_id in range(start_id, start_id + syncnet_T): + frame = join(vidname, '{}.jpg'.format(frame_id)) + if not isfile(frame): + return None + window_fnames.append(frame) + return window_fnames + + def read_window(self, 
window_fnames): + if window_fnames is None: return None + window = [] + for fname in window_fnames: + img = cv2.imread(fname) + if img is None: + return None + try: + img = cv2.resize(img, (hparams.img_size, hparams.img_size)) + except Exception as e: + return None + + window.append(img) + + return window + + def crop_audio_window(self, spec, start_frame): + if type(start_frame) == int: + start_frame_num = start_frame + else: + start_frame_num = self.get_frame_id(start_frame) # 0-indexing ---> 1-indexing + start_idx = int(80. * (start_frame_num / float(hparams.fps))) + + end_idx = start_idx + syncnet_mel_step_size + + return spec[start_idx : end_idx, :] + + def get_segmented_mels(self, spec, start_frame): + mels = [] + assert syncnet_T == 5 + start_frame_num = self.get_frame_id(start_frame) + 1 # 0-indexing ---> 1-indexing + if start_frame_num - 2 < 0: return None + for i in range(start_frame_num, start_frame_num + syncnet_T): + m = self.crop_audio_window(spec, i - 2) + if m.shape[0] != syncnet_mel_step_size: + return None + mels.append(m.T) + + mels = np.asarray(mels) + + return mels + + def prepare_window(self, window): + # 3 x T x H x W + x = np.asarray(window) / 255. 
+ x = np.transpose(x, (3, 0, 1, 2)) + + return x + + def __len__(self): + return len(self.all_videos) + + def __getitem__(self, idx): + while 1: + idx = random.randint(0, len(self.all_videos) - 1) + vidname = self.all_videos[idx] + img_names = list(glob(join(vidname, '*.jpg'))) + if len(img_names) <= 3 * syncnet_T: + continue + + img_name = random.choice(img_names) + wrong_img_name = random.choice(img_names) + while wrong_img_name == img_name: + wrong_img_name = random.choice(img_names) + + window_fnames = self.get_window(img_name) + wrong_window_fnames = self.get_window(wrong_img_name) + if window_fnames is None or wrong_window_fnames is None: + continue + + window = self.read_window(window_fnames) + if window is None: + continue + + wrong_window = self.read_window(wrong_window_fnames) + if wrong_window is None: + continue + + try: + wavpath = join(vidname, "audio.wav") + wav = audio.load_wav(wavpath, hparams.sample_rate) + + orig_mel = audio.melspectrogram(wav).T + except Exception as e: + continue + + mel = self.crop_audio_window(orig_mel.copy(), img_name) + + if (mel.shape[0] != syncnet_mel_step_size): + continue + + indiv_mels = self.get_segmented_mels(orig_mel.copy(), img_name) + if indiv_mels is None: continue + + window = self.prepare_window(window) + y = window.copy() + window[:, :, window.shape[2]//2:] = 0. 
+ + wrong_window = self.prepare_window(wrong_window) + x = np.concatenate([window, wrong_window], axis=0) + + x = torch.FloatTensor(x) + mel = torch.FloatTensor(mel.T).unsqueeze(0) + indiv_mels = torch.FloatTensor(indiv_mels).unsqueeze(1) + y = torch.FloatTensor(y) + return x, indiv_mels, mel, y + +def save_sample_images(x, g, gt, global_step, checkpoint_dir): + x = (x.detach().cpu().numpy().transpose(0, 2, 3, 4, 1) * 255.).astype(np.uint8) + g = (g.detach().cpu().numpy().transpose(0, 2, 3, 4, 1) * 255.).astype(np.uint8) + gt = (gt.detach().cpu().numpy().transpose(0, 2, 3, 4, 1) * 255.).astype(np.uint8) + + refs, inps = x[..., 3:], x[..., :3] + folder = join(checkpoint_dir, "samples_step{:09d}".format(global_step)) + if not os.path.exists(folder): os.mkdir(folder) + collage = np.concatenate((refs, inps, g, gt), axis=-2) + for batch_idx, c in enumerate(collage): + for t in range(len(c)): + cv2.imwrite('{}/{}_{}.jpg'.format(folder, batch_idx, t), c[t]) + +logloss = nn.BCELoss() +def cosine_loss(a, v, y): + d = nn.functional.cosine_similarity(a, v) + loss = logloss(d.unsqueeze(1), y) + + return loss + +device = torch.device("cuda" if use_cuda else "cpu") +syncnet = SyncNet().to(device) +for p in syncnet.parameters(): + p.requires_grad = False + +recon_loss = nn.L1Loss() +def get_sync_loss(mel, g): + g = g[:, :, :, g.size(3)//2:] + g = torch.cat([g[:, :, i] for i in range(syncnet_T)], dim=1) + # B, 3 * T, H//2, W + a, v = syncnet(mel, g) + y = torch.ones(g.size(0), 1).float().to(device) + return cosine_loss(a, v, y) + +def train(device, model, train_data_loader, test_data_loader, optimizer, + checkpoint_dir=None, checkpoint_interval=None, nepochs=None): + + global global_step, global_epoch + resumed_step = global_step + + while global_epoch < nepochs: + print('Starting Epoch: {}'.format(global_epoch)) + running_sync_loss, running_l1_loss = 0., 0. 
+ prog_bar = tqdm(enumerate(train_data_loader)) + for step, (x, indiv_mels, mel, gt) in prog_bar: + model.train() + optimizer.zero_grad() + + # Move data to CUDA device + x = x.to(device) + mel = mel.to(device) + indiv_mels = indiv_mels.to(device) + gt = gt.to(device) + + g = model(indiv_mels, x) + + if hparams.syncnet_wt > 0.: + sync_loss = get_sync_loss(mel, g) + else: + sync_loss = 0. + + l1loss = recon_loss(g, gt) + + loss = hparams.syncnet_wt * sync_loss + (1 - hparams.syncnet_wt) * l1loss + loss.backward() + optimizer.step() + + if global_step % checkpoint_interval == 0: + save_sample_images(x, g, gt, global_step, checkpoint_dir) + + global_step += 1 + cur_session_steps = global_step - resumed_step + + running_l1_loss += l1loss.item() + if hparams.syncnet_wt > 0.: + running_sync_loss += sync_loss.item() + else: + running_sync_loss += 0. + + if global_step == 1 or global_step % checkpoint_interval == 0: + save_checkpoint( + model, optimizer, global_step, checkpoint_dir, global_epoch) + + if global_step == 1 or global_step % hparams.eval_interval == 0: + with torch.no_grad(): + average_sync_loss = eval_model(test_data_loader, global_step, device, model, checkpoint_dir) + + if average_sync_loss < .75: + hparams.set_hparam('syncnet_wt', 0.01) # without image GAN a lesser weight is sufficient + + prog_bar.set_description('L1: {}, Sync Loss: {}'.format(running_l1_loss / (step + 1), + running_sync_loss / (step + 1))) + + global_epoch += 1 + + +def eval_model(test_data_loader, global_step, device, model, checkpoint_dir): + eval_steps = 700 + print('Evaluating for {} steps'.format(eval_steps)) + sync_losses, recon_losses = [], [] + step = 0 + while 1: + for x, indiv_mels, mel, gt in test_data_loader: + step += 1 + model.eval() + + # Move data to CUDA device + x = x.to(device) + gt = gt.to(device) + indiv_mels = indiv_mels.to(device) + mel = mel.to(device) + + g = model(indiv_mels, x) + + sync_loss = get_sync_loss(mel, g) + l1loss = recon_loss(g, gt) + + 
sync_losses.append(sync_loss.item()) + recon_losses.append(l1loss.item()) + + if step > eval_steps: + averaged_sync_loss = sum(sync_losses) / len(sync_losses) + averaged_recon_loss = sum(recon_losses) / len(recon_losses) + + print('L1: {}, Sync loss: {}'.format(averaged_recon_loss, averaged_sync_loss)) + + return averaged_sync_loss + +def save_checkpoint(model, optimizer, step, checkpoint_dir, epoch): + + checkpoint_path = join( + checkpoint_dir, "checkpoint_step{:09d}.pth".format(global_step)) + optimizer_state = optimizer.state_dict() if hparams.save_optimizer_state else None + torch.save({ + "state_dict": model.state_dict(), + "optimizer": optimizer_state, + "global_step": step, + "global_epoch": epoch, + }, checkpoint_path) + print("Saved checkpoint:", checkpoint_path) + + +def _load(checkpoint_path): + if use_cuda: + checkpoint = torch.load(checkpoint_path) + else: + checkpoint = torch.load(checkpoint_path, + map_location=lambda storage, loc: storage) + return checkpoint + +def load_checkpoint(path, model, optimizer, reset_optimizer=False, overwrite_global_states=True): + global global_step + global global_epoch + + print("Load checkpoint from: {}".format(path)) + checkpoint = _load(path) + s = checkpoint["state_dict"] + new_s = {} + for k, v in s.items(): + new_s[k.replace('module.', '')] = v + model.load_state_dict(new_s) + if not reset_optimizer: + optimizer_state = checkpoint["optimizer"] + if optimizer_state is not None: + print("Load optimizer state from {}".format(path)) + optimizer.load_state_dict(checkpoint["optimizer"]) + if overwrite_global_states: + global_step = checkpoint["global_step"] + global_epoch = checkpoint["global_epoch"] + + return model + +if __name__ == "__main__": + checkpoint_dir = args.checkpoint_dir + + # Dataset and Dataloader setup + train_dataset = Dataset('train') + test_dataset = Dataset('val') + + train_data_loader = data_utils.DataLoader( + train_dataset, batch_size=hparams.batch_size, shuffle=True, + 
num_workers=hparams.num_workers) + + test_data_loader = data_utils.DataLoader( + test_dataset, batch_size=hparams.batch_size, + num_workers=4) + + device = torch.device("cuda" if use_cuda else "cpu") + + # Model + model = Wav2Lip().to(device) + print('total trainable params {}'.format(sum(p.numel() for p in model.parameters() if p.requires_grad))) + + optimizer = optim.Adam([p for p in model.parameters() if p.requires_grad], + lr=hparams.initial_learning_rate) + + if args.checkpoint_path is not None: + load_checkpoint(args.checkpoint_path, model, optimizer, reset_optimizer=False) + + load_checkpoint(args.syncnet_checkpoint_path, syncnet, None, reset_optimizer=True, overwrite_global_states=False) + + if not os.path.exists(checkpoint_dir): + os.mkdir(checkpoint_dir) + + # Train! + train(device, model, train_data_loader, test_data_loader, optimizer, + checkpoint_dir=checkpoint_dir, + checkpoint_interval=hparams.checkpoint_interval, + nepochs=hparams.nepochs) diff --git "a/Wav2Lip-master/\345\215\225\344\272\272\347\273\204\351\230\237\350\207\252\350\257\204.docx" "b/Wav2Lip-master/\345\215\225\344\272\272\347\273\204\351\230\237\350\207\252\350\257\204.docx" new file mode 100644 index 00000000..fae7b848 Binary files /dev/null and "b/Wav2Lip-master/\345\215\225\344\272\272\347\273\204\351\230\237\350\207\252\350\257\204.docx" differ diff --git "a/Wav2Lip-master/\345\221\275\344\273\244\344\275\277\347\224\250.txt" "b/Wav2Lip-master/\345\221\275\344\273\244\344\275\277\347\224\250.txt" new file mode 100644 index 00000000..34f298dd --- /dev/null +++ "b/Wav2Lip-master/\345\221\275\344\273\244\344\275\277\347\224\250.txt" @@ -0,0 +1,28 @@ +Windows下运行命令参考 +python run_pipeline.py --videofile G:\Wav2Lip-master\Wav2Lip-master\results\result_voice.mp4 --reference wav2lip --data_dir tmp_dir +python calculate_scores_real_videos.py --videofile G:\Wav2Lip-master\Wav2Lip-master\results\result_voice.mp4 --reference wav2lip --data_dir tmp_dir >> all_scores.txt +python inference.py 
--checkpoint_path checkpoints\wav2lip_gan.pth --face inputmp4\video1.mp4 --audio inputwav\test1.wav +conda install pytorch torchvision torchaudio pytorch-cuda=11.8 -c pytorch -c nvidia +python get_frames.py path/to/your/video.mp4 --output_folder Wav2Lip-master\Wav2Lip-master\evaluation\outputframes --frame_interval 1 --max_threads 4 +python run_pipeline.py --videofile /Wav2Lip-master/Wav2Lip-master/evaluation/syncnet_python-master/video/result_voice.mp4 --reference wav2lip --data_dir tmp_dir +python -m pytorch_fid path/to/dataset1 path/to/dataset2 +set KMP_DUPLICATE_LIB_OK=TRUE #如果在评测阶段出现类似相关报错请输该行代码 +G:\Wav2Lip-master\Wav2Lip-master\videos\May.mp4 +>python get_frames.py G:\Wav2Lip-master\Wav2Lip-master\results\result_voice.mp4 --output_folder evaluation\outputframes --frame_interval 1 --max_threads 4 +>python get_frames.py videos\Jae-in.mp4 --output_folder evaluation\inputframes --frame_interval 1 --max_threads 4 +python -m pytorch_fid evaluation\inputframes evaluation\outputframes + + + +Docker下运行命令参考 +docker exec -it yuyinshibie /bin/bash +python inference.py --checkpoint_path checkpoints/wav2lip_gan.pth --face /workspace/inputmp4/video1.mp4 --audio /workspace/inputwav/test1.wav +python run_pipeline.py --videofile /workspace/Wav2Lip-master/Wav2Lip-master/results/result_voice.mp4 --reference wav2lip --data_dir tmp_dir +python calculate_scores_real_videos.py --videofile /workspace/Wav2Lip-master/Wav2Lip-master/results/result_voice.mp4 --reference wav2lip --data_dir tmp_dir >> all_scores.txt +python get_frames.py /workspace/Wav2Lip-master/Wav2Lip-master/results/result_voice.mp4 --output_folder /workspace/Wav2Lip-master/Wav2Lip-master/evaluation/pytorch-fid-master/outputframes --frame_interval 1 --max_threads 4 +python get_frames.py /workspace/inputmp4/video1.mp4 --output_folder /workspace/Wav2Lip-master/Wav2Lip-master/evaluation/pytorch-fid-master/inputframes --frame_interval 1 --max_threads 4 +python -m pytorch_fid inputframes outputframes --num-workers 0 --device 
cuda:0 +rm -r inputframes/*.jpg +rm -r outputframes/*.jpg +docker run --hostname=docker-desktop --env=PYTORCH_VERSION=2.4.1 --env=PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:/opt/conda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin --env=NVIDIA_VISIBLE_DEVICES=all --env=NVIDIA_DRIVER_CAPABILITIES=compute,utility --env=LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 --volume=G:\yuyin\result:/workspace/Wav2Lip-master/Wav2Lip-master/results --volume=G:\yuyin\inputmp4:/workspace/inputmp4 --volume=G:\yuyin\inputwzv:/workspace/inputwav --network=host --privileged --workdir=/workspace --restart=no --label=''com.nvidia.volumes.needed=nvidia_driver'' --label=''org.opencontainers.image.ref.name=ubuntu'' --label=''org.opencontainers.image.version=22.04'' --label='com.nvidia.volumes.needed=nvidia_driver' --label='org.opencontainers.image.ref.name=ubuntu' --label='org.opencontainers.image.version=22.04' --shm-size=3g --gpus all --runtime=runc --name=xxxx -t -d yuyinshibie:v3 + diff --git "a/Wav2Lip-master/\351\205\215\347\275\256\346\226\207\344\273\266.md" "b/Wav2Lip-master/\351\205\215\347\275\256\346\226\207\344\273\266.md" new file mode 100644 index 00000000..0de3bc53 --- /dev/null +++ "b/Wav2Lip-master/\351\205\215\347\275\256\346\226\207\344\273\266.md" @@ -0,0 +1,122 @@ +# 项目名称 +Wav2lip-master +## 1. 项目简介 +这个项目是基于Wav2lip-master项目(https://github.com/Rudrabha/Wav2lip/)的复现项目,在原项目的基础上通过更加简单的操作来实现从生成视频到评测完毕这一流程。 + +## 2. 系统要求 +- 操作系统:Windows / Linux +- Python 版本:3.8 + +## 3. 
环境配置 + +### 3.1 安装 Python +请确保系统上安装了 Python 3.8。可以从 [Python 官网](https://www.python.org/downloads/) 下载并安装。 + +### 3.2 创建虚拟环境 +建议使用虚拟环境来管理项目依赖。使用conda创建虚拟环境。 + +```bash +# 使用 conda +conda create --name myenv python=3.8 +conda activate myenv +``` +在Windows下如果要使用gpu运行,请输入命令: +```BASH +conda install pytorch torchvision torchaudio pytorch-cuda=11.8 -c pytorch -c nvidia +``` +### 3.3 安装依赖 +在项目根目录下,使用 pip 或 conda 安装项目所需的依赖。 +```BASH +# 使用 pip 安装 +pip install -r requirements.txt +``` +### 3.4 docker镜像安装 +我已经将运行此项目docker打包并上传至该网址,读者可以自行下载使用: +```BASH +https://share.weiyun.com/sI9Hlbbb +``` +```bash +docker run ... --shm-size=3g --gpus all --name=xxxx -t -d name +#docker运行 +docker exec -it yuyinshibie /bin/bash +#进入docker +conda activate xuni +#进入进行工作的虚拟环境 +``` +### 3.5 模型安装 +我把模型都放在一起,方便在windows下的读者能够直接下载模型安装到对应位置。 +链接: https://pan.baidu.com/s/1xIi0HJKMs7_V8mt5JVSsgw 提取码: kgmg +```BASH +syncnet_v2.model和example.avi需要放置到Wav2Lip-master\evaluation\syncnet_python-master\data下,无data文件夹请自行创建 +sfd_face.pth请放置在Wav2Lip-master\evaluation\syncnet_python-master\detectors\s3fd\weights下 +s3fd-619a316812.pth请放置在Wav2Lip-master\face_detection\detection\sfd下 +wav2lip.pth和wav2lip_gan.pth文件请放在Wav2Lip-master\checkpoints下 +``` +## 4. 项目结构 +这里只显示操作涉及的文件夹和文件,没有提到的文件或文件夹不代表不需要 +```bash +Wav2Lip-master/ +│ +├── checkpoints/ # wav2lip模型存放地 +│ ├── .pth #预训练的模型文件 +│ ├── README +│ +├── evaluation/ #评测文件夹 +│ ├── inputframes/ # 由输入视频提取的图像 +│ ├── outputframes/ # 由输出视频提取的图像 +│ ├── scores_LSE/ # 评测文件,请保证该文件下的py文件复制到syncnet_python-master中 +│ ├── syncnet_python-master/ # LSE-C LSE-D评估请在该文件夹下进行 +│ ├── pytorch-fid-master/ #这里存放pytorch-fid项目 +│ ├── .../ +│ +├── inputmp4/ # 可以将作为人脸输入的jpg,mp4放在这个文件夹下 +│ ├── ... # +│ +├── inputwav/ # 可以将作为音频输入的wav放在这个文件夹下 +│ ├── ... +│ +├── face_detection/ # 请将下载到的s3fd-619a316812.pth文件放置在face_detection\detection\sfd下 +│ ├── ... +│ +├── results/ # 生成的输出视频会出现在这个文件夹下 +│ ├── ... +├── requirements.txt # 依赖项 +├── README.md # 项目说明 +``` +## 5. 
运行项目 +确保inputmp4文件夹中和inputwav中存在格式正确的文件 +```bash +运行下列命令: +python inference.py --checkpoint_path checkpoints\wav2lip_gan.pth --face inputmp4\video1.mp4 --audio inputwav\test1.wav#生成视频 +``` + +## 6. 测试 +### 6.1 LSE-C、LSE-D指数测试方法 +```BASH +cd evaluation/synsyncnet_python-master +python run_pipeline.py --videofile path to\result_voice.mp4 --reference wav2lip --data_dir tmp_dir#将path to\result_voice.mp4改为你要检测的生成视频路径 +python calculate_scores_real_videos.py --videofile path to\result_voice.mp4 --reference wav2lip --data_dir tmp_dir >> all_scores.txt +#将path to\result_voice.mp4改为你要检测的生成视频路径,结果会生成在all_scores.txt +#注意:在结束测试后,如果要进行下一次测试前请先删除产生的tmp_dir文件夹 +``` +### 6.2 FID指数测试方法 +```BASH +python get_frames.py path/to/your/video.mp4 --output_folder evaluation\outputframes(inputframes) --frame_interval 1 --max_threads 4#用该命令将生成视频(输入视频)提取为图像存放在指定文件夹中 +python -m pytorch_fid evaluation\inputframes evaluation\outputframes#生成结果 +#注意:在结束测试后,如果要进行下一次测试前请先删除产生的inputframes和outputframes文件夹中的图片 +``` +## 7. 常见问题 + +问题 1 FID指数测试无法运行 +检查被比对的文件夹中存放数据是否正确 +在python -m pytorch_fid evaluation\inputframes evaluation\outputframes后加--num-workers 0 --device cuda:0 + +问题 2 linux下运行LSE-C测试在最后一步被KILLED +尝试缩短生成视频的长度,如果无法实现,请尝试在windows环境下运行 + + +## 8. 联系信息 + +姓名:majunchen +邮箱:2406722613@qq.com +GitHub: GitHub 链接