From 65cb7a2111a554c9210ae08419b04c1ae73f0d30 Mon Sep 17 00:00:00 2001 From: Elijah Harmon Date: Sun, 25 Sep 2022 20:27:52 -0400 Subject: [PATCH] Fixing bugs on yolo update --- .gitignore | 3 + main.py | 44 +- main_onnx_cpu.py | 43 +- main_onnx_gpu.py | 43 +- main_tensorrt_gpu.py | 52 +- main_torch_gpu.py | 45 +- models/__pycache__/__init__.cpython-39.pyc | Bin 136 -> 165 bytes models/__pycache__/common.cpython-310.pyc | Bin 32508 -> 37450 bytes models/__pycache__/common.cpython-39.pyc | Bin 32562 -> 37073 bytes .../__pycache__/experimental.cpython-310.pyc | Bin 4801 -> 4864 bytes models/__pycache__/yolo.cpython-310.pyc | Bin 13047 -> 16078 bytes models/common.py | 552 ++++++++++++------ models/hub/yolov5s-LeakyReLU.yaml | 49 ++ models/segment/yolov5l-seg.yaml | 48 ++ models/segment/yolov5m-seg.yaml | 48 ++ models/segment/yolov5n-seg.yaml | 48 ++ models/segment/yolov5s-seg.yaml | 48 ++ models/segment/yolov5x-seg.yaml | 48 ++ models/tf.py | 53 +- models/yolo.py | 76 ++- utils/__init__.py | 10 +- utils/__pycache__/__init__.cpython-310.pyc | Bin 1016 -> 2302 bytes utils/__pycache__/__init__.cpython-39.pyc | Bin 2093 -> 2300 bytes .../__pycache__/augmentations.cpython-310.pyc | Bin 9113 -> 13716 bytes .../__pycache__/augmentations.cpython-39.pyc | Bin 13475 -> 13757 bytes utils/__pycache__/autoanchor.cpython-310.pyc | Bin 6487 -> 6510 bytes utils/__pycache__/autoanchor.cpython-39.pyc | Bin 6505 -> 6522 bytes utils/__pycache__/dataloaders.cpython-310.pyc | Bin 37573 -> 41745 bytes utils/__pycache__/dataloaders.cpython-39.pyc | Bin 39740 -> 41753 bytes utils/__pycache__/downloads.cpython-310.pyc | Bin 4951 -> 5431 bytes utils/__pycache__/downloads.cpython-39.pyc | Bin 5405 -> 5419 bytes utils/__pycache__/general.cpython-310.pyc | Bin 35521 -> 37664 bytes utils/__pycache__/general.cpython-39.pyc | Bin 37059 -> 37550 bytes utils/__pycache__/metrics.cpython-310.pyc | Bin 11363 -> 11632 bytes utils/__pycache__/metrics.cpython-39.pyc | Bin 11601 -> 11638 bytes 
utils/__pycache__/plots.cpython-310.pyc | Bin 18474 -> 21634 bytes utils/__pycache__/plots.cpython-39.pyc | Bin 20099 -> 21652 bytes utils/__pycache__/torch_utils.cpython-310.pyc | Bin 13896 -> 16786 bytes utils/__pycache__/torch_utils.cpython-39.pyc | Bin 16740 -> 16770 bytes utils/augmentations.py | 31 +- utils/autoanchor.py | 4 +- utils/autobatch.py | 9 +- utils/dataloaders.py | 86 ++- utils/docker/Dockerfile | 2 +- utils/downloads.py | 11 +- utils/general.py | 193 +++--- utils/loggers/__init__.py | 81 ++- utils/loggers/clearml/clearml_utils.py | 1 + utils/loggers/comet/README.md | 256 ++++++++ utils/loggers/comet/__init__.py | 501 ++++++++++++++++ utils/loggers/comet/comet_utils.py | 150 +++++ utils/loggers/comet/hpo.py | 118 ++++ utils/loggers/comet/optimizer_config.json | 209 +++++++ utils/metrics.py | 21 +- utils/plots.py | 78 ++- utils/segment/__init__.py | 0 utils/segment/augmentations.py | 104 ++++ utils/segment/dataloaders.py | 330 +++++++++++ utils/segment/general.py | 134 +++++ utils/segment/loss.py | 186 ++++++ utils/segment/metrics.py | 210 +++++++ utils/segment/plots.py | 143 +++++ utils/torch_utils.py | 3 +- utils/triton.py | 85 +++ 64 files changed, 3697 insertions(+), 459 deletions(-) create mode 100644 models/hub/yolov5s-LeakyReLU.yaml create mode 100644 models/segment/yolov5l-seg.yaml create mode 100644 models/segment/yolov5m-seg.yaml create mode 100644 models/segment/yolov5n-seg.yaml create mode 100644 models/segment/yolov5s-seg.yaml create mode 100644 models/segment/yolov5x-seg.yaml create mode 100644 utils/loggers/comet/README.md create mode 100644 utils/loggers/comet/__init__.py create mode 100644 utils/loggers/comet/comet_utils.py create mode 100644 utils/loggers/comet/hpo.py create mode 100644 utils/loggers/comet/optimizer_config.json create mode 100644 utils/segment/__init__.py create mode 100644 utils/segment/augmentations.py create mode 100644 utils/segment/dataloaders.py create mode 100644 utils/segment/general.py create mode 100644 
utils/segment/loss.py create mode 100644 utils/segment/metrics.py create mode 100644 utils/segment/plots.py create mode 100644 utils/triton.py diff --git a/.gitignore b/.gitignore index 7acff58..f218b77 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,6 @@ yolov5s.engine yolov5 tensorrt-8.4.1.5-cp39-none-win_amd64.whl tensorrt-8.4.1.5-cp310-none-win_amd64.whl +__pycache__ +utils/__pycache__ +models/__pycache__ \ No newline at end of file diff --git a/main.py b/main.py index 2f17afb..45ed8fb 100644 --- a/main.py +++ b/main.py @@ -5,12 +5,13 @@ import gc import numpy as np import cv2 import time -import win32api, win32con +import win32api +import win32con import pandas as pd -from utils.general import (LOGGER, check_file, check_img_size, check_imshow, check_requirements, colorstr, cv2, - increment_path, non_max_suppression, print_args, scale_coords, strip_optimizer, xyxy2xywh) +from utils.general import (cv2, non_max_suppression, xyxy2xywh) import dxcam + def main(): # Window title of the game, don't need the entire name videoGameWindowTitle = "Counter" @@ -63,8 +64,10 @@ def main(): # sctArea = {"mon": 1, "top": 0, "left": 0, "width": 1920, "height": 1080} # Starting screenshoting engine - left = aaRightShift + ((videoGameWindow.left + videoGameWindow.right) // 2) - (screenShotWidth // 2) - top = videoGameWindow.top + (videoGameWindow.height - screenShotHeight) // 2 + left = aaRightShift + \ + ((videoGameWindow.left + videoGameWindow.right) // 2) - (screenShotWidth // 2) + top = videoGameWindow.top + \ + (videoGameWindow.height - screenShotHeight) // 2 right, bottom = left + screenShotWidth, top + screenShotHeight region = (left, top, right, bottom) @@ -81,7 +84,8 @@ def main(): sTime = time.time() # Loading Yolo5 Small AI Model - model = torch.hub.load('ultralytics/yolov5', 'yolov5s', pretrained=True, force_reload=True) + model = torch.hub.load('ultralytics/yolov5', 'yolov5s', + pretrained=True, force_reload=True) stride, names, pt = model.stride, model.names, 
model.pt model.half() @@ -96,7 +100,7 @@ def main(): # Getting Frame npImg = np.array(camera.get_latest_frame()) - + # Normalizing Data im = torch.from_numpy(npImg).to('cuda') im = torch.movedim(im, 2, 0) @@ -109,23 +113,25 @@ def main(): results = model(im, size=screenShotHeight) # Suppressing results that dont meet thresholds - pred = non_max_suppression(results, 0.25, 0.25, 0, False, max_det=1000) - + pred = non_max_suppression( + results, 0.25, 0.25, 0, False, max_det=1000) # Converting output to usable cords targets = [] for i, det in enumerate(pred): s = "" - gn = torch.tensor(im.shape)[[0, 0, 0, 0]] + gn = torch.tensor(im.shape)[[0, 0, 0, 0]] if len(det): for c in det[:, -1].unique(): n = (det[:, -1] == c).sum() # detections per class s += f"{n} {names[int(c)]}, " # add to string for *xyxy, conf, cls in reversed(det): - targets.append((xyxy2xywh(torch.tensor(xyxy).view(1, 4)) / gn).view(-1).tolist()) # normalized xywh + targets.append((xyxy2xywh(torch.tensor(xyxy).view( + 1, 4)) / gn).view(-1).tolist()) # normalized xywh - targets = pd.DataFrame(targets, columns = ['current_mid_x', 'current_mid_y', 'width', "height"]) + targets = pd.DataFrame( + targets, columns=['current_mid_x', 'current_mid_y', 'width', "height"]) # If there are people in the center bounding box if len(targets) > 0: @@ -134,7 +140,8 @@ def main(): targets['last_mid_x'] = last_mid_coord[0] targets['last_mid_y'] = last_mid_coord[1] # Take distance between current person mid coordinate and last person mid coordinate - targets['dist'] = np.linalg.norm(targets.iloc[:, [0,1]].values - targets.iloc[:, [4,5]], axis=1) + targets['dist'] = np.linalg.norm( + targets.iloc[:, [0, 1]].values - targets.iloc[:, [4, 5]], axis=1) targets.sort_values(by="dist", ascending=False) # Take the first person that shows up in the dataframe (Recall that we sort based on Euclidean distance) @@ -151,7 +158,8 @@ def main(): # Moving the mouse if win32api.GetKeyState(0x14): - 
win32api.mouse_event(win32con.MOUSEEVENTF_MOVE, int(mouseMove[0] * aaMovementAmp), int(mouseMove[1] * aaMovementAmp), 0, 0) + win32api.mouse_event(win32con.MOUSEEVENTF_MOVE, int( + mouseMove[0] * aaMovementAmp), int(mouseMove[1] * aaMovementAmp), 0, 0) last_mid_coord = [xMid, yMid] else: @@ -165,7 +173,8 @@ def main(): halfH = round(targets["height"][i] / 2) midX = targets['current_mid_x'][i] midY = targets['current_mid_y'][i] - (startX, startY, endX, endY) = int(midX + halfW), int(midY + halfH), int(midX - halfW), int(midY - halfH) + (startX, startY, endX, endY) = int( + midX + halfW), int(midY + halfH), int(midX - halfW), int(midY - halfH) confidence = .5 @@ -174,10 +183,10 @@ def main(): # draw the bounding box and label on the frame label = "{}: {:.2f}%".format("Human", confidence * 100) cv2.rectangle(npImg, (startX, startY), (endX, endY), - COLORS[idx], 2) + COLORS[idx], 2) y = startY - 15 if startY - 15 > 15 else startY + 15 cv2.putText(npImg, label, (startX, y), - cv2.FONT_HERSHEY_SIMPLEX, 0.5, COLORS[idx], 2) + cv2.FONT_HERSHEY_SIMPLEX, 0.5, COLORS[idx], 2) # Forced garbage cleanup every second count += 1 @@ -197,5 +206,6 @@ def main(): exit() camera.stop() + if __name__ == "__main__": main() diff --git a/main_onnx_cpu.py b/main_onnx_cpu.py index 4181d10..c01f6ff 100644 --- a/main_onnx_cpu.py +++ b/main_onnx_cpu.py @@ -5,13 +5,14 @@ import gc import numpy as np import cv2 import time -import win32api, win32con +import win32api +import win32con import pandas as pd -from utils.general import (LOGGER, check_file, check_img_size, check_imshow, check_requirements, colorstr, cv2, - increment_path, non_max_suppression, print_args, scale_coords, strip_optimizer, xyxy2xywh) +from utils.general import (cv2, non_max_suppression, xyxy2xywh) import dxcam import torch + def main(): # Window title to go after and the height of the screenshots videoGameWindowTitle = "Counter" @@ -61,8 +62,10 @@ def main(): "height": screenShotHeight} # Starting screenshoting engine - left 
= aaRightShift + ((videoGameWindow.left + videoGameWindow.right) // 2) - (screenShotWidth // 2) - top = videoGameWindow.top + (videoGameWindow.height - screenShotHeight) // 2 + left = aaRightShift + \ + ((videoGameWindow.left + videoGameWindow.right) // 2) - (screenShotWidth // 2) + top = videoGameWindow.top + \ + (videoGameWindow.height - screenShotHeight) // 2 right, bottom = left + 320, top + 320 region = (left, top, right, bottom) @@ -82,7 +85,8 @@ def main(): so = ort.SessionOptions() so.graph_optimization_level = ort.GraphOptimizationLevel.ORT_ENABLE_ALL - ort_sess = ort.InferenceSession('yolov5s320.onnx', sess_options=so, providers=['CUDAExecutionProvider']) + ort_sess = ort.InferenceSession('yolov5s320.onnx', sess_options=so, providers=[ + 'CUDAExecutionProvider']) # Used for colors drawn on bounding boxes COLORS = np.random.uniform(0, 255, size=(1500, 3)) @@ -99,21 +103,24 @@ def main(): im = torch.from_numpy(outputs[0]).to('cpu') - pred = non_max_suppression(im, confidence, confidence, 0, False, max_det=10) + pred = non_max_suppression( + im, confidence, confidence, 0, False, max_det=10) targets = [] for i, det in enumerate(pred): s = "" - gn = torch.tensor(npImg.shape)[[0, 0, 0, 0]] + gn = torch.tensor(npImg.shape)[[0, 0, 0, 0]] if len(det): for c in det[:, -1].unique(): n = (det[:, -1] == c).sum() # detections per class s += f"{n} {int(c)}, " # add to string for *xyxy, conf, cls in reversed(det): - targets.append((xyxy2xywh(torch.tensor(xyxy).view(1, 4)) / gn).view(-1).tolist()) # normalized xywh + targets.append((xyxy2xywh(torch.tensor(xyxy).view( + 1, 4)) / gn).view(-1).tolist()) # normalized xywh - targets = pd.DataFrame(targets, columns = ['current_mid_x', 'current_mid_y', 'width', "height"]) + targets = pd.DataFrame( + targets, columns=['current_mid_x', 'current_mid_y', 'width', "height"]) # If there are people in the center bounding box if len(targets) > 0: @@ -122,7 +129,8 @@ def main(): targets['last_mid_x'] = last_mid_coord[0] 
targets['last_mid_y'] = last_mid_coord[1] # Take distance between current person mid coordinate and last person mid coordinate - targets['dist'] = np.linalg.norm(targets.iloc[:, [0,1]].values - targets.iloc[:, [4,5]], axis=1) + targets['dist'] = np.linalg.norm( + targets.iloc[:, [0, 1]].values - targets.iloc[:, [4, 5]], axis=1) targets.sort_values(by="dist", ascending=False) # Take the first person that shows up in the dataframe (Recall that we sort based on Euclidean distance) @@ -139,7 +147,8 @@ def main(): # Moving the mouse if win32api.GetKeyState(0x14): - win32api.mouse_event(win32con.MOUSEEVENTF_MOVE, int(mouseMove[0] * aaMovementAmp), int(mouseMove[1] * aaMovementAmp), 0, 0) + win32api.mouse_event(win32con.MOUSEEVENTF_MOVE, int( + mouseMove[0] * aaMovementAmp), int(mouseMove[1] * aaMovementAmp), 0, 0) last_mid_coord = [xMid, yMid] else: @@ -153,16 +162,17 @@ def main(): halfH = round(targets["height"][i] / 2) midX = targets['current_mid_x'][i] midY = targets['current_mid_y'][i] - (startX, startY, endX, endY) = int(midX + halfW), int(midY + halfH), int(midX - halfW), int(midY - halfH) + (startX, startY, endX, endY) = int(midX + halfW), int(midY + + halfH), int(midX - halfW), int(midY - halfH) idx = 0 # draw the bounding box and label on the frame label = "{}: {:.2f}%".format("Human", confidence * 100) cv2.rectangle(npImg, (startX, startY), (endX, endY), - COLORS[idx], 2) + COLORS[idx], 2) y = startY - 15 if startY - 15 > 15 else startY + 15 cv2.putText(npImg, label, (startX, y), - cv2.FONT_HERSHEY_SIMPLEX, 0.5, COLORS[idx], 2) + cv2.FONT_HERSHEY_SIMPLEX, 0.5, COLORS[idx], 2) # Forced garbage cleanup every second count += 1 @@ -183,5 +193,6 @@ def main(): camera.stop() + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/main_onnx_gpu.py b/main_onnx_gpu.py index 91d85f4..cbeedd9 100644 --- a/main_onnx_gpu.py +++ b/main_onnx_gpu.py @@ -6,13 +6,14 @@ import gc import numpy as np import cv2 import time -import win32api, win32con 
+import win32api +import win32con import pandas as pd -from utils.general import (LOGGER, check_file, check_img_size, check_imshow, check_requirements, colorstr, cv2, - increment_path, non_max_suppression, print_args, scale_coords, strip_optimizer, xyxy2xywh) +from utils.general import (cv2, non_max_suppression, xyxy2xywh) import dxcam import torch + def main(): # Window title to go after and the height of the screenshots videoGameWindowTitle = "Counter" @@ -62,8 +63,10 @@ def main(): "height": screenShotHeight} # Starting screenshoting engine - left = aaRightShift + ((videoGameWindow.left + videoGameWindow.right) // 2) - (screenShotWidth // 2) - top = videoGameWindow.top + (videoGameWindow.height - screenShotHeight) // 2 + left = aaRightShift + \ + ((videoGameWindow.left + videoGameWindow.right) // 2) - (screenShotWidth // 2) + top = videoGameWindow.top + \ + (videoGameWindow.height - screenShotHeight) // 2 right, bottom = left + 320, top + 320 region = (left, top, right, bottom) @@ -81,7 +84,8 @@ def main(): so = ort.SessionOptions() so.graph_optimization_level = ort.GraphOptimizationLevel.ORT_ENABLE_ALL - ort_sess = ort.InferenceSession('yolov5s320.onnx', sess_options=so, providers=['CUDAExecutionProvider']) + ort_sess = ort.InferenceSession('yolov5s320.onnx', sess_options=so, providers=[ + 'CUDAExecutionProvider']) # Used for colors drawn on bounding boxes COLORS = np.random.uniform(0, 255, size=(1500, 3)) @@ -98,21 +102,24 @@ def main(): im = torch.from_numpy(outputs[0]).to('cpu') - pred = non_max_suppression(im, confidence, confidence, 0, False, max_det=10) + pred = non_max_suppression( + im, confidence, confidence, 0, False, max_det=10) targets = [] for i, det in enumerate(pred): s = "" - gn = torch.tensor(npImg.shape)[[0, 0, 0, 0]] + gn = torch.tensor(npImg.shape)[[0, 0, 0, 0]] if len(det): for c in det[:, -1].unique(): n = (det[:, -1] == c).sum() # detections per class s += f"{n} {int(c)}, " # add to string for *xyxy, conf, cls in reversed(det): - 
targets.append((xyxy2xywh(torch.tensor(xyxy).view(1, 4)) / gn).view(-1).tolist()) # normalized xywh + targets.append((xyxy2xywh(torch.tensor(xyxy).view( + 1, 4)) / gn).view(-1).tolist()) # normalized xywh - targets = pd.DataFrame(targets, columns = ['current_mid_x', 'current_mid_y', 'width', "height"]) + targets = pd.DataFrame( + targets, columns=['current_mid_x', 'current_mid_y', 'width', "height"]) # If there are people in the center bounding box if len(targets) > 0: @@ -121,7 +128,8 @@ def main(): targets['last_mid_x'] = last_mid_coord[0] targets['last_mid_y'] = last_mid_coord[1] # Take distance between current person mid coordinate and last person mid coordinate - targets['dist'] = np.linalg.norm(targets.iloc[:, [0,1]].values - targets.iloc[:, [4,5]], axis=1) + targets['dist'] = np.linalg.norm( + targets.iloc[:, [0, 1]].values - targets.iloc[:, [4, 5]], axis=1) targets.sort_values(by="dist", ascending=False) # Take the first person that shows up in the dataframe (Recall that we sort based on Euclidean distance) @@ -138,7 +146,8 @@ def main(): # Moving the mouse if win32api.GetKeyState(0x14): - win32api.mouse_event(win32con.MOUSEEVENTF_MOVE, int(mouseMove[0] * aaMovementAmp), int(mouseMove[1] * aaMovementAmp), 0, 0) + win32api.mouse_event(win32con.MOUSEEVENTF_MOVE, int( + mouseMove[0] * aaMovementAmp), int(mouseMove[1] * aaMovementAmp), 0, 0) last_mid_coord = [xMid, yMid] else: @@ -152,16 +161,17 @@ def main(): halfH = round(targets["height"][i] / 2) midX = targets['current_mid_x'][i] midY = targets['current_mid_y'][i] - (startX, startY, endX, endY) = int(midX + halfW), int(midY + halfH), int(midX - halfW), int(midY - halfH) + (startX, startY, endX, endY) = int(midX + halfW), int(midY + + halfH), int(midX - halfW), int(midY - halfH) idx = 0 # draw the bounding box and label on the frame label = "{}: {:.2f}%".format("Human", confidence * 100) cv2.rectangle(npImg, (startX, startY), (endX, endY), - COLORS[idx], 2) + COLORS[idx], 2) y = startY - 15 if startY - 15 > 
15 else startY + 15 cv2.putText(npImg, label, (startX, y), - cv2.FONT_HERSHEY_SIMPLEX, 0.5, COLORS[idx], 2) + cv2.FONT_HERSHEY_SIMPLEX, 0.5, COLORS[idx], 2) # Forced garbage cleanup every second count += 1 @@ -181,5 +191,6 @@ def main(): exit() camera.stop() + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/main_tensorrt_gpu.py b/main_tensorrt_gpu.py index c9c9d46..a45b8f4 100644 --- a/main_tensorrt_gpu.py +++ b/main_tensorrt_gpu.py @@ -5,14 +5,15 @@ import gc import numpy as np import cv2 import time -import win32api, win32con +import win32api +import win32con import pandas as pd -from utils.general import (LOGGER, check_file, check_img_size, check_imshow, check_requirements, colorstr, cv2, - increment_path, non_max_suppression, print_args, scale_coords, strip_optimizer, xyxy2xywh) +from utils.general import (cv2, non_max_suppression, xyxy2xywh) from models.common import DetectMultiBackend import dxcam import cupy as cp + def main(): # Window title to go after and the height of the screenshots videoGameWindowTitle = "Counter" @@ -62,8 +63,10 @@ def main(): "height": screenShotHeight} # Starting screenshoting engine - left = aaRightShift + ((videoGameWindow.left + videoGameWindow.right) // 2) - (screenShotWidth // 2) - top = videoGameWindow.top + (videoGameWindow.height - screenShotHeight) // 2 + left = aaRightShift + \ + ((videoGameWindow.left + videoGameWindow.right) // 2) - (screenShotWidth // 2) + top = videoGameWindow.top + \ + (videoGameWindow.height - screenShotHeight) // 2 right, bottom = left + screenShotWidth, top + screenShotHeight region = (left, top, right, bottom) @@ -80,9 +83,10 @@ def main(): sTime = time.time() # Loading Yolo5 Small AI Model - model = DetectMultiBackend('yolov5s320Half.engine', device=torch.device('cuda'), dnn=False, data='', fp16=True) + model = DetectMultiBackend('yolov5s320Half.engine', device=torch.device( + 'cuda'), dnn=False, data='', fp16=True) stride, names, pt = model.stride, 
model.names, model.pt - + # Used for colors drawn on bounding boxes COLORS = np.random.uniform(0, 255, size=(1500, 3)) @@ -96,30 +100,34 @@ def main(): im = cp.moveaxis(npImg, 3, 1) im = torch.from_numpy(cp.asnumpy(im)).to('cuda') - - #Converting to numpy for visuals + + # Converting to numpy for visuals im0 = im[0].permute(1, 2, 0) * 255 im0 = im0.cpu().numpy().astype(np.uint8) - im0 = cv2.cvtColor(im0, cv2.COLOR_RGB2BGR) #Image has to be in BGR for visualization - + # Image has to be in BGR for visualization + im0 = cv2.cvtColor(im0, cv2.COLOR_RGB2BGR) + # Detecting all the objects results = model(im) - pred = non_max_suppression(results, confidence, confidence, 0, False, max_det=10) + pred = non_max_suppression( + results, confidence, confidence, 0, False, max_det=10) targets = [] for i, det in enumerate(pred): s = "" - gn = torch.tensor(im.shape)[[0, 0, 0, 0]] + gn = torch.tensor(im.shape)[[0, 0, 0, 0]] if len(det): for c in det[:, -1].unique(): n = (det[:, -1] == c).sum() # detections per class s += f"{n} {names[int(c)]}, " # add to string for *xyxy, conf, cls in reversed(det): - targets.append((xyxy2xywh(torch.tensor(xyxy).view(1, 4)) / gn).view(-1).tolist()) # normalized xywh + targets.append((xyxy2xywh(torch.tensor(xyxy).view( + 1, 4)) / gn).view(-1).tolist()) # normalized xywh - targets = pd.DataFrame(targets, columns = ['current_mid_x', 'current_mid_y', 'width', "height"]) + targets = pd.DataFrame( + targets, columns=['current_mid_x', 'current_mid_y', 'width', "height"]) # If there are people in the center bounding box if len(targets) > 0: @@ -128,7 +136,8 @@ def main(): targets['last_mid_x'] = last_mid_coord[0] targets['last_mid_y'] = last_mid_coord[1] # Take distance between current person mid coordinate and last person mid coordinate - targets['dist'] = np.linalg.norm(targets.iloc[:, [0,1]].values - targets.iloc[:, [4,5]], axis=1) + targets['dist'] = np.linalg.norm( + targets.iloc[:, [0, 1]].values - targets.iloc[:, [4, 5]], axis=1) 
targets.sort_values(by="dist", ascending=False) # Take the first person that shows up in the dataframe (Recall that we sort based on Euclidean distance) @@ -145,7 +154,8 @@ def main(): # Moving the mouse if win32api.GetKeyState(0x14): - win32api.mouse_event(win32con.MOUSEEVENTF_MOVE, int(mouseMove[0] * aaMovementAmp), int(mouseMove[1] * aaMovementAmp), 0, 0) + win32api.mouse_event(win32con.MOUSEEVENTF_MOVE, int( + mouseMove[0] * aaMovementAmp), int(mouseMove[1] * aaMovementAmp), 0, 0) last_mid_coord = [xMid, yMid] else: @@ -159,16 +169,17 @@ def main(): halfH = round(targets["height"][i] / 2) midX = targets['current_mid_x'][i] midY = targets['current_mid_y'][i] - (startX, startY, endX, endY) = int(midX + halfW), int(midY + halfH), int(midX - halfW), int(midY - halfH) + (startX, startY, endX, endY) = int( + midX + halfW), int(midY + halfH), int(midX - halfW), int(midY - halfH) idx = 0 # draw the bounding box and label on the frame label = "{}: {:.2f}%".format("Human", confidence * 100) cv2.rectangle(im0, (startX, startY), (endX, endY), - COLORS[idx], 2) + COLORS[idx], 2) y = startY - 15 if startY - 15 > 15 else startY + 15 cv2.putText(im0, label, (startX, y), - cv2.FONT_HERSHEY_SIMPLEX, 0.5, COLORS[idx], 2) + cv2.FONT_HERSHEY_SIMPLEX, 0.5, COLORS[idx], 2) # Forced garbage cleanup every second count += 1 @@ -188,5 +199,6 @@ def main(): exit() camera.stop() + if __name__ == "__main__": main() diff --git a/main_torch_gpu.py b/main_torch_gpu.py index 54f85b6..aad4830 100644 --- a/main_torch_gpu.py +++ b/main_torch_gpu.py @@ -5,12 +5,13 @@ import gc import numpy as np import cv2 import time -import win32api, win32con +import win32api +import win32con import pandas as pd -from utils.general import (LOGGER, check_file, check_img_size, check_imshow, check_requirements, colorstr, cv2, - increment_path, non_max_suppression, print_args, scale_coords, strip_optimizer, xyxy2xywh) +from utils.general import (cv2, non_max_suppression, xyxy2xywh) import dxcam + def main(): # Window 
title to go after and the height of the screenshots videoGameWindowTitle = "Counter" @@ -60,8 +61,10 @@ def main(): "height": screenShotHeight} # Starting screenshoting engine - left = aaRightShift + ((videoGameWindow.left + videoGameWindow.right) // 2) - (screenShotWidth // 2) - top = videoGameWindow.top + (videoGameWindow.height - screenShotHeight) // 2 + left = aaRightShift + \ + ((videoGameWindow.left + videoGameWindow.right) // 2) - (screenShotWidth // 2) + top = videoGameWindow.top + \ + (videoGameWindow.height - screenShotHeight) // 2 right, bottom = left + screenShotWidth, top + screenShotHeight region = (left, top, right, bottom) @@ -78,7 +81,8 @@ def main(): sTime = time.time() # Loading Yolo5 Small AI Model - model = torch.hub.load('ultralytics/yolov5', 'yolov5s', pretrained=True, force_reload=True) + model = torch.hub.load('ultralytics/yolov5', 'yolov5s', + pretrained=True, force_reload=True) stride, names, pt = model.stride, model.names, model.pt model.half() @@ -93,7 +97,7 @@ def main(): # Getting Frame npImg = np.array(camera.get_latest_frame()) - + # Normalizing Data im = torch.from_numpy(npImg).to('cuda') im = torch.movedim(im, 2, 0) @@ -106,22 +110,25 @@ def main(): results = model(im, size=screenShotHeight) # Suppressing results that dont meet thresholds - pred = non_max_suppression(results, confidence, confidence, 0, False, max_det=10) + pred = non_max_suppression( + results, confidence, confidence, 0, False, max_det=10) # Converting output to usable cords targets = [] for i, det in enumerate(pred): s = "" - gn = torch.tensor(im.shape)[[0, 0, 0, 0]] + gn = torch.tensor(im.shape)[[0, 0, 0, 0]] if len(det): for c in det[:, -1].unique(): n = (det[:, -1] == c).sum() # detections per class s += f"{n} {names[int(c)]}, " # add to string for *xyxy, conf, cls in reversed(det): - targets.append((xyxy2xywh(torch.tensor(xyxy).view(1, 4)) / gn).view(-1).tolist()) # normalized xywh + targets.append((xyxy2xywh(torch.tensor(xyxy).view( + 1, 4)) / 
gn).view(-1).tolist()) # normalized xywh - targets = pd.DataFrame(targets, columns = ['current_mid_x', 'current_mid_y', 'width', "height"]) + targets = pd.DataFrame( + targets, columns=['current_mid_x', 'current_mid_y', 'width', "height"]) # If there are people in the center bounding box if len(targets) > 0: @@ -130,7 +137,8 @@ def main(): targets['last_mid_x'] = last_mid_coord[0] targets['last_mid_y'] = last_mid_coord[1] # Take distance between current person mid coordinate and last person mid coordinate - targets['dist'] = np.linalg.norm(targets.iloc[:, [0,1]].values - targets.iloc[:, [4,5]], axis=1) + targets['dist'] = np.linalg.norm( + targets.iloc[:, [0, 1]].values - targets.iloc[:, [4, 5]], axis=1) targets.sort_values(by="dist", ascending=False) # Take the first person that shows up in the dataframe (Recall that we sort based on Euclidean distance) @@ -147,7 +155,8 @@ def main(): # Moving the mouse if win32api.GetKeyState(0x14): - win32api.mouse_event(win32con.MOUSEEVENTF_MOVE, int(mouseMove[0] * aaMovementAmp), int(mouseMove[1] * aaMovementAmp), 0, 0) + win32api.mouse_event(win32con.MOUSEEVENTF_MOVE, int( + mouseMove[0] * aaMovementAmp), int(mouseMove[1] * aaMovementAmp), 0, 0) last_mid_coord = [xMid, yMid] else: @@ -161,16 +170,17 @@ def main(): halfH = round(targets["height"][i] / 2) midX = targets['current_mid_x'][i] midY = targets['current_mid_y'][i] - (startX, startY, endX, endY) = int(midX + halfW), int(midY + halfH), int(midX - halfW), int(midY - halfH) + (startX, startY, endX, endY) = int( + midX + halfW), int(midY + halfH), int(midX - halfW), int(midY - halfH) idx = 0 # draw the bounding box and label on the frame label = "{}: {:.2f}%".format("Human", confidence * 100) cv2.rectangle(npImg, (startX, startY), (endX, endY), - COLORS[idx], 2) + COLORS[idx], 2) y = startY - 15 if startY - 15 > 15 else startY + 15 cv2.putText(npImg, label, (startX, y), - cv2.FONT_HERSHEY_SIMPLEX, 0.5, COLORS[idx], 2) + cv2.FONT_HERSHEY_SIMPLEX, 0.5, COLORS[idx], 2) # 
Forced garbage cleanup every second count += 1 @@ -190,5 +200,6 @@ def main(): exit() camera.stop() + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/models/__pycache__/__init__.cpython-39.pyc b/models/__pycache__/__init__.cpython-39.pyc index f65bc00f0baaa3fc165f56e9fc6556d4506314fb..230c9e6ca0e4f24b06ffc867329cb3e564773a83 100644 GIT binary patch delta 80 zcmeBRT*}Ck$ji&c00ia74U#AFSX+2GTg8MHrxq2*q~>I1CB_>V7{s{bCzs}?=9Lu3 ixM!AllqSUl6y;~7CYKnQ#W;HEI%ei3<(EuMQUd_MF&Xy& delta 51 zcmZ3=*ult?$ji&c00b$IuO&_7v6j|#wu%YODap)DEJ;;xEG@}T%*>6c%+JX$Go4tk F1^{+n5N-ef diff --git a/models/__pycache__/common.cpython-310.pyc b/models/__pycache__/common.cpython-310.pyc index 6f5ff248289cf0da553b8b3aa58b566885de3400..95d2011be7a746ffdd88ea630407aca25cef5c50 100644 GIT binary patch literal 37450 zcmb`w3z!_obsjq1J@1{}on0&zz~aHhgCG_pmUx3A!3VH-kP^6n2#^#eM->PMxmx_a{U6oBa0v{OvoT(C;$P{TD{y zDLl@fnW2y!%7kpgHp@oFkY6)n%CD8N%(lvXnfof^neoc@%=XHT%#O;=%+AWL%&yA) znfoidGrKDfWFDyO$?U02WF{(mGkYr!W*(IMedUKL`!f3~lbOlN!+%NIgmL} zIhZ+!aKAn9S}1eKK4Yitq1TMe;j^@XDdA47Q5 z-hl81q<91|A3)4TdlOoq<@3kL9 z_(2IzA^ecN58-`5iKh@VX+MmZhl7~Yh}mx+K+J(4<_uyE+J_KxD2O?Wn8WrF#2g7? zo<_`3`xs)51u@Sc<`Me?i1|Pe^FhQsYCndU$AXv-A?CRKIAR_TV$LDv3HwRJJQ>71 ziTX~dikVlLR`#n8mrdOA5(t5j;$@k-5JC>NaM zU9OPn3DfU8uk3;<*r$ql*H2b+m4fXq%$Exjh9Az?=5O%#Le8DTZ+t5e1`5}UGg(*Vs?L1PDfo$U!F3Dja_#!WfFC_~{_NS)&-vl!&Y!>J$1bSa zOtD<>hx2oV{FSUKyt+_Sg-W67I(|yWISVs0#p`~*4qYuMr&z1{@qDdZQ;w_rfnqhU zvt;MdIDep$yHd#7#j8c9co{{ntJbR7O742rS(u+!1xK>?Ir&_o>07p!fO>c(2X*i5s~}Ia{vf>$`AzQ0TDd={BLzK@6x>uJz`n(^n!FRP- ztz`zYEw6OGeCZ78ieD;Jotk>?k{>NpXN%QBJvOO5XV{eJR81A0J;xEogqG{^$x4|c zCz=hW+O_eE7<&6zzViDx4YsZ%lgf65*tYt*u3TqTZhr1mVa7M-FLMAcONzm+ z6z7U=!H>E#W&EOT+QX*zlylmiEsS5f@Vp-@z~{~{)CaSHFUWF+S^7H#PnH}%?9P;& zOfq||a9OvOi884RIomE5wChLbC6wt0C^I@3JC1$yOifjCt{*?4W*4|X{OCo!GOVc! 
z&u6T&7oNXk_%Q(ILZ#~5G0{oePn^oRxibnY8SjkicS7n2VC~(f^N+s#yn~hWa-m!- z<+A(t?|=DJEic77FP|;CPcK}48B4QN$h!xRzI@`DeJ6^Q%Qg39nGNUVydaRt`5Rp8 z^EH6_j5N*}Cpd3`6SbnoYX8E2Hr%g{A;Dd)fV>5#^+$i}aXyZ~Ei8#EVKwH4ypU@w znri44e73PMGy-=+s2cxNXa@VYZM_!vLZ?EDMkC}Ic4#4_Cfj))K#uTOs2a5+NQeBA z&hQK@is-a@1oh$;o*)UKy%9grA8$zEmU&XSzV@Lrqs2uUh z{pv9Cajb2LR7Vl?lgGsth_{AiLLzx7otIrQl)v8xz$fqym z6H=gd=qKez_;C~*H?$aP8cQJ~$Er;6bkwYc~6q03ABbM0olP#x6RmY76s@AGeF; zoXedme7Tr&E=~0N5o|yO<;Szw7>aH->zmc851d!MdeD|p<1Smh>i3<@x%s(itf+&w zZ(gqYeRg3cw@`MoxxA}RqIZ52xDR``AAiOMCMdc$u=nQOiG&|^3T5E8`~f@%)hVVm z9CccLXZ4gmj{qPvk3J8lH9|g6h>Ph@3EW16a4>7QDRDR8V$yX2B_Z z2nl52rj7bgcg^y4BB~JqmzG8<623H%Js|&fZexsg6Ha~o^ zc%EP+2-TzUdF9Dqkp1#v9P0W%&?TN1zKVti*C z61Ntk9g#(7*lubuwV1_;^ciCvq;RV6w9ip{(Xw*`S zS#zA#EZ$-yhjc?OwJ2jOdjgD-t#J9k_TkMM1IytA$_fUzcVG?0HKK~FaqYnaaR3Z4 ze^9M5bTYVQaH;bLn}s$o#o#8aP6BgKv0FEMlTE>#ZM?sy--6*IP6b12s4C8fpR_qGG_usaF!^6Qo(TTyX zAUj7V27_LT5xQ;bwuwC46F3Q-6p0lvNr8Cm4WJc1h$fIxC{!+kVzrAEKT%z%WakPw z+nMO=EC7uUqUS&$=ag@}8sPD({_wNd1&U060vr_(KeiXMlJl(@AbdCnv937EzL8hg z@Vnad60JA`aFWKb{7b_HI^DEM9pJyXI1NQ80H4sUaRpndyXL+?wF49K2lynkPZvUC9#^TYvNuc?f8}3{M9% zZ47A^d*!Xr6-y1f7|2_pMS`2@hMSRFAd7)eL)oYsF2z0-`jjzal8|{V?#8|FqCxWZ zhgd>_#Bx@HpEbQIDR~I>I%9A?iKMT@8-|O*Z-LT&0d&)=CLvevl!kN}8}wsH>>FA5 z1Q`pAwGpqMI6hmbf+0{(TxH2|IOff8$TWuU)gS7b6&a2;cy*D#t?WIELH6gfC`*=w z;yU)DWxY0%7a=g+ zuuX#8i5n@9Kxz1r_mXDM3hC`a&kEt{AUMhj;kE)w68qkQp^|`qtcE3nM0?42IovQC zRwLZNnrLDYmyD*!9c^pLYD8td&;_zz!U~eN@_xx8wTe<>Uf7E)ahDEHE1~-Z6LU|Q zhzm^xnsY8UUjY3CViXv|kAVWMEI=CbX(UoV2*-~>Cc;E|)Pg;OM3`?}R*Uj`%{S(R zi0s1cGGtskT&6;HBn?uGBgR9$YfukWum;slr0y!EL&1C!FSvbl7Ftpd#bn(LMUp7|@6WY_(s6cYOFC-a=F)*mO2kE}}qL}y*P zCI&q+LN+tXRY{V@HWq=@uz8T7Fu{r%UKpE01aT41s_o!raYa^ZunkDncUe021Cl^@ zVqJKD3K2rG`BwfaFna##K@j%#N;1fgJ9A*W^9!yN;v0bbe0DW-|2m2cW+w$u9s#ni zuj`COdlmM8Fsb+!2*0xz0fMmVM{jy4`3*o2y)gm8xMQim!LEFP&X3Vq3weJQFNIFq zYt)CkniYV{grz4-{dE>{lg?_?#Si1f58w%4XTVCw6yw%}9rf4n7VJ`gr^=~|7c?;_ zb*-Rt+d%OX$hizCq6CUhhcv400*Xi}O61KLN+|>^VlHBd#Cu}nE@+!uV8{VPEuc!c 
zk!DiI3TN7fa7y^iev~ml)c+}S#)4cU3gkcNSzX9KE#MZ@tjHO)fHn()Sg{QdM{^r^ zX9pbF22#N`5V~dF0+$ZzG7@YD6nI#{viXxICk*u^bEp-^AM^ee{RC|FMLO-x zK@kx7QfOxL=6I++(KGmMNL+0{7+P(SXviY(q^9W*+PG#8(K)OxFiyD30)5ZZ`4pX7 zaD1bJmG=UJZQjB+s=CqAe0EMRD9(6ihhS%gvrS@mNP}`3#iB|B^@WX%*eB|vtM{sB zeG!F`QR8}dPQt+|TDFp~NdRwXGO7UrH6Wfs&8Qn|#wpI=zP|*a1^;1YKt>7TPpGe; zK=r@Tk)yHd!CcV;J zaXQ=mPvgbk#`6wafyp=1amBbbC%=WV&8b80g{LDZ!@}c`mUy3dT1f0R-i-hkn=M=n zNg+w?SPAq+fN#l##vu{x#Q2&Y5j4uTbNdT83FaT=&=8SHnxql+82BvX!28haf^Ws_fp zx8p<{Vnw^)$fn-k+SIwBV_P?GnTxStTerwRbgf>Z$Z72ET#ai&=P%)9u#@iy)$c>k z-wQ-n61jgBaQp?-CHQR=J8l4osqPw;Og9(&s?+@s@#5!O?S-_?RE$}3venPx?L-WR z4Tmm0x7K`eF5B~Y6oKyfBwMz8J{vLYATb^~U=D?6kl8b*?-IzsthU9UY6=m;`p7B; zC-(v)+GL9QDuc2kbT){!_J)6pw85^O0-XZUJv!B%s=s95{s}Vw3M>2H=`ghBpgBD& z1ab)+%mrI>Ta+C4BU6ViUbt|_x*I`RcL}C<`&l$Mj7ysC=AgFzn4mq#q))mAS%%n) zwHHlngp3)t={Wvbg15sQVlBZ~KEA$%hUoEKeFOfh4Dzp|Y3)JA<~tzWY>zS~mv}1Z zCHtz`nKc3S=aA%AnM}r=p*6?71+1~DLucn|P>1Lm$`~@X{^)NH;om|7UpGvj&1Ty}ZP~WOe)tnhwS4ERN&2Dfo$JbJs7_YGoXbJc@7^ zMwgj;0&#zYM|OQMk>gm$tHD6~Bpn2H2eqDMi9ZI%w;&7B@(HOmsSi}o;rv~&0#1aM zVck>Ax)z%KJ+^27P7*@HG)V9;wkj}#sqWUTN-)WE&LJlWCWv+^M*!Kh6nbMj$N`ZE zE*Y3&kWg-<3CY2%A&WB5dML#>c^jbPKvwX@kO@g+82=V61JiUMMw+d)*`~mQ$`MGR6oN|g4=%PVe4AM(Ib6;Z}9dhww?N&&8q$yd% zeG5-pK$$dm8@s++@9Sz5XdSLLb(79&^dcAJf8oo2K!>3`o}RzfN>%>~(Ig&F{u%W) z0&6c~3Mk&+BDO7&47C<7AOVska4C4O@a?diV8vD}UL`yPNi_`bDvO{4|LWf$k)F$M z(DzX~9rSw+!9T|nj3PmN#25oL!CtmkUk`k9K>F}ZF`EmA@U3Ov0enBO9KPi(8HO-* zF+}JgXi5{w5^pUTGeB@_!1|#Tunu|V2Fb;5foB40j=@cSEDkc3IUi^1F??mAU>l;&yTN|hX zX=UULbhI4l4D?^PowxqzC$(hZarVOLz!f(I+aUZ<1BM$th*0x(41BF65Q8xyoIsBE zJbu6mEQ1uDzpNxePa#&H!=I*4C{gBL4RoLr9dZZ|Nn;$;k@RA9s39vyicp|N!9%DK zHj-or@Rx1Aj#lg68H;C&h-b`d@vZd1`xWWC;sI3_p-RqDCOi8db};r|bzq}|WsdMg z8#ad6Nw!~MUFa@$WFxW*($NxvOKStie?X!Hiwc0lN=Fpq)&!1T+Co#_6j6}2AO+q{ zTg3f@&|7QhbM!#b8C2SVBFd#fV#I{Rh!K*o+XI$bkc4b1e4=;3+V8?)e~_X4*Lq^-3XLv3CkK5{drJgh$4?)Bj4MvL!HL8#1|D7aFdR}m^dB$ z;p4?<4~Rq+0+sE=SQ;;S0GPi*BAYV)aoc`$9A) zqh5jsNAIDfP}fmB6xeY1PRXU-Fr#B3Y?(lH{9O`#+tx8Xc{dwz)MDk4evk+9JIe5| 
z*)oshltX$P$Dygg3VfSB!mr+IWCw`i7mtQhx~LPqrT_4!=Yes8!?{}=0hg$`MB_1ko$a%qe7goNlRv_Sn1JoQ0c?EfD# z9mO2u4Bs}^L6@3jQhTNadvTs|+O}e*+vz#qnCX#FYo@sbx~7}+tEc;I`u=Y^zelGt z&h4psnjQQmUe*VK4z~7%r5p*ab#%4IC25-PfnTI2LlEb+QFTW zBIRZnnbD#iDzqn{=DuE3+6}0h5xG-4IAf!mC(wFu5jH0Hr|d4?NDIKu#s*IHHI7O8Y7MMjZrLv80@u z;TguWuCc+k8XMh2DM=mr+J`)|l(NHDti~oU>0I(g@x;7@)R|PDf%bi?MIZAFv?c*E zfz)U8cDOO-_PYb#SZT1?zXU;rZ7rH8Iqi+v?7?-s87d7|P5cfwHhU5ENpGyVZYfl? z@H-;nvoY*I>)*7hC9hBYD|fWH!A38?7_t*@f!~DG1hZ8%8e7oXjipVfdkbn$-L@KI zDD5jNO50*5H-*%{bjM1YJ@M=Nl=k3lv#}K|-D;;dp`G|0+7vR;*2ZSk{g7v=-|DJ+ zSl8`tvDYo44er*`w&r~t`^Gk;Uhk#eG}MD#=||e>*L$1Px9!oYVQ;`8HQNu)sXX4e z5ApYSCv(=UnsANAxOZP^JNh$jkD+bb?alU<*FxU*=8mNhrAmz*@b6p-m3B3D+G%g+ z)e!2OKo1aN58V6VLmw86pciKKm%M(Y`5RsB+S+c{wxBKdm2o$O=EzU>Whl21vdwfa5-UFpcFA>B(Y;VUZ+ksZ^@%A+LFQJc{Ld^qc z>p|3iqbu%^cfY-p;!V`E+uMU)O?VTf!$|eIH_<$TjeD>6K5L(RvLIMe~jO8xP~nQ{Lo~@y1|dzqh}6!h5)R zatYGG#@LJCYQy+P312mFfTQ)zZn?)Q@Bc^ld%)`ydVRn>g;75^8*1#ZCmRtwhuqUx zBWHNP_{zr{hmr2AcNjGuL0g_i=oy9(J_`Q_>Bm@{u^)Cngz!0r7Y#s&sUEi@jYHmn z=Cj^mc{hzCuft2Gy`PY>WMWi~y5~z5u9!%B8fhcdh_|EpVebg%2ssY4N^2a$m_3KI z2fbt7QTxDcVAkeEkqtfK?emU!M-l%B;PH{>C9eF%h{`fJ7^!?6e_(yXld+y)2P1c4Wboa?3&RdT{HS(<5BO?(np#v$+{au`j@;k(*Lur z^hdkWXFiQv!;Qzh$B^b{Z-q)9Mc$X0w+gl0(ko5i4$dITIpPhyX{l&eImdd-5g9M< z3CA19_3hxs=QO_Du2mxXPa|NAOnFnyqTubRW(i|)3gIjGJzf2bH-a9#>Xz-Zi^dNa9&oz) z*Igw&txIyZc$@HM(S8Ov@`G&*S*cVTX8?g6JY+dvYU81r{UMEq+PL8y=F;%aH0Qwp z19QQ}3Rp55XQlL?_0E=FwV%aEP2VQWpJ^)ZEOI>UJ>7KhxEg06<{5+*(Ec6Oz1}F= z^+(d;^Rfe7Xtmh>aL~@@%2zAb8XrW7AH;ZE#CVKh)n0GjurHyk4b?b$|GeaV;Vmc& z{D5H(x%I`+RpZUDN_k_?= z?v8^JQU4E|OT0E`7p@lbI1MUR=gT>G!n}0lNA-oNdi)%6;8&eNYlKbYQPoBiS(0a{;=E`6_OJzixnNa zI9qh(xF3Y|V<@5P-b05lC0~Fmq&6<)%N5d3ldvTL8VyB_v3vCsE2+UYecgTDf)#D& zc*nGL*|c5a1n%(T!gIw@$JvNwRmYDL86ay<#nF59eSf8jQAuCzGH(=T=W4Yp>X+HA z2rs}n_3bjLyj(uctK?HJjGvmG?w!~XUMJ6A-8X;3ovT%!c>M6>k;%jLZJmtIEr3U^ z6nb*0aa2+tqqO^eiHZ{=wZ`8CP7q471N`o^AdZX7?SIHXN!y{5}V`b!oUhriUZA zjo&*I+fgPK8m`bPtSelWsx7#F0@v5GmwC`ukL;bijyw0^$;-tmz%zoChuihBYlY(M 
zoa{H=?lh0~&1i2JRvJFQ~*1+qk*zTl27{V5-Y!?%u121zeB?Ib(ScvvrN9fscBtk z!HFwvWH|MWGDF(VLGKtnAa6P*1G;P+^d`K2dBT=igPvr0jz{Y+0uPzNb$CSI_?A(E zoU3Ve-yfRAy&@dCStaOwG;mQGca4k^v;-gx!M4`+^Y9JaII&@?_|`A6p;R#pkFKHw zO-iCtF+zcprvHXF3MbJ7MVNW0z+#7QUOkXW<8YUD1ZHWXA&dLYEi`G);tsT{gZi-L zJ1FA6q6CF)HB^84eNDtQ6q}te`rNRe;4oM*foy3HDS49UqAZ)tNp~F7)uWim ztec^L6vl{m@DDlQ2}ET<2QRom&-yzK+7b#nJpn@YGMf zuW_2Sv%%DK0Yz)!PQ-z)feHO+ZXRgW)`%(!q5=Wznw(pxvH)}owjZWWgKs$n7pJH) z`3bq;EzwCyo26fEV-tJvjGyeZ<8EutDbeN?G-xNizrCPSM%}vp=zfUjHnJ+7hykbf zJyV751Z?2I_4`SLVWpJ|vpE<$*}EL8&=kse%hE{?YN-#Fi@3llmQN;~+T?-ya96~E zN&5zZ^}bdLyIjU%kKi`z!gX9Gxg+(Ua2sK0j2L93Fo(}?Y!%qVUT78GTPZm!&J zT#G>HLXOh9ZpshKXdil6pFI8CbLXFXbo{0B=gwa}GJfd{Os{lWKVEdkVaI2jrhsJ5 z0t(`SSXWRIR^S1P$o`kOmqe!`mQ{*ATgqhh`DO76e zMs^+wP1@=M1^n5{Wk^hg;LR4X9X<~W93XACjpv;4ORDI?Hq$dL(?R1G3ko)m*pwkC zOyOO=T*UBvG*>Raf)z2OGdzcS+`_Zmv+uwV6Rb1&Nq{H-a|ZTu>ak!{uuBIckn4;A zXC621d(Z<{D|Zb+4;$eT;ucp#ArQow$rnR!1>`K65D_~Pn!IUU5iKqQZ}mS>#vYU! zP_8Kh1mydJ=?>*v{3bTFOrpB~z~RIDj~zXH_xBO550g@OSZq*D7hEAmCT?cd$(%To z-2kP&g+{19rqdQ1FC=s6Q!YBu*6+cyUvbn1{81#sl+=}o}z=ptmIG8KfE@zT# znXbj}BTj~iE!yhJ46t@t2}Ct8dc-g;-IC;tkSDy6Ndij-QJDk^2V!@N`5+~1Lb-F9 ze$ofv+kzSf#1>b(3z@ivZ{?~qTiBP)2DZUqBIDZ}LQ+7WbZfJ-KzHG1rq7&*=_(-c zOf29PG8^mytRUg?Fc&4KIjgNv`C;}fvk`X1fE&R=2HZgw+@(u~lFLIbQe;xRPYjZo zbgYv}v!g*ssN+nuir&r_GDF>OgwD;RTM5y9y^%!3+?wB$R$t&9;&8RnOO!4V+z46Q ztWITC=Xuo^z;@uMqLzW51kqE;qK&ZCWV$t&rQz&@s#fo^3naBOecIfX3moML$6Y8p znb@=E1e>UXEG7cY%^c>D0IH`$?c(T0g!as&+Jq4(x6I%L8ipgdUIfOf&Su869j$Bt zV3V*i)Y^vB9)j2e>*#w%NWjxd)l>#ssawdTx>v6shh)GduEK_U!vVhvf~+jud?Bl0 z!O?@FcUl{;UPMoZIr4g4U`LTSf8BJ=MRl5rE7-cu0M(;;f!Q`c(eXwUL0-)DK)#R} zC{&3EvJg^axhbRrP{HO(wi-PUP|z)z!`WxKkQrn{bd0dQI!Vwn9g_kRT6QKiqF1oZ z$m`jfb5&azifZm%5hbgCODDy7fRPzi3&S$eg?UVIVd6vj&^*TBmDO;XK5Ak?v@#Fy zc!!xg2$6@!un6KR2>e27oFNPRpxVJ?JLz!3)%|oR>`)KT*+XXn4vePB<^=pOcP%f> zz4`!45~N35nCa`1#0aP5Th)Rq*i7AH5iut)&X?*Q6Ig@6q%A{AzFCB<2-&aI)D=G= z9IP1BNNTTW7!T`pEAtd2%~&q*1?+XoND5*wW>kS;SWc6*?Z;cI*^k*bFfGNrI>Mfi 
zMHSXlGpHuacl5VbBxnTX;0m^@US=v7>cjxi5h}1R;5=r)Pjmr%yhVKZeO)WU@9zO$ zf1nq1GZu&pvQ89pQ5q4z8#XA?qFD`V0y_FAb~Bqn8ImZyvw6;s!$}V&%`s@On8vny zBj$)1hDK+cIxX;pVel!A4SK{h;ZNN|s(Ztxd6Rcshml4MyGcwO1S&%JMy>Eo{f_Dd z!zfiVD=GNG_Xf;iq)S3$G<=h9G19+cC4jTSo55h21h1T9onggf}h9^)KQ^)TOTT(N%b)*kS zWu~CB6TXKY1ofrNcr+E`tzOgv?anw#gK;y|LOn9`dr>Ql`oi~2%Vev*i|}{DVOob6 zwx|!JM?>C8iKtzANuvn5#ZlOi&}AgiD)x~*gpCO^Wo<;7=)Emw+8j3Po8No0%3$rdGt zrLe@t)e+BhcH=y95@`~YTtd${a@(qgaRPxb zd90K;&>UnggXaYU%`{?}|2GgCQuE8w4mZ~!egw~ObG??4mqtCKXh1l@TyNve2Ath> z)!=SK8#Z}3J;3?U7*B#E<=;ih%{Y1M!|xXL%N!T@w%Spg0MWF{FC&y_ZnLB44~(Uh z?rVWpQCm^!Vh9Biaq1@P;;L?-?s-xLd{7# zy~Md?Es5sC_K=J^#?l`4;&S%;ad$tiK(B+=Ei^p&%!X&7S&Z>cA%2)Auyz<1NwohU z{mn`DPzh&)kSst~z&+GF;-$R)=22J=#*GDUpyh)d1B4s#>Nnht?lCW}d&QZo{vz7* z2-<$T({?HK13_CKZ9dk9uH!t}4@P1=M}p%ZXW8-U--7>fl=GtC1Q>OI$3B3PF zqlNb%3{djikN<`i3CZ^UU#y!rxSPRR-5pf_l5+7!YF zoW5c<(l|Rb)NdnhY*PrLN$5;qZ4GS>dBYnp`b-t{=nPI?2kp(&ZVq7aO!H~>3#Ya0 z(KCR*oei#&GdPJ|$CSu9B4?9nggfPrv_{Gs-Wc+RMnXsxZ+^%dA-ue4y!sC8FT>We z7uRFN3+_1k*$RE?ZLga%W(4PRow@!1kj(Lrdk)$d8{J*dE#L|6dG}c_j`N_foadgw z*iP$Gs7oync^~IN*KiT5hwnwtdOT2v6HTcf0SXf>B%X}q7G}l3X#ysEVI+?#acOK> z5SWo(tz);uPB+dye*9en|DUL*$kf9WP_8`g*6QhTFlIDwTqx^QD_C1%&~n*+6Iwx)C(Yl=zWM}$9|1=oVtZUoTTM0`XmE8&m?#L6 z^PIav{6rQ&13UtW66M6A@i&8W2pZ)Cd0tasFF@5fY7lK%(Hk=A(wi-<-iW=@QlLo6 z+eBU0cOdb89i?beIUp_q3Kdnsgt1%8LV-xj*FMX>i=?XE)e5>Qmms3B>=eZoYJxzB z-a7+HKpaPHux(kLRfjl4l+*hu%nD9s>e?74BxZKu`l>y+h%6!)l=&Fojb&&dVdsJt zPYPN;ltx;;Jqc^ZovL1-2JM_uxf?iBBDb@uc#t#oX*M5EXjf=bE{$O z2pml=#~#oy)JHfxk0O~`WuR`iY8nkqoEOklMW*MlXf3uE>3f1sg$|WHRf#^KnorOt zWIInu)sHe*g#+V11@{@Q0c|64oNsGM2o1EsZuxi@_FWo!#uYf&T2M-cB9BQtBXu=iOv*D%h1<=13fljoD12j z2h|S73Ln9P2tPr-RL@s5=ZG{V7SOVa!>*1aJ%p}mmNFJjuzHd)fKVzdg8Kps`9mxz zt_tM)3%2G%H2Zt#p!z{1rw9Q6Ej%zmnsBembcC4^T3tArPcV+sc-j>gYq+lZ0mi*S z=aX=>z(TW`l<8lsfhAs)7i8-KSp<1*4U@u~5j-Z=YkcAH)u-s(qC?$XFbs2Ds=L3z zes@?6YV!IPER>5`*coW`l^wMmdHt|P6=K5NPXasEAYeqCAJxlQju`Z+6|j`0n?-T^ z36bA`hZH&K__P@{@rOkW8W<0${wQeYD5-GJrTis@okT2ci;xb6=nnrtH9rS 
zydMfJUpBk}Y9)fYdj*iUDI6dN7G}FjcuiC9!2F-c~0QhkXf()mlzZt`(upjmFr8A&$TcC1o>N6YiV0Z18Y2w zun_KtcW)w_`Z`MQ=>oAm*N}AKGw{$j7ar)*mvCVM7AZp|7#Ju?CuU;MHL6>??5-dXjA9?i$v z!+efD!JUFEi3zkB69P33+Frm9x6WCoo=M@xXk}qu1h|fpRkz5xdZ)#Y>STYNGsEM` zap8u+hu#|nG{*lTY}GfdGE+L&yATz*a}T?o!WrU2Ky!F^j~tfh+8WR= z0w)G5xfn}oiQsgUmt#z4g9{xooS}jDaB<5Q_Qdo(UtZyYfgz#KaQ_@+=Q z^sJ`Ha35>YNQG)sZrqK*Af#a@?Br{4yHB6R;UqROi_UyBqd9h?773k?0KxR%B z=LOuqBl3-fp1xpwI`+_c@kKVo1P=meEpicd46pW37p7v@Wz)G6+z}=w!}-i<#`n?^ zKY`+fY#IyI53VxYTGl9Yxhm;I&R6|&I4j!Bu_qDYu+5?lAcq&E@vg!W?K1n8kSL<- zP|Nrkg4TXIorb=S67Fy*{vtf}1Mj<;?K4CZdJnEUp9#;Z`8-TQ4{9l2OE1b%Kg|kv z(m94I)I)Rz>F|VB{e8T^vM87RD8yJ000QD=ESb$W9UL%l19kMy#&%=O*ayhd@rwSyml&!Ao4!T6s#Pp#w!8kw-_M zuHl-SppF5oA(m&RwXl#PLYRW(XGn0m3fkBa87F#jot+R>|K266p2dr=vQLD9xW!w^ zVWYl%wO(ksHJVNF_k@WrFBs};$c~?%;?KZx%dmpo1%2_#iKerJ*!xK&$EQE!v;u<3 zxf*OZBT553Tcl=~q0`ilGAMKKNAQt2a(DqM4vZ7}MDqD~E@_zw$jJuA48;_AJUxHp zRru!+%XRJ=;CiQt?-NkB@B1U9u8vIp&y z-7j|v`sGf+0I)$CA2`9s2rkkN=&aNh`o@#9|K?Zt_nS|inN~;9?n^y7C2SVKQ;^&( zc)G51fnewTKHQig!ozmKNf$(iV2!ci%ZC*SSmC>4j~B5>O&aPOe_I#vM`le zimQ`+uMWda@}C`n$|AS5z~6wOPtVHs*+78W2tPNXiTHGVFi7;kq9=cnVaz#XQl z6725heQWN9mjt%7;Pd&x?Cex*f z15O{#>?)V_K6jRHg_8eMz5rPB%o(ZyoJe`$$3VJX#y4{GEp!1DfgFJsLf+eiZ`Ppc zpExL==)IIhdxKG99Mlow|DrzJ>F26e`8h|NFPSR^kDe>iI{0J;$Od%`uBFz&4f7ut zs*H5t=T-$RstB$vnD|_RD19ItU&OY87@9-F=Mgxo>R9_NK9?YGF_)MX*e4|O8kqL= zqgg0alyB&f`la?r@gP+n#Nu!jN0hU$5h8<|>BAQ)VDl6TI7J&QEXU9{XR51?+^?X# zZ*b&zGMK_Bjbl%zG?s?U>glf0(v1)rD}uDGzae;dQY!=YU*K}UTF;Dy8!ga(*#rs7 z8{mw9-%OWW>IO(kAvI}GS;|t|-7tP5yb8N$Kra_IFr zLzLYp{9hI_1OQUGtzwUnL-iH#BUjL#s|BGI~&g&(c-;z<+p`<}bxCSWU0+>hL-OUHkgWaV)kU%osPP|RO zj(3d#FVP(L1_X^wP{YZc@CFu*%^~Q%o6T{-vqSbUD6|36T0#ALMb^5-OI5?@!$Z&yTzLg6GWFR1tVmB+JtRa15#3!EwPP3mc`WGAX4wL z*DrC*24N0#Qet|b!yernY7DNp3t%`iJ@C+iv1=GNTx29e|6VuYZqzdbsXSsAEsjJlC z{;)fjY#f^ci6og7i1}pu7k0+bjQg8JQ~wywCD6*WUsT9d{5a3wd3p=g+0y*1dK;O6 zpM;AO)(7b|Kc~$~te}~Qm_^(~vOuV7xnxWWlwDH0U~|%H;vy;dJj?Cu7L;uzfvMC9 zc}9HX1fz z?yei~GCMz{v&SH2qIQ5t1ScL-{Y-k2&J-OH$~}XmdfQHRbX&9>Kp0ek7vV_Ax%$yd 
zqJQPb+44MuT#AAMKSo+Tn{zb1FS0Yrittqk$St8|0lC&OR>Y}L947T)2Icf0vM!jr z2W^axqZFWNsB+gtHU^QoBf>OABQSmpFGUea#8Dq&b|6Nr?cpaNqYXaH7lzhKMQa+x z^H<#|eo@aKn1bfqbJ?@cop|Zs$+ORiAWloa|0~NL$~lmb1-H=yfvc`s$Ya8=lI{E} zs1YBOsx9DTo!=S4^^158W~Use`S^Vme#ip)KbH$-^=oV!DM+=RH6%3w4`qBu2sDTd zWV5%5xS;U6Ohzdh?1lxO4m537b36Z$<$RSzL`5BDKNPQc?L*7dL=teFNyv_B5t){_ zA&3Q=VJs_Bzsf{M>7#hzH%UkQBs}Rp%`;I71{sz`9%{@S_+I+Y({VEbiIzyu(k5v$ z`EZhaI%!x0p@1|mVkZRkXkM!;SE4 z)bG*xeL8pPe3On$k$^UdQbqboZEcOv+8odSfpq_X<41H74{6;3v=fp;oZXwLbT~C) zD5lCILC+s6Rt!C}BZ`@o`=-$HAQlh7e9*5D<2g*!1G~z*#H44|WS$@rkbZl-rM#5JDmE zfIBGU2pL9vL!>1%%BCUw!vQo>49WJG(A#4VE@#Kvxco2zvT2OMh#fYJS=~3_a|olP zMfe`2^*MQ9gcf2iy`fA*dddXA2;qGQzdhoc)&boz@s0X4TA>beIt(rF>+FiJql`&x z0zI-DqLx*!NVSgSCfgq}!LA`YxlHgf-!~A6U5KIrs6Pp^$KLc9g8ClSpR!qkN4Ph$ z6@l`rX*_hV{y+!Ky=Pli3=IM5A|4SVwCDxy(LF}`(2@;oiEPa>9-Tp9bB=OQPQcSs zJ|YERS^hg68lZELX7dV~1WQ1=%*oeCbd}nUv9=Y2KzD8G&p2A0tpBYy*dE45RxPOq zWX92wPlBz5q#FE`XnlZUw?R(@fCHiaggJ{;q0m!rJVlX{;cBb8s}V8;p+`}x!-1#z zQGNG&PhDH?H{2yJ5aQqu(J4R7b1p4<_+y46ytanVgUa`3;o{S?R=qgyM;HO6V8&Fn zt{*oyecblnIXog)4a@_xX!Sqnuu%2qbSP5`l+|XnVGe++NYZ4VlQAVee~`V~0S^yk zsBpzYARhAQf83gQ?6VQuSAE?oeLr(a?)BtbI2$E}M)3^bnZo0I9`j=Z9e{6(Hxe#6 z=HRS?B7#%kqhDDM7~_R4aF);?FukO0ZJ=HZ4VXeSvB;|-5wEWVT)&C&G4aC!9Q^UO z633t!0nSa{s$T(zr+J73IXrUUqA}6m8}IdBF`F@HBMrb}@Bqs}O%!E-qm*)!-hgdg z36HRjzPBQ*a3ejzmbP^h%e$i&!uC}Sgk$@B+ z?G1Uu-a2sb;G4V=aBPFO!*=>h5%Qt0em{7hA-svIx4m`9Wq{vDY4#8;6Uf_jjlOD_ z+B)nRc=kSby>Q;Y)~~chxLtCyn_4lA^?1Jp z8YAmB%Z*@gVbq$kHin=bf^;rySlARYZN-R=fS;GQ!3eK+$L&#WQPg{qvLnc|%U$2x z?qR%<7El2mdcZRf{|M$`hrL1OA&$~E;{8tKfPkUkZwb8 zn;ah$l}&9--$|>-lA?JrF6jPU<6+!58rOTfAhyqNJ%KmYi>_`yG!C8v@VzpLe3uxK`oF_WLRj5_4?6xr^9Ec?M7||mAKX)|joWg9$-QZ! 
z9+eLqkJqE)(yDqGYAxgVl4z}L`$n>UE_rdjP_Q2zzkl+;OvM=oM%f2)I1l~VhgxxM z)#Y}~^s^Voal1wrIi7?c?oQE?`bYkd`b*)*##^O>5dM7n;Ms_>5!>t01L30qu`fCV6yeMne=VPiy&BTXy*Jsb_-Fg*M{9MgX-wDTXh!0F{-oybr@b##$3t=z@6btjiWnp7C*o`P+Oeo$aao9tQP9bRcyZ9_- z8{j6{o}NQyv|I=QLEpm&U|9i$Q;tt_2C{Ul=}Xv8WI)qM58`W>KY_V6Ms@Ayvf1dt zZ(VTampMC4;N-qLh;}#+>y_{@Lwn0C`lm(B!(L&0q zX%shTCk~}Y(U?njOg}D1OWE2L+;6^uNd_+@@Fa~L(yeE2r6+j#0{!im%e%v@3!Fu6 zd|=(MZ;qI~odPhe-G{x%=0QPqc|QcjFY60PmR3L12Gonj4d6#NpDkBoI#!x2os!sI z$df#EHko^~X=?3f@|9_;r{P>1YXbzi{9b_Iz@v#8BnH_WjvBy&AfZvn2z|?%!<0AK z9MV=~j_X5RAfcCL4}r=PC*lU=2{|)xa0m^;w}I`g0lSeX2ax# zUfBxLA`D*3fKZhd*YW92d~-wIAvZqx0+*DJWX z7``Fybx4fphkDZIh#wBop_pYNmWf@@b=$u@qU!EhZ{wH() z1)X>4d>2knzcEgoWB45i2;q;bUNZYCixGHl+34~LTx^Xp12^;i{KHH$U zn@j|`Qc*M(v7ZiY3eCcPe5=%H^)_;2YNzm{k^gFBGntIfaB4YaR27`OSKr??xikov zb5=XmM_h{$qhQai$(`Bvtb+nU)&V^wJoWqES3N6+hb@)i;kG2h13d>avZALEYO||` znnVJ%pAMS^UQ6}_0d7Hb?EGiZdG)iHNbryZZ@ruT79x1lMFez#1yWvwxdvsh9G=T! z1H?CSvQ#k2QnD;+mh4CMxV^@aC8omG0Cc_$$cIv?EeQ22A)UIXr^qSGhriA^9O`jE zx{F05zyt)^7DZqY$*S%tfUrn8qYTw?cx0%?I8-c%Dzvi{7y}OnKCG_5hbsgn8bgXM zM!SW{F_ETpks8aG7%j5n%J?ju-huvW91G$zLcl1baOOQ|@bp4~2BOuV4MThjd1XTn zFurf7RaNM=qvvn1GJ+e;f0I)RM|uWaSJX3JXmRgeDs~O=J#-rM(tDKFF|#_S%6=?M zQv$fk*y(V)*EBl-TILIAidv60)zjVZ&Q~kyXB(#Q$Z90&Zt1l_&mGaL)`BMBWFwpX z61&Vv2YL=n4+atxdA*`8B-eGN;L+OzsD{?|V5okSrPk3|rEh8iFaBn$#?YHQ+n#NK{6rQZ*XG%LSlIC&&#B_qjbJRI+fC!r!^r7c-$ zY){W05fuN_e}V&}2ayY^=GN{)Pb#GT1X1}`0w$uL#}lY5s_tKR@pXi{{4{0-yfq!Bo1^vfh{=1dHyl_ zK1N5{(rFM6CBDuEAyo**&HHsjf@40w^CYu#($q~lo$MW0*@q-Qfil#uGX+C=czXU? 
z6W)S|_!LiZiZeHaLtZK8CT)QdwVF}~AV8OO8I1>>WqA=yH}F5eK|X0hgQ@WI!>CYS zYJ-Y={uH7FSewDyKH)f`(?tTFv_Q-6P$8L+)A&_?3TGnKC38E=*fVs_(|Hz-KXk&* z&AY{`g%ek2^=D=vki*SqKXwWvFZ8Ejc?cf)%(Q$T`Mqq>OSKu6E#R{X90!phGa!BLLOJ9UcOf zfY#qbW~`5sY~oI?<*(8Abvi#t=MjR+Q93e0pJm)yF!yup3P~i|B!MDxy4{Hu=moNJ zwmA3_;!PH~8UP+foj=toP`>>cSB%r21N<YZY%ODrZ!-@k{CmjKFT7!s2s zLS)ic{e{Tl=Y-rj*dGxb>!+aDiohh!YoTPt#_Q$(qby6L4T5YB@WpSm#s}9}YAQcB zS*>c2u`WDwP6QG#sUa_DDL@h4GedzUC$tKwV#tq*bKT1UwcoE_QXH&uWxcl(6!hUT z2^Ov7NhKvEA`B9A`dFPDmK=xb+Ylnk--x(OpDuL1T!V(>CZ-~~SDWGZL2IOuSxKjr z)3!2&XqL)ipm1M*BXW+p2B&J(tCw1b!uagcJm0%?K1t_?=`7LtX*xec=O5DHftLC? zI{$*s&(rxeI{$%=Pv;lt{3e~dbpC+OAJO5NpQibUeDu_J;6!}f)cp8yp!kJyfqLJX z;lqW2Ft6HD-IGVKAoXx{K>yR>wE2`V0u{Nql^h;Urq>O}@NZ~%WO!hB{qR6Kold0V i>G*JbczAf-a6e+xcm{@(p-+T9G5pGaJKPxU|9=6lGC(&dEdY@?{0*B55f+RqYplFdtgaAcpO?tW5**n}~%kj+| z%6c}Ye3S(_hDAwnnnZg~i3<|PP9nFsx46i*+)KuGT;xW_QQ|O_vCRDc-EMe)mSc8jpyPf^V^j~HIYk5JX%RsQ@NDHt;#@dK;BE|2IY4s zH!Q!ITtO-ejbnW}EdZK-a}ZI%0E zWm|Q7ZhLh{Zbx-zZf7-{%T{;gc2#%hc31c0_Eh)g_Ez`h_Eq=i_E+!8-6QX$DhI05 zx#{Y`+`;O-xqBsTpmL~sICr=@lbgZ)V@}!`e9g=qdD3u(oZ;6DXSg(0KDr#q9YadS z89~ZO=@{;h<9^f`!~Ga?oIuJ+q>MY8kg_R!=M+*VoXtqt9H!idlu2g_DN|v}{Ycs3 zY(>h}Fy#THY;(3FWqX+NAX0WXJCU+8OgW8|tg{O#yGo}~_d~ef?d-w*9=Sh*`@POS z-0ze7hjG8(xd-?6$o(U@Kj2K`emX4iQKTGn?nTPIVai#g9C8jL<#3pC4k6}8!sW9aUq}=D+kCgkvlnY3CzE^;*-cyB|yI6NiL9$Zvypp<7zd1b+#GZck$tRz9E*LFdEfufj zRq2(b1y!n+YMvXUb(*_0H@9#z7}R&yOUhlS*MdZ`Ua2eBQ^C+et*BqgFQNs(P_=Na zly?@cFSrX=Dy3koRn1NTzNh177LY9zF4m-#|@G@_`FgFCX9uhx$=>G+9vYJL7OJJZd#hF~fg)?y|7Sy5rbZ;xGxq31!fj+1*tQY1=qv<;v>89>RYUL+-Phq;DyD4Gjpa= z2Nc~$5NYQFL{flz&GibkVkw9$)I21a*8+2~TfNGnpdcO=vlBON3_-%mnzpI#L9~dg z@5Ao$6MZcSsvC3Sz<%3FI7$3QKMWY=dt`j#K@ZO=v+UAFufN${PxZ{(w%)r$Gn ziivpKk2?{J=zgRnRxlR%w1dI%l71Yeq%gwAe7>Vot=2#{561Tb02G^7^`%8Ou&*o> z+{@F0K@`YZQb8i0U#Km3`Fvp2YC(+MIPM69IPGfJgMr5iUh(Q|T~&`efpw)8*q0Wb zeqQZCSAy6xGK+)6Qx4|Sf_DouvgqAS1h!kM%mr5Q7_Q@LAM={7+AqKJ>OTD5HG^b6 zU#t{dH=ox7mqoHVjNkw+mw-!|Su<^(HXDO%*G#*qYmh&Vyn_4_i1PA61#0HmWIbk-^(u(lGnnpyCNg9zsq|=D7 
zdi5w0r!Cz8)~(Jm=*$<5u<5Yu{M?dTI))5VbH;3p^wum-??%-$OJFc7;pOQ#N0XEj zqnXO*HHk$$ozK6rRH*1D0%3KI?>>eg7z{_DSgKUoBO%Rtm|3z6I36538Iy!DWF=ym zh@=*g+V^SCqPKP+xigC}Q!tAnj?Gylv&P1(i3PF8Utq^{^NXCJ?GNLfX|oqMoHk{U zpK!WZEtxtimLGM@J26NJm=4nx7>PQA+#1DfMPeu&4r~yjt3hPG=uF4-s6WVOrx_4G z)k6%3nMyh?AjtBT7GUV^}x3gN;C|&PZIwusqI}o?vi+0bf^7GUyI=7qm~| z!6{sC5pW4(!ct7x5UlDwcnZLc=r&w#vph^$X#)vRk+Y%wVSE5r1eeP(?1!Tt^G*~q3#>&<^2OupL5i!Th_F=3qZRIK*b*D;2Fl& zgW0oqAfq+Q9K2ftFLsxow}?7zG!!HRb$u-XQch$9DHmjn0(GK6MxcoqSS_ii3%5$D z*e-=7G0qvzU-u@0HFR8r^g9tyVLPPk-4xJrq4nJqfV~_iIy;d&7T!s$$&p->l9HhT zJ;&JGiza}qCri~UB?q!dHAvQ$s`;y>g5ypPbP->XfI$dasFeyTFkcCY@%3QznI*7i z=068v3Dg@Rk5w%MkvULp1jm8bq>h1ER4?Fny#YRtbax0r${dw%1|bOL)M`+6#(E9a z?2zuWD5jTcw#hesfu11aPq!C3}f+{*HnB=KonA}Jg&#~~>+Cf9T%q|kR`>&|l_s~b-C%P5^W-Xe8N7-=h$ zR7@-4>iZDM_)(O~j3W#)r*0i{i&?2`}N>%O-`yuV)F#+YB}OMP}EP6i=XDm*U_D zk@clS)AUgIZOESQgQW9{Ma_661gqS&1}bGEW$S%&%bP$WwW~ z5}|B~QschuM_0&>?O74$HSr}knNEn51WEB~VX+j%=PFP_O0^&kxv{zg1?w6zsjCQr zIJ6#S(hv)|1;whsyrLZWy%CsKMFL!J-f>d8w2hSbi?DW%Iz z390A!u)u(KYo-<_3O{d=8MZltlUPs8@9>6Xkb11{c@-dJ@!Dl!R(~pm7{C8$pVY!( z<0UhZw4^%|JqzHDj#KT5aUP1GZ!Uw3fn7v=3qIIGBE{>V;l=C6A&)!G8txq=+^cov6_-3IBrt*P#r%2%ejY`J zu!yg>V*g=lF5*O;7|8vwAL$|Y89}$W zRx2*3=h0>%4QmL00#8Wz-`b5p2!A>x{KjqTHZ*8Rl+lm@&|nbxQ{=Gp)M;H*>IA=~U zIH{<8X-$zG)QZ6ZV`b(utC)GBJ9e~)z^v&;OY_+|J)u6rSe60#0x`!OMUVnd$Us(_ zz@*B6=s`rDSieVobBpAQbn-mQc?G0ymtTQl zfnSUYgeD>PN%i+opt{3ArkzZ{uj8%gMbw*vC`?sFC{o3Och}Y+5IS$&f*Nq^c+UjO z@w&9Ew)u%)4lS1u@`84Xp zokX7|kP4FLRo!((Grhr@2p#od1Q+^DJ4vK;CiaWCS#M%DF*!l`5}@q@ci*0~I#gdq z!-3QVy3a5Q(~qY9@%q74)IXv}Nm2Juw-8JxS7Bg}9^1u1nMX3Mgy3~&6tR(g(&XJr zo0H}?lK0V0=R0$%4+DFtyLuG`h*+mS$k5A^e(P7PSGeq0zv4mU z{E3_IhL9Y0LCE(Yq7jlb!H+9%m90GDV zmpCA7e6Y>N$-=?Nt=raeJY?h%>Iyw`mPBM0{G4-eL!6}C-=J{DS6w@C(f+IBMY0ZxVV?Mi1df8x>{7Ug0hA9@<*JK%#`| zHu?Mv0})9!;spN!IXW;Kr@I~7}05s00yR1Th#=8xH=2GkLFk;voU>6PE zg6R1Zmo8qs8+ns7`Awqdn}Y~?flQ#bKg`ep@=4tE09ion$2*fHK28;kOgVvXM5we) z5NqqiYWM~bG(^L9eRlh|0rF?iv<{HLYKNqo9VlaP1@)$ptyc`%5LsKu_^)wkxbtp9 
zxVMote*VOhSL;~R)RkQ4JCtGCAI3YNd>l7Dpu{rOT6`r_B;CO{{;g;Zu~}#iSze#b z2nMP(HltuzTHO^KwZdR(g`uDQ{V)nt^9UMy!p?ReybcdkpJMhdKl?V`W*Hnt@B_Fy zBL>kmv%18;VP=r*{aY3$GlO>1^}`eU!LIB1|5 z%Y8orJ$hVh#KJ8WfSWK)gNN8qD76$C!M!X#E@SjIYZbEnEb7HtSazhL+>g)}D2il4 zxCjIm4xFIT8i9pKbcs#KB9N71Q46_z1B0YA1cGivuOhdXPgvaDNHB1%q-v!~-d$*v zf*25W!GY-sYr)P!Z62#TGsqSspDEnDSg%*G?6Dp9J(N7l+g%8L2AA-7sES$4;q`E! zyQZA%A*%;iL<2z(fyGB#A*9xnUhRAuE73v`u-3DR;_l8mChI{x1-sblMxWb_gVhi)Nx+<+1SmN3kyD^TS>kTz2o8lGj9LM=YSSeCfRv#F+Wll-}~2%fu2S|y5U^gST_7- zJpD1|SZ2VxBCftqd!DMhNT%3;@-L`Q<7VS2d=XE-v?=UF#QM{oy1)fkkD!gP1*eJF!%nIO!NzF90xja+&4z2 z@5R$!W)49o-ffIdgdA*<(F^tBk}KU<#{(sSOc7k7WOXr$%)l{;IrKpxbNA!R^dpIg zMD&d;x@QqxjLWhnpzg<`^$f;;%t|=2>~~{G8nyoLTgWtsONO7u91cGRo=t+-XX5=lL?V6R}%G71N5i`aW%9(JvHeahVL6LIK6*UZEfnBt=N-@u=vP z?1%y#Gvdr=?*ObJ|^0w;;qAT+jOD+cmLW!Z|Tf%Fwxys(}qZ#^lc!EA$X=S?gt z)8m0({RRdhpCql(CAROTd&SEOAeY3A=~NGgqCln=7!X+EG^0*2CR122uZi1t^Yz(C zPWx-TCo9=RH5(>`ETpyA7h}=vJC-^04wiG@X-uwCN7hF{k~pG-pddx2HVWbwaN&w< zTko`V!`Wtamccq?2R_}J>_9)?yt)BxG_$(nzcC#mp8OTAE*(khP83B8qN1&TMTeM5Ip{_h?i@76`iV= zgS3{tLcf#tJ21KVyli<#-jp?TteK&y;T-%lW1NVZ$57O|8-;5mG#mA!xC!j*3#A*X z&SJs(;+>!V%pz`8L=mBE*5IeyW@ot-JCd}euTk~F_6loEm_`DlH7(C zs2@k9F|3RI|1r`rj4>A49disG%#?OIBQ4a3#m-=MZGs!Jk2-F%)*NEkL~c@O&1pY~i~<%aVBaHxCRj0E&|4 z2%Mnp597QdxVi%aR@a@JrUNyOGc!oS!>m-JWBYpJb&yT^apY?Z>)O^IFV@zfheNpW zgbqSWg}mOTgpnQr?B|hZLvo0%A(|Wx(UK;eY$G{1h782Ze7_6+E}H)w9vtYvk#}q0 z$O*uY`!a%##nLQAT2wE@n-xqfI;Sl)^bAktqRyE3hMTzn&Xi9l9Dg#r`-Gf0bFTZL-mJztDJNq z*CNjt&8Q}nuNhv-iR(27%W7_FPWZOE>J30BfY1CL>!ZWX&0e}S=x+uDQ&UEB;sr?V zHoi1S)DkdA6nj-PKc&9WPjp(Yd=srsLWCH=L{8tao0HxUoEIj`nbyz>tPqa9j9R@B zf6^I5PrnXNM$2P03%_G%OH_S>KiL{zF=`R~ZX#U#N${qL*R9%*`2*@Vz0IvjXUI=} z5_^o^fGP=-2=*5(n9V6~s=TGV6?IQJqj#+4B+B}MHDygXnJGj4y0@*h9r+N4lDY-$ z+v1E(p>_C0pEX=i<842pzR*+SxUSLL;cQw)&EC#(wzZ2R)7*;OulVWLP4z%e{)sUE zRC#x657^T-yt&!mhFmdcvSvC{SZQVtpiNcwHn+=@P2ATsZ`CY>W^;$Xy}S>7+TmtbCy>|dHw13&`^xz(U5P814r%l^Ceb^D!y|aS7 zUCmw9dz-tfhnjl;@mb+RiOEBBU<`N=T#xU&oRvKwvO 
z=kIHsSV5nrjMhoC^%Uy=nx3@#eC)WQr4se*@%I65_xtMhNKt1bcTW8ol^%=~Ohkdl{MZxLU&DuZrhhDeTD?L5G zC&U**`;q2B?@|9?`7HKx4LH+xfZN_V?&Jb?-s>N1J%(qGmCvJ&Lzo*$=g_kC(S6O$ zc=EV^XvM@VIqV;9J>hR|U05LwP4>-_p8r6r&>m0ISZBO` zuXnus`~3%;`?>q>PNex@&GkoLx7`2gkG&pI7aJ+e=+l)e)nfB>)oDJ&(rS~o;gw#G z+#07TH=Oh6ANny@E=o9Gb}(;GHy`={DE|#Z$~x84r-#BmVP7A2@_j1eU2QF_7=Zd2 z|1@aCL;jg%bKZ2$V%9ud8|r!UoPLwJoX75%&4>MmTVIElAw^Mkhei6Lz%`Ac<_DixAjUD0`7aT7Wu2 z+jU?r^au}Gq=?=6Jt+SzmOEiIj;v+c9noh!OZJ<825CzSj@bVcgOwT^41*{)=3q}& zB#1es>kCD!cP!KvD+NSsZUhQqdM8gK@iaOL(|5M9Jqvj;dzr3Smx^j((aSE>=CFGb zJK|<$W*XEc<`(8>yqjKNV4q|U%G+>$Td=#xO&rpC6uI zTnYwy5^|f)KmYi-CvKMDg9@4QqN-oVI*-bYpTGG0npAWkxftef8(VbkXxRInntisf zW@JpT#n%^VbwCq)@)UrFy&Ux=FG$Xpy!;ie7&M{>XKq$24SVLwLJfN;q5yBH62xzm z7Ur*dZqA-P|DG4%opJGb6gZ%VM7C8mqBFM&cu_auGUVa9dhT*x6B^O`XO7Ms#f$b; z>>dl^k1f<>7~XZoRtAXv}_YQGj&TJa$uh`c`G}VvsmfDO9gGg-5=LV?qKmYBb*SC5^}pl%Ahc^+u_tYwD#v zx=>gL{~L-_|D9D0>KB%3McMWfmDc4l1vhMGsUjPsgXkhAa4BckO0Jh1tfCSueJ&Nf zB~^l@_kXac7)aF;wo=D2#~6{7KJ>11*HpBL0xZm@yEw9@q2I{9Qq|f5V;k%h_x->1 zw-LE{`e-BdRJaZG^ik-of5mFQ$R=QO`BOEl31V+H!eEg60%K7r&0}+N)i&mwC@W`I>+ywTzloXY0uTbeU&JF2w&jgaz^-_C47@lC5yiYv$ z+_TS}&VJz8r=PukDtr0DOxf+WKD*#%vC}-uE$TqXlGBKX`9aRYXNB%l!UQ`er(em9 zNh95v-u9_qB5Z$|LDyIi0dDAR!8TWJ+r1;X3v3~JG{%8DfIfZC!y4Sk%R45JbugUSLaccG<;# zwkYGSqKX{)cRSeRJgl(kZMNiHQ1vPqAT~NLIfWp-;N}b03k#J3_VlYi#5=jnQ*Ew# zN$={<4U&rpM5-mPkQ>9An^&*qc|&53QvenNyL7!!$z|G{XeMNWifd!)PZ<0u1ClRr zdRMqIxC2-;tf7_qm0XGq)U^ZyHx|6BI9Pxu6y%0j`(i=Ug7E8I2-Y3FD;fu}#BKn``y-Fz%!)hKShbD4H60Fux!Zg2u>9lI}x0wPs6H;F?ZKp-GHg<=X0a{w8EBt`C> z8z_R`RVyC+P+SqwaI`@#{>;y%%mRwmZFV(81t_3I9;Sqw8zuNPjc=y~$tP|WON+unu<_i% z!5!Fa9$Q)jj3IaFqZ_8rYSaFk0Gax22AkQ||AH8#+r?w|so!N{m!QzZjSa<~8e_gc z;9UgL8gNsi4!A49u8y&{*5Z{Qrr{FAL$V8EGgsD9t|lDjoREv$K(7!?fhdU7N}ifx zJ)G|9^Gu7o;Q<~tC-tgH8(0gF&hgfby1Eu5MVyq)w<#SJucc9qFeD@ScGh$VI@vLp zi+Gqy|B1B$N-}Y<<47dwAkm%*z`~kSx+y}1;IEb$2is^1k0|I4@en1kfy+}z*%N4g zhGxwJp+qnTKHq~Oi8kj826|8^80;JGV5onrwbl0>LMhAOFoKh)$+Zz=22<7+aPI`x 
z{3o$wHbnQPckIXx>}{EZ3%K=a3aO)bo{_r*a!i=^tL?NHQd3AtApel~Ol-kZrZOdl zv{95ZE;$lN8Sj7I~xIJodaWH`zxVtlhk{Vmz zMRMs|iKQ%=z;zy%`#Q*{53v=iQ_*Iu8E?{O%T(h?v*=ifZM%rKqEMfy9>>%kgtBj( zo;`Qq;R6Ld34_!fq$ku77^?6T=9zZ-2a%psPoRtx%n~>kM9QHFRvXETa+;=7DdQUQ z4y&uH@@85iNYAuJVRl2Q<*|9TBttW*zW2=qh|--J--D-{l;2-UYr==|fDVM^&0&jA zB0j0Ulf6f5%7KeV*|=u8pZGF?NH>jUOc)Ecpv0{-EaAyECj!HgQQqF#;qByoE6XR^ z6AoqBC~KGWiMe;fgtr54@4>n=S9#(qb{87n-qt=C#L+V+F>bW>J23jd?8Eost$Uo5 z^bTfnn4FVh5>3Gnnsm}|NJ+pnj%(hYhYt;mRw<+pz%V(D8epcD_(8^7`@MV1Fzmqq z0|UEvZ|kt1@&{To&LEyze!3mQemvYaleKSghG3XH>I^#>7{-#?DCP{(_=^4SgD}S+ zj(i{As8?wHG4%Qzevjk#LG}i{#KOZJtH!X95AYW~!6Jb_F@b_J{cld6QCN3-{d;lTy+nfbZy};g5_13V`%xxCKpq z$eOdFFj#fR5X3Zj)GNCkkBc`+AcN=<=;{NY-&BreWp@t7 z4IWkmkpZVcE!4mt-8bLC2TQ;OmG9H|DHKs5a(QJDh{iv_4H+10*sZvCZ`thbs)U|@ z1q~A=l;@vNh~|cDu^{1Ii~!j5G%N%`oy7iQZ!Pe`-+|nc%GfZ0p{hQ>f^!Ih2tXjN2@pH3_IM`z0NWJ7v1M|0)Ew3dW~y zG&W8TIUg-ZVOrJmi*=-}HnyHaX+Me=+^dKT3Ac_}lftcIaLeJD3j94O;RM11;OrCN z;N;qn1;C@X(3jWTf!L&#Ksp=`SytwJ zfAdvLRP{kLug|HKN*yslnER`UfY9(FJAtzk4uLVsAirg-8#gViYhos=)40brLAXst z)EO^X2FpRpfZBz4x@IB7@oi;z;C;&adN{p$L%1{55hsE*J~^f1r2$yBj21R=lL5YR zoR;(UMOP|L9uNYVYa)V+=K{Mjozb(cBWvr)K-pRm)o^}5?UokXaf+m#sX>%O=IZ$? 
z6P75fHwT_$nc6L$^l#p61#*5&1 z6P#3nMLL_V?B(c3urqcJGJ*KeW2tP}d@=?Mga$-S)*nJ{kk$S8 z(E!adHisaMQbWRyb&8|wfKR|B!_s3dY}0l)&Iz#_g6>HBr8Wma&%Gp;2t|;J1w$J> zholK0%JLNBEHu1}RYGxv&I-Zo;Fmf}d^pG8F$6gqunG5&f0&8Vog`vWc86m*YSRbo zpvd0v0^RcvG5Z|eKaOm2v_ZniTEA7Rxz^0*4qzo(qaUn}eNIV-ZL#aPP7jnb-w#`plc`jc- zEs4wFI+2jpMtnxwF)pwLIBtN5;N6`%t^l1*rZ>((7$FPl0Yt<@39`SA&s$~ z-0g)EA>i!o70EK(lZ7^9nR;lBww_3xy`{A6-!eHH!}+#NTW1G~MH_?N5@BlA6F8N> zQM#gg&pbiwg4mIR#5py;L~&YugxK`w1XtwAi_hORX~L*1Rcr2DEQ%~TLGp3fQ7_QM zsYS|FLhEh3KFN7O9u|wlz`j?1?f*WSR?OFjamX&tXZ)4Dab2G-IMZ7=)iVPBtq44~ z>|ETzxFMc&a8L>u0gs4C(pwAhq?5t{aswr(&`^Mf_1VPmh z1Ci!gonidaqw~M;^ZfkIqgc`YO{84zTgoE&r`W(>_c;VT&a;+E)0Y}zKbSjwu1RRX znc#5^+Y5ZNO)n-#dSTy2Ii5Bav(TtvMaaX~x|Li3K?@J=iq1C1&&f7AIgeGEhsEM7 zNIfYM?4-pYa`o1t&?;?(j})#HLw>_G>0)JxoAF9FCDM1e7ESSLltW7fCIY2Q`XJ&| zuV`+tl~4Z+PeBJCU?#?cI4sFmaA@y(ZiUC&M4#p(ifS~^z}s)M1K!SJd!G91vD6aWTE;^Gu%Zf?6e@HK@kD*6AA=Mt{(#s? z0vQ(h1^}R^yhJOF4cO*KCrm7#@?07ipO6=3ks-KS%l3ek$U0jSAMfR{QsS_efiVca zL-6o}mk@l75YEA%f!*>D=H@Jz9dPvAJh;ic5s6rlX7VFeGv&b$As$a};5iI1j_nW7 z_oF$8Qq$g;H!h|A6K_*%!W;9`%Vuk{6J0UY*LjoWDL+k<(H*P21-^%Av^Ndo2fQS` zt*vd|cIF0JrE3#X)~50%wC`W_KJ#{PqlV><47}GSsI+myTo*r zF7Kv?oilI^uEQ`3?Qu|FGwr9#d!0dS#rQRpGz1I4AWa?c;Ebc5eMlR|JA*JBG3RbP z&wL0^oA6_7?evFO^6iMX-yd2ww;J#XvRXTZlFnP!ZOhO2FxdfS#)m9~Vnl*0mt-n^QeQV; zd8MyLn8xr<%IDm;-ybgTUXHEBUHIqy(_ZWqU#6+>ke4}5f9hoFD>(D*tDd)bc^dve zSU@cmszHKoI&>C+u25c_*H))bBR}YX=%1p#;Z^N;J~M62=-t?BWLpwJnjO~c*BuX! 
z@a{^TcSA!FRd{{IkTBjXZ+9VB0DaNBb@z4Y$y(b`XV31Mwoo&cw(Lekys&V{2t3mu z3VWjIKqa$kpesE)iP2$EBuLjZt=x_n#Ae|iKgrboR$fCZ#W!nrzZ(sbyRITMh)DxN zqZ)iUAbe_DdV5bgZCiE$TwI_JK{}}`7GoNfSQ#&J_+zV@P(fYdy|IIPCp)9mGjcEL z*W)}f-oXMCHx5icpr-JnySp73Gfd%M~o7t{99IRg@r(hg`9Tb!!n)v7>gf+%K?5E|=o$`u1VPSnK9_wYCQ| z7lW8KrTr9hjTBt2Xz86W+8hS2{$5{#{K!MZOD?*OUF$AZyMuu$ z&$h)f_?1#c-Q|mAzBtMnQd)(AY0}kN*@(7q*)&%u!6_QLaOQxjb0P+&r}gV^XE`5b z5iwb0J_=t1Ja^TvvurVJoM#s5`|kgCAn>!wi0qfk0I7PxMBLAKC#UUaQlC z$kD3glkA?%PYFb5$ujL9;DrP1GG)btVPmnb`$Z>`G3t!WRYZoq4^fRD_iGV!B>3&Z z&ma^C9br<{S;aArULxkLY_NZi>ei!3JO{hfMW2BiKS z;B({1pK>-i6UaY+1%^rdrkyEg3w{Tkt1l5eIbl)6yVzABCrMeN`y{-K18;r3427p7K4iZ9 zsBpEE_R#I4_v||REcE^)B0_E_aC8Oq^LcGt1Ic|Y;YgdHt4BU27mk{;@3eh#qkUzP zgcD{baEbCKRx(I?(YddM$uyw z>oB#M&Y{GZ(ZY&lkXOaX-@&8Ay0XMLRv%Fve#Fr8Ge}a>h+SbLgk7988uxsO%~%5$ z;?pHueSDtlMSbUTpaJ8^wBeB{A0s?=BS&BPgn7ICw>V*CEE8Q1)U9h}iO}9K&_$O^^non z|L!B&wzkTi-RZHeGo5DfT>J=Zt{-Wg{3qG{UywxkCmEZ6A`?U!3yVRN32=C0N=;b2 zZm)2BVSYc(m&u#j8}V7X`W%B!xw;x**Veq==?5qX)(Y!l-5PaC_&?0960>QM9qlv2 zWMl;<4gQ4vx?}4A;lgEEuk1TdlkWLLxX$BpYbbo)z(HNG!ct|Y!cGPIaY*DZZ3gxw zabdq2G`}QIF2as2ZeC|D`1M1>OO?R^rkEa+I5Z-}<8LIPB@O6j>c{!= z?2Flh1@^vlAG%j74kw8rKkW~(6x2f_0Q5I0HHqbH>zX~z8d7gW?>Cz1SFk(Io%bWS zqPT4JW6%#%*b#y^vGgziy?NL}OGFoW>UHZY487wnEO|66Iqr`*=}+2kM4DqL<2$iq z?2|Z)VGxVzx&@L??%)(K1^mUDkbymW3Fy`tf5ad4$Dk!sUm1tP$j}|z8U7SBTdWd! 
z&|R=(@5j{l_+xl01DzVBIV0SyBu_`1DYy!xol*7&dU47d7wz`fy@}RlXt0yGpQ2U= zy*Pzs4^#a^?;LDW5f;kPboCnLdB zl!XoeV;KH}s#1cT^*EeUi>kgz&t6+3N1J9^EQj6Vv=M3QD-nZNs>r1xG5#LPMKMwN z@2AG_fm%K5$oeDY)9FS`+~u>4SXP_~8g|yJXYn6C^@`IN(a&)5I)q(U9+;`=J?cAH z+b=Wd&Tz{7S`3dZ)fOPm2N8Fvs;B#2KHJ1V)|?^k)43BgN65l=v!Km*noD9`Hx#Kk zQ#D83!r#5Kk$3|>M&Cvq(A1~LAX7ATdCv&V1XpD6HHC@Sqv+H%ObuRenZ z(-T@GkKy06xGwxh%*;2itPB97DCigPHjN1&A@~!{aKlsTx0tglj|+7Zc|6auSzj($ zX#eBZs&P@j&6oc@gYyi!Y$uENg1EEb!lmNYKO)t;j1(KP_esza?eNtu1-QWiey8Wpnz_>xoq%>u#yobdWX2EuOy;^a5r zn!-2wK`A+ZqRl7CBpR6>79nHaXU|2}Zl|fl=}yxQe#Z(s7eVJNjVXVIn$b}<>TYN% zVA84qmApUAR!8+&~i#<&mG7$R_fw+b9B)Ew0z&ej@ zdjvmPNXs#VS`bwk1S}2yCp6|VRyz{nZJn=Q!w#ftz+SLgnHJL6H+AcWQL^*|4+WH+ zhpW58S3lbZDsi#DLzp2Ow_gy+R0FH~Ak1V{KY&Q>KBzCCPBiRK@S~f~R;$0%snT5O zkfg4{0lxFOc5}C28uT@n(|ltTpcLDW0t7WBHbH_vWg`R$z<(GQI`|iit!e~w{z|(M z^a^Q=^dNzzGJVsJ^o@#51eZ&35C3ukR)_6`*pB63qmjoYOe|g!7htfA%wt13M2hVY zR&1zsCXA#!%n|?g1JP5S;cc874J@%miH-589~D&#IP=Pw{6}AWmM6yDtYZI&eM`cd zupTiEH=;ay_N!=&UW>;cfx<1D&f&TEEgcD>Hx?Y2a$;9yi`(iqZ1syQ;#U~_k`#uZ zuiu;^>l*N+&7#V|tZ(S;yP*xuT2lg5-!WY7eF#L2*i0t}THk1}rouu67z!wcvwJsX z%tOFympUtFr(v!O-wIHYR%res8!qFUW!f$Tzr&$r)9>qeKnP*wK_vxueFd+h>(RjH z)p`4_<*RQ&y^Vc6XtQzYYXCxY5`fU6D*#E-&yNRX(kR){giZA^_U;o5!~$ySGhzC> z`@8M#ZlS@+Mv+_5B9Cw+gtfXcsNBAm9UJg;~n<&7zPXrhD`o zES@|%A@&-&dT#^M>YFk;XJ0=#Roa)!^7zL{*h#D#QyxDdifORE(@WC4fFX2Rd zjifO=O-21Yp7eC0gD^onpT{CYKF|ODlsN+8*DHL4kJr@1(U6+bQWNJqI2(yCRy?&d zt0jRh3_efTFC$?{WMxWJFEU3ex|LKD8>Fr!qg*zbfiVi zmxLSqHNimGDjs>GOa>d8Cuzz15L-CO7Q*vz+eqTxl%0y;vJ-Y9l}KhHfCsn~=u>CKg)*bw> zQyxe5h5rXCr>KDeYO~Zul)2`-v|!|%Q^0ne>!owo=k@Wq@b4=u>i<8v2d~H>WI_Bw z_(xAR|M6+fzX$dF&BT7b!zsDm>it^;Av?(b0@wk87YBER(C8T1`glA6LHrefoJNGl z19fQxPbb8AHkKN~DKLo7!xjL26=yj%u^qRCsg8dMz^Y_;vST0SqA6YeMeRjs2wd#& z61dJv-=dSLY~sf^6J5GkoW)zhRo>>q-@sjCZ>K-+vJfT%0;((Kz>l+#Jq!{oJ;tJr z;zD$bMK2+j$pKqZ22 z5O>k>U)P9QvMd)2>W8$ZDL5vJMk#wfNuy-Tq7vOwglSnzlyxL(651=90%dMMm$F!? 
zV;8gN!=mO=_YK18rB!*!r~O=H%}!>{Nfvg1!Bc#J=U`pG++Oj*In~ed(Pai#7%VY( zjln-;@Q)dMlEE7cS`5C0!8bGbUIyRC;QJYTD}x_m@L2{w$KZ1eZWFp(Hqq=&23Ck~ zH^?F%cACS_4FA6U2(lV3+hd!cYw)S${wR7@C*Uouzlcjp#yV+^!$Ue18I5O#M`QSo ojE;>C;u^{fWfJ_3CPp)(qoZl0W=4nbO&MR6m>LMZVU(vPJ$!|f&c~>9s&n20E%P_bgR*?XQmr{ z4C}qY(S9v?HIxM@fe9b+`jbSb$4c;Fw@y9A- z)lJz=;*VFxtDCc%t6Q>LB%G)`R^6K2THTi2R?TEH)$Q5s)g9R#)t%X$)m_&`NcCv; zD8lEhlr{90kv;Z&z)D-gZw0L3;`;LOrC|06#Ee+$5VNlM1i~i}9<|mZydEi@M9fo& z*j}g>;m5py zm=o5Mh@bOP8*B(aWknTdEYjk-~hja5JZh zZ!VTpv0AJ-wwKg#_Tubp>5ez3L$`{`F4b#ZtWdAimF*}mRjL(qmfQjw=cTIoo5h?} zx>d4EH&FDtTD_L5=I`X}#f1e`v?cq1UC398Ikc&0d+|GWZ_gjSbN9|&dhgtY_x8LO zzniaCa+P}Cnh1LFv$a~?$vbuBMRd(x($3#1=IS*m1>KHwh;n%cO&YbUdFA9vwOI^j ztx(KisEh9!80JEdgYEy(-+t_mB5)6Qiu;QP!sLny(J0v-k)nw~j%*fZ`tG(5vn+mP>@FHD`h z{)&wWx?Zf5%K6;k!-ub*uNM|&I8J<96bAHjOFXXMsQ3_-->4=R>X?F705=dm=(uw>{EeIAREU74S1ocdhM1>)6BI(yr%;C*$l#*JqyPP zECpJ|a=-{UW;y5@uIUEdkQ0I%cB4x{H{wP~o$dqLa$(18MO<^)fIsTStRSH71vgNR z)iyYBH--`tfY{gESS^B-1IvJ-T++h$PlRTsF;0;=RbO1Ny_i+1QT!xZ`8a2t2moqtT?%R z!BG?Fkr#Pcu$mWp(E<`FId_4T3eH5_3)#iWtY;RE;5n)uXIjHn2jq87Pv0p70M7AT zu29L_b}pwURObH}-f_rB3|VDtL&}x=~{tP z+y0}UYfIJ@F$ULF8HmPTPi9Nj*0PCJ6|s!_uUOJx7NCtj^F8%p(9KgU|sLDTDFt$r{of=z!})s5?KpR}aJL1s3@totNmy z_+O;23liP2ei<)L;*lC+regT7Im*Mjk=NSX0}udbM+xyxk@Zt=f_INV9YCPFt z{@I-PHq;8t0#m;ggUmwM4P-+CyJ5gt*h|ZJVii}5>SF$GQ5D*yuq674CHQOKfwPJ( zh+K3(2#g-`DMdF1bU|oGHwFV=juQH<;C&N$NE>0|X<-pI?K2_>dK3(gZ{0K~?u=Dpx7 z&_A4`Sa-y*o>5S*;diYm=0dPjaER_6-!LvUCIse1yIno$sWx1_BohN~p7i z!!Y(4qef$FRY!a*|1h#977#4_NL26&N@oh2NHu0AkXzjhg&7j)lV5si}mI#oh6KaL;f!zj54&)+Es2u%N;8Vt| zi2ZNHoR}M0GRQW6FH4A%3D0Q&4bH5|WC}GNL*?L{(@n!c(f7c=e;AzAn}7@l6poCzUSc6FenINH($h0%@u3KI}7Tx%<5jmH4gO5mJC7%?0VSYcDBAnB2=C9 zPnL^%oi#)%dd(!RLg3;8$ydBEQ0`pOGaV(3nh38XIb10a3$YR_L?O0bHAR-pW(%!g z>77**0jrxdQeb!cR!x8N`&JEC3_(;@%?)_KUxHRH8fp}40xM+MAOl}Eu7{dtGuR9@ zu})f;wq>Iwx*99E9Bf8p4bcS=SjMW7xAJ~DNFElYM%|DbUM6J^&8U5-Nuy@cnu(Zj 
zVc<&V^9x0ATwqs$MZ73@)aoK6*)o&Q!SSMyCz(i(m`|j4@QrapS@L_^GveqLvJRWD*#v73)$DmA@`k9;tguIme(B#bRD%QJuxdm{p69Z~15MfzzfO7DNZg zlv=C^`eH?wcEr8Jxw_+20FZ^7*Mt`Tu@n;c+H(a?Rr`-q$4Ay7(L{G0IwpoZJW6pj z!c|Bn$TF6I=0Hv;WS9_X4L1aG5k_3t4c5<-eB6}P=@SO2`dyX|3UQV{(H=^$9}&Xj zdBMUh;P}F=qhS24)r^xDv*+u|DJ(ith-U!e3%RujKm|p1<|b*RIXCONW6@cEeE@7C zQwMJ;S+khZo>km)2XKBIwXXo22m$BQ0gb+U04H3I5TP@M z8W91VsDoG{_P*Gd1CHn(bSMC{HsHmba4Vr>MWP)*I4LsgAj(LQ9DK^04MK+!0YMmY zgFOTxEg%`yx|C`4VYFFL$STr6Sy4zs1|E=xF`qO9?wR)>+k-0&`y>LYxraf2VSVE{ zXjxq3m;t17Iuz#vkkO={ps&7*PKRKqSp+}9sJ(C+6Me(pfz!3<#PC{6NW&c^G4%u; z!Xj7HF*?W9lZ<Jxp&YEm9)UxC zi_2{T{*B@{VKg?Z-J`yhN92~kGlj=K2M6nG#To(UKLwX2yFy6ciG2-sg z9>gtup2ja%1U*w6L-{cJB~EvLP&_55*^c$=kiI3W8DD8{T>&tXLJub&dcg%S|r zA^BQRE%iwe2`@gS>b5OH@FFtH*r@*rzUcxXz5Pc~xU-mG3X?FakggP!#=Q(;rr`hYxUhILVbb`hfOWf>Bq<| zMhQmVI(4iAG;5*ZUYVAlBb-bRrV;ET2)To3H8T1X6AJ)K{eoBsz@NoqC*ib_sfexv zJe#g_kD6{yb!WOMN%u}SMaY%Y z-Hd{+MY(N)DHaJwCfA&KmoNySx})AzveIuLR@Muo&VIH@-Qz2vQ(dz4D2nw(r0pHx zL3XM$pC4p|e*h)@6if3*nBi3;OdQU?e$6?QD3Bz5{-Lv;VqSRa*wxFI9|qrLws#5C z?+(K09dnLx`;Q4aV@@dR8FLwNH`-ZZ(NRj3IQe1WoH5^OSV6JLqBcLrCHGQmC@S@FQXSZ{h7fW|{uTGrVf#KNu#9P91xGzK&gT zR)1}6*{d_u=aHsosIe2c8oWuHiOn$29|)7N*@DT&;OSA4K_9nDN#+eno4dTQmVXQ_ z|8z;nEQ36j={%a6g_5DoVC7SOrxSI~!_GLW1^O;^*;s;qqpF?3^VuAi- zGW_Vt+=I6($PzdC$%a!g)2qynDiU`AtwI|Z5FrYDuCE{-`!zUvD!I8!#rt@m3DrHw zR0zVd=Cp$d%VzHqrgEbekK6CUPNWsp8wqZZnotpvV}i@TMD0WQ0#mD)s3_iCWfL-R zWXoNIUhalL9V-n1GavM=34&6H6!$Rb4csiMTCtL|OO2uz0o0Z(sNb;TZ=RN0StTNDFZy+1s);nK7Bjd1NIs230k6AYjRs18<)L ze}BiZ6jEntKzC@C8VumH%8%fEh(T+H}q6?=K&M79f|Zn8EOap-9^j!`E^EVu7jiC zvwc15wKF{e9-xdeDKYGO*8mB&mx7F4n9p^>NxM{qof0gGpqNbnJ>Vs_Q4fP&%2QFkvWs-IxQLpnc6M>;J!p)S}_htLBv9f+ni02bK1ABAil6qv#&5|W_Nq{a-CbOkJ(VC*Uu zSkUSjywW=;!Uv1SZ4Kc16C{%X;Zkm-q!(+$m5G#OKn1Q8JcKI|cgfWN7h2{g(Q5rW z8|2O`;u*76d^^4WepULOc)(qda3{M?lU*u`?P&I2b)n}1gR*$KD`*;V{~3?46wm_= z>MpjW=CG!vtdCDB%bk10bmlM~J6jMV2T4Uprpd{fRbH^7m1s zUOgO$K6$G{-UVm97qE5V>`^5B|3Tp$x)Fnhurbo<_1ZwkMH9gzK$n51gUn0;J*{g2 zaA@sHTelL@YM?G;5Y71J3qo5xhKW~>ugDSVSE$jY|yaqA7Kdaaz89K 
ziIL?1(uQTb5nK}Em#IC(Ua;i4C#6Bfg@2u#Fb1F{3;*dHX1grpjIW}X(~1x{k?6s1 z#}T3C>2TP@NJ>3JpR9R-N=o@qqpihMo?*&IfgyOgfIW^B#Cb6z1Z8!`NdHA79Qjtp zFuxTuBHwC^t&pbH22bKdE_VUXQFz+miC>HdchTEIwW%A-2jP1dhFvYBHN~EKpe%LP(~bamejIEes4V0vL0>tGB|j;#YHg69Y&*a}?* zC!O?!7Qo49pbaNnKs~@h=+F#_fXzQ*@So6GW1@GjG0`c%tM7N3|0&Ae)*dXGX2yKG zrWsHNkyHBfS40vZ*&QUIcA;qylZhj|IP4pXH6BV?YyNR8tA~sa>2|C=|3p?D44y*3 z5zGw(6v#Sl+#T%UG1-boBJg9Zm_-7Cr~GQWb0JtD&-~r#>@K;V!Ha|KhRa+qX4PDf zyz@^s0z29-gXbL}o{`}u@ea>wB+l~=ah!r!FDD)hoPbDJ1K>QPIA%6zrEp9wR*YL4 z<)E6eWMmV?cyYiQSu&uiTxX5G#kiy>r3b-&k9i~Kiw@4Ah^6SceBoxXW;GIbaj59l z6ZYhSgMIvk#c2pYM8EC755YshA&AGW0Vz`nS_xE*X0&QHaTL%AE(?hfRSRL?q~{;~=OpH@<9KA8aO?1I=V}u$gKOHPg-E=16m0bF{g>xdBTnu`urr z;7Q_1y&Qm{+t9+t+%%qHJR^A4H8)zp=9m*NCwSbV{zI-=PFkUx!R976VgH1?0Z-J8 zOPz@ah8n{mjdq*9# zVa`fMa|_x$R^EjAx1a^d`@!b8l=#}J61P~1O@Rk76>!GOn_UUXYzM5N`)2bowD>VA zy$S8b@9-uN?B-Z=Gg>g~2GN3F>S@7Ej+0{a0mv65+S!y8kjc&u^GtcBnj&5XOPyd6EuSmS8nc5Ab> z<*k6by|rUGK!sd$2mCvi1La-KomSf2c}w>$IJ@CPPnHb7FX7r}-N6Szb-t%< zk9FF%)o;zV3J!WUcUAW_cULExd!QEFwG?RXb@p@qTaPaX9+>u*odbUOpp{wHu(-E8 z>BjxoL)Lbzx*cfsK6hX1@G^S2DbPBCwjM?OW>4HPcek~ZI$_ka$K8isO}G=~<485` zPPCo?Yq8(mTR!3L<1tK>_c&nw$xbbMXkBw1(1n=_)qd7ZJqW7}J-yrO!wSdvspbLa zY4_!czq~0QQK#ncbF=r2FE@`P-Sh5o)c6G2@&ZCHGKBC6 z_+O&`K}dbxI^=u^;fo9}8Gw+G`ivEB9&?YhUUrYmyBX{+9$z-C!-SM&6Qgp%xm3P< z(?r@IN7`^L;qGXChx-KQ2sw_lOKU!fF}s4aN8Kmg6V{RYz_hKaqAYvLJ?K8+o6Q|IIHxD@H_3>8sXzLXX&6xS8-J{m=O@Z=<2`$b256s%1xI>uH z>7E&VqGv{5ZJu;bmS1bVE^BWb>3_yeBmMXGq(9Mb1bMGB zZ!Lk)8!ccF&LGNB?(hSgKI$pw$^LRgJOqDfhH>%6-;74b1s0@Y!<@jN0$HBM&0>es>*O_4URmaK`CM zzIvm1rdnv8Wtp|Zp@3sOh}^AdMZ91Y^cVdqmT!nVTL!<;Jkvb;I_SsYfRy#co*q8u z_wbB$*1~b$&j+2k*8Fk+GjYy611xmbJ-1}c8P<8M!l~Mpo}3qSPNuS^?;Fi2cdAtq zEI!pLV>Heqd=tMH(33HD6n*%nQ?Z_3GCpA-CgQGt5LI93Denbcp0fowF{-v&F9KV> z)WMb2YOOg9NbKUHsC~JEkLuQkG(PI!h>Mt0!<}v|K#&IpgNv20Y&M^l(!a-jzWk>3 zGRAA>K4E{lrQGL{;|2GHmW{{JxC=2aBD9EJ?5JIHH=teLkQQGOnsm9{V(UBnc3!F6 zs@`tCgc4uEs9eRUjAQlQY2CH1p{$LyG)CYR$@}4VV8HPS!y0xPOMzPkDAZv$1b3_R 
zT|7HL3|>oHuUfCY6>u(CuWMru(9O+UB~}*eG=?L)uDcm&KjMeJ%V}EIH)G8)H_`^m zA8UP_e!Ipq%3iV^!_d1Ka471Z!@0(jmsass2?tiA zrP@Lz4^N2a_`HZdgV%^%L=Nn$WEziUz~W`D(T?$IL6sJqOy>ymZqm8q zcs!t!%oXoIaLw@qH;!vp^9wjG4L%vXYys?F=Z}7}Ojq&PBXBTrCWs=mt{RP?Yb&f@ zn4HY4WZ45x3CZ8s3aUXHPJGi z6IN6lJ4J+mto_v<{6N*Bih=qKo~h2w*XuV`i(LxyJf+>(E)#lbX6CibrG;W`>cg4y zGc)~@I$Fnz!mWb~cb)lq?ew$9C!d%+-q_mBcx4fie6`q@%L~!)5hfA+$iB5dse5jgjK3;Ve$J34jwl~u9mM0~+dVfO(KWspALk=@4xl^13I z6t@g34(^87X_Y~S6$7mZK~+qx-gP|mKF7CxxV$m0M*{=(;TLBv^^HU}#__vVs@1X3 zaPnGVUEzdXebMpab44e2gFBUt@czj=)k-5Yd81SVc!sg^iWM(F!|K=2doOG`cX3dqLVw80J2(xufU62jbz}P7Z{bgL7}$CG9VFL;Wt&wb$J}_(kMhIV-bx@lIz}Cg0zzw9hzUtD4tN*o`rn9FF|U z7yCzMue{NtHeIRT&RoTf4HowDih~=5W9adyv60Y-XA`RgXhwMj3_`(6@k?b^P3mgoI8U*e<^POB^Uw5N+xz z$}oK;0==)ZsDF(&>fgg5rY921JDz##NH&c^+vI#tLQ<0K32mP%kQyqzYoV7)EO3Q1v!oxmS$zsdE0Mb zu_D)gc;N*glwvkiE80$WkTeBG7mEdFQ5CV9^9L*{0y?&c3qqn;W%THM5Oc1G-Nipd z0zfg-N#DHTRSo?Dal9xWjBzN<8z=s1{q0*MoF^7}6_0%x&K&-MbCVMXWk`N+Yylt` zP^AZfZ~C_?Nu-f6E*0Tv*d`lW@2dGatUL%jsB%E0vo$#h|jRf=7dTR4gn`+V#mJ zjgg*+Ba_x$1RDeG6jr5zWgf@mo8qq^+e{{^kmuYwmws`Gq@Bd z!+UjP&U~)nG7PpVMeTM|lYvarma9_KE*XYwvNxs?-#@uvNe>zc_)558C||Wp-2Rv_ zvt!j_wXW{w7GV9QFACw-L#}!QI#gkBb0v_!S8zcKSlO+ayq&qGN)9eMda->EPv&Y- z;Q}ExWf<0Fcvq;DFgzd0S1NB{MGWf5u> z-s%?-&Fh3d5IfW1NlSkWLY9r!4j(yw{P2?}j=%eLgc~E|2@c6sPBTSE*p7+s&aRW$ zuqS&#Nqq^`tA9bKqteDg#kt`L#{D@QZ$Pd*s^o;{;N~@MZ~Ia=kvI`C5+LB68^Us&23>X1emRR!E(s~wxA3Y zD%&{eAEw_{RDmn*mU}@9*U1d2V%|c}rd7R4hFH<`E1T}X##MbYRCa(|n~%e?gAB+J zREthNyAFmiPQ99AfWw7rr53zU@m9W)O}E*=NuM89zs~x9gU)Z#AplS=5orl{5LgeJ zg_Zh^Y=SM*wRi)>#<;19S6*dPtXj`ZHjcu9(w$Pl zPg$(ZIrG^;@&u63{2EenrB$a3*_eiG<)}1UIFQTvS8U;am9Mbj01lAci}krVAiB_t zGt-yA{R4exqrPO2-DnkUAewxI%aPNZL(Mo~?uFR1>=>@+0#<~u=!*h5$dL|3M=lIG zHkeKFSUH$ua;$bX&5rsZ;e)f08hX1>%ntXy5so&SZYMKp8S0*p>Ihc@DFD$}Xs4kOM!_-FM?>;jqU>_7o5 zXtm-1JJ}*z<4UV+^yQ0!M^q5Y)eFPGHII2DfVL-`w}`a^Vc|Qo$qrKlt}Q!snfI&_ zY_9@m)#kF}&TIwrBZe zRv@`|ZOEu##wsGe7jqgWY&|%d)>?r15_&Yk8PMwkG)3aPbu)Db)oBi{Xz4nAWRK?g 
zx1f3Pt~aa%^m)eIm8$g0?Y!qor8#G%;*fXTn&n=WGH#x^dGanuOY{R%@tNPu3%Um0N32`p-oZ88^b zWFvY~vr(VYXG6|x8E~ZOqZiSA0N$9tT@XB}aNA@+BSXQF13kJJk+!^Gt>_4rQ@_gk zqqcwdU9xwKaLEuRaRsvS%o4Ok$bP%7ZhCQ%VdWl{g!YPo_K053GH-s;Y|sH7`9E13 zMph7pF{@6nk5PZkd$IN^_oCKaOjD_#BJ2r;R1rS4h-%{2m%;W*1=pZ#oYQv{w-wcY zWYYgeNBF`4UkaIpY|*m-Vr_QJ8|Yaf-e4a%d#Qd<&IZADP>dp2QYGQT9|#w@)0~Dq z0VTa%`=@NiZaT-{JdNhr2jB!!X)^)wVi%4ukHRiz6oN*~j2K(t8^gio1PmHR5H})z zguWFCj{9*mc3^xAX;bi#ZQbg>8^tf<#_*0Q_%CH_>EujG>J;A3;7tTI^Irmq8gLoxHk5A+%Y1W-=U$CXsp)sgi~%t%)HQ-kK)D=3j(@k-rFqMoqSn zEkVih@VXo_+9P;$i+P1kyU%Q6%1j57sA-FlHXEDYL$oM_FqrMX_3(ff)ar;Y;SF^_ zFoeN1Ak9cK3XM6eK@ey1kP7a6fxi`o_7~R#lq2diLO7SD4Y`&>5*t%x*R(IXL3JHz z;?yL=z6mCh5X`CqnJIvj_d&%N`=tKEEZ~ zS`!?B@;=>n)+PHov;l}Y`|)Pf*S5#7Cv+L%$1y5>Bkvq&9n^dB*3f96HEE@nInS&m z-a2Fr%M4&ttr0hd-8WNx)j5ol>g!+#46{={bD=pHJ7eUNh#%oDsTINr8SOtxe{0e? zR>uAvbO6}jcaF85aFgy}>jbXJ#eobr)%M|v351(5>@AErPr5PPE16S6=Km?|bMEiX zzm)s5-`bO{XF9NUihJ??Sghw*a3o~MIfnh37Z7?DC7tPkV54@lTdwr}G*a9`%IEM~ z=4fD)aEb4Ivv)PrehF{RAjJ>j_bi@sttqy`8pD1`to8-OoR_`8A$Q2yv?+jbxNy^K zrm@QxRKJF}@l64!4`J959vNt7+_!4q2ONcJ0IEbn6B74g0F>)r)|| z=_XgsH12v^`qx$XLJ~9t|K>to0#xR3J3?=6}Ow!zpY;#Pm4v zHUjL4)eQnt{UFD&vo>NND zOMe9Y(afz+{(YVjRpAa_r=ElHk91YtV9#aSrqj{w?T$`EUXNh|440PH=9C;PqW0WN zVivGx($Ke@La${N@2uJ8t4J+3p-G#!@N_q9hCshyaWe)B9U658``aGULiS!X3mlr2 zS1NxOyFHXlbc6f1m}KYU>2`xBp@NL7~M3*arxPn&5C`kZi4idA428 z#3cb!b&{z$B--k1AA>`5&e5sy#WVDYNH3hSu+`kRR29BB4+l3k6`dCe@A@K8GSlh; zUj)I+E9@!MqZyw2EBuQAS|^C}#zSkPi1_3gLn5M5uF7ZkgE zodIQI4UZx_5jZB+Yus>!CA~rCJLxR2GP5w>WA^*g%-m)D&oD(07jny8zq7EvtJvzl zFcNAWA_ki`c6$k6!a7udi1Q+P@rwFiuTfd8vQVdy^FO0z`vE)wle{ST;WT*gJ@`!t zr%E1`9P2LOUB}>(J5GbgrJ{HTp5w^30kPz5$4p*EG>-hcO!D)ih)aOe1_y2?;qJh* zK{#ucIVv1)2r0cnL@%k8Ym3=AoFH?Bz9dLRO#8wA}l3by)2nwsgI{6r0-Y83f^&El>$q+E`Pf=5?I=SY`lm=1QPR8L z&E@|V^7nNij8|L((uGgJ!yD1?z=pnzGcULpG*HGp31#WTY!sGUviG4rhPYTcj)NIN z`y~7k^)V+=1|JE3QeA|9uoi-wVmizGy!pL2?uU!ICkI{Y!JtQunBUK!4Mj}m9fJbDRm@wjWuKm$?D;X!sT+cyLuFp2lM@CMRca-MCj3*#if;-e121dtNp99m 
z2I}AM#GM#!+%&|BH)bXEJ{NX46LZ-4g0(A5(&yyR$;6;<^sK*=9%C)uulC3_& z+DUV>k*lJ>RznCVf}8pza|EGZ^dis|LFPcA*Y!rlc~kHloDG zO&f5?aLr9HeE=sfH}A}7r5sgpxT%+4)4*V12h$eaBKk4IF0i~6OI&Tmi;u8Jgor?p zdX%)$*uGXTwB8uaruuopoCesdC1l6XJ^rLtUfe5K|0}GN0HygSVtDl_IQW8v>=r-) zIA6CEM^=MBTcj>BLzg(5V^ApT&%*a4Qivs2%*=c|@MPjjESQLyMlxj@ith_}`u-?w z@X!7*oUW}u&X1b-xK0pu{XtmthhWVg&f?Po*|>!(7=ayl30*v^zkCq461d)apqN~O z^|zI@2F0d-P;B~B*db5jD;@Yk#8qC%Iw!RSe)YMzU;Zlpe&@OA85KpVuk~4Ruvvsp zL4|L@({soREI7{t;uH*)6;Po}xy3VpA%tO&=tSf)VVrR;N0WhtGf-*pI|)#2wBpME zn_PmVNqslpt9iHy{vTj9#v=)_IH1SjbyU5}*oqKzGwU9*bd}22fdxCSgyF|t`JAiM6;Ko^4xs)tOp27o|v$ZwHY#HT!7so7(;sRhQ zIZjC+Ha?AX&uHmJ2(v{}-u@egr*mTV&oOKqu=X<>1X+cl2@X`4Z~qKn7(?Jt)SQIU z6Y7p8Es}!joD;$?ZWN*74QeUetcQ!n5Cdim_qjJCB+_-ujg;f%0Xc3Gr6DHLB~jMT zJBijHE@(2|1|!5=SPdzr1)n;^+n7bYYWCkp(jSuBNq)B1yk5#rJHu4Es6TL^ID&~x zn(i!AQ@Fzd%FtO8N~KuSpyACNAO?!J_@~Wg!db_3=b?~%<0YiPfg7whsu<1YAj(ZT zqt1F!<`@7099VHi-Q<$d8ne&`C?>Bso66&El827&o8`?gHcg@rNtBL*JI<;SuZlaZO)+5$vZMO!Njq(oS8aG+q zDT=7%O|)A5ikochvIbcL-t4wgRHCXsL3u+^RHdk>0?;R&J*~ay&7Sf;s1uoTC*Gz% ziFeJE8;7DdC3tLthDFYVn_4n92ViP$wlacfhpiDXZz-~2ejE0S;&j?g)?(<%0qM8l z4&W$&@n!-u8-!x?W5U9%V_u{=c#nq(CWtXv4#vv-w251E4JbWXy2LhzSQ=BiLrA^L zS})4iAzZ;aDKUM}Vr|$QXb!D1zc=mQ>4T3p%>5^f9!`R93cL*(CeVkQd&#sm)2qq| z+L_)mkPioN-oAx(aR@mNLs@ywO+y8f=3F4=2%e*Oj^R0u=LtL~pd5`h(^T9N#@^W9 z9B`hxXS!*30E$HX`=kkXOwSQi?TB3pLOC9Mv$jTizS!Fy9D6ZPPQneNo|EoSdFN7O z^*X^pl|SmuCF0E{U#&=K2D-Yh;74r1!6t}AUL@Lj8_qRy&@kL6=Br+e8{phFg_UY~ zVNUK)0KQ@w`h+dgYbB+=xMCI4L_{r&HaRn4wB=(l) z_yyoV%~|j6;=K3JAg*c)aiX-PSGh^q8EiD}Z%wcIT4})Q#%-xO?RlU z@X-OV#`v^N5hi{re_K>u(0bdVBU5n}6~#-j1NsKbz&8*5J>rVg1GTIFdASgbMylG- zCsw%SO!2#LUTO*^Ygcm5Upf2w(R0sV5&fE0VE^yTKAg9q`0@{^`#MxzwQ$FTU?s%8 ziE8noulgc3u=!0WoO6ifaYdF5D;sa1%Fm3z{N_fnqJEMMEHUGH){xNrI}G1#;nJX0 z5Yitk;Vi)a#$;TIxZTYE0G??%TGSCc+@^=3)aui>r9J10p zmTcibC+MF6p@s*MpcgVh6tm0V)#&8JAMF{2DIbjJ@H9r5GkRLL*A4# zBuoP(J7|MgqQs}6yTh(8QVa_bi)$j*hjH2Q4#qu>fMDDMxEK#;eNOicSobZkge><* zq09O{d2m;5=)RBkC&GPY0$fDCAfWYVT-rNfD>N=Dhc>7%Cj-18{as2iixR$y;xy&* 
z)h$E-Yn~ZuA1;~feC!3dU8pU4xzhJ{5eZ5`c^%;y^wtO6fQn_8f=snyf7nRMa!6)Hi{!T()(V&$bmV7VEg(k7Oj4X4hvQPmJXF* zz7^M;zSRPtyNi)RnFLI^+C3t``aOM34!#uHm>b0f zi1kx=rtsK*2ZdWe3Xrs7&A3Bp*WYlUcn^8deB&uVTtdr)*t9p@gcaOKqZD3>3bn!# zPffif`uCdNm_kNe=@kG&Ir5+y^-ZWcL zm<6S9VR4Ejp)M*SAO%Xv2{&a0Z-z!$%fLHf*7D{jV77p(6CeZP(+Kt-x#0TUFrE0zU=%}XJc#39cXPr%sAyk z$dUsvBBJE`M~LU)SZR~Us+2^}wPTv=@qP=;Hr8*JgR_vJXvP#A3qWp0ItSN2SnC*v z@g0RsE^qzuUhibA4J2GNL6Wk=IA*%OwcW+oBPC$uRN&1mu0h!sbF#zQD031+Sz~y= z6ZyEIwQB>AC&txOEsU$FJ}NkF^S+8=nzKZbTDe`j8RS|4ri%j_G7H zz25aBRCy&fl?F?|sL9ns~n3x`D$91fIJ25jaU(Mi1iY_vfhXD(PolIj`SB4#S zFw{0{n%J#=n$7+dI$a#|W9+{O0FlL72~q^k9V}Kg!l8)cMfgeR(t_H=gd&05W(k6u zM2^^u7uh(rnSHjMyF!|fi<~4|@3LV^K0|T*l}*QjgAtvt-&ViHu%JY$+O!UpiZ}HS znI=-vEjYs%qRWX5n)8p~>(kh^R=>;aECUgI zvP~}RhM?MyvTU{>8_&1DOTA*6)o(N3Z_*KQp`Vy}AzF`n#J^Wi4DoNqcuKJC7`{-f zKX^DQwx0XJ=!>-{+|>Vx;A62D|DEGB#M~WS7q#^cBKtv!0hq7|07iXiczCB^o zD-3`j>JbF6WPq%>r^566vMho#*FZI7uznxC0K4^v{PAyAW7J9l&9qVQ7VWL;-X?J3 zzBz<;*oQQfAHr5v-&O!J%3h(giM$J#9VoE4|7?L@#+?q_twsx}Q3f{&G$!rD#gku0 z#n&F1UQBj;a`l@y;CmAj2+2oaMH>4Dy7la>^aL-38g^c;><+Ug*d4NBft6!QAbE8A zxfWalx2mk@gCOt~{QyJK?t^+4b)sSa0YAFwY_HJ0D<4Q-K z_9wCN;YRH8Sva|A;qa~l7QzbtDCV4&!ESIaL}}-ZlVBS{kfykUja_l5KOraK<_D2G zn)pBspd=*XV}x(ru*f+@dPnTUe*s^aus=&rbTpu3?T_CK@iS%}WBp;;5r zZ1he6#(+3GAHAzRUify&!nc7V^KycGWsZ>gDs%rkI=@WkSK;*an=6pInRV

5E8G z_`Zm=S!5_E?gRRI2CXieFjtj8=1WRA(Jo<9A8NA9?!Rr;#o zATY^r^qLw_G?nkv1v&z#LCOufyI2LK#d+)n%Zs1Kj_x#0JL>xjAXj}9g=z5ah;@&( zyAMf`NI_G_e}uW^;SN!j`Im_D?8j}`TjlZG@IQ;2m(s9=q~s1K&LbuqzYSbA$;NgXrchFpiK-6^)(LXAJJK( zcj_zfjg%k3g`j_6(@iq+En_*ri*G@Q8d3igbR&U7&9s>;6cdg zR(p7o3|6F=i=#b_>X&Erdw|ZrM%!UI0*$G8O?ZmVFQSH5>Q0ZbUZQYK5<6U_Cf+23 z>c7(=(5Y|Hfhg>^6sHp2LyJmPxyNU4&*e}89YBlAeaBygV4_@Ppi=ZKlLtV=a zKZNlaa?Qc{?|HE}Xt)?{=r5T>X2j|q*!FQ0EZbZ>_Lher;s7xxZJM0sW^q3-wiH4% zrd#`aA^OWCu~!ysk;c5(qVJP!3Hdr&+i>DED{9htj?OtcvO)zza8jQlq$J=WSDu!C zjg*W~M}xPDlOh70!sFl1(!#>QRTBaK18$I?^}qJA4e1ZI@o=$ zb8HY&5myZAhWLAN0RJGff0;S+bh_E4XVNJXNlvf~P9@+esu)v1pzp6e=^qSsVpH6f zDb3y$v3a#ZoGc3J)`mx2&;fhbbI&~3wV)dYV1SSSaO8U`yn+;deo+<%Ngbe)@5VyA zfE!!zc0fdc$V`bKDlO2eJyb~P`}>0GWH-HvP~LxhR<601*deYc=pzu z{<;nHbvQ}vMbCqChMhC+?14wU&K#_xt$!1E5>OHlKEwA??OFN+&}1h1VeA5@g9ug| z=8Qw~gU#7+do14vpgo1Wg^koMek?|=UB zk)Rq!ol`8(2i2Hj3~hj@)}6Ra(rVQUK=qWK2Gg5EAB3#bh@)AWgA zLxff`Xo`}WY;fvh3!?m|20uHX3tgzxVe%;27tyRx-QxMJkw)euowhl9jD^WUI7y$Z ziH9?! zr^9LEOc38P4kqFw!|BLKWMp_`-N+!G)JQ5F)Bi@&BO@b8#HJZLk{J1T;NyWqBZX9b HWbpq7IifTP literal 32562 zcmb__378z$U1wGGeatb?K_zyJMS=lc4R20r%Z|M=qAw;RUidC~m|7kmi&e?A^8nw!uSo9Bb8_-T8U+1Cd-G*@k%@sm-9$DQAuW!a&DDVnUp-2&h*J| ze`Y{_2Q!26JCqrc-{HzgW+ZrKG&734Xn9>_eP+Gn#mXBhW0^5IkC(?Q8#5a#n=+f^ zI#J$S*^=2(*_zo}*_PQ>naE63wr93ic4T%`c4l@~c4c-|c4u~1_GI={_Gb3VGs*J4 z%4BA;vOlxGa!2M4$xD?FR1RhiR;DsjxISg4?Y?Vf=Fp>t-ER+EGwgxFNa^rWD02ik zgZ2<|h6+b;eH7Qj_6V*=P~sSJjw5H(UWc4@!83OvXT7}vIU9nUyO1+xk0WP1$hjLi z8|_WV*%aj5gPhIw7UXORa_&XWR(l(AwgovSkTYR#N6z-b3B3D0T<@@V;(DiCpTzYp zdpE9k%k}-Z-ed2@^l*tn3*Xq`hEK9azQ!8TGfx|Yvr19T;=x{t9kuMb{;M8`zyIig{)n?Ty%;T z%LRX=TB~L&xy7urFh8#fjy#@r^0{&$o3GWB?f8kst5@cZE?!-{%G1TGIK47A8S)bk zRjV~O=hl=T)$jAuPVRCcTdNkb?11Gb*!yhGMMDy9u~Nu7SF8Cv2t)hB^AL_RIGh7W zJkvH747JZSJi|4YEVa=CN!Vyq)3#-Yt|9kfW65loo{2#TPru9x@+OkjhZ!V7=W#fr zNM`ZOb7?+d8yk)HTQ7w1q^UOI)-B_fa`=CAt6ljWl0;(Gpp9jM{&aTxVYIXpF=v!_m$YXGJ5 z08Uz^0G1?xUv=DEHDB;U#j1-e^OA4QciNz~p&CCHRKp7=%9^dXIbo_jILq_0`x6N+ zoGF|X3=muc#5VaZV+I7(j$BKC 
z@2Z#Q2^<_G$425BOGeXNHcZ2{N+HkmEHC7R-7wO~lI2B7e{aM*BO7t8X4JEmO`ONP zm>t5fo<&}K*{H5}6J89@Co#Zp_F~m2@>9zgsBGHC$YEg50w~d0Ra=;M{P4wM&bctz z=SP601?9)H*GZhmgMrYc8m-?~`!!{>{SKdm@G ze)I{MzJB~M8xyJMUd2?*ySL(g*eR4}d@Fwh$5FM5Wlcxzmfu-*Cw_04ej=OAmvfGj z&FaD8&?w^VJ{*n!&P-yA(q_Gn?V4&ebrsgf@PJ_bdYpJhlLtW6v^;ZJzUT42>B*41 zSsg(kbqL8VOWn=$txVcD8{o@KOY3q~PdD3@C!AamINf^CoWfN8=&IyjAmtw1OXp z5N*#J%ni(&kR9f{k$FO@5%r@FKf`Y6#^+^D%F~9y^Y%m`_Hd$JDJf;s#$TmB%f`>#q^0L%Mwz zT3is5S=vW(l;=EYeR!V25yIi@LDCCjFXEAq1aJnFDNbrA-!j!C%Bx3_OvY{tPT!ix z^q)Vv7DVsoOM_XpaaD^uzzuhRtJnjbypd;X!ShYHK?G>Qlh>=@$qw`5aNt`=DkutS zdo2#qPE-YH7gUV^u_FGUz!S5xQc#cQt`=0jRSRq4IcGnAokb+8=(ZijAIcqYE8L}FD=vPJ7LI{VQCko-iUaOv(uSIF7U zWU4bQ)Ik)4WK^sca>_Sf2nh9MfB1<7@NAZU2oe-1JLDj%lJi3|py)`B0_!;`d^4|} z!tYu`dS=nWJWXY*j}B z>U}G^CKWiv%5O^wp2S_!V<(IxZlx27Ie8pCAEFpP|5oHZ4I%1Mp2*tz@SN{t&Cat( zI>>7S=vna4>&B&!+T{Y&*A0jz$aBNZ$aUko0Satnqi(nqdzbMpb4C=>YjHR3g_lgq zmhWW^iR(-?f`+Eo6flqRzY|TGS0Segm78Lc(EY5_4 z3l6qvwx2jTTc{Qm=hXullvCT0)DLuxmH?rRc3pyH>#?3mBHVf}E8=ufl!g6hS$_?D}rQDD#_Tyz)>TM3m0GqFq(;^S~`4~XwT8b^2jK4wJ2wyCA_MgcYDmK;BJ96X|mlPQ_T4~VNPGC zS!d3llL5RS?O7$rwGrnB@%9yn6D8uj+tB128f6P{BBdw^I&)|a5Y&me$R%O#$&I^^ zh^`x>1`w_Fe8P=1lR8(l(iE=KqILG6j()O(cUdzbJ7Pz{5C*(Z7eg2nl#FS$<*Yh~ zHVX+^#Tw$cBdlQpCtwX@0c$X>Ti2nJLn@5~ECObwTcN)}DSW>CAWRaMaWV}|=Wr;J z31G$*o)h)e0+TkwP|xGC!7O1FlRaQ>FFjdZgUQzy)%r2@)oDtdVELmn>6d|*X~iz z%p(GmlAgigoJNBAwPKEed4Qs5k*fh=HW0LoX4H)}khKB_< z>Kk!(w#Q6F&9^EvD$?d zoFSygNX@fFFCdvrtiV6Di?>8W?qHzIAem6YMmuwf8t=PsXwq*H7st%aW_`Hb`Sy(J z!N0GS@+&G{x25*)V~h9-m~n%T){E%grri@xS!6Zsvh-@7i2Ml^0LPyD^1Y1mi zn_{DJA>ww)6H$>T)=(`uCL%xVzzO7sjV<|svI8Ut>$;pl2<`}N@CNAaFEqk}y>-TgK+n>g3a-`>b}budOST)5`9;l?s6 zBQ1o*k=I}2Xot@nyYSTQhMPm(9`3tv(LLN$Ay*D}BL==k7YF#7=4TvSND?m-2ccBo zfgf#F-O00OnY;rDw9a0!NxjHh!l&-z*<~iT<5)k6o818JW2f5V`FbFH12x@ZZ2^RN zy$Zsl;r#R(%3(x-p*)Zu+Vd&yN6s8Of9~9^(93-6Wg_*%9cFoQ55t_?0L!R?385Do6NrG(c5^N*ux-7xQ$ZgzkkQSV2I z*N)paQTnG@X8?I#uY&yRqhzr&#~z)l!TGxnb=^kyYJ>V=l<5LBJbA0pyG6|~&aaD- 
zvDre&Rv5`(k6UFV^9Q8O9a&iGKc+_h4hwl6b^ZztO~7^rU2VgE4MwZJoy9v$^SgN| zbcIsiFX8AO&%u>7zMg6HK~(k&tS}f%Uf+H&_u%ddwnPs%#c=v-yN&rVMdmK3m2V*f zGQ@z-`4z(9yakCKN;+rZ?S|L90nx!T94ifhFdqtB?}77))!hpDsY?Y_ zEtIoPv0m__z}lh>^Ba8UcCk7O-xxWLpLim-c&=6}!!fiO*Iisf266&F&W4?JW62i0oLhPN(CI%Pc8EhA@dd!+ae~CD8S~xju8w@*4 zh4vZ4f*m!C?^4Lb039)$%weDgzi-7646Gk5&Q@!xuxeO|X(Q~$D3VST1?=gj9?H%* zV4$|3kOq|5<(zJh&u4jC#%CL$5Qu$U^R+!Zf@9!}6ovgb{Ml7tg5&o?$Ij1X+t+EQ zSV4@tRA;(e8)Rm?da00KEga{$|;KJqibigB=#{ zUToC_SJXV!_aMPAbYz$IRO!qTMR8t6T{U?oLR*JglfwQ-96b{OCe=RnvOPz8W&)8e zh2sn#qkV`oLHhuLnJ~R_Cdgf?En!BRh>=G~nU>EbKeTMlfC%1ZR?MuL6)-R?H<+sWQ_g9tg$_E;Sh(_W>n(8n`c$cMX9b|YAty&8izJz>{5Mb8n(_L&S z!K>qAZjTQ?glay|ssen7DCVrf2j1nU7az3LPF7EGgVKAqxDodgB8_!Ly2NxXk`f)- zC~+DW9h9K49a0BEyD<4;L<-=qBsQCCUDXwNtR%owwM4Xw2h=?Aqi_^ zP6r`KEP@~e;gG(0buIk(M-=N!f}u|1T3FH@U_ww`xIL2mFmC_PR(*_uQq1}KAc^`e zRMjy)^f>4VE>dxV&iNpcRVc#fz!0JaO&WSdIEC5wlBG_zxG&Cgw~gyJ!|gL3NP{ z0Vlyuit4GJ?1GmIxO}w;t=l2uzv4NEQZR{K+zi9$JYf!gB^rr-b;7j18aJa~t&gqH ziq=L@5<|{&K~M^7Ed<3cAVP0=OF6c5!`bF8`KXs$A@IIUpf;>*Knn@2&dA>${fHs| zf}=&Fl7cg@5&g8Kj*{(~Dv=)td2$tEH?<#*Kxln<&O^|T0O>+Mcx1DdFsKJ`10J?E zgYv_(xndRZ8F%A``Y|SJ@Th)5N2n@!6oVY45o?bO$z~jfj7$k9h+Kdzt1FTLA%ciq zpxIS((db44d=w;;zspgEEw`c^s21ADYlfE@Gt_zpt`(0*Se}t5CB^~1N;EZ?p>4XV zITi)HTM>l3LhF*A`r*sP!WA`#d_S2-pNg{!wFRf;*P{tlvHdE;q|D?JlM_tVka&g( zr6g_|P<^6Me#XrDU>jB~B+^mXx5u9!!Sm@Byg|OgiaB2={E0rYIG?N9UkmoAA4PN2 zk26^VQ#Q9+F4D|^e%;ptPolsOgizS#2m;zEdA0FGC>^l`F3nYV(k@SI0iKK+Ej%F= z>OvM`hsc6jB>f3qevHW)13j_EK=%h-eXYa%an!xB1uPk6=6t<|8CVC^QTo%PX}oqK z0jAv{5~dAhmlVQJAWE@N<%)^5Mt^^+LwfXeE7l%;wxmtJ`*Gn4-G)97jip7oLtT2t z&!Eh0$v4&;waIsX@Y>Eu?Bkc^*mIhGiB) zO)6OuHw~5>nb9Pis}2&Gh&>|f4$Mpx%b8Mk8q1+#g}A*=7DSm#W+s8F_4bfGe2rIF zxezuoDahFy{NaZSZXxeVgy-p8{!*c8*OPihhBGzq!v8+Ma2AS7x0HGcO-tbj;c)&M zEvE6ZY*frftYS65!|dVPj5!fyTYWviWSPk!++I z=|*3pzcJ7lYz#Gq8zYTTNU;ei7x9u#)l1<>T_-iA`I4J+_2uawXwlXH~YK|fM9amXsmw*dPx{xns2IuFyH)Ex1#DL zZ<^|~7jL)yuRRm(PC(>IfjFdZgd1aSKf+kXN`uY*Wms+O@RHS7=MH&ecHfePr#Iu? 
zaA~9(#_tH)6G7f#Z>%|rB^puut`ke*7&zwon~~}VycF*LqPw9vX7_uE4`C78OHiy~ zNWucPqS+XC$4eVao6v%Bd-#Uc7?b+mv8ujtdvM&i8C8GeZfR(stN-tTTJO*FT2oEn>MhO1xl(l;&jOJ2j$00 zJDNMeJU8RX4c=yy+GvkeEqfes2YKg3n<2H>)ut`&K5Px%y{(KT zgpKW$I~qGG2O2wJ=G(qxGL-m}x&4J_W{?J3=jQb)Z#&3gc)z22_U zz20sv_(OgB@a7ZVzS4b5eCuTUeR}})32&l#679p7V#eI>p=Hkk(;Dk%O_d#!e=@ga?rtHZZz;yQ^E*J!+-r?aDBvm99K{93UE2>CrfvFN8Klz z=e#2rjj0=;iOp{i<5bwY!#nI9@s7d@Kj4L%PjOx@MZ78R@JR#poM%0N*9q?kYPoR9 z;<=q_L~e#(IN?5OAF>a_IzQEXdfA|<7jHaBxD*v{3W5H(QF7Gas$y_%|&*ItV z_-r+atG72X?+Jg@Q}TvT=5M>|Io?~3n5^%#@46Om+W|xiT^XS1n?GXkg^zLcq zTBB+jt41y2{9*T~d3SsF*mrZO=Z#R~-s%^<;hPcXpS%%Z34{Vd*FrjD9%e z&NYk624H{EI{`{@pLcS}oHgxJm^=4ZH+MnfA^jvv*{5%qjr+a(nV6W%C#@EdN$KC@)LXjTVt_OMXiM_xiO!HdvkO4Vk~onHd(2L}DK!IA;XA7lt~ zW!}x&wJX(fEob}bRt~S!&x13*Jb@$+j^F;Ygpu>ean>Ud%YT0Zv(+;{WV~dxR#5s8 zSqUd{1e7M)|9!LAC02+;F@fwipnh%pKeU>Ww?(40d;u?i5$sz`u~>T-g`FR(TKxzY zg<(Bl$dB5E%S9}rj1{Z%&Vz~imI+8e6Bih)cRsI*^X^2k zI)f#g z%@Yq#PxsU^Y1T(;xRt+raQ>=0SF1j7|M96ir;gXRb{>6d0h(2%(DR%hu2rjx^#P_z z-#<7zzu>33GBWGVJpJ%Pk1Q4t!wh-zoT^=hpG;*&&zyUDRW3S^m=8)g^^N-NXxN({ zn|`wA%_taWi!T?eH9!+Sj8Hh2*6t? z`>`v9;_RI3WWv*D-tY_pXwE&23R8MWWJzB=GIceF2XzxJKvu4)r!Mp~p&q$=>hRQI zJQ$wC%0)kRx>yy@ACyxLvmdeDtBAFqA$EoBoQnlL^H@%7sf)DN{AXnMU@xUTk@)cI zY(|0zno;l~MuEZ@R5E5(`<~UGq8wy=Vz3Ut5c#?z9}6NskGnV8h&=V$5V^HD13osc z`*!MMG8zQ*6OZ=-Vz1oMKssBlU70wKwbVA8kcDz@!QCqhKB{u_a}O70dWz)tk+x+9b5798LRnVh`;mD}+(IT?EjVtbuY#ArX}pkk7gPau?Ek{5 zq99QVShpX;+~SG009&(#Tc-N2goY{QbO(o4HT3(?(;!Y^BaDUhyZ&wct%nv*9IhuH z3zj~gI1F9(3+ygEFMxIGu`0Z&*a!fr-$yQikti2tu@b(%eCf-n`b}? 
zYF8YzDk%g-6KoVajys^oN9KT&ihu&u!a_d@_W7VLT~C=W6g+{<Y3;pAotSz+NJY}TN5vPQBc zKQ@A8BkbEIbCqis#=AN4~y#;?3|x2I@#RiT(O+P1`_JicqTLWSc{vU*IP(r z`p7*5B9(%h%Z$Lg=+-J(UXX}m%ZYhET)3PoX9imwXewZS>N9M{XPNvy6A~|Qct^M~ zxB^%+EVbp@#Y~b7)Nk=qSBmZ&cCp}g7@2;)eLknaK!C_jCdq3oU?X3PP$asVNjmMU zOahf-I|jFy4@wrQv+i7`kJJegK=6hz&|S$yfh4kskwZ*^j7hs!ejH22z~hvw=$rIY z*=(>Q2NtQ59)r2oW@j9$Kg=E~ie#BI z7vX^clRG+@LG~!P5?-5$R?)5bLT0G@jxbNGQs8bRy?69FwGO5n7a!PZ* zg?Ydj@|NBrWb%}DJJ7|iKFI_TI)?fawlzF6f8;Lpo6PJGluNv0VTAsL`M<-f5TrEV zrbZp`R)Ss83XO6X{iueE9}CDXh|Nq%OShVEm@`5y{8YE_OHs%VRST|yf?>#XSHH`= zm=o+46Xv8|F=>6P2&o)TU8$){enP}ZS=61>rxNWotPzG}B-<*Q4nQZXP5qFIsq~-t zHb6-xPAnKzKi--Oz{09sSQL>$@KyCXpLQ$$p$A@r=GVir;?R9m5^o=eJ+JT*2OY zp0$mc)41k4lXycvT0yT2J>c<9NwkJ72%Ed~vsOBihGTLHPozyt-X7QOvMgL%UkQhz zUkQgtEw+#EW*?H)pjqGaDiTVMub2{J9LE_P&IdszJqV}pkVP8NMy$c8HdF0Gp2e7Y zEb7L2GXnLP%5PMiMJoFaFsA7SVw<3VCt!A(Kz>}k9p)(9g%PHe&-xPTJ*XoIvqLJV zhh|r83NuP+8cR`!RsD069Z;WJQFgF7g#5wgFpO)cwKOuzmJHHps;@n>2!T0xqgDMN z?ygh+)LTn)y$9m}11w7$q{Z0D2InxJsEh1B&c|(pt&|LyfJ|rq8wDzzTpBE2G+it* zZEm6o33oQzA()Vi(w62{cN?#p6TGA21yNfVwQZMvVG(X=2aKPm@$^o3$LTMMEr&Z0 zad$O$!w`<%+3``MxyOcS2gV(~CDz<)C#8olkHgTMfbr8(V=#Fp>@>n`;xLKhm<`V& zG6?3WB=S=*K~CZgW^lfr=gmFt9VM9XU}S+|-MyoE&`Wx$=9JxsyOx)3onbvDt{aK! 
z9=jhV$iw!4JqQz6LYu$rKAKsvhe97rE;vX3%<8V&^$YDkg4SNg?@|0NvOnl2Tn;y^ zDlkLiieN>e`YX7545fYqzsGUh*}O|IY2=0(l=v`m?K=1Co zWHkn1RI}7SAaC8c0Ywy!11O?Hn+$Jw10c&%0c1}!??b8e3{_gTV3ajVCumd+&hGaH zcn@V0s4)b*ALyzvAvHY!2oH}L-q0xE09bE8xYO7>tr;rc;K|sJpW5p`Cq4V-r@qsfs zr?NeIJw+h+a14hBz0uI?*K=hIh8mVBuHalUJL_tp%|AsVPj!!A%{* z{Y)J4>|<0h)hRwhc|eiz_#tr2OsXT5IKnBlpewA*QFK+R6HH(ng{s2jK_TF0}8 zm>>q($Rst_E@UqsRg9(bqmbekU?!ueq7Ja!9M3pfeuDC4PO02gKbmty#fd??qzI%) z_tihLR5Yh%sn(3};a8Xgl%UhLK%7uT7W}dlgm4X&Ok^6(#Sik4hnY}`=Eirjv6`zM zWu6!wpWxY9BwFUzvRelF_~qKAf`)_63k2I^N40hqtJ*=!^=&;^Bii?C6)Wm7R{TaJ zeh46t@Cb+(b6wF9x7el-_QI2W$!6fEEIW#5N1=sd^(Hg{$2o;=yZ<+*;?uZ$)Z@RRwyV7EfOIB78XIVrV>EpZPdQImsumbGPR zJqai~E20?om8f3T&-^`~<_z^C94Z9OvXD$vqOPonmpKT_&)rh2SebR|nKYquyk6IVFE!%$6ZR0el~9hxO4#YQ>U{Ki@W4|1_#&Jv7(@5btAtp4G$6fDyw z=Y2MHG{&Vws(2bduKua>C%tkPEMdi;WLv1oY2NCfB(^#ggAsJ!xtgsAf5q0^ezUBf zLy@B>Awv|mCcfF|OWvQ1TV^_CnTmJwIC?(e;7XQub;4{Av29@!&k#atLI|b_XJU3F z6UTYN#vUe^r`*LM!2lYZC+sBlhf5Wp;Xpqb(EAW0yapu^UNeAk7{pGl*skN44AcsC-HJ@tM<0?^V6q3RG!MSI3541bH(;*H zXF_vV=LKuDF*KCBm=9(?U4r@Y0@soi7UiU8e=J%=)ju;CW3qz@hX97p%0)dhLmbV| zArBZyI<9RaF&OkNVx!)*BEEJK6B)k6h_A44QtoL|A8v!U?d$B}k%S_LTA-MQZ~gM`Dk6@eOPVcS(~j3C$XC2&*Za9adywMw|FYWps> zUX{D32S|WjPrd;i4uIrGv$^^C@>LCqZ)rh7&r+duAu-_2Vzr(YmQvWv7JDqgFH*Dv zfce%;RlNh}T2gjTEX zh4#MF##>53`j^a21_Cs!<~E^g3=@od{gg$57VItUgQh*`rK@q&JSn|4z0^tLg+InV z4BT*LgwNpK{_70L*<&Y{0cGqnWntGR6G4frQF0mstbrxH0hHWnuM_*nfE#JvAvrx5 zVXxn0KjgM=V*rf?j>aTv!D z97k~+vp3v`H3r?|UR(l__BT@Qo!2dI(1UFSFf-n1Ar)O8JT}2x1`Xb{Uifm)8)3n~ zGf9s#rfLgpknWmhZaRzZlxXDj4x3`tgwpzLB^)VEd*VFPLuMcb=aIQlX(`L`s@@kqP z!Z~nsfjtDAgnq5qJg9fbXp!3=S(CkK$GKrXJn{}UQ7#}zSmmyWi2+6|M>Kmy zvDS~_CS9m7L(IeFYAfs-Y-o4FjJy#honUwVFuZRS_)UP~V!6;ZkDq3#p`1e(gI+eF z?FJG3l3`XymZgM|&aro)Vr>CF3GO6baL{!uz;@t%^ivgXVhdO0#X?!Vmk;Lo;4t5i z)PgN+NRF12hqQhfrhcgepJmBcBJuk9~sCfMj!#Rxmj`|hWEt=>N z7NMS`mu|~EB60!|?qjZ?tVF-adt_pr!i~FuF(REBDH;>V!qeu7&;f`&+LUk@XZ@nS z()e%y=X!Iktzh{yO>BlCA`L@BIy4HAsr`K=8cUn|AR5$%R!!JR6QN&XX@C?zxKdyz 
zcvX|hN0@w+$ubjxrr@4P@=MI4bPqRGjo}=&7VaNmcV%8ma*~(QT@Me8v%6G1<5s(= zY_!#%NT;HvVi^ut&xc6HkMkZRZ7F_>a50F*!3xX>tnmCs3tdYwqsYCyVNX*_QHgFR%|AY>ghvdH<+-(%)llD4$J<6wG{T;(^+8(zz;EKJ84T>E=OUNwnBTDy= zAxqDJiJnI6eT|J+1ry@Zc^o~Qou>SrjV18iV z4$sDoG2rGX!F(HGf6;#VMrATk7zR8E%|@H}pQYpWskJ+D5( zEnAo@1)Ut}#5ie2{{!f#AEvoq>*%-G@t>4T`8Sy`|47CU)r<3fgc*oKVop^Uy>6@U zePMw=#D~eDTC2ZVyZR|6?RxcVIEgaWtX`A_2nY^qnTw@mLbORM)k$JPK7* ze+OwW2QS--{eRa0!r2SrTI$&XlYD{N{}~*PhstLSY*GdL7*%*G@Km^=)mhJ-1U4pc zU`Y`)!2~zr!D1S&h88<4&cj}^1a2_S{HUBmC&GE`r3Cb(l)eX@>b+hnkpAP;shUwP z3wZ7C58gzx0Q5MiHv!M1btyc`cO+km+znmvg+!$I9KZ^%x6DEv~WF(S3h z$a0bBBi(pd^-d zL6^m%G1RmO7+P?Su)Ymm47H5~iwffBp%P%`k5AWi%<+WK{a4IGb?d~0=Jh0Wub2~J z@sT-Iw+>7|D}bpC|4LOZ)I(E85kZ(&wRwhLhDDkTGZG6vjag12k+O<7L+GH$sv<>x z3iYCzi2NH=ePCa;Het)cJ<6<;^{7NNPt>Cm;sdUSC*0Zu{@kZlw(CRs9=2A80PM&O zGda0aeFxw6aVDMl{(X3j4)TdERErSt{gAUz(UX5C@2z7(?SLu(Bk(|hgh+fZE82h? znFO42ftbyCtGVqCo{g}~MJ7>RW_b2CCiEjR(ifgNKgvJX0^JH@WAQ8Mmw5X{mhRXU zNeR?<Qv+#P@X-tjT;c3_#)Ni4R`kT(r7Yg=?iQQ92 zW-87ESj|BwKY0YE9cbmXBY@jE(@&hAKnSr$!igM0eW?pi0JVRY;QAyJ;X49r@*Qwa z;UB#~l#-ty04dKH09}2si&e;I_t<$+w9{#-bh^{DkKcf^O{Q%KowGEK{0`oXjIrhG8oL`d)1a z^H@zq@S}yaC_~T%NtJfOdGlGk_`)sAk4rFYwsr{%LoNYt!E$9nNMk>(TR(uBr6;&4 zplsh<*&ROmOC6vBmwGycxpDjU3nG~ynk)Js%w$DBKq6axP`{6NqG6xHk8V0!t^QEw zN^_+{lDh&2_{^_%n!B;Lxt!p)M*&K=y(mCXQ)&|=_-!^qkigQIDHQNC%w5q4mi%O= z5&h7byO2QBm!9cI%0^`;gu|h{gg-9tdp^^3dDuVV~0M1F_&#ALbF&o48h_A zgqAQ=J+tKurl}qOn}U0I8e232OR()N?nZ68iX)6(fRm0~ER7|zbs zxG@VsuT|@`?5%*gE*$IEQIl45evb{8@olRfpFy*=+821Kg{*S#ik!2y(%07iXzFuo zynGF{@9sjH+toh8dW}-5+V5Z0p7*B@BoL3l#_hSk^Od`-z9K7T_$y z^rd;AcEZ|vyZ_l%_o2d)(rR(wdjKcKAdBfP*nWp`%2yDAlf!XQ$Jv-U0Vd!qHOpis zVEw4>;ipi!_SxaD6=~C9TrRe1?4CfnWKX|2aaxBnfeZfoPf4bbpb`jX+YW*$By+l_ z0=!oKElvPOBa`6KalO|wK?iIh{($ZVXP8^T8NxG42+Sx&m!jR~=9t)Xy5yu=L?!B5 zS~L=NM&(bffn$Hv0&=|@3pDDig<}9{o>jewB%tZOsk%0Ply?XU|0!DV2Lv+_#Xtb` zaJUD&`XxQk!4r&f(_D#d8lvwvvw z9r187lnnD5PsS7Yn?#o3;P!l^Ysh(h`^d%5(8OJwxhhIXuT_x@h@_PBNERh_tctAw 
z0t5r_zvsBGm+X#&0I}A;j9NGFUr$+VG{^rF#hKpEtM$hakE702`*`_757{{^!?;{{ z=<=-IP8M-pxp_5((rOQrLrDDC+2GHg!u%JgRsSX=ghd<6ck`M1m5vl!YGKb33%hoh*s%i+WO7&TM%ch2%O@>s9+uYCFqOm?u9AH^BNQp;s* zL?b>JRA9)eF5ybtv)G-(vPSKZa(MF@J}CO6%+WvN<##e6ED$Qp<>*t@s{W5Q32F(Q zd+c!?Ih)jXxfo15X8sUXewU$5fn#!g6#t8j$R*xAzfa$!K~5nQRn$;f&6j2oE=E(L zkcu2G4oPuHQ3=s;xRgaRDP7BaxrX(rqC|^syaD%Gua{P3rJN3(5$B)Gonx$QFO#!; zVBie?C-p+hb%vdr-^5!_GI^Fsg~?SW?`HBICf~;729ujiKFH)dnfxG=A7b*uOumcB zk1+W-lTR@DIVN<#Xg0_Z?Hc^{)WqUp#P#9-yC2~AId4Z24(nwzTzCWj6$F9SH;psq z44TI+{4G%^85y{`?ZAs)j?wWX*ndi8vQ%U2e%sZUQ`iqQj8V`twQt5l~$5sz!V2^$RNQUef!DEop^!p;(b_6k0POzvpn)69@XGj$duS$pX>PeWhzmVzY;_>$Ms}l#t%czQ}KSQ z2!5D#;4+qvqaE-tsvPon;A_(M?$jN>%_9>K~K%zRXyN=+fTF|(PbY1@(f>e{R zneW4jG$6a_XYJ=J^C!aK?kF2s?L36L9pUKYD(}bbT7W7azEN2k0Lczm>+Rma2cX@Z z@$=-J!L`~#TU9XXoUA8<*&m5drJmc!gNO&?e^Yxp*F#o5uy4oX57PTisgyO9DE|mG z%-3?f01SQ+$t5IxNR+Dy+^yw37QaXjoqtnC3NjAh!?0Ep%4i)rN8VKzoxl3wO~PA- z$2s~d-kBMp6Y-TyaY1An7PFdKbV!)XS!S2%3T;@WoZ=^&8owZnhBenF5+XmNsqc2t zft)67BC>6XEN^J&{Fc&40peD}TJBsS%UvrZ&|we%E0mHuU4ZOfAt%Yr6Ax_B)3BTL z!9uc;6uonOur|Wl1gjs`=7!No%vg5`|Ei%i?9!G-vSHK-e*(eSb6Z6M0)8U4HEfn{ zYLtip+(81{S!RWnDPVSVlBNz$XO;b(p!*C0v{@E3pD_Y92k*>^p@k*Vq%3n=s$ymI%~y&zEF|vQE~uXtxke8vT84GhyAVXIt2%q8c*Kmu4h*g?Z&xUymJ=iNL9Pku+xNktfqm;Le7;R0a$;;^ACB-5sCK zZA?Z@yf^{O+^&%*AttFm^0tFg@0hi;)1A#ky8tRWF3lZVS*;LX?g zksz+OEuK+F*Lj71lgR*t>IQd^!rsZW$|xGSANnD`jCxm)jK^d7?gcd(3_o-j$15-O zvL~b_LN(tUA+1>_a)jX06kZU)z{7-8mNFsgHQ$p4tGGfYy#O92?g&qsPOatzj8CFH z!@KMF(t;4`K9*11uX#L!lH?h9N8+#ZTiQ1i^8m|Np6U5kx$>C)z?c4EGd;h{c#G%vIlEl|d8t&~{YxGGfT60;|a2m0y#2GN> zRcoSzg}^REhk)cYQ|qN2w4bK*9NkFsbXd#iHr+-|+DTKu(H?ffUVDh#g*=d6Iz&gP zLHQS;U(hG6UM;jqt5jm43(sN^m~fEl)!KcIQ+D-D5g~VHM}nYLm$tg7_|XUp?*$l5 z)ar|QUR6-Nq8w){KZ*ngkn+X54&qkjrHIPQKHMt&5emYn6No`=-PUZHp*X-gx;pxD G+5Z8Hulyna delta 2190 zcmZ`)OKcNI7@pZ(uh);YW5*94^?Je`o*qWA>ZJ z`4`7jS5@N#e#W1_&5doU2k83d@SArGYDiYhj+f$;yi1ITA+HldHsysxODrXU6Ezgz 
zD7zdLI58s*oH%e&pp!OIM$%B%X{pN)?vg@!^XZ|8H03%=Km0sJr!nLX4Pu&Bwsgo!_=+uSTdls+RIyZ+dH@K_U?AcYTbGa)3P+}6{{Soped7pU}z5$Qby7%<^bJy}e-U1jx?9 z3hR)`2$f$J_Av#`upfbFnNGXMidA#Gx?VPeNX2qp#uENSbcCwD9UZ@ey=LeKB8-Ir z**SPCDdxk5ZfWIt){PoxP=n3|RA<=Af*9Jg8-V)l-eTaq|8MlCQ=NJ32rMjR5=1+c z^`UO3V_^{&{NI(acXm(4HI!dL5*`n+taFB@1%ZW`?%Cf!rEsD+`Sbpb*wFbS$U{#v z>;P^%S%gWqwGV^fD6GBtceBI(cd^fob$SPr4TFV;W-nQ+?vj5i{!Bq0WR&+KiQ(K{ zN?|rQmnr{BV)SZQk`NZcrqM!vrH1!`!EPhDgQOox80tOTb?Tb*pC|Ig*A{aVl;4K6 zTTD{OitMJJN*<)o{nN=JJ>kzLN7gpTRiuz@7+Is+Nc&x>9-8y>sZ#?b(X{m-5+Yxa(4F$U%d{#3FpKzKqy}lhf0`PK1`$WM zP0#-&b?J2b+yt`6LCXw8TVG+2PpG7IEz_;^3M-z`n5Syr)4&W`j zSoMERudjvXpPFdQV>kX=LoXLr$V1_>UiAYStTGdR>_1`>i=`*V~m zS2q{}V<1*d9thkGF-7Nrd_=F;O~(kNO2Y#qb4j;?D1>NOCSV(1g?mBu3z_5Tt7smr zIduyliGAp6nW4)JvjTVpp97lXIvQwu5EkwcZLU$Nz?+6b?Yd)@mzjZdAcKJi05^iXkNxj6gKHjc7m@6hh<8m%DlgXC*b-3Q2EMR^I_icEPJ(~a zu5%5m#-4iPK)M83=%raYNQY=rRDjAfN8_}g3Xc_d=HOj;oD>w=MHRZ%OY^iyCCb*p zwjkb`nJHv~sHPci87`<%V8Q`u*l<;zhy5;KbC;Fpi;lxo;FZ+fLBYjhyH41uQo6xw zmRB^~We2nK+C!0VhW!rjGloFNZoGpOXBsB(3LRuaUB)T%V0JQ_|n1~Z(CuNx#W z8SKgicLgQkN{)|pvfgB{&L&a*Nm5QKu9TCitgCWW*_Er}RR86T;>2`ZNh*rV6;;;C z@AthK3;=?z0M&2a{a(NC{=V1EOeSgI@Af~RD=ofk7@x7Q^)H3OSMc~FwqdwN-f&IV zs+oCHeyzMEzjof1UnlR#ZzLbVuU(7QWBHiLF-|RBPvjG(;U&D}n%PZl82Pl9u4dfG zl#w6uhP|OR(~DF`Zkg&lf4if)Zo|YFr#rS`xX~N7dIRHaw2pg2UZOj(VQg4yW_}1O z#ApQhwvLz4d+heX@F@HdUX_PuKV7_ty{P57ZCl57rOm4@q0HcDSC+XCqykfX%H*?o;Gu~s>(`$Bq1~o(OFlvUq8I)&H9&txe9_{sb95rL^IBLed z$I)ZXo6Da;-GsXbb$h%y)SX3n(%p;lUg>kr-RDl-HS$k*PrB1LtWQk!ZO`mJg>!w{ zd%F6JyWbmj54Z>2L-!n?Sld}>Si zF_cgD%iU+O)^pwg@44!E!0x%idGC<-y!YIixki|sMEwQ8=7RS^_bbT1=+3ya?(Bxs zeaXA9Vdm$(GkRpNd=aIW@fOc{7rlA+ad+;X#Z$dvp#C`OU-gcA^WMd>iFwZ4v-6jF zZLgs117O9Fdv?D8%=)T(Zo|B2ymvBxz8%9F*jxV9cZ^TWn_maKzV?Z^vT7Rn*8s~= z?{)VH@BH@+_et-y?-}2BxbJt2cL~ea{_JZUp>wwZRaxy~_l*sK^%W`So@y^7-+lA- zH)s5oSE?*lO4)kTU9Ne4GWoV&EPKx;v-mrfZLI`L%?2C2n*aDpv(}t#tz>6rN{i(z zOShi%XIF~#TJk<=Zy{%fao6)&rDkgddFOI5Sh{bbC5*jOX}HbXK5ETVNXAM_Ug<`m z*eESERiUX|Pvs(E^z}DidFAD|!i;Xc4cOV|Nqzs2g_^Ac>fp#DB_*dx>h&O|Myt 
zj+=3Z?wa}3hlT^jbH&Jwgwcy$fL$zHx_=pye`;JUMYb%>d>jwhp5gy3B!RJJbWMW4 zW3C$os{n4cqQ&Yx&YK4)YsY**>#H+W0 z6_l;nxzH@Ay%;LV7ne%UUVGaIj$QL=m1?mtH#c|fVzabd_ZoqJ?UhRK)#dBgE~{o0 zhkf>`YcIY!^J1lby%}87OozX1#UcEH}rUP8#i_8ceeV;8P_i6lTxY zn!qIg0y6#n4}C*5&28>ya)8NUCYxJVM_4?HY@%O;I#7Fl1`~;Gp z;mHl@lOs-4Ej;#wfm5`<4m`ODOvOoB%0cIaPo2rRh337E0eX*NR?zr0OOKIxGTc!x)B)A6 zS?Y(qos3{E?qk%o_Hwl-^ZTG2?K8mToDB1pCQOWW+f4G-0g9 zJBfa~8yROGtSBvsV4}OHgZ566Jvu2p^J=>BWN#O!?QYx2B2EHp+j{Lz-2GElW!uA7 z)HAJu2X-er>CKam;Vq)_CQ^PzRX9qNf++OvC>>B)c!&*e$pwuxJ!O?QD z1z~AIjrYmc?~|a^6q9{Oa{JUzG5-vc7mt%!C{aD&^(o zvLD6+RRn)(l*5?2(kRv|CG{+)NESrUlAP3YQUQV6@B@&U7sjr5WpYU@)T&hiA&@ZU zSIYG!NHOXM80BfAvr8bgXtCAu8fXDeXsRHW(RB4V=QZ1*SqZJ{zIud3tFeOTPH5f1 z@9m|~!Cn_br(X1LgyzZ)78u4Bo9cE^xqNm1Q6!Ez1X>$2=gg>e+B{??t+Qqpls9dj zuntH|*0esDwc0~HI_ynwp9Cw%+>V}^Ju^2uw@s)Hp*4m_2=h0P0pJWF5p$JDt@G9@ zv}}~@4u!0>7+H-44v5d{L^@H|z6XJelG!g=7Ll&wsK-$sCEAiG@7WEAX^wbdZpddD;?1uBh<0fo9H}v^ZQv8 z??dK8izI^g|Cp!FKz8F-k!URv$ScUa0n@BwLvyo4f3tydqElAE+fnNTk#1D>0Z9XW zFS4oc#R8OEX9~Tl7+fP?U9|YuuX9wn1~LX zT1*ZIX9#WAoPuJ!3gO%ZPw1E%)+%{}r6veS@&a-zbx`;0)yOE#!KREw8%Ses5kMfD zJ6J@mFnNv1+emWuR*v4O*d`p8_R?r1>ooQIH$RDl@K`;G_T097-vq1#QbLzq>8WDRhg523Bd znSsKq&{o&XkBp9`X7O%LC$esIoDCaX139*=n_bRHEiiV=Q0Ld-f}#~izM@_W;^-fN zNMjFZXmkbLL>*2Q+=2Y=0q(#v9p@Wlk%(=W2C`$$8BWui_zk!xxrZawQip5*xe}m zcA)XN@d=G2dTq=J@P0jrbwQGD@_P;mviF;j-=XhEiB#+XM8aO(VfKO$(!XXk;f|nY zv}0D2@TEbs0t!0nE=L3~KHw6Op73+(`StI(o|ghkgw}*c%S+U!U_LIxn-l}!t~cRe zz*zw!@#p>4Z@nmb`+YjE@4HCa;zY_mTK98p=fase{|hr4Mqjz~`kR;iFxrA%RrlNR zY|lT}jzBd%yZ8lv<`%!Ove_@pnRYCjz1j?lwOm^LET#+{IPF@RNkDyWt*8mqY8yci zi;xT*@p4&^Zy&N~`^)uQT;mo6oTh|zHkqZ+Y^ocqe1yq>*$TJ$7V5r?TM%P?=sSs_1vc6K%k zlcWJA)-(JuypQsn4jxZF~Ih1nPc02{(|pQFBK_5thbgd8KIM?>j;7r-|{s-XqC35lpAM@mk zLb4;s(3e2bzXncBv{fBcf*G={FT?*l@=*Wl<$@32=uT+!D1Hf#FvF2;dmuvXs+DjX zK9RkzL_#MKSdt#$;DEgpKxGs-&T)IyC>$GBAsSF!WI3@0M|0F+F+hB7vlVcb zRzMZdv@|6`)qMg>hS36y&)SNBPoSoiHEIW<%7+fUaiLu=wg$#U?<_)Vv7y=OMRp&E zgdd{#m+@$0!y;_busZnwE+YfD=*4r^B`}6d>Q^x&m)xn#ypLKXjGCmRe!xU9j;pAL 
z>NkH5t#v#-dm(8h?RX}hNhT+3#kLZjZGXfB{%zNDMn!j_hLeDzJxad0c)Kb@> zhGW1(P3?vr>T&2pBj_>Eb0)%`F%4I{C_7)Ix5ft4C9Juy&73Atkpl{1{5II*z()=3 zLhBYo@Jw0-r*RTytWl%5fa$zuR->pDw4km5&A|S6Xf@i2bQ2vgdX9JpnC;-7INpe# zG5opPU>*E_7tgv?bzmaHDBrLgARo(a1b!=ev}`FwAyiPJKiiGbB75;Asn@k5G85x#&hbVonpN)xc2CZK~ssh5CU9Il7G-F?u>Oa$XH zu)$cKa`$wsb#vX^z-~5evgu$ybuyd{POW+XR@rp-AavV3?w$?%pkWyWslzU&*RAd$ zvd!S|I!KPE*-cBCXEn)p7h$+p8w_Ej=$9%L2*h%usXU0rxYsB(U3#0M&~lo}*T&FO zz=ihIn~;}q9FYx+nk12>Y@_siJCVIz36>=L*zUc0&TrkXS9o6h6$l8-WBLa~45{G? z9&y!*z8IxPoH(X+L?L(RZ#LXnJsnY0vYj09C?@$wcs>XBg?qC7n07VviD5UJbJPDX zX5dw=iv}3JZI_@6p%Bq(kDw3$LV*d^mxlHXqYh3WFwiU1rKO~qABTZ|QNh&4{`;&j zMWEw7MAC@N*<@#7{CurgzwQ<<{0LozvmY_qx&M#%(4)5Pnx{bVExVpZM$R^|iZ}zr zFYsf^*uSZT-~ST?E3mh(qMz9DIB(P6!f1K8zlldSHEoCwG&FMAftlWu#BQYomp(&; z%0~atp|>VXu%aMgJf{g4B!YRxMJUQO`>|WM?CBI92`LPbAr#x1(RhRg$B?sLY>1E_O! z8OlF233~|TINDE%Ie`9V+-AAG_aU&qw2h4u%zWm#>H#D$_u$;7=V3#f+weLdJVC>+ zuWDG~$fnIyToxy?h`xTPh4vRhS-qA~l*GQ-NptU`;kWUKZSUZ{u; zLWe++9sgBi1SKJ0801*B%7~8tz+AQQYvb2J_$3m^8?0H-IUj>J7NHf0sF*s^F}pVX zu&{B=4(t=?rf_;~fRR!tAxL4sXB!8J;+_b+yaw!#goI114F_cktOOb6%}KN+I}zwa z5ecD2+~|ho8huaguVI&fIN4SC;~)Leqt`zEgBQv-pHKeIueN^g1-fQLyn=45vOkCu zz>!6o3hj2KrKmnao2(&w7g??LK;gL~M-NAX>**MGOL?x?p5ZWpTa{L!2ME*(f|FlD ze-WG`q|{$$B6uh$N_TB&`Echi;<AJo{kW%mMedi{KFeR}bu zjbv|Cx3_8pwZJ^Ebwmz;HCzEWq31(TD^7HNm{qUfrFA$+c)v9(*dmA_xg2WP zB60?}^b+#}fcC7uHfr;TjaFJd;yS$ZpusDE7DxOwj@xdesDFvNALH>4A&FZ10L}^1 zNyh&eY5Xf+kekGz7iXy#AJ_@jt$4T1a_Dmwn=}79lD?;*3fEDf-AqOR*RfV(*5KoW zN5R(6ghEHiTGQ-_xC^c^*tFxR@T{2#Ai{1W)PWs1Q5Py)s&}%iCzL5xx6UFeG9Y|6 zzROJXDacHK-C!P5V@odn5O@()u!j0yhRY?0YTp1rTW+G@^6?3{2#WJKW2}Ai{ycsxW+j= zein(mZ;J1`4*wCUZy)r9-h0C<4ICAm=AbPv4^{3N)aWgU%;W3-A3VL+KLq!nme6hRzSmq>3;i2%P@WSPw~iOe~0Z6SR6d4jgHS zeeZ`3__StggT7HWiaC;d1sR3);h>id^)(qCW1Ab_Cu19MaX}D4gi((l%oe1ond%UP z6s|);guxcP2j6tTxv3*GDUH+ur2FK*V%ZKN(&cmV_t^{OgSWpH`)^>U-bXBAK3}#2Q)W0 zDl)~R`O+90> zj-?daN_e*Yky`jCtavkyq2q8p`(9!b4hSq3j`rd&azjp=?IS(2ea837Vtnmlx|1^# zv4t~Gg!g?q3|>IfgQI@nSh5CcdY}4!gdsb1+>5Lez9JOJ^U9cb4-1(|vpxO5PB(2T 
z0O3Dzu;@SwSfjyGlgS`Vq*#>cC6U!+p&z3m#f5lsabwa_tana?>9+F zY=|);9UF?sNk?(g>2Q3W_k?)Ma&XoQ_kSNJsQv=i%&G3L|@!Bajy~K|SX9kAv^+*8VV7#_1|e>#riru-IzNkL^_7ljX*p;v zF6ubtb^wj{&n*e|*gmk!D{ca*)JIIdB(-mr=kfTzfP^Nu7`61;GCqkEauC`SwJ%;* zM45g<2eKDeCt>~`>Nr*0q-cN*R-?>8SBgPXLK|E`PBo4PY?3-xCt8T(fpe&kgeE0( zQ8I)1%2h~QFER=#ny3X_+(DWV2ZqyQaDNROY)^_Y=+EMYU%lCw?Y-&>oWl!f4#zaB zcw@O1R9?cJ2V8W~oQ?K{;tXM`(kPUcij4+t(Zb5Rbta7WXVEffja@%vLMEub&4f;o zei(TZBV1vKj0yq4C^?FzEp5M`TC zvoI$gfnO3Cz{)6Wp?OnYzVFxm5qj+0-+w{f3GR>cB(3(Lhwnsi(^*(h|C#fUS}^e- zAKR7Q`TmNFJe;?FhSpJTfx?Ti;iNM%tXRA4Woh3};IZ-e`+&AO6hY4;LPnr{q);%C z!fT2Ya?(xV7uV>&=X@W=A3_eeLJven*hx}&@Q@#6+J&cX!(5D9i>!+Dr+Ltcs(GL} z+~0wNTe%2Q1W|W<&4l-Hq7#LGQErd{*{vWBLbx0xz!mn$z1_)a3AZ7L7DJ$G?_Rkn z$4KtF>F%2{!~l`obphPu?v(6fddk=<#t> zQvp1e!GYjla40xj%?6JIN0!EJ;ljL%Al94a9TT1nu33%ZP9bg;!mIoH!BOORkM97O zMXEE}NtNMGb@yu*zz-OiwtMp)!h8{#&%q7nWA0=p!@JDx{?$z58|ZbodVHCyjq26P z7~4>&KOwb8f|KHX#fJ>=zNWSDv6^ZW(c{q|w`9r81?CZM0DLBsm<+;>h^{^{T-Y90ZEd+*88d82U*J!YgwbL;uI zljFpdRAU0|v(o;}o$b?XcXTaIP2h!HeHUl&IK0j_yv|4Xp26u(MjKk+VZeZ!bcrP$<(`Z?lInVTpbTiy2mlf=!|vGY#6;0f~Ol7)y6v$i_SHtGgdf@=Nz6V z@H|P1SQqL;P!I1;3ha!K0y`7#2~uEZoD_%^i9@799nej7pMr1P#783BWvr9#o$DWV z(q&?*z(?pc(LL1xUDCJh?vc+^eo{8zV>eNsV*Sk;-@23djuAZF!6!lDsCjfDG?VHd zfKRlo3+Lc$fxvCs7tTBrnho_at5KL!KZjfxg*?HVy$s--#VoOcdBot^@E3+xr;sccev0%OQ80GuT``?*gmwy zueys3`oIpTBaGwO=APF(*y5gl`07V7c7dz^J!IO4cVB(rho=Jhz#eu#1vs+5v->F= z#v}-GIQ$qz%joT=k?)^|`ZaC}H^}R_dy6jvc3s&=9}(gG^M{dUZ0uYac2%SS1geSq ze5a@-46bdi`Uy(AE+wGOqE(iHVGb7vV_;Ce*6}IBz##$Ydjj3I*yM2Dzk`|`a4&Gp ze~;s8z1xZq0HFPD|ep6ALf^UuH~6(bQJ|Scbl`)bs-h&L>}L1_3@Fz*k4% zuvBlUu&-Rc96CrYsqf3UH!62H3ZFYfF5!z1Og|5&zArOn1_S4>Tn?>yko)|(t8ayo z`E#5nHh)g$*Pj*)bIeuTj`dr(3W5no^=*NzRrdV3J1WJ6adiY21A;L1QZXnkEi_df zBlzV7Ob7K{z86&z9dfwJ++8LTmx!Ym?sDTKqc7iS;m&QCyaIb0x3uw*Qy5d8eA0mL zrTV6_zf<-6CCtSOyZB6km$-3J+jDb!RFzB2Gda$Lya~Q$H*b}1hc?#Ra>G2TuTO~n z3KRUS4qW_q4*MpDnMGWxETH8~-l`Y$l@hKh*aKVce={#j2~NGp;t?kQ1j%E-DWCS; 
zS&O$ylDG|V2%g0W>jZ9_j39RaaSrQ)Ben$(jdQt?^3i62E3+|m6|MTvItUSi?v$zmCJAAn$OIPzKX2{$??*%Tg>X3 z)7i?bH=CWyqW|3KGpEmp>4ULjD+UVFS(spG0y zeDnv%;-V6zMzf)Z4$aKeDh;nczI|kuG5UH;_WFv>w4*aKf#(P9ai;Pf^dOuX_zuJ` z1mBNbM#|Y?M6O1KcC+OxqJ_vp7)9;mU|>5TWSp%uvTf_xY;GZp6$);%gp1g5a8}{u z>I)9Gn)rSJXdU1baaGq}@zpP&Ucz?xeh)4q@CY3a1pF8U#2lamzgfEl8F=Z{*I#Bg z#qYgB=dCy2ys9~j!=XA362+u}hfjhOpea76(rq`)Y_kZ%@m>*MH+Tx41U3NV{K#@p zsrj=-SkO8itzz7BMpx7Ks=x=uc}6FpqAev%K&9C)rTevN&A`{!|3-CgZXfnc-gwY_7ib|Ng%~qgj3gV+H4Ah?x(1#Z~;Edo@ zLJ@M`D70{^+N#vWiVV|4$^QLF0Ug8zU`2dG#I{^(Ue}+EBwoH#g1SU({tuiN_p))f zQs1T7%k2*mG89C-0W&CJ7eFGrN;lk5|U37W2os9{ix~h)=Y~5F?dJI&bW_@q1TP~bon_~Zy(2*oN!Mpo)p7w zpBNE4#OPfMUr#kV-BThW# zT`X5z&v9P%@+J2vCyn1!y0PLf)NAQR-d}j!uRo4XZpC|irCzDeG*;5n(}iLwO{E*B zy_uDKwc@N3-OOe5&=#)SDAXG($Xi!1eqBdP7<;8$6ZM-OYW1T?h6)RA;q_d;R#>RZ zTwMxRW+Gwq#mkp2U3@7_sn#2=^vd;G=%^ws6!ul~ue&)>zESqd^A$IA8nRsTb9q^! zxpx%ZyuU2n9GaK&m2xxhV}eX994Ib(Zmv+T-N2e6H(v{f%C)jzrao@&rduv8_+IGn zhvv$)Vm*vCWW89%3URNHueiB#wG^g2x8fH3oN#ZH3uy7nRX69Y)CvXq{H;H$Tky^y z<7!|T8nFwo3IlAy$VNopTUt>UQ4zZn&qjqU;vkKf=w~w0v-Vq>rDfw+wah>meZlpy z>fG%56-@S_cC8TEvJ~ZQJSHCRYe;-;Rcq@6_CR0Laz+4lwNpdR^sNAl1Gxz0gfR~E z6Xpc9Mg3UY7FI_up?xtjLUr*+)H!QfJ0T)yNd`toZ>Kt1yN^cTOU#bhF?7ub8{=<_ z3Hy#6m@ilRtuvSfB`23@^w7<4~@{6 zIUee_J_ND?wne!UGvCWxcI0%Gr%M27J80v_+50qT<0GJF7a5$qW3 z@S~QAyb+k=z+*~R0p`Fw*DWoY*R7V-inOAwm}+lZfrSwfJkcN;#5$37j0Bf3g(V^= z*&QB>8MMW?_YX)NZw_gKW?wWRYOFUw6Kb~2G6e8zfd+h!VOHSybwiEec~YA@YWH=t zmW{T4`K7=HJ?J9xrcuO<0`$~dXD8(bXhqa*>4n*-#CzzC5s_i)jWsrUciYctwG)Be z9z=io-XUS^)Qb4dj&a>=#am7*5yXGes;zk7P}}y|TaG{69tqGEB-Bj!7NflbtKeKk z468&z-w-%Ul#k0W0C^!dD*u&7f}eDZBI-6vfnJ0-%-C~G=1hp|#HQAXOyoF{8MzB- zW@i|Y`C7@9k5R*EBp{82JZOWcifKTRMGRHdWgl!GtlTe`mg~!2X!|k`Dyx;kWW83q zl@lwqe6?JVGc=ErV}U_bA|I!Um{%@U>nIU!hf&W*i;LwNm9np}{}?rx1&~cN-)Ois z5k^7bb?Il4ibf`>gWe4Fa%jwZp`HV!uHd;9>UiD4b8{iIutUX9n5i($gt1~>-poru zYcm(K^gewQRC8R98prg#deVsMPoT8d*v<8$x^a8PX!dm}sXKg~NRAJ+Ku6j8rI3l? 
zVUqe=WB?wGky~#OX{fx>iU=JgGa%7m6hT711+pHf(^@#~< z)sSb9vq20tkS}@{<+@r&E*IKW*IQuz8u9#m;dRCXs1Nc58ce8( zd8a5pN6BX>`81MH7oqOUr>T6I5=IzfA|fpkB8i%)NQu6#&NP6x zEe6F9-f^*C42u!;aLAZIO+xGxV|XWF8o+!&ON!UnE%w~8v(UpVE!z)1bs&sDHoBF2 z$*robd+F@X{R)L`MK~t+bM3a__ z`8H7=38uT|yM3)_5CN@#K4`=%&|ob5S7GDS>=T-I{3e(#{a+=OPFu9vVBb)_3=0C2 z$|3^cK|+cIrX`}|8i_1F0jeTa%{8zYpo)>rKrFo16XPsdVhOo)RnR>J^3U+&QhW|ei@7cvIDJTM7b3|>Aa)m3vFvNyv9SiFA)ooBQu8*zT~wi>Ar+h?;A@HvrLm$Xaj4oudyHVJVLTq` zkG#it3z~VLFXEiXwOyLpX+~fFG1U8vt~g{GouOmup4ut%k{aq(@OZSliLP;=WzzC6 z%FLTW2L@mbY`F$Q%o61Slw2|pB@tz45mO7}RFPvN&Za_+< zD9+}B(Xmh_?j9#@vowk}Cx~=(tOXtfeJk2AgvJ_9Y)%qELVgpim)`x>{~Wpg?teT} zT6)^~?pGWC_Dsf{Q?dv|Zj`)Dof=9AsT!f#EH~s3<*Z_*+z3rLMi>u6vzosp*Qg~0 z--YsLG^8u!C93O(aaV5iAca~|i{vbljI%}Iwc!EDd2p9VkJ{g)gtVK`@Sq15@!Z0* zQhpF@Z=uiIcsx>6`WWjhXuO92z5!sX1H5dGcJZ%=BVCLr&`I=u1a*QZOWj{Xri8y? zyxrAp0kaFONDx_s4hi{p5RgNjW%0zCPWf@I=Del71*3#yFQg3WNUA^z8FB`Yf&L3_ z1vG}FK#p40^ft{2sl6Dxp~-V=@OHw(K(fS?KkeJ-A6W#1(SxKpJeNzTgEA4BV@CN9 zkEAbR9+~DZ03T1bfK@_!-H>pu)J*IZJMD2wo`g@d%hN-B@k4_ZN~Kz|CUy{WN#dCw z;9;Jb)|!VOI8&m#EpCVnwoVE&F*i8XBmjP!=3?M3Q|=ukU|d~iSgu|J|IDvI@)7DN z0rLjs$o2>=QpPjUO{8IxmcK}ChVT^=;~jsA#_+ZYEx(Az8$&`&%mI5Mv&aU^vfxZG zZd-bD_z@qojY$zJCk>vN)c27A#5DOhIllxboec2^WKaMN!PKDl00VI1`(WByWbzZ` zj`169#MBIU-VHfTrZ2bK-FCa%PV=Ho8Tf&@i^jsf5#|FmaQJQ3n5JQNT^4HnY8>iet42@@p40{=lS6}70Jl){6`>M$48k2?n=>~f5y4PKHFJiHF~fX>SqA5* z22xv7Cby~N*CwfVWsu&;!e>-&crb~{?1w`7Lqh+z%*$V-z6vME5wUoNR^AVfTV#wtZh0jLF5F`Id#@)WFI_r za)Q$N{D4@BaeGv29_bRvc6Ppn=6k+Mg&BklKXG&NH_$5ElrW&v8@Wxm(+>WW>N`jt znu)eWYLAyj!k?L-&s+mAiPFZv2D_gN?G;jw@s|c|`DSRt>=N3(=AWTc_pA4U(<+ul z{h<6VS6NA8iP+OJSW4-oPO?ho;J14xp_DA4o>TrtO>J^^8OJn~Qr zw9(#g8}C855B4CeKz-F{S-v56`v!b1@TbTDIMl2TBuD^98MuXU*A#H=32PU-2q9U( z9sI~Dc!3`kQFenS*}Z8&K5dL8OJ?a{V76n_HvsPtu@NPc$ZE4`PVla4ejFAU0t+8l z!hzAO-bsF^z6@&OHLHC98@ptC6``a@N4dyo~Tl3>x%&-E)}@mFV=`k3M>i9f9klt*r|rO^&Lh zk5s)((>j0RxOY!ahtW&3FJ8Xlg;A*I`Ks5n)7`)oQ&=+s=g-OFJ$fB4-ZRqad-`-U 
zmQG)*`}s^9y=D$s_I`xXj4-U!b*kP6;%pc<>MFF}1Z!R<_#;cyl#!W-Q&+A(;jq0V%>7-m{We^Lp_UeBoFZY)4S!pk@;9u>pPK(R3%0~cTdiY^2a8QGh&_=&~T zVqXwx!|eza8Gf9*@apHRr6i+BLUBnZ|8+X~_1gwck-Wo_7-Z=b|2}@Ew4T`i??1Z# zU(Z~F#*&im(sEh4UalnbBGa#Uz%S9D)0Dh`B(&yXV{N*S$!Q2T9$d2s>?|O}QZH>f zd>O(jLK441NQRi3?5Z*dg(w3oG^%CRenWl!K{&5s#gFi?`^zy2!bkO4(qVqM0o^Sj zN7@=RapJ+fAF&iV09Iid$ZS9tXG*H-YtwM}0LpC>Q8!33MBM<@#c0dqNFBAs0S}OW z35E=y%<&Aw>)^L1n0$q5SyGK7ir3NJ*I&{`nQ<)H42!PP+I-^RnNnJX%4TSzPaOnh zohtqRsS+v#G8iPc@lufZ6UwoaAYq{5S^BjtG!|9SS7pOkRX2|?onI&Q6wb}5jd}ip za^Iw6JAuKZQ6XS;qTfXi1p?gxnOXW-J$m2z@rK^9wePmm2hwd?xt~LU0*i#PV0?%} z(U}7R``~oJ$@>vMS~TFq%J`&~sT`boFa`8QXBAFBL{Af{1>?a)QIlG7U}WgzfC|V& zFz1?sv&-;el+vtHdowdLX+#ui;8+_hh)*(%RaKatIaz2CXb={Dh2OD-yqEWVDar53 zVTT*?uP7UXQ;HhGWY4UkAUK=RFeWxn_JQ-XsMp7sz$06dtn4vz9YBRMZrdagDS`-h z7f<#LbSaQR)B91L5Sfvf1S(`94gi5?Q+eCuFQ5{>A8Lct=WTo)d8KG>-wQM0|3o9( zG_Vu9OOwZD8)NLPN^kFTNR&sG926}EAE|XCi&p;YJE|9x1{B zNFhW-0&my+7a&`a}M( zKjQE3NBy1tm_P1MEbj7m`+JH*@MQc8{0xb4(#CNLi17`y97OA4@SJ?x--{dth{vJ9 zI>A8Tl;DNOJuH|w;}9b7ikRJ5$1z(3vwaWOLiWL1pQH;QVyu;{EuzoFVj8N#_ti=z zjH~5PyB~Ew^e5qm(K#Gr2Z^pB=hQBv{lGHrKFPzFafW; z-}DdB?x4dC)0wQ|3Jai{Sk>tgix@+wdN9_46ter-9m6w?X9myXL=S6B4StlqjVL1MCyEG$ z#UxQgFhmrA6$vkhDwNJ1Zy$e)tc?S_ORUg6w?7CHCBivAd!`?|JrxX64(q7vWasCuMkFs8En&(fP4fUG*EviQ0 zxcnXD!l;sBP4nCn$HO?TY%P;(*hBf$apmpKpj)<2xD9_HS6#*xoJPgX_T4OtEoJ4z z*w7Ez0toGS#AoN>oeHf5H!qYf@<*8MAsJ6BhRssaRNerSAnTig6yrT}6zrXur($tw zbdwcInBy^@JA~pmR5vz!n|rs!3?5-@KePi9kHdJjg>uvmwuEwi^6KR1+eNF_k!kLG z^y(Y#y@!^6^a0=q-g)E!bp4KO+rXBy=%$?#S=-O!VDCKGi`~SPp(<`=;O5gKH{j9t zZ-oG?2Cehp2B7FiYCw{TOr!r6k+T5>Tu)U{kiU%{k6em@P2(BZ@IgQ=qX;RKv^A)| zn=Ti8Dfw%*xF4XVd>SjGL`Bl_sPHxfDSm=L;dLKm+#$DRD#0@f8rLyJT$XCMn z91M$#6iSruQrDlSiZvqI-QU-y07|H1{yu%D~9RG3$-PTmXSyICbr%FmmA(%@ezDisx5$Pd|;hhRYCM z14cS-$qgzEb8D4(;nXcjv2QZ!p-lQ=;`zK^SeUEJDn`)#I$WGl#uORc%Bp^ca%5zO zHag(C4x)9@i?_Q+P1>CeJ=E3Nx$HQGlH|NV`w>jLcRPpmP?j!@0a2EkRaBEi$?VFyilxuEpd~@;sdV~s95$8YvAC(@#)0cN> za_a_a)$OliTn3i|>tT%Nm}Y4BxDi~gvH_P(63X>rUM%?-NT44;g^&vO6-$-+d>FrY 
zs{mz%u=O`--Uz!Fl)c2_aWf${5!&aJ3UnS+p?MMs!tpT<#Dl8wH^|-|KyDPFdzdvLRJ*j=(-tU~T!AXYDSN*9k1yje&0R;8&7$bOaeURsy)a~2Ged|Zb InDxE?1BFP>LjV8( diff --git a/models/common.py b/models/common.py index 9ac7e0f..833ece2 100644 --- a/models/common.py +++ b/models/common.py @@ -10,6 +10,7 @@ import warnings from collections import OrderedDict, namedtuple from copy import copy from pathlib import Path +from urllib.parse import urlparse import cv2 import numpy as np @@ -22,26 +23,51 @@ from torch.cuda import amp from utils.dataloaders import exif_transpose, letterbox from utils.general import (LOGGER, ROOT, Profile, check_requirements, check_suffix, check_version, colorstr, - increment_path, make_divisible, non_max_suppression, scale_coords, xywh2xyxy, xyxy2xywh, + increment_path, make_divisible, non_max_suppression, scale_boxes, xywh2xyxy, xyxy2xywh, yaml_load) from utils.plots import Annotator, colors, save_one_box from utils.torch_utils import copy_attr, smart_inference_mode -def autopad(k, p=None): # kernel, padding - # Pad to 'same' +def export_formats(): + # YOLOv5 export formats + x = [ + ['PyTorch', '-', '.pt', True, True], + ['TorchScript', 'torchscript', '.torchscript', True, True], + ['ONNX', 'onnx', '.onnx', True, True], + ['OpenVINO', 'openvino', '_openvino_model', True, False], + ['TensorRT', 'engine', '.engine', False, True], + ['CoreML', 'coreml', '.mlmodel', True, False], + ['TensorFlow SavedModel', 'saved_model', '_saved_model', True, True], + ['TensorFlow GraphDef', 'pb', '.pb', True, True], + ['TensorFlow Lite', 'tflite', '.tflite', True, False], + ['TensorFlow Edge TPU', 'edgetpu', '_edgetpu.tflite', False, False], + ['TensorFlow.js', 'tfjs', '_web_model', False, False], + ['PaddlePaddle', 'paddle', '_paddle_model', True, True], ] + return pd.DataFrame(x, columns=['Format', 'Argument', 'Suffix', 'CPU', 'GPU']) + + +def autopad(k, p=None, d=1): # kernel, padding, dilation + # Pad to 'same' shape outputs + if d > 1: + k = d * (k - 1) + 1 if isinstance(k, + int) 
else [d * (x - 1) + 1 for x in k] # actual kernel-size if p is None: p = k // 2 if isinstance(k, int) else [x // 2 for x in k] # auto-pad return p class Conv(nn.Module): - # Standard convolution - def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups + # Standard convolution with args(ch_in, ch_out, kernel, stride, padding, groups, dilation, activation) + default_act = nn.SiLU() # default activation + + def __init__(self, c1, c2, k=1, s=1, p=None, g=1, d=1, act=True): super().__init__() - self.conv = nn.Conv2d(c1, c2, k, s, autopad(k, p), groups=g, bias=False) + self.conv = nn.Conv2d(c1, c2, k, s, autopad( + k, p, d), groups=g, dilation=d, bias=False) self.bn = nn.BatchNorm2d(c2) - self.act = nn.SiLU() if act is True else (act if isinstance(act, nn.Module) else nn.Identity()) + self.act = self.default_act if act is True else act if isinstance( + act, nn.Module) else nn.Identity() def forward(self, x): return self.act(self.bn(self.conv(x))) @@ -51,14 +77,16 @@ class Conv(nn.Module): class DWConv(Conv): - # Depth-wise convolution class - def __init__(self, c1, c2, k=1, s=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups - super().__init__(c1, c2, k, s, g=math.gcd(c1, c2), act=act) + # Depth-wise convolution + # ch_in, ch_out, kernel, stride, dilation, activation + def __init__(self, c1, c2, k=1, s=1, d=1, act=True): + super().__init__(c1, c2, k, s, g=math.gcd(c1, c2), d=d, act=act) class DWConvTranspose2d(nn.ConvTranspose2d): - # Depth-wise transpose convolution class - def __init__(self, c1, c2, k=1, s=1, p1=0, p2=0): # ch_in, ch_out, kernel, stride, padding, padding_out + # Depth-wise transpose convolution + # ch_in, ch_out, kernel, stride, padding, padding_out + def __init__(self, c1, c2, k=1, s=1, p1=0, p2=0): super().__init__(c1, c2, k, s, p1, p2, groups=math.gcd(c1, c2)) @@ -87,7 +115,8 @@ class TransformerBlock(nn.Module): if c1 != c2: self.conv = Conv(c1, c2) self.linear = nn.Linear(c2, c2) 
# learnable position embedding - self.tr = nn.Sequential(*(TransformerLayer(c2, num_heads) for _ in range(num_layers))) + self.tr = nn.Sequential(*(TransformerLayer(c2, num_heads) + for _ in range(num_layers))) self.c2 = c2 def forward(self, x): @@ -100,7 +129,8 @@ class TransformerBlock(nn.Module): class Bottleneck(nn.Module): # Standard bottleneck - def __init__(self, c1, c2, shortcut=True, g=1, e=0.5): # ch_in, ch_out, shortcut, groups, expansion + # ch_in, ch_out, shortcut, groups, expansion + def __init__(self, c1, c2, shortcut=True, g=1, e=0.5): super().__init__() c_ = int(c2 * e) # hidden channels self.cv1 = Conv(c1, c_, 1, 1) @@ -113,7 +143,8 @@ class Bottleneck(nn.Module): class BottleneckCSP(nn.Module): # CSP Bottleneck https://github.com/WongKinYiu/CrossStagePartialNetworks - def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, number, shortcut, groups, expansion + # ch_in, ch_out, number, shortcut, groups, expansion + def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): super().__init__() c_ = int(c2 * e) # hidden channels self.cv1 = Conv(c1, c_, 1, 1) @@ -122,7 +153,8 @@ class BottleneckCSP(nn.Module): self.cv4 = Conv(2 * c_, c2, 1, 1) self.bn = nn.BatchNorm2d(2 * c_) # applied to cat(cv2, cv3) self.act = nn.SiLU() - self.m = nn.Sequential(*(Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n))) + self.m = nn.Sequential( + *(Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n))) def forward(self, x): y1 = self.cv3(self.m(self.cv1(x))) @@ -146,13 +178,15 @@ class CrossConv(nn.Module): class C3(nn.Module): # CSP Bottleneck with 3 convolutions - def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, number, shortcut, groups, expansion + # ch_in, ch_out, number, shortcut, groups, expansion + def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): super().__init__() c_ = int(c2 * e) # hidden channels self.cv1 = Conv(c1, c_, 1, 1) self.cv2 = Conv(c1, c_, 1, 1) self.cv3 = Conv(2 * c_, c2, 1) # 
optional act=FReLU(c2) - self.m = nn.Sequential(*(Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n))) + self.m = nn.Sequential( + *(Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n))) def forward(self, x): return self.cv3(torch.cat((self.m(self.cv1(x)), self.cv2(x)), 1)) @@ -163,7 +197,8 @@ class C3x(C3): def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): super().__init__(c1, c2, n, shortcut, g, e) c_ = int(c2 * e) - self.m = nn.Sequential(*(CrossConv(c_, c_, 3, 1, g, 1.0, shortcut) for _ in range(n))) + self.m = nn.Sequential( + *(CrossConv(c_, c_, 3, 1, g, 1.0, shortcut) for _ in range(n))) class C3TR(C3): @@ -197,12 +232,14 @@ class SPP(nn.Module): c_ = c1 // 2 # hidden channels self.cv1 = Conv(c1, c_, 1, 1) self.cv2 = Conv(c_ * (len(k) + 1), c2, 1, 1) - self.m = nn.ModuleList([nn.MaxPool2d(kernel_size=x, stride=1, padding=x // 2) for x in k]) + self.m = nn.ModuleList( + [nn.MaxPool2d(kernel_size=x, stride=1, padding=x // 2) for x in k]) def forward(self, x): x = self.cv1(x) with warnings.catch_warnings(): - warnings.simplefilter('ignore') # suppress torch 1.9.0 max_pool2d() warning + # suppress torch 1.9.0 max_pool2d() warning + warnings.simplefilter('ignore') return self.cv2(torch.cat([x] + [m(x) for m in self.m], 1)) @@ -218,7 +255,8 @@ class SPPF(nn.Module): def forward(self, x): x = self.cv1(x) with warnings.catch_warnings(): - warnings.simplefilter('ignore') # suppress torch 1.9.0 max_pool2d() warning + # suppress torch 1.9.0 max_pool2d() warning + warnings.simplefilter('ignore') y1 = self.m(x) y2 = self.m(y1) return self.cv2(torch.cat((x, y1, y2, self.m(y2)), 1)) @@ -226,9 +264,10 @@ class SPPF(nn.Module): class Focus(nn.Module): # Focus wh information into c-space - def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups + # ch_in, ch_out, kernel, stride, padding, groups + def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): super().__init__() - self.conv = Conv(c1 * 4, 
c2, k, s, p, g, act) + self.conv = Conv(c1 * 4, c2, k, s, p, g, act=act) # self.contract = Contract(gain=2) def forward(self, x): # x(b,c,w,h) -> y(b,4c,w/2,h/2) @@ -238,11 +277,12 @@ class Focus(nn.Module): class GhostConv(nn.Module): # Ghost Convolution https://github.com/huawei-noah/ghostnet - def __init__(self, c1, c2, k=1, s=1, g=1, act=True): # ch_in, ch_out, kernel, stride, groups + # ch_in, ch_out, kernel, stride, groups + def __init__(self, c1, c2, k=1, s=1, g=1, act=True): super().__init__() c_ = c2 // 2 # hidden channels - self.cv1 = Conv(c1, c_, k, s, None, g, act) - self.cv2 = Conv(c_, c_, 5, 1, None, c_, act) + self.cv1 = Conv(c1, c_, k, s, None, g, act=act) + self.cv2 = Conv(c_, c_, 5, 1, None, c_, act=act) def forward(self, x): y = self.cv1(x) @@ -310,7 +350,7 @@ class DetectMultiBackend(nn.Module): # PyTorch: weights = *.pt # TorchScript: *.torchscript # ONNX Runtime: *.onnx - # ONNX OpenCV DNN: *.onnx with --dnn + # ONNX OpenCV DNN: *.onnx --dnn # OpenVINO: *.xml # CoreML: *.mlmodel # TensorRT: *.engine @@ -318,25 +358,35 @@ class DetectMultiBackend(nn.Module): # TensorFlow GraphDef: *.pb # TensorFlow Lite: *.tflite # TensorFlow Edge TPU: *_edgetpu.tflite - from models.experimental import attempt_download, attempt_load # scoped to avoid circular import + # PaddlePaddle: *_paddle_model + # scoped to avoid circular import + from models.experimental import attempt_download, attempt_load super().__init__() w = str(weights[0] if isinstance(weights, list) else weights) - pt, jit, onnx, xml, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs = self._model_type(w) # get backend - w = attempt_download(w) # download if not local + pt, jit, onnx, xml, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs, paddle, triton = self._model_type( + w) fp16 &= pt or jit or onnx or engine # FP16 + # BHWC formats (vs torch BCWH) + nhwc = coreml or saved_model or pb or tflite or edgetpu stride = 32 # default stride + cuda = torch.cuda.is_available() and 
device.type != 'cpu' # use CUDA + if not (pt or triton): + w = attempt_download(w) # download if not local if pt: # PyTorch - model = attempt_load(weights if isinstance(weights, list) else w, device=device, inplace=True, fuse=fuse) + model = attempt_load(weights if isinstance( + weights, list) else w, device=device, inplace=True, fuse=fuse) stride = max(int(model.stride.max()), 32) # model stride - names = model.module.names if hasattr(model, 'module') else model.names # get class names + names = model.module.names if hasattr( + model, 'module') else model.names # get class names model.half() if fp16 else model.float() self.model = model # explicitly assign for to(), cpu(), cuda(), half() elif jit: # TorchScript LOGGER.info(f'Loading {w} for TorchScript inference...') extra_files = {'config.txt': ''} # model metadata - model = torch.jit.load(w, _extra_files=extra_files) + model = torch.jit.load( + w, _extra_files=extra_files, map_location=device) model.half() if fp16 else model.float() if extra_files['config.txt']: # load metadata dict d = json.loads(extra_files['config.txt'], @@ -345,14 +395,15 @@ class DetectMultiBackend(nn.Module): stride, names = int(d['stride']), d['names'] elif dnn: # ONNX OpenCV DNN LOGGER.info(f'Loading {w} for ONNX OpenCV DNN inference...') - check_requirements(('opencv-python>=4.5.4',)) + check_requirements('opencv-python>=4.5.4') net = cv2.dnn.readNetFromONNX(w) elif onnx: # ONNX Runtime LOGGER.info(f'Loading {w} for ONNX Runtime inference...') - cuda = torch.cuda.is_available() and device.type != 'cpu' - check_requirements(('onnx', 'onnxruntime-gpu' if cuda else 'onnxruntime')) + check_requirements( + ('onnx', 'onnxruntime-gpu' if cuda else 'onnxruntime')) import onnxruntime - providers = ['CUDAExecutionProvider', 'CPUExecutionProvider'] if cuda else ['CPUExecutionProvider'] + providers = ['CUDAExecutionProvider', 'CPUExecutionProvider'] if cuda else [ + 'CPUExecutionProvider'] session = onnxruntime.InferenceSession(w, 
providers=providers) output_names = [x.name for x in session.get_outputs()] meta = session.get_modelmeta().custom_metadata_map # metadata @@ -360,100 +411,149 @@ class DetectMultiBackend(nn.Module): stride, names = int(meta['stride']), eval(meta['names']) elif xml: # OpenVINO LOGGER.info(f'Loading {w} for OpenVINO inference...') - check_requirements(('openvino',)) # requires openvino-dev: https://pypi.org/project/openvino-dev/ + # requires openvino-dev: https://pypi.org/project/openvino-dev/ + check_requirements('openvino') from openvino.runtime import Core, Layout, get_batch ie = Core() if not Path(w).is_file(): # if not *.xml - w = next(Path(w).glob('*.xml')) # get *.xml file from *_openvino_model dir - network = ie.read_model(model=w, weights=Path(w).with_suffix('.bin')) + # get *.xml file from *_openvino_model dir + w = next(Path(w).glob('*.xml')) + network = ie.read_model( + model=w, weights=Path(w).with_suffix('.bin')) if network.get_parameters()[0].get_layout().empty: network.get_parameters()[0].set_layout(Layout("NCHW")) batch_dim = get_batch(network) if batch_dim.is_static: batch_size = batch_dim.get_length() - executable_network = ie.compile_model(network, device_name="CPU") # device_name="MYRIAD" for Intel NCS2 - output_layer = next(iter(executable_network.outputs)) - stride, names = self._load_metadata(Path(w).with_suffix('.yaml')) # load metadata + # device_name="MYRIAD" for Intel NCS2 + executable_network = ie.compile_model(network, device_name="CPU") + stride, names = self._load_metadata( + Path(w).with_suffix('.yaml')) # load metadata elif engine: # TensorRT LOGGER.info(f'Loading {w} for TensorRT inference...') import tensorrt as trt # https://developer.nvidia.com/nvidia-tensorrt-download - check_version(trt.__version__, '7.0.0', hard=True) # require tensorrt>=7.0.0 + # require tensorrt>=7.0.0 + check_version(trt.__version__, '7.0.0', hard=True) if device.type == 'cpu': device = torch.device('cuda:0') - Binding = namedtuple('Binding', ('name', 
'dtype', 'shape', 'data', 'ptr')) + Binding = namedtuple( + 'Binding', ('name', 'dtype', 'shape', 'data', 'ptr')) logger = trt.Logger(trt.Logger.INFO) with open(w, 'rb') as f, trt.Runtime(logger) as runtime: model = runtime.deserialize_cuda_engine(f.read()) context = model.create_execution_context() bindings = OrderedDict() + output_names = [] fp16 = False # default updated below dynamic = False - for index in range(model.num_bindings): - name = model.get_binding_name(index) - dtype = trt.nptype(model.get_binding_dtype(index)) - if model.binding_is_input(index): - if -1 in tuple(model.get_binding_shape(index)): # dynamic + for i in range(model.num_bindings): + name = model.get_binding_name(i) + dtype = trt.nptype(model.get_binding_dtype(i)) + if model.binding_is_input(i): + if -1 in tuple(model.get_binding_shape(i)): # dynamic dynamic = True - context.set_binding_shape(index, tuple(model.get_profile_shape(0, index)[2])) + context.set_binding_shape( + i, tuple(model.get_profile_shape(0, i)[2])) if dtype == np.float16: fp16 = True - shape = tuple(context.get_binding_shape(index)) + else: # output + output_names.append(name) + shape = tuple(context.get_binding_shape(i)) im = torch.from_numpy(np.empty(shape, dtype=dtype)).to(device) - bindings[name] = Binding(name, dtype, shape, im, int(im.data_ptr())) - binding_addrs = OrderedDict((n, d.ptr) for n, d in bindings.items()) - batch_size = bindings['images'].shape[0] # if dynamic, this is instead max batch size + bindings[name] = Binding( + name, dtype, shape, im, int(im.data_ptr())) + binding_addrs = OrderedDict((n, d.ptr) + for n, d in bindings.items()) + # if dynamic, this is instead max batch size + batch_size = bindings['images'].shape[0] elif coreml: # CoreML LOGGER.info(f'Loading {w} for CoreML inference...') import coremltools as ct model = ct.models.MLModel(w) - else: # TensorFlow (SavedModel, GraphDef, Lite, Edge TPU) - if saved_model: # SavedModel - LOGGER.info(f'Loading {w} for TensorFlow SavedModel 
inference...') - import tensorflow as tf - keras = False # assume TF1 saved_model - model = tf.keras.models.load_model(w) if keras else tf.saved_model.load(w) - elif pb: # GraphDef https://www.tensorflow.org/guide/migrate#a_graphpb_or_graphpbtxt - LOGGER.info(f'Loading {w} for TensorFlow GraphDef inference...') - import tensorflow as tf + elif saved_model: # TF SavedModel + LOGGER.info(f'Loading {w} for TensorFlow SavedModel inference...') + import tensorflow as tf + keras = False # assume TF1 saved_model + model = tf.keras.models.load_model( + w) if keras else tf.saved_model.load(w) + elif pb: # GraphDef https://www.tensorflow.org/guide/migrate#a_graphpb_or_graphpbtxt + LOGGER.info(f'Loading {w} for TensorFlow GraphDef inference...') + import tensorflow as tf - def wrap_frozen_graph(gd, inputs, outputs): - x = tf.compat.v1.wrap_function(lambda: tf.compat.v1.import_graph_def(gd, name=""), []) # wrapped - ge = x.graph.as_graph_element - return x.prune(tf.nest.map_structure(ge, inputs), tf.nest.map_structure(ge, outputs)) + def wrap_frozen_graph(gd, inputs, outputs): + x = tf.compat.v1.wrap_function( + lambda: tf.compat.v1.import_graph_def(gd, name=""), []) # wrapped + ge = x.graph.as_graph_element + return x.prune(tf.nest.map_structure(ge, inputs), tf.nest.map_structure(ge, outputs)) - gd = tf.Graph().as_graph_def() # graph_def - with open(w, 'rb') as f: - gd.ParseFromString(f.read()) - frozen_func = wrap_frozen_graph(gd, inputs="x:0", outputs="Identity:0") - elif tflite or edgetpu: # https://www.tensorflow.org/lite/guide/python#install_tensorflow_lite_for_python - try: # https://coral.ai/docs/edgetpu/tflite-python/#update-existing-tf-lite-code-for-the-edge-tpu - from tflite_runtime.interpreter import Interpreter, load_delegate - except ImportError: - import tensorflow as tf - Interpreter, load_delegate = tf.lite.Interpreter, tf.lite.experimental.load_delegate, - if edgetpu: # Edge TPU https://coral.ai/software/#edgetpu-runtime - LOGGER.info(f'Loading {w} for 
TensorFlow Lite Edge TPU inference...') - delegate = { - 'Linux': 'libedgetpu.so.1', - 'Darwin': 'libedgetpu.1.dylib', - 'Windows': 'edgetpu.dll'}[platform.system()] - interpreter = Interpreter(model_path=w, experimental_delegates=[load_delegate(delegate)]) - else: # Lite - LOGGER.info(f'Loading {w} for TensorFlow Lite inference...') - interpreter = Interpreter(model_path=w) # load TFLite model - interpreter.allocate_tensors() # allocate - input_details = interpreter.get_input_details() # inputs - output_details = interpreter.get_output_details() # outputs - elif tfjs: - raise NotImplementedError('ERROR: YOLOv5 TF.js inference is not supported') - else: - raise NotImplementedError(f'ERROR: {w} is not a supported format') + def gd_outputs(gd): + name_list, input_list = [], [] + for node in gd.node: # tensorflow.core.framework.node_def_pb2.NodeDef + name_list.append(node.name) + input_list.extend(node.input) + return sorted(f'{x}:0' for x in list(set(name_list) - set(input_list)) if not x.startswith('NoOp')) + + gd = tf.Graph().as_graph_def() # TF GraphDef + with open(w, 'rb') as f: + gd.ParseFromString(f.read()) + frozen_func = wrap_frozen_graph( + gd, inputs="x:0", outputs=gd_outputs(gd)) + elif tflite or edgetpu: # https://www.tensorflow.org/lite/guide/python#install_tensorflow_lite_for_python + try: # https://coral.ai/docs/edgetpu/tflite-python/#update-existing-tf-lite-code-for-the-edge-tpu + from tflite_runtime.interpreter import Interpreter, load_delegate + except ImportError: + import tensorflow as tf + Interpreter, load_delegate = tf.lite.Interpreter, tf.lite.experimental.load_delegate, + if edgetpu: # TF Edge TPU https://coral.ai/software/#edgetpu-runtime + LOGGER.info( + f'Loading {w} for TensorFlow Lite Edge TPU inference...') + delegate = { + 'Linux': 'libedgetpu.so.1', + 'Darwin': 'libedgetpu.1.dylib', + 'Windows': 'edgetpu.dll'}[platform.system()] + interpreter = Interpreter(model_path=w, experimental_delegates=[ + load_delegate(delegate)]) + else: # 
TFLite + LOGGER.info(f'Loading {w} for TensorFlow Lite inference...') + interpreter = Interpreter(model_path=w) # load TFLite model + interpreter.allocate_tensors() # allocate + input_details = interpreter.get_input_details() # inputs + output_details = interpreter.get_output_details() # outputs + elif tfjs: # TF.js + raise NotImplementedError( + 'ERROR: YOLOv5 TF.js inference is not supported') + elif paddle: # PaddlePaddle + LOGGER.info(f'Loading {w} for PaddlePaddle inference...') + check_requirements('paddlepaddle-gpu' if cuda else 'paddlepaddle') + import paddle.inference as pdi + if not Path(w).is_file(): # if not *.pdmodel + # get *.xml file from *_openvino_model dir + w = next(Path(w).rglob('*.pdmodel')) + weights = Path(w).with_suffix('.pdiparams') + config = pdi.Config(str(w), str(weights)) + if cuda: + config.enable_use_gpu( + memory_pool_init_size_mb=2048, device_id=0) + predictor = pdi.create_predictor(config) + input_handle = predictor.get_input_handle( + predictor.get_input_names()[0]) + output_names = predictor.get_output_names() + elif triton: # NVIDIA Triton Inference Server + LOGGER.info(f'Using {w} as Triton Inference Server...') + check_requirements('tritonclient[all]') + from utils.triton import TritonRemoteModel + model = TritonRemoteModel(url=w) + nhwc = model.runtime.startswith("tensorflow") + else: + raise NotImplementedError(f'ERROR: {w} is not a supported format') # class names if 'names' not in locals(): - names = yaml_load(data)['names'] if data else {i: f'class{i}' for i in range(999)} + names = yaml_load(data)['names'] if data else { + i: f'class{i}' for i in range(999)} if names[0] == 'n01440764' and len(names) == 1000: # ImageNet - names = yaml_load(ROOT / 'data/ImageNet.yaml')['names'] # human-readable names + # human-readable names + names = yaml_load(ROOT / 'data/ImageNet.yaml')['names'] self.__dict__.update(locals()) # assign all variables to self @@ -462,9 +562,13 @@ class DetectMultiBackend(nn.Module): b, ch, h, w = im.shape 
# batch, channel, height, width if self.fp16 and im.dtype != torch.float16: im = im.half() # to FP16 + if self.nhwc: + # torch BCHW to numpy BHWC shape(1,320,192,3) + im = im.permute(0, 2, 3, 1) if self.pt: # PyTorch - y = self.model(im, augment=augment, visualize=visualize) if augment or visualize else self.model(im) + y = self.model( + im, augment=augment, visualize=visualize) if augment or visualize else self.model(im) elif self.jit: # TorchScript y = self.model(im) elif self.dnn: # ONNX OpenCV DNN @@ -473,52 +577,77 @@ class DetectMultiBackend(nn.Module): y = self.net.forward() elif self.onnx: # ONNX Runtime im = im.cpu().numpy() # torch to numpy - y = self.session.run(self.output_names, {self.session.get_inputs()[0].name: im}) + y = self.session.run(self.output_names, { + self.session.get_inputs()[0].name: im}) elif self.xml: # OpenVINO im = im.cpu().numpy() # FP32 - y = self.executable_network([im])[self.output_layer] + y = list(self.executable_network([im]).values()) elif self.engine: # TensorRT if self.dynamic and im.shape != self.bindings['images'].shape: - i_in, i_out = (self.model.get_binding_index(x) for x in ('images', 'output')) - self.context.set_binding_shape(i_in, im.shape) # reshape if dynamic - self.bindings['images'] = self.bindings['images']._replace(shape=im.shape) - self.bindings['output'].data.resize_(tuple(self.context.get_binding_shape(i_out))) + i = self.model.get_binding_index('images') + self.context.set_binding_shape( + i, im.shape) # reshape if dynamic + self.bindings['images'] = self.bindings['images']._replace( + shape=im.shape) + for name in self.output_names: + i = self.model.get_binding_index(name) + self.bindings[name].data.resize_( + tuple(self.context.get_binding_shape(i))) s = self.bindings['images'].shape assert im.shape == s, f"input size {im.shape} {'>' if self.dynamic else 'not equal to'} max model size {s}" self.binding_addrs['images'] = int(im.data_ptr()) self.context.execute_v2(list(self.binding_addrs.values())) - y = 
self.bindings['output'].data + y = [self.bindings[x].data for x in sorted(self.output_names)] elif self.coreml: # CoreML - im = im.permute(0, 2, 3, 1).cpu().numpy() # torch BCHW to numpy BHWC shape(1,320,192,3) + im = im.cpu().numpy() im = Image.fromarray((im[0] * 255).astype('uint8')) # im = im.resize((192, 320), Image.ANTIALIAS) - y = self.model.predict({'image': im}) # coordinates are xywh normalized + # coordinates are xywh normalized + y = self.model.predict({'image': im}) if 'confidence' in y: - box = xywh2xyxy(y['coordinates'] * [[w, h, w, h]]) # xyxy pixels - conf, cls = y['confidence'].max(1), y['confidence'].argmax(1).astype(np.float) - y = np.concatenate((box, conf.reshape(-1, 1), cls.reshape(-1, 1)), 1) + box = xywh2xyxy(y['coordinates'] * + [[w, h, w, h]]) # xyxy pixels + conf, cls = y['confidence'].max( + 1), y['confidence'].argmax(1).astype(np.float) + y = np.concatenate( + (box, conf.reshape(-1, 1), cls.reshape(-1, 1)), 1) else: - k = 'var_' + str(sorted(int(k.replace('var_', '')) for k in y)[-1]) # output key - y = y[k] # output + # reversed for segmentation models (pred, proto) + y = list(reversed(y.values())) + elif self.paddle: # PaddlePaddle + im = im.cpu().numpy().astype(np.float32) + self.input_handle.copy_from_cpu(im) + self.predictor.run() + y = [self.predictor.get_output_handle( + x).copy_to_cpu() for x in self.output_names] + elif self.triton: # NVIDIA Triton Inference Server + y = self.model(im) else: # TensorFlow (SavedModel, GraphDef, Lite, Edge TPU) - im = im.permute(0, 2, 3, 1).cpu().numpy() # torch BCHW to numpy BHWC shape(1,320,192,3) + im = im.cpu().numpy() if self.saved_model: # SavedModel - y = (self.model(im, training=False) if self.keras else self.model(im)).numpy() + y = self.model( + im, training=False) if self.keras else self.model(im) elif self.pb: # GraphDef - y = self.frozen_func(x=self.tf.constant(im)).numpy() + y = self.frozen_func(x=self.tf.constant(im)) else: # Lite or Edge TPU - input, output = 
self.input_details[0], self.output_details[0] - int8 = input['dtype'] == np.uint8 # is TFLite quantized uint8 model + input = self.input_details[0] + # is TFLite quantized uint8 model + int8 = input['dtype'] == np.uint8 if int8: scale, zero_point = input['quantization'] im = (im / scale + zero_point).astype(np.uint8) # de-scale self.interpreter.set_tensor(input['index'], im) self.interpreter.invoke() - y = self.interpreter.get_tensor(output['index']) - if int8: - scale, zero_point = output['quantization'] - y = (y.astype(np.float32) - zero_point) * scale # re-scale - y[..., :4] *= [w, h, w, h] # xywh normalized to pixels + y = [] + for output in self.output_details: + x = self.interpreter.get_tensor(output['index']) + if int8: + scale, zero_point = output['quantization'] + x = (x.astype(np.float32) - zero_point) * \ + scale # re-scale + y.append(x) + y = [x if isinstance(x, np.ndarray) else x.numpy() for x in y] + y[0][..., :4] *= [w, h, w, h] # xywh normalized to pixels if isinstance(y, (list, tuple)): return self.from_numpy(y[0]) if len(y) == 1 else [self.from_numpy(x) for x in y] @@ -530,23 +659,27 @@ class DetectMultiBackend(nn.Module): def warmup(self, imgsz=(1, 3, 640, 640)): # Warmup model by running inference once - warmup_types = self.pt, self.jit, self.onnx, self.engine, self.saved_model, self.pb - if any(warmup_types) and self.device.type != 'cpu': - im = torch.empty(*imgsz, dtype=torch.half if self.fp16 else torch.float, device=self.device) # input + warmup_types = self.pt, self.jit, self.onnx, self.engine, self.saved_model, self.pb, self.triton + if any(warmup_types) and (self.device.type != 'cpu' or self.triton): + im = torch.empty( + *imgsz, dtype=torch.half if self.fp16 else torch.float, device=self.device) # input for _ in range(2 if self.jit else 1): # self.forward(im) # warmup @staticmethod def _model_type(p='path/to/model.pt'): # Return model type from model path, i.e. 
path='path/to/model.onnx' -> type=onnx - from yolov5.export import export_formats - suffixes = list(export_formats().Suffix) + ['.xml'] # export suffixes - check_suffix(p, suffixes) # checks - p = Path(p).name # eliminate trailing separators - pt, jit, onnx, xml, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs, xml2 = (s in p for s in suffixes) - xml |= xml2 # *_openvino_model or *.xml - tflite &= not edgetpu # *.tflite - return pt, jit, onnx, xml, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs + # types = [pt, jit, onnx, xml, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs, paddle] + from utils.downloads import is_url + sf = list(export_formats().Suffix) # export suffixes + if not is_url(p, check=False): + check_suffix(p, sf) # checks + url = urlparse(p) # if url may be Triton inference server + types = [s in Path(p).name for s in sf] + types[8] &= not types[9] # tflite &= not edgetpu + triton = not any(types) and all( + [any(s in url.scheme for s in ["http", "grpc"]), url.netloc]) + return types + [triton] @staticmethod def _load_metadata(f=Path('path/to/meta.yaml')): @@ -563,7 +696,8 @@ class AutoShape(nn.Module): iou = 0.45 # NMS IoU threshold agnostic = False # NMS class-agnostic multi_label = False # NMS multiple labels per box - classes = None # (optional list) filter by class, i.e. = [0, 15, 16] for COCO persons, cats and dogs + # (optional list) filter by class, i.e. = [0, 15, 16] for COCO persons, cats and dogs + classes = None max_det = 1000 # maximum number of detections per image amp = False # Automatic Mixed Precision (AMP) inference @@ -571,19 +705,24 @@ class AutoShape(nn.Module): super().__init__() if verbose: LOGGER.info('Adding AutoShape... 
') - copy_attr(self, model, include=('yaml', 'nc', 'hyp', 'names', 'stride', 'abc'), exclude=()) # copy attributes - self.dmb = isinstance(model, DetectMultiBackend) # DetectMultiBackend() instance + copy_attr(self, model, include=('yaml', 'nc', 'hyp', 'names', + 'stride', 'abc'), exclude=()) # copy attributes + # DetectMultiBackend() instance + self.dmb = isinstance(model, DetectMultiBackend) self.pt = not self.dmb or model.pt # PyTorch model self.model = model.eval() if self.pt: - m = self.model.model.model[-1] if self.dmb else self.model.model[-1] # Detect() + # Detect() + m = self.model.model.model[-1] if self.dmb else self.model.model[-1] m.inplace = False # Detect.inplace=False for safe multithread inference + m.export = True # do not output loss values def _apply(self, fn): # Apply to(), cpu(), cuda(), half() to model tensors that are not parameters or registered buffers self = super()._apply(fn) if self.pt: - m = self.model.model.model[-1] if self.dmb else self.model.model[-1] # Detect() + # Detect() + m = self.model.model.model[-1] if self.dmb else self.model.model[-1] m.stride = fn(m.stride) m.grid = list(map(fn, m.grid)) if isinstance(m.anchor_grid, list): @@ -605,40 +744,52 @@ class AutoShape(nn.Module): with dt[0]: if isinstance(size, int): # expand size = (size, size) - p = next(self.model.parameters()) if self.pt else torch.empty(1, device=self.model.device) # param - autocast = self.amp and (p.device.type != 'cpu') # Automatic Mixed Precision (AMP) inference + p = next(self.model.parameters()) if self.pt else torch.empty( + 1, device=self.model.device) # param + # Automatic Mixed Precision (AMP) inference + autocast = self.amp and (p.device.type != 'cpu') if isinstance(ims, torch.Tensor): # torch with amp.autocast(autocast): - return self.model(ims.to(p.device).type_as(p), augment, profile) # inference + # inference + return self.model(ims.to(p.device).type_as(p), augment=augment) # Pre-process - n, ims = (len(ims), list(ims)) if isinstance(ims, 
(list, tuple)) else (1, [ims]) # number, list of images + n, ims = (len(ims), list(ims)) if isinstance( + ims, (list, tuple)) else (1, [ims]) # number, list of images shape0, shape1, files = [], [], [] # image and inference shapes, filenames for i, im in enumerate(ims): f = f'image{i}' # filename if isinstance(im, (str, Path)): # filename or uri - im, f = Image.open(requests.get(im, stream=True).raw if str(im).startswith('http') else im), im + im, f = Image.open(requests.get(im, stream=True).raw if str( + im).startswith('http') else im), im im = np.asarray(exif_transpose(im)) elif isinstance(im, Image.Image): # PIL Image - im, f = np.asarray(exif_transpose(im)), getattr(im, 'filename', f) or f + im, f = np.asarray(exif_transpose(im)), getattr( + im, 'filename', f) or f files.append(Path(f).with_suffix('.jpg').name) if im.shape[0] < 5: # image in CHW - im = im.transpose((1, 2, 0)) # reverse dataloader .transpose(2, 0, 1) - im = im[..., :3] if im.ndim == 3 else cv2.cvtColor(im, cv2.COLOR_GRAY2BGR) # enforce 3ch input + # reverse dataloader .transpose(2, 0, 1) + im = im.transpose((1, 2, 0)) + im = im[..., :3] if im.ndim == 3 else cv2.cvtColor( + im, cv2.COLOR_GRAY2BGR) # enforce 3ch input s = im.shape[:2] # HWC shape0.append(s) # image shape g = max(size) / max(s) # gain shape1.append([y * g for y in s]) - ims[i] = im if im.data.contiguous else np.ascontiguousarray(im) # update - shape1 = [make_divisible(x, self.stride) for x in np.array(shape1).max(0)] if self.pt else size # inf shape + ims[i] = im if im.data.contiguous else np.ascontiguousarray( + im) # update + shape1 = [make_divisible(x, self.stride) for x in np.array( + shape1).max(0)] if self.pt else size # inf shape x = [letterbox(im, shape1, auto=False)[0] for im in ims] # pad - x = np.ascontiguousarray(np.array(x).transpose((0, 3, 1, 2))) # stack and BHWC to BCHW - x = torch.from_numpy(x).to(p.device).type_as(p) / 255 # uint8 to fp16/32 + x = np.ascontiguousarray(np.array(x).transpose( + (0, 3, 1, 2))) # 
stack and BHWC to BCHW + x = torch.from_numpy(x).to(p.device).type_as( + p) / 255 # uint8 to fp16/32 with amp.autocast(autocast): # Inference with dt[1]: - y = self.model(x, augment, profile) # forward + y = self.model(x, augment=augment) # forward # Post-process with dt[2]: @@ -650,7 +801,7 @@ class AutoShape(nn.Module): self.multi_label, max_det=self.max_det) # NMS for i in range(n): - scale_coords(shape1, y[i][:, :4], shape0[i]) + scale_boxes(shape1, y[i][:, :4], shape0[i]) return Detections(ims, y, files, dt, self.names, x.shape) @@ -660,7 +811,8 @@ class Detections: def __init__(self, ims, pred, files, times=(0, 0, 0), names=None, shape=None): super().__init__() d = pred[0].device # device - gn = [torch.tensor([*(im.shape[i] for i in [1, 0, 1, 0]), 1, 1], device=d) for im in ims] # normalizations + gn = [torch.tensor([*(im.shape[i] for i in [1, 0, 1, 0]), 1, 1], device=d) + for im in ims] # normalizations self.ims = ims # list of images as numpy arrays self.pred = pred # list of tensors pred[0] = (xyxy, conf, cls) self.names = names # class names @@ -672,22 +824,28 @@ class Detections: self.xywhn = [x / g for x, g in zip(self.xywh, gn)] # xywh normalized self.n = len(self.pred) # number of images (batch size) self.t = tuple(x.t / self.n * 1E3 for x in times) # timestamps (ms) - self.s = shape # inference BCHW shape + self.s = tuple(shape) # inference BCHW shape - def display(self, pprint=False, show=False, save=False, crop=False, render=False, labels=True, save_dir=Path('')): - crops = [] + def _run(self, pprint=False, show=False, save=False, crop=False, render=False, labels=True, save_dir=Path('')): + s, crops = '', [] for i, (im, pred) in enumerate(zip(self.ims, self.pred)): - s = f'image {i + 1}/{len(self.pred)}: {im.shape[0]}x{im.shape[1]} ' # string + # string + s += f'\nimage {i + 1}/{len(self.pred)}: {im.shape[0]}x{im.shape[1]} ' if pred.shape[0]: for c in pred[:, -1].unique(): n = (pred[:, -1] == c).sum() # detections per class - s += f"{n} 
{self.names[int(c)]}{'s' * (n > 1)}, " # add to string + # add to string + s += f"{n} {self.names[int(c)]}{'s' * (n > 1)}, " + s = s.rstrip(', ') if show or save or render or crop: annotator = Annotator(im, example=str(self.names)) - for *box, conf, cls in reversed(pred): # xyxy, confidence, class + # xyxy, confidence, class + for *box, conf, cls in reversed(pred): label = f'{self.names[int(cls)]} {conf:.2f}' if crop: - file = save_dir / 'crops' / self.names[int(cls)] / self.files[i] if save else None + file = save_dir / 'crops' / \ + self.names[int(cls)] / \ + self.files[i] if save else None crops.append({ 'box': box, 'conf': conf, @@ -695,45 +853,48 @@ class Detections: 'label': label, 'im': save_one_box(box, im, file=file, save=save)}) else: # all others - annotator.box_label(box, label if labels else '', color=colors(cls)) + annotator.box_label( + box, label if labels else '', color=colors(cls)) im = annotator.im else: s += '(no detections)' - im = Image.fromarray(im.astype(np.uint8)) if isinstance(im, np.ndarray) else im # from np - if pprint: - print(s.rstrip(', ')) + im = Image.fromarray(im.astype(np.uint8)) if isinstance( + im, np.ndarray) else im # from np if show: im.show(self.files[i]) # show if save: f = self.files[i] im.save(save_dir / f) # save if i == self.n - 1: - LOGGER.info(f"Saved {self.n} image{'s' * (self.n > 1)} to {colorstr('bold', save_dir)}") + LOGGER.info( + f"Saved {self.n} image{'s' * (self.n > 1)} to {colorstr('bold', save_dir)}") if render: self.ims[i] = np.asarray(im) + if pprint: + s = s.lstrip('\n') + return f'{s}\nSpeed: %.1fms pre-process, %.1fms inference, %.1fms NMS per image at shape {self.s}' % self.t if crop: if save: LOGGER.info(f'Saved results to {save_dir}\n') return crops - def print(self): - self.display(pprint=True) # print results - print(f'Speed: %.1fms pre-process, %.1fms inference, %.1fms NMS per image at shape {tuple(self.s)}' % self.t) - def show(self, labels=True): - self.display(show=True, labels=labels) # show 
results + self._run(show=True, labels=labels) # show results def save(self, labels=True, save_dir='runs/detect/exp'): - save_dir = increment_path(save_dir, exist_ok=save_dir != 'runs/detect/exp', mkdir=True) # increment save_dir - self.display(save=True, labels=labels, save_dir=save_dir) # save results + save_dir = increment_path( + save_dir, exist_ok=save_dir != 'runs/detect/exp', mkdir=True) # increment save_dir + self._run(save=True, labels=labels, save_dir=save_dir) # save results def crop(self, save=True, save_dir='runs/detect/exp'): - save_dir = increment_path(save_dir, exist_ok=save_dir != 'runs/detect/exp', mkdir=True) if save else None - return self.display(crop=True, save=save, save_dir=save_dir) # crop results + save_dir = increment_path( + save_dir, exist_ok=save_dir != 'runs/detect/exp', mkdir=True) if save else None + # crop results + return self._run(crop=True, save=save, save_dir=save_dir) def render(self, labels=True): - self.display(render=True, labels=labels) # render results + self._run(render=True, labels=labels) # render results return self.ims def pandas(self): @@ -742,30 +903,51 @@ class Detections: ca = 'xmin', 'ymin', 'xmax', 'ymax', 'confidence', 'class', 'name' # xyxy columns cb = 'xcenter', 'ycenter', 'width', 'height', 'confidence', 'class', 'name' # xywh columns for k, c in zip(['xyxy', 'xyxyn', 'xywh', 'xywhn'], [ca, ca, cb, cb]): - a = [[x[:5] + [int(x[5]), self.names[int(x[5])]] for x in x.tolist()] for x in getattr(self, k)] # update + a = [[x[:5] + [int(x[5]), self.names[int(x[5])]] + for x in x.tolist()] for x in getattr(self, k)] # update setattr(new, k, [pd.DataFrame(x, columns=c) for x in a]) return new def tolist(self): # return a list of Detections objects, i.e. 
'for result in results.tolist():' r = range(self.n) # iterable - x = [Detections([self.ims[i]], [self.pred[i]], [self.files[i]], self.times, self.names, self.s) for i in r] + x = [Detections([self.ims[i]], [self.pred[i]], [ + self.files[i]], self.times, self.names, self.s) for i in r] # for d in x: # for k in ['ims', 'pred', 'xyxy', 'xyxyn', 'xywh', 'xywhn']: # setattr(d, k, getattr(d, k)[0]) # pop out of list return x - def __len__(self): - return self.n # override len(results) + def print(self): + LOGGER.info(self.__str__()) - def __str__(self): - self.print() # override print(results) - return '' + def __len__(self): # override len(results) + return self.n + + def __str__(self): # override print(results) + return self._run(pprint=True) # print results + + def __repr__(self): + return f'YOLOv5 {self.__class__} instance\n' + self.__str__() + + +class Proto(nn.Module): + # YOLOv5 mask Proto module for segmentation models + def __init__(self, c1, c_=256, c2=32): # ch_in, number of protos, number of masks + super().__init__() + self.cv1 = Conv(c1, c_, k=3) + self.upsample = nn.Upsample(scale_factor=2, mode='nearest') + self.cv2 = Conv(c_, c_, k=3) + self.cv3 = Conv(c_, c2) + + def forward(self, x): + return self.cv3(self.cv2(self.upsample(self.cv1(x)))) class Classify(nn.Module): - # Classification head, i.e. x(b,c1,20,20) to x(b,c2) - def __init__(self, c1, c2, k=1, s=1, p=None, g=1): # ch_in, ch_out, kernel, stride, padding, groups + # YOLOv5 classification head, i.e. 
x(b,c1,20,20) to x(b,c2) + # ch_in, ch_out, kernel, stride, padding, groups + def __init__(self, c1, c2, k=1, s=1, p=None, g=1): super().__init__() c_ = 1280 # efficientnet_b0 size self.conv = Conv(c1, c_, k, s, autopad(k, p), g) diff --git a/models/hub/yolov5s-LeakyReLU.yaml b/models/hub/yolov5s-LeakyReLU.yaml new file mode 100644 index 0000000..3a179bf --- /dev/null +++ b/models/hub/yolov5s-LeakyReLU.yaml @@ -0,0 +1,49 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +activation: nn.LeakyReLU(0.1) # <----- Conv() activation used throughout entire YOLOv5 model +depth_multiple: 0.33 # model depth multiple +width_multiple: 0.50 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/models/segment/yolov5l-seg.yaml b/models/segment/yolov5l-seg.yaml new file mode 100644 
index 0000000..4782de1 --- /dev/null +++ b/models/segment/yolov5l-seg.yaml @@ -0,0 +1,48 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 1.0 # model depth multiple +width_multiple: 1.0 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Segment, [nc, anchors, 32, 256]], # Detect(P3, P4, P5) + ] diff --git a/models/segment/yolov5m-seg.yaml b/models/segment/yolov5m-seg.yaml new file mode 100644 index 0000000..f73d199 --- /dev/null +++ b/models/segment/yolov5m-seg.yaml @@ -0,0 +1,48 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 0.67 # model depth multiple +width_multiple: 0.75 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: 
+ # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Segment, [nc, anchors, 32, 256]], # Detect(P3, P4, P5) + ] \ No newline at end of file diff --git a/models/segment/yolov5n-seg.yaml b/models/segment/yolov5n-seg.yaml new file mode 100644 index 0000000..c28225a --- /dev/null +++ b/models/segment/yolov5n-seg.yaml @@ -0,0 +1,48 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 0.33 # model depth multiple +width_multiple: 0.25 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 
1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Segment, [nc, anchors, 32, 256]], # Detect(P3, P4, P5) + ] diff --git a/models/segment/yolov5s-seg.yaml b/models/segment/yolov5s-seg.yaml new file mode 100644 index 0000000..7cbdb36 --- /dev/null +++ b/models/segment/yolov5s-seg.yaml @@ -0,0 +1,48 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 0.33 # model depth multiple +width_multiple: 0.5 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, 
False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Segment, [nc, anchors, 32, 256]], # Detect(P3, P4, P5) + ] \ No newline at end of file diff --git a/models/segment/yolov5x-seg.yaml b/models/segment/yolov5x-seg.yaml new file mode 100644 index 0000000..5d0c452 --- /dev/null +++ b/models/segment/yolov5x-seg.yaml @@ -0,0 +1,48 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 1.33 # model depth multiple +width_multiple: 1.25 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Segment, [nc, anchors, 32, 256]], # Detect(P3, P4, P5) + ] diff --git a/models/tf.py b/models/tf.py index ecb0d4d..1446d88 100644 --- a/models/tf.py +++ b/models/tf.py @@ -30,7 +30,7 @@ from tensorflow import keras from 
models.common import (C3, SPP, SPPF, Bottleneck, BottleneckCSP, C3x, Concat, Conv, CrossConv, DWConv, DWConvTranspose2d, Focus, autopad) from models.experimental import MixConv2d, attempt_load -from models.yolo import Detect +from models.yolo import Detect, Segment from utils.activations import SiLU from utils.general import LOGGER, make_divisible, print_args @@ -299,18 +299,18 @@ class TFDetect(keras.layers.Layer): x[i] = tf.reshape(x[i], [-1, ny * nx, self.na, self.no]) if not self.training: # inference - y = tf.sigmoid(x[i]) + y = x[i] grid = tf.transpose(self.grid[i], [0, 2, 1, 3]) - 0.5 anchor_grid = tf.transpose(self.anchor_grid[i], [0, 2, 1, 3]) * 4 - xy = (y[..., 0:2] * 2 + grid) * self.stride[i] # xy - wh = y[..., 2:4] ** 2 * anchor_grid + xy = (tf.sigmoid(y[..., 0:2]) * 2 + grid) * self.stride[i] # xy + wh = tf.sigmoid(y[..., 2:4]) ** 2 * anchor_grid # Normalize xywh to 0-1 to reduce calibration error xy /= tf.constant([[self.imgsz[1], self.imgsz[0]]], dtype=tf.float32) wh /= tf.constant([[self.imgsz[1], self.imgsz[0]]], dtype=tf.float32) - y = tf.concat([xy, wh, y[..., 4:]], -1) + y = tf.concat([xy, wh, tf.sigmoid(y[..., 4:5 + self.nc]), y[..., 5 + self.nc:]], -1) z.append(tf.reshape(y, [-1, self.na * ny * nx, self.no])) - return tf.transpose(x, [0, 2, 1, 3]) if self.training else (tf.concat(z, 1), x) + return tf.transpose(x, [0, 2, 1, 3]) if self.training else (tf.concat(z, 1),) @staticmethod def _make_grid(nx=20, ny=20): @@ -320,6 +320,37 @@ class TFDetect(keras.layers.Layer): return tf.cast(tf.reshape(tf.stack([xv, yv], 2), [1, 1, ny * nx, 2]), dtype=tf.float32) +class TFSegment(TFDetect): + # YOLOv5 Segment head for segmentation models + def __init__(self, nc=80, anchors=(), nm=32, npr=256, ch=(), imgsz=(640, 640), w=None): + super().__init__(nc, anchors, ch, imgsz, w) + self.nm = nm # number of masks + self.npr = npr # number of protos + self.no = 5 + nc + self.nm # number of outputs per anchor + self.m = [TFConv2d(x, self.no * self.na, 1, w=w.m[i]) 
for i, x in enumerate(ch)] # output conv + self.proto = TFProto(ch[0], self.npr, self.nm, w=w.proto) # protos + self.detect = TFDetect.call + + def call(self, x): + p = self.proto(x[0]) + p = tf.transpose(p, [0, 3, 1, 2]) # from shape(1,160,160,32) to shape(1,32,160,160) + x = self.detect(self, x) + return (x, p) if self.training else (x[0], p) + + +class TFProto(keras.layers.Layer): + + def __init__(self, c1, c_=256, c2=32, w=None): + super().__init__() + self.cv1 = TFConv(c1, c_, k=3, w=w.cv1) + self.upsample = TFUpsample(None, scale_factor=2, mode='nearest') + self.cv2 = TFConv(c_, c_, k=3, w=w.cv2) + self.cv3 = TFConv(c_, c2, w=w.cv3) + + def call(self, inputs): + return self.cv3(self.cv2(self.upsample(self.cv1(inputs)))) + + class TFUpsample(keras.layers.Layer): # TF version of torch.nn.Upsample() def __init__(self, size, scale_factor, mode, w=None): # warning: all arguments needed including 'w' @@ -377,10 +408,12 @@ def parse_model(d, ch, model, imgsz): # model_dict, input_channels(3) args = [ch[f]] elif m is Concat: c2 = sum(ch[-1 if x == -1 else x + 1] for x in f) - elif m is Detect: + elif m in [Detect, Segment]: args.append([ch[x + 1] for x in f]) if isinstance(args[1], int): # number of anchors args[1] = [list(range(args[1] * 2))] * len(f) + if m is Segment: + args[3] = make_divisible(args[3] * gw, 8) args.append(imgsz) else: c2 = ch[f] @@ -452,9 +485,9 @@ class TFModel: iou_thres, conf_thres, clip_boxes=False) - return nms, x[1] - return x[0] # output only first tensor [1,6300,85] = [xywh, conf, class0, class1, ...] - # x = x[0][0] # [x(1,6300,85), ...] to x(6300,85) + return (nms,) + return x # output [1,6300,85] = [xywh, conf, class0, class1, ...] + # x = x[0] # [x(1,6300,85), ...] 
to x(6300,85) # xywh = x[..., :4] # x(6300,4) boxes # conf = x[..., 4:5] # x(6300,1) confidences # cls = tf.reshape(tf.cast(tf.argmax(x[..., 5:], axis=1), tf.float32), (-1, 1)) # x(6300,1) classes diff --git a/models/yolo.py b/models/yolo.py index fa05fcf..ed21c06 100644 --- a/models/yolo.py +++ b/models/yolo.py @@ -36,6 +36,7 @@ except ImportError: class Detect(nn.Module): + # YOLOv5 Detect head for detection models stride = None # strides computed during build dynamic = False # force grid reconstruction export = False # export mode @@ -46,8 +47,8 @@ class Detect(nn.Module): self.no = nc + 5 # number of outputs per anchor self.nl = len(anchors) # number of detection layers self.na = len(anchors[0]) // 2 # number of anchors - self.grid = [torch.empty(1)] * self.nl # init grid - self.anchor_grid = [torch.empty(1)] * self.nl # init anchor grid + self.grid = [torch.empty(0) for _ in range(self.nl)] # init grid + self.anchor_grid = [torch.empty(0) for _ in range(self.nl)] # init anchor grid self.register_buffer('anchors', torch.tensor(anchors).float().view(self.nl, -1, 2)) # shape(nl,na,2) self.m = nn.ModuleList(nn.Conv2d(x, self.no * self.na, 1) for x in ch) # output conv self.inplace = inplace # use inplace ops (e.g. 
slice assignment) @@ -63,16 +64,17 @@ class Detect(nn.Module): if self.dynamic or self.grid[i].shape[2:4] != x[i].shape[2:4]: self.grid[i], self.anchor_grid[i] = self._make_grid(nx, ny, i) - y = x[i].sigmoid() - if self.inplace: - y[..., 0:2] = (y[..., 0:2] * 2 + self.grid[i]) * self.stride[i] # xy - y[..., 2:4] = (y[..., 2:4] * 2) ** 2 * self.anchor_grid[i] # wh - else: # for YOLOv5 on AWS Inferentia https://github.com/ultralytics/yolov5/pull/2953 - xy, wh, conf = y.split((2, 2, self.nc + 1), 4) # y.tensor_split((2, 4, 5), 4) # torch 1.8.0 + if isinstance(self, Segment): # (boxes + masks) + xy, wh, conf, mask = x[i].split((2, 2, self.nc + 1, self.no - self.nc - 5), 4) + xy = (xy.sigmoid() * 2 + self.grid[i]) * self.stride[i] # xy + wh = (wh.sigmoid() * 2) ** 2 * self.anchor_grid[i] # wh + y = torch.cat((xy, wh, conf.sigmoid(), mask), 4) + else: # Detect (boxes only) + xy, wh, conf = x[i].sigmoid().split((2, 2, self.nc + 1), 4) xy = (xy * 2 + self.grid[i]) * self.stride[i] # xy wh = (wh * 2) ** 2 * self.anchor_grid[i] # wh y = torch.cat((xy, wh, conf), 4) - z.append(y.view(bs, -1, self.no)) + z.append(y.view(bs, self.na * nx * ny, self.no)) return x if self.training else (torch.cat(z, 1),) if self.export else (torch.cat(z, 1), x) @@ -87,6 +89,23 @@ class Detect(nn.Module): return grid, anchor_grid +class Segment(Detect): + # YOLOv5 Segment head for segmentation models + def __init__(self, nc=80, anchors=(), nm=32, npr=256, ch=(), inplace=True): + super().__init__(nc, anchors, ch, inplace) + self.nm = nm # number of masks + self.npr = npr # number of protos + self.no = 5 + nc + self.nm # number of outputs per anchor + self.m = nn.ModuleList(nn.Conv2d(x, self.no * self.na, 1) for x in ch) # output conv + self.proto = Proto(ch[0], self.npr, self.nm) # protos + self.detect = Detect.forward + + def forward(self, x): + p = self.proto(x[0]) + x = self.detect(self, x) + return (x, p) if self.training else (x[0], p) if self.export else (x[0], p, x[1]) + + class 
BaseModel(nn.Module): # YOLOv5 base model def forward(self, x, profile=False, visualize=False): @@ -135,7 +154,7 @@ class BaseModel(nn.Module): # Apply to(), cpu(), cuda(), half() to model tensors that are not parameters or registered buffers self = super()._apply(fn) m = self.model[-1] # Detect() - if isinstance(m, Detect): + if isinstance(m, (Detect, Segment)): m.stride = fn(m.stride) m.grid = list(map(fn, m.grid)) if isinstance(m.anchor_grid, list): @@ -169,11 +188,12 @@ class DetectionModel(BaseModel): # Build strides, anchors m = self.model[-1] # Detect() - if isinstance(m, Detect): + if isinstance(m, (Detect, Segment)): s = 256 # 2x min stride m.inplace = self.inplace - m.stride = torch.tensor([s / x.shape[-2] for x in self.forward(torch.empty(1, ch, s, s))]) # forward - check_anchor_order(m) # must be in pixel-space (not grid-space) + forward = lambda x: self.forward(x)[0] if isinstance(m, Segment) else self.forward(x) + m.stride = torch.tensor([s / x.shape[-2] for x in forward(torch.zeros(1, ch, s, s))]) # forward + check_anchor_order(m) m.anchors /= m.stride.view(-1, 1, 1) self.stride = m.stride self._initialize_biases() # only run once @@ -235,15 +255,21 @@ class DetectionModel(BaseModel): # cf = torch.bincount(torch.tensor(np.concatenate(dataset.labels, 0)[:, 0]).long(), minlength=nc) + 1. 
m = self.model[-1] # Detect() module for mi, s in zip(m.m, m.stride): # from - b = mi.bias.view(m.na, -1).detach() # conv.bias(255) to (3,85) - b[:, 4] += math.log(8 / (640 / s) ** 2) # obj (8 objects per 640 image) - b[:, 5:] += math.log(0.6 / (m.nc - 0.999999)) if cf is None else torch.log(cf / cf.sum()) # cls + b = mi.bias.view(m.na, -1) # conv.bias(255) to (3,85) + b.data[:, 4] += math.log(8 / (640 / s) ** 2) # obj (8 objects per 640 image) + b.data[:, 5:5 + m.nc] += math.log(0.6 / (m.nc - 0.99999)) if cf is None else torch.log(cf / cf.sum()) # cls mi.bias = torch.nn.Parameter(b.view(-1), requires_grad=True) Model = DetectionModel # retain YOLOv5 'Model' class for backwards compatibility +class SegmentationModel(DetectionModel): + # YOLOv5 segmentation model + def __init__(self, cfg='yolov5s-seg.yaml', ch=3, nc=None, anchors=None): + super().__init__(cfg, ch, nc, anchors) + + class ClassificationModel(BaseModel): # YOLOv5 classification model def __init__(self, cfg=None, model=None, nc=1000, cutoff=10): # yaml, model, number of classes, cutoff index @@ -271,8 +297,12 @@ class ClassificationModel(BaseModel): def parse_model(d, ch): # model_dict, input_channels(3) + # Parse a YOLOv5 model.yaml dictionary LOGGER.info(f"\n{'':>3}{'from':>18}{'n':>3}{'params':>10} {'module':<40}{'arguments':<30}") - anchors, nc, gd, gw = d['anchors'], d['nc'], d['depth_multiple'], d['width_multiple'] + anchors, nc, gd, gw, act = d['anchors'], d['nc'], d['depth_multiple'], d['width_multiple'], d.get('activation') + if act: + Conv.default_act = eval(act) # redefine default activation, i.e. 
Conv.default_act = nn.SiLU() + LOGGER.info(f"{colorstr('activation:')} {act}") # print na = (len(anchors[0]) // 2) if isinstance(anchors, list) else anchors # number of anchors no = na * (nc + 5) # number of outputs = anchors * (classes + 5) @@ -284,24 +314,28 @@ def parse_model(d, ch): # model_dict, input_channels(3) args[j] = eval(a) if isinstance(a, str) else a # eval strings n = n_ = max(round(n * gd), 1) if n > 1 else n # depth gain - if m in (Conv, GhostConv, Bottleneck, GhostBottleneck, SPP, SPPF, DWConv, MixConv2d, Focus, CrossConv, - BottleneckCSP, C3, C3TR, C3SPP, C3Ghost, nn.ConvTranspose2d, DWConvTranspose2d, C3x): + if m in { + Conv, GhostConv, Bottleneck, GhostBottleneck, SPP, SPPF, DWConv, MixConv2d, Focus, CrossConv, + BottleneckCSP, C3, C3TR, C3SPP, C3Ghost, nn.ConvTranspose2d, DWConvTranspose2d, C3x}: c1, c2 = ch[f], args[0] if c2 != no: # if not output c2 = make_divisible(c2 * gw, 8) args = [c1, c2, *args[1:]] - if m in [BottleneckCSP, C3, C3TR, C3Ghost, C3x]: + if m in {BottleneckCSP, C3, C3TR, C3Ghost, C3x}: args.insert(2, n) # number of repeats n = 1 elif m is nn.BatchNorm2d: args = [ch[f]] elif m is Concat: c2 = sum(ch[x] for x in f) - elif m is Detect: + # TODO: channel, gw, gd + elif m in {Detect, Segment}: args.append([ch[x] for x in f]) if isinstance(args[1], int): # number of anchors args[1] = [list(range(args[1] * 2))] * len(f) + if m is Segment: + args[3] = make_divisible(args[3] * gw, 8) elif m is Contract: c2 = ch[f] * args[0] ** 2 elif m is Expand: diff --git a/utils/__init__.py b/utils/__init__.py index c534e39..24746d2 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -4,9 +4,15 @@ utils/initialization """ import contextlib +import platform import threading +def emojis(str=''): + # Return platform-dependent emoji-safe version of string + return str.encode().decode('ascii', 'ignore') if platform.system() == 'Windows' else str + + class TryExcept(contextlib.ContextDecorator): # YOLOv5 TryExcept class. 
Usage: @TryExcept() decorator or 'with TryExcept():' context manager def __init__(self, msg=''): @@ -17,7 +23,7 @@ class TryExcept(contextlib.ContextDecorator): def __exit__(self, exc_type, value, traceback): if value: - print(f'{self.msg}{value}') + print(emojis(f'{self.msg}{value}')) return True @@ -38,7 +44,7 @@ def notebook_init(verbose=True): import os import shutil - from utils.general import check_font, check_requirements, emojis, is_colab + from utils.general import check_font, check_requirements, is_colab from utils.torch_utils import select_device # imports check_requirements(('psutil', 'IPython')) diff --git a/utils/__pycache__/__init__.cpython-310.pyc b/utils/__pycache__/__init__.cpython-310.pyc index 01c6a2c4a7ee1de003d292decc47950f66e79127..ba6c3d53e5c7029737be3b48d766f6035af7580e 100644 GIT binary patch literal 2302 zcmZuy&2Aev5GJ{+)oLxtw(JB63bg8>ZPX^RTl5b>5IA+)G=~~N(?-&iI@?Bc1u_&0wOFRIMagqLV01rjbtaEchF2kqtv)ip-mVJbJSZQ?-F; za!Sv@pIs)K1vzDB1+E=x9Hr$sk#k_|^^^F1|G5%U^+lQtqi}U~wf}7%Pevlss{c6A zKTHPwr!pUkSbz3q|Iw3skCM?K*M0lKei-5#!>~I(1Vf{Im?%`M7?3uT4On8B*MIVS(H%#k5sBtNixr(y<8Do|_)2b6_hmF53)zNa7U~xD5$fBs8-jNqqS6cINqf1&B-WDr2#Lk+1LS6zeQ_1-E6kueb1U)=Fq_vP#JsJ+T!*Rt0iq$1Y&tPc zQd&ZTgq4*H3pI5{Zt09Fr91M<3Zxw4Zdx7rr3ZLH!^C@qa|GzwTeY$ZtCt3&xzYzo z<%r0y3O20sDmKxV4E`fB;Xzp|s;7-JGHjLs;6Kmj&S>8$=gNB7DE!j}j0dk5^mOrz zoDkk9f}-{g;ms45&lht=;}*Hx&%ypew(zP}HZ#T-%Vy#67GHV^{z37y_;S&FTQ~)_ znj0I_#q}K#??M#YO6bYB+wDrMa|mz{qa@tQGi_QkUWyl!L<-w5ji0D6&eLcBO{W3f z2D)a*#a@>bpKeWS?Z@A=e|q#I>@5q5cRRKizehWn*iVy8 zOt9LX;iKAdj&`(Y|M~kb?aq>128m%gq_x_yZZ>R-?{14s$S9Q^kjsyed;+AmIFssJ z#yg?qpT7q@@tO=3XK-RDL*uQjv$IqH`UkCO|jCiHf#GuO6o&l3_m4 zPgfkUw%PWe=g4RzzeZ2oi4q(t4!ki` zU1>J9Od46P#URgj?FS93L*ZL}20yyY>eOc*%sTa0le&&e1G)gUd7rv0U@fRoxdzAO zZ6F=G*{PXo81g*E_Gbfopwna^vE#`1k>HonEUaV95A?V2WtI^@X&alZ&6zq|%Wui7 bpdZY-=|fCO+v{BFyFNG*IIW7`YzF@V_I?|Y delta 461 zcmYk3ze@u#6vva?<#K9ke+eqBAVQ_LI5~(EK}FnL)WK2+?IqRIUQ=^-icoY=1cwfi zzrw%3|HL7ylbfSBh=~aL!u!6w4}6mc@2m7dN+O@11SZ()OLxZw;8l&`{9M@*&;krI 
z#GE_Wg3R3qf%kYlS33xyHT*`w)CrT@CX;uyg`3!6_!g=MT&=8*&}ohl=Cg@0f?xz> zPd(!y8o4U)8cT2Xjlj|6@PvnFRPysIl04>vG|?W9#c8B2y+^23jx76SjfXNCq-u@S zrjRj8$BL^cnJr>_^{fypzPcy7H$g#@3og4N;nI#Ni&p+wZTsi+O{>vyx8=xAEGxEi zvxGD4_9W*TCQm?C4gIW#>+QpS8pWNIFK%Y3=tOZlOQU|$rZgHvDWxmJD_zeIM9RBD zoc%yMSkf@!BXi&k%0XMzkzd_jFnBM32LaNAQYJ!5rDZ?={gawu+Hb~!WqrP3*N~W% I0T)i=Z`^!p#Q*>R diff --git a/utils/__pycache__/__init__.cpython-39.pyc b/utils/__pycache__/__init__.cpython-39.pyc index c224a7303bd0d219e28840a6287562b5ab8b4356..ec5d71fb40249979f404f262809c55e0a960fd12 100644 GIT binary patch delta 1319 zcmZuw&5ImG6tAlOoR9AL$Ql=o%O?2M8+Qp9&_hrpN)Ta19SAf+)7w?qo!Or5p}Hr# z8@k!U>`4f^GR0ded-cBRS3h+B>A9if zGzol1zut*^th=zXO@N%t=>oDIGI^~abG9IJjWez- zAVNVPioUC?`@@8%N0~ChEE*1#IowZDDLh@-lQ_&Dq;jmx>?F$t>>`O$E|kdy?k$fh zEz9LoB3soa{rqK9jMLFDYhiu{K$vs@&%gOXm!O~jac83ox|M$$?u{T2NiJj%yj1;3Z=F7Cc>|maCSS=(P75+(7oc5`Cu~M%l)@OG zVYWN)G-XWWFv;uMBo^f9bs}Yp<0!~aCPEnx!+0uGBbQ+$_QL4!e`*%TSo8W@5Fj%F z2-WFT`lvsUC!ZWgVv_Gbp|#I0bi%41+1C4gaFbUM))Af$qLy$P(gnbx>XdM99tRFKz2SM)jC{0}sB5P^WnvrRC(8 zaRkSx84i-Lwjr^l!3)&FJ88hGsr7d?)wfS!qKxaYHMUEWk^%^fQ!ca_sVyXKlooiH zadX@%ZLkp6wRXX>Su(z7dr10t-@I+@MEdBK);l`Pa370=)70Dg~|K;%3~-H$kFF( zh5MWsgMSTeQ(|9eh7b%ne{gC z8r3h>RgO%Sn+W)HRrhWRZ|yk$7+$;#p}U%&HA5txU&;K8!pgogPWdzzA7JPV39ajp MmfBj==(~3FUrYHSJOBUy delta 1082 zcmZ8gOK;Oa5Z+xo_QrWOJX-Yy6sVK}r8kf$s8A5M7DNS71hUfX0trc6jU6b+hC_RV z1MH2eQhP!iIPnAc7aV*+{0+o`8JE%uOa68qJ2RhW#=nQYRLjj$$z%9@><_o*-S_43 z8}q0s)?kV&(d7-EouVObGi51zlPO#Cj^iE<&AyvoB``kf^44-`D*}gYQ?& zCoy-`LgOO7;V42Z;CRGh*5O+yZ?SE$!8y`K3Sw!cF4Ei@wEGbdfw~QjnXpbSP_aV= zkdIy`J~hfp6d1R8jlZg`z{^|fExi`a?q69#l9oxVnag=fLT!Tz!9mVUBIY0v=th=P z3eS=j2=l}Gdf_O!k0GQ^^b~YcedekCc+hVhqUFAT=GvNuQS;57d5|?oz&Hg?5M*V> zNXoh-_wr~pf~Jsq%0w{-lX$MQIH`Z1DxSnWYvC`mkh}1v=Ozv|O(?r7d-=rWEXKJ~ zBV%iWWggb`5{WmQZlPC9^5}6k7(gsKCFRoO-wCua_^!Ofi{(yvmvt&hAr?DTRoLYZ zt)!Tgl5(8i8QP_c#hCBZcG(tFrFgjzdt*$Mw`^63i*b2`{nHDm@B6CT`J~df)lgE2 zT~$*@-XTGpeNG*XD|?gADCf;3XOi3IzH`cPbT8<%&A|DQ)j=8-#GJx$^ng0lqEX4Ad>re00m=NeXnP{TPgpC|Y~ 
zo@@u<)2OE6$!f!bW)CisW)DKGaQ2{e%ELozlufnN4QHV-)mxR}>Wf(C-x*>CLR?N;!f1O zOiYQr;x42nklH8iM(R#+k9axy?-fVIz2YEB?h*sy72=S1rO1dq;&6LLwzyABqIRE{ z7Waz>kh&W)f0cL;Gk*{>zX!R8#KXuvjNHqSd$o83xkr%OFN_OX_SniE`|)a_1J(IAKy>y^56JsAv0 z$1PMF6({dHYA82Iwo{Xr>q~*Xw7fW9n_OC6TF&Z0{4&b&<$57&mh6B1qsD9P?@pDz zdhN@f7=Pt=rb-_;{(F_@fAy^?RNj#juleq|Z%mbzH}1JF{+X{&mA>(H^_wbkYMXy1 zGDq>a69A3|#L<8>abXA(TVslZh=?dsmWW~Lc95B_TyAz3?QUE*G_8oI`->u##B&^v z+pn!hTv{o@%rf2%*f+b90iO5Y;*N}_&y042U@kk_YMo|E3`ZZW?NCeQW)`AqLk5 z#E=-iq50McZB<{_N40fb*WL)ax}aH_H@IBUK_R;K=1$4b@-w>Dis;%(zo*9dh~`H= zqJ7K=`>|as@{S1mhp9zc~s-~lPQ#iXZfS4(pP@I|I&N@_NSdnP0L1t=)6-d&3i#)u`Im#AmSDa6(@*D z;%J~Zo`3uBXHWn8%8jX8y4-;QH(n2rHFD{z%|fN@E$1usMMtWtoV5%z4N}EQ!FBVM z!eyu8W{n^^SC`d-mrJRV{9M_o2sf+ek_tH+1zD)NFBk_Wid<3WM+AxBaq;|YAHcgb zUwbOu!f`KX((v_rIe=u5XW$>Cp^DL!)PNf-2X$*GhV3i-o_4_`V_FDlG+FP$!X zk2Nn}dP3G0oT4}R;HBxuUol;-UaotWnqIl$UfN99?xDtVkT_CuYR*zaK6-#lumFtf zd$pAVTgl;2K3}ety?p-AkxCsq?>>5{+c7&FBs^KDx!gd>5rWKCG>|05AerwlihMp8 zIA5rVdiA{HmRB5cT-F*T0hBc=P)oLNvuXxz;dbI&2r1(;dnY!!b>|`-TLM3#6BaXNA-+8p~tTqzg`)AiG7!S7^%q3?fi&#aXgA2 zNhEv?DEBH)U(?sYbFLuG5^x^yC80e{i3vDo?9{p?DksdFCisXYteu)?teJ{2Y932s zXlsf1PHoi)Ifr4i!0R~X93@6#W`oqAh&*cskus%65GBPpuH4D0*8prG8R7!haRHAz z4p1V=Brdf_E@ABF^!FN9F@A4vv!>jOrh#4z^vk_sskYocQYpJ$5oi49{T!4hJFczV z`=YG`;#RhxaJOGL*CXhdjmZ7{QWO{s1PzQ@BZw3v(a>BeH&D%Q1V*_UL_^*YQmk2X z7Pn6fxDbBwFuJ%rtU>)C{^I&=&N*y31O7$bC#$Oy2jjyawL;Ya&Di;TwJw?vs`((5 z&y%x-HS*QyAx{(R+X}cpG667x$E661YqpUYQ)Q>N;1kb;4HP~NBH?KZy07rZhn98I z!g(7rkk+wsP4Oo1Ok?&`kf@PwRzMqBOWuWkfmL-1wZL>eAzUhNU>s^tUC zFzXEtkEugk)JmOs%5x}`PXGjZdD|p|q;R_HB3h`uOBsiBOP90Ax8>INriz?x=a)gE zOOroB#@F4bZ{RVd$NgMWdaE$kP12BWmds&I7^7Ot^2{|p5nuD59jrsrUNT#eRn+>U)@)S+I!6ttcin8)CA)Qr7uBWk z?IfO*AMx#VL#3jAvYm?gsdZz{#Q15Bd3v*M*)IZvsR`k3C!ZPNeKD|{4-X?l57h6ywX6KIr+maF-B_u7jP zsV#C@rx2Qym@@Y0<6!%P#)!TffBW&ael8oRn%zPQB!ezdK8FmBkVI)nB7{*h0L^tM zoxUz#fwV>Q2MpaHLkBXMZPu?yMp$zol68>E_R=F-Z4@H|O}E#)RC_fK6Z1HWug2EF ztnrTXosGn93%L(kXb3(h9(wsY3-RM`?O1&SQ`Toet>BIujTjmbpg3mke^r~hxJ3oxGtAbu*FswMM0Mj+ynfl 
zB*xb2PlGBU>C}O>w;Iwpl*((m2dd}6Etr?AkoH?~KjufqG(YY~$H3o&zDpzJuUYFD z&x`m8SQ6kNt7>F$$Xv^YHDKc_Rx8;`wbHGOpYc-|A?+u}G|2D1R=;O>u{CfLaG<_1 zZQWSKm)j+#-yfEsM;=)(FrrqVEEQ!Rwe+hGP|7HMB03{4Ve3K;@+4E( zJ2DBYuvB)lNl@y1q2Wk@(!eZ3`=cNUVg*@pp}K*;*PAsVA7Ia;1aBoEYXNI$G@P2q zj;gblCt0S?2l`?VQQAsiEZr9v%XpS1@tzEHSMsz2bLqazfw@euG|3czad|#xTwX-G zBH9<(roc7;OOs4Z7PjM2f*O6f2(;y#Txy4dz@3aftnZ;}s2>0y*r(s4#|`;DRKgOV z9-zQ2bAt+={6Gh6OdwT^0d#%?NKnoz7W46K7}ttZHoJfpt z7fwKcIUonP3;G@~FXHY;E8vOB#r-(?C4ebb*vgL+?-cH^gk!Z!&<>}r#$G7A=MFiBsw&7@Mlue_zXWJgVy%NsSPYV4G zt(Ejs>Yc*;NPp3%xkmenDL>@J7UIA=8}EdOL7qbQ#mZn=kM&lC+^EW7PM{>!0(SVO z7RYs5kg{UHchXwM@AH{ULf1~*jJ3c4AeAwnIml5kNxO~d#P1N@so-aEKrB$;B2ti> zX`s0y8}*9v#ylSy&R?7YQA8kinoc$)-;S1nU3SYg*DKVDj`UezR?9W{4i2XXz=ZCo zI4o3KWpxFq$7g0wp3k3oeCFiz`5;rQHzx-Uekhc6*u?md!eN5 z#zT_}W|a|#oE=2zUdY^xErGEi0$RJm>d zGh?XRS`y#X8af$Tn`ljiAj6z6DLV+^@d4-eA`4MJBNBceH8(#EiZGrgm+pKK7`ql% z<3uqIsS?xyEi$=_YD2$Bq3!?QBE4y}0X2t=Z($CAoP%#eS0CWYZ&Qaap$;b(Pu(d+Xe zKU;kE(~IQV}1;yO>b|3 z8Fu;ri$W>8ORFWirR|Y+VgzppqhUR(CME00(^$f;<_4q#ePz=sXSV(YC($c1P>k zJrgpXp6Wy639>-3BbFkqjH-58HDo2qd!0VV0ytq{3BfZOEy95hkLM6dWiD*1#^XSp z@l=7*v)->tag9PJ7kr*d23(|k+2XvR%|=_=;0w)&|un`4kp&3y_^2Jr&yj=VQ7 zv*V#in?{?!be0`?lwFjzNh2m0DmmWydU%0ywg4fxGU<4`Cki}f1S z%o-l~F;O@*2IKUl8)oHUAs^|9ED2M zz@35&lAF$na+@lH9KbguT}Od_JkXyA^z(sUkk6=s3!$p6pAGa{F4c5gCXvq%UtnonY|Ai1I83a4!Rlq(*ex9EMJ9n}g7)Z4>&nF=p5%rN3>`)*dzw zKsOlE_Zaa6EO2N9bkI@*M=$*t>W%*%v=1ZwI;4m72lRvHLH)29M<3|s-^OQ(#KO*x zssj}XrFp*z6y-r;Kq5dE%TGgx{WRo$*Ox>8D)XMPV1Wvbn`aHgB1{tgD-W;pkIV# z=!eztZpceRLdL@xuk~S-{p%PPmu61ZS&MdSz&Gan1OvJI0H_tGfc1NT z_xqq1r$8BxfJTh#8C*!9&ae&nLp0ptj-rjy(}WeSlkNKkZW}&Dyau*(d>pt_fc%Bs zM$H3nom#S>-5*t{7*enoQIOMA8QaQAZpHEdbXOn`)pB!~W~k*zlTIu*GP%!L~%sgw_;OIo_BSf5iM46ll`Q0J-oV073a^OQqeD{+gtWrD z+Eo)@&Vc$dV(^Ma9pUN$Uk8Q{wXMP@vCm38wLNM7G}TW0&6JX0?cC?4lxMA-TbtS- zFH*?=BB4zgK^Ciowz_eN!w1m4oDA<<;I2E0HXDBp02P60$Q6o*e(p~SmgXerhcal~q`jGe zW!i6!fwoehF3XU=K(j3pgrc1iMM)5@2(Hxdrhs^lI}FMN_z?_RQlyhLldfLaAZQ6E 
zFC4nyz_j|*eU0*@Ssf?r(W9ZBg9&Qc=q5M2b=iT~7buqhMnGoP_Kb$!F|%Bq3}WFO z*JNnDZ(Dq%T3%{4?&8|H^%-L~I7(cXzv6gg&>I~7`M>(zZ+`Gk|M{awibTTBkD(OI zt3^MH1g;ZkQp4;};w&_!!8+OwBVL4ni6}+7<5Hrr4fCqqA`YUbcRUVqgux3F8$=$a zur)Tt(iIXu;8BPlpXO%Nj{%0QEmdyw>hvhDjUt;}G(=prk)VTVq=OjHqFilOvq||y z*8L?wF1k2R^)QF~2)^Z)*wSgC$PCU%;=N+g&}Hn|`m9rf=lbHLQ$B}YISeIZ8jLh;0QU&4-swgp*WB=Ps9QkH7F9 zwSqai@DENXUuLB;u~_!zC*Ia5H@YQTM||7Fe(B)uAE8^q5!vhx6@enN7&JGswJG9V zyt*vT%P8k*qJ=y_a4&&D@JfJO=CQgguRx6}R8B!d%cU+l(u3z-<${P3HbGKdM!v30 zUu3TD0xrNIQ@pcdQ6{kLEsLC8V$!7z%fGAspVk_Y3Y1V<2fvpKUe zaq|56bLStK=uW;HgXB&S)hAZ&ny5BiZ{o58)bX5>BM&MB+fQ}m;Dpyicw#o2kRRoi zC^duo6`qfcsRr>5dCDZS>BrAbpFTNrayEbVPzwu(C?J|AmA27gX0qI`MBZ!uR>z3spGYtUZ<9g7&`kYzaBJ?v{f;+` z3w6NSHSaTK&}_2;UqE7xVnN$wy#U0eq_!-8kb9O(gGKsJbwQX`JM_hg!-OIwLP5@4g?8!Xf3Q%Y+KQul9frea>Qdlg}4&6uU=Q5=RRWTpbc|v(dq2$IF4E5IQC=0cJW1hO0 zEB}}!p8-&2HO!T8W#d?o3My04R0xYjs4p4~>LXxt2h<%cLx8s8Vz^Ye8hT`RD=I10 z6*qitdtquq}q`W*ZaA16M?p;-4vCCgrtQ~YcTr-*hjmYjkd zV>73SNpd%JvAl(Th&5gAa1a?!`^~sRk0V6o2!~c{y$wHr#-WZ#u}ZbtecY8U%`bDw zWKuS)rIn$c5WPLY$Iy}+bR3U64{$qz2WivVRwRylD0l%#;$bQza(Jm2jRoHW@2Egz zQT(ZtidcZ91U;C1RnDAGX_cNkyop$K+R%NspZFng%Lpzq3D=kV{wsPZ<4|Jx{UA3m7< zKu7l8{^&W0?$Mu86M^NYjOKOa`ozU1tV5W{kZQOVg`!X67~C9}IBSu0t%f6Ci>~u} z1}-Xu3gC@{wPrxZm$;pnP5VmWB2pzhn)bc=QEbaJQo%sMg`eFkmzwpa8-|C5LC}md z4kAj=4D2~sujb*hZ!Al-Y`O_L`)~roY&x&*nZKL+Kyz&zXFdWA3+!g|ZnU|Le(I%9 zoCkLUkK9MV99E@#N?nje=qB6&sUnXOkVt|^xGmEx*pH^U5w&O65XDfh9qQ?F zf!#~1D}>ygw{Li;^M5LZ3O2s|KPw2{JIk-7RrvcM+8h#PM4!+Au$YPv5S^6ssMEOW z;NGU_hF+yXOi>OIB-n0-V3wdkuuSkig3l6so8X58zaUWVkgqXCjaaFCikpXgn8&ke z&(-b|8G`tcFxvM~G|=3)0wAQpimQMIYU6|Y1YETi1JTU>J2MeGX7|UF_JFb1K)iYH e&;F(THhV0-%Z?j~UTG&~|CfCr6Uz(@rvDFX%tUzr delta 3524 zcmZWrYitzP6`ngYJ3G7HT|dEQz1YUY;ISQJY?2T{a9m;#pd?LjLD{5TjrR`rz|8LE z&Mbb!W(k=7XvL4@R!yQL)HIbUs=nO^QlzL-E2WPgZB^Pzw^60am57!8;76sZl~8-m zUAzW&M*HoVbMHO(yzl+WxwnrF6qCug0uNpN)8rQi6=fAa?te7+cnW^OFO=5@tcvCt zel#2P&8+FivN1oNjr(=kI-*cQbuZy3vq_>HRcM47OA0kEtJxIYO-&kGBH4P{OA|B+ 
zR0B=X`XwdXNG;k-TY%FP+XC;_SS!3+V{Nf*w2!7~JB&2bewv}%foh@sU9^LC0=b2D z(H$UQD;=OcbQf@1sd`ee9_C*qyGew9NX8Nt5$5D%aW>}`ZT`HPu5^GIj*=x*$*Q2P z3hOl*r8Z`=^JpuMBuQe25M7qtr-N~lsZWJa-?o@MBw`N-bEp}~;} zaJoKiGkW{gmhT?@+x*h#4I)VCHq3y{_!smvN$@%SP-VWse!$NMy5`k^t_QrW!W#Sq z_{#vi1}1$zwurt|7&zh`aKl`#33u!TFo;S_ z(m>)`oUFeIqW?Q%4ycKTi9u4No0u2X|IW8Qn2&?&qNGa|<<2O7H2OWG9%QmU{%Q2! zFj_75wlvlZv>*YPMzLDLqfIW&(Q>sd#SC|RV0KdQhFcnix zZDr=lhw)BgSio7+_-frAV+csLhqon$$tXXOc#Mqj%ZYwt6xl8OM~U5;4cXG!b#{3Q z>o+$YoZrL$mFP51!Q6=>yghk~oaT3uyNq#Me3WNW6r{bD+Vc1SGB(LcPJ@GT&MRfx zbLSo0FF*&hsX7E_hzzP}!g4FWO`RZ>&){;@nH@mDdcZocXWhW66OmxrE;;NtaE0a; zLe_&+)MirwB$J4+V!1#egJ2i72iHH1fOZSRE|r`DwOZK-^7bK*Obaq2BGA~JK&Z2O zh05P*NNR$({96r=r#D#@+zs495ZZ;j!zPiT`EG$xoGy*f{32!Je6IOP z@b*ue_c!1w_5#j~FzmAYZu8*(>FqgT2=a=vFY)n~$F{tQYtXheY{2XiLUQHxmL>f@ z?;iv%Xew1lBY>7sxkuw)H8?7hv>bdJCu$;M7p6Q%7k=hH z2KxekW7~7PSR+m(LbD5V46A%oHnCRV-Qzrt{8)Qu_Z1Kd0kvn@38YeZp62ZWbt$A&V70Rm{-ySQ@-F{LduK%$(~dhe4LLpI z(h&0Wupplm0-tWHtH8GlCSdiE`lN0!LPBz#c*Mi#RKKj|bU3$6DmNxlR5Q zP0*ET(a%9)1tljRy62q*{#@o9`Do>X%q3FU@Z*MWmqFCjhDW)}PbaGXHf~rfC80LR7hecc8{A z>KS#L^2UzzkW6V=Fc=TC3ZCDM-XnfCDzp3E*Y`)+X?}diO9R*qMHC|qzJ}7P9%s2t zwlTQ2DmwrDj@>fqQ#U)VeER&&YoqKC@MTrp3;OD)m8f=D8Oy)m@Fc~^*Eu|ni{9zHT9^%8Y@ov^A;=8luQay+c@KjEz8AuD0u-hmPL$`+Q zod09z2V*jjs6(}VVKgB!vYAAoO}Kv9x9Zr}al-omSz~4z+fLT78UC|x@J}AvUU|?X zYz1bvuJ7Khc9RU8u#yXx7@0LSu+)hoVEzJbZ~LU~CmZ`W217PiN^8nIGmsI5Eqfa|qoW3sYg20l=(kH*BGAw diff --git a/utils/__pycache__/augmentations.cpython-39.pyc b/utils/__pycache__/augmentations.cpython-39.pyc index 84002b6214ef8feb162cf8370c996378e15db9db..8ebec04a4ae56dc1c61452ecca89503f7055eb4f 100644 GIT binary patch delta 5764 zcmZ`-4Qw38b>7+A+dJMbb)+PUk|_R2B#)FR{#d3W%R0)miz*uS~Q0100!C`=%Njp2B^!XMH3({ z+5~9(y;wS=db-EjW$or2i#{IW;ma(& zhuVuekMJm3z5E^?=bgaz@{>HlyMXQE-F)X|Exn)L%lGhJQ2O{O-p>bs9pEWG$cKP6 z`}rI{$PWP>;KO_b0|xnNKF$w=GQ@ZABYc7%VyP@hZTC@B(TF22ELF*&@1X?H18s++vT5^81FZ56;o45M2^3!ZTGv$}q>4+Oz zsV^^;r&sDLb-BS_X!gTMmX>B*OY1zyb?(DlKHkCo+yrLu0Az*S*y-Z=TFELqS*K7b 
z+Z&(MH7$p?^_alIc<1rjolMiX*3dGV!#KOd*7VhHK(toW*L;qz;k#gPA2+yv-PdFq zn*IiBFmBRT@Q!sqC?;*eIs+DHVZj#G(ZWI);qirASOgmuVsEhB+8c~%&uHb38k&=+{|(dV_pDYv zj&|2oRu3_4&Cj&eJ|}(y60U0v|8?zix;N@stbNUY(O(XE-$`u}6qs@6qd)1)-eJcR z@+-bw&7_`x{WW?vQXhB*{b35U@4QRTzoqhj{^96zuYBuAw>vd0>37W~tB_xET>o-` zJ4>$L&Si_1>ld^?E~|db% z_WcIiDX$qlY)F3DILF3hhyNJsmMQ;?{H2kEe98Y>({v5nDlP&RRxQ`83Tv^j;uq+g8CSW{%nyi#)Ya@F-`g~--j-)fzD{LF)hPhR)h!Q+v z)jCPT=@e^vgiSE<#I26dUS@s-1o4<03m-M-2u;aMc$z&UUk%T)dHGIw+d3h?bpOxivk@s0e&P0dJ=V@F3u9#i2(!TR~ zk@nSVFALIWyT;KaY-Gk#Uc72O2@X2~py@sEo*0|dq51NyMC@VKOw(|sKCuaeBSWl&xCgJgBa(Px#j;&}Wy;m-@*m<+_PG2({ABMovvdfnS}f$Uj&)9+?i}d51_?KH zL9U{}P2Ywf{MQt?^~uh6*??LMEx}t69fhPWI*Fjbf-EJH<8`#%P{A&gZ6{mKSz?t4 zzEYtq8pJy1H7i^^FTa_%7aRD$iQ_REB`y%?29OVSP5eKNiv^_0G(IdHWRn|Nz6petN;zDbNe9K3QKk<84&aS)-pKoy_Z zB9C})%Tp7m-IY-MUwfYJ>nHX*?JW+0_Ab$f<+(l6&6XyD)D*V}D4N0)O(|7DN7867 zLUC@;2?13NAl?l>fdGK`eFAiX#b*Jq6axmEQgGT0W`&i_I7?!GvRBlJX%4{kS#?Xa z9qf5|xObHOo_wlzp4}~9>pd`dU2&DND_S74I{C$0+>d02g$ zJ>7&CCNnrbcGb!`g_o_X7;C>EZ|>#Hy%pQ{Db{S;v29y2P<}=|f1UCSy8seG_p|u4 z0q!Zo7P%<}r<2{{o7CMhn)nu)uD^y;dtBMhHSKJvTC~Jj;!~bJ8YCVcBC+^OBCZjj zh~q}M$S%)U%1)&wY;j)R(>Ge0r(Qb9;vE8XVT$(&TqMvWK*1<&7D^e!e3xBtSu^858(C_2SPfND)w18wxiw9 z-q0HQi`rF&cs_pMA@*ZgJMea|6G5(ogyITvHNNADzNqg(;?nS4^2x{gdnc9esxv`` z&*R1OUTPEhvy^h<&DPgaq9R}G|Amv}fo_2I6-LGB8lX1IY+Hsn1#%B`6T?7Odvi4y z(UI>3cB<{pztwl;2amt?`ILAX-Rd}O?)LXn$*AY$Y7_sRx>pH&n*h1Ew{>ct=csua zzzr-GD_Lh|T99Ex03aD}2VHNQyd8=XyQy*@TU?Rv4jdl%XBzVXfuR;qx030p9h7?p z51NB4ri%~d{9x6K3s{_%G}YPkT+29bIazg0h`%Q}26YU^&V-6oTfEZ9H)k?{* z_yb~i3m|PSFHuZMn^{3$@rQEa;Qrw|=NE5-bG$wKAREN|;%)iT!R{ux9a2p|JpYo) zKc$`zQ=19t-=%UC;cq{kK&VZtw=Y7ZgQ5!QedsD%?N$jsTMJVSQPfMmwFuK3qY=K~ zr&OPt>ptZAn)nM)Ov?8gdc)V`fpsI#)}oCLan1?c2sT33QHyz*^h@7u7+inR5G#!+ z26dp-vCi^rRs+9Z^}MQjLTI&nOx5#UC)_YEu|>v1-P&4A@kR1lBlcU2>(|&5+OKMm z)P$&VFN8;Nrl^hxUx=w%B!cQe$22r?yb)-qdZH1zq~&#*-cyywZgi@WP-{fD77mT@ zjUA2X8|)b*{Zv4pV7SjKAuz5>Epdmp#0NHOif*v1pr(kcnqt!qjrg{jB7QNhY6^T? 
zHAUi45takPyZ2KXv>@mp;CgD^1K$1FEzb_$b#yoSlwTnY3c{N*l*+@sTFk&(*`nAh z?>*cj|7!T~=sp_PN1&g;Ht#wF+5%hwx9fqCqs`hwX#dmeH~!mx zwxkvV93xaGFd?sxoSQjH=n(>w0B&G8E6Rm(UQD5t>_pDzWvO1W*F}8YvTwBaOv@JO zU>1=s13qyVbsr<3W~HPt9m!RS6_L46pd3k18ot#=^5W>eduHhP(d=3zh(tArVy6?| zZf58SJ;)NE(2KyJ_p%<=$07(4-4rBZ@^44Kc<^-+sv;)M=q)A)nQm~QvS5{wvZ7e{ z&E(MR40YeRLv9xmYuhd?)-w(wyuFBw-+rDHBGYPmuPzdD@5GoK8XFJ13d$Mr8Tr&$ zy7_hB$sjW28g6=_ikG$??VIrnU=zPhyoRciJUh0iH<{aknut_D4X0G2Fs$S)k`^i^ zq8{u+g}9n{L}i-yRaPc$O^nAFOT!J%%qmrU;!iNpHF8#=s3uhYhys_Yvn`UzBdd@M zkNl&<<4sD&yl|=#YIQgLI%vs=mr|(fr@fp8u`BK8fVswE4LO9Waa6(bIqLpA0EVl1 z=ZOoKz!HIF?*eE^bM>XeCRc`cCC7tSC>AukaUVBySy_zb?u;FdXmhkBLgE1;Tmdy6;8 z|M8CmiM#zVyejM`tua6KI&ooZ$)t<_UU(&k!%N4{OXE}~?Jw6#)w-A?>Ona&Gm4Vs z?92>%SYDnPZ>oIFjnXVtdfe}=|Ffml~8`hP@R0uulL delta 5459 zcmZ`-4Qw3672es~+xzj|`TUbOK0E%)pU+PG7bgxR#0d@wAvA&Dkmg9aUT*fzo7`V! z_Y&JZ=ZX@jN@EC_QYaCvL)22M6xy0fQ7ceOD}-8*S{1c)3la#b)S?ndNEFoH_h!#` zj$8J$-@bV>^XAQ)H*enD3n#AV-7&x4E5Y+brXhahW=VP%3yU8Q6n4OC-0?_vvPdLK zsFJEn*GKAsc1D&_HL`q09rY)zQgoAo%)j1Mbf#rluebf)7MtUQy zra_=WbStf;bwD-Idb;eK6ltd0Xd?{)XC=Lfw$XN=TIhD#L019QN{4B2HC+Q_8(mA+ zL5Fs_gLc#P!0DhhbOY_70op=)OA2^sAMJ;_RdgpEpc{c&4T=uZO`zx|P;?EHHq#*} z4MAxwl(x_tpmYP2)=~K`DSY!}i+682mekr}`D9wlnEV^0fjIfIWGDXz`C_pJ?4n5# zLZyfdc9B6P2UtR(P8dv~Du{4fft{&idF&iB^=!t76X>+?IDtU(kHBmE6M#wuDJq$S zlCxxrOg=?rDy8Hp*>n`-;|g`4>}dy)pyZ@QmAa-0kSblETxH5#raaVLYUeFeRTR12 zX*ubdLeahgqMue#Uy@M&v`hojvQQ;3Q(YjmdL~GNwC0>taO{%KlFOt)x=aod>9Ca9 ztw?6g#7jiN-w`RZ8OpU29}{UxA<|@vDFj@S3d$wvN%=CtK1X1lQ_2}79QupEE!0sQtiTd-yM^detpR4)Kqx2FL*(_6?D(Jm%|G4B@@%y$Ny6u+7*Ga z<On+io8=`3Z3_~E*nV6`5t+Y-cH>=?Fm zG7Ovi&viZjqjO3#O^qGPj%WC)`k~;{*azn|j}et!oI766m14z?dw>fH3XPEqw6Jf9 z77eiy_?1qx$Z)_0%Oq2prN$wQYs_*Qux&JU$0CU+YC)nHVjnQrG@oqzd`lye*IS(7 z(eWGPxAG4g2WVLnhHA3s5Cl(9f+v(pLPc1qOhUAr#|;5o(E;1eKRGqK6b2-OI$Bb-JU zM;HW%sCqgI5!@ojEt0Y=>VO4-kdTeRS)dvDlqpQO)LPznHa8K?#SByX00bEw0Fvy6 z^U6npglAhD`>p{e?m&L&K`n3=A}zTU+}V_7q|1;Di6?OxF~VpL(_z-OFyP_0O2GLu3){rmTt1@~A>45PCcm8dt3d>15^Lu`n;c=3D;+y#VX*iCTkc9`0UCxVQ}E;f?9B4 
zT3)C+DiN$0dVYRgU*|IH*MRU@$L5`|=CIG-Ti5G>0Igutc7D9G zxl_gpjAIOgoKvrTxt7m#-n(WUl4}q;5%AU!u7ooFk;<8>Sv5Fq$sK=hfif?0EMO;B_6PCGfm-E1N167EY!uLw- zf%4*WYu0xWa!@z{-7qjDhIFv2P-iJ|O-~8I!XsvhEnQ4bNWfGojZ5FkU)#`Kyogdo z(0T}|{RkHTAm)p3FXDSeZ4>K-mLj+*i98(K2atIP;W32A0ib&q(im?s)Q%yXw!>^$ znkA2|H!S(I!LDH2?*NFC8=}1J3(HYYq4jpC)A^O2-lHX3LwQzZ!`N>J!cGKXZER}! zOU8?9cwt-QS#@QRH9*DSE#1umDyxA7y-h_AVdwdo-u<@-!>X(nIiIzXurb9756`nOvSD9nZCAb$RG|ImVql`UZ@W3g0X zVeYLc^#9ELEFT>BYFp)S6i4;#AU_GOB)y&85q7aH{H=jaT|$Ht9@+7~) zw`~j;KXV#>5OLpi(?AYH+ zo4p&G5+QEkqu3eSC*?k$+HK%394XvsxuQ{;je{#*#RfwNxcZhelTYU+?8K#m-#FN* zZUA2X6ORSXUz$fgi5KM{Wzu-lMIAY>69 zM0gtE4TNh5_;-(;#9)TOd?+A@fIET-_eYy8GDPzxY=}!>xwpaXGoRA7p+JR*2#VwDB)(TGQl~;cuYpexG7%Q7@cpuj&~jZC0_p~PMse&bpuHhA=*X_5YZ?kl(b3{q8L|Z?@Q*vOT1ZI`vYUsfG}WpMP2pgysNFbRzOB8 zZrO&M4|}k(gK~aDF?{bMtR6_)fHmyH{vF``0UW{YIJ^V7AeOG7Y;^yF-ZVJsoVhu1 zr)aE^tmL#@zA`yEG3zMBTBT-M#N2oO1erN7bpF-iXnx)@Oe=3xN@cw;JUpDgR4cBp z8dYNDCrjj&^`-n(vsN~WWaRn$_|&uGrPZYxVJYcMCM?nqaEMDG(D)sKBq)EFQ7*mZ znV6luGCP`Py3`R2az)|I#hY~m?7sBN-7ONc<5a9Eav(K;JAkgCr&{zxqjI^;PBah> zBvIW@k`z%tb?qV?!*QJ0YpJW$LsjmGl0fwU4N&ijY<@%3vX`pj-Euol?n!os;0@rT zRa!Hxr|?yI_+yi@R#H2+CD zn_~O&=g@n2)jb4|IefdN8`#I5Bj{p9<KbKVRnc9clvSOZwTbx8eW8g0R@Y{%K7)`1I;MYMpb-7+GH%^v}Y zo3z_WqVdIdo$Le=E_&?FT?wDkPy-rZr`T_;V|Tl7AI>bxdAaVbx7FM0L2`^^)3YYpkYZ(BV+8nQDDL?aq1_pvX12~UN_Xw=TIC%#0_Y3k-vRCqCjsC!!i zKH)7*MBDJUA%|hV5J?$Z9csI!vBXRBOBrCT%O6g%mMy1rv(()hmkDY zXJ1D?O)YUD3v=22T*xYx);0%=m3504=HSLHGtWMXPQeO0j`j~fttC^OJI7Tqs!oe; zi3&0cO~oSEQBRJ3sa9npvDaaqy%S6I=9}HsHPh5NGjECzmK_!AL@(e{k(ev&ajXyC zWXG|g;0-Y*B4C!o%j}u>SxB;l_&Er%2k}TpPISE?ATA&#V2s0DHpIS&r$;x0AaQWq z>!eh%OmXxbf0Ga1Fsg>BSIp*W++w6yO|$HI(Y7ZBx{^GWL6H?n;dSi;AOFK}?(%tk H5Xk%mdHPyl delta 1366 zcmY*ZO-vg{6rMM`#@@wV8+-kW4JJUrK>{_36pBI;R1i(n63HZ?aY(4~Y>JI-V7)*< zv!p5i2RsGSbtt#b^OI4|=a%rWcGgLI`toGaQy*K;b z&U|n7_ftQd8dwShRKy2)`2Bn&gU~}^JU=9ET;O-3x<4>_x~3GAnp#i+84B@iL!p(l z5bq8u_(_3yY<*{fBnTq{~ec5KyJDwb<(7Wb#aqQ1r(FdM6?vL1Dk%U zogx~b(k>!#k|4>w7F(G?^|Xwv^u}j^Yz@$6=qOW+=TA}XK01&@g&B06SFyG6Ev@bm 
zWJ_Czbws^X*#_cEB0ETfR9%$KKP{!AQbm-mgh~4zb~1uD!B(i^=uPBwurWDuzLUz^ zJTaAJRB5BW48mxx%@SqP2iW%4Jx4qv`0>)7EjHJ|-jhFvuEQzM5J30gR!bKso8F0h z{&IdK%Rcme0$KLD@7^n7>&$=v4+qy%U#%HcH?UYUiuJi-y|lnZbCroZC1cqtFV$VI zwXtl>6&tR1t!%7wa->Es#Cnw1Bo_>}qV&C-7J3YG$l?^F;Us7{<Afw_%C>5^*dZl8eT+}E~`Wd6I}aznnr&Vp3{Ou|i@| es)CKe-ENen}JU|b?0p|d# z8$8Ahn=<8yibG4z%ds0<*@4%)SvfEM^5V^IoPC>QH|HkKZVrFDNhdFMHece$&E-VS zx3&^X@Avyx&jpf>cOURgbyanBb#)#8yZ%+{=}Ct0cllTM&i&DEheChCMCV@wiAnq% zGaL%ppR#Lf? z!8)ySy3&*Dsbq2)Nk_`PmA+h`#G~c@%0O;F;<55zWhgf!@pyS%WqoeF#Le<>Zba(d zklP^F(aJ_MWP~bXxlKWd&AH8TXG?Aiu8DHCa!>A_%GTW0%DuUJrA)HCt#V)PzRLF8 z_R9Ua`zt$gJ0w3<9yzhEA_dew8v>!mu1J}*m^GNNocO$hMsT@)h_8z46 zAobly?X~wIwGXLt_I~?8yz>R*9xS*8?c?@SsA0i2&xXd|Q{R$2Qg939TEQ+VCtIkZV~}==Zg$x%m7U}*E}!Xf z!;h-sQnBEUo4$3rRxA6-ne(bxuo=f)>vX|A&r&)UvG}qoFBOziL_C%&EgdVBi{oKG ze)LLdex|VC_>mKp!a~uH%jMLPGj93PmXp#JKgM>U9e$Dx(hc-ecFA#7X?EEy+I*{f z(XNcg{mfK(cDYilx&^mXt2%zFu)M%z{=9R^AD$~09H%sYHDBm1HQ2i0szTM7uc?aT zC+BKQSMy5+$1VDaa?y2*YPNR8k5o!mmY4hi6truV{1Q5AsW|7BE)~an{q&KknW?i! 
zXU^u2oH*l0Po6q{{OB1!HT~=}`I)EA9GyCH*6*1+U!1#;5Bk<0&`DLixLi_2HkEId zB3BEQvY(hM7Yo%qzSR%UUE1%*OHO{SRxZr?3B)cG78c4yKjReTE1mslVbSrUSFTp~ zU%Bchu3Wu*9uY*7j51TluUthG8R)t(x2ERK=R5i{UoWa!e!ip}w?28cSj?Vx-6iMX zo;?dC_x$qg#9XbiXSs~7DqnR=bIzWtwQ}v!LwhclE|m7ns6wf_`|z^k)+)O>aN|io zJX=`;AgwO=*5Xoe!4EGkE#$(LrJ5gJLT1=4&0_#d^Yc0Da&Z<#m*y+usa&|=%=_WO zrBW`uP@2aB2QDGFz@T=?$MXkrRt5DTy|mz)u9FL2u3V~TKUmJs7SzfI5ggpNcjbes zo!q@|@9c`7JbCKy)XDsrsp&A-V8V%D8dx$rz)zGT3VtQsHVuu@14=J5z zgpe0o4PgOV(_JqxQK!Yq`P+fuMO6JlFRBeKqGHcjHIylFykgaOFEGMI(?&&a7}Hok zX3g=fC9Fh0TD*dl;`p(JqU)6EMJ!u$Vy|y3y%ADRpx(Dm&K*4Wtb=KMu2?QD7V>-d z?mc&;HYcmdId{C|KD9i1?zE~cVwUfJ_}tWq-BYE?Y|T9Ki+)x0k$ZP zV89$OhK#tOrjh9Cc8)7c$hay*ywGB(IhL!&tM?lr*YZM*$ZDt&UByVdu?-=PtZiHh zsrx+;5gS4X!y7{L=8EwgNDB-9*TWcT<7{YcdSA%df7viFct+@X{8$DvVcfhG#k+5@ zCAT>36oc`&I*AJbQ`IeAaoJCCXL)H!6&=S<9KAAETq4x-ExR=5`tdxz8-qXZM@nuH z04-H(PiFL5xKvs24abkGnueBZ`z^tT^X$|^c&#&nAZn(i;|7f6w=BziJNdR{_5Fio zsdKp5)sY6QgxwxHCvj`mfS4n9?)L)==)9_%XX~$tXo)^kcv)Jm&%2?V)hai z3;H=*o4*reT)OI>uT^(1mCEJXUw+K>h zaf09C;M^kkQl}V;F?Jfkv~RH?<2?c`WbTA9i4;j`zxUY56Q}bhj~<)JpE+^-sTn_c z=G4s8%+dV5NA?CcXHK2YKY40q=F~HOVg@kv?CDcykG9I~f7f2n3e0}*(KEOxWFOLi_5M0scJ zSEiVm*9AU+mpB9zX)Mc(p{^i$?5hamPexQdiTGS!kj2Y-{Ce?|4+9?(@)9cL^{Ny< z(3VfK&6Qr)^pe+&S4a7Q9u}BqdQmUpCA^qt0Ro1v8(!Q?dl@@?GriK+3D>G|K($?+ zP|b~arW^J8fETgo-PmHhkpL*@cO!0+3)fA1{kC=890fMCEdYd*o>?{Qh?iQxdsYU# zq#Xqz|C}AFM%|vpjMwM&+OeDF%Al84vudf?s z>Eqe|{v(aQ{+U;^yLT%fz--CMmVl5dKslw^axr@uBvF=hjwV*K&iUo}`Eqer76hvc z=+ex1>=Or*o4N%!ZEB=CO{MlzeyOX~Nv21d-;DXpAP**SE zrk|=VSAyvT8o?=4mdZutCzndqe5F{aDNv3eH_O1h`T44!X--niW~LKF_Nst6K0cyG zSzqjOK~>RwwS%ca_KLtcXR2zSEX)@1N#Rn}1%d|_50{HoMJ(%EbIW$YPus;yr8#_f zZMo{IM|mSLx3sL&c%*Peaq9X5N7}^yS>0rnmK!Ik^IU1g_K5h2(xQt1^iiX>gs!#|6v9nV(>u*bp(Dy#!g*hhQ|yGEBgXE zLe{gRUX;9x%LQ9~gk_}LVno>=d1B&{G=cffRMJn1{uza}{jdl`OFGr9u^Gw9v861U+^EY&MawkQ2lq z?dgWDgH-m65}?XwjE@skn+?D++aLwE7`bk&f+}MvQgU7>9}57u-xDknJ+rWCBA~Lc zD&qM(Cs#i2M{Bbrhy7F@03FnmtB{$=pAp8mIV1lC8X?aB>Em`B(^4;$FoAvJem&ht(%=QEaSC!Z!@{6O6B!ezANWi#4DBm$>J!%Cu1*y<;<^SXV1p 
zoWnE6XNi(uL_1u=6GjOVn>aBl+67=Qe2ff)ZXM7+R?|XJ8tGf$EL`<}qK0u(TIm}> zS4nFpaYOw$f;aFJLf^dB$GQH_-Fi$$1_O+dtzybyl#=;;rDiV!`sV#~K96Nv4(^}_ z?Ajd0h9jar-+Y(f6O7tiv0QGBoHXYrS!9v{r~fDMb4W0}VA5mXm@UskL&{Z)H)%%=8=~L-Vf(5vFC#AZ$gy z*$_cM(hBK7*;t8!S29;a?}vb5C1!`uMKGe_mH0|xCApFUV2oXd%o@bjHV|zwGP5A` zP@;NYa4&*;4{HeT#VG1HXEmR$4hCi7D02cI04xxc>3R#uu~0th0TkzBASpxnxCelo z#|I-!+Ps_cq76{nh^5KZE%p#PfYDBS2@-Lt=B4e_hEO%4uU=x9Wiwt}?tTcl>EJG| zUR>_>dda2l#ho7Hgz|k}YUvY5WhB*)`g`%q;Ma#=zuo&OwrN06Lfem?jIs-mDKBzV^{vQwkn=X}0E~=90tkDnq z02s2eph?pYGno{oPzz(N!m?ZQEr8~tADIVJ42sy9b93=Z4H&tm9Q{Ihb?dEuEVw(~ zqp?NQsV%FyqWaHl3*VuBi@|?kAWe%3!|mMZ6V#VjYE9=5D*QZt&IE$E88^~q9|$b| zM(`U2d)fX^8kz8BV>1v^#uzi|BX=9duHiCJF@Lh2IRz-)&_gC|xf{g$LlhM}ZQc1z z0_?ip#L836H*qGAA;ek`qG{1u1#4&n_I?y_BnXX}BS zxWxLHfu0Y4@#UbW>z>76vZrF`4#({Cb{oDjSO<}{uJ~y!lk%osEtEQTz z8ga=@qpYqm@ET)UD$_r#i5btUQcc?pQ1`zWe0 z&6O1JBe7?{;MM^MYm$Qb2ECM*me`P;STMZQ5U2rM(?cQ2T_>1#yu7eiZW7jS#{BT&@coZ5C zP%wZ%fC+>07TjCv#52e9$4;GjW@_f_ zoiLrBJXB^IAlrHTTYM)Q0L{hy?J0IAOxt?qu8mh;MMdiWMG&+|{SU?MeYY9#fSp|UMiv-Nu>vS*3Gyb#EmY^AUE$|IXt@mD02o^85ICeLO7de+HPp(N za0`&52+@VL0Kv9MYMR0l$1W*#f`y|cXCA6vKU!E?V%dHwey3~hF}C!mQZ=PWk z^z%( zH^h)aFOh#>Nm-2lnlV{BBu*8HP3RG!DJntSKqvvD1*nwYVLl|>5Pg!M@uNbyJ1Q#) z&@sFt>+o*0k&s*J|3P_3I!FlVrTlML0bb+gFBQt_kC;!|NPU^X8rtM5$Xkb2ItLL+ zV*y}^no%rJ>P#&Bl87;vbdvSWOk%&8vAAf*Iw+_}c zRoVe?5@FefMe-PKL|kJLLyJie+o&0AkQzwoI|-5qD4i5RO9V|HPf7pjeoMcX0RM1d z2@1cWlKy`cFH<%Gf9UYk>HO(4(8C=&Gxf~T{NYp2PS5C(A`tL5wCYU^mivt z(dc1jN_1;YQ4YImZ9j4_A7ekp5fR!FJWhRGCw6Rs;k)HBMgFew4kE!2?OmD$XLANk)BrXWz7o-v;0D zxSljoMA!*!`5M=vx)WIcJZ_xgdpYZ(Mtz|3U0PH-9=VMq9#5r1-U@ z%d%5;8kBv+?y)ntM(tj^57(GYHg3RA@dMAg02vkMtrXBdBNHy1Loy+y1ySc zN&B-A&Zp5?5dVt=JRcTGlXZhjEpmf&GvF}7m?GKXL2hE)Xy?e=BXQxE27lB-F{YuJ9HHJNaj>QohEYnS6aRcf~ zG)D0iA96SH$*SQcm}gUl_xys^{k1?Ti74=P;_L5ZtG zq(m;BbWw}Fk$2pEjr}i&UN&Ba+yi!`)=P1e&2aSbl?iv>;)Cp6d(%yG@qisctx#KI zj32rYdd;XkfwHh%$v-R{*1MQ`&2Ubk_DAf^texOq<{Ma}#O%pNz?7Tj0A#Gt)`Cz&Kq@dB2&FgHV?`i_oO)gnMUHO^5x4DS8L1e@@$dTf%Q~pA})!ES(=`~CT;#|eO+)nZmP2V0osDlMKgZvzvY!!(7Q$et;x 
zpZ-Uydym*tey8>I9xH%fv9mD8se!}EQnNjO zi+K3fW`0W8kGfFpfh_tQl77@KK%-u*C&8Z0&B101#9BP7NusR!WfZAfS!hM;;fek8 z^+@*E>9gb7eghre`r|wx1|G!E*@{5sL64pXH;fzR4NM{smSBz-TP$&#+yhP{tpy?M z`2uvBoFkBT33K(I5!a%yHn8V>xr)~dn%RocfzlG3`tl3NUt=JlVKeL!ZHRnFpl!j6K`%yJZAdF7NUrE)=)ukM!T>}(UTk3Xsn1LCuVxeMSPVI>R` zfhtT6W?>L`zEV&ZG%4C{wa}xQO@(Q!u$~Y&6zn{Wo4e>eH@SehafpO2>3Ag7?9GQ8_U)LhnG&%RwQL(&Ax3f>5RX@nu zN#TNjF4qb!#N;&m#9Op2grb#?B5k3bVlc{}iU5-U^V1DPnuyO`fSF~rbaA<;VCbmL z8QP4ux`}7T`!yN9m-pL!-{Q`N*7S|4Z_!Mw zrKCrmMM?}CXr872w}Qd$GY;T43K}bp{~<9%q%1uN*1He?X=9tQ-54;(jE9j*8k@tT zxEn=TQ2`I#3DnJ5cwh66%Zh0s;6H~533zb6SW1wtS>Wl+2DDP@yTFY_E`-!saE0Jp z!5x~^sR45c5f7|I;`T}8T7s*hD={w$-ZX~VBA#(5R9)vr8=xKpr+_=w`IN3TOW;|t za0JdtHPVn95-;huKdayF^fmEC^Ew{U@&F-%mI+LDDzoVUna_<(-Idna<|GxY~*axr-+02}oT>X<0~vJU0U4IL@dn za-y7PZtNolN@_DY#-LB(JP~UgDw09L22_bz-PF1h>%IeHJLpUC553AfG`)=;8}tth z^iq&T)7}=S1k@3jSCmPK^Y7MNtTVsEeUH6xGAdrDuns9qRs}Smqm|Lid@jemvV24GytRCcJS=^!lU&(z> z@v0w&d9>ee1GTBz)v0p%xe_+KTy|6si*lXAXb(GNwEV`ET6w3;7zH+pC;}31S=}O{ z`7(2bwh)eXlF6?i_yH7j_>mdFZdgxn$} z)pky8D<@Nnngwg!jh+4sYEl0;YS5yd_N)_&CWsX_v%t@zKqt2e__fAfgeXj7S^^2p zSAU){`Sf-$$vo`g3!TN!Nh2lP^aJs}xRvgcJ6*E~r35|6VmpWg8Cftf!qAc}3(~SJ zglW@!CG?6xhGiZ~D$TACEP$UTsz80PI24=AuJ^DbTeEcn37M^fSUXtISk#!UP`>~` zh#@~>G!wDI!y!FuF=1t*-NKVH`M@TCO$Xsgt$^Bt*J`p88&RABezUq`CV7|Z?Z??2 zY{W#UelO^={nAG4#KIy_{H#x7PulWb8RM?)7Ll)pcDLT2l&K4cx6mPsoJTr(9t$Zh`ZL4*>2 znOdI#spy$EOlTP_goX#{NZ#~f2*X}X_1TmpM3KVXMWeO>p_L9H4f-erX*u-@ zM7<=!7yz9~OCR;1n*ins-$X4opGwxRLKoXZ8vr*AJ?x}sv~;lVUCFpT9wZbi886ew zfcAR9>v4M*d!h5p*w#kK2PiEQLZ25J^^b0JooCoE4^NCV-WJ2A^g_C zKwO?(To1kJ@Z!iY7Kqn_1%e@g;yLd1a)Gd>+f3R&)@jmy@vytWOMfyXQiNn65gFAY zEU!mnaD3ZF^vy=+bC8~(tlguv2G5168?BH#cJW&5Z} zeH~g9Xn;}bLw%Qx%OQ6&>$}dL+!B1}fAcccnA?N=Ep|VxwHDVmvi1P5cYhsHm&L6x zt&iG+H_gVq*u4-XNLU%b0v_6czF*vS!F0aybcoc1y>3Iuy^r&KT_J-Y^_U4-r zyftKR*&ube4>We!eU05ZXB7w?^zoqGf5AeVs~$pC4SGE*Z@WkO6V@Rg0YJoPZ{=v) z_j05_PfeiRdsagq51k9IY*-mZ|L%47x%V{odmHS2?lfN6=xq#Ye_d*SJ*a)#+S(x- z+#nkC#O&gWBfJpuE0^vfcMyGcNuGp0V)sZ{YgS$~JFX 
z@a%SJMKjgU)W*=tecpYIC$WCFL;7ae_seSC&hcZ9O5TpHye*P9-j(;D77EEuHJ@lG+@q4`;xWjM#Y1ZxS*cj@(i;+7jqlekznM07ZZVY)T zZeY$dDYFGJJp{e zGNb+k;cYYxB_QMn#PYUDfj5mwR4*wTASBQ;KyLwmFmAkMPV9l)GAWW0hr4~Kca(jA zLdAfeq0u1Zi)*Nu|4Oohgm`@~1rMT&ZEu44_OjMLyHoQ?ia7ioK=2}@)Gw^NyDdxu z+6@;Z%BmUgUtth^mJ*umK|HT0#HFp*7nz_viZ+@ku+^ld)H%k4%UDCPPT{VPn?gC6 z*Y=0%d%nXDY%**0-nIdYn61=D#2f@N+X8oFisJzYMr#$V$@Yz;CM2U1bKEC|K2L*b zsa?)5mF6zM!Xy^#$n-7l9~LVW>`!+dClRUnAAa*&$+L5XDmN!(@2?Nv-!!42#8^|o zlxDvLi7f3?c4gm6W-BGQ3#cw29?uplOYT*PVc)1)UUIQD(E@ig$1Q%?lHf% zrSjVD2BJ6zn}-JRz4{d$W;&z0AYiGA@smt0Zs|l7)rE=dZBv(H |Ub5uAW=!5W5 zZ3(pRV@UbN1uTM3v4o7)Bx9tdzs~aCg#e7$JmWlHr?q^nOZ`AYUXi~#Z|LS};ZB?HnnX3yykU%xH1Do&Q!X1Ed~r&10%9ekF7d`4 z>KyrBs1w%L?_)K0t82smQeD`M9r$6mYlWkCt&l@}7C(_?33N&{bvuMhO^F2{k~L9X zx{8746Ml*yDeqv@Vd+YwSl@ z$hbSw7;w;K-i*SzyJ2J&gJAA?s4rT&gN#~AEpV^1&u*(~B~RaydU zragJNR4qDcmN(n*>zjP)Zy7A}#tdS9{HdcSPv@UKb5b}yb(uFj1}^Vfgh8^UqykqR zvMVzHF5)bpxl>cGW<}tl_OpKSSbp-!sWX8|Ut7wqjp4#|v@{Bhz%p7{=VPDX6WDrK zor990N=1WA#Xn$nRBKUu>q7A=G#?P_*Wd{Oo58Ua3Rj2PF;O1)5!tII;!^dm`6$2; zw*SM5j<$ND<2k^$y)E~+(G0pX^%_#5H-HSMSStJJV8@&2ZfI)^1qddYUc|JD%b&lz zmpdVG^@nt~=-H8XYjhoQ!B4W|^bS@O}@C3_ip_rqeaXKEgnz{o{!FhU*(eY+RM$qbNYV zn+0f00du|zentG|0wJ@lJN`Ac#;lfobCyPbv$-B@^er#j+=q)zdAY%^WM128Q)&$Z zQwmY=eR}o##$urQD5zWnUR|(HAzV-;AOpbn9JDi6@PY+6xtO1;ss+|yEdghU3Q&W( zu!iL_n?-&$$@WvQ^m!C_d_S~HS?u0tCb!mJRR+($$Ro;BhoWM}vvF~+`SUK@xG zL7tYu?t}O>^PRM{6()!yFj3qJart`ehZ?f_pgp0$dv#IRNlVcu&%|!Q4{YK%?!0ONyIZ@8brcf1nT3@y0*hiqXrrxj^atQ z3irj@3gY{%h+A*g$G#msQO`{5(W_4r19OyFH2)|>Y*{Y8jjk$TzW{6kyTd3&R!grAFyh)6R=B!y}8s5pJvc5{Bz_FN}p&e#*_=r{PhS+ zxU`?3mI^zmz!8LPwh(Q>cf^{#FdspF5Ax%x~f zA2^#)ZNn)xmK+IKT2g4q_dOT#K(9jLu<K|qW7 z%L@A*iLhbBJtl~caJE=cTo3|%1u3ajG{PSM5~JfQ0or+4Kk6o1CwNhCAczBvl+^d} z#y6Ohj`9DUC-VLaA_>0K~@);i>9=c7V0@R_7UWOup?!l2c}Kl z%6Cli#^({t;uEy0V&k1&8SK^Wvce>RlECjUe$H3$JQgSxV_?Ef?^Mu1)drwq3`3}4 z(thNEY`{{7V8krWPQdg7cw%`)Tcscffg=3f*d_zu29X9zQb{R>*D>1~5k0}DjWZOj z$Ww8HB=UzlghF&>e`o()89u z5ev41T8JeO>yf-3rXgC!)7?)Ac+03Sh)FlZ-Zo6g5GEu{N{G!e$boq`jPYXn+u?Tv 
z0!WIo)gJ+*w(HCUEwNw}&KB^BHqipo?(cfLl+0pBiNBZRND8e43L#+Yw!6!Uls zZ_Mg9%5!MF#|^yqaKydnJa>nUO{*8Vs)jM+wG@DQsbg4RzlCcKI|2*k;xbrhTBX^S zC4}(6E-ql3H3(qYeoD_LuvB1C+DsJ-*hmE0yXEsQqW&Y_@rw+8iNP;3_!R~pz*}Y( z{^5vDgIrMEWv~O`;*yh#z%ya~yuJjfOXFdr7a$GDMI3#(#0%`_ z@z}C0LNA>tD{Xje83ZnW*r)QMriCG zVeAJP2pKNA=Q=AB?&Q6^L1CNP&VcHE4X*BI?0(+8&R~E^A_VoHnEZ7H3Q0dA#vp!@ z(w#g^rJz7AvzTa%iHx9@CZ+(hQMHR#I=- zw2tB@5c@4eFuw(2;|d8S<~blXui>y6UeQ}>%wT)GMJNvQWvcRPa!;eyQu?HI_qcF-f-Ch!gLm(9qgBtVLezSO=oNaoqs&`SLI#%^qc+Hw>6@Zx9bSVwUDc=!bFb z@Mrl!@q&|wgF5(}%A$VVo3dyFGY{zv;ltO^UH=SE{|djCh6UI%fCU1tEMiS_LblvZ zHQhsL8aZ%<)s!xDeP@%&uwwSa9p3&cj)`0yp@7!v<#iFX7Ex*w$)ok}ZffZ0EW1B9%du@Ba-5X#|&la>= zeYbaj?d9`?95#2JYCOt9C3 zoM&)qFr*p#i#yzSV;$fzC^)XtweNunGEC|jf`!b=I?Sr|@Kgr_zZ+rDsO#)B={YD; z6+GBMFu`r#*!}?rCwAth1sXZYH336~_Pt)g8Y6aJaD~Z$JVQ#6&yrRIcEKhJ%yvG{ zXT>Yty4(HJ64*S*MYt2ei+J#bK8mbe+3*Gn5KII#(|{fm2qw@&(fG&|Fh2c8Sld4f zff9q=F?A-|GzeA#sIr4^n?mxp06*TOXwkg3BUImiSIDTToDNH&rrt?BK|tj102pb4 zkm%$V=@Oumh%Hi8(uiGb8r~XenL~{sA;S4&U~nI9PJ|8(2wt%b<+Og#(iF;n;5&bT zfzT>#c8*$_zv7qiJ!7HzgWvggwJe~}`Y`F6b*KV*Aj&}RN|wG6z3VMMs^#?92rAg| zBW$i7ow@Th3Y9f&`4a0Fsj`rbM6td#zYH@%O@b{0=RKm$(nZkRrW{4^-6kJJ#_)C4 z_9|cXs|@%Jn(8BQ*_0dn7u?FlG%0r=muTu)4*2HPIp4f8ryC~dN9e?VgJ*Lz?mv(V zU)fj4g|9NWvY#;qSN6>^55blFjP0M*q@^@aL=zk#?4TpLuG)hq(^5uok~e+>&H5q+ z&UqhFv|1by87kbgnep&;NKk3dxC>VuCXKy_ZxeLCIUGld;s;1b!&%UXV-_h(iXxi- z5I}q*p1d>FC`5ps>UYPEZ{3a^!N~&)!UY`GIYpzUU_lFu7Mq+2o$Rot{>ak@*zi3q zY>3Us9oW!#g-)XAF6zDX>UjeW0D%o91U5{xuwe|CQLrKQr$hih~J8G&j2O~O32-Rg1f{}SAY$$MUKz0gv6yDDfLm5;s&>m;SQW$uqL^7 zze%A~5~V)fEal1lc1cM$+x6WjokHm^H0zT0!a$?-G~>J1BQ034N3bB`7$=CYz5YeY1dfA}?_Z0ZZbW3iQ5aZo%{^d2Y?6C9 z{Bi_XkRDs&-hg1i0bs#CuRp+o{T>X}fDhwd=0+HF5wNQVQ#KtFBcLQGiazFgy&8uH zGYH(t#Vl~F0c*)#q4T7E%u{0Sqz6n3ENRif37!VFZKqf-=&~4v-8S|Yci~;gu;$GO zBv0PZBd~@9&CtZjQ9IeXH>mHS4%F-oEc1nc{)7RspUN}%0E1Hu3J7k)h{VGVes7Y25!_ENj==Udh`%09&f+4hdWp$yf`sKlqisUsQE7;8o?n7cX-V6wbQ2PfcJTG)m3<&Q@Vl}f*Zm!&!(Xwjf6bsx zL`2o!F+(VcHyQgngKkQ~Pj}$+N244xlBFamsFaOEE}%&Ys!HSXgDRKmTp=+?W=sU> zZP+rg9-H5q{FXsmGN 
zKmcX^7TimUL<(+Rwc%-6Qx%l`1b758p+ljIX^}~QUtcv5qtqHPa5CVZ;07Urt^yTy zlRYLAguSAkM3SC^{HgH!- zOWQGY*H=@QlEhEGn!5<@@YSH5Y-1Ff2TCWP2!#xtn3#@abWHeYQAAVa+z2l~_V#Je zW{}n;gEV+QNvDFe6{OQpOhc0ZuJ8(UAU(G6C=KW%jR;JJaB>yxD!|v zX9o1Qz+DK~8w@E0wioLANjaMYQahGj>8&2YyZAo3gE05+2N@!1mXlnUw?HixH0tB{{B zP_rwWnf)H@i(9C|-?@TYbw|HLM4sFq)Xa&qFz0jN;K zVCX>3JYNIctZAS|E{rxr`_!FQ-;bd6&vW(dMWhcd3JI_D_Fc+AM*x%P8_RqLUdi7{ znn%nd!XowI$d<9Y2+-TB^Z-2N{5-qo{b<9n>0|g)zreJN_LGQ>53k*>CA#Y@Zuvc0 z>Prdm(X%t^zw$d$I`8naPo6le{u^^Lfuwk9`f%>l*#o&jDpKGWoU&;6%;(V*j&>JJ zz^JHk*3MLZmoed(DGe3gQT+pJOw&B7G#4CeLGYy5OC4~l4ZJ>%8=u2N;!GtjDr{2# zIe~fSfAZ_04-X{c+vAD&)A9azG=9)L3AJ?;=R~lMTx_m3SKGJ$kuJc7acPT?32K{u zx?yi&P)z`mq;>X%8Un}>h7h7t9AgyNbI{vi0$vq?2)K_GPnxe|zYoQ&vT%E_!!!c$ z1Jwn}Aa5~+y$&W8H}-spsNav!@0(kX?b*3=5Bvmbd+zbHm<4J}z(>(G4eKf8r@b6c zBZ92H0>>~oN~}`2P=wKhqfH?Gj%DK9ttegrESe?2?1Q+%kJD)pZNwnLfH#MZPM_rM zezpY5EZ=fnxXj6$!My=YUT!d^sNQ-{^R(Ph-d0+8qpSMwec|8>qp?d*~7q@Rbr#aD7AAW`)$N8==|h zUmVb$dSI3`*xpV>P6zks%nVxm+l-Jt*lC0WDJ9&{p#84`cGKPT2z2`Ty>R_%r=i2g zHYtRm#SNG?J@`g_rR$l@>rv>S;Ba$sv|&J(@j1p8H&RD_lRRXq*&EP3fyRI}0=PfH zj&LW>ueST)m}dZ3rw5o2In+XXJ&jG=9z_9VV>7;Li#r1R+vD~(vM2{O!NVkdz2Getghm0PR^p@$pdW(Tc@ zyZF*YY<87KQi&*HjLvdY;bjKjqk0*94P~L| z71xu33`vgx28a=~-p7zdj#f_{V@&u6Zt&Df!4r(Rydj)IACn(t@I46pn0V26aK9!v zMeX-9ygAF8k;(7DhTtZSWMUl@(sQb?2m2X0Kcbe5I#(JM0H;ro-fR6wASo{nbH^#=K3(b zW9%@--suAiKzq&%w9sQ9{x_SOjVDps`l{NBF0E$*Y_Lbviq3d=;Wxj?zrT25;Ya={ z|Gw~q;Lt8>OIljCOp3>Xwqu{B-@_XVOgT6J@kSuH}X5Vr#dS; zA(C1AJ?p@6=|R0|Mh@niXde`VBa3j@qH?o4-<@sMfHTJU91exhC;dr^IegJUMw)-8 zkwG6vdTPM4PhgAKdLlTFsQu6kce3olu`T6YS^1ctM0N+k1oG%!R==Mw!KNG@L5Fj# zYSr>pRC5ZKERKe0zETcZ!d{tH!EE#6r43zJw+Zzqoa$0c(QFhL5Kha%{Q!eB-3GUX zRgk3+s7`}4z!AHh+oJ9!w)!jq<5w`O^|4mRb^yhe;^|LOt!5m;n&;qq1Z>0lC>HdN z=%wIIYp_*SUbX~rr0f0dOmwSc4Yae^$&xoZ)tn)6_*5`tvX!cS&o1;S_jTyGBgzzb zuL{br>lOFo_y)Ew;giJVbsC=gg^c}MHduDA2*ecu&L!skIg1iB3TUPOAaFPra3-U| ztk&e0aPIHZdpIgPD2sT(oUw{t76my5nNbkwB&cKJ6?Bzz_h4 zz=gJ!u&jZxx!4;bHVKFhsvsiH>!NylxtCZ3*u=bzUJ5x!aW}@CCe_fqYtupbOOU$Y 
zAZP%u2gETTbRFc~NhJ}?K|2vv%Wk5PY_#zhyEvjlJB~I#vYjmPVOPuqR+1@~SS)@itPfHX^JZCc@i_7{`=O>h$Mf5B+KA4dDIBnA3z&!qdfS^o719O)Ah z9fK{96p z4w$R2D^&p_O95!ZmP@)o&jPwXz=wAt@J%&)>untUD==90#J%(TzV-C!qsP@&LLoDk za+YW1j3huzKgM&QO77L=UOc>J2SELvgK&{0TSBv}0H>Pa@M8tY+Qi*<7Z3qZX_Mt^ z?Fr!b34AfpQzv;In1p{@8vVD4ibNl={dQab4xH2?)NgB)Ks7YZVWVCI@%BP%2cjLi zV0Gb1Du$@k5oB;T?D#u`wiZg7*G5D2`@U_nnyB)&R`K~ea4%1W$=Tr9hcL~`%ay9LD_iex z#;kLfwGWm!3y`b{^Fnv;@8Di0vrI5b-|*bB2+j;obz zqS$aL+g{{&Vi-&dAZYJ)m$O$dX!nTZW#klCMMFoug;WTU|H&II=1}%gt{q#^IguGu z_p-383E?=;Fz+yf-()a@yD(Gz9196AnB~m)2$OH{{@*k9S;l^x!Ot@gc2O!5%7zT0 z9~0&p);K@G5@fHnvg5~?6gE-1dy+T4fV_Xl={1W~(j0?jGtDpK;0AdT5qIaM5yP2i z*T()K6XTYbQ7A-4ac_)@k+fUoom9e91jH{P zb@L1iE(jby>d;-O=E5WK7i*2GK@`rkqoFbl^{nFs3p5Y`n`P*-41{Dt@|BZ zAvl@l*O9fHf*2M)7|4k>vs)vK1H8E|%LveBN1VINx;-J*g4%7-t6n04{#Y6J2 z{nCaE6mf3F#6d%lV?w8QlXN}orXUAS;fmcywB$j1Q=fv3SDt&M;Bs6fis@&jIUbxwC&)vev(T^Ub{YyQPGIfg1U7TU?3b52}@)i1sD)NeIuh92|o`JJZDZdxx zqbzU70;gm+3=MMurzbd*xQDX1myjAU`*4(t!5ITsnwqXhcgk|Dk6=@)j*5UOSXT57 zLb1hd-_s%nI*D0ITj0?$1Y0!_v>ikN$Tg9su3#u>x5@!-_dIDetdoF}ow%(};nCR6 z3q(334!)hoLE1J?ZK`j125w(UkS%pKzw>CMv0YipIkNjY3uMKY3@E5g-{x@~nw0%^ z4-*{-nF7*}@@eh0q9riFqBzFc_7-o+=N@Hp5=k6i)cUFvK3#N6*hhq^#p8atqrN_R z_m1c?{^YbGEqfS0=OYLpk;U-q3Ff4E2B>FnLk-T`Wj;0uIir{sm?W4MRxo8?A_}b> zJQXiS?T9E>j0V-{5OlzN4*~?|U9D#TuJC01bpS|ss!QN~P^AEnLe9D>Z^Civ5dLE= z9;^2qC6IyVVXjOPaP_`jdWN-tH)e-WT~DIzbrXc{!)?Gmp=F@JPleez)|h?f*|Rgc zI%0(;_QKsgGvi576aqwn#3jnn0BAP>5bZwvWFSPx;A`=f$6GHkIF6x#4DcsVOt(gc zPDZUgQ{Rn3>zl)sfwKjYOK>`kA*-*yZMfu{Z(W1A%-vDY5Z(>I9-yE_Zn+BA_h7T7 zY`%pG4|-Fm@o*LpPFLbxZcByOLGAJ^o-xOK4+gkU$T1ETnBI7WaijVV{-k+U9SJT9 zc!;iqVNYFs1t=*BlyuBZ!v`=Fjszd{G4Q&U(PtXiNE_7B2Yx6XC=tbn0@?)jzpFi$ z+zh-WSZD=KEDG8av~1qQJ$N60oZoUYjR9<+g+>5|$3SAiF8=6;U?3y{0K`N9kem)I zD8BZDq8|0uevAx}&>kfGxVV=GYqNYWvez)@7zntZce1BXoNOtU{iGy4*{e0)ef zfqL}(7+{PAj5E3SV^nAz0_$-qHEF$zejRn*e)^`jynuv;2 zJkBL8TmeR^z_SaFF4IK5&+^1Z^kACTr>W-XNlDIE)j*>){cz+Xu_Ql@b8su(&<8w~ z#2k!+6dwVpPP?%IA=$TMRsvsr^A0#FsHAIBa(R$85*+n38br$HrO+z?6HucRR>C&` 
z+ab+_Vn=oxu7`ah?a4s{UBu3C-t|DaG!;GKTvPBA_E&L=!dt31>_ly(v>+=?crXYc z&B7~>`{ZFOb^`PZ%DM^ATTtfMI93=As^J)2mqgm?D4qJ0K8!w1j0h_s3wwKHq3|ts z&!^a~pp71kQIYDkGgw4Xx6h`nQ9o5Wb{|0?sILK75cNZT+6#^NU}H$0jp<3chRS=b zlgfi~>CftLfZNz|AeGQGkTFN60i4OFtlnUHY zR567j592xYw|Jy}Y%P$(F4p{_126F{)Qt0yP**$MM%5FCngSp7i>MXQR;*Y>;Tc`3 zL3QA)neies;M+w|f(<~2{{OsRSAGTGvF0_PSEbPhns3UO^&Q{oo8Ab+860(Aei+gh z7+XOScw5I4Ka1>yP)r68e*9XpCW?e1$_5b+wgbLHxm6J<8h!*4Co2$#lE}t*CE!#D z4yqU2^RSbo(1}K7Rec`d5Agd0UXj$rAWJ8Uo#aoUr2h0t#z=QP#!u$rkHYqU+dI*B z>bw5&4{r}?E=dj?v>5p8H%x7#P+FNts%EFY`Z0aN%^Kr&FtbwTlWA8(IwMBu9?P2D9 zHw&NPjhhUX8PNZs+Rorj79sQHTWl0~C+xh(jyHr(^8f~-H1VW33x8bN9;Cs?ei=bd zf05WpY2JyL2!kpeCWO)?GW>WG2JC0Ak&=UY&7$ik6PO6Ds?7M?$q1Z0y=__RDLDF8 z(n9K;%r~qwT~7%&^^OJmWgKaC8%NcwylZxFR5?8C1!oC?Z2(K{>`gz5POirSRjPWPsnH{~YH=byKfXJk4g9$Bsc+yTD zc0*gORS)6DUt_V$0oE{WG~i5q0GxIbe*@`z<^CM1?)qb=@lRC!U82R9Fvy^HUrOXa ze%Min6eBMK@8zNxv^dVz2~3ok*`UmX6uI4ULe*+6 zBvnBv;YXw-gjaO#S#>CYMgeGt=FVTjsk=D9s+A9^J}e&W?Ce#D^ydp;WLwXQn?%|I zG;5eD&x+9oz%34NxxEgE!vsbVo=Ppp=D19ne~hD_HGxOVVmEYM>pBc_dsrr<$#GmI z$eBTyBQkB$TRZZ=f!OA9XlWC!5Go)CL>;#PA{NuVjAaog@qrvD`q0-g(iIkaXT}e! 
zLPdQD6S`{=W8`HK3t`g+@x+THYDAq4g%6gf2uGk~H(SRI1Qrz^M4hzU5ayd-sGU0g z5@YSzHpwKB9ao!B5<9V#xOIS1io` z9ly6d<7LiFGOvhFm}4_Y;}O1QaMFn6-uepf(`J5-_nLn_h>#Z+)cR|PU^N0wz6yIu ztVpa(Zqu<1_&&VdJY=|&EdoTL5FIA5*RkD_eyJPL)sT2vjLH1qsgw|V^W+HIgmCj+ z9^p;ci!J~X!9)huf)FG=X@{y&HwjafI0W4hn1!Us8loniQwhn1o6+`ou;PPvM=w@8 z2eV+zS!||Efi1&%1#spSC-k3&m`+xhEG}7Qvcf3f6oOUnlf2I4M-iaczlZUc7`)8j z27^}^e2l^OA%Jdg2jlE$?P66_fr@$G&*1-HAg1d-z?euEdKvp62LFV?KV@)$!4ETd zh`|*EICSeER8aD+jgg`YrS-Dk_p6Dwc}C2-Upc3imd5a=kB!_mhT z5&mk52Nd^&cn>6=6UvH)$L{%ZX;uV1@a`oC{h^$kjOV9udLM31K&1!jmoU+fLBuFq z0^&?Xw6iLg^>IY~K>_#^)#}9ja#d>;;X4*56eing6-LwiR0qOW^gif^A&*jG%EI-f zsC*6($VA(CQxU8yE<%-IATy;8vD~08R9If%!Iqi|5SPl@?-mh{B2v=M6xK5~%wPn8 z-=iNU|E+L9;RfCqWw4RK7=uj=sHV{eiDntQhrw0`_cGwL&{ojf8M_~W->+-dh*08s zJ-o~z{BsujD+b@kBsmv93c!XFI<*I7ne@k4;D0dS%=fLCspDtWC-}hom^sbh9D|Dt zxG>ab7|3GzEMq^%;Ex$>;$vcV_;Dt=KSPY2#jZ~j)k0Eg1ran}4mSJH7tyg=na-+Z z6?0N|teY0@_2Y*sHG8>Sd>k(cIb3|u4_Qse4b3b08LeVZi=iQw<&E} zVGE0i-X*C@&%j_COY0rf6ptt4qh_kPz`C!=zZp1?8O-!%_GOCkXZxa|5A(n8>AvL9 S*7#`Vz>sU++xbN1oBtoJ0MuUq delta 20310 zcma)k34B~vdGEbF?R;mhd`|o9yR~Q`5(z2z6W{;l@o&9&CEBGusa;vQx9rIVbD=~?RfH<^ zY)vkl2r8Z2OLQl?krT`=$n_+8a(S5U*JE7*-s zQ*Z%?dxGUIv0cPO?*%n6yhjlW#i9#}Sd?CpS#;i$xEm>p#S)|}Ij!Fbh#e9u#VT}q7^(eYHBwin7o)u+Vuu(MYtYxd=>f5}QyEov ziFIN<5{^zMhz;Tv#P36VW2YiEiLOzP*etdn?S8RU+=^xsVyC!WY(vTeLOZ0yciC$^ zf2I2DUw9T=-KqOjb*nw>y;Tj`zvmsWKjY2Wdwk#Bycrnw>avl|4<5;ymvTgTG*^qC2H+n{~He|7~`mkhQbVh zntdqwvL8V1x7XG@JNN(v4*)p6T-rQQ5biq6E=H7lGG!`nOcadtF544cwJw7SLq7oE zKe-=Y@>YP6IyZ%3Ghi(~JMl4ouGsO&7q?hVR-L6;wS}zJD3nRXYQT5Eib&J+Wa{%8 zV1o!=@>_MIwE3ao;_%V!2Z#3#@7ZkM6McI5ooGv{1O@@(s^d3KOpcCb(}&|eCv-ez zj-23Ck(2h4+QEi+qok#A{HIegKb{|xefIgN@+gwL|5akwP{L zyh)Dc*U2I^-7MErP49Sa%qYr-5tgS2Oc2Ns$O1Utk<`eEw3L*Prvx1Nnx%qQ_Jxuli74WnS6(xXdK_lHFd{}nT@5n0Y}I2G37f@HLh_Bj+&g;h6lpqmP@|Fq*x5mcaFM=OMn&u4`Gf z?~f?=-30!azb;R@L{4s!w&eu9;DiDJl|E`fQ+<~P=%7vM6J+; z7evk@e8La1r;C6H;@c}iq6XhS5f%}A`$ZJzpw0>13G%f!ml{hOFQ3zJhGS^$@Q4?1 z`mY87S}s0s$8bo-l#%g@RXJ5>1~Z{bjpdtC<@V`Pl>n!PQC6SQrqt`oM!4)x~^w^3{P 
zo>GLj{0y3Xi|WqV^6sJ`_%U;=Ks6RH>nuO$Xq^=}uWDK;SPFq&D9L)u1K42stw1Yi zZ?e$}S|JWKiQt%O1)CJ1;XBl%aB8y%p;U{d1GbuN)S7&|HK}|2T_FP3d`C$N@#TO|25vo{o@7`V0 zjmeKu@;t2kIMO~%>7M`?k|*rHjoqtu+ben(s@v`1-dMvr%B+$sLySwg%Rbk;v`oj{ z34n;XWRXai;~gE(rVS9DQ7829xJVb0quJD$qvr}D?dU0RPREnVgH7hsr;}X3@rO*P5cEen7tGjDVKQG9u_A5XSQ;f`!0y@oCFc8_ zN8XDsmk~2d<`dE^|DIC61mHvtnT3hHAhaoSypVUX;%_MT%LHDcBDzecgyWa#Y#Pi^ zevQ(+qcW9CJ7ph#8*%|9VkA^lHc{1i=$57eT7JT*PZ=YHyg5ELS(r3ZQl`$veY03f z-I&RTQ&~{|+}MCTL`@weaQ!)R0?8y;+h#MF{04H3H2`51>_-i2J`EffTsMRmrG$tB z|0)_>qek_R+M;%;9iAqSJ>0ieU2eanZ&CR?p`>3ZhZOluv=a}=|3f|UoWl3g!P}Xd zFehc2*F%;l^C1AIY3KF>$pZ)PK9C$bxc#o($(?r}89prknc8r__?xpN#x=J_#g` zy8Q8I`uIpH=cog-qaD%F+VmfV>BDqm7Xr*$6lN`|rIHvgqh-@$fuZG*fb9djtoU#M z^(o#=g#2KB-b_%aQyNq{Y;If|mPgT8JT!}(PH^1FPv+8ce8kbyjCT{`6KURi`3efV zN5DfuVB^eg?>I6vw0rpAp}mRSaot7U|Ddw7I6u3fyq2`^UYtl4lYFWVSQv#8P;8`K zcUQyyNyEajBxHUL=+(!D7NfMeMx6$#R{v$ji0fd*6m_wDc zsV=QGv7lHb)%27F*ggntCoN>fOXL@skU;S5XVR+^Q3lagOB!;5q(Du9gj=ey20WQ4 zA?RsUc+g88rm#eDeb0DGx>-Z}m z09-OzGtBhSQBM^kJ3)eEDz;n7f{YbIrx~mIGZSM)zZ;IpZ)3J`)g_CxB=UO%mg%Dh9m_|wx7P2EE z%0hN6j&Z&HQoOS~OXsc=uItc6gu21U_%P-r)W=Dl&;V7-14z0nkeisf95li5&#O-~ zm%JoQlXOc`2_bnUNCqCry|4xdP$OMAkD=!SgNm`@w5pbICdeQ_(y676p_mz>rXZ;+ z5N~}DbM<-O)$~#QK^?=W!6%GQ1fQtza_1^_hi)n)?HbEw#sU$#;U3GOex04 z_9<2n1A!`lv+vuyrJ|nJ zA``LvTNRX9R*73tY}w0JG>jgU6}Sdp3+6uuq2yYWT6tcuxHY}oOE_@q-gdupg75d%h#fpk9<2ChL{tf-_JO3+>kUC~a#5W`0u zvIlDuUQ`Bsx|x=;Vfx_5?7me4_PTJreR-*Ff2CY+|7c}o3vn<|&Tf3&w27@~;{Ort zXi}Ie%l}QKzil61bx7^EUs&~($lamm=D|-6Zj26AI zp#z8F@>Y8=++=Ut6f7@80iI`_J5Q_-;Q>}F1IZiGwb~Bu)r#}B#>IoW_wtS~7@P>eJ(d)q*#ualZq z?bJZaYWZUd|AasfffxbeRxFo*g&23E-?XU8qxPQ+++Myw+5ZXv0XxRV_ek02ARyRuv%tneqs&d7+*BSMNLR7GHi&^#Uu@v?4 z4tWR$2JcBl9kOt%MG_94ZtpHloEp@AsZ#E%b3J;%h z2$q)e0A-MQV!8+_2Q9Y%%rHWyqSelix}6)aq`F6Y`aA9&p4H8Y`)9DOY3;7^NlYmo zW`vQrJ^`AI$<%Qt0R2dgryovB+`&|Lk@m*jzECkO3q~?K4x45TSAv;6ja|*A(-ZNa zyM4V>@g=JBI)NBPdkC;l<2NQpfRbBb27o#@PmU$bc?d;~4g^}XI!Gh|4LX;rXK5{v zQ9^3$r*>pre?N(`@@EA8TRApQJ%3BFv+LHV@3fy?ch?anpwnxuHwuxcHK%n0(*W*h4ly5?1tjzlt* 
z9mhJ}h(2z`uuYmLVVHKV#s1v-<(mBY42@ceWTsIKze%GC#C*VbT}oA@%YHQWn*=IP zt3$`Np(vY` zxT>p?kY7;AUlRBgfqx`G2<7M_g^9DS=-N%CS&}6oR??}5F!)XiLx^2FS1+GE$*fqS zq(KsKRPAG#HRNef!=5&e{e@e0mRX;5SroT1AcHTGu&P!Wu+-#lZ20h7||_#|@z-rVOj(8Lrz$K9^E!;hoX@7bT+ zv<9~uKiR0ooZDIw zd?@!fWao>Jok4b8(fQPNvz_EED+*~VSQQ;RT+6x@p*A1_YRS&WwM?h+eu$YQn}uEV z@~D#U#NAxi*)OO{3AkCTHM^{E9SXos2N<;|Txe|t4rlk@S z^&l4Y#Xhq)v(T!?gc>esl|@sq3ToO#rPT1aA{yJVESbe8HRI>|l(#?%5l!uoMX4Pc zQY%_IsX==4QZ5G6NCUd8o^5xh__$W-v+Av|ed5;Ua`Tk>ptsamYAQ9CT1u@}!(~<8 zXD$;h4=WUc;8~CHwpopU?N$?Dht&+&X|(`$!6w;e#-Rp>tu`3WTdh`ex#+$GqXI^? zpq<+=S5#Jtp2{jWWeSofSWu&=JE>n*%SP71m&{gIF~;3NM2E=kC6S$}?n2rA3}V^VSRH_C#o|jE^46J9XDaJSD5X6!7tD$E znCb@m1GjaSdo!D{>YY}nSOPxX*Q%7dENpLbi`5Fa)x6be*2Kcb!mgvA`5GY8tbgZ6e=yoYW0oLB}-S6QtH5(cJWHBEcKL8q|@p-iSA2%R$pc3 zlmfVGO2G}z($X?(ndm#IVEh*-G^SY{v>unWQ=`^WeQiMmqUetlba zc?C`vd@4|4e;z27xwEv~yvrOuuU781mb|>0eD#(K3 zagd5L9a?3@MSmx}Dxez7E z8iXd>>}^B`h8@|^d6XC)CheIhOn_-_M9fi7LhHPU(7Yx11_sWOPzy;Rx6?FkBd`zP zIo0tB^DNXnUIY^Mnpl~gxTxC??C#o7?FaX3NxoBq-bkIvUTT4bmsu%*nDWmleb`oq zx^KCMg4OypJYBa5dFUo}e=$>c^-Nt{-w#mo&FcJex=xe*{Ls)XJT?}|d05wvEd+&| z4R#%L8$E8E#B%JR`g`rwdv0lFH8o3<%MmaLGYkV;ClP%J?-T`r{JdukX;xT zH%5}KBL9j~wo{59DYR0oMa=59lC92LsJB^c`U2JZ41t4`aSzoG+_C%41IZ%??_{Y( z9;S?=1js1r*qiR?EbpaIm@zBqqLI9lN(@j-B+EFV9or99FI#8TRadm~6vrnVAMEv$ zxF8J}>9HIRjlqlTc==Yz@v{cJVI7PpWC1W7{bc%VRU?OXY=7;JrtKx_#>?&~t{yZ+ z34U05$Dsq!Wt3|@sHcH_mGfZBFbYtY--MXFU@zHwOl`L>?CmWNp$yzivT(kM_q$^9 zdMd*k@-T&FcZCIi-kJhsOc3DhkrXls@b+&)$Wcv4O*>{qV0O`0-3Pf;H9If}MX zl*}ZIcWefcZ=ei9UpYb{MrqwhL6;;HhuELa)6E1Ogm}vwOyGTeaT`^J#UxQbRMxJ%3P_xL?+4^RuuG(ZGgrE&-)`YU$N;X4POrp#vu`~iVy z2~-H&kFtlyUVH8*U%%`4f!lF6wun;iu%9}-vrHG!P zEb19+n8IyK&Z&VFOC)mzBQ-w4!@-$E&Nw(^jB31*0H3Bw3QZ8;1J^`t^MPZ++Yf_1 z(2x=r6KElTBt~90oNqP3q;+M+ZL|i*+UJ-IhzQ%L@@~#I&Y($ znq(3NRoL@#$>iRvYwn#=YyN?TOiK(?$9ImlZhV-!<6S>Zp$(M7(@Ig4&4~Y((s<^) zVizcywO8HOcaX@UJVOP~0$gV|T284%n;dO4FW*P8_ft%j;39cg7$@cpmbb{Z-+f=N zs@a2?7W=Emy!Jocm$;wNGa%;;j|hwtNEt~uF^->^Gf-aQs`-K0_Ko->nM>Y%^?%$S 
z(l^jr3_s^_t)uUzR{bz)juvFj)yGx$>TUKUY(<3++TVC+doP($z_IT_B^Udtt(TcJ z-%V+*d;wdv9Ug7AbH~CgU!cZ9l;hTc`E2lfii*FJ0Ppi`XC!2N+5X_MJ>^D(An_2T zmp40C&-ree<@k={#xgDA9WLHKNS(RU9XU}TF6JJsNot?j7#|szW{pl}MV3*(@lIm* zHcCbUW^L7DgIXu3$=Rdxd1~n|0US7E=11U+na4*?Q7M{?i}@`GO%uQWX}^?=)$s}E zojXXS-es$AShIy!yA)9to)B1gPbNE2;x?%I*z9g8YLv z%^^VAnL$T8i_e)g_+I0Vjxp=6sp~Hic#*oM^@E1=HA-Ma`wE4=O5l1#bHa0&&!&GS zu^Tkt>gP^q>iyrOM!a&AYVoN$M@@GlN-lUN?#L%7i>c-BP-s2n6Zv;1@^%W%?j+-N z5k&^C_GVsH%Y>sCBU|xMat;%A2@<7Um(OO%*^}uU49E$^8u9};{uQc zoWSvM`<>ag@|Tg~L|`Ky%O^()GEZL8+#nw+I$p3X^8zp*Jx9fTAO?_*xLGsDN2<>IwAYdbHwOtA z9T*cVWISPiCm%2S1AT#DU|*mi;0tWl?u1LZFRDt)f7RQ+8jfN#%$Yw5lgPUhAA{r^ z6L+T-_}t*$yvFoY;P=QkSPI-9I7H?NS5EX*6*^R6VS%)!Nz?vmq3ISdHpbrT(bb8J z;3k4R@e5h-pTp>v9op#L4nurJwR;LnSm-lxm14oDB~&t);=%-XGi+bxQXCShkUwF8YPgH@pmfZ%XO-ghK;*3$4d^eX3@WT`SR7v`U_$^St6*X=`Q=3rX8l=u;rp-iWU z!qok*r0&!Fb!E+-_P9eCqL@Mm35m9enk)7TqIDs_UACiqrn}(r~(%XRX(Q!EGe+Ux> zny?vyXoPz$CGWNWU2a)U>k0|y1$^Dm@ zI1?L9jc4J7NeUq}2Hs?HpU0*GyiDLzC<{S)EL%A4O80C>@ELe|0%7u_G=f>tcRNJ` zRE=&{ToJj3LO-AkFf6c|f1=JsU2BJ|!PHDV^QU}_;%4EzdJ%`0p6bC)YdjQXOT(u3SHbjzlflm{q*g5_R?)Pjv)N?Gkn5A$Il2hyO@jPnbyl zYrmjH`v@W~2HZ@%`+dT}PuRIL3zrf;(-xAck-labUtGhRq4fCJ30Rx%vR^o}oh+Ix z#2bWJd?{R0B8L^YB6NcM;s!vyq`KF@aqTdyouS+~&P{$SSvU!ak5LEy7VsB2=^l?C z(Qs#WRMPQy5<_E~-vXuBO^*M3vDm*gTNp`Yjoa)WUJTp6Jac>Ur>N?vDY=oF*bguq zZ*ULFXJ{5|)?{wY8iH=5;XXn_pW~g7q*F7YWS-7PVmnpZK!DH17WuMtZB-wRmic%bxq> z2B(ox9*)ByFe3P`kuQ<(oYWB_pDcv7DaHm+Ij_a<0o*G|E0kexSQ9UBjNQofQ_8fT z>vXON!1e9|SRJ0P0&7>4YQt8zg2N$dh-={*u>FlA<(z<93JQT1KXFGXEE+^3ybi zD4N-71<$La8ih-dI%)(Ly3Uu=rFXOFrOySni2%}856#PvTL=%&J7}H(R}pk?I8){2{7$ag^R@f5ECNqlV}a zu}dD%g}PEb9{<4O&#J`*lqyqbu^yLH5es`L58@FK#o%CrQPlDMZm(7A)@UQQy@has z4CI@!xIWCSODvk5VLpUZeSWH?ZbTO~Xc&$`L~g9WXz(cH3S8mh8u9G=ajV>bTjg$4 z_Rr~dvDIL|qqJyI!J`h(s7}O_MU8bZuwfwmKc@g83JTUnj_SbtKKXpXJh{Sv9qE|^j`K- zVP3>C3kc*DA{P*NFM)fIjVG&5Q7Pu!uTd(ScP~)rOO){lg&rmF4gx0#Fo)+BXIQ%9 zXE_RXnh#KkcTrW=XhH9S%nDGaXJ*nc*a%UNDkUn_V(ui@)uV(vjW&e4-ZmdkPb59i}ju(*UCC=xXx 
zns!NhqOnvCu~tD2Sa7$MYvC~mkL|6*kR#W*Oy4@!?E{_?HT9rSAtp-1GAzeW|Z(y(yGgZ@Tn2aqUDkYwOdQ|R&6D0Mj(3Bi-?H8eV~=S z%i#Bd+iUJrv|WOK8T60eF(Va}$E3^T7E@bkAg2fV7OfCX8sS6I{*Bs62-3-Ag&LcFV*)`Iovv6|555^$YbGbYM?ZkE;3h94xr zMfftS#cHmGkYqmdl*?Q1vMX0Y$G~$)8-lpJ5nrxy;+rU7 zc*Z=vn&nyUCP>|E{QvFdpxe$7RGQZ!-l@+W)3-4NX0Lb|v-rPl-M)OF$^O7lKy9?A z9)AHpyfEuF(6@)#uPWb4{oMxu9|I4mcg#0QYUPRiGp*OBX(H=y+?D;!i|;9aii)%S zVMYSSmb5{Hb%kYavz4MO4u_!iqygWN}4-6csy0+bP|YPaOfN{SO-xJohtU_DtQp+yBU zw$ozHY?b3nPUg|$52sf%s&(;$ss#i!2zm574`sYO<+C&g*}2-w=loDM(BjM=4TcnE z;?71Lx_`X7t{LcjC9SyHgey6j*zjB!zcm624piZc@4SXw9SSi-Wjy8g;3L32XXuR`o+iOMrATX zy|Oz1PbLZ}Jn#r?J&?405NByf5P7c~j%k*|8A!Sqgx?(kLe-ItYGyc7i{Jb3lssm* zLJyOmzrfp_iT3>@2YcV-b$y*E#1e66c+6d|AKxy+SiG0_BYGJ76hXuZ6ei%Bj^9No zc27&a<=7oZcEk!3M(n`eJ7wdBYeb}npzKAl z3kiKta|7qJUln_9cEGERp`)>UL8hro7~l#p&J+F*+p8a2UM3}kMNh{|o(KjOodwZl zm^~ikc-cS8@$hq3Gi{iO&>=ET8?aAPSvKPh(M<6n|EyqcJdf9aW9Er!t`mf>mHXo$ zY#IUGr%H(s>=>!ze*9ht3=6C5PuO?9fffR4!gd-Go9t-ka8Xd8-&>gG7j)I?s1M`d zw$KL6EFRO?#X8oF9;*v7b?c0sJJ6VZzK(^b=;? 
zJz*3>?g7sjcR}jnsD=K1SU~^a$&NCfQPIjTaTlP5`t8C%9i2$L6Esqz=_K7X zv0@=RXsI3|Kz4rDCc~3so66@YjVzROQIj75H^=)_YLPJ=R!jKBQIURHNY-0M%anMK zz^@2kIjc@F7DcBhdXz#-3Gi7-QHaipqtkR9UBKZ%{sr~wrJ#ZP5<)j8OqLrwi_O#U z9e-6oar_gqKsI&919X+;E*i@cYR(5LJdqYofY6%`N+L*Wz-D;&*y)tx_zBIC%@2|w zL$Ct;B8~JCf#0F=y$hj)yPE>>rBOG&E?;9q_X-u$gxKT0a-o5xN%gwDGAWZzud_6Zmn9x)NtMgeSTB>$={8 zM@ssyLpoyDLci2&0=NiSh>~UPT)=}DO(+@wq~t!Z9STR)1=j+&F$v>)x#sgURnvTM z_}5jr3qy{F>Bfrh0*73^nFuLT=#&rP$ov4)#$9A1%F@kN{6Y5@a9G~B9c5(42AX*X zt=T$?lkv|rnlH7#`)tui*d)){$x7V!9m-m3KUJA*cOv|oT>bPDTr!oFq)W0plpUYi zZktn0WyG6$jF&+18vcD{{^^S+*beq diff --git a/utils/__pycache__/dataloaders.cpython-39.pyc b/utils/__pycache__/dataloaders.cpython-39.pyc index af0308a8ea4527e1abb9bb3ef62f1cc3faa44ec3..bb2876476fec20a0099c66b90a5819e6d42394f3 100644 GIT binary patch delta 15795 zcma)j34B{unXj%S%d)&Xj-5D3qil|q#M#p{OB2UQlO|1^HXDtbMv;AGTaGL_=U!)V zuYXFDhVIn#Kq*}vW(t%-dBv0#%FsTxfuU0h!z*wHN}e= z_w1iOo#i{<*}nChqc4BK|Iu&y%Mzuf5g-0e{Qc7QSHJhcvgyVzo?7^bk=vdO4ec-| zo7Lz2XN}gK$z77KF>9G0YB}#SryTXS?oQ6?zIN!k^S&*<8>hGKVJ^96s?VHy)IVsb z7Y*N_aqKtvylB>$({Ay#?lq^IGw{97yv&@5@BL=foQ3ZL=618*oPUea`o3*Gv%!qz zZt4ahAumm#+dxkKi1 za|MXsh|-njDwM8D)`Q;RAL$jr!-IL&oKLLt)>-jaYgmpb05A+?llFT?wmeC3036z~`?rs*Gww z?bfdrK5XpLPWZ;96IvA|Po_HJR;oX_p3O?6S;#0sv#_z)P&W=26+L9s??HL2@C`q{ z-Utu`2?_wzwM-QblWKkE4a4VDI~9)KDbK#?Ofr3aKBvl=WLG<->ODKqJvkEutg&mI z$!5{rMNUNrwGNf~j4Ar9vMcH=AiBZ!J~QE#n91X*_GG+0 z+n2G`8ojB!EvHtozPLS`PABYSyd$$x-H8%ehggkT%i4lePp8$dZe{stf*gTEa1(%A z(4J`TN~)V#Fu;N!HsYvT=&i7&&Pds@zJ#gH66YNAg=WG|SR0eJ>(7|@oKUy3IF!yN z%%oCx6LG@6Rko`n2g#NmTBkl#v9_+v2pWDPf)D@82>%=m`~R#S)0<{j>(_1y>Tgx7 zH}2O3m47rouHUI#S3b%*>XYc`yxvlEVAT~!#RBSH_WW@I4(M@1J<0Thp%ljB>0~Az z|G55cRp&No8FULU?arhSaD$oTNn3peO=Etc;2QA>MAWBAA0LaH?=vd&byZXKeUnyg z=a|&z35epB#^dejgk`lS)9LtlDo+umiQs;K2k{Xh4I9CNl7etR-7oWRs9tJ3p$}H~ zEs}8>GOzroBt3Cl?EY!}PWAN*rGeiKrfh5pmqQH`T(}rA$Y=GyH(6zR>SnQ`psgqzzji|1gnaT*HdjVMU$YiSobN0NV~_%JUrycIHL`%L33 z1laVOzO*rr3pvJ+|Ge+8F<=Z7nt?+FjxQb<2oDquLT0{dDK!04 
zeP<0~WOn513Q(6eLuTPEKBsUCY7YfRT4(C>IAP%2iy4gJjP{aaH&7(Lc*qF=7CNj7 zJ3&B988Bk9w%7>`8RvaSl{miclI~K#z*OHriBm-Nqh57N%;KrOOvL+kifV~l=7gp8 zU6hvOYw_)brMBFO3|?>a)}XQ!g}!)&Q#@$&)+1jg`AT$Bj!zjr75G$|5gS-v$%df7)mus9H z$2CnTQJVn1%|tUm|H2EmTbHk&;a7=~E^-&`LVcCZzeX^DkxBbIEGKO@WZBR+J#N6x z_PW7zvcq-@PNYn`%MEoUQ=MIQEU5mT*xw}h76Atqo1|ti{Xc!a9CV}An@-uFB`oPF z7f#R(L#cY&`ZTi6)nmEAM4z2?gFRU@=@xXTL{HKUSnYOexF?%Q*;!?IJxbrB z-MWSO+E}TlZ$ef!*QeT(>P3!*ov4=x{*eHK!vuR1AZ}f366*WJny@^SjL^9MNFY~cT!#M&{P$0}aHp)r@puZefod~IE&sT>G1va58QZL3wy81d2wr>a$-0dHNxOE558|R^MOy4>1 zW*!>n{2df6gM!yx@Sx8mW(eQl{qDm2cR2rD^q?t-2Sf(~nJesK zhh0%(DX(Q_=kFcZ@~v_}4}uT&IEJu>)88@qjwa z0;WAFrMn?1aC0BhZb3q+#3}VXrh}a-(dKc8NXtcpz~izU9i@r<*FzGO39CJuu~VIW z**?o7{W5X40YI#15!^tkXQ{e@DSxVG0x!DZc$^kC9{(8-tSSJXe}S>s4?oji2TwBs zKcGK%5vKE6p$~GhdI4ZxODv=&ST3@<*fsSkQyVV?P0W4;;P3G9ESEnr1D1BnM5l_|s(N+z)KkVb{mZG* zsx>UmJA9UzQfh~;sheLc{*b3b^sc(Pa`j!}jj9rztDBSjHH&@&kXJrJX5z++;(42x zeB4Ec?otY%($Jru)>u{u#8IIh z)Nf5&ko&bsyIf$0AS|%i2o1|bB*Q~R=V^PcKzVp5a^82yKM)uQIsxmGB4%&|M9iRq zP+v@3r1&vlwa?i8GZbNJ03US%#JhyzEm+~9(h+**u>(S`EKjaXlm>{`!eJA|Vc;9K zy}K-*FUL2W55PibkAlq33x{J@3{{FA2|7X8aAVN0rs(&k$5NzGg*f+W45i9}0viZH z(@dhr&|QtsWPEDOVrUvz`hgJqhFY^UuW8Ce(}e6PLsOj)TS7mDq-EGC5WP~~;ugpz zsXcmdMoocw1xu|zHDg9j{g7KEy5mPoQFp}ZlpiJP{~+y_?u7;24h@{JQ`wBS`+p{; zaMIIE30u6&l*<-jm9$kVc|7SBs$@Euu#)OOSssKzHWf968xq54sc24e&=9N=9=nv* z5lyE48The6&#Nc5t7k|6{n+v2iL|065v`+s%G8+TH!=Gvz+_C!qOanE$qO3ACH@kq zoib#~^q($!G)w~)D^@S-hiBFn%baNNRZ>4cv#wAkT5RaoW?nY$Ac{x)a2d&j&N$^O z^_=L(jf46hqRow+oT}j-OQ13~bV)U__GbWZ;v+`@VSi*+I2djShr>fb ze|7dh`Tgn};=8@PA*dIftX%E+Zy)p@gL5xifyvAy>5F-9RBbnMK+XD{c`J&=+YB=Q z%J3ESjfU}A-7$am6k7WJU^HPz`vZ%Yck~xTw_dw9rZ%JBtw%;V8QVfGg8SB$=uJv@ zFp{G5#@wl~nX>Cs(lrt>10LMTl+|}5G82OOD;xG0mL6<~RtWEp(Ry3I&@gWrbuD82 zEd4h4fsDJ|Ku52oij8|?3yrCIQEc_(GIRv@(c0ac%p^^5m%&^oV;c+aLvvMb=&!`; zauE)P0gl>EP)Cpjz?AHSvh!9Q>Gq>;s4tT`)|XUKwizJisCri)KsHwCiA#+2V`Cme zB3HXc#x+uTgKaIh)l_1W|6SvRY9ZByETOo0Vl|7|CT6MM_YdE`V2)ACZR_K)%Y>Et 
z(!woE`%w%FQ_OZN2wW6+x+Bc&0s@&SN3Htw%0=@wJjvt+)`%>AgV`&Qb&U=zbzeJH z+PbK$bKOqsk1R@F2Bm)K$ahfJHU8PylaD#(F7afJ8x&8ZQ^!5#nW#rLf%bi*+N}F= zU`S2^Ym4?0)*dr+O{fy9{g9v|*WaLXi)ZS~>WlQPCo1*UB`b3?Q3sdiVv6rlTZW^W;DxC1zC z%J!b}CPUv*tz!+{KGD63Tm+{@_C>Vqm@boPHxZMH_CvaD>5U6yJrO3eR2XnzU zI_Pip8%t;Brm(Z;iTnbHwg)gejL^3{2Ty}S9MOF)%d-1V&HVs9#FkAsuR-srv zA$-wyEZb%LMgM47>lRtXekYNn&`(uN z((f#PXx=@*?pz^%uWyv4dZPkGTU%ts6om}x@VON;jA!+aR~$}t*hP-t4BTyG5DMMq zk4O67>GTbhGKR5B&LWm@j7-omI_X+{ZlKIAb;`~f17%LxP}zAL1UaR4d3U*8;gp%d zY4CKW;V6X1B6q{R8Yn&Xe&AJ}$H}~^*AJ~+76_X~`sI}~b9GB$NBMscpJVU0r#dAM z`W*U5k%t3kjWY(y>Tn!Xg5#A^9 zeb6}dDZ@8VVfgwN*wY20Ogyb}rvlg&_KXL855q}3W9ZDPg}HA!Wf{x|G-sNXJmBfB z8H$=!@Q*6{TkKigvzE@t`v$6R^O@CCu}B0GF%43tft z>a*vO5=U4IT2kA(tr{%**aB zFnk!(%&9`dUO2SKtQczaN*MUUiJfFt9t{rq7g>LD@TLBgU(l#@H%k@@q30RnR^D99 zHj8cvn{%{#`J8!`ypkE1=1da;0c#)H2h_fN`?+3wJ$v=R(zRH~>CSX>9ADeAHcO)+6f1>6$b1sDEgaS&uezoLSBsS>hE=biL1+Js4D5oZ0!Z zrBYUeKL21=VzGe(`d$oQ+2I%bkviaBST`6r_J`a2bGw6s1?ovOUNtb+nVWB1FT)tg zRqE4MOwP^c#GQKbAGz;VKNh3mg0h)X7Q3Kqg_JG0plqo#*Qw{E;;Wo_ObyJ#9aMd% zzteDLRQm?%>?@ra622T5hyr7}6Fus`cg@rl^+$aeY?l)wvQx*|XX=~=vmV=5pca#e z(=g39Qa4y|Y_ZzASXZpwcGY5Mfw^QF?!g`mn#&xX zjVymSUAXcsh`uf-g{?}{@mKD z=g^+P!{XUh61Ny*CV)!w6;El-Sa-ScOEJj5I~~;NbsP4I8eRbmMURjH|7gstBzS!v zTT;=gJxqz!6xw4QY(lNeN`}t;OZ2iW)AfdR#rl!OVZCVmBIA|egX?2P8yjGNf!=H{ zwEdOR=qM)jFj5nS`^OP#Fo%bjzJZ_t`ZF6>PZlrrCN^>i&JYafw>M7Pd78x?&2I^K z#fAe4E6%f!-?N0LQ>vbwG!sM&eWa--cRkZ%tn(j4dXu1=75kBL!&hIm^V<0SJv(8C zQI;k+MnL}vXUn*=kV}>xw^Z)Z;&2-+#>%@{U*eT{+@QMHZaL}GYP%8rA59gj>Dh~C z;RgCsy>1ARY+uq+S>n=j;tgM>9dTg>A8+e^jAFfcQ~jnv))&b2p*Y6sX%-ZwGA1qy zEk?#(5Y$Lw??_!jby`^j5g$ZO-KRgb>GX2?LAVJ__zjsBeeV)II4gMPEQ*0@23Hpg}` z^mBu~Z8%M2j3>+jo)EE6K0K@wdTz&IgF=ABv8-}a49QTI~gGzsmerkP92m0a~ACWR`Q*5@nlZc|Rr03)Mrd>00j8@f$2}E%2Vd@S7 zku3@OM7BgD?_vcFsH!KJLLjlze5RtTJxNf-EH_Jih1ur`EM(n+{-nZ5Y@|bFd*YeC zp5EN4alsoG4FMsy5$^&rX7c2?;kJ^TjN%;7Tz}V6*MDNE7yHW+uzOyU;~Y@kOyrND zpf%!Yws^KLZ_h`!0!Lc$&Z4A^>zkf<{OIucmPd_N_^v+hzG4qZJZ`w9sSK1_Zx$~Y 
zpfh7tYLFPRy&q&s)QL=WfY~TJE|C<0ETWv8+@v44wtf%Qp}Lnf8X#8WvFrvG{FGJH zQ?9=wqrSnSZ!%{n*a;cKL_5V@dsXP8&(qr^jS6!C#*Q0t)xzl8=99dy3uR{gcFnS;}^z#R1HIW{$=Ms@V6L`Ia=`XPP(*&~b zW8KmXd2_Ve75Q#!^r9t2Ic#NsJTRGr47ngsmy)r^Lzk>_Lv2}wHzYWNmc2c}VR%c@ z-j$_I^TdqLP?Q)ok*z)xc{kY6hf@xZPy4W^S14f(F?3_{C3ba8$V5&4j^u7(UtcER zBs|fe2##pJuj|XNk5-73%dU#VJfl0VU%p1D3*j;IRWNZiZy;4!(k7-}Ku<3YfA9Lw z2DeWRw41IWV;5s*RDD;(Jm2`D=12tk72T7tH`V;R&xxp7g0%$s{Uk?S_0 ztO54L^MgmWuvqUtyj%D|{g@q$;|5segn3fnsf+Q$;!$EN^ZY#wDK-)k$x~fH7C^qWJHE*w2x~-cHB79y%O*KQXHp# z!tSSdJVka=ud;jD%ZaSS+MlsDDmL*oCjURUM>thjh|ZbEN20)K5k^@E#99kImAR(1 zXvC>rBO`&%?S;%yGtgEPFw{aQ_%4 zQ>d3xf?wf-zt=XZx7hInKW+?LXX$tlg>MTuLA6W`7Z9@wc29QbkPs1(6i zxwioKBJG*l5d3lY(c*Bc<2sR?xo$P6yFt;iZ?bPuEwW@hMTO@Xc|iKEnMV0Z_t-(pHMF3p#yF!k?jUxLdB zJJp_l;mPe)9O}4z7cp*P**ySH>T5dZ*G8)9!$sj8;mU9*yf(ZO?*K!7L*1iq?|f?d z&w&*y@dmJs%_K?_-dd**bj_Ladlb519#=E|#iKoVs_^62>JN5R7fWwmm->Hn)lBIl zOP8PpAHkL&3WykU4OQURuXas0mh1oQYB7GHcclK#n61Oz59?2KKRV@gAj3RLTuR3B z&f3*sefa3Q1tbkKCkGKdL=T}i}yAgQ^nCeL3J>E0=-J>%@ zaiSj4v(nRZr?{T)5&V_lx2S=$)tSz=c^)7m0lY-Q15&sea_SLfV{wIKd7^B#mtY+$ znwb*6Nz_q<*?xkX0Nlb3$h*B_xo1M*H9^&UhP4c~+=8B?44dbX&RtTV5E7)-?Zlvg zq%(g)!Lf^#dD8wTqKd2gIpT;e5MQE6u+wdF#^HsD!q%T4n&>DhC)bGPkI=m>12wml zZ!i<>cDyZnGHK=X4LwW4b$&icQdL^_G#LHEU+#I`zlvMaf_*QhV?yEgR_9`yvI(`H z@;2oMy|d@PPGt1X3k`maiG0kD8xnaYW~r2(aqQ*|g`n;l3AG=H5(l>6aSaa{ywidR zI~Uy`#^nSn#KK7Mx=#P(*p!?oP}v~aFK!Utq;VD9VqBC%e2ypMcp>&ZRNx9)IDHc6 zww5I_8MzlWQm|6r87P)O+MryQt>kDl5hhhyHz zWo#Le8Jhzezc$+V6Gf_eaMUG)r~+yPzi58VUIB|jslX&qd2AlNj{2E zir}-%jw!Im_%L zte+6$f1YI`=R!;vj*{&bik5?){xM=a!}h|L4>2o?lPP1ei8W6m^_))M6!$;LJFeQ{ zU*7asz*wvA$Thb91ql1#G8SPRZWuojuo%_K`96IlZd|K(pI$OY zrnZdOV^bL0WLn9$htHpm82M}D7j9m#h}Gb3(Wq1{434N#$C{BhDubZetZU9Zw_e6U zgQi9`2_|w2#LDbtS8^H*`#{Gf@MtWM9S{+ zmO~Cc!_qddS9u{U+UXcqO3cq{bO2pAcFZFKkiN%8z7R+?LH*@hb{aLh=v?orzs2U@8Qg|xOREHmRmH3?nUUi{ml|4zndv00oGIajNa!iAaF%ua#ob9z!J@~y!ANl)2 z^%T8&e{(e3Yelc!zBAg9NTvH!GAeJMyol=Cn0c&PEdd>GmHbRaK*uwm_c8Ttf^QN0 
zqmcuIvCPXMC1%{QT9jF_$089ga#_nskoz&hQe-B)_D!6`!E_HE2;(8-2~Ui^OMG$| zuZiQ{Bjr}!4#=}*k99=XdMpr*clEW!IXQ5VMMn4eEAZ=y2+ldf#%!F0;yiTtx?8vU z8;_t4$`0w%pM? zODcLe5lM~WK{TcBmHHERoZmy4#c+yxQW^XXqtos}bU}qocRb_$zC}C@S!oI$_F6{r z7%2~POrw;=(YjJUaOd{5;;6}aC(zi21#S_JJiK3Eh|_QvWM}zyb7U=2BxxLnY&UafFC50z`qOtUGFtSH@47711Vp%sf4OUV zPE<#dqh883^nS-F=?Z3Bh%FXf=DdYv*AQ8B^cmK66D(#~4Z&MPp^kHd-BvaOy@kU* z9Pt2JcrYO%Ok?2|Ziks^!@<7itDa}OuLHD3=&|GM3eOS7+y$(fOibE00nPgv8M{~> z)IY!b-^w?m_x{4D_w-Y3*Z=&1+2dv?lancdneu1Y_i^MKIcS-KOjG7Sxx>~=U*b5( z;A98IY{=@4F0otItFk!%vAiR2Yd?DM-VST$o>jSe5Zj7E=N z&k;~vtG{qWRCbEW%p2QYruIpeC>Qpv=Aq&(m~xAA!!KQmB2^HzNH`Lyip*iH{#-@X zaP#2PhTj_JgREQCi>~OMih>um5_S8vi}XQRQkWD>!s&b0hCadUC-n>W?rW44I>52r z#KMON9wvB-V049Ka%sIuzn6=n0*lehdII?AhuFTAAj(ovR5vgy;et3s;$6`FR09MD zQHucUxV<@>&MLQb^X{Fy_r#mG?ODEOTeG?zwIfe4#lVZ56u=UA)6rEAv-}o(+eWRNxX&G_XA*sZe{u$!EFS$6Wl>?7s1_(z2J8>FvD4S z_j2N=SuE28A0iOH@54-qVJc_pBLp8Mcz|Ff!TkiQ2u=au&7JrLGGH0Y*r*>N(xZ)p zGQjgpeT?952xO=j;P@?Q((#YX%bnxW&qj@}D1x7b@>;C{zaR-CHU8&(_%YwxDuGGG zN_a{k#}iwNxRF2+aLLtU1hNfJF!dFJpApou8DhZc zc{@63;w*`s#H+;4`bwO2yxFYRpOft7=uKiL_GZ^{zKxwPUy^lfXX~$)G(rpe zt@yg;9aZ(}y;oJQUUl=}E86op48KBu55)>QUi{9b%rf=mr{+DXCbovXLrwY= zyyZdA(RK zIis}g)Jyd;`rV~Z)u+*Kt3F*Xr{CRrgI=juol)ENY*O^u`kXV0J|{f8r}DI`?KWx& zB=ot|H21XDwwJ2&^!ZeqPqlqitJZ6%RztNmeSuy}>~5#lh590DT@0X^u?u0 zr>ZZ}m(m~y#_PA}%cy>k>dQ+NeT6=w)1|M}S5aF~U#;ItybcksHG1`0Vz!os2vK#N zUPraM@U))xo(^KK>zniq`bHuO>)I|Quz4ic^`R#7z3-`Oq$g#+x>UBO^v_#E%PJVG zrIPLI3Y)?1kkP%EYKD)1qUEZ&YQ8F(rRKY$Zk0p61GST=Qp3k1ok1fq5MBkcbSCqv z=|tvJYgDm+q&oExRox-K>n};$PQCA>5D+c$xBjaO+fm$0!1nfr&F;AFh?A5llewEB z6f^qbMtGzAMq0IcL|)pMFV*zB4o;FK%?O9};IX(k92P<1YIq4KS~^K1j#H`OKTyY? 
zXaT4v(2ccGQBuVyfMu{fmi`TPo%fY7!Y@%JpphBz$X9(2-u#EP)B!I+=&JY4Q^WJ z!*+_ZfQC59ZAbb;x;O=#GpMKNAv0uzO3xJWYsp1KgCykjB3`WDTVDNJ?Kd);wQ@U*r&95uW3~YBSeB2aYB+`J!9Bef> ziHZ0E_5O`~CU0LOgb;rVfHpfL80?6K45K3)jRq%@eHkdQCUGBuhv;EL@~Li5y2s}d z_wi(V^J~>>^3nYM`7D^q6B!kQ%aaAc@L!c{r|g}_1e)!Rn6%iKzjzAZ`q{IP&&Y30 znZ4<0^gaXdb%4JE;K^Vf6UdV5eT_Pvq=$`%1ox8I*I4X5Q}<=QbEifoTu%M1&0@#M z3scjy2HRuwMI)x|iN=qGg`L*8ZBwve`_653t-G#iVm0B~>hTF5zUjP3gTDMUPquo#O1T)C)5VHWJY=J zax{-xf}ux*Xq25ZX4gZ1Y#-T%V{fnw+tV3|h7D5ZPTPMpqKD(b&SqioIXCfZUA2^;$5MK{Ln*_`er-@2baBmRruO)$ zltxdwt`#cz%CJVBOf$YaDjD}z)X z%CuDWg%if=9CRJo%j{?fW|yur2?l&v(cNH2zYs8ew4O!3UJI=$SJDREv8~sOW2O#e zV<4J_i(IXGQ8h~BnpuGec;(O`-bAA0ST4fw#Fm@67RJk?C!d}vdfElIo=yULEYDDZ zo-wTI3jNEx%Y`$Yv&1;t4~H$|v7Y^R8DG)$gX`~vu1ZZvFfq!*kV zMZ@tmSp%bJGeMFQGCJZhGt$)`?>9n1giZv!NzDa+Gk7!XY)0nhN#1 za=4~IUfeiEzFMekyQKB((31WwIU~5>2IfqKfJ*-Ic*GdEZ9MupxL~I$ zEZ&g6SWsEY{y4>m_lpj4!EOXCwyU$x5Tp<@YUin`a$D`PDIXDU^4iAs zzHlt8k15ZCwQEx@Q;+yqk#E$NB;H3Eeo|}(C;^BOpd-o*3DYqIlsgXF-u_tRNPk#l zLv#z6Jj0)(be=#U$FYuL^e2T*B0+0yHD;@kQFAdCnIaX~Y~t!Mu?@UR(egEvc*$~5 zWlezYRE&IK;Y>9XyD_k3e8t{gwBeQ$)I>+iA3!*hXl!?v2(|NJV*AKeMI?MQEK0zQ zrwNgTDfcd}T=N8qHRypL!iQ2dSg4&8b@q4AIve5!w!(|HO+L3cJncU~_!IyOo!o51 z4F}|!C4Zuj9$|QOQJy@wG-rWhRM??lC>kNjZgg0$BjOH7Qzsu?s*kBGuQ;P82jm5U zVnrPE`L~Ef78L_-aU(BxGwQ=kO>Am7>LzMxP{ct1thwXL>!}pTW(RJ&LeZ!w16et> zQZO8fm|MapY*$=PT~@H`JrH5hDJ+loopAgv+^d)e8uqq-LKRv=kqmFe2WUvHH_wtM z_#t(KY&)K_uEqTK)>>Ywce1Iep_x}qz`8`ed}HNQ zx^R57^5E2VGxf9*({$yW7W5B%(4`D!m}!=3rk~b?C9PFc=RIv@m{~npX10~7yGkjz zDOFCZx<~h3pwMS9UmK|zR(66Uop}fW^^)@B8!L68O7u6GwtI8nxs)4y? 
z8DnH{5S3|V6Lyw7zj|rnTUKT))66)h^-R~Zi%Ghkf}wIfhXS6Qfo5|?Plc63^5$O9 zhGtT}#G@&9D}%Y`6g{t)X5BN3awMAJZBkCD)RteYn3do}PiZ0X;@(;4C7G&NChf#l zXJ+TLp(-m!uDi7`F=bfY?-|S+%paUGSTI;<SvkMP@*!HL!{b6|2xHH0SD5FVK-nGEFOH9_GBE`Fhq+wbL@}M5TFp z_F?x$ZNBk4i+;rKMwf9q4_9tZeOS5T!GfMe)@6q7JLA(Uq_$>eWi}E>gQZp}GjJI# zMDG$Uqx3T!dfBvQU)GGCm9&&)R+&DFQc9KbD{BI-N`036>6+Q4_0|kTns;e>wqALc z)~OXM*7U>LP#xh-TU(f@w<@feyv(&$`D(>Vr7_;qvuSY`Tjj)Ul(ti&zKZca!o9xl zqU*?eMB~2b5f9Pt(!p8QtWnxZ9&@ZlNr+;lR2i(aDu?QamDa(kvl>mj3KO@gHqd^O zo|kgl?CaX5bK9Kj+7?^0tSSf}TxwOKG+0U5u}WFCZds!8utLJ@v;ul%mu8heZ!`!F$-PFR*;1mbF*BR&{XKMemUc zt9qz)m<(B0=+>~Xxq(lR0d4MF@hx&Gt>T+}D*k8Pj+r=rwig+!QAHc2;*W9>7J z zzb+arXD@7&hkN_X6XlUuIiIj}eY&)9`7NWv&@DU2H*2~@VJs&oNgi$E=fK-M7DyM3 zlBPLMo~gwFbl-sN-^a-~mY`hFI<(vj{a0h?`SQmN4NF;~WoYB&V=yk!pBNYGizB;tKiAO~r|SM)5TO4*|NV89^i=et?z+ zfC4aF0`NW%C@d9`KHJa9lxQRtHbfITll%Q6P`wVYA04fr@@?L@wI#TF=Tx--#mQ{!eTzAZPx$|>!qt-KgHiZBiz+X`f507GRiQ< zI_TyUqlXvQ_VCqF{&I6w-96y#;oK$1mtq7BDUp~?nMwmUN~1f}7Zy_?&zsyA5?w}| z0`X5%OE zQ^Bm2sM`c6vC~UWn4XT&NLY7N<(S;tv_!3u7n%yyHu<@xz_#rW$KBUX)BG;zSsz`F zf$)BCNXL4D0RB!wH(;DfhF5j|Wvba~0hhC7>6RUICGOp_V2WS$xiaahoK7iqKV6jl zYQ8piWMs=x&D%me1`6bR>kAH$o?$&kl?|mG06PJ80ki^aAuv!jc3miMj*l5*CuN6q z7~_CeQ|?$T=EGJiw#hFx&+-oxkI~6rHV4!unZCU^agLDeT#DKwohO2k-cVQAQ5J!y z&e1m3EDbAy3!6BCjvu1L@}5Vv1l4i9;!Bi>q611O;wT|hw%taEhJu#bUh<8h-hE+K zW=dT1;&LX9Ht0>BON$h1}QcvL(}b<#Y?-Pl^R-MQA8@}uoDithpP zBpdIeHlJM9^5Au$H``Ux(Ij00LhqW94_X$uiaNoq5aLk5xkfaQFa4RUYu#FV4V_;A z_#(g)08auOBHY%l53jxd{cY_nYw03Vi`H%Oh1T^o2dIxSI?+g9R0zBy5x)h%Nf4+I zgFs|I!z1_0n%&bAxXy{Q0FFs;34mP_Uy%mDgZ<%JbZkYn3ZMvpH*q#f+34*C$VHVe zGhadVDF8ujwr3zL;)d<-6!G3*tiQJ}abm(%2a876Cu|+PbBO8T;}gW)2c9g&F+#Jo z?pj+rR_ol1c?mE)GV_!w6L$jnGBvdM9bXlU_m7^O<6A)qonTjL*rW`2Z!oxQq;=0# zHSISb#4^(P^YLwk%TEyxM{Q4|v=Ti$v0hZmQT-zT&z+a-GO7b|+uo|3aBJcW5bq*D zIUiO^zEATd4j#tVI%DD);Cvl5Rgiy=k&En`tY_Gxlk(epE&eB6J>?G@Y{`e*<K;s^el@ZUpu%daVrRKW<}S$ zH|?^$?QubGpaLb133OpRXBj%W<2a8TLplTgd?2y)IR2=!pDz8Zh`T-gw2#XLs{ZCog5I{g#)NQn&V)T_zoodM}QsZ2m%}efUA*T 
zDJqbk3fIo$8Q(0T{Jpb5KMxzLc<26!SrYl^_L3J4>BkwKa3sBcgFkTS&vzbO@UI|! znZRiL>7>S9L2Hs`9kF(~sHsja?7Ahv9SlZ;YPDkmGBhZSk*OeF1*e-Ska!IP7da{d z?GUd6oww!&njyZ2-s}Y6_W}MN%HSv-kdFeeTCm53T8wK1b+x59K5j)@>R8ORf+Zm* z*+)*Y&xw{vSrShEunoD<6%)LUc6_0&ouKFVBKlqANXN?b0}TBy071J?vEINO-YzUBUehcTH(JvNNvcZujo$qWU!am`AmoPT2Kt+r`NC8RC#MVJa%-$8 z?*(vAYiw@axHGtZcYWh}@uEBy3minW%U2oEM;J~X@mZAEk>Vs0@J!+j0(Ls(*v&}C z=vN)sW&yb->_2M)D*;kiAP--z2O+ zx+6dXSS<%&9a||=`(}oJKy}-PtSl~#^n#z>no&3n4BHnAALE3f?MCMWF(84RLvMRc z+Q*;>ck~N`(${t5Pewmx$)%9pPyc4A;*YAl)K{i1kzeX-R^OH%^}V3ZlFuEvqUH3+ zz2a)ow}=@XetWPC&_O5bEDp#lW7Sx;`xR=x;msXx>~_cJna1o~IBeU`pBY3tBlNca zYw}5BdICzwE_@)XJ$}5rGZcx^8#RRd6e{APDIysCJ%INCUZXaOueze~b|>h|MuXR& zZ%-)R;Iu+H$T%M2h0O*T-zHb0V+pD_C*jb@6}^vQ4;l{>uv0q8WtfW>Ifp98PFK;1 zyOQncJ&ZeVB}nez20@{JRD1#$aE*xBj|qz90-4TU{sgET-F_B0>`mBZ)iQQ{w4E~w z&NYwK|3Tofn}dI|)zKXDxPI9g_|hZPVFqcz!p0Vv-e2o0)26A@RNfm|P)v&@kIpZDSBn?K&6^KvS;8CS6V$0ypsy59rB~WraUZwLE|ca`HAgrYT39+q{+5B zO8o<=<5$pgcLbMUrzjZUN&lbLab@g~AE=T~B&N#eJ4@*OQ>wf@F=HNL8M@9oTyC~y zk2+iS`GoF#{B|QenBmp2fjDfEU%7XR+;ZpgU92EZB*noNZ?JpZHs~!RHU@frQv)9w z=gb@q$sgWnskw63$qfrwVHrRyawB|_kE<~U%1?}wzbjupS*0G5@1CqqumX6U+q>9L z#B`aYxIbhjwV3YQ;qYO{cGCz$909=PV}c+3SG0{=ws;fOF~c979EZ9*!URfSnFWjb_#nyiry~+<7=Q| z<-7+~UYxv4aORU_h#JqzS*L^A5Wfi;dEoRlm%2jMpRHc|7Lm4+n*AAEee_+SVFu%e zIh$a6a8?-9c`vw5C4PY{+Z*hUQD)4sO>+5XOV(miqKtr@(%s*VbRIsy;I!UfKnY(f z3=DvG%qG3f#y4+cH^>z-<=ldqJhfR6IXQ*oCO=LbzCE(-pFOT;bd{$4^r6F4hgEEuN!^k#nvfaz_h(kD2<>1(V}TqV4Tw z_f8HtqMO~~8|b(v0UWk2Rv8~Sl%p+~b$AhoDmviA$8-eRVW;$lj+0XL(x*T4vCT2Z zE?#2B1n_n==nUG11iLft^^06Z-|0X@X>z|IwJ@g95Q>(qQba}RN1s|j3Ce6Weecka_X>K?5vQPc^o4D5x1pI1 zB^3r%%k!63S2&6r#DuOFn{19{P-OSL!#iP&Bvxu~Bu3wIcA4FD`+z$_xE^!9gXK7a zV-8ppZ8ghtKR7=H$*z}!P72w$lA|+DS`r#)&|s%hR>S$IlOs2p8k*bu{V~3!Md_Q9 zwesh3>w2)HTQ8ry9B)A>=oS#i_l#pJay#18qqI)Tu*27jkuQ5M&sW#Uhb~X^))FGc zhc8|(OR!JSA@xkqAS#~VeX~*J>nESrJm>Y?2IN&>!(jo^Mq&oqSn0k86ez3h?lIyq za!8cDqbv?VBj9r~ThLQGrIF5$cFMs!@0_lJ4nb9$pFJ4|&&gnbR*fgdnK8It=j(9{ z@v%!j_Sf%bEhiBMQp!1(OiCdixvyfv40&=qB~mK-La#2CS 
zNXi3GsqPM9&XL9U9$dBb5LFO*vg2m03SsP<#V2bVQ>=SvrT23ztvvZqYc(&>7Kk>0 z#)kkN2KX{SQb^2gxzry1m@gBw7IE|gP!PTY^xFZ-(aQd+1y#Na!2BG)hS*Z{0yI-E z-MfyO>*LY5urt1G$wAkM|=Z)7Gfp$gXasa6iqbx0w{o&YP27|Odq$s^Co(6HSWNfHujf6 z1-N$@mhmzHQ?cl)flrsq=#~2u^vx-qLz$M3{wqLA{HscapmC1jhh=xb%o)yl*UEOrI;D5-w>?;m*0HAgL>6UR^Ah~o6o#{T`c*aOu_M{a_% zJ`GOtvV~}=r5-=DKD#k#Y*g!n{?}d3Y$0_bY0()98}a(S0;x;4|In^7BK#OYr-u?||-AfFmG=v$ehS-($kDqhb-7 z8vyu21t#zAriQ~ZCi=JkH{QyS*u-f7?0K5a&PZwuB4EaQZI7`JoVx0*g*?~jJp5Q1XdC#}cU%a zBw`5$RoY9dgRqA_3>8!l0or4WqP_G|py(q&Z$;6)^j09Zphv@i`~NH1u~ncX=HGw* z`RAXR|MAWIW8$}OWc+m6Ch*(%(@f>U*luQ&e!6@9J9kH!!OTzSVw#Py1haw5uyK}R zY2dQ#B+IfKa5+|B$5=mbee4t)WJAE^*=aVyP5^g|Df6UwX7{h>y9*Ts7diZ?@J&Gb z7Bo=<(w>kcAltMEB~Y(t?-NGX zl!s)Vu$XE@CULx$T2ZzxQ$oo-Nb-`=S7C7tU7fkG8&Hb{7R%X%W^ebq@&%;4IvyZUB7XQbDu|v+Y1XH#B5QG zRIn21FtxH48Qc*qPyT}N#DVyTj^fYXq zm%T0NR>V}ruTQ)6a@7&j%PlcgYgQ{1l%M}F%sR9D zOl8`>vP~gD^tpv1jk4fuf1|`5=5jc=XlE*ZLxM{tJ*QEXtDirEUr~=N?yR>QAw{IE zI%O8AZbL>Uvf$B3h0_!j9v>MrMbmFgf~(Q*FC9Gz!Fhh}voI8&JEP<&e>41xGC{u| zexaPBKMaS}52wEd*_7GZq@-g~(w+=RFS$?nJ@{t#A+rL#r?Ny~J|@f#l;*d?J$0}z z3EWCRL6!;#&~*4m^|kKzU`}eE)Y5^vM);2pE$tk!WPpAq&>oXLZJ!?6&mzYlzYpj9 z@)b+JKa~FJ|69t$Ta8UY734x+qjQXF*wr8C=-$QfpZaKb8acZh7ec8nsuBGkXQxGZ#VI|{+BaZ=xCkVrSW1p+ zkhkOXG|f;8{-^0VYJIFcRX!dtus;Cufu{#tit<}9?MeQnWSkBgMv`6(+r~)#XOM-M zWFD#0jtcH7gr6F()6c@c8SNy?vA$0T%ikp1;rr$&?S(aS?7|*G0|d2C_SN!;WAHu3 zj_;_w{{1*qY+#sltcRqLge1sq{WScYd8X9Q`q|))f}sh_M}&XIhB^tEg%lr-Lp~B% z5W*9I4k0ukl$Zdc@yJ*_G9Dyg#AGLTG)g2uU0ejG<~WuSp9<$JL2rkDu-Zk;+M*IE zQ!^3WJetGyTb?TY~k^rY~IS^R38y1oZ(}#m}SK zk%CB9UAfv?E@2?0xD_Vtx9L!L$G+LUf!Y{v{!JjSlsclg32h#lz{RVOpC4sAl#!d{ zBfFz8Ks6~xqo<;OZTbrzS0;q7Rk+J1_{-dw7)v2b8H&|5p zrMsx8AwU5Vw^ml%qmJP3gT^8>fzUHS+GoS{#K`oQ-dWTeqV8?BO*msgma7YZ55s8H z5E3wh^VgsgUQOs1|ej7i79P^iB>T_H(u8T;AtHfBpA@Um!gj8mjxym1SjQwG$FcX%HS;GzeonL|Al+_0!Y z*7uDV=&#z9?8UEO1szv6Uecic-nY928~>%2QJ_&or7*6&gNFpR*OHX5*=4 z@M`#j^d0(6`1kZzyFC@~@>D>3T8x)!+%E=j2Vh_7Jp=A{60&`#CD9Vu~r82Y|rTEjRecarIw53eS3PRtE0I{IWSio3GM8BGR9dNsZN9ax{)2GI!}R%!=2njH%^@E 
z7$yrcNv`<3&fkG9{{|9NixP0lm88GsH26alVkJhlvkA9!MZ!JGmqAc8BgJJ0i9sc= zp^oaeBtL~5pv`hDAqWh(@ko9f0VM(7a+F$W)YC0Oq!u zqIuv(6pixDaI5b}U&rc-%N>wR^P8~c0!W~^;tkQ2Nb_8{WnKj&j%)uUyqiDSMXpHU zg>Ybm@-y-DIX(&BL&N-Cei;cqt>VWKe-k;pk$4r!CKBB1ODBoX6u-5sx9UD? idCqkT8Umjj8nv_ouOUsrAH8g=cG5QBKWJaby#8Os?#~qf delta 2424 zcma(SU5gvn_1-%l8jZfS+7D@Edux-eWhc9vFT1hRB&A84P~y0F-Nfq@Mb@2_M?2EU zcdpjEVuS*lhxB0!<_1#YE|vOF8t6lz4|yr%F?kE*A%%fLp%BPVh(k!vnYClt^dTd3 z?z!jdo_oITN2mU^ob@vqo508a`^B|S7H?&%^n+Wc-+JEQ<~w8~!xy;4lkd=tEHCpE zPXm_Y6`tdH!18>N&+rmp1zzQId>*hOXO~Fr$gM9)3I47Sw0e`&v(mXXO z`%YxlE%wi+F&GgIV&y}SK}xn2^rh?7TaFCefl~!qpaZC|qg0gDj=oF>>QDMf`mXv^ zzjFF4Tp_!=P027p$>4NI+Bu%&_CBQ~G{zcFg^AmQr$g5Jjk;*e7czkHkOD0m5`a0i zV>~k23&}V=A?-}4Z4vR_|FL3%Y(CU)lLLKn&vpSZ4q*iC)P#t?JYeOcA8y5@81te! zWs2PThcvP~ZV4}3WD`QIJ~mIx=O7ZbVwAxU1uHG5>xhOYs_JWV&4{!> z3bm5BY{a2e?-i0S9OL~cfHbpMp6Yag z9;L@2cWHnpsr5Gdmc2b^r0E>c=Dx)>O~n~dxKr$V61>@!ZZlZj^*w+4Y4eW0eP-ox zwQ5z0e}gLJlrxWX=um^e9aC4WAJfm(o;5I95Zym3D_MQ*;II-0R;Y(`kG{!<)-drp z=`KT}^SdA1}%uFBu*hOmC4Wo`BW_bz6z2jMnzuav-=E7B1~)( z@j0IxCS@)pd_FFPh0ucZmO}&5Yw}7=D?dOxESANXQ;j)C!X!v1_|biX5~+n`k3v5# zT@cueVi7=Ode91iY$|HE=rhW-2Q};kXdSk)7SWxDGhsjLxghR$de6O}(y3qPB1oE$I1O$S4{+1aaqNTCTj_zgEX=%iz5FYwD*or@llfu_(lo zc)mTb57X0K>B)O|Fok!faBm9VK|FO=_srBLnBH`zHy&&r{a$G&W+lMUCD~}L*Oji+Lm@Ms85wbUjC4$HWaF9z(2%Um-^U%Cn34T z#ADp(i|HwHE}{_?oT~wwi9EE(+L%?@~lkm#S=7^%42IBm_wtRZtHyUK)YwHR#pGbUn;~Clt!A<h$_Vf|5pN)9BS0snT#TKKC!Y0e$LD>|c@Af8a9rhQ%V|rq NleTHk+D~Oq{12$qPI>?U diff --git a/utils/__pycache__/downloads.cpython-39.pyc b/utils/__pycache__/downloads.cpython-39.pyc index 6f887e43025a80da40bbd52594d5919b272e24aa..699f0ad7daaeee44b2683fcc1fa9d126b4562b85 100644 GIT binary patch delta 792 zcmX|<-%Aux6vywmGrKd6yW_e$+OC>zD#;p!f)zxp<*#(3ALK(NNy*$*S4UUxOt!nt zAhbbGf^{JX(exz(AqfA1o}z~ydR>D4g8qYu&fUa&IiERmzu$8%_s;D0ANEJ*qEUn3 zc=mO`9{e1oG#s)Q^Y-#l2YnHO8S!4L!no+sFULOsAq*IG$O;4s5SA8>o`F5_RxfR` z7;An>SbU6BA(zj>GT^=(OB1(3dB=3y zf_G)(O`Hq`GYw-J!##^wAU>Jx41kd_GHV5tf!x8TxEI}3Un}d8brriR8I>o|UV}>& 
z^}Ox4NH@Pw3zv&dapPh|xv=K(7BplOepC8Xw@}IV)xz%6`T0VrF8xOv4ctt$6QjJ`$v`egss+En6%XCqq OEa8eD)|4`6^!)>>8?O@p delta 746 zcmY*X%}*0S9G%~8%YMMNWxEwtifx2MmqWo=G2wuU1TAJ8G@Yfb?sl8mrIfOb zi9HiBb1@JxOOjvmHB2!)>TT$ha~h}j#be_H z9gmzKY?!Dkv)uS)u1I^zl&fPMXZ8g^VLX&t=m5wWzS32KWQg!s2!{xdhcZJmj31(- zA$)aEGykh4Lc~@~44mx1nG6-STEf@%n*PG*U wm^L3VhdgFr31o}&D(67j)HTduk5?u*xb&T|m|Ee~=CH$|Nq-w9v8dko3v3L<M-s{B{pTI2>#3)ndg}O9)$gu)%AK9@0RArh_Rj3CpAQ87J8xS5!noOwpZnHO zAfN)NfU=Zbuu_&>?UXIoU@9nAC*{aBlnTi;oC?b|l8VSRnu^LbmWs(Wo{G!0Bh?|- zL@FWI&QzydyHZ`a1`FNAo>Y&;x}8FAu{YH#@lc_!xGc3y;^9Jnad~RF#3O});)>J? ziAM`7i>p$rBpxfQF0M(fk$Ai?SX`T0D{;H9F11eTyDgQJ>+Pxaa@~;HDAzkucgS^9 zYLi?yr#9o-QP@%(N(~jarnX8siNc-5ZK-V%?<{OD4yT5TJ5oEWKyF2D=d3liYd(b4hcv^|+yKXGK@X!=-g-P}XUnzxYef!wQRtz3BGxVn8`AQzc?7`3gR zwBK#%gxXLFwmjdsFQ9%;ZBm<`wNj6$-&aFwD`F?qood^&fz(O0T@B+pqITp?joE6a z+V!lHI<5Yzx=ZarzSk%_cL3>kt9x*FCU<5wfZCn8v+CY?D|N13^LVCEN*}yFN_ipRLN%aTn)#?G1mdd4O1F6>?3#bRx{$~Sf zf6khCayFQH3MmKFL8Kh4r@S61hty%D9ImIN)e&_R?S5MQp*ogZlgq3Mpp_4)SZuj#=uQdKB?-i9e!p2D-q+|Xp@{LqkeO(n*3Av_JSxupq3+hjlt30G=^{48hx`ddEal5Rh z5%Y2f)o%2!s@{ORiuyBEQ!|KNRBu#o!rPaWuimWQf|SeZnLHsetDeQ(w0cgx70;{c z&(#(60#e?f-lpD;l$v^ndJ)$d^%v?r>b-dKM)f}RV@P?E`fuvT)lVSh8TI~La_-IQ z{qq(|{DAr((%zyzq&|#Xv+5;v71w9ge^5V(>vQTa)x7!$a=cajmHMdqDa4*f+E1&G zA$CQ5Lw#KR3}P>+Yw8oo|2Fm4>Sxu@A?5ArW%cvwQ;59-vHwT?N5o!Kzo32*x!$RM zN&Pae?~46O?9(wjwj2LW#C|nqssFBiO??JA->tr>KC6BmvG?TOq<&-8!dQJyeIEDk zRlljefOp@g{ziRK{T5PwEVpjr{iz>E_!G$WpVgO;_5tUWUqgX(wHf5G+rxew$% zn0tRPXa{ra(jUO@gZRBacX~GPZd-p=S*e$leKwGb&wXeDW@}q zWW~!D-1q`f_Q;UsM^!F2l`T(AOFZM{ynHc-cv$DAav2Y?__^~smrDu6R=w=8B%0^77?UhIt~X{M6BWAva|Ev2%L*$mMKq%JajyVtFF(;(6D& z%T}bvbDo>8GFxP{d^w#jSNzCW-Yex?cfmsYeCN!8kw+F@!QVm@_o-!1oH}vp;@#=P z2hJThd*s~N^x@-Y(1VPZaZyyNum1SJqvuW?A31yOz=;#*e8Vb4xT!D z|b#>%>w@w}W~Ae~Gt8+r8Pk=LF+lRkOiwdr#Y zojG#g@L4~sGWt@!muR^)iSX8D$T zEue=``t^shd!KyN&1v_^Tp>S^N$=db^U1^IY^9hhdG3?P^4>$0(I-#q@A(Hp zXTCN2>o;_nOoVi~9}y3oodet81=&G%6f0JU zb8{J;J+C()hu+M9oio8!#QZoKmpzYBc5?@D@7f6ZZM_qhEPic&d?sz-c%l`73F~4& 
zT9AqzI(6c}!Su1yk75qnW!I17N*D9GTxzLLYL-d~k!-n;89j!AzQbbUw!RCO8>$Q< zQL0QL!V1~_d5rUm2XXJnXop5p!PAc)u6FO3%#4p0ayu@SH74hHs{4@x$Bvyi zl795eiS$7%nCifJ&zo}h?$~kZ(xqXOc{p1x4h8itRHSz!@Pk;DH*J0l%{T9yKt2Ik z($@Fz9Uu#@qDyW4;LPQsm42K@!~^iacUbcQ9VL#c-tO5dIBUJD53@O_92I&NbNeuc zc-DG6UE(Qz*+F$6Zk!j=V$TUbEfZVq^ii-z@@-Yw7TaIkyA21 zmh{f&l32G{pp>Mpl-y*#cItU?S+@YR8UYlZZh)SMA-w_jb6s z!q|@NxGq+{`+1bSR{`V6d<+=UvbB zo$?fxc`#F&_9H-cT9-Aju0!N?LygUnUxO0dJ_HHdv1}`D^;lb8N!Ve_enlTZ^3g22 z-1uYHo1}I3(cDm4$26Jr|)i#up=WJ8mUIS;?%Hr+4HSS=Z+3VWCHu)OqL_^7xp4^ zBJB{x%>-+f-dGE&;KhL6h@7z!^T%f#&z|e31?H`pP%SieGME9U&x);F%99YfuD!0{+JGK-7#msna$Q|mH6)tGXj}*$| z<3KFFqswK_kDkwz6sDBxcWTh~i@Ar)y&ue%y!wCunP4^f&he3>r~J^7GiOen@nc9l zfs%6CkAiNW$X(18{LZr;xQt?>hy)%SZBSA_wiu0Pl&WVMiug_`Q_T61i#a`7c5}XU z9!<-XCjC%B3Js#cex%;arNi8TD$?~HO`!Ac0D^>_u(~kyS7HIgttet~-0RolQ7SxL zE|f3c?MEI5-xRqV;;LZedyH4(c{O+|Flg#PP^vRXf6yvEi`Xq(FVOp4d|JZuoMKU4$q=LOs@qe zqAH@IFLHumi4Mg^s@(^srVfJ-+i|9xDPoFLBOolLvHW;-&58V|&gkhK$Wh2=GvpGJ zXDd@vW$l6Rq}j#LiF~PY*$*EUwn!(DR&{n@g`Wf7iV9XsmnVl}hD?p%u|asw`EoI* z?`9FvsY1ph?*-yN?RvSQ=DPGl#Yrr7AjqSXK#+MF8cLgjE zXf->K#{19UWtZzE4%`^G681_=kc8z}S|c%wU)vw&!G8SQVFbAVR38DTJvtP~9a#E0 zse|GptRi6PLn^9bxQ106uuS+7uvbLYe}|XluWf`mGx(Ev#u;0~sH8F1ChR$mVjyjE zf$OL72t1MH`5~_iUdVT{6_xRc+cFn3d2k`ZpF5;)x`ND0VGvEH^QF9(P7{L({}#q- zsP@!bH{5u9!zyUDt_v@MNexJ=(8@-uY~Wr98Ggw1GTOUl>FJwPA4{jfa$*$HKi;T6 z4uDsC7wfM-&yq|w{un3t1w0!OX*6KMS}DM(f$mIzGELa`0*>im3$XMXkYCpj__l)7 zfXI*|jYET{r_kVwnL;J!$FLl;xzS8^@+L6DWChGVgm(pIJyw4k%*^9k!;G8+8V42$ zBPt9&kb96L+kl>n1mU!PR^?MT%xFnAk-MG#V25Ga2P z5gD~OSdA`BqKfx&GJH(fL^IUAKCcE zZ^$v75O(X_PfkHL!!-ihE^9sPg=X48U#B8&kIk=IAk=#qhw75R&JQ&^CM zFfWef{2*$#&K-kJkLN@HX+!uycehC;>x~)EI)-`eP#F9(NHv)2x+Eg z@oW3zl-!S>s4rSqHdZd!Tjk&i;ZKEe4N-&}iuyf|8xi(!9_km+uj%F@Wzj6LInf1U z)@h(oh}=L0h4>qeA((|OG6F16E?k7-N#=ek0&Ne;{8C^N9G}InPW}3<>Z+yfT3m@_ z02_a;%;0y(HyFtXa#5ZCHtW%d-F_&0o|ThB(YKndpT|Q{8Gr<@zDXngEn28Q%+jAm z&=B@tV!{U*{1}79VHWH&i?lB?{VIZ+cDpf&!Z)2ni{xRk;Klj#0TlWf{AB$_p%?;P 
z2x_L@fHHzek9EXm|$(!s8xX+oaAB94}%apQplu6+E@qGoz;ZUqhJ@C7@mvP$?sIDpIyz^z{w#e|!e4xNG{#;dN zEF>nyMe6VUHl7L29kBJ!upTF)$K5QU)A)-a62(s-_H{%Ij+(H+%|lgKgVN~9(2O$^ znhE0S!*r=#beZq&0BMEg)(QH z#{;Dl(98k_xy95Irq>;0=@Z$Z7z)aNZgz(y3 zi$ZAtVb#$3D}hGs^VSLsR$Y9JwvRZw>)fh<7|`&rtBem_vrfwPwFh#(8nF zUklcUa?-ahf)l-lcg@rKK&pJ!4CGhwM1a38P+d2rA>pP8{g$z8X4OgLt=fB&*X?b| zwwc*zr^~|DBX~30nP{{V;0V<04+pLWTh&L`tyde(k^ThUSZupy6ZKvMe~MpSS(FS^ z*WR>s4aL$>uOGpbodQ~g4!@!`Fo%$QKRlKuPwOWzxZBfDY{0p z@*&jfcjz4CuKdMZ+ABj{f9ljZzk_Qe?N-Ld@|XSiV`Q0y-S8cXWq$a?sbj~Eobeqg zJ_>~>crYw5XQG@hLB!(_q+;&*@})GTphmP(%IYj0>HVzMN<*E6q*QPWbe?P6z~$!{ zGefrsh2KY_sBq$TKScUIYk(>o$ZuWHZEdu-TKbE~aRY_nU>)MC&mb@eobwM#7*TUc zJcxB8v(gJrgxca^la6`6&x_zG6g#uFegM3D%FSX*(qX$nMIGcX_)R{-lo<4BsNL??G z>N@?zxra`TJapjfLuZd1IXu(@6)luA#Y_pDeVVBvSoqy7DQTcV zztdQHpkom;NJlzdk3ujQgrJ;O(D7&p{lIzoEM6HeLwR<-==xF4ryM!r5QNWC$?t5- zx~Oy(MLnqTe+H!WA21kd24*xx6lEjbKgSax263orAdBN)k4-5hexCRxL8if+&a<))W zIsFwrkow3=vpzwPhXd6O|Bf0#@ji`LKy7VU!@kC{NU%&V{+KZt`R*k}{SQ15g1XYy z#Ctadq%2z_y-_0#0%h0+41iIS+RDRHKOmFvxI~cp8@ThW;Tr%;$OvG!Ap#{o zNEZl}brouT6?_q-7Pzh-I!}vg?FmOYFM_2~A+S}zU5OoAx9%upTyN{}ly?q%Qw)rK z1sDZfxa9{YbJIdKheEzHUMP?H!D8mJ!4FexP83bfQo4jSC2ov+WY|+N7MUg&wX~C6 z>?BANl>BKJe*PJyNEiE|yVKvo-3|R~>P<j zw0IDv2K{Z^-MYoYc*+(-zst5lTLdv0lO8P%ag5mF`ZWUff@5NjT(`ClMWy+gi^>r7 zzh{g~*>|R4Lxuhn_Aw|)(dX@_|hWH#PWCPHvE&Uzjy>+u6##4o6Hf-9! 
z!vMPO#TAxqXz5@(I*VyW;Oh-uNEE2N57WM3(5%T*pJw?$py$T=I6^LeiyRk0L&c;> z0jP7rUV>zSmLgDG{1`m~^}x73n2VA1MBf1w~C+rEz%+a^?7cTV7Gb&Z)$G(2)Txg8G&-h!2(ur{y;f>%ZV{-S8h z6^zKw*{jgmx+(dNW>@Q`$PKhJS15=zt&Y~rNdF&po901H>f*JFfoT>)6}a9(-5fo{a zhgk^z1wbZPiSFRTFb}EF3YdFV0Kov9L0B|5OKP|!)tP_-#|u+EtKtAL#6;Br(hw5% zD4{x^jn+b3w=-ccT7y9eLSYCt%9w&EN&m%Fftg6DOLfn%l%5)pmFkstIi^Ig-(l6q zbda5xTBiExm58+Ei#5*0VF`_T9Y`IhMNz^EDQ9KPLVVTLh{=gI#A-3MdeYG!uSL`v zFlaBR!BRx6t=Ed&SDl$S`n|3e!<)CMB=UDnI{lnRx7Xqb*RySE!`0Zmff=V1Mc$2) za>vymQ@q4nr?Jn9{&Leuby>p!0|R@$GD_!*9o4>VNf@q@l_}y+Hwi8`S&fQ|~(5^J;WTllgZK_6eH!`O>TBdllyQL?v2Pdyv$!eTw zV_*}`Cj}FsDX-u~KT=mpS8p>fUEB^<$>r+JjE?Hu#qBybmV5|qSq=DH+|H_#ReN~& zy0xPkMb2!w0*f{p4(tr!1~zaRpyX7gP^iXOE@R0`5;bsK0-z=GTldwM| z%VS9nZgO-wS&hKwq*%;*BDTU#nuNbq^66^N(@9K%r&(AQ2nBN>@pRHF*QdpBwWl)0 zTPcPTcCx~8NxuBjb3^k= z6EpZEPKKTDSvumII30)$;|}fZgAmSF+YsbnH*>(M_2b`0N#6!B50?0q$bUU_*YLf= zJKH8Il11q!WNjsGjMo@-!4&eqp*nTlkpt(E)6>wzm6EU#X?oiD&iRa1(2V{rZbT*T zcTGu_hO7%FH9`@$2%5YUNt-U2{0G_7KVvCG*VjWkhw<-vv@S6xvtc}H{E-Ti7)X;# zh=??b2G<~&5=%nxAiEhPM^u9dSpqAHE0lLspnVu|U?2F^AV!=Nx*C~@)EvF77IwEi z3;@(36hz-*)k6Aiq}@$7x>|&aiy9bM(D611HkP`uN~_TvkwT+n&mcs>hhQm-1VM3X za@_Bf_0#~A{xaTN0wUp$jsqTbCDqD6)!?zlOE=+@DqQuCA2>5|eB@ZNzNV^!Tu(Z8 zp#rmj;$kwZY+G^^YplA0PwOhJ!Zba-JP5Kmjw=bDOmRPQDWglkaxgMP>Us4b2aWR} z6$XwiK@+l?DX&Eer?*SgLyWexTbwP&kW*X(20)cp!$RGI*-A^b%%y5iy|v;$Nq?m* zS8d}jfk-EQLXW?RlISDH$vR874EPIywt;N{wYBvD!v~AkPY5(QN$M-(PZmFP6||Zo zO$Uy5_0NQb;?i6irB@L>e}wLmuLDAN#IH_#uU^&adsT``eKFK`ycQUTpQ(z@fMU)> z>6!P``k5FUuOf5t8t657#0#guAx0)T@Jo!@(-#c;1bT`bXjO;M!ALEt5}=3CCy*bi z;E4X8Q2&10>(u|~bOTW;(1uwHHs9%X_P99cTm8;DiXcWITUkhf&@!cFDRVZ#UF= zLu*F-Am%UqRST*L-zk~TGDG(27-m6rRg(053{*!!gDk#`WANF?shdmXLY#?1g*bPM z+z0!}MbT4C05w4VBRgZakn0BKi%}JvAIrlQ&Xo&BbWsb_q}fk$Is7*~u5L2OuC>iA zi+7P`Cy`kQ=5=d*)n1r*Fk8#KBGU@~Sfw-8FbB zkbB7SB*;348==_HpSJO7AjV#1GMCJajX}7nP<@bm`RY58Lj#5!lK{}sxM?6F?$^|s zYa!CFW-Q4}eFlTJ0L2xJC=Hb|H~j9$jP~^qT$12K<@x=Gsq@GK??HMPDZ^I|bH(py z@qAyT=!P9w6g$Gkg;WPPsv8i%=_t`wIj4Y_35Y=9IaGuSHw@niX!Hf7`lD>Spu_=o 
zz|xfYDmn`nyoT=p3+Dv#`)-^vXYtT|7!MOcZnQeMsVq1=}U|59+g+pLg zTlaXP+JQw7G;$Wo*;&Xwr7rlXQa*t~w^ctZxD)2HS^(Y(J3U}oP+-zr=L%_XNU)G# zD{+Xs31ubtWt}Dwk#(9d21)A67MswzLWBBmk=O4kVwOQ8a1m1pPFsduleKEYTcc1e z>lWhFT(UKSm?S+JsIE67kS~sls^De|wnBoxgxqf5jyJYbxsZg|TdaU7hSXaGHOx;H zusFt&P|_ti4dGB-?b>D*7nd!T2Am79uPtC&%YNv6xTV`OUJzzeW}cG@p%S-hy6^zA zL}`r|Rf{lMuVyJSk-FNuaS;zh;}N$9tf-iFpz*Nu0iL(cH<6qVX6w|;FjPyb0KP(s0%?#7*3JE?}$OBZA}4XFn;J!nm%XntlI6x zU>1#Veq0rcf1N4>-Uw<+SCARRNEPTfxg&dV_i(;Qd)JmBGB7Id(l<{5l_d`wA9Xk@ za#Mj&i0a)x>V;$FGByAt4{?`4vh_864E+WpM6ODL1+EB*ovS>F=#sHNvREZNt-)1`)5g;GV^o*$V+{?GDf%tQ;Br>mz!F`+ z8x${$A5{<4W6#(>)DI!g^|(=(xKK=Av#LQ{+As+5Ix)A*o{Nao0^p`CeJ?PHC72{p zv%o2d7Lzz-3|{Vfc`Z-^=QeJEYf8fS4nAcT%2;w|;OxMqL-(<#=1_)q_kfcl4-Za; zs#@|hoQD@yHaG@ep|GtEB#saJ`ovl(g`5rh!7Ri8Ka>~$5u15!l~2*f3hEWOdFRps-BJY zyu{Y;VaC@W)>a<}t|?xTgK5-<oB4ihyfQqPzS^;lqXNZq81XAprTg|DoISh-+@kr z2E~Zxz8fm(r|PZhfm*s(1t%R>0X4-~!c{>~%~rzi1HY2+bSLWBU+Iu1W= z)rra1KZB_^*Y7Q#SngTgK+Rc$jlyftvNh-hw-Uc7DC$DhTfr&neh8%i7LB@0s_L7x z-A~{NAcRTUZNKY>akDZB@yvy7L# zo;X~-RKi|5MVfKlA=$VdB%?QQ6f*#L$2?6M-ghr_JlO(_62$^`Fr^~(c=dIMn0WF8 zmjba=Q|{ZAbj!(Xro=?psgsR-P@!L)!OmZ^2pp;ZhZLY4^j8+N9J)O*5 z&SarEf)o1QgI_=)2?{C(+HDZ((lEe0$nwDJdC);3A&Lr7D_CZs2UM&#YL^-I>Rxqi>% zhU-^WEmrRYcPL=((AMSd#FimmcLROm3Fj_RE(o|SNL7?2@H{f>E<6#Lh}ME0Rumk0 zEft~)xee9AUU^$XQQM++6m4zB#nf?kXtJ>S9D_#@K-U?a(&cfw7Z1e^;*Brv(J-SV zuooIeJ)8Jn=-06XA}8YlflZ8KFu3U~Q}#0#0loyxO*Dy|zmCVk1Q8{ZS6&G|I{E`W zhV(XQQq9n0gzp0$hT#o<3s-;(JzGfD9y-U(!g@$7r}uaco>8@cy&p`c1<}|}=&PwB zFNT3fBVMTS6xNogiq3%%Fa3jDqm4zsMSEz-B_!e}$eKyfgWMMV+~Rl<_H( zI=38$Fj)Fija;f%1!zfXh6&3#^5(H=tJHT|DLeto3foeY>|}ZlxUB zfcH%LfT%@kQT@XjbJ*T0wT4jkR?n>lLlUb+(VxM2Jky5}ml?n~pb2lhi8V8EDRZru zQ`U*f9bi#WtnO zdbb+hDhv`RO9(bQYyuC`ojb(66>hbI#73ScH&^;m%ihgN=w=EkS`qC&elC)1>=AP z+kkvVWkG>cV*u;Os3~WD3rXUO})?!Eh3VqR3MysL*a}i3C8e7 zg|j7Fy+lt1@1e)q8?zUsKsOiA>k%~c%QrTqVTXqgO*`0djzFE?xM?HLvXQha2-rj` z&W#}iTC2=%8!L=xc?4wbtgBeS{udf3qWQ`jM~Dovi{E!xmB2&X!04;6RQ(RmLgcxn zzWv*T1^vG&cOr-%HcZc=VzI9E+SZ&|WJKKL9qrp1IcqpZ(*4$1WBez8N)_ 
zn;R-f77k8kq72A9bA6S;A`b#AT;ISYd?dNY7M7&Q+ZX~rj<#Xb8`XjO2vc`4DDaV! zyPV6S-Bdst#V2_WIp<-hQx<6|k%1bw0Xe^;UKX8PjN@Wf3gcT%BxZzBaW(oVpWe%- zO@AqjrmX%cJ`&wf-AT2+-hUgp-BCProIz`&?A7cFLSq%3HCL$^f14fejVLb3vklJGS#liJ*+jk6d z;xLRGF)t29)?KQjyK`!HloOTiIg7JLKm#UF;I4JsVE z8Y(K)Ee4LB=?P=RQ9WV+>6Kh!Y%vrW1`gvJ)7NkV@?+G?pQXE~-**@!FYAf)bI^GI zwy~k4Kus$2FmAvG34q`w0@jVVWCAQF{6GMpnk~ET7SkZgPS^kr&%xLT_i5bIFGYo4 zf>FUiQ|WJ$Kw)t2N?I2j=B=CjF@`OB#2bmIh_;pADkwhZ<8okp0ZJO%ec z1Ka1>Y;j|#GWJ)fg&fqyJa*PRgByd*1d)B8x1u<7;3wf~(x|04Z37B{UhM5GONxFE zWA`w4KLbH3??5aSg{#&SWn?qF@8(k(<5+VDBOKuMsHMkF*wJX0h_pRoh*%FaK(ych zY2Y-7ALyUJ``2T{L&CHhSqvAJP$c6COmV zZGd8(Y?E*uTM!XP?4HIx8MFX38ygOVc-VMbclH1h+t3TciEY5!xAeT;hR*AcBS&?u z5Lz+4f;U^{J2n|b($Kzs)t7^D~oJ3s{!FzU@z5K%0c-BbhZU@zFp zetXc;KgoAR^q}UZ@kbVd_d)#J4o$Ew?^i|lq_+L7zvZq6M1knjpz{e zQzcKA0LcE3u&+SA#oKuE!aIWBKBqYxgS8^jM79@wP5^pC)WbWcc}nd zfW$>H6~s0lr{Qoa1hk=i3SF`3N6D>=Po0^8ibWs20u}&W+^ND4H6t@2k7EcwNf2jv zd?8$&GqeUI? zd=M{Al-Yd+!Z186S-x{sd>k%pl02gPMVk0CT-bEQ(Z2<6jW2=nCR86-lHZZOEvxrR za>=uPcu;;2YzOTeU0^ttQ`|3YkY_k&Qh!qV1tl|TT#3|f3gc9gm%w76>{ZC~S;@2d z#ynRn{a?^CsKnblBaCk_ z;;z?t4~StL69`%Zco;y-JzlNGAEll^ygKEX5wGWv3r;o}oxD7jgvY}6?ckHaQo+G9 zspt7HY93aTIGl%z*r^+RPvgnT@ z4$pTZE(y)_t-N8)$&f?g0cJ6v^9WEO0iq|FaGpU6x76VM)1cS)5@FB_(UJXIU2yWF zmY6?ih+ly*7la79VQ}9Yt$w=?&Vbz6MgIWu;tq&y;(JcCx~@^^BEoK@u$pt)V_m2-QC|uf%%%qHspLi1rg|x?l?7*{d z8d$o=9c3=cQRI+#Fk$2LwSG)$4hGbsFq2$?d&x?8TVKHjYAHw-#R7*Hef)X$F_?w< z-JZ(pQ6yf2W)eFGrjRnd3%}iqD_Pcnhc^w=vQUI|FAV$R8B&3{Lm;kvB_#^&Fk*kS=5^V28r*&p)4>g-g#BHo)BcXrW&dxd+x{n~$NopB*Zv0(Oe^}oHuZ}vy}**&kU9~16RB(5 zPyk&89;y=1abgf2G(`_6rc=Ja75|>I6@1cH!8O8z5tVKc6L4t&mj}@mni_h>0aZ=f z`+#d}L32nP_%$%$YoW&k;ml zLd@^L)-fCzm%}bsoa%}VO*n>2XbAR}=SyZ=#`l;p&b^muXhd=HES=(TaxoIkwtm%5b_i%{%Pzhvz9Q`%`1y3SO!in7o zUq{Jph@7w`uHx(yD>z{)I6)9O1rOo`7(Q|0k%IAY9wzF3p5hDm2yAYc$Rv8gt!zU? 
ztPg!O+Dy=P-8wYTCC<|-iXkx;3D69~%>F)F389q`)n6uT|B%5+xkUi=79rF!jU&B%qzsAWq{gR@l$Lm0lzXh08dopZzCUguy|S!IlggB?T--H_Q=p_X|Db> zD>;Z@f#t~lp#{Qyfiy>R@E2l?t!0dx2^MHiU+7}MnY%!SV}VMx1!(foXZ3y(MSo!U|UC&?xgN+RCV6ch7W(K773!JnIgvkPJI}4l` z3uH|eb}-nFi0V2 zS(UOhC6M7C$djW-fD5%7nfaHu6Oiv5M#cqWLA{0F!`|->=o#Z)>^*_yY3S z<>|}QyQwn&G@+cpE9>l?)=pSI=*6SA{z!PgjO_p4;Z10_^b;^@fyqtay(E7-yamEd z7z*^6A6r*%4)a@--;%!xcfAon(04G{!eGg)1qd&l2M8}u|0bTcfRJDW2=C-O)Y)fA zHX47#J-nBB3-#2Gh9fj!&A#2DrxXCVdAAl?q#1xGg(E{lZqZ|T&uMwj!1dp(Z^hF3 z%$)u=>y!6d>oai1i3N*r#+1$iHljAxL>wt+Dx)q*cwSSjx^xiYV)cyyQ?P+j zfL7XvrSZ;92BrQEQBmU^GbD|7q~!Km+1{4?W+)o@Z;oDC%Woe791;pQ%=}aupKO5f zMKtYAo42C9y5N?*fc5rrMFTos|ZJn3dNWJeD9_!2)7bSkkZX<>?Zp-(Ry& zWI@*18=>98#Jh`q2_mKO$CXc}LstG2BJgMo!0>?$fx=>kpu)u#(EXT@*a~Xl3NtCM zm}4L;(0Y#`XGx#_9Freo zZk>~@!)9#%6lvHKoG-}^6@7$R!aV6euXj+_SxiV6^Ft3A@z%oI-1SDJb=(@j#l#s0%ojMK}d-Ea#wi(v~I zv)J9G-Rx>)XFqf9KcP4`jyLEA8^?5jJi(*b7rqrlhY@N1;eOG6@;2OXev1j=0dy57 zQIVs;SybR_l?_o1OZz#QwQ|4}&gNkbh!_@qI^p$g6Cun8U!cV?WYXp>SYTjQYU-U} zD8SayFi-t9A`6TXu({0J@SO$SfL9}IFW{l0b*_VR1;$7xgHI5&jTSo+!%sw*O^ApH zgH4K*jNm?rhMNX+*Ym2te6a3L^)#|#4icAe-oT$_3b*80^-p0oTnidKbe7L;d@?~+ z9JvLDt8Fs-ERY9w4~Hj|98iIObId$8>=BAkTdOGq{Mxa~OVGp5xsm&!Ljzyp;Eywel|A$6&$* z86^+fI9g_jP^JY3*=8vY13o}Dacmpgl{Ql4tC1uLJ52%(m*O$=X>bKNPD*?%5nZ|* zJ6xRdnKNX}!E~wbY*}yZhLj(*lE6RQmIx$$;zte%HIo1T4atuhj^?wLp{N1ybTw!L zdtD>52zIPA;ys-GM@rvVI3VZ8kZLCSLyR3~@Gyf%7%ZWBT?WF9*ZXe-@}NBk8-xIP z1O>EVJ=`zO`)}fg^Sv&+?Z+I(zouFWvlg=u-4rWuG0#I-ojivEHRmu`ZhJFzce3U)r+t=|1Gf3TQ}iLVKHPiEE)Hj285i_+G?R zTGRf1&DLB3#-XWw3^bR^MgpF?iUh#Te@Sbn4a&-Aw+8u3?I7RSY_Q(&xzrT2JfqZN>X3{0xfUwDB{1_0jnSS-n68HiOrxf+06M{0~M9&y{SBnzO;-EMv z7-*9Ko94h>9h1-j%A2(G5bXVyySoGq1lu0Ma*9X9YbYVw z2^F8|q&=e+>xURS)1^9Rx^b3LoW2AnfwWcEOb-+rVEJaSiL4e!o<4461B-woc2_`l zL3<YewXpsQw#&PCN?v` zGLb$2zn?hERS+^!_HtA9%!=9ownp?~p@vk@FeSAW^^{)K#}tg_vSwOe@5b7i1`K39z^|dUiuv#jMtpo_W_fAjjWu91U`Tbsufj&@gU!+hYisBiQCnr+ ztqsogsR0^dCo%T`6ZF5rjUAWJhDQH`s@seOMh~7GjnSI4=lZ$NS>CI|a%OF9Ew+q2 
z1BkAzbxt}c=>sh#VZ3m>>2kDd#fug;4t8=A4Bszx-71$Uy*WDdmeg4CFDW|bBrqG- zHQU-8k=i<_AsRi9dLiW8Hk0%QCa~e9c3UlpFDV6QR@YXWSx}2eYhMQB;l9dt;aLy9 zl6WV!Xs(r2#!Vvsq*hMy+lhCUz=V|U^H!lGj@WH*m$k8SZ?D~6ORC;gP-x7o$B3@3 zt#7uX*|vFG#$f%#n#LG`&El9h=XkA~b<|eZZl6OQcxYjrN{f&SwJodlV?G<ltcsvE$84(p3T8$g=g8g;_9B`C;XE5YLhNN(xD>@59gIr`rLRtiN7=QQ8#z5 z+%O?YCBK|y)y%n#6>KSOyJsi~D-A6Ca?_9}nChl&N%I&X98!~TlE;w*-X$1ElQ?H7 z$s;;;4ezWb8pRwhKbp)3*1>^rCw$4r4SN5^gLTi~YY$@Q&R+oW!`Xvgp(Caxzbq+! zG=ndNa33|qn#d%+lBx1VInodJaURMC=@4Kug-(5Ze+AVn-A0cgNXkhfg*;CDN@8oy zU1Qa_i>;qooMnuoiPjmq&RyH$+;3?38&Hxlg;BIN9;+ghY~%wGJZMdT-9F=q2GNg{ z$|dfi7gdQPa8vB=uzCCJ;b1D0B7F%rb58BqY;L0EzhVjNeYrVY*X~R}L0taI7!kgfcvp%>LIk}$`C!WCJ zWiOnIQ4;FuM;_RSzwh81Y~W0+cFg!yA8$aRy z*ojydgv)N=yB_10C~R@>h<-1I21_piM=E$WW9G}W%@s4^gKn$blywdbCNk!@>Ey`lX;mlLz?D?azjGuXS_&{|fzho=L$k`^)0^!^r zSeJ3i;I?YKzF8=PbI6hatU0eKIJ|4jw@)6#h8L!T**zBk|WI+YG%@Jx3INV-cjspiV!AiA85nu)f$|>Tq9Z zEi{KIrSHczjMEPym=IAo`5g12JR(%~oyO%H1X17BbDzZSm$+esYmtEfj$Y}+RO>=} zI=l`}C2>_x)DmD8JK*vPX15k?0dsMf{l zeibY;Tw=S`s+C+6=6*HO*32z~^O3+4C9Ne3V{~z*)vA$ao#a8SU=$=DnX+ci+f4tw z{)y#i*+31Sl2>afJ7Y1g0OOdTQ`1s+eSyHgwn1R!+$!jn;kK*RU$w!CK_4zH+^{&} z{6+`i*?hK9o<*0or3N=G9H3%Gm5$DF`4-3>oDb&EH!gZv_&78*ZYJk&*1cHmK0iH` zuHVyTe<)Jz#Ubk&-$umuTI-)`u68w&kb+%0)$Yc9@xbZy&f&YO{Yw(|?i{}V?rH*T z#SYL6pcmIQcjeKEmOC&pa(9D*6RRxX;-|^oh#DDA72S+~nW^oIRE|UZ99}UVMYo$T zRTL{Y0Tn2%MLXH>E2@UZ`O#i{pmq@wtO#3t%$Ky2*{P&pBEd=3&>g#Xy4!RGD^}mh zM!XY2HG))pse0Ru+bviW@&e@O1!Z2Cu>y3}k)G%`vYb&QFYSjy+E>_*kF$JyXBTSP z^iBFv|FR|dD0oj*iX~SVt?Jedjq~6hqF)d8JemXZlShD!LP>s4dYjlJlav43W;i$MVa zj_VWxXg>QJlCSu}u~qp|e!@Nt48j9Fx!1mKSO6~!&Q4P~ra#Ys+o$wP2=J}+au(b6 z%h;!ytshxNRWH7!LU$(0QK=xXQ7TFjE+@8!M=Lz{T(+5|>R5|)raH{JO%r43X8|Da z<@kuRF&2o+w@~+wvH`Mlhz5jI$KqVS!C9@VeX)IpfFuCu)=ehTd8N-Lk#~X#Frfq1 zdR*7zfV~7x+3f){%Fp2N^EWnrt9ganA%97^hJUy*IQTF99ewp2%oRtu;a8Y5{KK%5 zq|5kwI1xDa8-ElXh(8YD=e~?SfcOXNDqE0z*?JR%KOnpdscCxZub|1maTl~?op1IL=JRF&JQJ8W5s79B~zY zE=U~{jz9sI4rm0y2w4-oHHLk19y{Mkzz)!X!#9@II>Z+GBAomCaiS>>+n5`ug(gckys 
z$X@sc6W)iZ(-5XZif)|?uONMBm63KB2$NB2QvkK4%C#Jxc5up-^U2iUz($>qTE ziysYzVzJg3F`B+;`ViMLPw|CdAfIl9Esx)d?#)2%!!e)Zm2$zg~C*u@v?|A5)k%-feO+>&sl-+feC1kU}B0r3d%G0c#LqWPgwwW$pt@ze(0R zw{t*5dzUOqb{r|)D_f+ zP-YY}6QTmQ_{l87F3==eB+hI3?#(z;MfUH&7)x2!;QQ|~_8A5z2xn1)2Up?5w6geDSG6mmC=@{rRh(cAds zyA$hz?5#gSz~a~ZW1P(tv`3{DP20wa8jeBuped~FcEi0OWM*VEUsU*9EL_-Su16_h zaz7pF)rCA(vep_W8 z!W`v!O6DT|pouumIW)b-3h*B``eRo;PN{@-kn0+s_6RrI4$U0fE`vkxLd7H>t(Jot zVULnw;5=X2^hmxaRCzPoPFl`Su{A&M@yVO@0yzz6$v<(<;wvSXnc~?Yn>R&Jut)r^ ziaCcP4dooaUBz#>$T5>?eFt8Zk8bEspo%4p(udK|9SvaO`#`*hPB4j=aN)`uvW{X% zax7(xx4{vB!xko-hYoN`;quqK*czxmpdC}jDqg~R9g`%u4Y8v~`7&L8r3Cm6@N{(j zFoUEi>J;Kb7QTP3x$hJu{58_g)lV8Ss5B_Miu5n@tzVJs;%*E7YB>Wy0wmY#Xh}V4 zK0jkG=0i0P5-v}(@~A$;_*n!aM~@m`A5{8Fdc<^QEO2 z_OWOWJK$W|NSEq2=h5miPjrDgc9^^4G|`-V*y22!!C8tAcvePnL>sfo=MXSsr>XYV zTqJ%3JM*~wRIw<)u93_E|?cnh0RL97pC)4L1I&8ob!8dv|U;lOX`oxC2-#OFbVeQ`H@Bfw&=;Bu6_pyM`ks%DYm@lkeOwN zfkPn8a2Na-MC$l|nd$MTzb+XGT7NwnKtL z2sXr_@Nx5h{7Pf%KkCL8|KV5-ur&!e!H|16R?0r#9xyWes$&<#m3S~7j)tP4u28(A zt1H~iRG!-t4~9GYf_?pc1JTa@P`o?d9Sz093H*zOqlrY^iSay^`09AbN#HD>_0a_K zbi_klgYkHDe|)eD`B3t%Xe5r(gwt`zN-4}sxEhS#sm0MfBq*EJ3kf({5fyh|H8O=2tPL- zLasn6pe$t`nDbJXGi_u1T$tc(~ACT$@@e@kn8yxGuF$ z;?crjaVRw;@mOJfaYJf@#O=axYFO&nnA#-Q&8ehZx1_epbz5qiT(_sT%k_@b9k|8| zJBoLv?vye*3OkDfR7Y-JQEf_2lkV2|T}VDWDRAfdzZPPTik7C~x$tKEAPd2+!7>w^MJd zr}wW)KdjcK-jv%m^MD#q>(rncx)wxRHp%^ZwPEJLyrqV(*=X;^Tyo~2nIq|kbHj^A zm9=Cc-@)Ad3sx>Xb4+bI9LPl$kE6ED=WYF6l`I9@o^ClDP+QfuD}mGr^+k1u+Hu87 zJ)-Q~;YnNFsdnP-(cGg80hH)0KBh*NtkmP`OKP{;gC}p!y?G&!I{8RIjjFv@0%~v0 znmM%)OpPIBpW2U<{q>a7NI9VHLdsqBlr!pXbq{KNi@I0chwE8&zdDHPIrZD>P;MwU zJ`_L;-lz^=38kJ;Usi8Y51_^;)mPL*>Ih;_seiAIs$+<~RUKC+@aEI%5%nmpDfO6o z9M@;mSJg>%3OSxtUsI>m8N{AL+FR6F#NL*B!=$Cosd2>95`RKHiTLyCDfL#ool#G# z6s{Awy~y{BdKS;JxwqubN)kcD}eKoo&8YLpGp!1$i1PF5_tdPqfRq{a`^r@wV&~Dyin@qcA@?S= z9dNj$F5|AKzOLp}6|s`~301?}Wi_wfj_a)Y1NDM>2cBF|FRBHkXmv%sgsZFmP`#qw zg(sf+DfMooRMdY|?@{kX%0+c8w`uW`y0&DYj`ykeBki*KY4ri*no}QCKZ9#k{R{Q8 zxc-FtBlYv@L&#B6f2=;NK7!ah(ypstK@5 
zE>*13#2Q!W>9=bXuS)3_$G z)4A;Vbh+ZqR+uY#de+OAOBv>gr1G;T@&y*tHRZCB^iC-1qU%V%Mv|D{tD5&FWk> z@1nhaWLB3a(N;e`Stw_`^!a>ARii&~X#f7vd#2_`Y`vCc#3swSnDM-vUMAH^tr>gb z)bY2TIh#IpqKz;05QlSio9V;OT%0p9@?Hj6}v# z!81=Ct9I`_pP8B}0O6C({ofId{A|FztD>?xDSVFI~Dc zYBG;z%f;~%BaUyET|bg5UCe91CWuksHaa`!$MSC4RC9fdx>MQgna7 zO%1SqEF_dL-GVBwF->!>=R2HWEwJgWEGq%1T+(?jmzgN! zI1AiC1diPWpxE*oem!b8L9_=CS3oF&1hznC(Yq1x0t*4%<5@HIOz>FXx#+yj_#)s! zig_X8g}m^ha5JRO^TC>>x730vcoBn%oY4~V$L1Z+T8!5MOV)g-7Fz76IeZ&wiKW1N zxE5aQM7&EOmFc*z=Rn@>T9|1na4s+s8q=La z?CG<942dUEQcn9(V33o!i@Ac|dCmg@D>jNq;K7N;a`t1bXgs4-J<~|UcS@OJ&W~Ko z>4~zN^Q~z#ErTHr6{OH08tg~v&0IaU9jGE*@6lQG+Z{jar}!S7RSBj z_@%;gFR|N`_oR=W9y{^aBk5z0on0{jIQY#8pl~D01H&?J z*KD2e5OSDH!X&Doay}Bo;M$A9r9jPDbiB}F7)(=W$;JW-dXZXi!P47n!BWt(Rd@jl zjp?=EOjJcw^cn{n%Vi`sR_#7AJ9{kSW%iyeXNnlJY6R$`G?|~OZaA5r&>20q7dZ;~ zY=&4Pd9E@$Th<@{7~^c7Tril@8TWNSo&f{`$3r|v!Gjk zFgrQ5da$}Hz;9{OgEZdH;bpf6LELsQLRbkyR2$&z0T3*)2xFhjX%culiHJrco0^w#4Wa9 z#vTkP`@@b3UdQaHFKuV6S!Pn_+)BZ74<)O?(R(I+%f%>%W;GY-lPF6+gx4WU8wvTy$u*iz`|)(T zSXPw+IDcP)zio`5;QX??B=IRm7*V-)EM{GE$V4P zu8x&o!mBcY;#PIA*{VkG>zQs^ip|BaPg=)?pse#jJpj~cd!Vgoud@WX6!#&-n>0`8 zo;{{du*DI55*K|60dN$WohhUu=N^0H$jP&(beXB4Od+F-2Fh<`-jx8BnR^xKF4>?2 zs6~(E{IJ^F>_=m*-?W+uVwoVEtE36_4)hcED`5uM1wBx+yig^e2gwGp@r~bzV>%%u zdwe9^>WZF4k@^CI83ts`Uac>0VD4;MmUx6QEOS3BNfWDk_B`IYhM${2kPs^9Sk4ck zcKh5hIA9Eb%pjMZV31{So&h2FW7f7pmbc^Qa+XB{J$5t{4JJaG=`3i)AE)F)__^B= zw6AZh-!>$!!m_R-SkFT8S3&H2xTG zOqJJ2?hJPg-nx$H&3X(PZYz|XX5}QT`gXJRLwNWa``lwy*WaQMb>ei}39IK=y5KBR zt0oE1BxYJOShYf)N7{9!{{(_tcDpf&!nd46KbkgNM4=zVPu3r4L(~CvVCo8#!Tu7A z{0SIe4nSas7Bfwll4N9z^cTMh9GJ%h`9#_!{=nMAi)eWAW12txmqlXLSY^LBoFxt-ZVs^gRD8iKZ z4&Jg@Ki+zI3HLHV6Sfo93%I*240R1pZv``wXn~obdovcHRufQPbI9^?VS!17WQfuHkCraU+>?GqF6qqdY)*d~ zB?{r|12gYn-I?q;)dE#Lr$D9ws#n+=DNV|iBBPC0KgBY+k^!iYeitGu0A(S8^M^l? 
zDO!`w3tWXXd<;J`<(RecXCp%Jt-;UTiC_w#d#-05Dh7{Ww4lJXv|XbL0)n55*!%4Z z1f=EL=`_F<#n2ZhsQM8=6p9e-$AuSlFXg@IS8d-O-RE1Am|W3(2@guyI(arT;cx*= zJyZ;{4}1o-3y#!XTkvY-8?g+o`Dx@g(2|%Lfcks?j@@D325e0fC_}e` z+lV2R<`F@8CY?8y&BZ-!^OAG z`_{!*EiJ5Wnmnq zcd(s_MmqrxEX@xGt_PpQBE0}r7iO1z!+O)0Y4OjpVbZ!*6Jv*|M7lhyANUHMzD6k6 zFC+uijkj!LBkxGUk6;eYf+j;rSd@zqC>pXjG-Oeb^8srY{^?&q`U?Jn^Mb#le1X&v zG$&996fGO@E!rd-n4TNFpy$kls2D@sSqv*oc}z6Ni$Jaq9S(RgqwE1Y+cDFMJBtab zfKUdRUt8~2VNC7Fvp}=yPW-w^qDG=)!Xj3~`$zX57~MBsjU2jr-@$ti-Je=_^ofU0 z9yynO>h#$+pF4Bp=<&J|W5hlI$p;FxZoeNaQ+9fkBa(wyGMcTZQpt}t?){KS=rnp} zs9{818FF)0HVof)0x6l8M?J;3?&tJnhOUiZ77k-MGDV3 z<=drMzoVEbK~_vlLTn;e%1#$E`n=z{qBMvfL^b-e4E{BPRTDKXf({FDzlO)6Jm?Gd z*wpjHgXD0BEzOaY7gu1Y#xU|K3c!~GFVk8f3`9*70sjn~ehhC5UfA@k4U`3}aMdDS z@B)IkFosv6S8W!wAlMzm6M2Rpcwua9QUHTJM(h=9nz)q>yQ|SSSl6%kEKuF{&!`d9=VN#UgwKXf z|H~}vD-2}Zf15ED?RT##>c1lKH5!Kc6vH~((UwW$ z8%SNzO5C>)b{}q_W<$FG9`LO*(83!Kb-JKHdjKI=OmZ%CF#;uMTS#2eX?+=QLd6C( z;H_a6Mzdz~vwwqZ0=1~^R3UkuxI&; zUwjTQ%;Vm-H5z7Sz5Z`V(45VyM>Wn-ora;%&HWP|3kdsd{Y}JgYD#k{C-Ky9mO?DM zn()JtrlqRJv27?-{tGf|VhXhDB;(W(>Hm|#e@6hAN#g`uEtrKNx6I^SP-N4lq^-Y& z+$-9|rPmx$&Qa4QgwzXS1Q#95w9t|@2oh1VW?(lo(so4iRX5=SdfaXZo(Q-nX8z;VK$dw8ipZD{}m#)Z}4cn!LVDfsjyN& z&1OkML&T75aQzBq`#Kfs_Sg+;&q!2Sum1w?OLO(N8RM$-ojD*mm<7ru>i*H{Mx(8p z+8mQM(*T}mG`injZ)q;1+c)}gJXL69W9?w%p>ET?W(Gi42}fTyXt@l?IMun4;ng7I{mi{1oW>V2J`^j4GU<{trYd| zBc%a)ySQ2*2{Oh>}dmWfJ>uLL`1f5fgvz`pWd{&NJt8WaYi5CpCu>GAyDEUbrKjuxU4 z_!OA5^a%2RNBABts{q;bi=aI?x2e&cs7;e@)0q2xytragt>_N10#q-8j%nWJl&ZS~ zfJ+d50rnVL9j=KrPr(J?C3^^+m99E-cQm^SO(nV-9lPO-PUXF71VM7o(PTB5R5>@R z^Aow`o;_Y>%Eg6l61Zdvm$16YT!72U6$)art+N$or2n4XCLR`qrRNa?)*>>v2^gpk zoVlrKCxJoyPUHVp0S8R~7?hPG%YvKK{5!b8`XVC%KtbNT(|kblBJn(_6z> zyw@0zt5Ne9QaZ_3^$z!Zes&gGuB3=j$(^OLm&_zN4dCBX2Co6V^+TJh^>q^40 z14jDl;OfWa$w^M{k?LNSGzqS9I=N$q6l?6Lm`C6oQAe@tnYUO)vu zMC<%W-4;{bY#MYCd=F#+PQr-^ox8Y4=O&Yn!na2MfSu`Vjw&uqm9I)y<>H zC(wlFS=Vf(PMt@c!Ac`GGS6_%`!+$F#Hk(Wj~;VXkB zA;|GRGDgN#bLqDh4=uVnKMT=Ru41XvA7_?NFd&JC**FLTp)?Wp>!R|50Hhy=p&jC? 
zZ1&?nS(;Fl!S<{40E0loLP}M9f*Pz6>e4Xpz`AE&tBek$Q0yGjKs;R-I ziu(g(uxb&}lYp6m)5{9jk*VTr5Xw#Xi}(z)fV_tFzM5h~x}7~g<#);|Xn;t69&fG! zj*#n9fJ9x89t>2?0&l!@E3!R=<#?e2W0m6EH;ZC-aso@Dx{iyZt}83dB}Ii>DC88z zk(1GnzUYQb7`C;;tU54dKB#0f;LZ^>x)B5DnYP~@pVX}h%Iw_tF>Fjjj|&YF5#I;gcZ zZ588F$J;wfoiIJ&O4C!bW&NgQPoqbduuk;%)mpEAf%@tHY#?2N7&)J)mkJ)9j^8!3 zpiU?UYYC(c$@BFj_-exf)}tD_o|unIzt+~`Eix$@$l)hJvcXS4<|j%$q|u ztYmd4NrE>CvY?=^rpIb>G6QB)-C8k2-p}2EKnx_^B7DHEa#4&UGngt6LVzBw+r~U1 ztFnRXVpKWjC-ZRI_&iDgNAwk3S_no^Ee*+p=w1Om2x%q@hH| z(onH9Hk#SiG*dZSqp^2jbz3GHZ5cGqdDZPLU?rg`B~G{*V^lZnPSO#!2rDDliVU1S z_ZT*1bab@Zc_(xa4g8U;uDLTQtOHQa&L+>$*`8%<)!u3>3gbfLwGiQ*qscRc9DIPA zMX^0P2NhIuu3XVVwvjX{3ifI=S=W~c!l`!Fsk2JLN|*Glc!eY(S?30-Z#+xcR)aeM zvj9Pt1l|-)W`Gs7vtA(XUgmr*nVXyh^FK0Rh&ri|4_!gi*d;Q1i_~8x%h?`U&$ZsP@3G7h?*<%W$-*6<0PexkhsnSNpl@nF02 z9dHh?1H;8=GH>h>`u%Jl+-0Gs%=?x$*!l$|QuL|lh6zumj*#TOn`i7(c-^JIk_giK zCu${u$JblKuiG#51s%HwuB>!u?ZFeq2dq)tbs-n+%RNEo1^e%v&H>!pmh(D9zSsL~ z=b!X$l)9WmsaYcV#vh3QM^X^KLWCj`5WWqsSJ805mtz*3R{p_6Xt?7UL=soO(X-=t!51J#VHY( zZ-p_c{ZAb^JNDSvBZt5eBjX-Q)|B~*74Xdv`HG;;`Pl+y{$vsw;v{)bg&bYG&C21z z#=M7QzZo@~MzZ;O6u0oFr=nPxR8GPXClx{^Zq>}6easT2vx?{%w(zyRNR!_9T`OB( z!-KlD3mymH1w`Dp^j%2L;?3qC?!{^1D6Z~R1fYL(caDPoVaP%EK=*Yw?jR=Oj-F1S z|BE52S*Qp=N*KPbpty*E%D1OLNdWE*^e;Mh6!Z_zs@=4UsR*Zqp=x!<3wRW%C_Sa- z5hL}ZKkVM@#RH@HBJF-VM~F35-leS^7_^kEOHy^0dZA3!dw$dlkBBNdd6cT?WczFS z4d^!}5;3F%%UlQ&8?Qdu9P;DFd7m{URT;t~DIk6OpwdpRs2F(f%ws2$k5R}QZM2>@ z>;OD$=iK;-kq#rad;?WM`nY%iiXG{x{tc#tpibhBflHaZ_vYN3-wR>i)8)A%g+kre zS8!U}Y_aqUZ1t9AAeg1To5U1-m9v8UvJGeWF4#cnEp5CDdx&FRuz#ozA~Q*uqB}h zfF}U#`@$-My@V(LSBbl8P7MY&_inI~6d5StLm!99B8=pPK{6+e9}OP^N$wn&^*Ajs z7?Z%koG8#;$_f+eK{3q_c@->75gLJoB_lk}%a_1a_~-$gId|itA3$CW$tz%x_%lq~ z(hS-`YRdRoWOb=AkHRP!h7d?py~(m)h~995`XM}h)vks{=Q71Y)ft^D7FO7riF!EK zWcAT^iHMh8BMk*IHVK)9O+_9UqE;JMHL9MC^?Z~~V##q^W7(~=Zgc2}Wu?Z$c-p8D z>sE{rPzg2l#4hAGcMX;7RJ3vvjh+Oj31>|&G=nVnm3@8Dxo^B39o-)##_rt z=)MbO!MM;U%cMg2WV;{2lO7>ry+Xc>JOdm!g!)GM;6YvuQ-O47Z#7a^B;BwctXuC- zkUfTCBPrYjNrs1YZR3xY&#(w)_HckWIamU=Fk 
zyPU~F9Rm-3R5T#-)mTY{;}XsDdr%g(;pjpkKOy!p?AeG^N*5|nKtaob?FgPot8E2lxco{qdRF7koL+(Yc5KgZzGd_O$yIl#)cE6sm#U>pwp+vZ(fo#cz}?zBfh0p zElK?_o_-uZ@xh8i!wr{S5qpQh6hSEy8vH?*wHs;Fe(yjE*d$BdTCp6-2Q?SRByPAk zD4ud@%y2sgCMFFi?p}Ds@wy+_2~XgOWQd|I7)z8KP;i(j9iD+pNG<4LvciMhf^s=% zG&@Z^79Oi^ZfGIfRF9&H%wUt4=kC?V(LF8Nnnw_W5;=dt%Y&iM}rU-H&pjUyniGz^yOL#1d1@Rp>+6;mzjs8Ht z73nKrNo7Klr@R+P4%%k8O;RzJ``~VeG9U z*9ku`sOzBsSOQb>3QC!YDaVF04%x%DGRHjld{Xy(&3w7#Fe1L-h(cX(|ca_jqfr(ge}J6nMo_Yk;)2&7=0kUhWIi zzs_FOoFPC9)by9gf17(Euu>dcx25_~Z`7okI&041z=ijTs${JixDr+CnEtHs!i&sD zYvK1<`d!!&&h}!5nYR{Y|6wf*svIU&#!L@;18NBG;wkSJtoH#qqQC0^59}T1o94*? zo(eLL>c6g8?%#WZ$T4gnJ)adA@(E2&Nzpt6|RjQJ8&Te+LvLvEyOsm0Yc zZmL3wxL@2#mXu1B(y-LMUFv>PYPmyd*KojiSlIRdJrm?4z){qSXFNjCChH_PPGTh6p|&U6jGy-x)*t)btvv*og`U= zp10E*;auUo0Te!{_5(@>(4)K9qpKt_caIsjKxed?pB+CTsJ*cfD^w{Vkxl^xIB7u= zrT#i%TwdL3c$csp#v~4J3vfDcumd>>!MK6lv_bA9uuIu$2g!@%P2O1{FLUV5B-AYh zl`Q66Na9n;Z(e`5AL7OzxAG*Ox8;|_oAZBY5y}j_J4NLtOk$H6;I*V=PTrY?SB$$4 zRLI2kBL+jq=q>dz=TMzEph?Y~?MX0eT|Z z|JPWNKtr78Xt!Uj`?s{vK~dHT5F`+Y6B7R~>-a51DmxLxkD2+I|3;K6$i#D zWFp-Pq$}5@*)3V^0Nlr&C zP4H&JCD~xZR;}Wr#`u7ojvr^+z)$mSPA20ZNG~Mc$z9H6Q9lI&qv#~VB%WC?XL5n2 z64dxMfaZ79%c84?5k-HFC9YhFMlnQFzTV3KXU+ul|72_n0t}?AeDNRPeD~`vzxB2L zd&up+4e5?!Y?ja;2B}lT_IPI#800Rnb_2mEQm7@kA42VV=pr}4zB!B(rqael7LdAx z9;DC@qYu9>IQZ1cTF^kRz3t)nHF9$?G2K8X$0SnUAAk|FZ9 zdeLTSi<_LKz@>=r#;f}qWy9oud)JV?V3I01ckg`%_uXGP+$zb~!^k`Bt}8Cd3vm`e z63=Ior}CFk(V519DWl1Aa7G#FOLZDKpVA-v>kLqEX`7e zH^aNYr-D*q%^?hPfSJ}~C+ujnOXTk!F+dPy5H&bJ6*!jU2l`!je+5dwJ;bxRT52C6 zqV&gN2jgH9JuJbs0FonSGxr8l3Zw#xK1B*rlDG(93(ekY5T~!`V{9Iagc`7EKcX=R zx>rEzaXzM{T%TmXA=9+^V_n=z`i-L72dH`2${wR$11$H0AN6i7Fi5aw81rEL>%Hl(J(k24n_YEKsn->A|dVu8yeYWTzL-g2_YO=0K`_!K2j& z$Q7s)&k#1WAd#!Kg7=u1>(5fs)u*lkwWGuLnJqB$A@1c;QStPGuOZ%zO9>snAR8Ia$CSxwgO zE*2lDv#th-i9{&M{9VWm`T1GC^b7(pWdwkJjtMCSTUas?nV)E|V`-q!U0nV5BiWoJ zM8oT#=ox#!=ESW5XcnPgMCx{{-|mC!S6mD%#tA3!Jtth<)F`e+?HVbpU|dhLda~U& z;YO}hS()rMxqXfiaZu8cT@=s_PrIw%XCi zR@WtJVQPu$usg4qne~GhZ%v#=aT_dv{$<2loI7^wH6d?+NYEM}3t-=mTi==GaTr!b 
znvByAnqy#E%{*dsho}--RALC6Hmy4I1b{dC}^<#>2On64lhQL+( zDq={(W?m^smdG+%NX5)YP!bqK*h=l$y#4_0UWJAWZmY9MncI)wfwlqpAf79{secA< zS2tkQrL%MB_020mUPAew!W;Tm7<`h!7QCl_nK6!tA8VE{!BT&b4?c|`i-*lW0)~Ah ziFK)JFvJsES{x0ET>`95}x50;U-p)hOsKOt>-cS`q2xn@99V&qk1P@`C z>PFb3dJ*=iH3<9FT7>ZZLWpBO5kh5)a9C|bxJhkBm{eO3;^0n%+tnQicksB*kJulB z^rc1?Es?!;sW-sW&Y}t!D+2^`9Y#cLAHo$QuzO5K0KQ>3a7YvZLSg*S1lX(d)zF4# z?%G$>>i`Cc@5i|^FjVA9a(4h5f9K%+UcMx^cNOJwcM40-m!u~s_(|*nAvj5iUw*SWIvA^T= z+J6nwX+{6nj(#b9nI*5l?ZoR%+^%s$!FUJ?{z^c{i9wbyL+Pc$bP5f);@?XUA||nu zgQv5BndEUpus(xXvmsK01BZwVRCV4yENrVepAD=TIO~nV_%71yU{R|e=vd}EfE|O2 zHMZwXL0Ltz4eCjnp+8IziR{$@uYH`>22Kb1`7pfcaZ;C%670v$V@n^s>i+?+;E+u0 z4okyv4aYjtUkL|<;+ze5s=(qaifH%;Q*93OXXH|jv%h_pB8S6YXL8fZDp}L@eDB~8 z^&tnuG90=#0NFbcCjG=2d0#_IS0Hl2mbi+u&mBcTm4w2xN18QMbIhX$&iC0fofBt}&VnqgSi&!d%4LldI< zON8wg83;u-v)-ucxA3{l%nu^=v+Op0f!%@X8(N?dwLk+jZEK+E7HUS))P^mYT4(0u z%phuRWUSDVudx0Mf>h@UP^S{+RtB~{K)=JhY*qUJt!Tvu*-`?2FgRf8-$VWtW{M$3Zu&`NA`j&6Yk5TIl6~2N~W65Kg%5qXrEo~V$jV% zvQQ1O%$c&x@mlU@K+W^=0E2Z5h>Vtp7;IoL%z*k-y@!==;Vt{Ryp6$j26r&n!Qf5? zr1i@q49J-+)0(o(iLuPdwoEnV@;(Op8E}Y~i3FGLW^fOKdl}rvfXw*vK?a8yypaLr zi)CuJmLFj7AcKb(9AR*j!3hSBGI)%^;|$)+;3Na8YnRFXE}urAKgVvpg|}xJj5BzG z0qtPRPce8agQpp!5Evv$K(xhV_y@A(Z2A6uvOWAC4#+ z?ow4s)}hYI!ug#(2vBU+bs|tGtUz|nJJ{Pvy2(tnuerl5a4gfa`=Wbd=~K{-ut5WW6K!uy-Z{{J7|gmzm$ z0i!mU+ydUK@~?!qK)4A*fj;wN-(3TTu7vq*%5TfxguCts5cFCGgA7_zcPk+b5MG)F z2rtk5DxS81kYEG|H}IWzVqCH$8;w8W9^T8mh0f_m!x3t&X5VhpQwjjJAle^l(G0+o zGLoSox9PFG$Gwb=_Y7QrY<=ri*Jt4SW9yUm+UqlL#?IIloH3<83D{_&M@=-9a%MB? 
zl7!QG=oW(z7lpix!sl9}a6fD^{D)E4h}7E;fP(?Rj+IVMW`NA$v}8y3ALe`~;ex*SUFU{ICvMV{j7Vtk*Z^y(pNcB zq^k~iu1(h6_*Y*uTW&R4O*B7)SAHbgxoNrln57{QMV|I)X<*2`{tYHeFs`71VS->4 z8VFyWD`EcqG5bUwWRtxeiZ0AND#cfDkX-uYI%Mh3A_51w0QP5L51H`TA*f=pC-5Q6 zNbC)?aD@qrSIo6wErdCS{bNILQRW+%n^3FjyM&_`6>|+IEH6>n3tJ%K2&bS-6`mh* zwD6$y9zoEOKK(vsKL*_z*Iz_y%-H@e(y;q8UlNs$zK>aOim;oTqI);QO*zdzJT08I znF3sw@X;>0@aSI+9UMz7FCM@V{Qs>1qPU%ezUOXBj}Sztq4`J5NFWM`$ceyl6^jsx z`a$Zio8W@egwblFqtI<54tkF`LUtOOYtVj=OBF0RkLYNeIZ@t}cDn-j-B@z{>%=lZOp{_=D_A}@H-zZK@di1J{V>&>f*n_m?zYLf24OvZ4I++kTunDL4Fb8A|i%x>@;Kk|5=yLEC zX!RCDCJj!0Hty`E;t2|W>@p1V#BC$9z+41N%90IdOkhE{%E6+7`CmV4%dq{lf#TSl z6{E$Dka-pvhK#JpFxbOJ(FlHdXt-%Gx4{ktmV$Lh53-4vgTkj^-asEWs}-iftHBnS zeH*8+Z9ztn5X1%OJE%_&BVY9ncz41a$`c&S^lCrEsU@!n?@vx1TG%ak&x|lO9Kx3Z z>IIjF0*PD?0n|7^h>jimB5`sCDua^i4XiWDgqG{nex_|jv3~DO?f9Q~Cage@y%pZH zU4|972dP=2vBuvDE`Z!C5C8@BQ4T-FcAOJ{5U3rzsoWuizmyGB(Uwxfet)O}gOacL zRx5SVo)48^D-uNjo(rNuNkAwK?g(bKDWJR%j^+Z{L7w{|;evl?JjYSfZO`%Us^?I~ zz0`Q$*(mSQsRtgtAfR#>Y5mlTRpOWy*mBcZ&M*Z+>gL1`_D;Wz*pKWZg`e1^pYs}FKm@N51t z-ek_cg&Ph>U8s0yCFSLs1CRg^u{qW!x)O>~e+FQ#cqWY4ZQc`Fz4AR8yIOUcVWOU< zRqyJj7`A3R%&;}{OIubyJ&fE~=MC{fXhKt7hAC(>=Y31O2t$O2--6x@Fk$)b#zlV# zKw32dnxp9ev57Zy!X5^=hhgpBX9MH&*|+fy{{HB}Z~wv9`S;%+T;7eEvPAriza%0} zLa-Z4hqRK@6VAGf0q!b@Lxo@trFIzf5<8a(83$AFyrqX>K(*XEN+1Q;rvNel17^hR zS4J+oo@=k-SZ;`i^=EOIXPEb( z<)1aNd7PR(6GVDHoZ@2&to@{2TVu+dUt8nU)iW(r1iO*5^p zaqmS;i72yCf*|F^=n^2wIu27i7(@HklD0rYh_)J-yI;DHuC1xp5@hXlgwaUrYqaH{ zK>+b)?s}Wpzxjb$2(G*+b!ACr^kzDsx9`7Z&G7urb@PMLmVw$jJY7>;R~uv-RqrHR zdk{Ae=whPLhUt16N@Mi$E^|i!#2PNsYk}xJX z2eA*W8oXwy-dchu>6rJMkg3;ID28eKtHcH9h4~OSBVuw%v z;@aA9Z6iL$5}aRGTW7|y7LoRRy0#8AM_4nSb)%N=VjJpuSr>Fn|AHDi&+B(fK;xzR zy#bWOu^5Ion2oizskW)Mk#7BH)n<&z6vFka5?}!VS=#WJ|_jWDS60Ijo70?B!zHRH$GN^S9)GymnOb@qNL&U z1jQywxT9Bh>`t1;=1|!r{9Cc@*1H7bQWEEu$maU}qx-6fMlp|-pGf8dn{bHdz6uz( zyX5a14^Dk%(@TH#me2gcgFK|rH0bNV&9I+e#bwMi=wp)NM>F_x1osZYK`wGWmz!1j zqB*+ByyC}s4igw{KyOMA`|QUbstz`{jbbE{vKzjT$ALKfp2uC2)wqkTS6Q5Eilaa_ 
z81h1eq9H?+MvMd27RJapX<^w&;$iE|c%pUjBc*bQ+eC$n(_d!ZuQP{pF`v7HL%NOO znnI5s;km=ur;1Nw@Nih}lICZq%Cl}NSi)JZab~iMMNx3KJZ49dsW>*a^8iugPsKTu zX&H}H3||^EQT^{xb1Is_sf9cuHq{}!pmDe*Y!{*_r)lpp&yn@*31iGi*_X4}&xX`X z(LdYwYJ*Zkuqw1Kz%8_B0egYVf;mO61&d8;0tOcpGXxPc40{)~KtV`A>$nF~MZi(vIk_nDC^KQH_F+2Y(fbxIT=cPpVj6r%2Q~zcgR3A< z(s4bAjV+ysfwNHEz-K+i^-4Ii-ZA|W1_bLX0bdh%Zexba^vvPRU^*v4QjL{yEJl%s z?7{jAs9+1dIP)KB)373zm@6jyfoge=P!u8bb34)SAqQjF}) zhk%SjDPVa^-mrF8<8}A;436iZX~1|x1xNQ!`u3@Z;ipX|kzc(7J5D@d9LaGM6TTSq zGVb|FEjcE|O;2fZ@AQ^$J0+GUyvyGGkF)SkAb@&Fd@NgH_s_8kYgT`kS++ETtxWEw z8^>(A35koGoixbwV=ta@(xzF@90}1+%|iwtsYY`b`Mrxn$rTDhDyBN{lWD|*9x(;z z<;Szzpgfz$*$og2geWb*@dB>SU_l{L!{LzPv^XeIaFA10B}WvlH4&u8plW;s6N}z| zU^8*VjKkw)Y9T6XaDqGZ71+cZ!Gwsy0pbzyj*H0=JUEo6;Tu=aeF~1-aUR6ek9r3J zI69ycQ>_c_>F_!@mGE7Q$?sEI? zKa>Pr#uYQxuBN&RXY8Hytak;H0^0xc7SC>fMLbx_SaK|60Q;Ezls^h2D3f z_uXuZ9Kh4X>3*FI-C(U-4GnTpnEUle+pxF>ju`?^l(dm7icwRTR;xyyO_B$-)^Id9 z^O3D;=G<)h=k?F5Mau?iT@V;H(%g;3yberBf({?_j=Tv1-&+BJ!9^SoRO?0yw_e9? zR;mi6h1*&q&a?Ch&*ro3@+`W#Ej75X;3NS0a>m4$GcITL-@*A{PXFMdmxaFr2N#>k zIhu7ZR=cO?X4Cb1Zb%u4RD0p%tntaVy`VzoyOPzeMiNrsTwd*N+!v3WN$(rIr`o?N z;n2R(gZESuU>Wv;MgYCIuDL5u%(UHsL6N(gbU#>S0T(|#$u&={4&3d)FV=T)tz4vX zfY?vr72|EO#T-Citl+#jptLr7bHnGN8k*u4BdZ;ITaXZrq#8G0_)KPJlY)ti%V+40 z1N+?F`f03Ky@`!@2ZCw@sransZm}t>=mDOJ_4pinL0J31A8k$?JB}kl;9x?gEsf*H-RhlfkZ`d-69@87=Xm&*`4U5&zdsbe zZ4Jmy~Pxhuee0 z|JdKrSI@y*ag-Z=ojJol3_D4B*Seet?MmIo#{Q#diTh3T0Zc%!t~~_lmpunTY!-kA zmT36@2Q(7cf=+8+#nS&9%mDUt>Hl35b^sh>m`}t*3!1M6K^1p@lL~TcQd`?S%we)1 z+ADH^i(QohVi@X&UJOKQ)f33uG3@uk2U$TRSJjVa9WcTql++B~dDU~IU_%vj8ws)# z&8HF3)1bWh2+TRy#sKd5Q8GiajlYvRF6xIo#&B-n0{SX(lvGaz(Pk>kG3WAOD|-wY zpAN7#4G7Vb4RIBJDM&5RVh>u0uSi1oflpb@^wt>msSd~hd@BL(wGM21UsLN4bI~>U zK~rCXZSITEQ_ZZS*$6rbND5%0K-$r_2y{7U0q-CoK-ULNLuXwi7PYA;P%-X-DWll@ zy9bvcU>m0+aoHHLm;a0j6-*uB!-2>`imq?VuOoeA$f#UpiAEsf(Z$MGE=1H73#^!` zM4E-4qfZVlw0TWgaep|K|r{Uw!iu_ciTyg8J zT;g1J*8xp`{x-F*4Z&EL`XbW7>x&VL!Uw*LTbzQ}SlnlfBj=E&Uqq1N$F6~L3hsX9 zafK46uqDIewfVh<13?Oh*jy028%`?2 
z5GDJA+*WWuSoI&+kC^L$g36X(&n`9QjL?U7pXHmZUPMiiVJ04)RVv7yF|62m5gWG>0hx0Ti zv)e<|9~+sAzF4w70bcqNf1Gaw-Q29%wD16jC(7V?4zNI60s(s$oGx*8GHkrCuXxjc zOrVo;;{&A@?3clQ0GVE_QQL`iK^E6n2w?o0e~eQE7xYKJI9dV4JWVGGe7_QgNxNZ* z4Vjr4%@@sg8w^>BQsU%>G92XtyWss$L9r+F`^16atDB4+k0fn>&8D()xCp*-t41~L z2=^mXwbwL@IGBg!FG0AYd{0STgaI@W=dy+7Hdq1vdm7sD#>YFjzM&TjH`{K$Jz)lk z;Du^QzU*!eC^wCl`M$d8k$h1I^lNN8i?uPIn`gCOyHzhZmj$#O70f39morq%-?Dp6 z1P6P}@2Z%czgV>1^ERQac!pg(K`bNeNGpN6Q1o07z z9|2{t+7Ca%>6C>~UBWZpmhMxf@7R8Qq908?&pA|1MpvR`{zc9Sn zN#6cj27iVC3b$zuqDWqokgjBb9$mK^TYutQ%AX7 zNOMBSewJxgPDFtZR78d^a)l+%?XMGZ+fL2wAO*e$1RjW)JSxjKeQQk^EXE@_JT)Nq;q2~XuD z0pB_C*vaEM&+fj3fe@;@_?R4&*|k>WgQkCmAHzOpm~`P3p6VET;#7M4(X+>o96M+B zus+DV+YtDXCy$?f`1HBsDd)uLv2nkL-$WNyk(wds!nGwhEJsOB?*!nci zXNti^1`7;GfaD8;IKqq!_OI~%a}2)9;13!65rc0q_!9l4{2qffoC?B@2>U@#J52{J@q7~B9&vin^nNgW39Y}Pz!m+QaL_`ig_sDw(eFhT zKk?WpjxWC8cJlODLF$?~(eLHAp|CdKp2Sb6QRzt%lnRX@=eag~wuR8xiolN^HlOJx zdJecZ5P)UCq4c-=mjIn>{lh0%+j1trWF*Y#UO|4LojVwNz;N)RYhB!W7mPz49`5ER z*utHC!M<4EK(sU35smkU;vMn6cqkf;#UD z*d9&9yOBTCH589W4@ZaN;pk{Ig11BQ_5Ew2k!T_sm%l_j8jZ$d$kk(aLQHA?i+?XZ W+0zr<8Q&J(&=o{~qXW*S;Qt59@=Wjm diff --git a/utils/__pycache__/general.cpython-39.pyc b/utils/__pycache__/general.cpython-39.pyc index 6371f70197d5dcf0692e31b92d0d03c9244af1ba..f096d602edeb8871b612e6a1c7ac7b123a632354 100644 GIT binary patch delta 17328 zcmaKT33wdEwQhIMs?lh*cFFcwUdM}Uc>!;Dw~UQ!gDqniY{sLhmNe3AuI{lGT8x7X znB7o7*qlg&B~A#*eI^Nz5S);NEM&bmTPHVC!g~p^W}3vWPyAxj4Z^n zIJqblj)YT-Ba3A{!Q_(E(#TSoE=ev+wM1G{%OlHGMX%LYjHsinV@hPDzOr`}E6vv{ zk=6Pd-8*^-D`Vwj+K3vtRA0*~^mVKh*!5!yE3H#Tv=J?`0mX@~QeWM>k>Xo3l~TeWQE8uDMS;qwzX6H|?F0KX0SL zzRMP{g{RcW73?(@W{Z((XG_@9Q%YnnTgF;WDf!4geLq|Nn95eL)>H1tmF#ChWS5Sqk&XiEwUYI7dPnae{I2fOb|^>dB8T;B^bY-S z@3m|lTYpL!K?Bcd#~)$eXP2=}s4b#LMwG~PyA+n+%(k3T*cM&wJu>2o90h7C+Xl?G z0_J*PwzD0;>?mNOY$t0&3y!fL=#6@;USYc+{^e}Q<;&IVFfw}BHSAiDCRl`Bhi9+88iYsKQRMaM z;SoiT>Z?ao_Rs7Xi=9$O@E=LCe_?UPK#^iUVco0;skEeB%@U0YYiHGLewUl|vOX#i 
z$*`ZY6ib6D%Q7sV1)JmSXUt$GP@MgoonZY)8SEEqfDIyL>f6{#48#z-0dF~GGm8x) zb%Nc<@+jTUe#t(Yh)id)$0>=Wz>q;6%uVNbHBkUGuYU{AA8B6S-($36x6+u3i~Gwd_K z3uZv7}E|a;kU$%o+h8J zXXBadpm@O3QKi{M9ei--KwQt7wpUMOdJ~5Di|3MgZ;|GU%(!-c`~DMaqC2*BY(2QM z<6v~h?gO@;#mtzYn}%5LJ*n1-FL}SBhQ*NYaM?!Elx_e;^Q(M`c-@z5or^cVkU)3> zEJn%>B#dahCy~|-eH-2l4M5TO3h`%umH6ecBJqfS>Eg*UG9?T8sbD4<>)bV^*eVpe zWqm6!d?kSG>ekIE94AW`p>$uYyF013Pbt3U{O5{V)Fng~)@-e1h3!s4v?(=6)4Cf=v>tB6A>fT9Iex2mZD^%6Cx@-5v89}dD*!f8_i}aob*Om$q7?YXy7jOTuHb7ZW0knKq*ZMduaw-| zG>t(q#ygtKbayAx-Mu1PI$J#|&Xn$I;l(I;PPNs6=_2_zCS$2i7TYvTRj(%y7yhz0 zs;@`V_OvI`xdGd|BgXp^X&w?El-1YB(c4Ru)DO11CzI0oYSB_&(@sNeds2PqhOme4 zq&J_QJ`p_;JrP&Dt9!a2WjRCVm@w(CM0XVBPl_|;^=cOLrA~aU{G=Kc z+bfpNGKlT8M*E}DL^@$cqxXrsDy~=)1qmjH+f1Z%+hdq9Zk|(l|A!PTibnOc39iux zMM-7nOj**+2Z?kDz}A?_iDTH!`^BA=P4$1zSsx(IkBF~U9y(empQG|7l`l31)A1gj zNhgMMr`j7R@8Viy#>ap+s5=$ai|IEMypm_S5=p&AE-bCb^dAbL^W@xIT-fwUSiz`;M5XJMtlc%@7mgaU@yOiu%1{l z##4^0AJ)OtVb6-rnoJ8-C|6fU*gIJ*r>^4n61aoFodD-($Umr=U3Iafvfs~(m9-1@ zjS%T60yh(oLqpPo%Me)A7QLA=Y+aV=`fw@k}ba>Dzec>&5pP=C;VjjncbhBvX>ym(tCi z3^QIu-njMp#Q9l`o7IgI!ku$*hi9Lyp5Qb*PAvs~ED?A3{g`q{Ne;~ttuw2|vrPf4 zr5Bp6oGYt(YNDWgrP6#Iz|SeMUM!v2?WHx&9}*AGyjp!y{CQ@>b)AIR3}CBK+ufB* z$0M#T%;rUem(@sd97(b~kv5|--fg^wY@-H1(E@6f>Q^h(8d$C-*sXGPp2}ZBj<~R~ zK^+h$XKiS=RBoC+)79%|>WGH4YqW@I!`MTe-Ey=p_b?d4dinUw7|be-8+I$O|rsaQG}OGXLHNfp{=KjKTD#nSsffW9hVA!gQ$O6g0VECgO75N*95XXM$<}G9LU}1SUp}JDnnx zKe01@iEN_^fI1}zJsMPlS~;Hd&%ZrhIPOwJTj&RJiOAZ{X>#~EjRkDQ)8mAafgHiN z3AJ9d&DrjyiNZfA?wM0pCI|Q&-V51Zn6o7GPk4nVps^VL0a5;tfE4-rlv*s*<|_5y zMNPBVN!YO4_A$LHmP?xa8NAy?SH?&~qAzTZp30ho{KIAyG5w!Z-+J+<=B4U0qH%8H zO37N|Uncxd2}rgQ$et>PvlT5clauHj(KGkvx)4ai9^2iW%yinWRBXWM{EYC-TR3}K z6{S=~GDaRwl;_fa5m(HcC7zjIB7E}~&XL-~$6ct;cBM1@c1ah{q)4-44X3i)EB4QC zYR!;j(%YC;x1S7N6t2#Hfui7_HmUsgqV;5z_}u&*QL31~K|rpe-%{##0JeKjhlAls zWzuF3e6ZLc=8I(WVeUeaWCQ0|$Lt^HQ>F4h6!;X0?1G0kI-1b-!DAzOPRwi{T5XUm zCv!cmcjYuGMU5DeoeLKo;2?qRl9I~lA;tY8RpTYk6ON+G$ffwd5}hNit))7r)oMG^ z5>uaed10;lU66~HiWU42#>0-4^j4o~UlM+O^{}jJix=|exHOpe^A~CRR&;A#X;k01`rKq92 
z1prQAHkmLd8^te^*hHG?1LQICM3%P`^%n^|4-hWqil}I*LCuR=R?m_0en6Sdx^jA% z8jgu>=8OemxMexS`%KG~Gp+_1eB>BsaD`r>GQFaF`RwpSo1HoeBb6P5tx9jBZqq=E zjaHSoX1TpiPDjmlW&64#CD|A^^ymql8yE)u9jf!<+T>i^?Q~S%iUq}GAUVACz`ou4 zc8Rkqw&iIzARA}<`eQte%|h63`{Fsy;SY0~C!EBNc(aLg8fv_PFo%JG($@`ARg>09 za+ae*joU?%({4u5YB1mp*iMzhI6?bT?oc_`kLMEN`qsKUO~exXT=*HkM=_H^b{cby zxQ5-PN}(WAvlP<>b9IxYjJn5^VUOh*^^7UTmOYB)epF#@%LC{+>mE^WQjzB!Q|?(b z?7daNZrsa!r~IaG)Ngsm)EzKq!^&ZpGB^GySd>40U#nuASOGTle=UCLa>=j=4DYj) zZWLhtVIM0R_FD>KPHn?QL#3!NU=@+wJggl5m|HP@y~X$iyR^YKoSG~j0|mq?KI^(v z8TMIz7R1wkE$CO^XZg;k{0%Fw(Zei}uynLcwx@)Z5_zAyN|CMe0hXPG{Hii^yIGEa zfoinuQCK;(*F-6^f>l^V<+fapDreoSlFhhF8?78uq-n1*XN)K(P>LV3JZbdiaG=m{ z{;N@_`XY8URsgtKNl=Hu&{+M5Mwqj~VIMKGd``R5>(TD2GwR^$sxn**(L%_rmgO46 zyDLLp=`b{kdsa26s(5Bq{8E}DSfX-H6&YDQp6E*GT+Uo*kx2_VvB;0mgoC+Vy(BM{ zk@VT$SDS~l<`AdZJTxQZ#5_WBMsYnwGnZ!N(A;#!40UC4X|^;JGflFgnBbwUIWu!8 zOKQKRrDdpT@-<}iC9+wag%)MHx=2rkhl1HeHY7dIWNk9k!T$sGN*`^=AIidSfsGQQ zYmThVd5{r$mJyrzQBV|`R z>v=nNz)ARe#<~-@%Tp?d_BL$x&{TTH2e;#A8W= z|8HVf2bsb_J2<7M4E{s$#3hZ(HdFq`3CIOqLei@o+pJ8E$8}pRxZR}2;kmqXN$u`~ zNE&2u6c-tg02X%>;9C5=nj6cVOiL-2ez{t%Rl*b1kZQ(zNYy?-?gxIY6c|LE_FwuM z;!#a&8<$F5OKR+#hW%kOgRSGFewmmP774Ll#MdsIk!t{PL7|1-cx3dQ=kF2suie@n zA`)5KuR*|<;gteJ=til@Wk!)1z@}4Csx80 zNEtnuettpxW&Ofca=0c%RXGkDrt5aQU@4q}H;MflW~=K&X2XTNZ2erSe+9M5W9G7O zTxbGfKyzJP2^hCzrXLG$9&oVla`Z1F76Ag2^Wb7@GS1Pd<6e*!x`gz)7u`{cPT`xy zvl}}Jhca1JWdCWJ+XxJ*7ZbG*r zcTBvrWsSO3{I4xf<#!ObodntdXlt6~nQj>?2{@e`*uH;X+wNTye09W(x-Cginc?V%+Kao_^WTEaXB0r8uUlMb+RjY@^rQ2qzmy1K&2J$zWfzz;W zw<)X$W~yn}Ww}m6>l-)0QUu5@kSQXIar`DzIjyi@ywYm+5R z)q>F{>Wy=hbi=I_rHAc1<##F|doMFl>2a_4_4e5_7O-06!^^O|X0a6GJEDBYocsc_ zcC=0&lp)vhQpp-=JuN>@yn>c`Ml-6+de$gyZo_B;{E#Bc4-uOnqQ)OW+C^JwN~5Rz zBWiEsa6r~N)AFPDXGz04`>cn|r`gmyb99#Fw*q50e>1hb=|O?n%#HsL9m$y?D*((K z)5GT6MFli^A8j5}uvdq@O|O;AnsK?@Q@ulWH>T z(fCgB^_^ky*PR;|M3I3&LR{`EpjVI5Qn;P~DcT`dOUrV;N)>14*B+VygwX`htys#@ zs$o@5F`9(x7)J13?YR4(vy7mBr1p?Dn2ZenKLn&Eny@Wz5|j1f^KD0$IvR|xCL$uo 
z&WfDcDFxO6`v<4Mrd^AQr${M|@7l4PY6erOttOdXtobxyX*3H5m^MBW5k#=Wqc!1x+w~07wL#1>l|*J zROoV?>|$)FW1R?)=75&UUUt0fo3kYPYXl}Wv9z;;gp=mOq|~Hh-jSsDA^gkGh4V^Hej z@|8+XghViTm| zo$UvJ9|m-tTB^-*HNaelK!5@;*?w`;70t6v)E38Y;?Iv1{apANws_?VbMIP9GfOBW zXhCTh_gk8!vJxm@&3WIWIJ*&PYf2rhfW21PLF~26^1**`Hb$&mZX+th$@b@9zt-$+ zQBA@2E|{64*2~E*8yiHZh{A;NWX8~o#nXE)$06CD_wMkH5Zx-Ve&3_{%1B8H(=2JU zAPkJK50`b2x8@8*f>J+218gtTPb3gNz7a3Z)R4161_Le-fjfhvO-`Q2^a&!}ML!>+ zx@6TSDYX#bdVY$Ma_nwED&p_ali6sFC!KcWDOWb8$Qd~_$p;oj2y7IsTnqaB@R|a0 zcMQiE4syAS#<(#!I1Bj9{@#veP{Nb)!d=U71KLn-9dzPWWW>63xje?5D=En|eUSv| zTw>~$OiNXs6kS&yhdcB0EBCCH>cizMPN}M-4tWZsP7@}^gP#L#De8fKy!xtD2Ped^ z9WhuAkv$oCbU6-;oVy<)P+?UYF*?PV5DNjJJH@ZAx<0?1`V2oM?4NY^XnT}R#xQ;| znz4syt{@=SEjfTt&mZP7`7stN$+cTP$qHS2Hzp7Ufpqs*TDwVy(kp>@CGAuO=5Hy+`Ka$`NuqrSP?~Y#s1r11#S$gjH}4Jvm$8n_3I`#PrC9_buJG~*zSoOd2tRZgZP*NkCuHhUp)(3u-Bt{D1&`GoIFxGINBi{&R;}&(hUd~O}5-=1~txU zrfk~%c*RA5OeSfoIVlVdYbScak%(yckxK~5o*-o_9f~#7_A0z7TGd=qvaoaNEc^l* z_U+_y#5Hl_+&Xdcn&o;FWEcqPp^y_`dpq>Bf$&2+UO2@NY$4o}#+(L*0CgTF%8+uf1~zMIYWGG%9}#Xap8AhOK22JO<=B7e%!7 zEHHzu_^q6rG;%Oal%&Co%#o*|sN)<}{V4*UBp}=OG^HGahWQnZ4?-xind9PoTKp;U zXpkha-JQvt&YQ#o*WF*;h+?(}n`Aw0dk6I-q7VETvFXU&6-|^=1lLLD6mr8dZ#`Kj z{&FOgr!jGueuu<|1;iyHSimWAlP7$)lpBemMkn9oEgPMv*QS<<>p%QF_@ImDQKd8u zaWdygq_a7bKS?MW0jJ{67h33ZJcxer=+QOm3*tvdpRJ^3g#*s=ew4^AhFe~qSUh&Dsr(5VPU$MR>Qw%!D&Fr)aa(75HC@e_lJz_X_rQxB@S{l0Y?qDe@Mo>hD2S1A|``$MkAn9np-a z;%{9|=O5C2>d}efkhl{i_*Mef07P)@iSqoGQs}SrPsEv? 
zdUclgRL>n-&$;$Fm(t{@$S$fUM$@{;J1|I%_zkiQ+CorQ&%-~K_ld+~a)R=2L0&XL z?U1K#j;P3ro#T>Bm!Nc>Z6Xih!tp%IJySu9NVQZ8QBU~OD%?)@)HzKZjG>{q zf$cUeukn7s8d?pk(|_b%@Jp5vyp+INnL}eSF?4pRA>|)0Uxg-A1)8u^m*=1xgnBz& zXDAXIGwO~wCqv}U+ z(p5)imDp6_UF~xtc!{xdW2F03egK;()nI9iBO=^hOU35P=R~+cr4^_KB-QN&D!)ZX znd4MN!$B4pE@8ziXcbi{waRd5o|O!jSsp7uJGH$yYG$Rw<)&L6J6B+b7XVSE>FM?2 zkh;wB*Wwa91opA=yR>eVDjA+(;+&XveP>)pKXpVKt{Seks;y$Hsvc7nX)pFYPnkuQ zmoOu=3wp{)57(gHm={?!h_D2xa^{i@svQQywN?!R;1!tN9_G3kGaS8ATfkJXO2S}( zW=!BJ$KTC>Q!DDN5iNXXzS^qAReNKemNw=Z zZm=4h*nZ$6~KOxw^EVBB zR#fG77txi&I32Hx-duc!w6WybhP4)H$gn4VJr`dH4E|o<-2KSI^mlGE4llk<=0cTW zz_F?8}XeH0>1gz*aA2a&km%UpkY<#k7pH_WWhByuVhxOzi5PS5KF0aI-8W z_W)B4!sm)p{qxkXigW#qwX`p=eYpCEn_DK1U{bc$X<(!9v;Ir-6sAV`gzd%sZ0wxi zs3fF+Yir3){z)kY+#lL08-3EZu+?7j%hO%b-LTcTv|BiYj%VOyL|h5nF8CTXN!qUo zBWBM)8=@q{_$_pd%zC=@XPf{Rr2?CI-0Q4Fw0nWN0I_chZpa@F=4q2(=?c2&fIbA5 zvs_(-ICxOiTuZT^sd9PM1?onvSzX`?s4YG^w{|1`xz%l}O1X`NCg@1sRCxW$s9tB0Qwb8eWU z_MYE@dKP57?G9-AnmaM1|_s9|WcJu5k}b86!tP?$Z0HQ0zn4#)12hWGf!cx2oR~ z-^)h=&cyqWZ{1#^H0WQ=HC$?JGp(C?2~^){L^vbAuSMS@kLZgi zSi=9mJbvj2=p4TV#j7VP@*dgheNHd_BX(3yZj|w*&<8`_P`9yu`SNf>;gG6NA2e+5 zi5P+)h7)$6J*RW|=eLxi8=#XxLtLnn<13^4qT{COO?1UBO`k|y03mtv-EOZmDCeA zJAp_seJq6&HLSLaA{E5H)VXn<%Hduk@?R8}jbzjt&VOd4SX)OsIVUh6y;!^{-QRnS z{@&aC7AZR!CXi8su=dvLgczn#sSxkpTy>RHdl!I&>cU-VL@TCfIJt|JyR%;t1p-CJ z@tm$>`paE6={`=I0!LfQSjlQ}^)0LNiwG$dr;HS62wG5vJ4n+{lPPYxT~wb(mJb%*;~vF^W{M!4q0 z^%$E=oFSp2sQirNviG*V-86eT+Gv|q;G-j?20pT5s3&Z%DL;mi*V(WOl3hSUF}=DLf&(}AN;P#&j(UoWai@Udy20?oy(`lNbj)nAr!)6seH7drHpFP z>D9Fll4~+H(gqUhc|3kW4~_p9k^R*{l~YyTJg+9PC8$BFt&K|KGxEaG7p_lGdIN!w zQ*aB?6Ebulbt@sfl1E438iljQT%7Kl^T(<1C!A8x)5DD@6;K57P80{Ji_mSdw>3p!YkEo=lw3|{C?{dy@ zp`f8N@R=nyafo2ZvxrDl3ip=9o;#uCDH~TC%pPJ*v&;@4`i|>d^oggu7LFM=h`Z0- z+%IWypg~hz9^^Q))Pq|H*};O2&f-Iun8As;U4k3hQFuA>0;R!gNiwOA_K?J_gu6eWAd)J(5dCb&9G;;~LiBipC@UH9fE9spsFvyzqobqlVkbjWiCkUiS zsFx`9Z2~_g@N)vM6Zk!WKM;7Az5P^jRmJp!W zm}8eVP>O;wGAP22P^uRo&u^gQZ3ONhKq`hmh?E`Nz4yS*tvjOI@h$fOeugsdCwv8c 
za)Y^y@F;y&MLu8>_=sm8C=X6ifs=N~zY(`1a!Fl&O>B6+ngc<9S)e%J2{e@k0$W7x%;Lp?a(~btRGe!ZMZ^f*2QU z@U;^=Hi_iKj!B%vU)W9TO|i31b;>52x-W^pwB3CEC2{(<8*euG{^z_=LBXH&n=|*_ zckVgow$tvNk34O>GGQzY7ZwII{JlF-8R`9?ru~c_X8!r`a1%c1&WlU$%r6cW>zdGo zJ63Y>AGT+WSY5CVk4CIM*udY7!6yE04$kB6`N8@8-4bl!?*+jH_zuJt#uo(_F{%96 z;&^MYmCFUOCGn-frSWCKWx8h0F_(|&<82dKaD};|zg-koYrzh4rRg1CC5lAxgpnQ7 zgV&iI{i{VutrooATq8=&wW1IRH%w@vuu2;<#*E-Pb3HdH6Xg>I2yVc;iUA{dW3GPA zton^&Zg7)1zyBstDXK)Zs5$R~Fb(`%E9&|;M|Dwu-hjZ_2D7PuOaIo;HnVPgyU-_e zAg(vpjp?She}`z?pqak$onY5AU|i7UUqo}lJLCPl4Vw6(Xb}s}=)qm$OJb2&jM8q= zDwdqlf_ubLu?*ign|sCbCw0*#R-ADM_lYly4zUt=ox;eP8+#0~N?eD>Tg+R=G_ZG% z-zrv5=)wIt*4KlzAs;jM_uq!kfgWRG8Ep(bykMNTQ@kqH ziw&T5$UHQr1@G9Yi5tbnGn&|F>ivhuT)`k}Hi?^1a}#Q^RCNSZo5dDXZOI`W6m{=IEzNTA}EgFyU*+Z;!$x7FQew7G0i+`c8uxb z8{)VKpV7zgAM6+36cHhSF(Cd`^ol-|VvO4%qP3dXBg(~|9=GTh1JoiI7vBN|v}&kg$W| z`{HhK8Z{@yT~Q5~W8w^+PKmSP9e6(^{!N?{_o8N4+$Ziw&4_qFoX7Xug)J_Mhw&yO zlz0R+qv8iy@lNqBRNW~aH5l+KFR#M9y#l-?o!xA=f~7NvW{W${7apA$b8&xsGC=3em=@e%P+ zl-sMS9|lyw`mFe8)IB0z5uXFrqvG@83;4chK4LyGq7O%ofBIByMm7iuahF{RG;^B73g+e zmmJ{U`fX*E`l-KT$uvHfxRIY4C1c_4 z9W$DAph<<=pC{L?K&@;CuwA{THG|_cg*;Fg2>15J%$sMBSas!p^4j#Jgw|@ zjzP2;6qtS9hF%QTP)cq*s1D{Y)jjHT{^FI>f;vdxAd-;*Q{GA(iU2gDKzHkg9?-AX z3v{_r{V@NT!z8Tj4M!qoI&HfTg>Jc7dQc_Tas6Nd0u0CvcxZLmE)kWs+cMLZypeEb zu`B?~Q!;9q;qI7uME#(kUf-_%T=0oDVlImTHq!7)ee!W6f5GB7_=bDU)^uyBTtzQB z)N6$u`U+K2bg^z0gM3VSD3eEGy`VsY`qMaSK*Ni9oqv^}Ktq;wX%D*8N zj(3aj#tczkOQ2ibR{YcGQ50>@-e_WQ$o6gv%Tv*WY(U+JdxaHGb+ziY-yDjjtx$5H zHBa)S(q!@Q*zUe$+>|Scgg+GvTRlk`x4r4%v}MNScBUL3z;LyC1?0A(antsst+2E%>+<9^8s>#UX2OCbp-0uBvhLfsrCSaW>M($92ul*j zRyVf?&EXVio(#tZO*_w$;fUECjtpEwAdDRZdR*m~-@YM7)*`ZLBAcHcPDJ`-G7%jy z9d0A^?&?9}7njtNu7fTf&n!X;Y!G6nDuK50LI-w3$<_?Iaj z$aNL_^agdVBI;+MJaUivb49}qOMz+o(}Ss$#DdKAR-Qwxdee~? 
z_U8t?pbG<@9JIDenUwNgyl!h{2InQUr~394Gg`^VNQ8G1xJY2SBix6(Q1{R3>(#A0 zuQdqM>vOux6jPfekUJ zqDV5H+V~|r%eAVqc77Xo>%H{M9Js{jzPM@iB}MuRcsFU&p1RUkyGdU&CDmn@ba)Qf z`bkNX;!w%)6VZsn@0$+4*XmlZaa{Gc&SwfwPvK8@ZVl?b=U|_#@Ac9g%6F-s)gRQ~ zuhuoxtPNAejR3YDvfVv{iAd1ZgQdHW>X{5n;RukD(S#KW%ii=?)B_ET`lr-$4fXnI z^=iZVgF?4l{l0#GC@-VSJFNho@&zIWlwIQmjypevQz%?kQGc;;Z_uogj5en8lLs!m z2H(Tm*)`(rXzN(n)~@z9t~yj|dnGnuGH$!mrYUSAk;DAB+CGSQ&s5f>rl{;2Fyf4jr4E;PNs8-#H# z8@vQ0O$;o=Gx(Zb1CplmmsGV@4L5IHPK!l;0N^rw6VYw^%%MnZP?+*7^n&D+&jRFH zyxzQY_49ay+hB;W{1kKa zB|n5`JMY#oX-}Zr9y6Y@hUK*uA<6JNBEMEO&tIm0NOjJyUCELd@{?5mT>>lx1D2u$ zj#?WTShtH>Q__1Ny-ShCx8#lu6+oF&xSmPL)TNE8tXcmnW_LUJ(iM|EdQ zgR0(Nm~8?POB>e_P3i-NtjlQQ#)LNR5{7V1!2Qk8GIV!J*3OM29JPQ6no|67KGn7W zV3KI9%o1OQQ1>Q2lUtphBRT;7RKdGCYjs&aAHG&g3yGc=@IWoyhU3(B-eDc$F=D+;J*gwhvw>;Y(s> z`!M=xaw;wn>#MuK3%U#xu{3_MaPe&?fCBrjF-G;IJ>U&U|jx)@Emb% zBi=oWs_jTiO!HNEQKk3yK#yqZmc>=-os0JC73%*in!jlS(a_}|@HM-AL8Q-&41|*8 zYMYhR;R0Y$@M2(Vd-0KfQhOFZA1S8B&XRP5{tZ=-A>p-Q=Q%-AG?~CM;laJS|8_wy zTry#%BQn};HWMe#AQ^^Uq0b{e|9$0H>(6!l|EeuZ59|M_o?N=ER5i=@^t?b#c;IiK z(w_({1Aw=hibbvIPRaFL8%+pvh+J72P02lk{c!@H0BFsZNZw$|x}motSX_fLhP|5Z zl?&CY%dczZ4j{jh(>Eo~Xwyhw*;|9@C91J)gNdP8_ccNh@?2{c@`p$N6qW{DAB+9srpW0+J2V3Ng z`m6x)Mn3G#qsFLzG;gE`6!SBAl*T)xop{!*Sp^sL;c#_kdN% zuxPwE6TqANOGcKOk91ij$W-X*ianYr$WWugOD^h&1yrk46meZ~#)anX8CS+%ttAXm z@`QHDElNe%d1Jh6LL*8UpD16WjrtC0iFG-F>4{K6o^NR(VdWe6)12;Ob8+=VTKl z#D_(yxMWcMVRWb+pNtRbKts&2D#o;vkWhZc8R`V4Wwani%?Eksib~==;I5!)sLB)o zR`dHBOsXMj#|)~uR4^KFco7$NhUz+1>8e`YP|H_E*ipph<+YWUPMMKtPt=t1RV)zb zmuXu*wf&Dfi};JHma6~SK1Us2<;hk7xfd4hSjniK(a&iob?MIN7mX9{12gp?(UgtU ze0qUSpst%K(n}Pyk(Q=J(rW5S4kknse6*%$x=ETR2BXpxP2Iyyhi}Nfg>CBa7RbZjG79LQRn(fusq`-!z;YludBeIH};~+9Njuov_oi|-Uu?9i)>ku?$?{(rmi*0 z47Xka3s;2y64)LpSLy9sT4q$08eTj$>BisOy8mx}%E$$;|LQJU9#GA%;ma5~{bB%B>_a%s4LJ{Q)#0%HLO zii~Ci46pR9Ez=ilb2>s7OHF9uT+#7DIqCu4%00@&$u7t-DPxmEH$(K~w zx<#vK@@I&xOPtEg+;A*l&R97O&#BnDMty^NVBO2kNHi0PRcK~=ticqED8v`hnh~0{nVbea~vHS?*eLWK>Sz|UJM2;54lBsW&Qdc 
z$B43S2b@792M@Yvnh(>QCY(bX4##6p5n1v>k;dJAW$%Uu4LBn|*tllL780$QMB}~1 z9^aFAw7u{cV$p8f!#m6N4JJ+uCJ|-$r~#?J89OJf?%32AVW(x4imZ(6NyQzbV&lif*j>>{umfD!~L zne62}Rlu2*z}8zjckJF7+P3>PM_?9k%0=P;R^CDsZY9tRp%7iUd*F>r+M&+Cl2o%~ zq4B?wyi-Db1@EVP(*W<&J&ZiB2Uq4`rXT0_QZ5Cj#edv@fIkfciY>~;y$XVnQ&s!Sk7FBCj zp;g3g$ses^tHi0JgCiXt>4N1KWWzbVKM&qV9h@5fc&(_1Xoh?b%7`wVGP>#?)BEc%>XqYF zq9NnQ_%&u+fK8V?WcjW7{)X|!j2|;%u#*=6ev@!#gSUAtrn_wf5e0>rT{~FKQoyZ#so!kBX_=!I`QU-uA*S3zXj7_?!--#UOLb(&;{2JyJ-B1r z3POY?WfjPt1^ZrCt5Z1vq&FNI`p&gWa!gsNTnl{-(0}8gM|LJASulDtUO64S28-Wv zn}>JRFOzN*5qViwO3qSuhV6r1CO1d+60tYcwq2dbM54I=K($s@68?5qqXunGGPHW*m)8h~UGCPiJjIA+IR6Q0)o={q)Lh4w(_iO@1(3$Smqf3CmWzY%S#V5G)|KS0qm* zNPas%M~w64Y@MGWYBSabJ+j4?RN6^kDbdVOiKY%~h8G&=71(_!bWF3@<($TT4e040 zFg3Ruro4qtuUzEaEq3|!8odnuPKm1qhQ0tszChoIvKJ0s1)p;HQCFcR_RMP>0-4Fw z6%k`oGG$J_tA4u2+I?LHAx{R{B7FfRT@+^AV}|p*M}wLy94}%8SLCPz#FxbfU}UV~ z;cqw*p(sHNSvX#z-gomq=(npo_O|J1bUa8#00_4Rgvm1#e(`KQ1b?+{H zk1F4{&3g~wU9XPqdm>vJEQIAZW9c?>`I8CBG)4&F1&A*bRQeuDwpW-ZqwtzXshU>_ zZxK#@yhI4@B+j8{FBtYa{qjyC!&J{u3Ab)EiEvU|;donF6odXgGnNX$n{m1@MmXG$ zJZGxVn>_WQ&bp!o7;j1}xiS3mKJ}f>{@a>JD2m{q&^XwjJs?j|Mb7Wyb;DybJwSQn zaq%8m02d&asVhQsjdtqC`8DeKTe|d3%5&?U>v$2loL$P=rG;fZk|IvMq&@Yoy%`67PpeyR-wL>T!0Dv4xqI#S~a6R z#%MLO(O~h(VS_M~naO23YCF4T85*|!K_h)YHmKuWtCo{h8L8ctQ=v^g;b?47n#&O5 zTXGmlxF+@*q(Nxq>YH727ac(n4h4?e-*mP0j*J9uPr$iARt~uI&av@I72RH!eT16% zf!Z|GGt$&r?il1QtP4qj38n55bW+S3V#4y`@AxznV*`7Tq3(0{PsZ43nV~W5Q zJR;Z4D;NGvFJ;OH`O=4@Kb8;@8vs)KaDv`O6~_R;lX@&eRB@ER^!6{9v@pnCn>WzH z-vB-uLi-{POb8mEEX44r)*U&mEkX zWo^Nh(Tx|jI~otCY!~@$6k_14jkL3T5#?zg05>)!`h}USQFM9jL2zwfv5w2mT4q(9M6P)|7mYNjiuD zJy}9(iw_0HPg9>xs2|?(z%~lkUZ-jhA4fIP3CXl=q@waT zDlfYz>u#n{Gu(mC%IVqXX*!9PG);$_H&9PNEB)&93V zz5dfAw_B3uAU{h?Unan-&gr@&Rx|oN0nE!zzdJ%5tt_QQYw*Yl(lNp@x!7y(jt~#+he-~6G|ek3uDlj@>oI1|Vvq!vz0+G((wwm#!BTQOu}CsCd^xvQ z_2HSb1Cn;$(D2ak%Aw&?eJ>Ip=e(`~*~@l)8UAJMmClG;-#!N~Q1ZGJNBB%O5 zWN7%6*+htttfMyf=&Gux?#dP6(|1mhgY01%rcDG60tDTuusFrAPu5(6SAcjWnAz8q z-ZMwN(mUtM(w;IsOGI0Jc77xlO<@F+Qlw`MjI1Pryv(Wyc{Xz>Hng=}P!ey&J2zN~ 
z@1cF}mYW+3x%D(-tE-b;GB(D}(A5+7SwrU?8m7+tjL7-Xf6Zt> zz|o@1`RYR7lYEvXzlu3U$Mh~fLUiQ+GR?CKXLp=ImNyWJ7nacMr98UrITaO9)ZRYc zlg=2^Xd=0qNJpV-W{H9Oh}>{xcXH^L)3W#jh-=DpxkO!w_GfeUp@r#$angXGjhtmi zgjy=`*ph6xYv9k(#2pe)ML57lIZQKst%)s~L?;={^dCry0FMar<_NZ0Tsl(c@9lpP zNzlUsw{Bx;3}=y}R|r>A(0!|kJh=duUDNz*16NGB$F$KnCt$$fLfij(U}=_j6P-xI z(sDS=op=c}Y(G?T8pjr`AWb)1%W;+=ez0F4DK#WLzUoh~qIht*1|_ZhnNL3EPF@?71&UZ9IPhqee`SJ@z1d zjmEgKuTK~xF-BFdrmXRkC%&C2&oOe506m5WJ;{_mY*l9`U*sg9{*2Dec#yH(GP{v8 zEeM{9M8$c%zZgSPIaKC0Y+sdiiBApX`cDR64_p$pFUW z1PmDC6=07snI4_dY)z&H+3ath_;#W#QSX?y9~8$z;6kROg@%T?G%lvvCY(0Vw7_2m~z(8M(alFt-1Y3glFn9^%-NrHCmae zbY?H(V}V98m0;~7)_7NZ!7U4fCsV_FgVNEDiRuB3Y74DA9z}Ap>P#CpBam zGWDXgT7y`Pn6R2mV@`~$0cm*cRcjLl4RoflzjksWYI#m4F*azKB4UDBZa`iiLrGhT zMZ(O3U1_Ea3z4^0rld;C6e7C+wpBM?FKRM{qV@^5sB;2x1n=$>+psK4y-0i4k6cmT zKDoQFn{tN~n3B~{3r|JLS!0q4Yc<+fX(Cp8@>C?divPZ~srQA3v%lQ`!h1G#?SPJg z0)vhMMDTVFU!6{qXSl}phv68~nIR7Fd;_ML645x{|IanC19S-qh7c0;f|91CK5)}W z)#UMJQ|G<|O>{4ULy?m$*6hC_TtQrqPF!954Tk3Q@s(7vmZR;9JNW^INY-0^Jp6M zhkFxAy2csIkK>L_6j%S?5VOaOqrmVG?pay3(Vd3-{K)W{qm)*)y}06yxB`YX>O_G? 
ztebCC+AhokTR~gzhd+g%1>}z1BWw>5eJ9tK@e9K0hTbCqpS9V1o2d1M$;;Gz}b z6cu+v5rMlF>I;#Kb#tZ<**+XP&(+IZ1up04xdmCk#YoB)=!R>VtIFlo7wXp=^WZZC z^fn)z$l`|xdW*U|(xe|$ua3M?dlsBArv(VWk@UmJ;x-f#T1*DU8Tc&j#_m~1m*1OhwcdG&+S=Hgxw^Z(20Q;w|q&Q`D}&3d@2oz9586)S4P zF(7_y^tTajQ*V0RiWRLjxdW5|6Nlwq{sBpv9Yh(Ro^Yb;=Tv%wz*L3{nK0+pAEkj+ z^O-s8<)bw8p!M4CsT^mXYZ-l&nl1m=u6=GG=lRP z+KmG;jvQV?Gs=UcPYjwS);Y(Fr1#`Hnp*ZE_t4`SweZ}kETzF`IG%=;bQEU-_1KkR z!lZ)++XWXH$|m0KxaPN0UrqxA^YHr^fe<_0K{#pN0YPsBhfT;&k1nd}wy`ngrO(_eq@N3%R6WQC%|XwoJ7P(*CO$v<8kzb5If5V#4iB&+x- zjUg#Nhy*8T4?ZWv)m<~iwGKMR8FWu!)^4|pke-odycGc;Tz;$XKWO#N1sS@5TL5KP zghLX#yX4P9TGNnc*tdVV$*mr^ujv@aQy#en65)3nabZSq7UV2WKV7e;69JqhBhp$6 zv&p&*cay@sf47uye@B{O;YMP_$y@01UglJP|Gr*YL9n~tDL(jF0rBC%q0xZ(N)ju+ z^>sOSwVI}5c5i*rIR8Y?Pkd}C3E#Z`j_iEY$rb_+{QYRjwV+utLq>Xr z4?vTLuO<9N4nl+o2zqpNPI(MFfI`awU;pNOaXUl)g|PpJtFp3;NP1-@Nzb4Pr5!tn z>I$mEhoZ+Dsl1Uuj`>!Ur$oWe_vHOl;bm4`xzl}TySd~EVo2VK{D9NyQ}pFVx+ROc zd*}r|IbKwqtKGpZ8S(G&jA=Y@UqO3H@5GwiK;w8LKv@zVum`Dp^22ll4>=ohrkZ@Tsq z7CTCOm<4W77WjRpw2<~rbM`z~!QKll6wwbr;g6~I2fbNLFl;dG2I5Vt%nl&IoWtUa zgIICHbhIITjEihMuRn@7omJ|=mBiF=PEV(BJ2IS>F^FInrUrU(ebKo>nC453j!xQ1 z!$At%CvPUn^VDY_oO6g4X|Rxg7Tks(f5>PgEy+J)rEmK~eEB~Vl62y3yLar~yIn@9 zH6O6D?z@?=iwN*hNSxj-Q1c&Z&Tpp5&Gg)&4nK4x+fL8@1n4YXwh`zcu!_J^0vicz zCvb#7CxIy^Me_N}+fYYIB^U+#c=9x1BnjLBMs~?oXHvI8Q^2ag<0Pj|o+Xfdl;BeY zX!*%cQR&MB{*}P@3H+Eqh`=uh{EEQu3H*t`e-kLBwsQ%5o*vt%w3@&M0$T`dC$Nn` zkiZau`*`g?K*b9L9wyL3fDR>`BZH4piI26vNTquTe1$*-%^jO0HbD*aIFG=50!s++ zkuD``9NR?M40(`%AV9|ft@1P#&k!KRA|FD@F4(>Aw(Xm@g|^~)?``s4dU;-jE>>RG zgJRIriTVRG)Ep@oCxhhQ;9SY$Umd445OllM^B3o;moJv9?_4Y{rqh)Y!4K8n$ z`oqP|ivyklZ-Lie;13i)-{Zz=O~4ZdzZ diff --git a/utils/__pycache__/metrics.cpython-310.pyc b/utils/__pycache__/metrics.cpython-310.pyc index cb615cf06e3fdbb1caa9bcce3540208e4ab164be..c24832eb861bd4ad7d32dedb02c25de09f69ac0b 100644 GIT binary patch delta 5345 zcma)AYm6J$b-wq`a5x;kcez~dF14cdrerCS?6|cp%U1NVQ$=trC3e`2=~>=cEtij> 
z?+mS#dKo%cDR9sTu`o@7Dlx!?^T%!4Jk&Q@psyqaS|o1z!}YXiVmq*V2^m7&k7FT83A-$rG1oEz5WFEYE?I<9R*?ZTXXgkMqJM!V4>` zHpchxNnV2XaX!VTp}oNGh2zT&r{{;JzbG7=J3QD%&(K(~ zOb;xiU|;V%+_-P>w9x2oBqT7`MF1hyW}+dgZIoG zd1UXAW@p~@=lXuL?ag%@Uo;!uzTSl}zR>i$j_2jkTsnZsW~s;oKc&A-&EPVt4{<`F zAPrJzcpcaE7bj8Q1Sqo%-9o3Sn1#ZLknU5uZ~ghF|M?5w_}-Teg`|;yhV>6BVOfL6 z`voJ!CnG9plC07}LK2ynB9ba(h?>R%TPqE%=g5-AwJ9=4LI<5|T<5V>?HpljkQ$_+ zE+&%_b@VB+z*cEg!(+G0FrYTb46=iqZ#+j<=rQu~t8;9SAB;&R$65>)VaYtqGcFUO z_$nC`WPxkL7W3xiIM-TCG@kw-*Q1q|$jIaxtb8IWua%0jD92@dlFX37q)(O-GL>B0 z75q+egBzC_*B00mfsMqk=rSkshY0j4$_dr0bX%_>k)>O@>8e{6yZP4ozSVAa52pO& z`Xq7+Ct2?sl~&lCCNnaRr9)&eJ(!WY2<4R8Tqx6{6xa@yENA5ODr}CDr@uB0dp2dZ zKx7V{tjtZ4wf&WYTx{p?em@Hx(lX=cWNd{>^NWNhUtlvt8Y?kM{5-UlH?*aqHlF60 zE9>pq4ei;r_S}Z{+=ll2TKibkKJ>?Ac?8F^O8NK-8x`Q(5;7ZEOeT~Ka<`Pq8%xS3 zz#|4_I0&miMibDVz>#L7O-cd>jdh!#ZWC@_mSyHSGAjC$t8^iL=K3b%0an0ZX#N-+ z`*f9tdfQp>MK0)?hm9zJf^V2xiZ{c^I@E4@e#7nb4xb5rVqP*0=w2;}V(?7jS7{>n zYNEaagC*2Ej@=CnTb%3I%c6i)hVA(mdQPY{yMA>xxZ8RlZuEq+&|D6BR*7y3K5d=a zq^8;l>OzHW%I?*Bj;J@yr(FiR`yTVQw76fMva6^~y(k^CXukNo)-Jn;GX zgW_)Z5flNp__hOZS|DFkEm6ekQcz28esVvEVaykH@1p7w#?CJ~!VwtmRZ~2Qnjc0u zi*N=YG=PW}T`rK##VLf72$8{1!ljFQ0Oo^N(wkx^HLm8TG7p2`kQ(Kr#&{v9WGWRW z&Xd5c6A_gI-`J^(VTzy>N@7LPzd9Ko7I)9u%hcfG0} z>YmeHxY;N;s@=N0(5%;+-KJl!KO0oD$A$t(O;S$#M9fGwBH%Zg)PQF)1E7s13J;U| z0*es{fA%ZDgont}7N=*BBA_3q>*RoO-2y%vP|4Qnq2Cl)8*^P3IBu;j=Ie5}t_r`G znR5MgMas3PW`zw5KQ=O?0aL_VIxtvV#*sRgOhoYy(3*h9f?Br9Ku@(|3F#FU9xaoi z3^@*TM#)t&O0AONz}z?#XDwa)y)>n{*2Pe(O|PmUc>FSOI_wbI{mfGKhL&15rspd# z_`1f8*8gja;mtvmv2Got8}l=x9JgS{FlDq`uD5ic3v-l*9gO*qiRQF{HAoIp{ zsqTXmk%r^5WExULMkb!8!t@I=0}jVp3`^>0Aqfp-2G{$p6Mj=Eyw};ksfK$$Oa{~m zelo2l%lZ?L(O|k2`aJdSQoXZ)V401c^E`dJjJ|_UHEB2L^{?bRbQ=9!#1SxV-^O?r zIfQ(0*Z9`=J&0lh;ilim=iTKx_&$WND;kU9No-dB{=VQh#>@BY#j*qsYEC%8E8|;-Av@Qo<+Zwkk%tMcnrs5ym5QnrF+6{z|EyTk9|J{THXf$Ov|Ju+(&de%?iM3yJ_GS z%Gm-?-83^;j%I-9^x#_IbOkL?r2+}bJ>!6V}koKvW$r0F#&&u5fmER8{wKt%BJ48d#n6$T3_8>50W zP=ON&g3YII<|=Ua7blOa=e_sGk*mr<_Z)lP72PPTR1Cx+ 
zhb-EVbTMi|tqW;f>$Txl@Ac<1lrpbQ?Jv0pOAK#sopFD$G4 z-&PtmiV$JJ!t~OkXvTMFr5YFS2Ps-atS8{o66#BC6Rgp(d!gp}LK&^fqDXqLE5Mgw zfPW2UXR0L|hnfedY8#@aY6bO~?H^shMv0)v?Gq?z2)HeQgWTBgnxrlmPeB9fdPtEb zv6yL?`%KL+QZzLyp1S@|Gk?lv6`Ly0P(+HnTD9Or7W>_nXa&DB`!RYv&^LeLSRYzK z);JH(B0L9cx>$4%iXB)Tv)jEzyJj@%HoVwvSPZTsH0Rei__g3$n~&4)1gXmL<2Pm# zP0&$SFx3w(nHs3Sdkmm)9NdBxdgwJ3CSJb&`AUpML#ZvO)tllgIAROo_Yi&`;i~|l z4&2Jc%P7*baC!SfZ22RE^HloD^@e`!d_;otjv>zG>?`9exkh zRTRWKa98T|JL1=&D%2b8X0MvwI6P|pCBk1L{1w9AAp9-Dw-COG@NIya1+D~FTHI|d z0k$~!zgV>rnbqEgtII&ol$oJrtxWY_7Iae+uLiT*-H)K5tJ(&0{3BNAHSPT30fd9% zI8=!D*m`K3U&PdTAC8aq%`p?gr9wp*yuN)8tpr)Pe{S3H>QEX>Y5Zq_Pat(kW0c`V zMQt&-{AlH!j5c*zo%%~mJPTvLjpjWoJ_(5L8TNA1i<0~V)QA@W-qH5m zweS-Bk@e3X?jc!!;M z0sczBb6U#56}$+E3=<9+n;Y`-GDkBV<0{21YFgr5Lh%X6#P6a=qo3(8UaxbvQLl@yq1qRM zYkT$%l_R=XR*u*LDU7Lqz@n_6V-xa$`tM3Sj?z5{Pa%8<0Vz;1n|Kwa5CMHkEpvmj zsTD>feyHQ}e%m=r?*={ZP5_;nx(Tn?DWC+{-@rS07ibb7 zWqCJ9?I5k<$a(|b9R8_H@XSB!973^ zP{nI8-7-R1~Jv-ES!AM(}Pj zp>U;>)OF~ha)rYVEGT1yv3jT;2A$8*9kfw9$tYW(Ee-drl;x0-dbA$1{dbdjdJQ>z z?=Y*!>unBm+9nxHBk088I|)Z?1{O%Y-D&4aW0F}TPJ%0wOcd|@09RXc&5)+X6_|Un zRbQ;7oRpJr0v#kv>S>!y1f5W5ab56Eb3gar#kexcIteT!a9nj_PJDnsuauLNy*gI( zY7nR6f^Mqp7R7G%xpm9cY^Z1K(9%bo41Q#(Z>u)Xh7~8`#IZI&>Yep2M-`rvk&E-3 za1*@=Q+B$X&IMQ;C3imA1#8xvXp%TFC{ZWYK^E7Sbxq&;#>I9FI)t5w9d~>fq*>zl z=Y5peZJ^ICYQw0-G&#Q15?ySG$(H!Cme^uTn`}uewj`Wv6SiZ4^7eVwOv3iSthj#| z9qMRut+AG$7Hiq%HM#e&^Ytt^JeVZS6bwLM>!Pj2I0QBnb={!r#$`HLCvvjkzm_Ha z5V57#A#Mlr)Kkmms4b%IZ?s+hR(7~O!8PggIie4gOO{=%PStkZq453NJwsaJ6rk0D}T~A+L)hs?+X^k)GQ(gXsBeVbtbi7teQO+!Uq= zprt0RN6QZ)+=y@kfaeFo8Lx7ITp+GRxEi75JCtzh;!=P+-0P#OHlb96a={NLyBmsy zV3Hxq$q?i1?x&-<3{UbXj{zSBc$~KZy=puGyqI!-8MSW5)#UxHC2Rp1regsA&4|5F z5xY{Gb zLy=67NuZ-Tb=aa_=6sVvh$-i6RkW2l)n?}`eK?LBWob;xSxYm|>ONa(s*Vo`&o`+8 z|1|uNSip`G62;e`IRHfit#5&Wq10my*%4@`HBE{#oG@V13@(tS4igJ3ABn^8;z=jq z1QvVv~J&6 zbyzV>skK<3Ew$dzoIo=MD~Q{4fetJ6U|p|=fP56miJO4o!}SO-dl(o#>S*FkJK;ou zsC+=^Z-N`!9Tm=xC{ERhj4~g*x$g=%rv}KK&&hT&k$jI9?F1DacH8BSDnHaIKNPc5 
zFc^Glo}QpqT6T{CCRYr6_T;UJIojpJ@WQlvLrNH}B8m`m-)djmi}5K20KC9Rb*2D; z3dvU$#c?r&Eq5Sna(5-WcW*#ZChA6%WT*!rPb*ffYMJ8wsJs{fGgTB2FatcrsLbZ$ z!f;O{*LK0h#I((#U8+`E9Aa8PMb&Acxj#*2cVOOnY^*2-rNE0T6C+X+)gs&{3L}PH z96ttD7F?Z3m+o##t%=2Hj&?CWi_<935OWWw26H%(JQW7KRtzTR__+IA>O=dbS0ooH zQtN5FWQfW91-G0z(eW-}6y!1l`3PZsKTE}2J z<{pKZCuLHH!+ru%Tcq4f$JT})Z+ypD-49yL4q}AC9V7q;kvgG60t6?CxSZ=1ys3qc z!|BQ4LZdJTgftj1fwz*EtZg5RE}Q)C@0>doF>)GSq~_5l>tgD+*Z~;IT=9 zgjS+F!Q0`>$$lp&#vO&H#%KqDkEEeqgL=nOoo9{*n=#($zSEIveDnwmhRR8R2iV!ho?IuNzIJ4^5)dYr+e{CFf%a)@wpfT+Yy&pu1x8b--n)b8Lt zj{X375c;6SZD3Q^kLqZNQtGIFgdQRJ+~E0R1){o`*+UPyk7RQ0sm!_!Nb7Tv5u-Rc zCaTjFp4)e$ry6iwy-+!r*IZxcwLLNr&Vi>M6w~HV@J3AUgD2n#%{o zHjr9te*h#;ov4<;F;hm(Q!HCZ|K(XL*)UaBP7FK!+*LGj@w(|pi< zJKKB12-as1Bn{1>gliJmf7zJw)RmWmVQ6rmSV)Z;(V6Dg`ZY!OhgXZ?lM~(FWUE)u zCsJx&hYcB@u1tzCcTdl4^eXp(o}-x>w0Nv|1j;y+Z9}RUuWl1--L1Ly&XouK$Rg{W zb`R(F(&ycWb9?tnddBgEi6Fxf`@!8%0rO=Gz`1)G4q*t>fGi;vhDnZz7fx=z$j2IT zC^>+fuO_~PBaR__8R2n+uK;)|Fe?`?phyqGMe8KCJcV!->X@+=$~KRUzAe zXAr)N@GQdj5PpF0eS{ynrQYoS@~4q?AMEX*r`)G|tGA#N^2)MRzlaKTNI5dT6=9oP z;(IO0^B)<fjI5X_HCp|_mRH0uY^CRBEX+Xo>PxB5* za0G7-z|%ll`OKi)%*U9h!&q;in+}Sj?#t`b^z-gN*7rtU1`UK%wOkd#bo=}JdQl_J zAp9O7kI)bB7#sUSZKrUY1Z^etz5OO*0Ei}_I8Ae z5#)}@tau+*KZ9_e`>+07b~d4GVaOt3c#tW=RG{r#c&b<^P7C*;{AKXZfnVfbX@F;=@@z^< z&-y1~aD|2cbS!!{e$KNUEzgQy!#Lnlz}A`YTa;cxcm?4P2oVICS+9aL7dFb}ocw#n zv~uEOs8Vx=ofE$!4KVy8TK@^*HH6m@&U?B|dRqJ$wdY+cy(<0NjSbSjGShZowdHA) z-b8o{;V%g1{jRtC-jI&pjcw?2&)+&9#Cp`=28h2RNTz(R9oP=7H=W;sZeZ{(@PAT? zvr8n8FWUjMg?kb@C$H`k_`ejfS?(xqU*30c-@bj~v6hj1b$Ee7fme%#f_MUrKjog- zw7G$SCngav*;_dmMoD_&07{o43?n>`fRrPtL!3hCGy;Z$z!@!zDLI!A=say;s>-L! 
p<}O$ovHAd1s;L^r(<;bMc!0QdO}jeUtq*H?-KVGYbRyK1`VZL}OX&ar diff --git a/utils/__pycache__/metrics.cpython-39.pyc b/utils/__pycache__/metrics.cpython-39.pyc index 68ae29c026e43a5e59cdb7dc56327eda3b6e8cb0..9d80778b16353cd40bbea1ded3f0269e8d57ab31 100644 GIT binary patch delta 2995 zcmZveU2t2)6@Yj5Uj6?|*Ro{EwoKw5;h!XikQs0aX@Zlc5T?XTh{lXZ)=IW)Su!g* zG43iF>a>#wTu7I8+NNQoPMPULDKMUC`_czWDddSsVft{%Oo9Ferlr$|zP0q6D?4#K z+&lZ-v*+yD-E;Owx}Nz@#+UW^+ytKUZ|~35FZ<3l(%i{i7ilKI@8$s>1S!ZvJbaO4 zLdOY@aN{E3#yZP{`7rO~F|bE?oOglU;3GW2lc4F~qr98dgD{I`3Pj z+*@G$F23g?U8k9trQM5K-p>aLdXNwDA@-R5+YHkjn=nbvh11-6dfQb|3eMe8m}q$^G&waBDPy{dIL+?U9L# zv5g)l1=gZ%4Y%!1dI< zA&myt7FqR08Nsi3=jj)@-kxTRG@V?7X?L{iTeYZ+%7}Dyk_4%D*2scOdOceq!MBs! zx&51rYXuf35Q<|%mq8htAmA019hO(@4zC6Qx7ZzSy5$x?x0-u9a*XGxOg&!nY!4#i zILNkdySC1zHR+cjtWA)5S3M#17pRz(aSYFrd`&N85T-2?va6-t)uI#<_4F)6?vw$8 z$RLz}40e*OD3fZ@HrTjO3xJDH`fEXHTc^@_g>cVRmLSr;Zlk0Y0&8-I&D*wdANOx; z+XFl7fh~J*hdsE%9@?^p+xA8+ER#)Gq!#6ob=EXs^;|O0&YN^unF!udOYW>$djobx zJqgRC&Fw|x&~4!~J3Z8ecyUK0l``TTrd zZjHF#_LHOB~3*E8}15* zt5(Oi`mllN3-qA3>)x>O}Ox$jT_lXO296>MN&?r#}8+^Rr()uP#SZ z{i1KXK2cuQsGx{8Vb(KSw)BnehNzKAxtxMV_C8z3gfFco5Hp z)EE5HCLPkrLyLrmd8A0W!8^EvN8wWP@Xif)Gr(im_6*@fhj(40q#Ql~XR6Bf6KcI@ zfzr_o^0?Kjqj@OEySBs-Pgs(DAzX_{1{ZEJLGY5^U@#SX(~x$wa{_m6ui76wMbD~6 zEERIkW^;>kqOwxvrk-7?Rn$wdwE8sm3gzkt@nL#Wy%k?;c&*%-u5v{zWlP1iyf_9) z5Dx=PRxO*htPt5}Os!Cy6B}s3Enu>8S=2ztxNF7SVktYDFI9K17bf%9P^$L>2y?Ol z>Ss|Jq%k-OVd{q0P9u!knf^IrjNW?Hn_cVn`@lz>RF5PQ{nnmf?KS?QUH&pOy`i2< z4Ekr$DF;YvQ=(73k=WNehpIXPyAcfp{`Lr*o_JXWlCkbAh!>z#{QxI(+nr-h&1tt& zZ;!}s9!uU}w50la&(cZt_1+7ybI{6Ux${uwp^RsAF<%)M11gjnrXQ)psiX8=^=#@f zx}dJ7MtyDyJ;M(jEf-ZnV+Cd=xF}hb1sLNs0?Ps&OXNsPF4=UDyH62UEa&c*8 zNlb&v)N`fca@x0RZ1Dy*e-rT=#BULAA>Ky(4)KmU-hWSgFAk2GBu+2&Ye|~?T-Oav zm`e4Z>$@HG@1aFn&uHi84^5XCih_dr+Ve(b}EeXt;P!fp{CY@Qj&&^^zB>Lx}u zCB6YnQ`p+8#j1ezBH(odXPS_xt!*-aRZF)0Ik5pAF=UAh(9C#f(RfM z5Pt)#d9$TbDp$%@tNCh5JclYhm93@3huGxrh?|H{5T63pLZe433$ZIL?j32ll@ZgvmC0cO zn`c*kVzvqBXIlMtcyJ0O_=&*y)3!;`ivw6^Acaz&rJkCcoD@Gpk$%yq%XG}l@JeoG 
zM!b$r-&fU<(V(^V%UHM8zNB6o=^c9#>xU3o#JdPQpVpZYCQ2V59zaaMb<(Mf(UDNj Rop##X9qy>x!>AEu^#1@lo-qIb delta 3011 zcmZveYiwLc6@X{v-hDs4-lx}&*G>G``jN&Y5lvIl;KWUXl$6Fz$dYK;-C29rS?{{D z*G*z(vmgfus-lKYRFM>Onrlhb>crvm+!uu@<5QwyWU8MRuM(c)Or!(CY6p zLJZQPl6uH0Z8#;7&IFOvB9l(_qSn`RT_E!s*Ak@R249_PT<5k`ZH_R~@HD)jvq`r^ zA3Z^4*(z;oxNW1X;Rs*D-v}7?3uJ}vBbVsOkrS*DY;;K`yG}7!flCJAOCjlOIaW!d zTXr|Oc8b+6%MiZBFCV+i_4YF3q~+uqEIZupcX|;Sks;}bktAtE4KnYN9#1DEct^RN z+rPxPHp>zOLUCNwWk3dZ6BredVQWl z(EKvsj*(7WNwr`bZq6D37~+*aBPeZ91)e04_7xi?Mi=Ol9jzC&+{gVFZ(95vOTe-O z@2~_qmM+T@>R3WD+3LnjW9_!Wur07GZa;yEluj$+KqumMFL`Ir+I86TMiLV33X@g@ zCLpk7{&p~uz(%637j(TCr%X!U+E?uTI_*>6cHghwcbDmqI_h~e6sJA-V_~|TdTE%7 zqiZjD{!Qt2^?lz1nR}q5O`9Qdi_2oWC7&)8MNwe6r=8+3yu`zZBZx-;rhTDklxkey zF&3XfOe4Iv;6rJrI_MvuJ5|O1w;kI+0mE{JD4!7r(7G2ju5xWDYm`K>p4LshUaZXC zHj2X96M+kQa}Rpw5x2)V^0hNr7z^vJi9$)7L~{YL6<}(GoMGDPMy_xQ zh&iubiF|ng9ZV-L=jLkFT&0~!t?mB+4tiA0NB6jwhmY-j!^GqOor_4NuFvN}kL^y?R1B=bhP#c zd5`r;M=Qva9jS}=T9SP}WOPdghpI10@DjerV43z-SlZFf3EZwJH4{ISItBsFUA=-zD#dl&NA;_Cddspe4?8#~YD-m~nts^Sb4y0;!^%VIfAJS+UOkr> zr3ch+63b1Gm1omct%-$PrMz4em^`r$V6uAEwB>}zoi(-D@|<`AEm&11t5$`;-iH1B zBYG?Wfj4_ZHwJ|UT)jK^a_8l-n98i^Ha?q+6 zs3g>U)KPgB48K*+Cx`u?Lzf&Nt<8vY>h0v#{vxVAkHBd}1EC>sapGCEwKv|E1@RoT zx*yg^NRHQxI>M(5Su{^NANdZzze+yF0=6;4AdL7U9zqEwp{gKAr9 zl)j}-rl#m?YBhD3mejve<31OKnHh{BOk7(V9f-2#bI=j3|hw!XT%qAJcR;3Q?3i}C+iV__?G(5@SyK45KUX5R;dY5 zQbQv{5!8r3Bi=@gAx2ehuz`agsC1|aPc521VzVO{xCDB-aZErfNJrk@&%+jc>BpDLw?>8`a) zX-=CaDB3Yvv5DWK{~r;5LcD_rAWDe80G7SEN+ngOh*hodYX;D-oM;9!x~LlJ7_9?}#gi z_Ywa<{1dSuGwwSwb1NyPy_3wl2^7-C6x|2Yd{*_14If4c{+3|)X?mzJntDs8^5a$Yy!1kj;wk;mD`e*T=?#*2Y({Z*Ba%`t?}<#8K?;L}U@KA+90t lWLcFh-b4v^PuvHWN=)55KGv0Yr=2!;#2s~e7!5}m{XcyTt7HHG diff --git a/utils/__pycache__/plots.cpython-310.pyc b/utils/__pycache__/plots.cpython-310.pyc index 0e47a8e1d22c587afc5f2c0e7f4ea87fcf1fd923..b7b7670fa29a55af6b2aabea99727be32707636b 100644 GIT binary patch delta 9464 zcmZ`;4Uim1b)KG?{k^}vzrFv{N;*lmy3_eivVM_eNtPx5%EFdyyp}mu+jFZu?VoyP 
zrMqJ`D>ykAs$^SkAb?`LS0-gd0=7{M2LIqH42C2SFyN2@Y-5K4L2O8ogajy(@AaG{ zi-OsjH{GvazwUnh`n}h0KXpHQ?-@2)YHEsVc#gd=o_Y45roB#;w>}|Mw&S(q`l->L znw~dO24~#Jn*}pvG7V)jA1DMMb%PblBhQs_G~UYFPH4PsUQdl%*RHQ1SS-kQu6 z(pl?lv+*?Bv^%rqz^%3=>;qOlH=UjwA0I!ktCXn}tfFHd*pqXvsT@48Pn4#ujI(yb zfgRVb*^w(8EIIOJvyTnS-$;}7+O#Gb9ah&-G8S0fTmWv;aB;>enwW6{ zE)<39hjr0S<2pf?IiczBye`(`8&S0=H%{tR=CXPNH?cZ>!Kl?67c_-xTF~mvRCZci zHXXy!3f!&tXfC4xr(^R5TG+G>Mr^v(XQb--g*w`%+em0UbW*RkL+i{1y^4vsdQ4OG zFzOiIH9KfQ1~=VWfGXWuht}nz7-x6uTeYJ5mK8nV8W(3w6Rs~#mWLM^E%w4nJ6zp0 zraMt;<&l#HkM?O*-PN%i@Fh59^bm&CJ*%!{P$Nq_siKqXgnHhCe;sZen&aRZSXnR3Mo6f1@Dtmq^rw-Gj^ zM6qtIxRq)qnPD&Jvk@+#B9)?D!>X-0%L~EKmPLts<`mJH2WP?jCQv1#Vi^$!lp1+K zf3s6T+B_OniQQv%szDJuJ?Zm?V1s2SQ;vRwMKL52>8AM`Mf6!B=V|K?sV2ruQ73ceC?uujUiwZ;cR zO0eRCK!_ZHBLH47UCbOR2|3j|SbGErI}8wp1UgtRn`^$fI>(xEGNd=>A1WJo)e6Q? zaA3S)oEdyT*kwgic@MS#gFb0EWFg?)J}aqgV#0z`msB6?HA`fB}xUQ_HvuOw!o9SBVWHzLUs4M98STsjW9oN+Xo|DB-EOengXx?}_(|J!qrXjsb+Vs2**K zE{Tps&2Z@K!)*`f?2O#d`A)zObq=?R;~??^nM0N0VR1Lg@>`v^?`tHanJZ3}ypWwf zYE9*`pvjjIq0odqlaOKu6&sG_oI{g#WojxnBmNSYWV6R+yoN%FR}kYU1M)FB8!xdZ z<+=D8_BGkn_0mqoC7jQZfr9NInTcBzyF;+BrL0I7)^c$VVZuC@%{jR_%RksgzLZUd z#j|p}dspXoL10t3Fs662A^h5ycu~H;`-Om#puD5!nk6rxdGWjvyXJ!x2d8^7UpiJ! z+xCy;b3Hq+{~=nEMyk=y|{M8&f!ph&RHnUJs%vmq5KXuqK~!S2YYXiC8fsBjBQa4-bC`*3s0BA`RkX;P`nwuJQw9$@ z;prwGmPh*g*t_J({*gd{N4P1U>0hjUCw7YOB-XHaMMbD{tyU9p7ebam&d-watmUjXcohZ~yPB zjq+6ga{2Lr?%QJ+y_L+WtKZK=tJ^l+?Kbl^gpDl(r*x+W=C`p2J*Ru2DD4!z(q-(V z!8E5&g~#S$6Mg>~h8iJ0*Pu|QY#r>XMR?p-EA$2%wtgFFD9 z@xyvG?#99C8m<*X7pUVzUF~u^d^)th+wG`F7c|~|n4u=z4usv2ta2Z2bhGa|I(T3k z4=o}iKC%cAk~n=9ZVX$~aY^UqMZbg2JS2ZIxVd)5HQnAGt;pQoYA;RBe!}f?yD{A! 
zwdilC2|wlbp4Pw}koPXO_nkH$G^+g&=u)@G?RWclKZ)#qcI3lHz60#C`f?m392Moy z2KMHThoHyfq|9WaRiq@^)!CqUNX0#37A5(IwY~EBB`e#=(2EIxGrCwp8FV{{`PDUX!O26Z?--+j|Jm!Sezt%u(k`ojKo< zl64iIB|tV+{2PH$0`ml32k;_(bVwHcak+A7f;}PkEFIX`Mr~|A9I;K78zECE4}&c3 zqmIW2D7W`CO348aaS(2>wm2t0zH~i1Ex)@o(e^A6eG|Y7s9Uc1febDix&0S}euF@f zz`qkH6DSc-ZuCE>Gz8!^lo8|m*JJxXfv~3m8Vwjz{7eK#%?N>-VeG^5y~|dxG5N`5 z<9Y*=uPpoB$frP>Z1H2tCy1G8J62izgo=%bhSLb$M5$7?#iKGXG}`b*Gzf#hQ*!Ij zP{VUnr1^`}^6=0KeLa&83@u{=XP+1fvNg1`zF{Owc-}j|Mr8u~&*+Feb6K0*RbBpU z#d;OSh*nUm2pT$Y-Ff0OOeBDxi;~Nm;A7=tR>Xin(5c*G2*EseW*qT8>Y?oCqf~l~ zz>5H0)V7?-v|UD0K&W+m8`xw}s68M#dwNr5#?v#i;;Yo@1^MZf3AS5)bLA4YTE4mR zWi~6nv1(`SA!;PBUkuVrmF53B>c2I{)@A1ciIL0JB%q8mUA19z4Nh8QEl1g6HEFfP zWV0X7Akn1xv;G}%zBuc%RHa=JG*izkSm~l?$a{yox@i8%fIsz9M6o#IFAgUMZU;Y` zRFi_^I75KUHsI|bcR%sjH)Pka zVEL1gW$b%0Jh~>hi4dFR+eS+p{u|V(#;Xy}WOB&9?6RDsDg1UKx*yF-2HB5&l%B6& znPfo9*GAK|PoQni7#kZ?p57P9fKo0ms4im8AIZWC;#P{wsAp#LrGtM;csPmb*5`ZF zfQlb|33kwI#sP>V3s36BNYx8oNLb}OMO9t$TWhxNzl=x%Wr3@cQj%m7oOCX4c?Qo3 zU*6Brk^^+;)`}OX41(-h&q&X#70;vYp9i%oYpD8$eAn1A#^wCjt@=A~?8cTNtYeq` zvgUNuJDvCfyRV&T@6)E3%JzkwE{Z#L!8bAOb{J9r3V7Eud;st(MIAkR7b$z4E}v!|p*_2Pk->DI(~lg-zygNkgM0~3 zkWGiP^4SmarNkatk|l1Cl8?Z5z*jUNzcq9c8Cq(Q_3;swkpDD3(zLX`%xRlm&Xa&c z(wtbjZbf~i8(v`3tK29LIKyrPKkEfK7cy=xC#M&?4PnP=EX zv&zDV5yGisOb+U&zLuW5l8&$Zo3#)2{EE6OgR1};T(JV+VR`4e8xzXV(+IFm$_oB5 z(bv{(FwPBXl3l*M7NMao%>4Z6RozI9e0y<#h|*N0MJWe)?pq;sBB!ZI0gIZHah*iz z+|LYD-$l5G0dW36xEPDBrV^1S%@K7Y@&Y24J>+<1-kNg6htaZI^ilg7L1Co$0)hXP z-@bezr3{fWlurTaMGI+%ls=z3=ouAwLSXP(d;oi84AgxSWli2z)y2j~>mgSfGT77$|7-3GmbCyi>@ ziJ%@v2y7xuk2n!t3sF?f!|fmfD(W=U8=a&XpH=UL9(}3@+Wth3AZ~a4er^w@rGZ<;k|?C}2vsuJQT50tmo&=yR`Yf{ zj(Qds(Yr!b8lTZrPw7YKuMgw}GkIK{eIuf*ky1DXE|`-qgQu;7OI({9M7r=$>DXjO zme<8=;xL-TgJ@DQhaX2PuWk6(O?AX+N?E+>SuFhON@Wi!q=v5@|Dd=NUq35?Kp7pp zR%9id9R6Ivva&_`^ThYWpCrOF1U{j~DCF8NDoCW#BYyM1x{exgT!;79{Q&Qwbw{d< zQ~+*4J8j;EjXQz_`Yj0JQ~El4@5QylPGURWl3@_*!Qv%C{ThY2f%9w6+hbevCCts< zHnufu6|I@F*ftm4M+xZwWu&*r_ZXcunuVCB?k6s=RWiQ&LRHC_M*05vQIh>$d&S9N 
zh;$`O%T7DFQqdO2i9lJXHnPjGNAaFmi23A;wh- zwHXF*HKIrTcnl>IW|8XjnoJib&(3UkoelmuH~A_pGc9K~t&J%s+f9?|A@GoVYSW6X zD!!y}RD6#B>9yEFiw+&li5$-%Z>~g8g#CwOOQ0R;=|Z|pL1h2t-q>a|&l!m;5@M8w zS|@jJ9ttfwdU<4X&#of%57S?blNtM{xPz!>33Sj{bafCnP?fx$7epji5tbki>gggQ z_EXdA^4pu^!54|NL%y-OyWtrs`kqn7whS~Vr~W)4-;kTOjIkfe?3Rt}k+TnNIn5f? z!iK;H*9EzI>*SjIh^IRK_fzT5iH^Y>Ky)u|-LY4(px|7rqaIgKNeNAfPpRc!6OKsE zm|hTx)5BOqiEtNVak@$&`p(Jp70YTLB}OFfxdwRWN*`Mz!I*`sHl+4iq2 zbbdD;-2U$1dKw`s2Y2+ZQZiJsJ_F=j*vS>F#IT)2F!&+jnFn^a_*3PM5#oxr;ZxBy;IScMV_gmy>?mzC-8CHDdireySKVxmvwlC!_*Hnyl@N>#e%{tDV~?K99yZBsVD2n7#SQ!MlI)_FrAOKNAM|`KJMe z5Z>e7stUvydS_bkLIjn{#E}LLVkcin`+6IkRE8Ex)QZY=a4Gnzp2A?k=sE{@NJq< zXE{Knn+QA&kZkbGnc10H-v*N&;>MZI;qG0Q2d<8_5{(~Wni&C6KQF6SC-iY$K6Ld! zY6uAz@m2H@9|OR7`#Nzn#CNFEcL|V&0sn}??-jqqcP~_o>dHrhSh;EfdqH?F@zKR6 zWzX&O{0ylZU80LTZL4F6KPIg7!ICJ7 zGW9x6?Z&=qZ}3CxAa$hu!GDJ27X|H|pb*w;@*O34V1X=ulp1I^eUH^drDg(>=Cr|O-5zcYw?>=HHTaKhO>`jI7wsKf^8Zqm B^C$oS delta 6536 zcmZu$4R9RAm7boN{r~amfAzD{>c^JWvLstV#y>K~vMpf@vMq}P7Q<$>JzA~2f9@H{ zk}`WsAq<2>!O%&r2;s7VqkWS=D^=`n}h0dS1VNuX}XQCGy3KWJe(o@JjHbZ@)1enUtirvGL)b6B^gUrN2ua z-qBcAvPw>gD@3A%s#!JXh&u%B$U1YbxJ%G-)*W|)T+Vvp9&z{Pd~yG#ED#S0-j;Ys z+^gc%;$9Q4fx9zXo2!f0qs_Dh$U!dW`nc9U$LL`)EJNucZTYC*1eY8|f}O2$H>8 zr`AG;RtUWY9<*INXqfKC{$ODrJx=$~y`Z@QI1BW8;Y<$0jXx?8L;B<-F;{`1sLD%QcnB zX@z-%zbd!i=}ToZ#YC!5U{tq)nS6?AIW2D_ib-SEa%c2JQcq-M1Le=t2Efuw+j1?FQ@UrI+xrTq+b9i(FWSSHwREo>AhB~Md zT%^D>d#Mj7mDbS!4Fct$^&&h-+zBqNhM96v0*-80zs-A*kPzSSJx9jYpY(6Y&{*B_ zJUPveg>L8nS+_tKFVsh9N`+@#{^1hB>o8pUbpWO`Csl|kuaPryNi8{`Ezc=H?&2A4~Khe7C{(OEID7aTuD92SaOLM!|mjw{E_g1DrI7V^`Zu0 zcj3*=-wQYS(m>h|GI_)uZWxF55x8{pyIbb=RiZparDb!(glCjggwi5wtd(04lv>O)3PG@SRz|EUrg%yC7z0o9{)3t#FV%nR!DKFU(clT$t)8c{{$s&b{zYE z8{s<$sLxQu{sO=v(-ukE&-ioxi)efKDd6gEfDrL3)eb*lFF_-P(|`Hr0s==bqH7w6 zv{WJn#MlZsBHg(aB1|qRrcxnmQbk^a*}AjoqI{QxaaVGhq@qBCIn%PC8V<HTxX^v2!b1-28oDRjr>UjztjD{$%00HRwAGAFOdENDsR+RTi%W z^{i2XH>i&q9S=y&lHoEvrguedmP#Qr1Ot@V*NhMhAcPNGBPGvf3mytBuTfR0Hho6T 
z{n8AfZtCIfU2OwmK&2|979@3M74FOixB5gpVJ?Wv+=zwCVyRFD#*}uthMMO{iX!oK>O! zUz-F{8zis`GKDeP}b(mHs-I1JdxOyzw#hai!D(er`7#%@(tnHlY_Ul5?Lr_Zo0JDxIc+Yf%(| zX1fT89L*9C;Otun0S)jHu^be0Bq#ft_zT;%4d8lZyHF&vHl&}l)#SW>HKnm(V7>vD z&a1lPJ=Fgy#Iy0ZmvK>NyK9e{0u$Phc7%T}%9IL>mdAKJ^9_4#`BKvAkMqt~L z)bZ^sr%}KhV>wfWY=PcRx-#RkL`5*61zSq@z-Mi)%yvcV_TSvtVaPXRX4OE`zs{<#Uxa( zM4A=mi#p@{{p~w^e~5~Z)g<OE9|4@3_ z5$zr5i$cuekj1Pm`|RQ(YKIuOc+FVQ4%BMYt3miFTE05!Kfor=48xIHYEDb$Erl-+ zG}PhvV-7KNi|Cif`}9C8(hP?5Y7ke;Zc+~+;3vD`s>N+a+xou^MC2Obe|zj=%O$gW zc1QSO$sNZKl>>DUOosryaB?$Hj51p5l{{DW5NU(@gAB%_x zvz!orVDJAq!*arMH{b}Mrn80V-{n16tFV;Qn!WH}MYF@Pe2Z%gxErk!e`&a#e1-qZ@XyQ30Z}-cX!E^pD3=Z_ zh%1nEZ-sQNN=Dt%CkTw4NVh|ZKQG}fpi!#S(-x7KRign*o(e?)%5;TB0H4jvP~ht~ zHN!@&t+OJwRW^675lUk#ZKKiEU6Apil-XvRXgiuUTyxE)6VH~wuZ2qDgL6vPs?(P6 zC$H@#E&R1>djpY5t5H4IMq_|c?%LBn)L!W@-D_m7)AUk@(Pes|9`|il6+c*Wz~Hx{ z22^>wwT<7gCsNmqHNDb9ccBC-&=6b$e05J^*OeT}lIYf_0q}SS8F)UI*QYEs3CB6H zJ~jqXp^8WLo*BU8AmYhV^o6v@CoGIJA{H!Gu6OKk+&G8}l|Q}raq=c_Z>(`_1=%dW zYv0Y$chLsggzO`xRODVvyUbR4W#4XvjT5f!@1*~M>?>p4T$y$^j1fB*9LEt{c@3t4 zVB`C+VT9#&8;S%Vmx<(wRW|c3BwY3pZt9ib!~$2d3qPQ!qwvHZyH0^HB0ch7`F5(~P~U{un8- zaz>Fl#LtXutzy^1DC2HS!f1fi@XAPS`6sA==S_ABA&nq>_gSPq2LQ{qn1R!{<--iT zkkK>KSuGY|@1w%c5S~JaBK#!)oDoyDGtqaJlF6m*Iw&gQcTr6&FEJrUPNVBSR1LGq zX)UYY0$lwDP_0+DlMp1r4h8F>S7{>Kh)kLwC91Hw56UedtLOTGE;6_N%IMp~jsq`F z@q=jjB!BhDU`WJG7@ypTa1Re2-8vFQIVMwvwb$Mn?F63XTF9^r&FE|bTWY?L*B~uM zK?zQSxnvO&VBu&}2)-jh0*oGrvPLx3&L2J6mA(?Y%g)nqLZwlNKYVg5Y!}}<(WV=} zg(On?0=og5&V^)lUej3}jss6WY#$oLnYWyfp5_^xnov<@DK?6tmwEeGy%XyqllXzL zux|}XJ4*OR#v(ou%p#wCz#kavC-?Dhj9p7Etp9xMG2$2R8v;8}0{LfdNQAOzQ_M#W zDY2G>55I)Xlf2{Dp$X9mYYb~g<^iOH_k|>ffpT$o(n|lMhoNW4RH|mLXa5dOpLA!fU&w)8cOI(+|{@zU=b#|iNTX_EFmhQVzU3mC@ zU@p7iW0w}~(PNO_Zbz*+z!>`&|LV=r@|6gSHT_PfC}>M~M!dI(C$UNSDGJ1vdN101 z1e@qQ+aV*!6aK+ME0P79K`ISkIpMQj0o*$(!M@mz!7I*+Rl^P#_DGP?;JZoDP7K0d zB6$kWd>oC)Ga2DAFyxr+;n0x(L6dnIXF2!$|@j^`79N`mD-1p!@P8=r3(3W?p0F^X5bS4;;E=?5&r6_ zNI5F2ggp&UXXgQ6@!@6{iq>=3>6-``crnKWdmZi*>~D~@6Irsov%3Rnh2c%W1lTx> 
zret;>niX=@J}5wYbGiigpyl!;610#?B;ahOPY#hJ|H5QjuUO1ape}|d?iceksf)!6 zU-q=*bb;kr0*yb!|9vtX4v5da{b?-^nJ~+6ewhb=_naOqiHFR*&-^ii}qjnIP4cM--A#u2`ZAhPO9Nc|Aumk1v00smi+SR29^!W_Z{A_XP3 ziX;Xj_?fr5>v`kUmMJ_Ws>H409v=VPu%idu{HrA+vR@7o0k1%eKan#+KxNocI^paqK8g?1V&in2e^MMk9@8#{J&N zmUxpf>jVh9hJ=@-fRJTGs142r0&%gM<+ow^1(u>%me1DX7syiJz_L)aEL&k~v*+F? zOSUV>_3m`vzI|W!?c4XB?&tGQsgGS$>r%^>MHKv<`P!!R7sei~>r|T`?fYp(s3MY! zPj49Apejb6p;!Awji{a1&a0yXMr1M}4rLWlCzfZ`kE-ucArq-?6w!H<4k@BuG(4z? zhV$C!CS%iiWpvQ!7LDVYXc8+PR7Zy((JWdZ(E^EK(P{)l+j$l3?VDIqu7Ms8^mp5 zPz<4Em(hM+5yN=R27b+Ev4#8IC^Y`JRcu4sdb9<^cCiCJZrUzJ#0_E>io3<_;wG^h zr9I-9*dz9$v=`$0&Z}a-XcXJ>~tz)Tw13u*VJBTtABsbYFK{MH({JLU~@Qg7%MHQoxI zY$L)Q7JIpM=?Nn}l`Lj+$?-zo+U9&9yh=Ujd^X&5Vg%xbGO7w?R1O(D@DTJake>TxCBeW^WiQf(q*WPWqU zi`@_XdVcriS{l{?=i!=Dm4hg{`gH27>z9R6Ubwnhn)b#`GjRRpM5<`GTJB6d#49O} z5j&F0=8fc;tgt5BFij(AX6FpalfI1vg2vg@G;BT^&KL5@l$p+EuQP-sd#i^Xqjp!z zeJDyI7xl|;P&vuu(u=j*w3X}+Di~Nu-*`5cgDa$|o!{2y)GZ&Vj8naZWR^>MzpN*6 zCx{!CM%qf{Gg$w(5R<1B%orAp*!5Y)S&JRa)zdRW-t2-$504y3?%gYolY$z^o`BSG zk`ES3T#46seywBgxPIp|%R4r2A>m-51jPatD-Plray4uGx{14C^hxGYV@B?7Cmg-2 z@=J8Y-z&S*ey_xoVjb1@<1sZ&QEX)rqqo$mhLS$7R(T#E?(f>u^-wOS*5@Gxy&uT*R26=p4)SE_YXwwBxKWE7Fr z|B%+E*ebnnGJ0M|i#n-6kvduLDdM`eSV!Ar139h;UeKzI2wJ{*t&D-$T2$eB2z9*P z_BGLjbm6lreyX%8O-hT$Qib}EwoA#szBHppRJqR9m&Qy(t}YB$gcejS^ut`6Y|Yju zSD;ic!WVQA=}^j=tzkmYOmtjtBaT{a7B!W5l}Dt}Fq)Kd0Au3uwJOyX7)UJ#?SLI@ zQmPG96Mo2qpuSaT8CBF?P|H|?YllW0J1Ca5L7yFJQK(MCthOx;yBvm23u^Eo#3RY}7Wlou~loc!F(?%i^Tlm{6Gv?048ZkY= zUdzav1sUHND@sFT)0QEqr2-v=d@NlOss0#m_nonm`Tp2Mf9%YC*QiMsas_F|dXHvJ z>m-xIdF#IWpm3pOydbaDJ!SUCPM(zgu}pt#?7rBBSpHg#+394aQY^)mhL)Yq#Ij~A zd;Nfh7Y5WHli17#hPbIeHh8VJRIWIYiuI1?3MmVNqFKi*h?~WfH1g@$ov}@cO|kJ( zE;oA>zhXfV_=RHXbjetp@Q9?CgH+=e)@;#`i)rZu;!tSHFp45OZE~cFYi>XoGugBu z=g=pE2iv)@M;iF0sKt^To;o-@xmv#H22*BA%G7LygO0=wS7a{{N&`q)1(}|311EUR z+;GZFdduhfOGvKU5s3^rU9t>$l*ILPvE=&mrRn0VTtQ0iCT@^TF+3#iqMDDAy&Le< zNSj#UQr@g!)&_;PGO)gkeIyh6SQq=d)Z1)_ukT7w$WYE_?Ft47rZwN`7# 
zZ&Yo@vrem1d(~EOBj7iv4X8JO-=ub;&)o98`FsKSyC4(%V%hK9AH8uqFEu_!Tw=)6AZ{R)Pfrx2 zl*FOzaQzEN^cm+X^{Xnh_*ed@tmEOpPjJUV1V)5a5y+#qnh$XbQ0Ebqle*=r`n~Xg zT0khr73rW0MnqmDDS_}BdOjAQ>K#OA>~YN-_AzBUIM+>{7XA}>OoC8~)~U`lE*W-Q z7N*&EonO@V4e<+g`2gf(g$O|e4+&RZIYs2H z#L!0MS#B5^;f;r(5??+}%!u4U<())sAi~J)43)@@c-_2Zph?L06ImJvmjL}%D)JCo z=rQX+LRw^5WMw1~@kN3mzdYppwqdw#6Jmfsx|&fDi~F68jV(3(b_hAv$MriJU%qLw z!4A`TK{Q{`RV3YN|x4X^T$$qn#%UbdrOQt7E`6>Aj%Ff#6vB993nlZ++ z8P}iA6-q|KOfYs<<&R1K-}W#@FuH}2*w0&6Mpus47ZAR+=U zVj{WG0+gCS<|t%jYI;b>chOrzBAdxtgdI6x>A6A%K=TXKq|Rb^?`vJxx3qSI*xdjr z84}+`*(8vpXbD7v9{iKvab9ixS~t6n3sq+hA~O5OD147%Gp3+iu5rHEwsGb6(aoC} ztE?L1&L+%k1_~s1`~kIub3p`RsM_`p8-i<9R$K)(6b+pc@884yn;ZW zoL6sF?i)L$mG!dE_6dXrAR74SL)ti?T2+OE(OFTS*G}nWe>qSNT6)!I2PXZ(*QQj2 zJ`QLE3>LDrY5+|^J7fo~knM+$2F88Vk)X!uT5e zmzF-_h7IH(L#AL&@=y63k#iu(D#*JPZnjECI-;4=`m8^nnu zDGU^>lq9UquIQ=-nP*+;LVg^*bEd0KUhSaN81-}{twPf4{2I7sEr_CZ`6z)Pjl@*> zTQFXa@Ilv1Ocpa~zw@EaR`o^a)1BLH6>1saxEw%i4V?0ogXK^;T#i^l8(8qX2BU_m zVIVxcQJGgCR#1yzhr_^x52sXVqZZhql+ofOx>{<2z1_aCf@d zxxaf&cv*Eh5OY}goWJf`fA~4u|1PCNu_AWZj-1y3T%&drItdIGttj3awZlz#5AdT` zi*PMs)mIzr8msZ0$~bOBM2%C|y>fkR6C9>oZ#AK1gR_JVub?N^pRWIwNs$wMyu0qT;PXAv*)tV)>Uq@8!HP9e)i*T3Y+B>cH?9~ z`0<+NDXrXWH)CK=2qj;)sHT~u%Pn@3Cx`a`wVSGud4$&~6}8vwCY*D^8O^U?7?B;w zth*HO{35u(3wyl$K*NHaS%n?N>NQ={xo+E4ot>|)+)=s5_Sx-iN?x)iHa&TPrz|!|nzD#&Eu3zTR z_g^GI1j(9^4TB*0b1Druf@h&f42f*F)jwiAaEygUKYAwNyU3=tlmPNm&MdYv0rcf{m- zseFbABbd*j6z_6@bCH-$dD(f}>Mg@hQZs`&f^}|Sx?rZVX)jZ9FS4=x7Pa~LDoy^- zd3AN)y}u^*e-NPvCVxYO3?d6e))4tU5za?7MI7(EJ2;apY7*X5bi%^5z`Q=-;yR#T z!o49UwdT%1gx2q*^My4(QVY(LJ?(wZLLPS^lu~%wzyp?1l)pgTt;KOD1?-LspQ0(B zcV6x3ulWoKvOzxUw6ENnOSa>N! 
zOQg@bT6$J~l{$Tm$aRyf^oOY@krT3u z=EAf7I_j_MiQ&wgKi0Q#Ach@56XLa=t64~Clyi7mY=n572cXxI9M9`@(!4aUOH^eK zrn$I|5%0c_riSwjyWFGHzBHKxRj->-lxMW?dYaW+@G`Rjk_w%-kQPJmudR5RRJrNW zPvW&&il)hv!W<|x@B%H&l@dSLYyz71T*S0WVs=9Us&N8=@`VV+N+?2I4ib+#Tqbk_ zEHURC>7O1Sq*>Pzp*EnlFkLWu_lpr6HKq*7JDQ^$MV<^E{N1}}w1(e_Lua~>E zo5elFsPEp}kKRsTg|u!bQgFHlCb#|p-9~E<01T(IM&7#Fun_E)gdRaNuT$oOAE3Xl z??%9TL~&jkNJT%2wmChKNHAWLQD-pGn7ej`+yHJ^aIxb0GP%Oon=L7hg?xjtfb01f z<*jQY4RRw0iDc+bmNxCQ8#FXb_{m!Lz-8sUGo46qFbyKF-mR7*+BwG)o!th(i-mOR@wpj zW&(Z_Qf3YAwtFrFycXxt!9F$Se15QR*{bSlt6_4Dh=cYxe;8afytcZ|4$Z5R>+Oi} zTfKG|?-}6qsh!>qZNLlFI7tA?c(o1A;i0Y-eFR?cd1#PY9??#}1J47_+lKDh&W`N! zY{#Zak&(Uxg4>%Q?lVf$dGmzpOO%vBwd4K3wbw0H}t%JLZojoLtP0Std2t1^5!{^0%>Z zv6&{Qi3#sSj`K8&%fHgma6gGZeGC*^3X<3FesCWL7oQrknF-7F<&1GlJ^{`_*+HEK zWG!|22O|GNgp>Vo=PO&*Z{cmmq5QLGaS;?Oa?o6M%+*WC{v7)`M$-Ct8}A#=s;%wO zeN^Ybc_)p&*EzB^7RAevTigJ`dkMMy4rh96>xSD&v#*c`<2-euM1Beae3kZ$Og6>0u<4AK@AD+_3K8CVyntiWpOdPW^!dAh z`KYsQTd&&f+`cUqD3iqgOQmhY+8XXcPHxV9CquQ`J;tg}bYuQvh1NLvio!{pnqcGRye0#?V=Z&4W zRo+g#rHTHh zUvYhO@6I0M`tlUtgD~+JVb?E{43GLNH&8JS;;Pfv=kq1EVXKfqT%2IdiNcvA4#WTG zJiKd%`tD0#+VumqpDYNhk835FEy_O^U|wNUI$ynMeFtZE!e9&8y%k08OW=o{(C)Pr z&d~(VUd6@>btxg zOr%!I%yW9|#+dA*p$vf_=5oZMPZS(R=JcBew>Z!3t`Uo|b}n!n_gf+c4K&Vps|Q|l zG=h*cQX-i)XXHJk@(htC2zf!_2vsSXxB(=^k~CzII^mn5JWfroI^BDkE8ih`PRPee z)Nf_2oZ$v$u_2~R`FUzx3`X$?CO1>a%^3XBBhzpn7 z2gZ1{7Gao&QduL%2eSGSx|6)pyhlDn6D5(0J{)xzhj|BM_ZtlGao{L1bw;U!`7uBEoBQ zlv=S7v3EJzFQpLzOINK_ojgH|yNJ9*-4_8fj-c5b0za=7Euj~qIakuUd1p7ul&)%A zzjWSFK^g(N-8TQ|yms?O_1R192Y#qFv&{({%Z0V0SNA(Lhh`3MrlJ9o{rMYjyuqG6 z8XU*b%rHG9E7|8;4?*#jO!* z#-oFjJj-V1B_{)N_~Xsvs+M=OeuYL=0D&@mUY8f?RlEb3 z^x*rk;x^#(6|G_t5*Chrz&`cb`wnkcU%0g6a7sPULP~focvW~$Ko4a+`tdU4LYeT* z75>#Sea=sB-BUS7;tzwkzL~6X#yf*jq>%3>(!edb0-al_&emlxLyqtIr>6+2@1>?F z5soK*D&0(1zQrWz#7gNG{=1AQzNqKm~mzJ&~+tjs`Bg50$}&Ar0lyy$lcKV4D(0if8uuphM+oQ1PGtJ zQXYZeUy?daGE$|6E6@+@g%x7%em9s*ib6UWN=mY}{FL+9(L`X9q>9eVM_2YePeo3y 
zl9PlX-x4Pw|>>O18j6NI9BsIaMXcg3$Hi)yx{^ve(@_xPv{X6((z6HNK`5k~=quUc1OG*+|%^vkaV4jze8a}$6QF|D|jN@NUcBF7cSbz5HEw`c}0 z9nK5_1odDb?A4iQUOW&y!dBOVCK?E+0q!Wd5UOD}$v+LPWe4StLaqCUz+h;jjBBGh zujM}Ohb}y@F<2}xFRvp@1?J=R$~M`kAG)i7nFKgHp+y@OkAy$UYU!}aUmpAFnv3r( z3@=4!*#q(`6=zBZfjXW{e8%yKTp`OHk6FmL`=T8>K64_THyk~EJ{nY-5_b|goK9tp z*!dK8LJfH4!#K&GRiY%Uv%n~7BF`h$W+kUZHDDI-JmXQ)l! zCC}0agx{yY;?E+3dYjTW0|AQij;GRTsCtb0)oML*XywM!2DR&mXQiO^#VP{F0GyC8 z5>`B$gnmClM0Kn{(y(a9j@L3~EM-8BCow(Xo-T0v(UF6(efz|5`Hht=Y^{7}W#_hS zG+7{5z&H+p27Zlce{-F zbjm^y%8A5;<4a5wvQx0ed_kUR89U*#QdZh<0%m;L7*8e9pU#KFm*g|8-#&HCJwxuMf`|=fKAtedAJXK< z0i247lxgLXBAyxGj+e5C6XdC6%A%;Dgy2c%k}!+U5I38}9$#Ivq4%2d)yDiZ%K%@X zosjA^y`MF(_4pI7%F4EHb^M%&n)cIS2h*KD6xAKy7@d0*o)E_ZqC@_B+orZ}fX6+w znznMRU|EKUVMso1nxB{Z+V|i6I(VX<(Fh_}ezxCCB{6|e1OJM8g5p-1C)$(2E4fCB*ju5XAiM1Wv#m zZaOUIYU6K-Nok|%#x=fzE>|(S7Z%>Z#E(IFKb*?IkInwcu6%Ooeh5ua6_Ft6UHwpz zKy9V3FUaRR21|$=0mKN_s6lr66cZz8d3I_=NP04efP`RF3>Cvx$PU>43wkkt5n+TU zZw*E~OmV2fia-ycTF6~tVZyj?+`~QSUBUg2c#0KPW!Wkw&gjKRafKbR{ZqOK{5jJu zvvZX)xvtBna})zouInnjVEZ1@nl&q6hwShLy;&<(*;Uvcc*TTOg$-2Ef(xt|Dz~sG zT48I|e6?L+)hL4t@(>IzY=_2mGB%i-j*UcU&U)=3tybe|tHo7TEyk?2SMiEw@Y$=X zc*OSd6_5Ce)mEJy;krD!{&1-h+gtgFmua?sSr_eg^<;xx$*bx$dnLhf-DG)%KRF>8i!qN*GwWgV2#=-y;x`0VON*9maSZ#qn5@O>+M>X5B>jP*UkqP zQ1?$UwBEIA;gx};?t|CZ;hivyAq_O0`^vPCOUqWf;@{v-VOPOGYJX#J<=}OL# zOLy5`yQxvj>ULAH37hkn6L!7bfbG^p2p-WxG-26IPivSSkT;b38=v+*<0-a4z-#SB zyTxwiO(eq0o^w60mFGe4m|tgmD0q--N1I0kMJ1KK-IDAzOyN7_DM4UK9q|*dc0e25!s(@hP zEa6uGmbw9=o~5^m@Q+skDvJrispeUtuqpcFOjmm;2Q<1~+(8rx;P_7(SrhdTg$&0x zp3cRs>mmmwDwT`ey!8|@dB+Rsv|Aqf3b?cE5N{GWWQjO(9OdCBX;g}cR1r}+8zc(K zlb9w>5c&*(aRO?W6eGkQ0$l{E0UUM`wVFxNbo^G1BB|p~z;33RJQ%tCaUdk5~K}UE&b} zqw$R>S^k~3<4^1o#-SE6fTyL$C>kagm5R4kE1e(C2<4Eo8oc#W=~(mr-)a{ z>IE6@T|YosQ=LPoBda|sp!T>*?J+H4W@5sia!34HlV^K7Sk>Z%UO%gPi$>o>z%^+3 zWM50g@1mVLjZKLmd8Mznq_z!zaBr8e;iEL*T>zx38qnv$RDf*Z{rPNCpyJb_VK>jY$WCz&Qqn%Q(m0T)F!{MshMq9{O+cR9;M@`&10_=CzLYEXDZ(* z)KPDeY@mLvszlWgvd|?;$Vv!HAz)FDcw6oq$W(OEG1MjVqWt1O>mK4&!6gX 
zR+v_S&*GgUk&CJ8jN_e!H;#Jmx%2irG0{evqMtxkMz&7w_$Le-jT}Vjok$s3>wsY) zo-H#uzIZx65m$0fKJ_Gh-WZPJX2RrWx5i7Kz~DJge}BL0HcITR%JLmQ`~vgcAaBQy z7-ku$qm${}*oP?yj)nM?bSrCB5xF`F(3aN&5JQqK{a#aE+O~p)B7|4^Ii#N?)dv`bcL3$~;GLAx{oCp7_jw_&oYt5ACFNl(zqmXg8?5a@mBH znESwNQplI5w%_5ugNAmNWV2g(t*)h&m2w62a)0fMK(E!1;aKld?wr{%dFr|Xp=dR_TY zpmo#L=j2a!)K!pIEWxqy)Fau>Gn*)AD9^$>%XCt`Vu?1QDK~u$j6ZiRWNDb(Q+{&i zD0A~`X*eXkyPCW2qsf#+|9%-$&2`jV4=_dZb;`rL4)y$sXtX>6nA+NhaiV*!P2Rq% zP5y9K9UGGWva6}|J`G$>9c6EJr%+Kz1&(91&{^XA90BEYKY-W4L++z41(bWK@Jl;i z`h^D_F=Brh0B!=ta)Ghzxl!+S^-6#zsZ?^p!qvhUx5OM64~Zrk)hi-2YJtEKfj$Dq zlH5mIN?m(B6~}x#vD%p`C5fz6}Ct>uJ?RdapHDDk3qy4GzRgsTA(I37$hNjFbti0!)o9YXn;Qt4^h99cj) zTdU_M^B&o=ySuASuVhWE$y3Yv@LS8)AYE7LL8R>#y&8#IoTI_J7H4;F)z_)nl))` znpGaB%Bq~l>w&rt&KG6yU}s6?=XZ&Cg@8)xgBXreq(q9ROmUK0Uc4(Ch#zfW!plM? zo<~BZeHuJzxV7_~KGnOl68%WFl&$AH?YrAWJ224=fal~OJzet@>M^v@zh}#K`PqXN zrR6X^=RbiLHr|dIXrIvrwfoW~O8v>2hLA4ORKp`aFgZdlm|vdZ0#A6XKw zeqx*$b&(K9sq0N};$5#WMga;)B2Ldxs1-}-RJr;%&GkI7`Dfu1Q>OT1qI`+~nL#v+ z^G@f|(}vo|t;Fe_GGReC)_fxAL8j(;6SISoA|E!Y1ujdnid_V4)SLwfC)(Cf~-KOm`Da7K1qOd zDt<{oDR4j0;JWbmeMG#Lx2V{Ryy)FpDPjB_+*X7c!o7i)UjkmqU>Xbs1F3O6frF&)gj_IR4gnkqt_3j5B ze4u<|G%$`Unqj*2uu!_WLaRRswz;5{${6k4W)wl3V!1dA>LKy8eB+MxYt9sH`cTPI zCUC{8glAJzT{zw&>}~>kX+tWGs)(wjdJEB%kX2|rOw9~|EC5}`GdbvJpK>PUZp#vn zR;#ypfyx4p(s?%pE1|2}Mn%78XcB!qrNpjOP6AW`Q~8#|EVsg?Osvcjli?pjLS-4{ z9ty0}8Zc3HUgSuNF>qM8{^2?OCro}YvV*<2xZ}<^J6K1vC}k+MD38GoC@Ip9n_9~_ zInh=6^*lZDoxApyBzp0PpdLCL?{vyI@7_fzlsKV6);w1*47^fQQ`?));U-HV(v@=` zVdMgiFEd3&|6U?g5m150N7x8~PXk0N9Pi95{)?qj581Edm5&{(-%6e0I{*mG0s>a1 zd%aB16QO@y#1O+{_;=a=F!|20j?z~##nnj5#3C)>5r0XOeVxD+BKStcPw+b;93pKd zl3sprAl%~?6r;p~sqUv4b@2p#RL9kV6YX*VK%CKiP9PTJ6}dz#CMJmYN%`RMet(L% z#^o!=+j><yu(oQ6v2J5emTZ$^H+TpQVGv6?2jM7>D@ z&D8uAfrA7N5%>~;R|$NNz>f+1nm`qeSVLd~0d+xA65S(f)oSV%cm>b zAa)!aa{mvsN5`b*ak=~ChGtI{%E7RwvfA&dtg75o-5L&sR|JDPi|m&7pRE0Vp?dTC diff --git a/utils/__pycache__/torch_utils.cpython-310.pyc b/utils/__pycache__/torch_utils.cpython-310.pyc index 
d110637b9d6960500ae746b6cf6885a93424be42..c9e1ca1263332cbad372d429780e99b4d2eb94e6 100644 GIT binary patch delta 8793 zcmZ`;X>c6Jb)KG`*$ayW2!J?%ZwK#RU9R$N*q?=+lk6QBD)+bid_|FT$QVwN@P3n^}}T)Ddl@T z3xcF9pyqAQ>(||{U%!6WyjuMDGE0{m8|$yfRw65M2}Kcl>fk+{Ck^7b)b*parOhaipvTr zXUnQcxsihADO|k`(c(99RTEvVUW;;IUr@MyC$hqr;znxPih|RMPFA90BV(f@qmXA) zF*j`%vreg8cIT|BJuB$+7Wx4$9zHjDdh(?4Z=b0B=yM-2B%x6!v}cVs3zje) zLrU_{!En}0OvEr;(D4Ti+3a;3kX5#`*+8}Jp!ujMJI-<273KNGGiZt^(Zz1$gl!$i z`h7H33Qqzly!BFe504xMr9*c*rKVV%wcrt3@ak`g%(6G{ekc-QRXR#B0FcqwxDo~s zLFCx6v!Velh(RZ9D?YqzB0MwD?E#sS0%%cHroEMlw6JDX5Cd=gC+eSV zUgA2B-cmg67}!|SX(~^@u85nSUQ5&43R@zUqFqXT^6TNO5>sc#iyl~;J5gwoGgSYe zK1I`gQ#8x{{2?Z8s!zJkWA#=`{kOIL+s0$Jb!x9ozPjs7d{QcFh6VAX#hl| zq_AXm#>}}3U`K4EsiJ4Xt_I7_R#&@%m}9y$b&x!dn+qdp<%)rB+1$JtM30v3>*G9} zEI*gCZL{bEG=A;nswsfYI#vaXJFc*}DRyHy+<20Ez*aXxOb4t4vkswFoq$;n3$*JU zC3hGzIimn6bqh~0vpNmJ*EGjLG_Ley7|BwDn z_LTqi=;wMLGE*sTUbo=7^5q5F&H0~-b+9Y`XJZC?_3n3KUt}#_Xo$61x0hNW$Jy_{ zk{D$<|EGzsu{ZrMCC3LcJ%TKnNDz1rK#-nabmw3`>t0w9N>tP z^rk!Wxx6VJr#9mOn-3y6F?+wHsHf2}i*T^naOhQ_oC5&cRG+8zM3Xo#am%lz4z`LG zpkar}F^Fpfg#X{Ej-6{#T(kHI*D9G=XVK1o1cVOxh$M6X7XJCW6OFH_)qe767nMs2 z?_XN?E9-1M{Jp_$kY`+9>s?Wpa_!i)ot_3k8-(F(aC_^a>U=7PP5xBt1I+7KCSMztrq_kNOaTZDlOvWL>KwW&> zMNAx+HEr|eyg2li{-Jfl@H2{UuRGHu_F}*!PK%-q(dxgxu4{uBqxRZyk#82i4e%8| z(XwsKr&n5)9IA~dtoV)t{)$L{$G_6jTb<1G)z=^#>8ZE{5a{#5LX<#khz}8>aW&kK z5uw;e9ls|jIhmOtHd8F;+_634HX$`!n)x8MdbID8ggCxgQ#?;2f0GdL>D#fhQm;B+j{f+gDLsn62^cUJT z4)37mgC_xF?PQ6qV$m#q4gEhOFV(4v&--6(n>Z}zzzc9e6y68B5WjR5Zr+>an;@3H zu0<7J^7pNOF&(l1v^WCb|8V_N<7E2f-o$69^$=4nmTi%rfzQa^Fs<1+*ZG!TZtrKW z`X3!^^IvQ?_s|GfVN$eAL@KH@IdV>@QnVz!YdEn~xd~AItAD&>=W!RUpds8%mfR;a zUGnpFbc7>F5@86Qnt`jMyaHQy{+ZBUAn*t9!Iuz@`PA*TegY z!!{zWbr-*fP8o&9;f+#Z6#AOK*tw0_eyy{+ittWbQI09kcGiPCgfp;EQN&#Xo?3`` z8dp8WBexNod1@_A?<)utuQgMQ#WlBqk|wUdpz&xJ$x{H7iG{&W5DkTjeUtGIqPY?| zpGq_I)GWeMTv@P94kKh#A|tzIE6v7iu{@nC!VHG&XHrvbGJGF_DXQl}0%JmT*CmGT 
zB2YnN7>v{<7~a|={-1UAR>?k#3W3)EU^V0+{5>JQO@Md{b>u6|PR^EAAT}rrZh4pSb-CZw%dI^e0whQ1etf;;B_nomH1q-sdGfl}Ot#X*Uj>rVrb^A7;R~6Ip7wsMrmf;&v{6 zl_AhUTGzVNa7nQ#{2|6SV%l%Hq?a3(S)se8Q)Oy}7~irKb$e>PUKFK)t!|$i$CmqP zW6PTuP}{r+;C4>~9Pk=mQ0^p1>!i)eO!{Z~yGF!4M=w4JVZNi2Uk3rh$q)8tPt~1I zjmV{ueWysG1!H7I1Qr1LRLm`!qM{z!Ev9McDgX8Ujs`N-mB^v7aq)uxv;OUfpSz90 z3AzVw7(_)UECPTN;=|O$l#2KmfsYfg0ALKqW-2lN*Nn?mLt1FFK(ap;Qq{Sz$VQ0zUlT{ie;&PK1XJ@#NgOK+Bs}7!OjCJ?jLz3|Q~; z>J<8n`nAocp_!QVz5b6jZ@3afusM;yGE+$h0asS$+aeA`z~%yWL;MO6OY0FMW+*@i z5)P_!W)?#SfRjZ_G8-+d6OYDv*Z>n!JG*>qOKHztjLGOhJe%bf5}7O{3b{Li#2qA& zZ41}AVYzeSQ6h|rzr~OErr*4^=aGlZRzlh~%CRSCVRBY2>L#YLw5t8D=pwGQ!#r0K zU-r$d+mf)Ep)LBt-QU`}#Ey6>Ze5q#>FHeW!j;6^2Kg4Nzje!d*&}f%7fFiVJ89_` z;_wT7{;BP!2Z^cGNUDQ`(n;V!R%P7&ZvTth$2O3I2(N~`Ij_?z20)g6bihc9-f+4; znxx;~H_+QDo38*JM9!Z)b^&XJ7{4^Iw{IipD#`LZGB^Z%V!yn)6>Xp>4purd{uc*~ z>Hx~7xy6GhL1u=GO46 zTHClTw-B&d@qHRd5)KmgHvN#WgQl>Oc(4B;$elwNm{dENA06CQHIOH3Z_z^qMr6%z z!Htc8?9CQr5D4lQT`TYGTr3yM*LMf8@Yi`DJR&nfyLcRrL)PlPB5@KyrphA+R&g5q zLz~3u2b4<6t+IlwB71B`wcv7IVjpG~5of~1=!<(o7lWwv9u3E51B-=pQDougl^Yb3 zhTjjyE?~n~t$DcnqQzx2BDR8`siZ3w92Y6od^t$XpyY3&jjWAls?oQVXXh5CQ4&L^ zcpt{%BY{Ta!niz2N!#p$h71Rg4#kX-SsM5ZNtvurN{xq>3?P4A>tY<4 zuexsx0h5n{di9TFnZP5Xh3}!ksSJujJDhl=Ls^QUjZ26qkclpj)u{N6cX=66cJGoQ zofxB#f=5>vT53U+R4Am@B8Wz{l~w90Zn&prbx#-Py%>*Q)tqCGDbg1w`gOx~2nx;E3xj3SbtBNQQm85A7nFifDN!M}*>>!eqUk$pHy~>hS z^`ah9k(mgLNb97$@_`O<=jTFqIt<|isc93ZH)FZB={P~eEaigO!aNQbWk>WYl|}=a zii8iJ2Poqx8*&E6up9%qm*vcv9CZx((hyQmdR#I`lIPtjY^;$pP?TDmf=n(-o5nDe zSwS~sSR>|$u{|t^g>?9EgZ_GAX}rm{S31rSv0Px4ml;tm8I_bFeZ>trnszunQYWaXQR%AhWW+dun%S_sAonP{8VyvE%+>x(9G_I6dRcf4 z*OdMwh|HRbxsR+;Rv$jD$&Z5DjG+L_=YS zcY$Q8K{}^wcu)@-_#hQthLC&~)XpX>ugNSm4Y!`gWq`ebn^##f{Z=3Dh=JRIa@}#} zucTW2bA!)sA=@v{7LM4SIf2ZM)yX@Z_J`_KRf#7e@ z|H^RR#1k~tQv}uq$ri;2;siS0k4M%O<1oUhC~&mtyPHQc>X?o`==aTelqGgyz{{%G zAyIB_A)Jt=3E~IBQs1FZ`qj~W{7Zz>Oc=fJwB_N_xG=W-F1iss$W6-hSawZOzoeA0 zn&j~k;UuH9jo-!NQ0CI2Mw#C~HsmjDNmL6!creceSf#!A+?vH(jgn7TlqJN3(-PMjC;DLKBCY&hdJ+r$?;&rYVj3h954*}wG9;di;G|a 
zp77q5+6C=a941bsYD>So;tz~(oL<}EuYzVOQgXO>r!XXlh6bcYnUdo_KvVfY++RE#Rv33G$=P%E!A)F&iG1{N_ zM|N$gK222bA%KUz;mQ$L@Ph9egM>dv;5vVH6^1je;>ya z7Kap#CG~hbof3qHUdgD6iIFNT8WB1CNCqx|gAd|m5;T7&89E_;7}CIIBd7U^M6NA~ zeaq11%fO3nkcb{yK@~fxJxXnDQtYFbM*uEl4hv2%q~xNkqygzegSbTC*Zs#P_OVuf zd16cRBoIM-wK#;r<7X$fstwHlr-^Ph=)XBJ)Jy3N>I^7@+Slc$RMAM2U`j>2=pWs^ zv1(AWhrkvB!vxL{I7{FPfm;M*P4*>feV%|+u2-pbgupilyhcD)lPJ5F=}0|@r;Sjk zB%L~}7g}_~A?a;^@a delta 5898 zcmZ`-YiwM{b-r`=?(W?ODZa$#l3Y>}#npokNt8rcA|*?vqQn-QD2^(I^5$~qlDzcp z-PPPn%4C*Z7F1vZjq;iVNTRq{A_x$*Na{eXk=9Mo{AkmrKw7tH1K$As(*UjFB6S}? zQ@f4&omoZ?b#mbl%KcZW$YFdXVqttr*Gh3%rAO-eS`Sco$EBa*#j8dw4G>hj^Oz zfpVDl^F8<-;(Pf%{0`eqiz9Q45Aglq8s&pL1L_ez#SijBpd96g`4A|__@#?{lpg_U zjK7bM@#Dab^Gp0B9|v}Vzn@R=Nnj^0YWyrecT3~vHuULn^q%DB(L0acDXw49G9S1% zrFSx!j?A;wdoM>qtkR6*iWY!OSVZv?g#Dt(Ed+^dcG(u5Q*^W0Aavovvm$}fyDSJ7 zirmiM)kGKi?+oTn%{-U)or2}t>2phd(Q*niOTLr$X4E=ag%6chf|ysZgrCJq`!LZ< z0yOD*f*B8zp(fU#i*8v?e4I7QpC?icJs4FR)Idt^P4;z+(7bv8eccSzvj{U;6Q+wk z`F!#RtY03k`}P#T(huq4*dABa)eS)XPuku^t6iHK?|nfRKP`ueelGyu=^3Nk4XBe0(a>q{&C0@S*tUeQV;ZSs2Kxx^@qhw`>7hUDGG zeUZR$t%5E8wy}TgI1NRF=L@GKBGfS)cdjS~72S4oxD|0&b!unM)QQWoy(x8R3`C)( zM{6Tqz}z70m8_f{FehL^;PYf#36ywZL+Jw90>I@94d&ix)^p z_Tqb16a|pKC>uKlMjc>+q#&1YmkM@XED`B(15^{fM08g!baYoTJ>mxSl+rE}b`2m1 zmxSZ`qC|vQBGd`{#*!m!FFP+R9z+)`&+>gCgyQD@O34nQbNQm>j~*8}A{wx%QV=bP z;+&JW#js)&mOF2Y>%he#5r)lKEBC@pOYk79W{aJR+f}d&MX{2_rnXqNd7!(l9PQkF z_B73mSf!Hf@*rUsO8!bVXXO^`Oq2K+jfILOPh0^e=tMyjD=g)GPb^c9739mE2M4Yy z{83w=8xrdPMKo^)z=+8=J3q?S<}GwFJ%l(p9+qaFwrv*`rmOba6*croP(v1$4wG z)EeiD?(z{X-%fq-Y9~=YN#N529>4dX$#EUuvGVYO>`mL5U+}$efX$9)?$f*GdFB@cRs0whe$ydJkKHTpdadX!z-q1FZ{mYTla!qjMv ztatkp1wy%pg0ly0X}-3|%G#oi7D6-R6Me}FXI#G>t(qJe4w}^#zp1uiyS5g=@@)?{3|CVe z=CG+_%sQ4gC8#h_mUEY> zcqVp%km*)8Ea>VGbKGoh!E&*r7bJ^I{==S7j`N4K!s`V72;f3c|5z~3st7Oc(YT72 z;atAxB3CXu_Dz)-0)so&d=TQ!a$tBjgfFV{t^QQRrzzi&b$dQmdE6#~hPfq=iX3-2 zYr%_W-M;~=*AHOo2HXDX%y_3EqQBD;OS2w!AT)>)$k4?bV4J$X4x`D1fNp>4fY8yE z;rv7y4q3@9@>BVRZeqtZ8@EYz);D2nRK8gGNE-t>3*4YaI;VBX8e8;l$h5 
zVt&(NGw;AiOLaHzRpkvhIe;dRd>>*L2=jPLy?GZPCiHF2o4lv5mKCYssscn=mxL zbLCH&Mql2%mb|99>}E~)eysO*eisBxL6q)lnD60xsp{kV)*^mSwYMC(qwxVh?VAv| zZ*u?=ZZMGcmqUPqWdkr%PTtX8iIJhJAavle93JQx5)Z2daT6;2m6gIcVv=U>99mUGBw=6_ zY|BNx=W|gZo^=9c8VMDw7e$7;%JC?}itp2av0@eSL1^CggNlJV(69Nk5?EDX!^BFe z6M^FQe7m4(YwNQz#PtQvNV={n#`HQ40Ok`^Ii}ZmhTjI z$_7=T#-)AWlBMDcDKcPF6y$IrrD7fryC8&EnTZM#L;O3yblk&*WxWLp#o52984Cfs zDLzOH>U?h#rVbhNa#r5XLJ_*$b+9Ay--<^L9W0#sEKn5SW;V+mJTtOE*uf2WO7TG) zCAsjun+{G9^=zN&-%Lb&lcxW!eB)qhTy3a2R*_#Z*3 zFEGw=t02B22M!IyDZZ)L_RV`AIkd)(Bj$v8xC34dKkiVy=vJ>&^^C1v*DGo$}?O(Wk#dM<5STPE^YTn4k$1Hro8bms;Qa!GHbh zBt2{LA_&jo=(skL{OXn+sNJHphbh@`ce_G2u|g2Ka`D24Axe!Rj}M<{OMq`RUM%6k zh4d#*skpG(CVz7{T}cnR#Wc5b)=K73dNr1=l|n$nX-bBf4&jkpRNRPn2)s*xTtc~C zt(>9b1hHqgeOSCq!z#UzD+Xb;wK|)KL6#u;p=Yk(nNku-d2D2OM!A|vbEwWHW1ygr z%_=L%#A=?WeCy#c1)V8i^Wv{*8Yw%7Jv`u}dY9iFX^FI9+&d+IJaV|H6E(N-pw{9K zko(%?C&%s_j)Oy`eE6$7HwBYS&f1=zopXHG_PijjdUmOZhwX;^!0~-O-8AFz3ns~_ z=+9z@6b_|T6sdu)Cmv%BD+Hv&md&Tjze)MOg z_z)h-2=mLp5h@@DtRe{c)>3Un<;&*jyj7Ux*2J&LKY8Nh!X27t*vl58tX;5z=u(Lz zTzF!L1bK;e{w@H#md;wcNi{S~-J=9{UJr+oAIaG$TSL2(gBu;>SqxY)zJjX_cwgDfd%s#4l=WoLD=z9%~XyPD*&LnCz zT+}V9w|niF>eNbI6xMuO1w3(tj{GjEX-y8C>6_cp(nmm?4i!8uu=X#FCqbm9>={xN zIa*w=N7_j0twl!6lNiLn(`dEFqh_#p*o@aF6&>-7UU1~Gw)SD{`8;};VY}Z_B1{RYe!)ap za!UM25qD04lTw@&UjbbhgoObzPlz*wlPid`1ojg+Kww7ZCr`3g`L)Tt4ZR=)=Jpd1 ze@whNxljL3?Q>_lSi4M}J;XBd67Y(&&-PWiX?7ohQ3BTpTqm$jfc}*bcL;o)06i@P z{YM~PCGZ-7HwaJxAXL_*94087DTh|N*g01X+j3L&jyLTI@M_+)Ja}$w&DCdO<@`(iYjB}=oo2Q~4@_^uCLO_msqi8Cs`Vd9^2Zo}Zg-hAht?|07m zoj?8V_v1EfUxO;Ws!9>?xAA2(zJ0H1%I?iI;dX9xYV!2tc=HeM6@Gv4Rw-$}U9XVe zGq^Gi?rv$?$_p#90fu-}E>sw{A`}!vDpy=Z*c3@CR27B7A=GS!R1`ODS$E-Z9uTia z&4Zd3m#~7Y*62RS7Wg%$Ti$JMX zT!!POn3Bk8(~h)$D^Wel1}tnZp%3@oazekb=|Fn_6rt5>Z7auCFGvUijVn(VS=_dqL1A`6uM?Kmeo>xj$`{Gq=VMtCYR20?y567CBT-h{$*Z6uOQ%UR7x z#PwGfoKq9iXW4pM6`AGo1=V4j9)ETQdkJxGR}5PC*{+PkLj+#_Ro8$oNNO{2QFI!k zSu3xN&Ik7y>=wHTOY9dUWPW}<8i7InAo?6cO8-Ph050<3o&fZh=6gB-I!mt~8-jC# zMEn6&pk%UoHlZ2Gc$N(j>L8jt3RO0x_@#7~brMVwrnB5p#&jgo>+xlLvqUDhmS8dN 
z>+OUkezLcNcMpX4jo!B*#vA(jLN8FTHGNB1)0Z_ZnM!6gZL^f_`x@F}qZX$P0e&^+RHh8HAUw@V`nIAn#6wo z$3?JHXt;taBVn(ys5saup6YEZof&8ryX|N&ZHdci71%Mfe<-pJ3(D%bGF!Qk*s8v^ zXy`+?dEvz9gf&%bh~&&ZqDg*7lXR3@c!~ha^di|U&({dEs=7!R(OQMS%OkP7E%S(} z6=hE=@Kds%Bu^`um;W7W3tIEB$giP!!M2t$SowJJ!NDLD_=Ul%je)%&RXR{j4WYHb zp~36Qr42d?dN#{G=DwjLTXST*KmwP52iK}|5jU#S8a|sL<9QUOQ`3y~_}E3)ijgyOgxDHX7YAn;FyT4sNxt&PzgFvfNcltIMUiuH+nhB~W9 z7uG-|BI_bOO1d=32555}#f&=2UZRbiM~2^rO8(98_v#~xrE7;(ld#4VA+4XpI0-r` WY?hlNSKNLjptzI@C4|y delta 1478 zcmYjRTW=dh6y7slukB5os}tLaoi+j7Y;Mklq)D7h1I2+hiP|J>N)U35cS-Ctj+u2D zD2`gCJRm_U&-1%X%>Ou8s93~Ti)NTw>dfd{wc!0La&1REtp^j(WdT0{U*t z1%{drwT6ORaFiIXMJCfoDoE(#*QHH~f0?h!1Z@AHzT_+cg_#OnxyG_GC2c#=)^8VT zLTtvy9u%y{dEI`&fV8ckz5iiBXECJ9x~s&na7fg)HTax7jvM9E2$n0C)b(_BMI3K? z0>UEG)-G(E;rcVf&I3Y)QCXjEvOZr zMm~Z+%1<3>0Qg&+YrhCp;^+3h>i^L1E}2Tw?j+w8)!MfK39yUzkg?n^r7xtjMv})h z2frh3Yu6zv);iW=ZcJHL&E&d~Fjtc~gS(KhVAX;xeg#+ZAb~+SF-@^&mtA78}&!lExa28Ts9YAUsqSSRjAM|?pF4;G4XrvGtgZQ^&JCP7BBV(;b?iQzZ0OV{K>!soE{_NFQ^06 z%qR0kLQkjie4JQ?YzhdP+mcgjxjgS8npRk@EhD|Mnl~-Q$Ze!n@s}Is=DNXSqJOX( zUKZyEJ0UJkjfBO`!FM1gMu&#rqgZsq_8YgC|xmvg0oJSX>;x5(+$6 zsL79X!pQ+2y5Ik9-QVJZc@N0P*E6Rl_O)6~J~m5lS< zN7^7e+=_j>f54bGVG3_16UKWej#4bo@nM>bAS`NA+@OgpW+p!aRs1t~Tf0YV>0f}? 
jkYpcIH$_JY;skUj_!;rFBzZ{7 diff --git a/utils/augmentations.py b/utils/augmentations.py index a558735..7c8e0bc 100644 --- a/utils/augmentations.py +++ b/utils/augmentations.py @@ -12,7 +12,7 @@ import torch import torchvision.transforms as T import torchvision.transforms.functional as TF -from utils.general import LOGGER, check_version, colorstr, resample_segments, segment2box +from utils.general import LOGGER, check_version, colorstr, resample_segments, segment2box, xywhn2xyxy from utils.metrics import bbox_ioa IMAGENET_MEAN = 0.485, 0.456, 0.406 # RGB mean @@ -21,7 +21,7 @@ IMAGENET_STD = 0.229, 0.224, 0.225 # RGB standard deviation class Albumentations: # YOLOv5 Albumentations class (optional, only used if package is installed) - def __init__(self): + def __init__(self, size=640): self.transform = None prefix = colorstr('albumentations: ') try: @@ -29,6 +29,7 @@ class Albumentations: check_version(A.__version__, '1.0.3', hard=True) # version requirement T = [ + A.RandomResizedCrop(height=size, width=size, scale=(0.8, 1.0), ratio=(0.9, 1.11), p=0.0), A.Blur(p=0.01), A.MedianBlur(p=0.01), A.ToGray(p=0.01), @@ -281,7 +282,7 @@ def cutout(im, labels, p=0.5): # return unobscured labels if len(labels) and s > 0.03: box = np.array([xmin, ymin, xmax, ymax], dtype=np.float32) - ioa = bbox_ioa(box, labels[:, 1:5]) # intersection over area + ioa = bbox_ioa(box, xywhn2xyxy(labels[:, 1:5], w, h)) # intersection over area labels = labels[ioa < 0.60] # remove >60% obscured labels return labels @@ -303,15 +304,17 @@ def box_candidates(box1, box2, wh_thr=2, ar_thr=100, area_thr=0.1, eps=1e-16): return (w2 > wh_thr) & (h2 > wh_thr) & (w2 * h2 / (w1 * h1 + eps) > area_thr) & (ar < ar_thr) # candidates -def classify_albumentations(augment=True, - size=224, - scale=(0.08, 1.0), - hflip=0.5, - vflip=0.0, - jitter=0.4, - mean=IMAGENET_MEAN, - std=IMAGENET_STD, - auto_aug=False): +def classify_albumentations( + augment=True, + size=224, + scale=(0.08, 1.0), + ratio=(0.75, 1.0 / 0.75), 
# 0.75, 1.33 + hflip=0.5, + vflip=0.0, + jitter=0.4, + mean=IMAGENET_MEAN, + std=IMAGENET_STD, + auto_aug=False): # YOLOv5 classification Albumentations (optional, only used if package is installed) prefix = colorstr('albumentations: ') try: @@ -319,7 +322,7 @@ def classify_albumentations(augment=True, from albumentations.pytorch import ToTensorV2 check_version(A.__version__, '1.0.3', hard=True) # version requirement if augment: # Resize and crop - T = [A.RandomResizedCrop(height=size, width=size, scale=scale)] + T = [A.RandomResizedCrop(height=size, width=size, scale=scale, ratio=ratio)] if auto_aug: # TODO: implement AugMix, AutoAug & RandAug in albumentation LOGGER.info(f'{prefix}auto augmentations are currently not supported') @@ -338,7 +341,7 @@ def classify_albumentations(augment=True, return A.Compose(T) except ImportError: # package not installed, skip - pass + LOGGER.warning(f'{prefix}⚠️ not found, install with `pip install albumentations` (recommended)') except Exception as e: LOGGER.info(f'{prefix}{e}') diff --git a/utils/autoanchor.py b/utils/autoanchor.py index 0b49ab3..7e7e998 100644 --- a/utils/autoanchor.py +++ b/utils/autoanchor.py @@ -122,7 +122,7 @@ def kmean_anchors(dataset='./data/coco128.yaml', n=9, img_size=640, thr=4.0, gen # Filter i = (wh0 < 3.0).any(1).sum() if i: - LOGGER.info(f'{PREFIX}WARNING: Extremely small objects found: {i} of {len(wh0)} labels are < 3 pixels in size') + LOGGER.info(f'{PREFIX}WARNING ⚠️ Extremely small objects found: {i} of {len(wh0)} labels are <3 pixels in size') wh = wh0[(wh0 >= 2.0).any(1)].astype(np.float32) # filter > 2 pixels # wh = wh * (npr.rand(wh.shape[0], 1) * 0.9 + 0.1) # multiply by random scale 0-1 @@ -134,7 +134,7 @@ def kmean_anchors(dataset='./data/coco128.yaml', n=9, img_size=640, thr=4.0, gen k = kmeans(wh / s, n, iter=30)[0] * s # points assert n == len(k) # kmeans may return fewer points than requested if wh is insufficient or too similar except Exception: - LOGGER.warning(f'{PREFIX}WARNING: 
switching strategies from kmeans to random init') + LOGGER.warning(f'{PREFIX}WARNING ⚠️ switching strategies from kmeans to random init') k = np.sort(npr.rand(n * 2)).reshape(n, 2) * img_size # random init wh, wh0 = (torch.tensor(x, dtype=torch.float32) for x in (wh, wh0)) k = print_results(k, verbose=False) diff --git a/utils/autobatch.py b/utils/autobatch.py index 641b055..bdeb91c 100644 --- a/utils/autobatch.py +++ b/utils/autobatch.py @@ -19,7 +19,7 @@ def check_train_batch_size(model, imgsz=640, amp=True): def autobatch(model, imgsz=640, fraction=0.8, batch_size=16): - # Automatically estimate best batch size to use `fraction` of available CUDA memory + # Automatically estimate best YOLOv5 batch size to use `fraction` of available CUDA memory # Usage: # import torch # from utils.autobatch import autobatch @@ -33,6 +33,9 @@ def autobatch(model, imgsz=640, fraction=0.8, batch_size=16): if device.type == 'cpu': LOGGER.info(f'{prefix}CUDA not detected, using default CPU batch-size {batch_size}') return batch_size + if torch.backends.cudnn.benchmark: + LOGGER.info(f'{prefix} ⚠️ Requires torch.backends.cudnn.benchmark=False, using default batch-size {batch_size}') + return batch_size # Inspect CUDA memory gb = 1 << 30 # bytes to GiB (1024 ** 3) @@ -62,8 +65,8 @@ def autobatch(model, imgsz=640, fraction=0.8, batch_size=16): b = batch_sizes[max(i - 1, 0)] # select prior safe point if b < 1 or b > 1024: # b outside of safe range b = batch_size - LOGGER.warning(f'{prefix}WARNING: ⚠️ CUDA anomaly detected, recommend restart environment and retry command.') + LOGGER.warning(f'{prefix}WARNING ⚠️ CUDA anomaly detected, recommend restart environment and retry command.') - fraction = np.polyval(p, b) / t # actual fraction predicted + fraction = (np.polyval(p, b) + r + a) / t # actual fraction predicted LOGGER.info(f'{prefix}Using batch-size {b} for {d} {t * fraction:.2f}G/{t:.2f}G ({fraction * 100:.0f}%) ✅') return b diff --git a/utils/dataloaders.py b/utils/dataloaders.py 
index c1ad1f1..6cd1da6 100644 --- a/utils/dataloaders.py +++ b/utils/dataloaders.py @@ -40,6 +40,7 @@ IMG_FORMATS = 'bmp', 'dng', 'jpeg', 'jpg', 'mpo', 'png', 'tif', 'tiff', 'webp', VID_FORMATS = 'asf', 'avi', 'gif', 'm4v', 'mkv', 'mov', 'mp4', 'mpeg', 'mpg', 'ts', 'wmv' # include video suffixes BAR_FORMAT = '{l_bar}{bar:10}{r_bar}{bar:-10b}' # tqdm bar format LOCAL_RANK = int(os.getenv('LOCAL_RANK', -1)) # https://pytorch.org/docs/stable/elastic/run.html +RANK = int(os.getenv('RANK', -1)) PIN_MEMORY = str(os.getenv('PIN_MEMORY', True)).lower() == 'true' # global pin_memory for dataloaders # Get orientation exif tag @@ -116,7 +117,7 @@ def create_dataloader(path, prefix='', shuffle=False): if rect and shuffle: - LOGGER.warning('WARNING: --rect is incompatible with DataLoader shuffle, setting shuffle=False') + LOGGER.warning('WARNING ⚠️ --rect is incompatible with DataLoader shuffle, setting shuffle=False') shuffle = False with torch_distributed_zero_first(rank): # init dataset *.cache only once if DDP dataset = LoadImagesAndLabels( @@ -139,7 +140,7 @@ def create_dataloader(path, sampler = None if rank == -1 else distributed.DistributedSampler(dataset, shuffle=shuffle) loader = DataLoader if image_weights else InfiniteDataLoader # only DataLoader allows for attribute updates generator = torch.Generator() - generator.manual_seed(0) + generator.manual_seed(6148914691236517205 + RANK) return loader(dataset, batch_size=batch_size, shuffle=shuffle and sampler is None, @@ -185,6 +186,55 @@ class _RepeatSampler: yield from iter(self.sampler) +class LoadScreenshots: + # YOLOv5 screenshot dataloader, i.e. 
`python detect.py --source "screen 0 100 100 512 256"` + def __init__(self, source, img_size=640, stride=32, auto=True, transforms=None): + # source = [screen_number left top width height] (pixels) + check_requirements('mss') + import mss + + source, *params = source.split() + self.screen, left, top, width, height = 0, None, None, None, None # default to full screen 0 + if len(params) == 1: + self.screen = int(params[0]) + elif len(params) == 4: + left, top, width, height = (int(x) for x in params) + elif len(params) == 5: + self.screen, left, top, width, height = (int(x) for x in params) + self.img_size = img_size + self.stride = stride + self.transforms = transforms + self.auto = auto + self.mode = 'stream' + self.frame = 0 + self.sct = mss.mss() + + # Parse monitor shape + monitor = self.sct.monitors[self.screen] + self.top = monitor["top"] if top is None else (monitor["top"] + top) + self.left = monitor["left"] if left is None else (monitor["left"] + left) + self.width = width or monitor["width"] + self.height = height or monitor["height"] + self.monitor = {"left": self.left, "top": self.top, "width": self.width, "height": self.height} + + def __iter__(self): + return self + + def __next__(self): + # mss screen capture: get raw pixels from the screen as np array + im0 = np.array(self.sct.grab(self.monitor))[:, :, :3] # [:, :, :3] BGRA to BGR + s = f"screen {self.screen} (LTWH): {self.left},{self.top},{self.width},{self.height}: " + + if self.transforms: + im = self.transforms(im0) # transforms + else: + im = letterbox(im0, self.img_size, stride=self.stride, auto=self.auto)[0] # padded resize + im = im.transpose((2, 0, 1))[::-1] # HWC to CHW, BGR to RGB + im = np.ascontiguousarray(im) # contiguous + self.frame += 1 + return str(self.screen), im, im0, None, s # screen, img, original img, im0s, s + + class LoadImages: # YOLOv5 image/video dataloader, i.e. 
`python detect.py --source image.jpg/vid.mp4` def __init__(self, path, img_size=640, stride=32, auto=True, transforms=None, vid_stride=1): @@ -232,8 +282,9 @@ class LoadImages: if self.video_flag[self.count]: # Read video self.mode = 'video' - ret_val, im0 = self.cap.read() - self.cap.set(cv2.CAP_PROP_POS_FRAMES, self.vid_stride * (self.frame + 1)) # read at vid_stride + for _ in range(self.vid_stride): + self.cap.grab() + ret_val, im0 = self.cap.retrieve() while not ret_val: self.count += 1 self.cap.release() @@ -328,7 +379,7 @@ class LoadStreams: self.auto = auto and self.rect self.transforms = transforms # optional if not self.rect: - LOGGER.warning('WARNING: Stream shapes differ. For optimal performance supply similarly-shaped streams.') + LOGGER.warning('WARNING ⚠️ Stream shapes differ. For optimal performance supply similarly-shaped streams.') def update(self, i, cap, stream): # Read stream `i` frames in daemon thread @@ -341,7 +392,7 @@ class LoadStreams: if success: self.imgs[i] = im else: - LOGGER.warning('WARNING: Video stream unresponsive, please check your IP camera connection.') + LOGGER.warning('WARNING ⚠️ Video stream unresponsive, please check your IP camera connection.') self.imgs[i] = np.zeros_like(self.imgs[i]) cap.open(stream) # re-open stream if signal was lost time.sleep(0.0) # wait time @@ -403,7 +454,7 @@ class LoadImagesAndLabels(Dataset): self.mosaic_border = [-img_size // 2, -img_size // 2] self.stride = stride self.path = path - self.albumentations = Albumentations() if augment else None + self.albumentations = Albumentations(size=img_size) if augment else None try: f = [] # image files @@ -455,7 +506,7 @@ class LoadImagesAndLabels(Dataset): self.im_files = list(cache.keys()) # update self.label_files = img2label_paths(cache.keys()) # update n = len(shapes) # number of images - bi = np.floor(np.arange(n) / batch_size).astype(np.int) # batch index + bi = np.floor(np.arange(n) / batch_size).astype(int) # batch index nb = bi[-1] + 1 # 
number of batches self.batch = bi # batch index of image self.n = n @@ -484,6 +535,7 @@ class LoadImagesAndLabels(Dataset): self.im_files = [self.im_files[i] for i in irect] self.label_files = [self.label_files[i] for i in irect] self.labels = [self.labels[i] for i in irect] + self.segments = [self.segments[i] for i in irect] self.shapes = s[irect] # wh ar = ar[irect] @@ -497,7 +549,7 @@ class LoadImagesAndLabels(Dataset): elif mini > 1: shapes[i] = [1, 1 / mini] - self.batch_shapes = np.ceil(np.array(shapes) * img_size / stride + pad).astype(np.int) * stride + self.batch_shapes = np.ceil(np.array(shapes) * img_size / stride + pad).astype(int) * stride # Cache images into RAM/disk for faster training (WARNING: large datasets may exceed system resources) self.ims = [None] * n @@ -542,7 +594,7 @@ class LoadImagesAndLabels(Dataset): if msgs: LOGGER.info('\n'.join(msgs)) if nf == 0: - LOGGER.warning(f'{prefix}WARNING: No labels found in {path}. {HELP_URL}') + LOGGER.warning(f'{prefix}WARNING ⚠️ No labels found in {path}. 
{HELP_URL}') x['hash'] = get_hash(self.label_files + self.im_files) x['results'] = nf, nm, ne, nc, len(self.im_files) x['msgs'] = msgs # warnings @@ -552,7 +604,7 @@ class LoadImagesAndLabels(Dataset): path.with_suffix('.cache.npy').rename(path) # remove .npy suffix LOGGER.info(f'{prefix}New cache created: {path}') except Exception as e: - LOGGER.warning(f'{prefix}WARNING: Cache directory {path.parent} is not writeable: {e}') # not writeable + LOGGER.warning(f'{prefix}WARNING ⚠️ Cache directory {path.parent} is not writeable: {e}') # not writeable return x def __len__(self): @@ -867,7 +919,7 @@ def extract_boxes(path=DATASETS_DIR / 'coco128'): # from utils.dataloaders impo b = x[1:] * [w, h, w, h] # box # b[2:] = b[2:].max() # rectangle to square b[2:] = b[2:] * 1.2 + 3 # pad - b = xywh2xyxy(b.reshape(-1, 4)).ravel().astype(np.int) + b = xywh2xyxy(b.reshape(-1, 4)).ravel().astype(int) b[[0, 2]] = np.clip(b[[0, 2]], 0, w) # clip boxes outside of image b[[1, 3]] = np.clip(b[[1, 3]], 0, h) @@ -916,7 +968,7 @@ def verify_image_label(args): f.seek(-2, 2) if f.read() != b'\xff\xd9': # corrupt JPEG ImageOps.exif_transpose(Image.open(im_file)).save(im_file, 'JPEG', subsampling=0, quality=100) - msg = f'{prefix}WARNING: {im_file}: corrupt JPEG restored and saved' + msg = f'{prefix}WARNING ⚠️ {im_file}: corrupt JPEG restored and saved' # verify labels if os.path.isfile(lb_file): @@ -938,7 +990,7 @@ def verify_image_label(args): lb = lb[i] # remove duplicates if segments: segments = [segments[x] for x in i] - msg = f'{prefix}WARNING: {im_file}: {nl - len(i)} duplicate labels removed' + msg = f'{prefix}WARNING ⚠️ {im_file}: {nl - len(i)} duplicate labels removed' else: ne = 1 # label empty lb = np.zeros((0, 5), dtype=np.float32) @@ -948,7 +1000,7 @@ def verify_image_label(args): return im_file, lb, shape, segments, nm, nf, ne, nc, msg except Exception as e: nc = 1 - msg = f'{prefix}WARNING: {im_file}: ignoring corrupt image/label: {e}' + msg = f'{prefix}WARNING ⚠️ {im_file}: 
ignoring corrupt image/label: {e}' return [None, None, None, None, nm, nf, ne, nc, msg] @@ -1011,7 +1063,7 @@ class HUBDatasetStats(): im = im.resize((int(im.width * r), int(im.height * r))) im.save(f_new, 'JPEG', quality=50, optimize=True) # save except Exception as e: # use OpenCV - print(f'WARNING: HUB ops PIL failure {f}: {e}') + LOGGER.info(f'WARNING ⚠️ HUB ops PIL failure {f}: {e}') im = cv2.imread(f) im_height, im_width = im.shape[:2] r = max_dim / max(im_height, im_width) # ratio @@ -1118,7 +1170,7 @@ def create_classification_dataloader(path, nw = min([os.cpu_count() // max(nd, 1), batch_size if batch_size > 1 else 0, workers]) sampler = None if rank == -1 else distributed.DistributedSampler(dataset, shuffle=shuffle) generator = torch.Generator() - generator.manual_seed(0) + generator.manual_seed(6148914691236517205 + RANK) return InfiniteDataLoader(dataset, batch_size=batch_size, shuffle=shuffle and sampler is None, diff --git a/utils/docker/Dockerfile b/utils/docker/Dockerfile index 4b9367c..764ee27 100644 --- a/utils/docker/Dockerfile +++ b/utils/docker/Dockerfile @@ -3,7 +3,7 @@ # Image is CUDA-optimized for YOLOv5 single/multi-GPU training and inference # Start FROM NVIDIA PyTorch image https://ngc.nvidia.com/catalog/containers/nvidia:pytorch -FROM nvcr.io/nvidia/pytorch:22.07-py3 +FROM nvcr.io/nvidia/pytorch:22.08-py3 RUN rm -rf /opt/pytorch # remove 1.2GB dir # Downloads to user config dir diff --git a/utils/downloads.py b/utils/downloads.py index dd2698f..73b8334 100644 --- a/utils/downloads.py +++ b/utils/downloads.py @@ -16,13 +16,13 @@ import requests import torch -def is_url(url, check_online=True): - # Check if online file exists +def is_url(url, check=True): + # Check if string is URL and check if URL exists try: url = str(url) result = urllib.parse.urlparse(url) assert all([result.scheme, result.netloc, result.path]) # check if is url - return (urllib.request.urlopen(url).getcode() == 200) if check_online else True # check if exists online + 
return (urllib.request.urlopen(url).getcode() == 200) if check else True # check if exists online except (AssertionError, urllib.request.HTTPError): return False @@ -87,9 +87,7 @@ def attempt_download(file, repo='ultralytics/yolov5', release='v6.2'): return file # GitHub assets - assets = [ - 'yolov5n.pt', 'yolov5s.pt', 'yolov5m.pt', 'yolov5l.pt', 'yolov5x.pt', 'yolov5n6.pt', 'yolov5s6.pt', - 'yolov5m6.pt', 'yolov5l6.pt', 'yolov5x6.pt'] + assets = [f'yolov5{size}{suffix}.pt' for size in 'nsmlx' for suffix in ('', '6', '-cls', '-seg')] # default try: tag, assets = github_assets(repo, release) except Exception: @@ -107,7 +105,6 @@ def attempt_download(file, repo='ultralytics/yolov5', release='v6.2'): safe_download( file, url=f'https://github.com/{repo}/releases/download/{tag}/{name}', - url2=f'https://storage.googleapis.com/{repo}/{tag}/{name}', # backup url (optional) min_bytes=1E5, error_msg=f'{file} missing, try downloading from https://github.com/{repo}/releases/{tag} or {url3}') diff --git a/utils/general.py b/utils/general.py index cae63fd..de7871c 100644 --- a/utils/general.py +++ b/utils/general.py @@ -17,6 +17,7 @@ import signal import sys import time import urllib +from copy import deepcopy from datetime import datetime from itertools import repeat from multiprocessing.pool import ThreadPool @@ -33,7 +34,7 @@ import torch import torchvision import yaml -from utils import TryExcept +from utils import TryExcept, emojis from utils.downloads import gsutil_getsize from utils.metrics import box_iou, fitness @@ -42,8 +43,8 @@ ROOT = FILE.parents[1] # YOLOv5 root directory RANK = int(os.getenv('RANK', -1)) # Settings -DATASETS_DIR = ROOT.parent / 'datasets' # YOLOv5 datasets directory NUM_THREADS = min(8, max(1, os.cpu_count() - 1)) # number of YOLOv5 multiprocessing threads +DATASETS_DIR = Path(os.getenv('YOLOv5_DATASETS_DIR', ROOT.parent / 'datasets')) # global datasets directory AUTOINSTALL = str(os.getenv('YOLOv5_AUTOINSTALL', True)).lower() == 'true' # global 
auto-install mode VERBOSE = str(os.getenv('YOLOv5_VERBOSE', True)).lower() == 'true' # global verbose mode FONT = 'Arial.ttf' # https://ultralytics.com/assets/Arial.ttf @@ -222,7 +223,7 @@ def init_seeds(seed=0, deterministic=False): torch.manual_seed(seed) torch.cuda.manual_seed(seed) torch.cuda.manual_seed_all(seed) # for Multi-GPU, exception safe - torch.backends.cudnn.benchmark = True # for faster training + # torch.backends.cudnn.benchmark = True # AutoBatch problem https://github.com/ultralytics/yolov5/issues/9287 if deterministic and check_version(torch.__version__, '1.12.0'): # https://github.com/ultralytics/yolov5/pull/8213 torch.use_deterministic_algorithms(True) torch.backends.cudnn.deterministic = True @@ -247,11 +248,6 @@ def get_latest_run(search_dir='.'): return max(last_list, key=os.path.getctime) if last_list else '' -def emojis(str=''): - # Return platform-dependent emoji-safe version of string - return str.encode().decode('ascii', 'ignore') if platform.system() == 'Windows' else str - - def file_age(path=__file__): # Return days since last file update dt = (datetime.now() - datetime.fromtimestamp(Path(path).stat().st_mtime)) # delta @@ -332,7 +328,7 @@ def check_version(current='0.0.0', minimum='0.0.0', name='version ', pinned=Fals # Check version vs. 
required version current, minimum = (pkg.parse_version(x) for x in (current, minimum)) result = (current == minimum) if pinned else (current >= minimum) # bool - s = f'WARNING: ⚠️ {name}{minimum} is required by YOLOv5, but {name}{current} is currently installed' # string + s = f'WARNING ⚠️ {name}{minimum} is required by YOLOv5, but {name}{current} is currently installed' # string if hard: assert result, emojis(s) # assert min requirements met if verbose and not result: @@ -341,40 +337,38 @@ def check_version(current='0.0.0', minimum='0.0.0', name='version ', pinned=Fals @TryExcept() -def check_requirements(requirements=ROOT / 'requirements.txt', exclude=(), install=True, cmds=()): - # Check installed dependencies meet YOLOv5 requirements (pass *.txt file or list of packages) +def check_requirements(requirements=ROOT / 'requirements.txt', exclude=(), install=True, cmds=''): + # Check installed dependencies meet YOLOv5 requirements (pass *.txt file or list of packages or single package str) prefix = colorstr('red', 'bold', 'requirements:') check_python() # check python version - if isinstance(requirements, (str, Path)): # requirements.txt file - file = Path(requirements) - assert file.exists(), f"{prefix} {file.resolve()} not found, check failed." + if isinstance(requirements, Path): # requirements.txt file + file = requirements.resolve() + assert file.exists(), f"{prefix} {file} not found, check failed." 
with file.open() as f: requirements = [f'{x.name}{x.specifier}' for x in pkg.parse_requirements(f) if x.name not in exclude] - else: # list or tuple of packages - requirements = [x for x in requirements if x not in exclude] + elif isinstance(requirements, str): + requirements = [requirements] - n = 0 # number of packages updates - for i, r in enumerate(requirements): + s = '' + n = 0 + for r in requirements: try: pkg.require(r) - except Exception: # DistributionNotFound or VersionConflict if requirements not met - s = f"{prefix} {r} not found and is required by YOLOv5" - if install and AUTOINSTALL: # check environment variable - LOGGER.info(f"{s}, attempting auto-update...") - try: - assert check_online(), f"'pip install {r}' skipped (offline)" - LOGGER.info(check_output(f'pip install "{r}" {cmds[i] if cmds else ""}', shell=True).decode()) - n += 1 - except Exception as e: - LOGGER.warning(f'{prefix} {e}') - else: - LOGGER.info(f'{s}. Please install and rerun your command.') + except (pkg.VersionConflict, pkg.DistributionNotFound): # exception if requirements not met + s += f'"{r}" ' + n += 1 - if n: # if packages updated - source = file.resolve() if 'file' in locals() else requirements - s = f"{prefix} {n} package{'s' * (n > 1)} updated per {source}\n" \ - f"{prefix} ⚠️ {colorstr('bold', 'Restart runtime or rerun command for updates to take effect')}\n" - LOGGER.info(s) + if s and install and AUTOINSTALL: # check environment variable + LOGGER.info(f"{prefix} YOLOv5 requirement{'s' * (n > 1)} {s}not found, attempting AutoUpdate...") + try: + assert check_online(), "AutoUpdate skipped (offline)" + LOGGER.info(check_output(f'pip install {s} {cmds}', shell=True).decode()) + source = file if 'file' in locals() else requirements + s = f"{prefix} {n} package{'s' * (n > 1)} updated per {source}\n" \ + f"{prefix} ⚠️ {colorstr('bold', 'Restart runtime or rerun command for updates to take effect')}\n" + LOGGER.info(s) + except Exception as e: + LOGGER.warning(f'{prefix} ❌ 
{e}') def check_img_size(imgsz, s=32, floor=0): @@ -385,7 +379,7 @@ def check_img_size(imgsz, s=32, floor=0): imgsz = list(imgsz) # convert to list if tuple new_size = [max(make_divisible(x, int(s)), floor) for x in imgsz] if new_size != imgsz: - LOGGER.warning(f'WARNING: --img-size {imgsz} must be multiple of max stride {s}, updating to {new_size}') + LOGGER.warning(f'WARNING ⚠️ --img-size {imgsz} must be multiple of max stride {s}, updating to {new_size}') return new_size @@ -400,7 +394,7 @@ def check_imshow(): cv2.waitKey(1) return True except Exception as e: - LOGGER.warning(f'WARNING: Environment does not support cv2.imshow() or PIL Image.show() image displays\n{e}') + LOGGER.warning(f'WARNING ⚠️ Environment does not support cv2.imshow() or PIL Image.show() image displays\n{e}') return False @@ -470,8 +464,7 @@ def check_dataset(data, autodownload=True): # Read yaml (optional) if isinstance(data, (str, Path)): - with open(data, errors='ignore') as f: - data = yaml.safe_load(f) # dictionary + data = yaml_load(data) # dictionary # Checks for k in 'train', 'val', 'names': @@ -486,7 +479,13 @@ def check_dataset(data, autodownload=True): path = (ROOT / path).resolve() for k in 'train', 'val', 'test': if data.get(k): # prepend path - data[k] = str(path / data[k]) if isinstance(data[k], str) else [str(path / x) for x in data[k]] + if isinstance(data[k], str): + x = (path / data[k]).resolve() + if not x.exists() and data[k].startswith('../'): + x = (path / data[k][3:]).resolve() + data[k] = str(x) + else: + data[k] = [str((path / x).resolve()) for x in data[k]] # Parse yaml train, val, test, s = (data.get(x) for x in ('train', 'val', 'test', 'download')) @@ -497,13 +496,12 @@ def check_dataset(data, autodownload=True): if not s or not autodownload: raise Exception('Dataset not found ❌') t = time.time() - root = path.parent if 'path' in data else '..' # unzip directory i.e. 
'../' if s.startswith('http') and s.endswith('.zip'): # URL f = Path(s).name # filename LOGGER.info(f'Downloading {s} to {f}...') torch.hub.download_url_to_file(s, f) - Path(root).mkdir(parents=True, exist_ok=True) # create root - ZipFile(f).extractall(path=root) # unzip + Path(DATASETS_DIR).mkdir(parents=True, exist_ok=True) # create root + ZipFile(f).extractall(path=DATASETS_DIR) # unzip Path(f).unlink() # remove zip r = None # success elif s.startswith('bash '): # bash script @@ -512,7 +510,7 @@ def check_dataset(data, autodownload=True): else: # python script r = exec(s, {'yaml': data}) # return None dt = f'({round(time.time() - t, 1)}s)' - s = f"success ✅ {dt}, saved to {colorstr('bold', root)}" if r in (0, None) else f"failure {dt} ❌" + s = f"success ✅ {dt}, saved to {colorstr('bold', DATASETS_DIR)}" if r in (0, None) else f"failure {dt} ❌" LOGGER.info(f"Dataset download {s}") check_font('Arial.ttf' if is_ascii(data['names']) else 'Arial.Unicode.ttf', progress=True) # download fonts return data # dictionary @@ -537,7 +535,7 @@ def check_amp(model): f = ROOT / 'data' / 'images' / 'bus.jpg' # image to check im = f if f.exists() else 'https://ultralytics.com/images/bus.jpg' if check_online() else np.ones((640, 640, 3)) try: - assert amp_allclose(model, im) or amp_allclose(DetectMultiBackend('yolov5n.pt', device), im) + assert amp_allclose(deepcopy(model), im) or amp_allclose(DetectMultiBackend('yolov5n.pt', device), im) LOGGER.info(f'{prefix}checks passed ✅') return True except Exception: @@ -569,10 +567,10 @@ def download(url, dir='.', unzip=True, delete=True, curl=False, threads=1, retry def download_one(url, dir): # Download 1 file success = True - f = dir / Path(url).name # filename - if Path(url).is_file(): # exists in current path - Path(url).rename(f) # move to dir - elif not f.exists(): + if Path(url).is_file(): + f = Path(url) # filename + else: # does not exist + f = dir / Path(url).name LOGGER.info(f'Downloading {url} to {f}...') for i in range(retry 
+ 1): if curl: @@ -586,9 +584,9 @@ def download(url, dir='.', unzip=True, delete=True, curl=False, threads=1, retry if success: break elif i < retry: - LOGGER.warning(f'Download failure, retrying {i + 1}/{retry} {url}...') + LOGGER.warning(f'⚠️ Download failure, retrying {i + 1}/{retry} {url}...') else: - LOGGER.warning(f'Failed to download {url}...') + LOGGER.warning(f'❌ Failed to download {url}...') if unzip and success and f.suffix in ('.zip', '.tar', '.gz'): LOGGER.info(f'Unzipping {f}...') @@ -727,7 +725,7 @@ def xywhn2xyxy(x, w=640, h=640, padw=0, padh=0): def xyxy2xywhn(x, w=640, h=640, clip=False, eps=0.0): # Convert nx4 boxes from [x1, y1, x2, y2] to [x, y, w, h] normalized where xy1=top-left, xy2=bottom-right if clip: - clip_coords(x, (h - eps, w - eps)) # warning: inplace clip + clip_boxes(x, (h - eps, w - eps)) # warning: inplace clip y = x.clone() if isinstance(x, torch.Tensor) else np.copy(x) y[:, 0] = ((x[:, 0] + x[:, 2]) / 2) / w # x center y[:, 1] = ((x[:, 1] + x[:, 3]) / 2) / h # y center @@ -771,7 +769,23 @@ def resample_segments(segments, n=1000): return segments -def scale_coords(img1_shape, coords, img0_shape, ratio_pad=None): +def scale_boxes(img1_shape, boxes, img0_shape, ratio_pad=None): + # Rescale boxes (xyxy) from img1_shape to img0_shape + if ratio_pad is None: # calculate from img0_shape + gain = min(img1_shape[0] / img0_shape[0], img1_shape[1] / img0_shape[1]) # gain = old / new + pad = (img1_shape[1] - img0_shape[1] * gain) / 2, (img1_shape[0] - img0_shape[0] * gain) / 2 # wh padding + else: + gain = ratio_pad[0][0] + pad = ratio_pad[1] + + boxes[:, [0, 2]] -= pad[0] # x padding + boxes[:, [1, 3]] -= pad[1] # y padding + boxes[:, :4] /= gain + clip_boxes(boxes, img0_shape) + return boxes + + +def scale_segments(img1_shape, segments, img0_shape, ratio_pad=None): # Rescale coords (xyxy) from img1_shape to img0_shape if ratio_pad is None: # calculate from img0_shape gain = min(img1_shape[0] / img0_shape[0], img1_shape[1] / 
img0_shape[1]) # gain = old / new @@ -780,15 +794,15 @@ def scale_coords(img1_shape, coords, img0_shape, ratio_pad=None): gain = ratio_pad[0][0] pad = ratio_pad[1] - coords[:, [0, 2]] -= pad[0] # x padding - coords[:, [1, 3]] -= pad[1] # y padding - coords[:, :4] /= gain - clip_coords(coords, img0_shape) - return coords + segments[:, 0] -= pad[0] # x padding + segments[:, 1] -= pad[1] # y padding + segments /= gain + clip_segments(segments, img0_shape) + return segments -def clip_coords(boxes, shape): - # Clip bounding xyxy bounding boxes to image shape (height, width) +def clip_boxes(boxes, shape): + # Clip boxes (xyxy) to image shape (height, width) if isinstance(boxes, torch.Tensor): # faster individually boxes[:, 0].clamp_(0, shape[1]) # x1 boxes[:, 1].clamp_(0, shape[0]) # y1 @@ -799,15 +813,28 @@ def clip_coords(boxes, shape): boxes[:, [1, 3]] = boxes[:, [1, 3]].clip(0, shape[0]) # y1, y2 -def non_max_suppression(prediction, - conf_thres=0.25, - iou_thres=0.45, - classes=None, - agnostic=False, - multi_label=False, - labels=(), - max_det=300): - """Non-Maximum Suppression (NMS) on inference results to reject overlapping bounding boxes +def clip_segments(boxes, shape): + # Clip segments (xy1,xy2,...) 
to image shape (height, width) + if isinstance(boxes, torch.Tensor): # faster individually + boxes[:, 0].clamp_(0, shape[1]) # x + boxes[:, 1].clamp_(0, shape[0]) # y + else: # np.array (faster grouped) + boxes[:, 0] = boxes[:, 0].clip(0, shape[1]) # x + boxes[:, 1] = boxes[:, 1].clip(0, shape[0]) # y + + +def non_max_suppression( + prediction, + conf_thres=0.25, + iou_thres=0.45, + classes=None, + agnostic=False, + multi_label=False, + labels=(), + max_det=300, + nm=0, # number of masks +): + """Non-Maximum Suppression (NMS) on inference results to reject overlapping detections Returns: list of detections, on (n,6) tensor per image [xyxy, conf, cls] @@ -817,7 +844,7 @@ def non_max_suppression(prediction, prediction = prediction[0] # select only inference output bs = prediction.shape[0] # batch size - nc = prediction.shape[2] - 5 # number of classes + nc = prediction.shape[2] - nm - 5 # number of classes xc = prediction[..., 4] > conf_thres # candidates # Checks @@ -828,13 +855,14 @@ def non_max_suppression(prediction, # min_wh = 2 # (pixels) minimum box width and height max_wh = 7680 # (pixels) maximum box width and height max_nms = 30000 # maximum number of boxes into torchvision.ops.nms() - time_limit = 0.3 + 0.03 * bs # seconds to quit after + time_limit = 0.5 + 0.05 * bs # seconds to quit after redundant = True # require redundant detections multi_label &= nc > 1 # multiple labels per box (adds 0.5ms/img) merge = False # use merge-NMS t = time.time() - output = [torch.zeros((0, 6), device=prediction.device)] * bs + mi = 5 + nc # mask start index + output = [torch.zeros((0, 6 + nm), device=prediction.device)] * bs for xi, x in enumerate(prediction): # image index, image inference # Apply constraints # x[((x[..., 2:4] < min_wh) | (x[..., 2:4] > max_wh)).any(1), 4] = 0 # width-height @@ -843,7 +871,7 @@ def non_max_suppression(prediction, # Cat apriori labels if autolabelling if labels and len(labels[xi]): lb = labels[xi] - v = torch.zeros((len(lb), nc + 5), 
device=x.device) + v = torch.zeros((len(lb), nc + nm + 5), device=x.device) v[:, :4] = lb[:, 1:5] # box v[:, 4] = 1.0 # conf v[range(len(lb)), lb[:, 0].long() + 5] = 1.0 # cls @@ -856,16 +884,17 @@ def non_max_suppression(prediction, # Compute conf x[:, 5:] *= x[:, 4:5] # conf = obj_conf * cls_conf - # Box (center x, center y, width, height) to (x1, y1, x2, y2) - box = xywh2xyxy(x[:, :4]) + # Box/Mask + box = xywh2xyxy(x[:, :4]) # center_x, center_y, width, height) to (x1, y1, x2, y2) + mask = x[:, mi:] # zero columns if no masks # Detections matrix nx6 (xyxy, conf, cls) if multi_label: - i, j = (x[:, 5:] > conf_thres).nonzero(as_tuple=False).T - x = torch.cat((box[i], x[i, j + 5, None], j[:, None].float()), 1) + i, j = (x[:, 5:mi] > conf_thres).nonzero(as_tuple=False).T + x = torch.cat((box[i], x[i, 5 + j, None], j[:, None].float(), mask[i]), 1) else: # best class only - conf, j = x[:, 5:].max(1, keepdim=True) - x = torch.cat((box, conf, j.float()), 1)[conf.view(-1) > conf_thres] + conf, j = x[:, 5:mi].max(1, keepdim=True) + x = torch.cat((box, conf, j.float(), mask), 1)[conf.view(-1) > conf_thres] # Filter by class if classes is not None: @@ -881,6 +910,8 @@ def non_max_suppression(prediction, continue elif n > max_nms: # excess boxes x = x[x[:, 4].argsort(descending=True)[:max_nms]] # sort by confidence + else: + x = x[x[:, 4].argsort(descending=True)] # sort by confidence # Batched NMS c = x[:, 5:6] * (0 if agnostic else max_wh) # classes @@ -898,7 +929,7 @@ def non_max_suppression(prediction, output[xi] = x[i] if (time.time() - t) > time_limit: - LOGGER.warning(f'WARNING: NMS time limit {time_limit:.3f}s exceeded') + LOGGER.warning(f'WARNING ⚠️ NMS time limit {time_limit:.3f}s exceeded') break # time limit exceeded return output @@ -975,7 +1006,7 @@ def apply_classifier(x, model, img, im0): d[:, :4] = xywh2xyxy(b).long() # Rescale boxes from img_size to im0 size - scale_coords(img.shape[2:], d[:, :4], im0[i].shape) + scale_boxes(img.shape[2:], d[:, :4], 
im0[i].shape) # Classes pred_cls1 = d[:, 5].long() diff --git a/utils/loggers/__init__.py b/utils/loggers/__init__.py index 3aee358..941d09e 100644 --- a/utils/loggers/__init__.py +++ b/utils/loggers/__init__.py @@ -11,13 +11,13 @@ import pkg_resources as pkg import torch from torch.utils.tensorboard import SummaryWriter -from utils.general import colorstr, cv2 +from utils.general import LOGGER, colorstr, cv2 from utils.loggers.clearml.clearml_utils import ClearmlLogger from utils.loggers.wandb.wandb_utils import WandbLogger from utils.plots import plot_images, plot_labels, plot_results from utils.torch_utils import de_parallel -LOGGERS = ('csv', 'tb', 'wandb', 'clearml') # *.csv, TensorBoard, Weights & Biases, ClearML +LOGGERS = ('csv', 'tb', 'wandb', 'clearml', 'comet') # *.csv, TensorBoard, Weights & Biases, ClearML RANK = int(os.getenv('RANK', -1)) try: @@ -41,6 +41,18 @@ try: except (ImportError, AssertionError): clearml = None +try: + if RANK not in [0, -1]: + comet_ml = None + else: + import comet_ml + + assert hasattr(comet_ml, '__version__') # verify package import not local dir + from utils.loggers.comet import CometLogger + +except (ModuleNotFoundError, ImportError, AssertionError): + comet_ml = None + class Loggers(): # YOLOv5 Loggers class @@ -80,7 +92,10 @@ class Loggers(): prefix = colorstr('ClearML: ') s = f"{prefix}run 'pip install clearml' to automatically track, visualize and remotely train YOLOv5 🚀 in ClearML" self.logger.info(s) - + if not comet_ml: + prefix = colorstr('Comet: ') + s = f"{prefix}run 'pip install comet_ml' to automatically track and visualize YOLOv5 🚀 runs in Comet" + self.logger.info(s) # TensorBoard s = self.save_dir if 'tb' in self.include and not self.opt.evolve: @@ -107,6 +122,18 @@ class Loggers(): else: self.clearml = None + # Comet + if comet_ml and 'comet' in self.include: + if isinstance(self.opt.resume, str) and self.opt.resume.startswith("comet://"): + run_id = self.opt.resume.split("/")[-1] + self.comet_logger = 
CometLogger(self.opt, self.hyp, run_id=run_id) + + else: + self.comet_logger = CometLogger(self.opt, self.hyp) + + else: + self.comet_logger = None + @property def remote_dataset(self): # Get data_dict if custom dataset artifact link is provided @@ -115,12 +142,18 @@ class Loggers(): data_dict = self.clearml.data_dict if self.wandb: data_dict = self.wandb.data_dict + if self.comet_logger: + data_dict = self.comet_logger.data_dict return data_dict def on_train_start(self): - # Callback runs on train start - pass + if self.comet_logger: + self.comet_logger.on_train_start() + + def on_pretrain_routine_start(self): + if self.comet_logger: + self.comet_logger.on_pretrain_routine_start() def on_pretrain_routine_end(self, labels, names): # Callback runs on pre-train routine end @@ -131,8 +164,11 @@ class Loggers(): self.wandb.log({"Labels": [wandb.Image(str(x), caption=x.name) for x in paths]}) # if self.clearml: # pass # ClearML saves these images automatically using hooks + if self.comet_logger: + self.comet_logger.on_pretrain_routine_end(paths) - def on_train_batch_end(self, model, ni, imgs, targets, paths): + def on_train_batch_end(self, model, ni, imgs, targets, paths, vals): + log_dict = dict(zip(self.keys[0:3], vals)) # Callback runs on train batch end # ni: number integrated batches (since train start) if self.plots: @@ -148,11 +184,21 @@ class Loggers(): if self.clearml: self.clearml.log_debug_samples(files, title='Mosaics') + if self.comet_logger: + self.comet_logger.on_train_batch_end(log_dict, step=ni) + def on_train_epoch_end(self, epoch): # Callback runs on train epoch end if self.wandb: self.wandb.current_epoch = epoch + 1 + if self.comet_logger: + self.comet_logger.on_train_epoch_end(epoch) + + def on_val_start(self): + if self.comet_logger: + self.comet_logger.on_val_start() + def on_val_image_end(self, pred, predn, path, names, im): # Callback runs on val image end if self.wandb: @@ -160,7 +206,11 @@ class Loggers(): if self.clearml: 
self.clearml.log_image_with_boxes(path, pred, names, im) - def on_val_end(self): + def on_val_batch_end(self, batch_i, im, targets, paths, shapes, out): + if self.comet_logger: + self.comet_logger.on_val_batch_end(batch_i, im, targets, paths, shapes, out) + + def on_val_end(self, nt, tp, fp, p, r, f1, ap, ap50, ap_class, confusion_matrix): # Callback runs on val end if self.wandb or self.clearml: files = sorted(self.save_dir.glob('val*.jpg')) @@ -169,6 +219,9 @@ class Loggers(): if self.clearml: self.clearml.log_debug_samples(files, title='Validation') + if self.comet_logger: + self.comet_logger.on_val_end(nt, tp, fp, p, r, f1, ap, ap50, ap_class, confusion_matrix) + def on_fit_epoch_end(self, vals, epoch, best_fitness, fi): # Callback runs at the end of each fit (train+val) epoch x = dict(zip(self.keys, vals)) @@ -199,6 +252,9 @@ class Loggers(): self.clearml.current_epoch_logged_images = set() # reset epoch image limit self.clearml.current_epoch += 1 + if self.comet_logger: + self.comet_logger.on_fit_epoch_end(x, epoch=epoch) + def on_model_save(self, last, epoch, final_epoch, best_fitness, fi): # Callback runs on model save event if (epoch + 1) % self.opt.save_period == 0 and not final_epoch and self.opt.save_period != -1: @@ -209,6 +265,9 @@ class Loggers(): model_name='Latest Model', auto_delete_file=False) + if self.comet_logger: + self.comet_logger.on_model_save(last, epoch, final_epoch, best_fitness, fi) + def on_train_end(self, last, best, epoch, results): # Callback runs on training end, i.e. 
saving best model if self.plots: @@ -237,10 +296,16 @@ class Loggers(): name='Best Model', auto_delete_file=False) + if self.comet_logger: + final_results = dict(zip(self.keys[3:10], results)) + self.comet_logger.on_train_end(files, self.save_dir, last, best, epoch, final_results) + def on_params_update(self, params: dict): # Update hyperparams or configs of the experiment if self.wandb: self.wandb.wandb_run.config.update(params, allow_val_change=True) + if self.comet_logger: + self.comet_logger.on_params_update(params) class GenericLogger: @@ -328,7 +393,7 @@ def log_tensorboard_graph(tb, model, imgsz=(640, 640)): warnings.simplefilter('ignore') # suppress jit trace warning tb.add_graph(torch.jit.trace(de_parallel(model), im, strict=False), []) except Exception as e: - print(f'WARNING: TensorBoard graph visualization failure {e}') + LOGGER.warning(f'WARNING ⚠️ TensorBoard graph visualization failure {e}') def web_project_name(project): diff --git a/utils/loggers/clearml/clearml_utils.py b/utils/loggers/clearml/clearml_utils.py index 1e13690..eb1c12c 100644 --- a/utils/loggers/clearml/clearml_utils.py +++ b/utils/loggers/clearml/clearml_utils.py @@ -11,6 +11,7 @@ from utils.plots import Annotator, colors try: import clearml from clearml import Dataset, Task + assert hasattr(clearml, '__version__') # verify package import not local dir except (ImportError, AssertionError): clearml = None diff --git a/utils/loggers/comet/README.md b/utils/loggers/comet/README.md new file mode 100644 index 0000000..3a51cb9 --- /dev/null +++ b/utils/loggers/comet/README.md @@ -0,0 +1,256 @@ + + +# YOLOv5 with Comet + +This guide will cover how to use YOLOv5 with [Comet](https://bit.ly/yolov5-readme-comet) + +# About Comet + +Comet builds tools that help data scientists, engineers, and team leaders accelerate and optimize machine learning and deep learning models. 
+ +Track and visualize model metrics in real time, save your hyperparameters, datasets, and model checkpoints, and visualize your model predictions with [Comet Custom Panels](https://bit.ly/yolov5-colab-comet-panels)! +Comet makes sure you never lose track of your work and makes it easy to share results and collaborate across teams of all sizes! + +# Getting Started + +## Install Comet + +```shell +pip install comet_ml +``` + +## Configure Comet Credentials + +There are two ways to configure Comet with YOLOv5. + +You can either set your credentials through environment variables + +**Environment Variables** + +```shell +export COMET_API_KEY= +export COMET_PROJECT_NAME= # This will default to 'yolov5' +``` + +Or create a `.comet.config` file in your working directory and set your credentials there. + +**Comet Configuration File** + +``` +[comet] +api_key= +project_name= # This will default to 'yolov5' +``` + +## Run the Training Script + +```shell +# Train YOLOv5s on COCO128 for 5 epochs +python train.py --img 640 --batch 16 --epochs 5 --data coco128.yaml --weights yolov5s.pt +``` + +That's it! Comet will automatically log your hyperparameters, command line arguments, training and validation metrics. You can visualize and analyze your runs in the Comet UI + +yolo-ui + +# Try out an Example!
+Check out an example of a [completed run here](https://www.comet.com/examples/comet-example-yolov5/a0e29e0e9b984e4a822db2a62d0cb357?experiment-tab=chart&showOutliers=true&smoothing=0&transformY=smoothing&xAxis=step&ref=yolov5&utm_source=yolov5&utm_medium=affilliate&utm_campaign=yolov5_comet_integration) + +Or better yet, try it out yourself in this Colab Notebook + +[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/drive/1RG0WOQyxlDlo5Km8GogJpIEJlg_5lyYO?usp=sharing) + +# Log automatically + +By default, Comet will log the following items + +## Metrics +- Box Loss, Object Loss, Classification Loss for the training and validation data +- mAP_0.5, mAP_0.5:0.95 metrics for the validation data. +- Precision and Recall for the validation data + +## Parameters + +- Model Hyperparameters +- All parameters passed through the command line options + +## Visualizations + +- Confusion Matrix of the model predictions on the validation data +- Plots for the PR and F1 curves across all classes +- Correlogram of the Class Labels + +# Configure Comet Logging + +Comet can be configured to log additional data either through command line flags passed to the training script +or through environment variables. + +```shell +export COMET_MODE=online # Set whether to run Comet in 'online' or 'offline' mode. Defaults to online +export COMET_MODEL_NAME= #Set the name for the saved model. Defaults to yolov5 +export COMET_LOG_CONFUSION_MATRIX=false # Set to disable logging a Comet Confusion Matrix. Defaults to true +export COMET_MAX_IMAGE_UPLOADS= # Controls how many total image predictions to log to Comet. Defaults to 100. +export COMET_LOG_PER_CLASS_METRICS=true # Set to log evaluation metrics for each detected class at the end of training. Defaults to false +export COMET_DEFAULT_CHECKPOINT_FILENAME= # Set this if you would like to resume training from a different checkpoint. 
Defaults to 'last.pt' +export COMET_LOG_BATCH_LEVEL_METRICS=true # Set this if you would like to log training metrics at the batch level. Defaults to false. +export COMET_LOG_PREDICTIONS=true # Set this to false to disable logging model predictions +``` + +## Logging Checkpoints with Comet + +Logging Models to Comet is disabled by default. To enable it, pass the `save-period` argument to the training script. This will save the +logged checkpoints to Comet based on the interval value provided by `save-period` + +```shell +python train.py \ +--img 640 \ +--batch 16 \ +--epochs 5 \ +--data coco128.yaml \ +--weights yolov5s.pt \ +--save-period 1 +``` + +## Logging Model Predictions + +By default, model predictions (images, ground truth labels and bounding boxes) will be logged to Comet. + +You can control the frequency of logged predictions and the associated images by passing the `bbox_interval` command line argument. Predictions can be visualized using Comet's Object Detection Custom Panel. This frequency corresponds to every Nth batch of data per epoch. In the example below, we are logging every 2nd batch of data for each epoch. + +**Note:** The YOLOv5 validation dataloader will default to a batch size of 32, so you will have to set the logging frequency accordingly. + +Here is an [example project using the Panel](https://www.comet.com/examples/comet-example-yolov5?shareable=YcwMiJaZSXfcEXpGOHDD12vA1&ref=yolov5&utm_source=yolov5&utm_medium=affilliate&utm_campaign=yolov5_comet_integration) + + +```shell +python train.py \ +--img 640 \ +--batch 16 \ +--epochs 5 \ +--data coco128.yaml \ +--weights yolov5s.pt \ +--bbox_interval 2 +``` + +### Controlling the number of Prediction Images logged to Comet + +When logging predictions from YOLOv5, Comet will log the images associated with each set of predictions. By default a maximum of 100 validation images are logged. You can increase or decrease this number using the `COMET_MAX_IMAGE_UPLOADS` environment variable. 
+ +```shell +env COMET_MAX_IMAGE_UPLOADS=200 python train.py \ +--img 640 \ +--batch 16 \ +--epochs 5 \ +--data coco128.yaml \ +--weights yolov5s.pt \ +--bbox_interval 1 +``` + +### Logging Class Level Metrics + +Use the `COMET_LOG_PER_CLASS_METRICS` environment variable to log mAP, precision, recall, f1 for each class. + +```shell +env COMET_LOG_PER_CLASS_METRICS=true python train.py \ +--img 640 \ +--batch 16 \ +--epochs 5 \ +--data coco128.yaml \ +--weights yolov5s.pt +``` + +## Uploading a Dataset to Comet Artifacts + +If you would like to store your data using [Comet Artifacts](https://www.comet.com/docs/v2/guides/data-management/using-artifacts/#learn-more?ref=yolov5&utm_source=yolov5&utm_medium=affilliate&utm_campaign=yolov5_comet_integration), you can do so using the `upload_dataset` flag. + +The dataset be organized in the way described in the [YOLOv5 documentation](https://docs.ultralytics.com/tutorials/train-custom-datasets/#3-organize-directories). The dataset config `yaml` file must follow the same format as that of the `coco128.yaml` file. + +```shell +python train.py \ +--img 640 \ +--batch 16 \ +--epochs 5 \ +--data coco128.yaml \ +--weights yolov5s.pt \ +--upload_dataset +``` + +You can find the uploaded dataset in the Artifacts tab in your Comet Workspace +artifact-1 + +You can preview the data directly in the Comet UI. +artifact-2 + +Artifacts are versioned and also support adding metadata about the dataset. Comet will automatically log the metadata from your dataset `yaml` file +artifact-3 + +### Using a saved Artifact + +If you would like to use a dataset from Comet Artifacts, set the `path` variable in your dataset `yaml` file to point to the following Artifact resource URL. 
+ +``` +# contents of artifact.yaml file +path: "comet:///:" +``` +Then pass this file to your training script in the following way + +```shell +python train.py \ +--img 640 \ +--batch 16 \ +--epochs 5 \ +--data artifact.yaml \ +--weights yolov5s.pt +``` + +Artifacts also allow you to track the lineage of data as it flows through your Experimentation workflow. Here you can see a graph that shows you all the experiments that have used your uploaded dataset. +artifact-4 + +## Resuming a Training Run + +If your training run is interrupted for any reason, e.g. disrupted internet connection, you can resume the run using the `resume` flag and the Comet Run Path. + +The Run Path has the following format `comet:////`. + +This will restore the run to its state before the interruption, which includes restoring the model from a checkpoint, restoring all hyperparameters and training arguments and downloading Comet dataset Artifacts if they were used in the original run. The resumed run will continue logging to the existing Experiment in the Comet UI + +```shell +python train.py \ +--resume "comet://" +``` + +## Hyperparameter Search with the Comet Optimizer + +YOLOv5 is also integrated with Comet's Optimizer, making it simple to visualize hyperparameter sweeps in the Comet UI. + +### Configuring an Optimizer Sweep + +To configure the Comet Optimizer, you will have to create a JSON file with the information about the sweep. An example file has been provided in `utils/loggers/comet/optimizer_config.json` + +```shell +python utils/loggers/comet/hpo.py \ + --comet_optimizer_config "utils/loggers/comet/optimizer_config.json" +``` + +The `hpo.py` script accepts the same arguments as `train.py`. If you wish to pass additional arguments to your sweep simply add them after +the script.
+ +```shell +python utils/loggers/comet/hpo.py \ + --comet_optimizer_config "utils/loggers/comet/optimizer_config.json" \ + --save-period 1 \ + --bbox_interval 1 +``` + +### Running a Sweep in Parallel + +```shell +comet optimizer -j utils/loggers/comet/hpo.py \ + utils/loggers/comet/optimizer_config.json" +``` + +### Visualizing Results + +Comet provides a number of ways to visualize the results of your sweep. Take a look at a [project with a completed sweep here](https://www.comet.com/examples/comet-example-yolov5/view/PrlArHGuuhDTKC1UuBmTtOSXD/panels?ref=yolov5&utm_source=yolov5&utm_medium=affilliate&utm_campaign=yolov5_comet_integration) + +hyperparameter-yolo diff --git a/utils/loggers/comet/__init__.py b/utils/loggers/comet/__init__.py new file mode 100644 index 0000000..ba5cecc --- /dev/null +++ b/utils/loggers/comet/__init__.py @@ -0,0 +1,501 @@ +import glob +import json +import logging +import os +import sys +from pathlib import Path + +logger = logging.getLogger(__name__) + +FILE = Path(__file__).resolve() +ROOT = FILE.parents[3] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH + +try: + import comet_ml + + # Project Configuration + config = comet_ml.config.get_config() + COMET_PROJECT_NAME = config.get_string(os.getenv("COMET_PROJECT_NAME"), "comet.project_name", default="yolov5") +except (ModuleNotFoundError, ImportError): + comet_ml = None + COMET_PROJECT_NAME = None + +import PIL +import torch +import torchvision.transforms as T +import yaml + +from utils.dataloaders import img2label_paths +from utils.general import check_dataset, scale_boxes, xywh2xyxy +from utils.metrics import box_iou + +COMET_PREFIX = "comet://" + +COMET_MODE = os.getenv("COMET_MODE", "online") + +# Model Saving Settings +COMET_MODEL_NAME = os.getenv("COMET_MODEL_NAME", "yolov5") + +# Dataset Artifact Settings +COMET_UPLOAD_DATASET = os.getenv("COMET_UPLOAD_DATASET", "false").lower() == "true" + +# Evaluation Settings 
+COMET_LOG_CONFUSION_MATRIX = os.getenv("COMET_LOG_CONFUSION_MATRIX", "true").lower() == "true" +COMET_LOG_PREDICTIONS = os.getenv("COMET_LOG_PREDICTIONS", "true").lower() == "true" +COMET_MAX_IMAGE_UPLOADS = int(os.getenv("COMET_MAX_IMAGE_UPLOADS", 100)) + +# Confusion Matrix Settings +CONF_THRES = float(os.getenv("CONF_THRES", 0.001)) +IOU_THRES = float(os.getenv("IOU_THRES", 0.6)) + +# Batch Logging Settings +COMET_LOG_BATCH_METRICS = os.getenv("COMET_LOG_BATCH_METRICS", "false").lower() == "true" +COMET_BATCH_LOGGING_INTERVAL = os.getenv("COMET_BATCH_LOGGING_INTERVAL", 1) +COMET_PREDICTION_LOGGING_INTERVAL = os.getenv("COMET_PREDICTION_LOGGING_INTERVAL", 1) +COMET_LOG_PER_CLASS_METRICS = os.getenv("COMET_LOG_PER_CLASS_METRICS", "false").lower() == "true" + +RANK = int(os.getenv("RANK", -1)) + +to_pil = T.ToPILImage() + + +class CometLogger: + """Log metrics, parameters, source code, models and much more + with Comet + """ + + def __init__(self, opt, hyp, run_id=None, job_type="Training", **experiment_kwargs) -> None: + self.job_type = job_type + self.opt = opt + self.hyp = hyp + + # Comet Flags + self.comet_mode = COMET_MODE + + self.save_model = opt.save_period > -1 + self.model_name = COMET_MODEL_NAME + + # Batch Logging Settings + self.log_batch_metrics = COMET_LOG_BATCH_METRICS + self.comet_log_batch_interval = COMET_BATCH_LOGGING_INTERVAL + + # Dataset Artifact Settings + self.upload_dataset = self.opt.upload_dataset if self.opt.upload_dataset else COMET_UPLOAD_DATASET + self.resume = self.opt.resume + + # Default parameters to pass to Experiment objects + self.default_experiment_kwargs = { + "log_code": False, + "log_env_gpu": True, + "log_env_cpu": True, + "project_name": COMET_PROJECT_NAME,} + self.default_experiment_kwargs.update(experiment_kwargs) + self.experiment = self._get_experiment(self.comet_mode, run_id) + + self.data_dict = self.check_dataset(self.opt.data) + self.class_names = self.data_dict["names"] + self.num_classes = self.data_dict["nc"] 
+ + self.logged_images_count = 0 + self.max_images = COMET_MAX_IMAGE_UPLOADS + + if run_id is None: + self.experiment.log_other("Created from", "YOLOv5") + if not isinstance(self.experiment, comet_ml.OfflineExperiment): + workspace, project_name, experiment_id = self.experiment.url.split("/")[-3:] + self.experiment.log_other( + "Run Path", + f"{workspace}/{project_name}/{experiment_id}", + ) + self.log_parameters(vars(opt)) + self.log_parameters(self.opt.hyp) + self.log_asset_data( + self.opt.hyp, + name="hyperparameters.json", + metadata={"type": "hyp-config-file"}, + ) + self.log_asset( + f"{self.opt.save_dir}/opt.yaml", + metadata={"type": "opt-config-file"}, + ) + + self.comet_log_confusion_matrix = COMET_LOG_CONFUSION_MATRIX + + if hasattr(self.opt, "conf_thres"): + self.conf_thres = self.opt.conf_thres + else: + self.conf_thres = CONF_THRES + if hasattr(self.opt, "iou_thres"): + self.iou_thres = self.opt.iou_thres + else: + self.iou_thres = IOU_THRES + + self.log_parameters({"val_iou_threshold": self.iou_thres, "val_conf_threshold": self.conf_thres}) + + self.comet_log_predictions = COMET_LOG_PREDICTIONS + if self.opt.bbox_interval == -1: + self.comet_log_prediction_interval = 1 if self.opt.epochs < 10 else self.opt.epochs // 10 + else: + self.comet_log_prediction_interval = self.opt.bbox_interval + + if self.comet_log_predictions: + self.metadata_dict = {} + self.logged_image_names = [] + + self.comet_log_per_class_metrics = COMET_LOG_PER_CLASS_METRICS + + self.experiment.log_others({ + "comet_mode": COMET_MODE, + "comet_max_image_uploads": COMET_MAX_IMAGE_UPLOADS, + "comet_log_per_class_metrics": COMET_LOG_PER_CLASS_METRICS, + "comet_log_batch_metrics": COMET_LOG_BATCH_METRICS, + "comet_log_confusion_matrix": COMET_LOG_CONFUSION_MATRIX, + "comet_model_name": COMET_MODEL_NAME,}) + + # Check if running the Experiment with the Comet Optimizer + if hasattr(self.opt, "comet_optimizer_id"): + self.experiment.log_other("optimizer_id", self.opt.comet_optimizer_id) 
+ self.experiment.log_other("optimizer_objective", self.opt.comet_optimizer_objective) + self.experiment.log_other("optimizer_metric", self.opt.comet_optimizer_metric) + self.experiment.log_other("optimizer_parameters", json.dumps(self.hyp)) + + def _get_experiment(self, mode, experiment_id=None): + if mode == "offline": + if experiment_id is not None: + return comet_ml.ExistingOfflineExperiment( + previous_experiment=experiment_id, + **self.default_experiment_kwargs, + ) + + return comet_ml.OfflineExperiment(**self.default_experiment_kwargs,) + + else: + try: + if experiment_id is not None: + return comet_ml.ExistingExperiment( + previous_experiment=experiment_id, + **self.default_experiment_kwargs, + ) + + return comet_ml.Experiment(**self.default_experiment_kwargs) + + except ValueError: + logger.warning("COMET WARNING: " + "Comet credentials have not been set. " + "Comet will default to offline logging. " + "Please set your credentials to enable online logging.") + return self._get_experiment("offline", experiment_id) + + return + + def log_metrics(self, log_dict, **kwargs): + self.experiment.log_metrics(log_dict, **kwargs) + + def log_parameters(self, log_dict, **kwargs): + self.experiment.log_parameters(log_dict, **kwargs) + + def log_asset(self, asset_path, **kwargs): + self.experiment.log_asset(asset_path, **kwargs) + + def log_asset_data(self, asset, **kwargs): + self.experiment.log_asset_data(asset, **kwargs) + + def log_image(self, img, **kwargs): + self.experiment.log_image(img, **kwargs) + + def log_model(self, path, opt, epoch, fitness_score, best_model=False): + if not self.save_model: + return + + model_metadata = { + "fitness_score": fitness_score[-1], + "epochs_trained": epoch + 1, + "save_period": opt.save_period, + "total_epochs": opt.epochs,} + + model_files = glob.glob(f"{path}/*.pt") + for model_path in model_files: + name = Path(model_path).name + + self.experiment.log_model( + self.model_name, + file_or_folder=model_path, + file_name=name, 
+ metadata=model_metadata, + overwrite=True, + ) + + def check_dataset(self, data_file): + with open(data_file) as f: + data_config = yaml.safe_load(f) + + if data_config['path'].startswith(COMET_PREFIX): + path = data_config['path'].replace(COMET_PREFIX, "") + data_dict = self.download_dataset_artifact(path) + + return data_dict + + self.log_asset(self.opt.data, metadata={"type": "data-config-file"}) + + return check_dataset(data_file) + + def log_predictions(self, image, labelsn, path, shape, predn): + if self.logged_images_count >= self.max_images: + return + detections = predn[predn[:, 4] > self.conf_thres] + iou = box_iou(labelsn[:, 1:], detections[:, :4]) + mask, _ = torch.where(iou > self.iou_thres) + if len(mask) == 0: + return + + filtered_detections = detections[mask] + filtered_labels = labelsn[mask] + + image_id = path.split("/")[-1].split(".")[0] + image_name = f"{image_id}_curr_epoch_{self.experiment.curr_epoch}" + if image_name not in self.logged_image_names: + native_scale_image = PIL.Image.open(path) + self.log_image(native_scale_image, name=image_name) + self.logged_image_names.append(image_name) + + metadata = [] + for cls, *xyxy in filtered_labels.tolist(): + metadata.append({ + "label": f"{self.class_names[int(cls)]}-gt", + "score": 100, + "box": { + "x": xyxy[0], + "y": xyxy[1], + "x2": xyxy[2], + "y2": xyxy[3]},}) + for *xyxy, conf, cls in filtered_detections.tolist(): + metadata.append({ + "label": f"{self.class_names[int(cls)]}", + "score": conf * 100, + "box": { + "x": xyxy[0], + "y": xyxy[1], + "x2": xyxy[2], + "y2": xyxy[3]},}) + + self.metadata_dict[image_name] = metadata + self.logged_images_count += 1 + + return + + def preprocess_prediction(self, image, labels, shape, pred): + nl, _ = labels.shape[0], pred.shape[0] + + # Predictions + if self.opt.single_cls: + pred[:, 5] = 0 + + predn = pred.clone() + scale_boxes(image.shape[1:], predn[:, :4], shape[0], shape[1]) + + labelsn = None + if nl: + tbox = xywh2xyxy(labels[:, 1:5]) # target 
boxes + scale_boxes(image.shape[1:], tbox, shape[0], shape[1]) # native-space labels + labelsn = torch.cat((labels[:, 0:1], tbox), 1) # native-space labels + scale_boxes(image.shape[1:], predn[:, :4], shape[0], shape[1]) # native-space pred + + return predn, labelsn + + def add_assets_to_artifact(self, artifact, path, asset_path, split): + img_paths = sorted(glob.glob(f"{asset_path}/*")) + label_paths = img2label_paths(img_paths) + + for image_file, label_file in zip(img_paths, label_paths): + image_logical_path, label_logical_path = map(lambda x: os.path.relpath(x, path), [image_file, label_file]) + + try: + artifact.add(image_file, logical_path=image_logical_path, metadata={"split": split}) + artifact.add(label_file, logical_path=label_logical_path, metadata={"split": split}) + except ValueError as e: + logger.error('COMET ERROR: Error adding file to Artifact. Skipping file.') + logger.error(f"COMET ERROR: {e}") + continue + + return artifact + + def upload_dataset_artifact(self): + dataset_name = self.data_dict.get("dataset_name", "yolov5-dataset") + path = str((ROOT / Path(self.data_dict["path"])).resolve()) + + metadata = self.data_dict.copy() + for key in ["train", "val", "test"]: + split_path = metadata.get(key) + if split_path is not None: + metadata[key] = split_path.replace(path, "") + + artifact = comet_ml.Artifact(name=dataset_name, artifact_type="dataset", metadata=metadata) + for key in metadata.keys(): + if key in ["train", "val", "test"]: + if isinstance(self.upload_dataset, str) and (key != self.upload_dataset): + continue + + asset_path = self.data_dict.get(key) + if asset_path is not None: + artifact = self.add_assets_to_artifact(artifact, path, asset_path, key) + + self.experiment.log_artifact(artifact) + + return + + def download_dataset_artifact(self, artifact_path): + logged_artifact = self.experiment.get_artifact(artifact_path) + artifact_save_dir = str(Path(self.opt.save_dir) / logged_artifact.name) + 
logged_artifact.download(artifact_save_dir) + + metadata = logged_artifact.metadata + data_dict = metadata.copy() + data_dict["path"] = artifact_save_dir + data_dict["names"] = {int(k): v for k, v in metadata.get("names").items()} + + data_dict = self.update_data_paths(data_dict) + return data_dict + + def update_data_paths(self, data_dict): + path = data_dict.get("path", "") + + for split in ["train", "val", "test"]: + if data_dict.get(split): + split_path = data_dict.get(split) + data_dict[split] = (f"{path}/{split_path}" if isinstance(split, str) else [ + f"{path}/{x}" for x in split_path]) + + return data_dict + + def on_pretrain_routine_end(self, paths): + if self.opt.resume: + return + + for path in paths: + self.log_asset(str(path)) + + if self.upload_dataset: + if not self.resume: + self.upload_dataset_artifact() + + return + + def on_train_start(self): + self.log_parameters(self.hyp) + + def on_train_epoch_start(self): + return + + def on_train_epoch_end(self, epoch): + self.experiment.curr_epoch = epoch + + return + + def on_train_batch_start(self): + return + + def on_train_batch_end(self, log_dict, step): + self.experiment.curr_step = step + if self.log_batch_metrics and (step % self.comet_log_batch_interval == 0): + self.log_metrics(log_dict, step=step) + + return + + def on_train_end(self, files, save_dir, last, best, epoch, results): + if self.comet_log_predictions: + curr_epoch = self.experiment.curr_epoch + self.experiment.log_asset_data(self.metadata_dict, "image-metadata.json", epoch=curr_epoch) + + for f in files: + self.log_asset(f, metadata={"epoch": epoch}) + self.log_asset(f"{save_dir}/results.csv", metadata={"epoch": epoch}) + + if not self.opt.evolve: + model_path = str(best if best.exists() else last) + name = Path(model_path).name + if self.save_model: + self.experiment.log_model( + self.model_name, + file_or_folder=model_path, + file_name=name, + overwrite=True, + ) + + # Check if running Experiment with Comet Optimizer + if 
hasattr(self.opt, 'comet_optimizer_id'): + metric = results.get(self.opt.comet_optimizer_metric) + self.experiment.log_other('optimizer_metric_value', metric) + + self.finish_run() + + def on_val_start(self): + return + + def on_val_batch_start(self): + return + + def on_val_batch_end(self, batch_i, images, targets, paths, shapes, outputs): + if not (self.comet_log_predictions and ((batch_i + 1) % self.comet_log_prediction_interval == 0)): + return + + for si, pred in enumerate(outputs): + if len(pred) == 0: + continue + + image = images[si] + labels = targets[targets[:, 0] == si, 1:] + shape = shapes[si] + path = paths[si] + predn, labelsn = self.preprocess_prediction(image, labels, shape, pred) + if labelsn is not None: + self.log_predictions(image, labelsn, path, shape, predn) + + return + + def on_val_end(self, nt, tp, fp, p, r, f1, ap, ap50, ap_class, confusion_matrix): + if self.comet_log_per_class_metrics: + if self.num_classes > 1: + for i, c in enumerate(ap_class): + class_name = self.class_names[c] + self.experiment.log_metrics( + { + 'mAP@.5': ap50[i], + 'mAP@.5:.95': ap[i], + 'precision': p[i], + 'recall': r[i], + 'f1': f1[i], + 'true_positives': tp[i], + 'false_positives': fp[i], + 'support': nt[c]}, + prefix=class_name) + + if self.comet_log_confusion_matrix: + epoch = self.experiment.curr_epoch + class_names = list(self.class_names.values()) + class_names.append("background") + num_classes = len(class_names) + + self.experiment.log_confusion_matrix( + matrix=confusion_matrix.matrix, + max_categories=num_classes, + labels=class_names, + epoch=epoch, + column_label='Actual Category', + row_label='Predicted Category', + file_name=f"confusion-matrix-epoch-{epoch}.json", + ) + + def on_fit_epoch_end(self, result, epoch): + self.log_metrics(result, epoch=epoch) + + def on_model_save(self, last, epoch, final_epoch, best_fitness, fi): + if ((epoch + 1) % self.opt.save_period == 0 and not final_epoch) and self.opt.save_period != -1: + 
self.log_model(last.parent, self.opt, epoch, fi, best_model=best_fitness == fi) + + def on_params_update(self, params): + self.log_parameters(params) + + def finish_run(self): + self.experiment.end() diff --git a/utils/loggers/comet/comet_utils.py b/utils/loggers/comet/comet_utils.py new file mode 100644 index 0000000..3cbd451 --- /dev/null +++ b/utils/loggers/comet/comet_utils.py @@ -0,0 +1,150 @@ +import logging +import os +from urllib.parse import urlparse + +try: + import comet_ml +except (ModuleNotFoundError, ImportError): + comet_ml = None + +import yaml + +logger = logging.getLogger(__name__) + +COMET_PREFIX = "comet://" +COMET_MODEL_NAME = os.getenv("COMET_MODEL_NAME", "yolov5") +COMET_DEFAULT_CHECKPOINT_FILENAME = os.getenv("COMET_DEFAULT_CHECKPOINT_FILENAME", "last.pt") + + +def download_model_checkpoint(opt, experiment): + model_dir = f"{opt.project}/{experiment.name}" + os.makedirs(model_dir, exist_ok=True) + + model_name = COMET_MODEL_NAME + model_asset_list = experiment.get_model_asset_list(model_name) + + if len(model_asset_list) == 0: + logger.error(f"COMET ERROR: No checkpoints found for model name : {model_name}") + return + + model_asset_list = sorted( + model_asset_list, + key=lambda x: x["step"], + reverse=True, + ) + logged_checkpoint_map = {asset["fileName"]: asset["assetId"] for asset in model_asset_list} + + resource_url = urlparse(opt.weights) + checkpoint_filename = resource_url.query + + if checkpoint_filename: + asset_id = logged_checkpoint_map.get(checkpoint_filename) + else: + asset_id = logged_checkpoint_map.get(COMET_DEFAULT_CHECKPOINT_FILENAME) + checkpoint_filename = COMET_DEFAULT_CHECKPOINT_FILENAME + + if asset_id is None: + logger.error(f"COMET ERROR: Checkpoint {checkpoint_filename} not found in the given Experiment") + return + + try: + logger.info(f"COMET INFO: Downloading checkpoint {checkpoint_filename}") + asset_filename = checkpoint_filename + + model_binary = experiment.get_asset(asset_id, return_type="binary", 
stream=False) + model_download_path = f"{model_dir}/{asset_filename}" + with open(model_download_path, "wb") as f: + f.write(model_binary) + + opt.weights = model_download_path + + except Exception as e: + logger.warning("COMET WARNING: Unable to download checkpoint from Comet") + logger.exception(e) + + +def set_opt_parameters(opt, experiment): + """Update the opts Namespace with parameters + from Comet's ExistingExperiment when resuming a run + + Args: + opt (argparse.Namespace): Namespace of command line options + experiment (comet_ml.APIExperiment): Comet API Experiment object + """ + asset_list = experiment.get_asset_list() + resume_string = opt.resume + + for asset in asset_list: + if asset["fileName"] == "opt.yaml": + asset_id = asset["assetId"] + asset_binary = experiment.get_asset(asset_id, return_type="binary", stream=False) + opt_dict = yaml.safe_load(asset_binary) + for key, value in opt_dict.items(): + setattr(opt, key, value) + opt.resume = resume_string + + # Save hyperparameters to YAML file + # Necessary to pass checks in training script + save_dir = f"{opt.project}/{experiment.name}" + os.makedirs(save_dir, exist_ok=True) + + hyp_yaml_path = f"{save_dir}/hyp.yaml" + with open(hyp_yaml_path, "w") as f: + yaml.dump(opt.hyp, f) + opt.hyp = hyp_yaml_path + + +def check_comet_weights(opt): + """Downloads model weights from Comet and updates the + weights path to point to saved weights location + + Args: + opt (argparse.Namespace): Command Line arguments passed + to YOLOv5 training script + + Returns: + None/bool: Return True if weights are successfully downloaded + else return None + """ + if comet_ml is None: + return + + if isinstance(opt.weights, str): + if opt.weights.startswith(COMET_PREFIX): + api = comet_ml.API() + resource = urlparse(opt.weights) + experiment_path = f"{resource.netloc}{resource.path}" + experiment = api.get(experiment_path) + download_model_checkpoint(opt, experiment) + return True + + return None + + +def 
check_comet_resume(opt): + """Restores run parameters to its original state based on the model checkpoint + and logged Experiment parameters. + + Args: + opt (argparse.Namespace): Command Line arguments passed + to YOLOv5 training script + + Returns: + None/bool: Return True if the run is restored successfully + else return None + """ + if comet_ml is None: + return + + if isinstance(opt.resume, str): + if opt.resume.startswith(COMET_PREFIX): + api = comet_ml.API() + resource = urlparse(opt.resume) + experiment_path = f"{resource.netloc}{resource.path}" + experiment = api.get(experiment_path) + set_opt_parameters(opt, experiment) + download_model_checkpoint(opt, experiment) + + return True + + return None diff --git a/utils/loggers/comet/hpo.py b/utils/loggers/comet/hpo.py new file mode 100644 index 0000000..7dd5c92 --- /dev/null +++ b/utils/loggers/comet/hpo.py @@ -0,0 +1,118 @@ +import argparse +import json +import logging +import os +import sys +from pathlib import Path + +import comet_ml + +logger = logging.getLogger(__name__) + +FILE = Path(__file__).resolve() +ROOT = FILE.parents[3] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH + +from train import train +from utils.callbacks import Callbacks +from utils.general import increment_path +from utils.torch_utils import select_device + +# Project Configuration +config = comet_ml.config.get_config() +COMET_PROJECT_NAME = config.get_string(os.getenv("COMET_PROJECT_NAME"), "comet.project_name", default="yolov5") + + +def get_args(known=False): + parser = argparse.ArgumentParser() + parser.add_argument('--weights', type=str, default=ROOT / 'yolov5s.pt', help='initial weights path') + parser.add_argument('--cfg', type=str, default='', help='model.yaml path') + parser.add_argument('--data', type=str, default=ROOT / 'data/coco128.yaml', help='dataset.yaml path') + parser.add_argument('--hyp', type=str, default=ROOT / 'data/hyps/hyp.scratch-low.yaml', 
help='hyperparameters path') + parser.add_argument('--epochs', type=int, default=300, help='total training epochs') + parser.add_argument('--batch-size', type=int, default=16, help='total batch size for all GPUs, -1 for autobatch') + parser.add_argument('--imgsz', '--img', '--img-size', type=int, default=640, help='train, val image size (pixels)') + parser.add_argument('--rect', action='store_true', help='rectangular training') + parser.add_argument('--resume', nargs='?', const=True, default=False, help='resume most recent training') + parser.add_argument('--nosave', action='store_true', help='only save final checkpoint') + parser.add_argument('--noval', action='store_true', help='only validate final epoch') + parser.add_argument('--noautoanchor', action='store_true', help='disable AutoAnchor') + parser.add_argument('--noplots', action='store_true', help='save no plot files') + parser.add_argument('--evolve', type=int, nargs='?', const=300, help='evolve hyperparameters for x generations') + parser.add_argument('--bucket', type=str, default='', help='gsutil bucket') + parser.add_argument('--cache', type=str, nargs='?', const='ram', help='--cache images in "ram" (default) or "disk"') + parser.add_argument('--image-weights', action='store_true', help='use weighted image selection for training') + parser.add_argument('--device', default='', help='cuda device, i.e. 
0 or 0,1,2,3 or cpu') + parser.add_argument('--multi-scale', action='store_true', help='vary img-size +/- 50%%') + parser.add_argument('--single-cls', action='store_true', help='train multi-class data as single-class') + parser.add_argument('--optimizer', type=str, choices=['SGD', 'Adam', 'AdamW'], default='SGD', help='optimizer') + parser.add_argument('--sync-bn', action='store_true', help='use SyncBatchNorm, only available in DDP mode') + parser.add_argument('--workers', type=int, default=8, help='max dataloader workers (per RANK in DDP mode)') + parser.add_argument('--project', default=ROOT / 'runs/train', help='save to project/name') + parser.add_argument('--name', default='exp', help='save to project/name') + parser.add_argument('--exist-ok', action='store_true', help='existing project/name ok, do not increment') + parser.add_argument('--quad', action='store_true', help='quad dataloader') + parser.add_argument('--cos-lr', action='store_true', help='cosine LR scheduler') + parser.add_argument('--label-smoothing', type=float, default=0.0, help='Label smoothing epsilon') + parser.add_argument('--patience', type=int, default=100, help='EarlyStopping patience (epochs without improvement)') + parser.add_argument('--freeze', nargs='+', type=int, default=[0], help='Freeze layers: backbone=10, first3=0 1 2') + parser.add_argument('--save-period', type=int, default=-1, help='Save checkpoint every x epochs (disabled if < 1)') + parser.add_argument('--seed', type=int, default=0, help='Global training seed') + parser.add_argument('--local_rank', type=int, default=-1, help='Automatic DDP Multi-GPU argument, do not modify') + + # Weights & Biases arguments + parser.add_argument('--entity', default=None, help='W&B: Entity') + parser.add_argument('--upload_dataset', nargs='?', const=True, default=False, help='W&B: Upload data, "val" option') + parser.add_argument('--bbox_interval', type=int, default=-1, help='W&B: Set bounding-box image logging interval') + 
parser.add_argument('--artifact_alias', type=str, default='latest', help='W&B: Version of dataset artifact to use') + + # Comet Arguments + parser.add_argument("--comet_optimizer_config", type=str, help="Comet: Path to a Comet Optimizer Config File.") + parser.add_argument("--comet_optimizer_id", type=str, help="Comet: ID of the Comet Optimizer sweep.") + parser.add_argument("--comet_optimizer_objective", type=str, help="Comet: Set to 'minimize' or 'maximize'.") + parser.add_argument("--comet_optimizer_metric", type=str, help="Comet: Metric to Optimize.") + parser.add_argument("--comet_optimizer_workers", + type=int, + default=1, + help="Comet: Number of Parallel Workers to use with the Comet Optimizer.") + + return parser.parse_known_args()[0] if known else parser.parse_args() + + +def run(parameters, opt): + hyp_dict = {k: v for k, v in parameters.items() if k not in ["epochs", "batch_size"]} + + opt.save_dir = str(increment_path(Path(opt.project) / opt.name, exist_ok=opt.exist_ok or opt.evolve)) + opt.batch_size = parameters.get("batch_size") + opt.epochs = parameters.get("epochs") + + device = select_device(opt.device, batch_size=opt.batch_size) + train(hyp_dict, opt, device, callbacks=Callbacks()) + + +if __name__ == "__main__": + opt = get_args(known=True) + + opt.weights = str(opt.weights) + opt.cfg = str(opt.cfg) + opt.data = str(opt.data) + opt.project = str(opt.project) + + optimizer_id = os.getenv("COMET_OPTIMIZER_ID") + if optimizer_id is None: + with open(opt.comet_optimizer_config) as f: + optimizer_config = json.load(f) + optimizer = comet_ml.Optimizer(optimizer_config) + else: + optimizer = comet_ml.Optimizer(optimizer_id) + + opt.comet_optimizer_id = optimizer.id + status = optimizer.status() + + opt.comet_optimizer_objective = status["spec"]["objective"] + opt.comet_optimizer_metric = status["spec"]["metric"] + + logger.info("COMET INFO: Starting Hyperparameter Sweep") + for parameter in optimizer.get_parameters(): + run(parameter["parameters"], 
opt) diff --git a/utils/loggers/comet/optimizer_config.json b/utils/loggers/comet/optimizer_config.json new file mode 100644 index 0000000..83dddda --- /dev/null +++ b/utils/loggers/comet/optimizer_config.json @@ -0,0 +1,209 @@ +{ + "algorithm": "random", + "parameters": { + "anchor_t": { + "type": "discrete", + "values": [ + 2, + 8 + ] + }, + "batch_size": { + "type": "discrete", + "values": [ + 16, + 32, + 64 + ] + }, + "box": { + "type": "discrete", + "values": [ + 0.02, + 0.2 + ] + }, + "cls": { + "type": "discrete", + "values": [ + 0.2 + ] + }, + "cls_pw": { + "type": "discrete", + "values": [ + 0.5 + ] + }, + "copy_paste": { + "type": "discrete", + "values": [ + 1 + ] + }, + "degrees": { + "type": "discrete", + "values": [ + 0, + 45 + ] + }, + "epochs": { + "type": "discrete", + "values": [ + 5 + ] + }, + "fl_gamma": { + "type": "discrete", + "values": [ + 0 + ] + }, + "fliplr": { + "type": "discrete", + "values": [ + 0 + ] + }, + "flipud": { + "type": "discrete", + "values": [ + 0 + ] + }, + "hsv_h": { + "type": "discrete", + "values": [ + 0 + ] + }, + "hsv_s": { + "type": "discrete", + "values": [ + 0 + ] + }, + "hsv_v": { + "type": "discrete", + "values": [ + 0 + ] + }, + "iou_t": { + "type": "discrete", + "values": [ + 0.7 + ] + }, + "lr0": { + "type": "discrete", + "values": [ + 1e-05, + 0.1 + ] + }, + "lrf": { + "type": "discrete", + "values": [ + 0.01, + 1 + ] + }, + "mixup": { + "type": "discrete", + "values": [ + 1 + ] + }, + "momentum": { + "type": "discrete", + "values": [ + 0.6 + ] + }, + "mosaic": { + "type": "discrete", + "values": [ + 0 + ] + }, + "obj": { + "type": "discrete", + "values": [ + 0.2 + ] + }, + "obj_pw": { + "type": "discrete", + "values": [ + 0.5 + ] + }, + "optimizer": { + "type": "categorical", + "values": [ + "SGD", + "Adam", + "AdamW" + ] + }, + "perspective": { + "type": "discrete", + "values": [ + 0 + ] + }, + "scale": { + "type": "discrete", + "values": [ + 0 + ] + }, + "shear": { + "type": "discrete", + "values": [ + 0 + 
] + }, + "translate": { + "type": "discrete", + "values": [ + 0 + ] + }, + "warmup_bias_lr": { + "type": "discrete", + "values": [ + 0, + 0.2 + ] + }, + "warmup_epochs": { + "type": "discrete", + "values": [ + 5 + ] + }, + "warmup_momentum": { + "type": "discrete", + "values": [ + 0, + 0.95 + ] + }, + "weight_decay": { + "type": "discrete", + "values": [ + 0, + 0.001 + ] + } + }, + "spec": { + "maxCombo": 0, + "metric": "metrics/mAP_0.5", + "objective": "maximize" + }, + "trials": 1 +} diff --git a/utils/metrics.py b/utils/metrics.py index ee7d339..ed611d7 100644 --- a/utils/metrics.py +++ b/utils/metrics.py @@ -28,7 +28,7 @@ def smooth(y, f=0.05): return np.convolve(yp, np.ones(nf) / nf, mode='valid') # y-smoothed -def ap_per_class(tp, conf, pred_cls, target_cls, plot=False, save_dir='.', names=(), eps=1e-16): +def ap_per_class(tp, conf, pred_cls, target_cls, plot=False, save_dir='.', names=(), eps=1e-16, prefix=""): """ Compute the average precision, given the recall and precision curves. Source: https://github.com/rafaelpadilla/Object-Detection-Metrics. 
# Arguments @@ -83,10 +83,10 @@ def ap_per_class(tp, conf, pred_cls, target_cls, plot=False, save_dir='.', names names = [v for k, v in names.items() if k in unique_classes] # list: only classes that have data names = dict(enumerate(names)) # to dict if plot: - plot_pr_curve(px, py, ap, Path(save_dir) / 'PR_curve.png', names) - plot_mc_curve(px, f1, Path(save_dir) / 'F1_curve.png', names, ylabel='F1') - plot_mc_curve(px, p, Path(save_dir) / 'P_curve.png', names, ylabel='Precision') - plot_mc_curve(px, r, Path(save_dir) / 'R_curve.png', names, ylabel='Recall') + plot_pr_curve(px, py, ap, Path(save_dir) / f'{prefix}PR_curve.png', names) + plot_mc_curve(px, f1, Path(save_dir) / f'{prefix}F1_curve.png', names, ylabel='F1') + plot_mc_curve(px, p, Path(save_dir) / f'{prefix}P_curve.png', names, ylabel='Precision') + plot_mc_curve(px, r, Path(save_dir) / f'{prefix}R_curve.png', names, ylabel='Recall') i = smooth(f1.mean(0), 0.1).argmax() # max F1 index p, r, f1 = p[:, i], r[:, i], f1[:, i] @@ -170,12 +170,12 @@ class ConfusionMatrix: if n and sum(j) == 1: self.matrix[detection_classes[m1[j]], gc] += 1 # correct else: - self.matrix[self.nc, gc] += 1 # background FP + self.matrix[self.nc, gc] += 1 # true background if n: for i, dc in enumerate(detection_classes): if not any(m1 == i): - self.matrix[dc, self.nc] += 1 # background FN + self.matrix[dc, self.nc] += 1 # predicted background def matrix(self): return self.matrix @@ -186,7 +186,7 @@ class ConfusionMatrix: # fn = self.matrix.sum(0) - tp # false negatives (missed detections) return tp[:-1], fp[:-1] # remove background class - @TryExcept('WARNING: ConfusionMatrix plot failure: ') + @TryExcept('WARNING ⚠️ ConfusionMatrix plot failure: ') def plot(self, normalize=True, save_dir='', names=()): import seaborn as sn @@ -197,6 +197,7 @@ class ConfusionMatrix: nc, nn = self.nc, len(names) # number of classes, names sn.set(font_scale=1.0 if nc < 50 else 0.8) # for label size labels = (0 < nn < 99) and (nn == nc) # apply names 
to ticklabels + ticklabels = (names + ['background']) if labels else "auto" with warnings.catch_warnings(): warnings.simplefilter('ignore') # suppress empty matrix RuntimeWarning: All-NaN slice encountered sn.heatmap(array, @@ -208,8 +209,8 @@ class ConfusionMatrix: fmt='.2f', square=True, vmin=0.0, - xticklabels=names + ['background FP'] if labels else "auto", - yticklabels=names + ['background FN'] if labels else "auto").set_facecolor((1, 1, 1)) + xticklabels=ticklabels, + yticklabels=ticklabels).set_facecolor((1, 1, 1)) ax.set_ylabel('True') ax.set_ylabel('Predicted') ax.set_title('Confusion Matrix') diff --git a/utils/plots.py b/utils/plots.py index 0f322b6..36df271 100644 --- a/utils/plots.py +++ b/utils/plots.py @@ -20,9 +20,10 @@ import torch from PIL import Image, ImageDraw, ImageFont from utils import TryExcept, threaded -from utils.general import (CONFIG_DIR, FONT, LOGGER, check_font, check_requirements, clip_coords, increment_path, +from utils.general import (CONFIG_DIR, FONT, LOGGER, check_font, check_requirements, clip_boxes, increment_path, is_ascii, xywh2xyxy, xyxy2xywh) from utils.metrics import fitness +from utils.segment.general import scale_image # Settings RANK = int(os.getenv('RANK', -1)) @@ -113,6 +114,52 @@ class Annotator: thickness=tf, lineType=cv2.LINE_AA) + def masks(self, masks, colors, im_gpu=None, alpha=0.5): + """Plot masks at once. + Args: + masks (tensor): predicted masks on cuda, shape: [n, h, w] + colors (List[List[Int]]): colors for predicted masks, [[r, g, b] * n] + im_gpu (tensor): img is in cuda, shape: [3, h, w], range: [0, 1] + alpha (float): mask transparency: 0.0 fully transparent, 1.0 opaque + """ + if self.pil: + # convert to numpy first + self.im = np.asarray(self.im).copy() + if im_gpu is None: + # Add multiple masks of shape(h,w,n) with colors list([r,g,b], [r,g,b], ...) 
+ if len(masks) == 0: + return + if isinstance(masks, torch.Tensor): + masks = torch.as_tensor(masks, dtype=torch.uint8) + masks = masks.permute(1, 2, 0).contiguous() + masks = masks.cpu().numpy() + # masks = np.ascontiguousarray(masks.transpose(1, 2, 0)) + masks = scale_image(masks.shape[:2], masks, self.im.shape) + masks = np.asarray(masks, dtype=np.float32) + colors = np.asarray(colors, dtype=np.float32) # shape(n,3) + s = masks.sum(2, keepdims=True).clip(0, 1) # add all masks together + masks = (masks @ colors).clip(0, 255) # (h,w,n) @ (n,3) = (h,w,3) + self.im[:] = masks * alpha + self.im * (1 - s * alpha) + else: + if len(masks) == 0: + self.im[:] = im_gpu.permute(1, 2, 0).contiguous().cpu().numpy() * 255 + colors = torch.tensor(colors, device=im_gpu.device, dtype=torch.float32) / 255.0 + colors = colors[:, None, None] # shape(n,1,1,3) + masks = masks.unsqueeze(3) # shape(n,h,w,1) + masks_color = masks * (colors * alpha) # shape(n,h,w,3) + + inv_alph_masks = (1 - masks * alpha).cumprod(0) # shape(n,h,w,1) + mcs = (masks_color * inv_alph_masks).sum(0) * 2 # mask color summand shape(n,h,w,3) + + im_gpu = im_gpu.flip(dims=[0]) # flip channel + im_gpu = im_gpu.permute(1, 2, 0).contiguous() # shape(h,w,3) + im_gpu = im_gpu * inv_alph_masks[-1] + mcs + im_mask = (im_gpu * 255).byte().cpu().numpy() + self.im[:] = scale_image(im_gpu.shape, im_mask, self.im.shape) + if self.pil: + # convert im back to PIL and update draw + self.fromarray(self.im) + def rectangle(self, xy, fill=None, outline=None, width=1): # Add rectangle to image (PIL-only) self.draw.rectangle(xy, fill, outline, width) @@ -124,6 +171,11 @@ class Annotator: xy[1] += 1 - h self.draw.text(xy, text, fill=txt_color, font=self.font) + def fromarray(self, im): + # Update self.im from a numpy array + self.im = im if isinstance(im, Image.Image) else Image.fromarray(im) + self.draw = ImageDraw.Draw(self.im) + def result(self): # Return annotated image as array return np.asarray(self.im) @@ -152,7 +204,6 @@ def 
feature_visualization(x, module_type, stage, n=32, save_dir=Path('runs/detec ax[i].axis('off') LOGGER.info(f'Saving {f}... ({n}/{channels})') - plt.title('Features') plt.savefig(f, dpi=300, bbox_inches='tight') plt.close() np.save(str(f.with_suffix('.npy')), x[0].cpu().numpy()) # npy save @@ -180,26 +231,31 @@ def butter_lowpass_filtfilt(data, cutoff=1500, fs=50000, order=5): return filtfilt(b, a, data) # forward-backward filter -def output_to_target(output): - # Convert model output to target format [batch_id, class_id, x, y, w, h, conf] +def output_to_target(output, max_det=300): + # Convert model output to target format [batch_id, class_id, x, y, w, h, conf] for plotting targets = [] for i, o in enumerate(output): - targets.extend([i, cls, *list(*xyxy2xywh(np.array(box)[None])), conf] for *box, conf, cls in o.cpu().numpy()) - return np.array(targets) + box, conf, cls = o[:max_det, :6].cpu().split((4, 1, 1), 1) + j = torch.full((conf.shape[0], 1), i) + targets.append(torch.cat((j, cls, xyxy2xywh(box), conf), 1)) + return torch.cat(targets, 0).numpy() @threaded -def plot_images(images, targets, paths=None, fname='images.jpg', names=None, max_size=1920, max_subplots=16): +def plot_images(images, targets, paths=None, fname='images.jpg', names=None): # Plot image grid with labels if isinstance(images, torch.Tensor): images = images.cpu().float().numpy() if isinstance(targets, torch.Tensor): targets = targets.cpu().numpy() - if np.max(images[0]) <= 1: - images *= 255 # de-normalise (optional) + + max_size = 1920 # max image size + max_subplots = 16 # max image subplots, i.e. 
4x4 bs, _, h, w = images.shape # batch size, _, height, width bs = min(bs, max_subplots) # limit plot images ns = np.ceil(bs ** 0.5) # number of subplots (square) + if np.max(images[0]) <= 1: + images *= 255 # de-normalise (optional) # Build Image mosaic = np.full((int(ns * h), int(ns * w), 3), 255, dtype=np.uint8) # init @@ -364,7 +420,7 @@ def plot_labels(labels, names=(), save_dir=Path('')): ax[0].set_ylabel('instances') if 0 < len(names) < 30: ax[0].set_xticks(range(len(names))) - ax[0].set_xticklabels(names, rotation=90, fontsize=10) + ax[0].set_xticklabels(list(names.values()), rotation=90, fontsize=10) else: ax[0].set_xlabel('classes') sn.histplot(x, x='x', y='y', ax=ax[2], bins=50, pmax=0.9) @@ -509,7 +565,7 @@ def save_one_box(xyxy, im, file=Path('im.jpg'), gain=1.02, pad=10, square=False, b[:, 2:] = b[:, 2:].max(1)[0].unsqueeze(1) # attempt rectangle to square b[:, 2:] = b[:, 2:] * gain + pad # box wh * gain + pad xyxy = xywh2xyxy(b).long() - clip_coords(xyxy, im.shape) + clip_boxes(xyxy, im.shape) crop = im[int(xyxy[0, 1]):int(xyxy[0, 3]), int(xyxy[0, 0]):int(xyxy[0, 2]), ::(1 if BGR else -1)] if save: file.parent.mkdir(parents=True, exist_ok=True) # make directory diff --git a/utils/segment/__init__.py b/utils/segment/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/utils/segment/augmentations.py b/utils/segment/augmentations.py new file mode 100644 index 0000000..169adde --- /dev/null +++ b/utils/segment/augmentations.py @@ -0,0 +1,104 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +Image augmentation functions +""" + +import math +import random + +import cv2 +import numpy as np + +from ..augmentations import box_candidates +from ..general import resample_segments, segment2box + + +def mixup(im, labels, segments, im2, labels2, segments2): + # Applies MixUp augmentation https://arxiv.org/pdf/1710.09412.pdf + r = np.random.beta(32.0, 32.0) # mixup ratio, alpha=beta=32.0 + im = (im * r + im2 * (1 - r)).astype(np.uint8) + labels = 
np.concatenate((labels, labels2), 0) + segments = np.concatenate((segments, segments2), 0) + return im, labels, segments + + +def random_perspective(im, + targets=(), + segments=(), + degrees=10, + translate=.1, + scale=.1, + shear=10, + perspective=0.0, + border=(0, 0)): + # torchvision.transforms.RandomAffine(degrees=(-10, 10), translate=(.1, .1), scale=(.9, 1.1), shear=(-10, 10)) + # targets = [cls, xyxy] + + height = im.shape[0] + border[0] * 2 # shape(h,w,c) + width = im.shape[1] + border[1] * 2 + + # Center + C = np.eye(3) + C[0, 2] = -im.shape[1] / 2 # x translation (pixels) + C[1, 2] = -im.shape[0] / 2 # y translation (pixels) + + # Perspective + P = np.eye(3) + P[2, 0] = random.uniform(-perspective, perspective) # x perspective (about y) + P[2, 1] = random.uniform(-perspective, perspective) # y perspective (about x) + + # Rotation and Scale + R = np.eye(3) + a = random.uniform(-degrees, degrees) + # a += random.choice([-180, -90, 0, 90]) # add 90deg rotations to small rotations + s = random.uniform(1 - scale, 1 + scale) + # s = 2 ** random.uniform(-scale, scale) + R[:2] = cv2.getRotationMatrix2D(angle=a, center=(0, 0), scale=s) + + # Shear + S = np.eye(3) + S[0, 1] = math.tan(random.uniform(-shear, shear) * math.pi / 180) # x shear (deg) + S[1, 0] = math.tan(random.uniform(-shear, shear) * math.pi / 180) # y shear (deg) + + # Translation + T = np.eye(3) + T[0, 2] = (random.uniform(0.5 - translate, 0.5 + translate) * width) # x translation (pixels) + T[1, 2] = (random.uniform(0.5 - translate, 0.5 + translate) * height) # y translation (pixels) + + # Combined rotation matrix + M = T @ S @ R @ P @ C # order of operations (right to left) is IMPORTANT + if (border[0] != 0) or (border[1] != 0) or (M != np.eye(3)).any(): # image changed + if perspective: + im = cv2.warpPerspective(im, M, dsize=(width, height), borderValue=(114, 114, 114)) + else: # affine + im = cv2.warpAffine(im, M[:2], dsize=(width, height), borderValue=(114, 114, 114)) + + # Visualize + # 
import matplotlib.pyplot as plt + # ax = plt.subplots(1, 2, figsize=(12, 6))[1].ravel() + # ax[0].imshow(im[:, :, ::-1]) # base + # ax[1].imshow(im2[:, :, ::-1]) # warped + + # Transform label coordinates + n = len(targets) + new_segments = [] + if n: + new = np.zeros((n, 4)) + segments = resample_segments(segments) # upsample + for i, segment in enumerate(segments): + xy = np.ones((len(segment), 3)) + xy[:, :2] = segment + xy = xy @ M.T # transform + xy = (xy[:, :2] / xy[:, 2:3] if perspective else xy[:, :2]) # perspective rescale or affine + + # clip + new[i] = segment2box(xy, width, height) + new_segments.append(xy) + + # filter candidates + i = box_candidates(box1=targets[:, 1:5].T * s, box2=new.T, area_thr=0.01) + targets = targets[i] + targets[:, 1:5] = new[i] + new_segments = np.array(new_segments)[i] + + return im, targets, new_segments diff --git a/utils/segment/dataloaders.py b/utils/segment/dataloaders.py new file mode 100644 index 0000000..a63d6ec --- /dev/null +++ b/utils/segment/dataloaders.py @@ -0,0 +1,330 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +Dataloaders +""" + +import os +import random + +import cv2 +import numpy as np +import torch +from torch.utils.data import DataLoader, distributed + +from ..augmentations import augment_hsv, copy_paste, letterbox +from ..dataloaders import InfiniteDataLoader, LoadImagesAndLabels, seed_worker +from ..general import LOGGER, xyn2xy, xywhn2xyxy, xyxy2xywhn +from ..torch_utils import torch_distributed_zero_first +from .augmentations import mixup, random_perspective + +RANK = int(os.getenv('RANK', -1)) + + +def create_dataloader(path, + imgsz, + batch_size, + stride, + single_cls=False, + hyp=None, + augment=False, + cache=False, + pad=0.0, + rect=False, + rank=-1, + workers=8, + image_weights=False, + quad=False, + prefix='', + shuffle=False, + mask_downsample_ratio=1, + overlap_mask=False): + if rect and shuffle: + LOGGER.warning('WARNING ⚠️ --rect is incompatible with DataLoader shuffle, setting 
shuffle=False') + shuffle = False + with torch_distributed_zero_first(rank): # init dataset *.cache only once if DDP + dataset = LoadImagesAndLabelsAndMasks( + path, + imgsz, + batch_size, + augment=augment, # augmentation + hyp=hyp, # hyperparameters + rect=rect, # rectangular batches + cache_images=cache, + single_cls=single_cls, + stride=int(stride), + pad=pad, + image_weights=image_weights, + prefix=prefix, + downsample_ratio=mask_downsample_ratio, + overlap=overlap_mask) + + batch_size = min(batch_size, len(dataset)) + nd = torch.cuda.device_count() # number of CUDA devices + nw = min([os.cpu_count() // max(nd, 1), batch_size if batch_size > 1 else 0, workers]) # number of workers + sampler = None if rank == -1 else distributed.DistributedSampler(dataset, shuffle=shuffle) + loader = DataLoader if image_weights else InfiniteDataLoader # only DataLoader allows for attribute updates + generator = torch.Generator() + generator.manual_seed(6148914691236517205 + RANK) + return loader( + dataset, + batch_size=batch_size, + shuffle=shuffle and sampler is None, + num_workers=nw, + sampler=sampler, + pin_memory=True, + collate_fn=LoadImagesAndLabelsAndMasks.collate_fn4 if quad else LoadImagesAndLabelsAndMasks.collate_fn, + worker_init_fn=seed_worker, + generator=generator, + ), dataset + + +class LoadImagesAndLabelsAndMasks(LoadImagesAndLabels): # for training/testing + + def __init__( + self, + path, + img_size=640, + batch_size=16, + augment=False, + hyp=None, + rect=False, + image_weights=False, + cache_images=False, + single_cls=False, + stride=32, + pad=0, + prefix="", + downsample_ratio=1, + overlap=False, + ): + super().__init__(path, img_size, batch_size, augment, hyp, rect, image_weights, cache_images, single_cls, + stride, pad, prefix) + self.downsample_ratio = downsample_ratio + self.overlap = overlap + + def __getitem__(self, index): + index = self.indices[index] # linear, shuffled, or image_weights + + hyp = self.hyp + mosaic = self.mosaic and 
random.random() < hyp['mosaic'] + masks = [] + if mosaic: + # Load mosaic + img, labels, segments = self.load_mosaic(index) + shapes = None + + # MixUp augmentation + if random.random() < hyp["mixup"]: + img, labels, segments = mixup(img, labels, segments, *self.load_mosaic(random.randint(0, self.n - 1))) + + else: + # Load image + img, (h0, w0), (h, w) = self.load_image(index) + + # Letterbox + shape = self.batch_shapes[self.batch[index]] if self.rect else self.img_size # final letterboxed shape + img, ratio, pad = letterbox(img, shape, auto=False, scaleup=self.augment) + shapes = (h0, w0), ((h / h0, w / w0), pad) # for COCO mAP rescaling + + labels = self.labels[index].copy() + # [array, array, ....], array.shape=(num_points, 2), xyxyxyxy + segments = self.segments[index].copy() + if len(segments): + for i_s in range(len(segments)): + segments[i_s] = xyn2xy( + segments[i_s], + ratio[0] * w, + ratio[1] * h, + padw=pad[0], + padh=pad[1], + ) + if labels.size: # normalized xywh to pixel xyxy format + labels[:, 1:] = xywhn2xyxy(labels[:, 1:], ratio[0] * w, ratio[1] * h, padw=pad[0], padh=pad[1]) + + if self.augment: + img, labels, segments = random_perspective(img, + labels, + segments=segments, + degrees=hyp["degrees"], + translate=hyp["translate"], + scale=hyp["scale"], + shear=hyp["shear"], + perspective=hyp["perspective"]) + + nl = len(labels) # number of labels + if nl: + labels[:, 1:5] = xyxy2xywhn(labels[:, 1:5], w=img.shape[1], h=img.shape[0], clip=True, eps=1e-3) + if self.overlap: + masks, sorted_idx = polygons2masks_overlap(img.shape[:2], + segments, + downsample_ratio=self.downsample_ratio) + masks = masks[None] # (640, 640) -> (1, 640, 640) + labels = labels[sorted_idx] + else: + masks = polygons2masks(img.shape[:2], segments, color=1, downsample_ratio=self.downsample_ratio) + + masks = (torch.from_numpy(masks) if len(masks) else torch.zeros(1 if self.overlap else nl, img.shape[0] // + self.downsample_ratio, img.shape[1] // + self.downsample_ratio)) + # 
TODO: albumentations support + if self.augment: + # Albumentations + # there are some augmentation that won't change boxes and masks, + # so just be it for now. + img, labels = self.albumentations(img, labels) + nl = len(labels) # update after albumentations + + # HSV color-space + augment_hsv(img, hgain=hyp["hsv_h"], sgain=hyp["hsv_s"], vgain=hyp["hsv_v"]) + + # Flip up-down + if random.random() < hyp["flipud"]: + img = np.flipud(img) + if nl: + labels[:, 2] = 1 - labels[:, 2] + masks = torch.flip(masks, dims=[1]) + + # Flip left-right + if random.random() < hyp["fliplr"]: + img = np.fliplr(img) + if nl: + labels[:, 1] = 1 - labels[:, 1] + masks = torch.flip(masks, dims=[2]) + + # Cutouts # labels = cutout(img, labels, p=0.5) + + labels_out = torch.zeros((nl, 6)) + if nl: + labels_out[:, 1:] = torch.from_numpy(labels) + + # Convert + img = img.transpose((2, 0, 1))[::-1] # HWC to CHW, BGR to RGB + img = np.ascontiguousarray(img) + + return (torch.from_numpy(img), labels_out, self.im_files[index], shapes, masks) + + def load_mosaic(self, index): + # YOLOv5 4-mosaic loader. 
Loads 1 image + 3 random images into a 4-image mosaic + labels4, segments4 = [], [] + s = self.img_size + yc, xc = (int(random.uniform(-x, 2 * s + x)) for x in self.mosaic_border) # mosaic center x, y + + # 3 additional image indices + indices = [index] + random.choices(self.indices, k=3) # 3 additional image indices + for i, index in enumerate(indices): + # Load image + img, _, (h, w) = self.load_image(index) + + # place img in img4 + if i == 0: # top left + img4 = np.full((s * 2, s * 2, img.shape[2]), 114, dtype=np.uint8) # base image with 4 tiles + x1a, y1a, x2a, y2a = max(xc - w, 0), max(yc - h, 0), xc, yc # xmin, ymin, xmax, ymax (large image) + x1b, y1b, x2b, y2b = w - (x2a - x1a), h - (y2a - y1a), w, h # xmin, ymin, xmax, ymax (small image) + elif i == 1: # top right + x1a, y1a, x2a, y2a = xc, max(yc - h, 0), min(xc + w, s * 2), yc + x1b, y1b, x2b, y2b = 0, h - (y2a - y1a), min(w, x2a - x1a), h + elif i == 2: # bottom left + x1a, y1a, x2a, y2a = max(xc - w, 0), yc, xc, min(s * 2, yc + h) + x1b, y1b, x2b, y2b = w - (x2a - x1a), 0, w, min(y2a - y1a, h) + elif i == 3: # bottom right + x1a, y1a, x2a, y2a = xc, yc, min(xc + w, s * 2), min(s * 2, yc + h) + x1b, y1b, x2b, y2b = 0, 0, min(w, x2a - x1a), min(y2a - y1a, h) + + img4[y1a:y2a, x1a:x2a] = img[y1b:y2b, x1b:x2b] # img4[ymin:ymax, xmin:xmax] + padw = x1a - x1b + padh = y1a - y1b + + labels, segments = self.labels[index].copy(), self.segments[index].copy() + + if labels.size: + labels[:, 1:] = xywhn2xyxy(labels[:, 1:], w, h, padw, padh) # normalized xywh to pixel xyxy format + segments = [xyn2xy(x, w, h, padw, padh) for x in segments] + labels4.append(labels) + segments4.extend(segments) + + # Concat/clip labels + labels4 = np.concatenate(labels4, 0) + for x in (labels4[:, 1:], *segments4): + np.clip(x, 0, 2 * s, out=x) # clip when using random_perspective() + # img4, labels4 = replicate(img4, labels4) # replicate + + # Augment + img4, labels4, segments4 = copy_paste(img4, labels4, segments4, 
p=self.hyp["copy_paste"]) + img4, labels4, segments4 = random_perspective(img4, + labels4, + segments4, + degrees=self.hyp["degrees"], + translate=self.hyp["translate"], + scale=self.hyp["scale"], + shear=self.hyp["shear"], + perspective=self.hyp["perspective"], + border=self.mosaic_border) # border to remove + return img4, labels4, segments4 + + @staticmethod + def collate_fn(batch): + img, label, path, shapes, masks = zip(*batch) # transposed + batched_masks = torch.cat(masks, 0) + for i, l in enumerate(label): + l[:, 0] = i # add target image index for build_targets() + return torch.stack(img, 0), torch.cat(label, 0), path, shapes, batched_masks + + +def polygon2mask(img_size, polygons, color=1, downsample_ratio=1): + """ + Args: + img_size (tuple): The image size. + polygons (np.ndarray): [N, M], N is the number of polygons, + M is the number of points(Be divided by 2). + """ + mask = np.zeros(img_size, dtype=np.uint8) + polygons = np.asarray(polygons) + polygons = polygons.astype(np.int32) + shape = polygons.shape + polygons = polygons.reshape(shape[0], -1, 2) + cv2.fillPoly(mask, polygons, color=color) + nh, nw = (img_size[0] // downsample_ratio, img_size[1] // downsample_ratio) + # NOTE: fillPoly firstly then resize is trying the keep the same way + # of loss calculation when mask-ratio=1. + mask = cv2.resize(mask, (nw, nh)) + return mask + + +def polygons2masks(img_size, polygons, color, downsample_ratio=1): + """ + Args: + img_size (tuple): The image size. + polygons (list[np.ndarray]): each polygon is [N, M], + N is the number of polygons, + M is the number of points(Be divided by 2). 
+ """ + masks = [] + for si in range(len(polygons)): + mask = polygon2mask(img_size, [polygons[si].reshape(-1)], color, downsample_ratio) + masks.append(mask) + return np.array(masks) + + +def polygons2masks_overlap(img_size, segments, downsample_ratio=1): + """Return a (640, 640) overlap mask.""" + masks = np.zeros((img_size[0] // downsample_ratio, img_size[1] // downsample_ratio), + dtype=np.int32 if len(segments) > 255 else np.uint8) + areas = [] + ms = [] + for si in range(len(segments)): + mask = polygon2mask( + img_size, + [segments[si].reshape(-1)], + downsample_ratio=downsample_ratio, + color=1, + ) + ms.append(mask) + areas.append(mask.sum()) + areas = np.asarray(areas) + index = np.argsort(-areas) + ms = np.array(ms)[index] + for i in range(len(segments)): + mask = ms[i] * (i + 1) + masks = masks + mask + masks = np.clip(masks, a_min=0, a_max=i + 1) + return masks, index diff --git a/utils/segment/general.py b/utils/segment/general.py new file mode 100644 index 0000000..655123b --- /dev/null +++ b/utils/segment/general.py @@ -0,0 +1,134 @@ +import cv2 +import numpy as np +import torch +import torch.nn.functional as F + + +def crop_mask(masks, boxes): + """ + "Crop" predicted masks by zeroing out everything not in the predicted bbox. + Vectorized by Chong (thanks Chong). + + Args: + - masks should be a size [h, w, n] tensor of masks + - boxes should be a size [n, 4] tensor of bbox coords in relative point form + """ + + n, h, w = masks.shape + x1, y1, x2, y2 = torch.chunk(boxes[:, :, None], 4, 1) # x1 shape(1,1,n) + r = torch.arange(w, device=masks.device, dtype=x1.dtype)[None, None, :] # rows shape(1,w,1) + c = torch.arange(h, device=masks.device, dtype=x1.dtype)[None, :, None] # cols shape(h,1,1) + + return masks * ((r >= x1) * (r < x2) * (c >= y1) * (c < y2)) + + +def process_mask_upsample(protos, masks_in, bboxes, shape): + """ + Crop after upsample. 
+ proto_out: [mask_dim, mask_h, mask_w] + out_masks: [n, mask_dim], n is number of masks after nms + bboxes: [n, 4], n is number of masks after nms + shape:input_image_size, (h, w) + + return: h, w, n + """ + + c, mh, mw = protos.shape # CHW + masks = (masks_in @ protos.float().view(c, -1)).sigmoid().view(-1, mh, mw) + masks = F.interpolate(masks[None], shape, mode='bilinear', align_corners=False)[0] # CHW + masks = crop_mask(masks, bboxes) # CHW + return masks.gt_(0.5) + + +def process_mask(protos, masks_in, bboxes, shape, upsample=False): + """ + Crop before upsample. + proto_out: [mask_dim, mask_h, mask_w] + out_masks: [n, mask_dim], n is number of masks after nms + bboxes: [n, 4], n is number of masks after nms + shape:input_image_size, (h, w) + + return: h, w, n + """ + + c, mh, mw = protos.shape # CHW + ih, iw = shape + masks = (masks_in @ protos.float().view(c, -1)).sigmoid().view(-1, mh, mw) # CHW + + downsampled_bboxes = bboxes.clone() + downsampled_bboxes[:, 0] *= mw / iw + downsampled_bboxes[:, 2] *= mw / iw + downsampled_bboxes[:, 3] *= mh / ih + downsampled_bboxes[:, 1] *= mh / ih + + masks = crop_mask(masks, downsampled_bboxes) # CHW + if upsample: + masks = F.interpolate(masks[None], shape, mode='bilinear', align_corners=False)[0] # CHW + return masks.gt_(0.5) + + +def scale_image(im1_shape, masks, im0_shape, ratio_pad=None): + """ + img1_shape: model input shape, [h, w] + img0_shape: origin pic shape, [h, w, 3] + masks: [h, w, num] + """ + # Rescale coordinates (xyxy) from im1_shape to im0_shape + if ratio_pad is None: # calculate from im0_shape + gain = min(im1_shape[0] / im0_shape[0], im1_shape[1] / im0_shape[1]) # gain = old / new + pad = (im1_shape[1] - im0_shape[1] * gain) / 2, (im1_shape[0] - im0_shape[0] * gain) / 2 # wh padding + else: + pad = ratio_pad[1] + top, left = int(pad[1]), int(pad[0]) # y, x + bottom, right = int(im1_shape[0] - pad[1]), int(im1_shape[1] - pad[0]) + + if len(masks.shape) < 2: + raise ValueError(f'"len of masks 
shape" should be 2 or 3, but got {len(masks.shape)}') + masks = masks[top:bottom, left:right] + # masks = masks.permute(2, 0, 1).contiguous() + # masks = F.interpolate(masks[None], im0_shape[:2], mode='bilinear', align_corners=False)[0] + # masks = masks.permute(1, 2, 0).contiguous() + masks = cv2.resize(masks, (im0_shape[1], im0_shape[0])) + + if len(masks.shape) == 2: + masks = masks[:, :, None] + return masks + + +def mask_iou(mask1, mask2, eps=1e-7): + """ + mask1: [N, n] m1 means number of predicted objects + mask2: [M, n] m2 means number of gt objects + Note: n means image_w x image_h + + return: masks iou, [N, M] + """ + intersection = torch.matmul(mask1, mask2.t()).clamp(0) + union = (mask1.sum(1)[:, None] + mask2.sum(1)[None]) - intersection # (area1 + area2) - intersection + return intersection / (union + eps) + + +def masks_iou(mask1, mask2, eps=1e-7): + """ + mask1: [N, n] m1 means number of predicted objects + mask2: [N, n] m2 means number of gt objects + Note: n means image_w x image_h + + return: masks iou, (N, ) + """ + intersection = (mask1 * mask2).sum(1).clamp(0) # (N, ) + union = (mask1.sum(1) + mask2.sum(1))[None] - intersection # (area1 + area2) - intersection + return intersection / (union + eps) + + +def masks2segments(masks, strategy='largest'): + # Convert masks(n,160,160) into segments(n,xy) + segments = [] + for x in masks.int().numpy().astype('uint8'): + c = cv2.findContours(x, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[0] + if strategy == 'concat': # concatenate all segments + c = np.concatenate([x.reshape(-1, 2) for x in c]) + elif strategy == 'largest': # select largest segment + c = np.array(c[np.array([len(x) for x in c]).argmax()]).reshape(-1, 2) + segments.append(c.astype('float32')) + return segments diff --git a/utils/segment/loss.py b/utils/segment/loss.py new file mode 100644 index 0000000..b45b2c2 --- /dev/null +++ b/utils/segment/loss.py @@ -0,0 +1,186 @@ +import torch +import torch.nn as nn +import torch.nn.functional as 
F + +from ..general import xywh2xyxy +from ..loss import FocalLoss, smooth_BCE +from ..metrics import bbox_iou +from ..torch_utils import de_parallel +from .general import crop_mask + + +class ComputeLoss: + # Compute losses + def __init__(self, model, autobalance=False, overlap=False): + self.sort_obj_iou = False + self.overlap = overlap + device = next(model.parameters()).device # get model device + h = model.hyp # hyperparameters + self.device = device + + # Define criteria + BCEcls = nn.BCEWithLogitsLoss(pos_weight=torch.tensor([h['cls_pw']], device=device)) + BCEobj = nn.BCEWithLogitsLoss(pos_weight=torch.tensor([h['obj_pw']], device=device)) + + # Class label smoothing https://arxiv.org/pdf/1902.04103.pdf eqn 3 + self.cp, self.cn = smooth_BCE(eps=h.get('label_smoothing', 0.0)) # positive, negative BCE targets + + # Focal loss + g = h['fl_gamma'] # focal loss gamma + if g > 0: + BCEcls, BCEobj = FocalLoss(BCEcls, g), FocalLoss(BCEobj, g) + + m = de_parallel(model).model[-1] # Detect() module + self.balance = {3: [4.0, 1.0, 0.4]}.get(m.nl, [4.0, 1.0, 0.25, 0.06, 0.02]) # P3-P7 + self.ssi = list(m.stride).index(16) if autobalance else 0 # stride 16 index + self.BCEcls, self.BCEobj, self.gr, self.hyp, self.autobalance = BCEcls, BCEobj, 1.0, h, autobalance + self.na = m.na # number of anchors + self.nc = m.nc # number of classes + self.nl = m.nl # number of layers + self.nm = m.nm # number of masks + self.anchors = m.anchors + self.device = device + + def __call__(self, preds, targets, masks): # predictions, targets, model + p, proto = preds + bs, nm, mask_h, mask_w = proto.shape # batch size, number of masks, mask height, mask width + lcls = torch.zeros(1, device=self.device) + lbox = torch.zeros(1, device=self.device) + lobj = torch.zeros(1, device=self.device) + lseg = torch.zeros(1, device=self.device) + tcls, tbox, indices, anchors, tidxs, xywhn = self.build_targets(p, targets) # targets + + # Losses + for i, pi in enumerate(p): # layer index, layer 
predictions + b, a, gj, gi = indices[i] # image, anchor, gridy, gridx + tobj = torch.zeros(pi.shape[:4], dtype=pi.dtype, device=self.device) # target obj + + n = b.shape[0] # number of targets + if n: + pxy, pwh, _, pcls, pmask = pi[b, a, gj, gi].split((2, 2, 1, self.nc, nm), 1) # subset of predictions + + # Box regression + pxy = pxy.sigmoid() * 2 - 0.5 + pwh = (pwh.sigmoid() * 2) ** 2 * anchors[i] + pbox = torch.cat((pxy, pwh), 1) # predicted box + iou = bbox_iou(pbox, tbox[i], CIoU=True).squeeze() # iou(prediction, target) + lbox += (1.0 - iou).mean() # iou loss + + # Objectness + iou = iou.detach().clamp(0).type(tobj.dtype) + if self.sort_obj_iou: + j = iou.argsort() + b, a, gj, gi, iou = b[j], a[j], gj[j], gi[j], iou[j] + if self.gr < 1: + iou = (1.0 - self.gr) + self.gr * iou + tobj[b, a, gj, gi] = iou # iou ratio + + # Classification + if self.nc > 1: # cls loss (only if multiple classes) + t = torch.full_like(pcls, self.cn, device=self.device) # targets + t[range(n), tcls[i]] = self.cp + lcls += self.BCEcls(pcls, t) # BCE + + # Mask regression + if tuple(masks.shape[-2:]) != (mask_h, mask_w): # downsample + masks = F.interpolate(masks[None], (mask_h, mask_w), mode="nearest")[0] + marea = xywhn[i][:, 2:].prod(1) # mask width, height normalized + mxyxy = xywh2xyxy(xywhn[i] * torch.tensor([mask_w, mask_h, mask_w, mask_h], device=self.device)) + for bi in b.unique(): + j = b == bi # matching index + if self.overlap: + mask_gti = torch.where(masks[bi][None] == tidxs[i][j].view(-1, 1, 1), 1.0, 0.0) + else: + mask_gti = masks[tidxs[i]][j] + lseg += self.single_mask_loss(mask_gti, pmask[j], proto[bi], mxyxy[j], marea[j]) + + obji = self.BCEobj(pi[..., 4], tobj) + lobj += obji * self.balance[i] # obj loss + if self.autobalance: + self.balance[i] = self.balance[i] * 0.9999 + 0.0001 / obji.detach().item() + + if self.autobalance: + self.balance = [x / self.balance[self.ssi] for x in self.balance] + lbox *= self.hyp["box"] + lobj *= self.hyp["obj"] + lcls *= 
self.hyp["cls"] + lseg *= self.hyp["box"] / bs + + loss = lbox + lobj + lcls + lseg + return loss * bs, torch.cat((lbox, lseg, lobj, lcls)).detach() + + def single_mask_loss(self, gt_mask, pred, proto, xyxy, area): + # Mask loss for one image + pred_mask = (pred @ proto.view(self.nm, -1)).view(-1, *proto.shape[1:]) # (n,32) @ (32,80,80) -> (n,80,80) + loss = F.binary_cross_entropy_with_logits(pred_mask, gt_mask, reduction="none") + return (crop_mask(loss, xyxy).mean(dim=(1, 2)) / area).mean() + + def build_targets(self, p, targets): + # Build targets for compute_loss(), input targets(image,class,x,y,w,h) + na, nt = self.na, targets.shape[0] # number of anchors, targets + tcls, tbox, indices, anch, tidxs, xywhn = [], [], [], [], [], [] + gain = torch.ones(8, device=self.device) # normalized to gridspace gain + ai = torch.arange(na, device=self.device).float().view(na, 1).repeat(1, nt) # same as .repeat_interleave(nt) + if self.overlap: + batch = p[0].shape[0] + ti = [] + for i in range(batch): + num = (targets[:, 0] == i).sum() # find number of targets of each image + ti.append(torch.arange(num, device=self.device).float().view(1, num).repeat(na, 1) + 1) # (na, num) + ti = torch.cat(ti, 1) # (na, nt) + else: + ti = torch.arange(nt, device=self.device).float().view(1, nt).repeat(na, 1) + targets = torch.cat((targets.repeat(na, 1, 1), ai[..., None], ti[..., None]), 2) # append anchor indices + + g = 0.5 # bias + off = torch.tensor( + [ + [0, 0], + [1, 0], + [0, 1], + [-1, 0], + [0, -1], # j,k,l,m + # [1, 1], [1, -1], [-1, 1], [-1, -1], # jk,jm,lk,lm + ], + device=self.device).float() * g # offsets + + for i in range(self.nl): + anchors, shape = self.anchors[i], p[i].shape + gain[2:6] = torch.tensor(shape)[[3, 2, 3, 2]] # xyxy gain + + # Match targets to anchors + t = targets * gain # shape(3,n,7) + if nt: + # Matches + r = t[..., 4:6] / anchors[:, None] # wh ratio + j = torch.max(r, 1 / r).max(2)[0] < self.hyp['anchor_t'] # compare + # j = wh_iou(anchors, t[:, 4:6]) > 
model.hyp['iou_t'] # iou(3,n)=wh_iou(anchors(3,2), gwh(n,2)) + t = t[j] # filter + + # Offsets + gxy = t[:, 2:4] # grid xy + gxi = gain[[2, 3]] - gxy # inverse + j, k = ((gxy % 1 < g) & (gxy > 1)).T + l, m = ((gxi % 1 < g) & (gxi > 1)).T + j = torch.stack((torch.ones_like(j), j, k, l, m)) + t = t.repeat((5, 1, 1))[j] + offsets = (torch.zeros_like(gxy)[None] + off[:, None])[j] + else: + t = targets[0] + offsets = 0 + + # Define + bc, gxy, gwh, at = t.chunk(4, 1) # (image, class), grid xy, grid wh, anchors + (a, tidx), (b, c) = at.long().T, bc.long().T # anchors, image, class + gij = (gxy - offsets).long() + gi, gj = gij.T # grid indices + + # Append + indices.append((b, a, gj.clamp_(0, shape[2] - 1), gi.clamp_(0, shape[3] - 1))) # image, anchor, grid + tbox.append(torch.cat((gxy - gij, gwh), 1)) # box + anch.append(anchors[a]) # anchors + tcls.append(c) # class + tidxs.append(tidx) + xywhn.append(torch.cat((gxy, gwh), 1) / gain[2:6]) # xywh normalized + + return tcls, tbox, indices, anch, tidxs, xywhn diff --git a/utils/segment/metrics.py b/utils/segment/metrics.py new file mode 100644 index 0000000..b09ce23 --- /dev/null +++ b/utils/segment/metrics.py @@ -0,0 +1,210 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license +""" +Model validation metrics +""" + +import numpy as np + +from ..metrics import ap_per_class + + +def fitness(x): + # Model fitness as a weighted combination of metrics + w = [0.0, 0.0, 0.1, 0.9, 0.0, 0.0, 0.1, 0.9] + return (x[:, :8] * w).sum(1) + + +def ap_per_class_box_and_mask( + tp_m, + tp_b, + conf, + pred_cls, + target_cls, + plot=False, + save_dir=".", + names=(), +): + """ + Args: + tp_b: tp of boxes. + tp_m: tp of masks. + other arguments see `func: ap_per_class`. 
+ """ + results_boxes = ap_per_class(tp_b, + conf, + pred_cls, + target_cls, + plot=plot, + save_dir=save_dir, + names=names, + prefix="Box")[2:] + results_masks = ap_per_class(tp_m, + conf, + pred_cls, + target_cls, + plot=plot, + save_dir=save_dir, + names=names, + prefix="Mask")[2:] + + results = { + "boxes": { + "p": results_boxes[0], + "r": results_boxes[1], + "ap": results_boxes[3], + "f1": results_boxes[2], + "ap_class": results_boxes[4]}, + "masks": { + "p": results_masks[0], + "r": results_masks[1], + "ap": results_masks[3], + "f1": results_masks[2], + "ap_class": results_masks[4]}} + return results + + +class Metric: + + def __init__(self) -> None: + self.p = [] # (nc, ) + self.r = [] # (nc, ) + self.f1 = [] # (nc, ) + self.all_ap = [] # (nc, 10) + self.ap_class_index = [] # (nc, ) + + @property + def ap50(self): + """AP@0.5 of all classes. + Return: + (nc, ) or []. + """ + return self.all_ap[:, 0] if len(self.all_ap) else [] + + @property + def ap(self): + """AP@0.5:0.95 + Return: + (nc, ) or []. + """ + return self.all_ap.mean(1) if len(self.all_ap) else [] + + @property + def mp(self): + """mean precision of all classes. + Return: + float. + """ + return self.p.mean() if len(self.p) else 0.0 + + @property + def mr(self): + """mean recall of all classes. + Return: + float. + """ + return self.r.mean() if len(self.r) else 0.0 + + @property + def map50(self): + """Mean AP@0.5 of all classes. + Return: + float. + """ + return self.all_ap[:, 0].mean() if len(self.all_ap) else 0.0 + + @property + def map(self): + """Mean AP@0.5:0.95 of all classes. + Return: + float. 
+ """ + return self.all_ap.mean() if len(self.all_ap) else 0.0 + + def mean_results(self): + """Mean of results, return mp, mr, map50, map""" + return (self.mp, self.mr, self.map50, self.map) + + def class_result(self, i): + """class-aware result, return p[i], r[i], ap50[i], ap[i]""" + return (self.p[i], self.r[i], self.ap50[i], self.ap[i]) + + def get_maps(self, nc): + maps = np.zeros(nc) + self.map + for i, c in enumerate(self.ap_class_index): + maps[c] = self.ap[i] + return maps + + def update(self, results): + """ + Args: + results: tuple(p, r, ap, f1, ap_class) + """ + p, r, all_ap, f1, ap_class_index = results + self.p = p + self.r = r + self.all_ap = all_ap + self.f1 = f1 + self.ap_class_index = ap_class_index + + +class Metrics: + """Metric for boxes and masks.""" + + def __init__(self) -> None: + self.metric_box = Metric() + self.metric_mask = Metric() + + def update(self, results): + """ + Args: + results: Dict{'boxes': Dict{}, 'masks': Dict{}} + """ + self.metric_box.update(list(results["boxes"].values())) + self.metric_mask.update(list(results["masks"].values())) + + def mean_results(self): + return self.metric_box.mean_results() + self.metric_mask.mean_results() + + def class_result(self, i): + return self.metric_box.class_result(i) + self.metric_mask.class_result(i) + + def get_maps(self, nc): + return self.metric_box.get_maps(nc) + self.metric_mask.get_maps(nc) + + @property + def ap_class_index(self): + # boxes and masks have the same ap_class_index + return self.metric_box.ap_class_index + + +KEYS = [ + "train/box_loss", + "train/seg_loss", # train loss + "train/obj_loss", + "train/cls_loss", + "metrics/precision(B)", + "metrics/recall(B)", + "metrics/mAP_0.5(B)", + "metrics/mAP_0.5:0.95(B)", # metrics + "metrics/precision(M)", + "metrics/recall(M)", + "metrics/mAP_0.5(M)", + "metrics/mAP_0.5:0.95(M)", # metrics + "val/box_loss", + "val/seg_loss", # val loss + "val/obj_loss", + "val/cls_loss", + "x/lr0", + "x/lr1", + "x/lr2",] + +BEST_KEYS = [ + 
"best/epoch", + "best/precision(B)", + "best/recall(B)", + "best/mAP_0.5(B)", + "best/mAP_0.5:0.95(B)", + "best/precision(M)", + "best/recall(M)", + "best/mAP_0.5(M)", + "best/mAP_0.5:0.95(M)",] diff --git a/utils/segment/plots.py b/utils/segment/plots.py new file mode 100644 index 0000000..9b90900 --- /dev/null +++ b/utils/segment/plots.py @@ -0,0 +1,143 @@ +import contextlib +import math +from pathlib import Path + +import cv2 +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +import torch + +from .. import threaded +from ..general import xywh2xyxy +from ..plots import Annotator, colors + + +@threaded +def plot_images_and_masks(images, targets, masks, paths=None, fname='images.jpg', names=None): + # Plot image grid with labels + if isinstance(images, torch.Tensor): + images = images.cpu().float().numpy() + if isinstance(targets, torch.Tensor): + targets = targets.cpu().numpy() + if isinstance(masks, torch.Tensor): + masks = masks.cpu().numpy().astype(int) + + max_size = 1920 # max image size + max_subplots = 16 # max image subplots, i.e. 
4x4 + bs, _, h, w = images.shape # batch size, _, height, width + bs = min(bs, max_subplots) # limit plot images + ns = np.ceil(bs ** 0.5) # number of subplots (square) + if np.max(images[0]) <= 1: + images *= 255 # de-normalise (optional) + + # Build Image + mosaic = np.full((int(ns * h), int(ns * w), 3), 255, dtype=np.uint8) # init + for i, im in enumerate(images): + if i == max_subplots: # if last batch has fewer images than we expect + break + x, y = int(w * (i // ns)), int(h * (i % ns)) # block origin + im = im.transpose(1, 2, 0) + mosaic[y:y + h, x:x + w, :] = im + + # Resize (optional) + scale = max_size / ns / max(h, w) + if scale < 1: + h = math.ceil(scale * h) + w = math.ceil(scale * w) + mosaic = cv2.resize(mosaic, tuple(int(x * ns) for x in (w, h))) + + # Annotate + fs = int((h + w) * ns * 0.01) # font size + annotator = Annotator(mosaic, line_width=round(fs / 10), font_size=fs, pil=True, example=names) + for i in range(i + 1): + x, y = int(w * (i // ns)), int(h * (i % ns)) # block origin + annotator.rectangle([x, y, x + w, y + h], None, (255, 255, 255), width=2) # borders + if paths: + annotator.text((x + 5, y + 5 + h), text=Path(paths[i]).name[:40], txt_color=(220, 220, 220)) # filenames + if len(targets) > 0: + idx = targets[:, 0] == i + ti = targets[idx] # image targets + + boxes = xywh2xyxy(ti[:, 2:6]).T + classes = ti[:, 1].astype('int') + labels = ti.shape[1] == 6 # labels if no conf column + conf = None if labels else ti[:, 6] # check for confidence presence (label vs pred) + + if boxes.shape[1]: + if boxes.max() <= 1.01: # if normalized with tolerance 0.01 + boxes[[0, 2]] *= w # scale to pixels + boxes[[1, 3]] *= h + elif scale < 1: # absolute coords need scale if image scales + boxes *= scale + boxes[[0, 2]] += x + boxes[[1, 3]] += y + for j, box in enumerate(boxes.T.tolist()): + cls = classes[j] + color = colors(cls) + cls = names[cls] if names else cls + if labels or conf[j] > 0.25: # 0.25 conf thresh + label = f'{cls}' if labels else 
f'{cls} {conf[j]:.1f}' + annotator.box_label(box, label, color=color) + + # Plot masks + if len(masks): + if masks.max() > 1.0: # mean that masks are overlap + image_masks = masks[[i]] # (1, 640, 640) + nl = len(ti) + index = np.arange(nl).reshape(nl, 1, 1) + 1 + image_masks = np.repeat(image_masks, nl, axis=0) + image_masks = np.where(image_masks == index, 1.0, 0.0) + else: + image_masks = masks[idx] + + im = np.asarray(annotator.im).copy() + for j, box in enumerate(boxes.T.tolist()): + if labels or conf[j] > 0.25: # 0.25 conf thresh + color = colors(classes[j]) + mh, mw = image_masks[j].shape + if mh != h or mw != w: + mask = image_masks[j].astype(np.uint8) + mask = cv2.resize(mask, (w, h)) + mask = mask.astype(bool) + else: + mask = image_masks[j].astype(bool) + with contextlib.suppress(Exception): + im[y:y + h, x:x + w, :][mask] = im[y:y + h, x:x + w, :][mask] * 0.4 + np.array(color) * 0.6 + annotator.fromarray(im) + annotator.im.save(fname) # save + + +def plot_results_with_masks(file="path/to/results.csv", dir="", best=True): + # Plot training results.csv. Usage: from utils.plots import *; plot_results('path/to/results.csv') + save_dir = Path(file).parent if file else Path(dir) + fig, ax = plt.subplots(2, 8, figsize=(18, 6), tight_layout=True) + ax = ax.ravel() + files = list(save_dir.glob("results*.csv")) + assert len(files), f"No results.csv files found in {save_dir.resolve()}, nothing to plot." 
+ for f in files: + try: + data = pd.read_csv(f) + index = np.argmax(0.9 * data.values[:, 8] + 0.1 * data.values[:, 7] + 0.9 * data.values[:, 12] + + 0.1 * data.values[:, 11]) + s = [x.strip() for x in data.columns] + x = data.values[:, 0] + for i, j in enumerate([1, 2, 3, 4, 5, 6, 9, 10, 13, 14, 15, 16, 7, 8, 11, 12]): + y = data.values[:, j] + # y[y == 0] = np.nan # don't show zero values + ax[i].plot(x, y, marker=".", label=f.stem, linewidth=2, markersize=2) + if best: + # best + ax[i].scatter(index, y[index], color="r", label=f"best:{index}", marker="*", linewidth=3) + ax[i].set_title(s[j] + f"\n{round(y[index], 5)}") + else: + # last + ax[i].scatter(x[-1], y[-1], color="r", label="last", marker="*", linewidth=3) + ax[i].set_title(s[j] + f"\n{round(y[-1], 5)}") + # if j in [8, 9, 10]: # share train and val loss y axes + # ax[i].get_shared_y_axes().join(ax[i], ax[i - 5]) + except Exception as e: + print(f"Warning: Plotting error for {f}: {e}") + ax[1].legend() + fig.savefig(save_dir / "results.png", dpi=200) + plt.close() diff --git a/utils/torch_utils.py b/utils/torch_utils.py index abf0bbc..9f257d0 100644 --- a/utils/torch_utils.py +++ b/utils/torch_utils.py @@ -47,7 +47,7 @@ def smartCrossEntropyLoss(label_smoothing=0.0): if check_version(torch.__version__, '1.10.0'): return nn.CrossEntropyLoss(label_smoothing=label_smoothing) if label_smoothing > 0: - LOGGER.warning(f'WARNING: label smoothing {label_smoothing} requires torch>=1.10.0') + LOGGER.warning(f'WARNING ⚠️ label smoothing {label_smoothing} requires torch>=1.10.0') return nn.CrossEntropyLoss() @@ -251,6 +251,7 @@ def fuse_conv_and_bn(conv, bn): kernel_size=conv.kernel_size, stride=conv.stride, padding=conv.padding, + dilation=conv.dilation, groups=conv.groups, bias=True).requires_grad_(False).to(conv.weight.device) diff --git a/utils/triton.py b/utils/triton.py new file mode 100644 index 0000000..a94ef0a --- /dev/null +++ b/utils/triton.py @@ -0,0 +1,85 @@ +# YOLOv5 🚀 by Ultralytics, GPL-3.0 license 
+""" Utils to interact with the Triton Inference Server +""" + +import typing +from urllib.parse import urlparse + +import torch + + +class TritonRemoteModel: + """ A wrapper over a model served by the Triton Inference Server. It can + be configured to communicate over GRPC or HTTP. It accepts Torch Tensors + as input and returns them as outputs. + """ + + def __init__(self, url: str): + """ + Keyword arguments: + url: Fully qualified address of the Triton server - for e.g. grpc://localhost:8000 + """ + + parsed_url = urlparse(url) + if parsed_url.scheme == "grpc": + from tritonclient.grpc import InferenceServerClient, InferInput + + self.client = InferenceServerClient(parsed_url.netloc) # Triton GRPC client + model_repository = self.client.get_model_repository_index() + self.model_name = model_repository.models[0].name + self.metadata = self.client.get_model_metadata(self.model_name, as_json=True) + + def create_input_placeholders() -> typing.List[InferInput]: + return [ + InferInput(i['name'], [int(s) for s in i["shape"]], i['datatype']) for i in self.metadata['inputs']] + + else: + from tritonclient.http import InferenceServerClient, InferInput + + self.client = InferenceServerClient(parsed_url.netloc) # Triton HTTP client + model_repository = self.client.get_model_repository_index() + self.model_name = model_repository[0]['name'] + self.metadata = self.client.get_model_metadata(self.model_name) + + def create_input_placeholders() -> typing.List[InferInput]: + return [ + InferInput(i['name'], [int(s) for s in i["shape"]], i['datatype']) for i in self.metadata['inputs']] + + self._create_input_placeholders_fn = create_input_placeholders + + @property + def runtime(self): + """Returns the model runtime""" + return self.metadata.get("backend", self.metadata.get("platform")) + + def __call__(self, *args, **kwargs) -> typing.Union[torch.Tensor, typing.Tuple[torch.Tensor, ...]]: + """ Invokes the model. Parameters can be provided via args or kwargs. 
+ args, if provided, are assumed to match the order of inputs of the model. + kwargs are matched with the model input names. + """ + inputs = self._create_inputs(*args, **kwargs) + response = self.client.infer(model_name=self.model_name, inputs=inputs) + result = [] + for output in self.metadata['outputs']: + tensor = torch.as_tensor(response.as_numpy(output['name'])) + result.append(tensor) + return result[0] if len(result) == 1 else result + + def _create_inputs(self, *args, **kwargs): + args_len, kwargs_len = len(args), len(kwargs) + if not args_len and not kwargs_len: + raise RuntimeError("No inputs provided.") + if args_len and kwargs_len: + raise RuntimeError("Cannot specify args and kwargs at the same time") + + placeholders = self._create_input_placeholders_fn() + if args_len: + if args_len != len(placeholders): + raise RuntimeError(f"Expected {len(placeholders)} inputs, got {args_len}.") + for input, value in zip(placeholders, args): + input.set_data_from_numpy(value.cpu().numpy()) + else: + for input in placeholders: + value = kwargs[input.name] + input.set_data_from_numpy(value.cpu().numpy()) + return placeholders