From 12d9a6d4d27e8e67a5be5390a6b455a24d4b7d23 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Tue, 20 Apr 2021 17:12:02 -0700 Subject: [PATCH 01/61] First timm.bits commit, add initial abstractions, WIP updates to train, val... some of it working --- timm/bits/README.md | 8 + timm/bits/__init__.py | 10 + timm/bits/device_env.py | 58 ++++ timm/bits/device_env_cuda.py | 90 ++++++ timm/bits/device_env_factory.py | 34 +++ timm/bits/device_env_xla.py | 85 ++++++ timm/bits/grad_clipper.py | 36 +++ timm/bits/logger.py | 223 +++++++++++++++ timm/bits/tracker.py | 50 ++++ timm/bits/updater.py | 54 ++++ timm/bits/updater_cuda.py | 36 +++ timm/bits/updater_factory.py | 30 ++ timm/bits/updater_xla.py | 52 ++++ timm/data/collate.py | 38 +++ timm/data/config.py | 8 + timm/data/fetcher.py | 69 +++++ timm/data/loader.py | 152 ++-------- timm/data/prefetcher_cuda.py | 79 ++++++ timm/data/tf_preprocessing.py | 39 ++- timm/data/transforms_factory.py | 10 +- timm/metrics/__init__.py | 4 + timm/metrics/accuracy.py | 112 ++++++++ timm/metrics/precision_recall.py | 117 ++++++++ timm/metrics/scalar_avg.py | 30 ++ timm/metrics/tensor_avg.py | 42 +++ timm/scheduler/scheduler.py | 6 +- train.py | 462 +++++++++++-------------- validate.py | 161 ++++------- 28 files changed, 1553 insertions(+), 542 deletions(-) create mode 100644 timm/bits/README.md create mode 100644 timm/bits/__init__.py create mode 100644 timm/bits/device_env.py create mode 100644 timm/bits/device_env_cuda.py create mode 100644 timm/bits/device_env_factory.py create mode 100644 timm/bits/device_env_xla.py create mode 100644 timm/bits/grad_clipper.py create mode 100644 timm/bits/logger.py create mode 100644 timm/bits/tracker.py create mode 100644 timm/bits/updater.py create mode 100644 timm/bits/updater_cuda.py create mode 100644 timm/bits/updater_factory.py create mode 100644 timm/bits/updater_xla.py create mode 100644 timm/data/collate.py create mode 100644 timm/data/fetcher.py create mode 100644 timm/data/prefetcher_cuda.py create mode 100644 timm/metrics/__init__.py create mode 100644 timm/metrics/accuracy.py create mode 100644 timm/metrics/precision_recall.py create mode 100644 timm/metrics/scalar_avg.py create mode 100644 timm/metrics/tensor_avg.py diff --git a/timm/bits/README.md b/timm/bits/README.md new file mode 100644 index 0000000000..02ba6dc614 --- /dev/null +++ b/timm/bits/README.md @@ -0,0 +1,8 @@ +# Timm Bits + +A collection of reusable components and lightweight abstractions for training and evaluating neural networks. + +This is an early WIP with the primary goal of getting up and running on TPUs first. Expect significant changes, rewrites, additions... + +The current train.py and validate.py scripts are evolving to use the timm.bits components, and they will also change significantly.
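Note (editor's illustration, not part of the patch): the sketch below shows how the `timm.bits` and `timm.metrics` pieces introduced in this commit are intended to compose, mirroring the `train.py` changes further down in this diff. The `model`, `loader`, and `loss_fn` objects, the SGD optimizer, and the `train_one_epoch_sketch` name are assumptions made for the example only; the API itself is an early WIP and may change.

```python
# Hypothetical usage sketch (not part of this patch). Assumes `model`, `loader`,
# and `loss_fn` are provided by the caller, and that `loader` was built with
# timm.data.create_loader so batches arrive on-device and already normalized.
import torch

from timm.bits import initialize_device, create_updater, Logger, Tracker
from timm.metrics import TensorAvg


def train_one_epoch_sketch(model, loader, loss_fn, epoch, log_interval=50):
    # Resolve device / distributed / AMP state once for the process
    # (repeat calls return the cached environment).
    dev_env = initialize_device(amp=True)
    dev_env.to_device(model)

    # Updater wraps backward, optional grad scaling/clipping, and optimizer.step().
    updater = create_updater(torch.optim.SGD(model.parameters(), lr=0.1))

    logger = Logger(experiment_name='bits_example')
    tracker = Tracker()        # data / step / iteration timing
    loss_avg = TensorAvg()     # running loss average kept as an on-device tensor

    model.train()
    tracker.mark_iter()
    for step, (sample, target) in enumerate(loader):
        tracker.mark_iter_data_end()

        with dev_env.autocast():
            output = model(sample)
            loss = loss_fn(output, target)

        updater.apply(loss)    # backward + clip (if configured) + step + zero_grad
        dev_env.mark_step()    # no-op outside XLA
        tracker.mark_iter_step_end()

        loss_avg.update(loss.detach(), sample.shape[0])
        if step % log_interval == 0:
            logger.log_step('train', step=step, end_step=len(loader),
                            epoch=epoch, loss=loss.item())
        tracker.mark_iter()

    return loss_avg.compute()
```

The intent, as reflected in the train.py and validate.py rewrites below, is that initialize_device() selects CUDA vs XLA once, and the Updater / Fetcher abstractions absorb the AMP, gradient-scaling, and device-transfer details that the scripts previously handled inline.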
+ diff --git a/timm/bits/__init__.py b/timm/bits/__init__.py new file mode 100644 index 0000000000..33080c7324 --- /dev/null +++ b/timm/bits/__init__.py @@ -0,0 +1,10 @@ +from .device_env_factory import initialize_device, get_device +from .device_env import DeviceEnv +#from .evaluate import evaluate, eval_step +from .logger import Logger +#from .task import TaskClassify +from .updater import Updater +from .updater_factory import create_updater +from .tracker import Tracker +#from .task_metrics import TaskMetrics, TaskMetricsClassify +#from .train import train_one_epoch, TrainServices, TrainState, TrainCfg, Experiment \ No newline at end of file diff --git a/timm/bits/device_env.py b/timm/bits/device_env.py new file mode 100644 index 0000000000..646d64f478 --- /dev/null +++ b/timm/bits/device_env.py @@ -0,0 +1,58 @@ +import torch +import abc + + +class DeviceEnv(abc.ABC): + + @property + @abc.abstractmethod + def device(self) -> torch.device: + pass + + @property + @abc.abstractmethod + def local_rank(self) -> int: + pass + + @property + @abc.abstractmethod + def global_rank(self) -> int: + pass + + @property + @abc.abstractmethod + def is_distributed(self) -> bool: + pass + + @property + @abc.abstractmethod + def world_size(self) -> int: + pass + + @property + @abc.abstractmethod + def is_master(self) -> bool: + pass + + @property + @abc.abstractmethod + def type(self) -> str: + pass + + @property + @abc.abstractmethod + def autocast(self): + pass + + @abc.abstractmethod + def wrap_distributed(self, *modules): + pass + + @abc.abstractmethod + def to_device(self, *modules: torch.nn.Module): + pass + + #@abc.abstractmethod + def mark_step(self): + # FIXME this is for XLA only, make it common to all devices w/ appropriate no-ops? + pass \ No newline at end of file diff --git a/timm/bits/device_env_cuda.py b/timm/bits/device_env_cuda.py new file mode 100644 index 0000000000..29c4d8f6d7 --- /dev/null +++ b/timm/bits/device_env_cuda.py @@ -0,0 +1,90 @@ +import os +from contextlib import suppress + +import torch +from torch.nn.parallel import DistributedDataParallel + +from .device_env import DeviceEnv + + +def is_cuda_available(): + return torch.cuda.is_available() + + +class DeviceEnvCuda(DeviceEnv): + + def __init__(self, device_idx=None, local_rank=None, amp=False, memory_format=None): + assert torch.cuda.device_count() + torch.backends.cudnn.benchmark = True + self._local_rank = 0 + self._distributed = False + self._world_size = 1 + self._global_rank = 0 + if 'WORLD_SIZE' in os.environ: + self._distributed = int(os.environ['WORLD_SIZE']) > 1 + if self._distributed: + if local_rank is None: + lr = os.environ.get('LOCAL_RANK', None) + if lr is None: + raise RuntimeError( + 'At least one of LOCAL_RANK env variable or local_rank arg must be set to valid integer.') + self._local_rank = lr + else: + self._local_rank = int(local_rank) + self._device = torch.device('cuda:%d' % self._local_rank) + torch.cuda.set_device(self._local_rank) + torch.distributed.init_process_group(backend='nccl', init_method='env://') + self._world_size = torch.distributed.get_world_size() + self._global_rank = torch.distributed.get_rank() + else: + self._device = torch.device('cuda' if device_idx is None else f'cuda:{device_idx}') + self._memory_format = memory_format + if amp: + self._amp = amp + self._autocast = torch.cuda.amp.autocast + else: + self._amp = amp + self._autocast = suppress + + @property + def device(self): + return self._device + + @property + def local_rank(self): + return self._local_rank + + @property 
+ def global_rank(self): + return self._global_rank + + @property + def is_distributed(self): + return self._distributed + + @property + def world_size(self): + return self._world_size + + @property + def is_master(self): + return self._local_rank == 0 + + @property + def type(self) -> str: + return 'cuda' + + @property + def amp(self) -> bool: + return self._amp + + @property + def autocast(self): + return self._autocast + + def wrap_distributed(self, *modules, **kwargs): + return [DistributedDataParallel(m, device_ids=[self._local_rank], **kwargs) for m in modules] + + def to_device(self, *modules: torch.nn.Module): + # FIXME handling dtype / memformat... disable flags, enable flags, diff fn? + return [m.to(device=self._device, memory_format=self._memory_format) for m in modules] diff --git a/timm/bits/device_env_factory.py b/timm/bits/device_env_factory.py new file mode 100644 index 0000000000..f6dc14f3c9 --- /dev/null +++ b/timm/bits/device_env_factory.py @@ -0,0 +1,34 @@ +from .device_env_cuda import DeviceEnvCuda, is_cuda_available +from .device_env_xla import DeviceEnvXla, is_xla_available + +_device_env = None + + +def initialize_device(force_cpu: bool = False, xla_device_type=None, **kwargs): + global _device_env + if _device_env is not None: + # warning + return _device_env + + denv = None + if not force_cpu: + if is_xla_available(xla_device_type): + # XLA supports more than just TPU, but by default will only look at TPU + denv = DeviceEnvXla(**kwargs, xla_device_type=xla_device_type) + elif is_cuda_available(): + denv = DeviceEnvCuda(**kwargs) + + if denv is None: + # FIXME implement CPU support + raise NotImplementedError() + + _device_env = denv + return denv + + +def get_device(): + if _device_env is None: + raise RuntimeError('Please initialize device environment by calling initialize_device first.') + return _device_env + + diff --git a/timm/bits/device_env_xla.py b/timm/bits/device_env_xla.py new file mode 100644 index 0000000000..385b862602 --- /dev/null +++ b/timm/bits/device_env_xla.py @@ -0,0 +1,85 @@ +import os +from contextlib import suppress +import torch + +try: + import torch_xla.core.xla_model as xm + import torch_xla.amp as xa + _HAS_XLA = True +except ImportError as e: + xm = None + _HAS_XLA = False + +from .device_env import DeviceEnv + + +def is_xla_available(xla_device_type=None): + if not _HAS_XLA: + return False + supported_devs = xm.get_xla_supported_devices(devkind=xla_device_type) + print(supported_devs) + return len(supported_devs) >= 1 + + +class DeviceEnvXla(DeviceEnv): + + def __init__(self, xla_device_type=None, device_idx=None, local_rank=0, amp=False): + self._device = xm.xla_device(n=device_idx, devkind=xla_device_type) + print(self._device) + self._local_rank = xm.get_local_ordinal(local_rank) + self._world_size = xm.xrt_world_size() + self._distributed = self._world_size > 1 + self._global_rank = 0 + if self._distributed: + self._global_rank = xm.get_ordinal() + if amp: + self._autocast = xa.autocast + else: + self._autocast = suppress + self._memory_format = None + + @property + def device(self): + return self._device + + @property + def local_rank(self): + return self._local_rank + + @property + def global_rank(self): + return self._global_rank + + @property + def is_distributed(self): + return self._distributed + + @property + def world_size(self): + return self._world_size + + @property + def is_master(self): + return self._global_rank == 0 + + @property + def type(self) -> str: + return 'xla' + + @property + def amp(self) -> bool: + 
return False + + @property + def autocast(self): + return self._autocast + + def wrap_distributed(self, *modules): + # NO-OP + return tuple([m for m in modules]) + + def to_device(self, *modules: torch.nn.Module): + return [m.to(device=self._device, memory_format=self._memory_format) for m in modules] + + def mark_step(self): + xm.mark_step() diff --git a/timm/bits/grad_clipper.py b/timm/bits/grad_clipper.py new file mode 100644 index 0000000000..232f5fc0bb --- /dev/null +++ b/timm/bits/grad_clipper.py @@ -0,0 +1,36 @@ +from functools import partial + +import torch + +from timm.utils.agc import adaptive_clip_grad + + +def get_clip_grad_fn(mode: str = 'norm', norm_type: float = 2.0): + if mode == 'norm': + return partial(torch.nn.utils.clip_grad_norm_, norm_type=norm_type) + elif mode == 'value': + return torch.nn.utils.clip_grad_value_ + elif mode == 'agc': + return partial(adaptive_clip_grad, norm_type=norm_type) + else: + assert False, f"Unknown clip mode ({mode})." + + +def get_clip_parameters(model): + if hasattr(model, 'get_clip_parameters'): + return model.get_clip_parameters() + else: + return model.parameters() + + +class GradClipper: + + def __init__(self, model, clip_value, clip_mode='norm'): + self.model = model + self.clip_fn = get_clip_grad_fn(clip_mode) + self.clip_value = clip_value + self.enabled = True + + def __call__(self): + if self.enabled: + self.clip_fn(get_clip_parameters(self.model), self.clip_value) \ No newline at end of file diff --git a/timm/bits/logger.py b/timm/bits/logger.py new file mode 100644 index 0000000000..2e2cd9da7c --- /dev/null +++ b/timm/bits/logger.py @@ -0,0 +1,223 @@ +import csv +import logging +import os +from collections import OrderedDict +from typing import Optional, Tuple, Dict, Union + +import torch + +_logger = logging.getLogger(__name__) + +try: + from torch.utils.tensorboard import SummaryWriter + HAS_TB = True +except ImportError as e: + HAS_TB = False + +try: + import wandb + HAS_WANDB = True +except ImportError: + HAS_WANDB = False + + +# FIXME old formatting for reference, to remove +# +# def log_eval(batch_idx, last_idx, batch_time, loss, top1, top5, log_suffix=''): +# log_name = 'Test' + log_suffix +# logging.info( +# f'{log_name}: [{batch_idx:>4d}/{last_idx}] ' +# f'Time: {batch_time.smooth_val:.3f} ({batch_time.avg:.3f}) ' +# f'Loss: {loss.smooth_val:>7.4f} ({loss.avg:>6.4f}) ' +# f'Acc@1: {top1.smooth_val:>7.4f} ({top1.avg:>7.4f}) ' +# f'Acc@5: {top5.smooth_val:>7.4f} ({top5.avg:>7.4f})' +# ) +# +# +# def log_train(epoch, step, num_steps, loss, batch_size, batch_time, data_time, lr, world_size=1): +# last_step = max(0, num_steps - 1) +# progress = 100. * step / last_step if last_step else 0. 
+# log_str = f'Train: {epoch} [{step:>4d}/{num_steps} ({progress:>3.0f}%)]' \ +# f' Time: {batch_time.smooth_val:.3f}s, {batch_size * world_size / batch_time.smooth_val:>7.2f}/s' \ +# f' ({batch_time.avg:.3f}s, {batch_size * world_size / batch_time.avg:>7.2f}/s)' \ +# f' Data: {data_time.smooth_val:.3f} ({data_time.avg:.3f})' +# log_str += f' Loss: {loss.smooth_val:>9.6f} ({loss.avg:>6.4f}) ' +# log_str += f' LR: {lr:.3e} ' +# +# if args.save_images and output_dir: +# torchvision.utils.save_image( +# input, +# os.path.join(output_dir, 'train-batch-%d.jpg' % batch_idx), +# padding=0, +# normalize=True) + + +def summary_row_dict(results, index=None, index_name='epoch'): + assert isinstance(results, dict) + row_dict = OrderedDict() + if index is not None: + row_dict[index_name] = index + if not results: + return row_dict + if isinstance(next(iter(results.values())), dict): + # each key in results is a per-phase results dict, flatten by prefixing with phase name + for p, pr in results.keys(): + assert isinstance(dict, pr) + row_dict.update([('_'.join([p, k]), v) for k, v in pr.items()]) + else: + row_dict.update(results) + return row_dict + + +class SummaryCsv: + def __init__(self, output_dir, filename='summary.csv'): + self.output_dir = output_dir + self.filename = os.path.join(output_dir, filename) + self.needs_header = not os.path.exists(self.filename) + + def update(self, row_dict): + with open(self.filename, mode='a') as cf: + dw = csv.DictWriter(cf, fieldnames=row_dict.keys()) + if self.needs_header: # first iteration (epoch == 1 can't be used) + dw.writeheader() + self.needs_header = False + dw.writerow(row_dict) + + +def _add_kwargs(text_update, name_map=None, **kwargs): + def _to_str(key, val): + if isinstance(val, float): + return f'{key}: {val:.4f}' + else: + return f'{key}: {val}' + + def _map_name(key, name_map, capitalize=True): + if name_map is None: + if capitalize: + return key.capitalize() if not key.isupper() else key + else: + return key + return name_map.get(key, None) + + for k, v in kwargs.items(): + if isinstance(v, dict): + # log each k, v of a dict kwarg as separate items + for kk, vv in v.items(): + name = _map_name(kk, name_map) + if not name: + continue + text_update += [_to_str(kk, vv)] + else: + name = _map_name(k, name_map, capitalize=True) + if not name: + continue + text_update += [_to_str(name, v)] + + +class Logger: + + def __init__( + self, + experiment_name=None, + output_dir=None, + logger=None, + log_wandb=False, + hparams=None): + + self.output_dir = output_dir # for tensorboard, csv, console logging to file? + self.logger = logger or logging.getLogger('log') + hparams = hparams or {} + + # Setup CSV writer(s) + if output_dir is not None: + self.csv_writer = SummaryCsv(output_dir=output_dir) + else: + self.csv_writer = None + + # Setup Tensorboard + self.summary_writer = None # FIXME tensorboard + + # Setup W&B + self.wandb_run = None + if log_wandb: + if HAS_WANDB: + self.wandb_run = wandb.init(project=experiment_name, config=hparams) + else: + _logger.warning("You've requested to log metrics to wandb but package not found. 
" + "Metrics not being logged to wandb, try `pip install wandb`") + + # FIXME image save + + def log_step( + self, + phase: str, + step: int, + end_step: Optional[int] = None, + loss: Optional[float] = None, + rate: Optional[float] = None, + epoch: Optional[int] = None, + phase_suffix: str = '', + **kwargs, + ): + """ log train/eval step + """ + phase_title = f'{phase.capitalize()} ({phase_suffix})' if phase_suffix else f'{phase.capitalize()}' + progress = 100. * step / end_step if end_step else 0. + text_update = [ + phase_title, + f'Epoch: {epoch}' if epoch is not None else None, + f'Step: {step}' if end_step is None else None, + f'Step: [{step}/{end_step} ({progress:>3.0f}%)]' if end_step is not None else None, + f'Rate: {rate:.2f}/s' if rate is not None else None, + f'Loss: {loss:.5f}' if loss is not None else None, + ] + _add_kwargs(text_update, **kwargs) + log_str = ' '.join(item for item in text_update if item) + self.logger.info(log_str) + if self.summary_writer is not None: + # FIXME log step values to tensorboard + pass + + def log_phase( + self, + phase: str = 'eval', + epoch: Optional[int] = None, + name_map: Optional[dict] = None, + **kwargs + ): + """log completion of evaluation or training phase + """ + title = [ + f'{phase.capitalize()}', + f'epoch: {epoch}' if epoch is not None else None, + 'completed. ', + ] + title_str = ' '.join(i for i in title if i) + results = [] + _add_kwargs(results, name_map=name_map, **kwargs) + log_str = title_str + ', '.join(item for item in results if item) + self.logger.info(log_str) + + def write_summary( + self, + results: Dict, # Dict or Dict of Dict where first level keys are treated as per-phase results + index: Optional[Union[int, str]] = None, + index_name: str = 'epoch', + ): + """ Log complete results for all phases (typically called at end of epoch) + + Args: + results (dict or dict[dict]): dict of results to write, or multiple dicts where first level + key is the name of results dict for each phase + index: value for row index (typically epoch #) + index_name: name for row index header (typically 'epoch') + """ + + row_dict = summary_row_dict(index=index, index_name=index_name, results=results) + if self.csv_writer: + self.csv_writer.update(row_dict) + if self.wandb_run is not None: + wandb.log(row_dict) + if self.summary_writer: + # FIXME log epoch summaries to tensorboard + pass diff --git a/timm/bits/tracker.py b/timm/bits/tracker.py new file mode 100644 index 0000000000..12e0106b22 --- /dev/null +++ b/timm/bits/tracker.py @@ -0,0 +1,50 @@ +import time +from typing import Optional + +from timm.metrics import ScalarAvgMinMax + + +class Tracker: + + def __init__(self): + self.data_time = ScalarAvgMinMax() # time for data loader to produce batch of samples + self.step_time = ScalarAvgMinMax() # time for model step + self.iter_time = ScalarAvgMinMax() # full iteration time incl. 
data, step, and book-keeping + self.epoch_time = ScalarAvgMinMax() + + self.iter_timestamp: Optional[float] = None + self.prev_timestamp: Optional[float] = None + self.epoch_timestamp: Optional[float] = None + + def _measure_iter(self, ref_timestamp=None): + timestamp = time.perf_counter() + self.prev_timestamp = timestamp + + def mark_iter(self): + timestamp = time.perf_counter() + if self.iter_timestamp is not None: + iter_time = timestamp - self.iter_timestamp + self.iter_time.update(iter_time) + self.iter_timestamp = self.prev_timestamp = timestamp + + def mark_iter_data_end(self): + assert self.prev_timestamp is not None + timestamp = time.perf_counter() + data_time = timestamp - self.prev_timestamp + self.data_time.update(data_time) + self.prev_timestamp = timestamp + + def mark_iter_step_end(self): + assert self.prev_timestamp is not None + timestamp = time.perf_counter() + step_time = timestamp - self.prev_timestamp + self.step_time.update(step_time) + self.prev_timestamp = timestamp + + def mark_epoch(self): + timestamp = time.perf_counter() + if self.epoch_timestamp is not None: + epoch_time = timestamp - self.epoch_timestamp + self.epoch_time.update(epoch_time) + self.epoch_timestamp = timestamp + diff --git a/timm/bits/updater.py b/timm/bits/updater.py new file mode 100644 index 0000000000..6612c8eafd --- /dev/null +++ b/timm/bits/updater.py @@ -0,0 +1,54 @@ +from typing import Callable, Optional, Union + +import torch + +from .grad_clipper import GradClipper + + +class Updater: + + def __init__( + self, + optimizer: torch.optim.Optimizer, + clip_value: Optional[Union[Callable, float]] = None, + clip_mode: str = 'norm'): + + self.optimizer = optimizer + self.clipper: Optional[GradClipper] = None + if clip_value is not None: + if isinstance(clip_value, Callable): + self.clipper = clip_value + else: + GradClipper(clip_value, clip_mode) + self.scaler = None + self.create_graph = getattr(self.optimizer, 'second_order', False) + self.num_accumulated = 0 + self.after_step_closure = False + + def apply(self, loss: torch.Tensor, accumulate=False): + loss.backward(create_graph=self.create_graph) + if self.clipper is not None: + self.clipper() + if not accumulate: + self.optimizer.step() + self.reset() + else: + self.num_accumulated += 1 + + def reset(self): + self.optimizer.zero_grad() + self.num_accumulated = 0 + + def state_dict(self): + state_dict = dict(optimizer=self.optimizer.state_dict()) + if self.scaler is not None: + state_dict['scaler'] = self.scaler.state_dict() + + def load_state_dict(self, state_dict): + if 'optimizer' in state_dict: + self.optimizer.load_state_dict(state_dict['optimizer']) + if 'scaler' in state_dict and self.scaler is not None: + self.scaler.load_state_dict(state_dict['scaler']) + + + diff --git a/timm/bits/updater_cuda.py b/timm/bits/updater_cuda.py new file mode 100644 index 0000000000..799aef00c3 --- /dev/null +++ b/timm/bits/updater_cuda.py @@ -0,0 +1,36 @@ +from typing import Callable, Optional, Union, Any + +import torch + +from .updater import Updater + + +class UpdaterCuda(Updater): + def __init__( + self, + optimizer: torch.optim.Optimizer, + clip_value: Optional[Union[Callable, float]] = None, + clip_mode: str = 'norm', + use_scaler: bool = False, + scaler_kwargs: Any = None, + ): + super().__init__(optimizer=optimizer, clip_value=clip_value, clip_mode=clip_mode) + scaler_kwargs = scaler_kwargs or {} + if use_scaler: + self.scaler = torch.cuda.amp.GradScaler(**scaler_kwargs) + + def apply(self, loss: torch.Tensor, accumulate=False): + if 
self.scaler is not None: + self.scaler.scale(loss).backward(create_graph=self.create_graph) + if self.clipper is not None: + self.scaler.unscale_(self.optimizer) # unscale the gradients of optimizer's assigned params in-place + self.clipper() + if not accumulate: + self.scaler.step(self.optimizer) + self.reset() + else: + self.num_accumulated += 1 + self.scaler.update() + else: + Updater.apply(self, loss, accumulate) + diff --git a/timm/bits/updater_factory.py b/timm/bits/updater_factory.py new file mode 100644 index 0000000000..aba008d2bb --- /dev/null +++ b/timm/bits/updater_factory.py @@ -0,0 +1,30 @@ +from typing import Callable, Optional, Union, Any + +import torch + +from .device_env import DeviceEnv +from .device_env_factory import get_device +from .updater import Updater +from .updater_cuda import UpdaterCuda +from .updater_xla import UpdaterXla + + +def create_updater( + optimizer: torch.optim.Optimizer, + dev_env: Optional[DeviceEnv] = None, + clip_value: Optional[Union[Callable, float]] = None, + clip_mode: str = 'norm', + scaler_kwargs: Any = None) -> Updater: + + if not dev_env: + dev_env = get_device() + + updater_kwargs = dict( + optimizer=optimizer, clip_value=clip_value, clip_mode=clip_mode, scaler_kwargs=scaler_kwargs) + if dev_env.type == 'xla': + return UpdaterXla(**updater_kwargs, use_scaler=dev_env.amp) + elif dev_env.type == 'cuda': + return UpdaterCuda(**updater_kwargs, use_scaler=dev_env.amp) + else: + updater_kwargs.pop('scaler_kwargs', None) + return Updater(**updater_kwargs) diff --git a/timm/bits/updater_xla.py b/timm/bits/updater_xla.py new file mode 100644 index 0000000000..0789f06fc0 --- /dev/null +++ b/timm/bits/updater_xla.py @@ -0,0 +1,52 @@ +from typing import Callable, Optional, Union, Any + +import torch + +try: + import torch_xla.core.xla_model as xm + import torch_xla.amp as xa + _HAS_XLA = True +except ImportError as e: + xm = None + _HAS_XLA = False + +from .updater import Updater + + +class UpdaterXla(Updater): + + def __init__( + self, + optimizer: torch.optim.Optimizer, + clip_value: Optional[Union[Callable, float]] = None, + clip_mode: str = 'norm', + use_scaler: bool = False, + scaler_kwargs: Any = None, + ): + super().__init__(optimizer=optimizer, clip_value=clip_value, clip_mode=clip_mode) + self.after_step_closure = True + if use_scaler: + self.scaler = xa.GradScaler(**scaler_kwargs) + + def apply(self, loss: torch.Tensor, accumulate: bool = False): + if self.scaler is None: + loss.backward(create_graph=self.create_graph) + gradients = xm._fetch_gradients(self.optimizer) + xm.all_reduce('sum', gradients, scale=1.0 / xm.xrt_world_size()) + if self.clipper is not None: + self.clipper() + if not accumulate: + xm.optimizer_step(self.optimizer) + else: + self.scaler.scale(loss).backward(create_graph=self.create_graph) + if self.clipper is not None: + self.scaler.unscale_(self.optimizer) # unscale the gradients of optimizer's assigned params in-place + self.clipper() + if not accumulate: + self.scaler.step(self.optimizer) + self.reset() + self.scaler.update() + + def after_step(self, after_step_fn, *args): + xm.add_step_closure(after_step_fn, *args) + diff --git a/timm/data/collate.py b/timm/data/collate.py new file mode 100644 index 0000000000..a1e37e1f80 --- /dev/null +++ b/timm/data/collate.py @@ -0,0 +1,38 @@ +import numpy as np + +import torch + + +def fast_collate(batch): + """ A fast collation function optimized for uint8 images (np array or torch) and int64 targets (labels)""" + assert isinstance(batch[0], tuple) + batch_size = len(batch) 
+ if isinstance(batch[0][0], tuple): + # This branch 'deinterleaves' and flattens tuples of input tensors into one tensor ordered by position + # such that all tuple of position n will end up in a torch.split(tensor, batch_size) in nth position + inner_tuple_size = len(batch[0][0]) + flattened_batch_size = batch_size * inner_tuple_size + targets = torch.zeros(flattened_batch_size, dtype=torch.int64) + tensor = torch.zeros((flattened_batch_size, *batch[0][0][0].shape), dtype=torch.uint8) + for i in range(batch_size): + assert len(batch[i][0]) == inner_tuple_size # all input tensor tuples must be same length + for j in range(inner_tuple_size): + targets[i + j * batch_size] = batch[i][1] + tensor[i + j * batch_size] += torch.from_numpy(batch[i][0][j]) + return tensor, targets + elif isinstance(batch[0][0], np.ndarray): + targets = torch.tensor([b[1] for b in batch], dtype=torch.int64) + assert len(targets) == batch_size + tensor = torch.zeros((batch_size, *batch[0][0].shape), dtype=torch.uint8) + for i in range(batch_size): + tensor[i] += torch.from_numpy(batch[i][0]) + return tensor, targets + elif isinstance(batch[0][0], torch.Tensor): + targets = torch.tensor([b[1] for b in batch], dtype=torch.int64) + assert len(targets) == batch_size + tensor = torch.zeros((batch_size, *batch[0][0].shape), dtype=torch.uint8) + for i in range(batch_size): + tensor[i].copy_(batch[i][0]) + return tensor, targets + else: + assert False \ No newline at end of file diff --git a/timm/data/config.py b/timm/data/config.py index 38f5689a70..06920d7df8 100644 --- a/timm/data/config.py +++ b/timm/data/config.py @@ -70,6 +70,14 @@ def resolve_data_config(args, default_cfg={}, model=None, use_test_size=False, v elif 'crop_pct' in default_cfg: new_config['crop_pct'] = default_cfg['crop_pct'] + if getattr(args, 'mixup', 0) > 0 \ + or getattr(args, 'cutmix', 0) > 0. \ + or getattr(args, 'cutmix_minmax', None) is not None: + new_config['mixup'] = dict( + mixup_alpha=args.mixup, cutmix_alpha=args.cutmix, cutmix_minmax=args.cutmix_minmax, + prob=args.mixup_prob, switch_prob=args.mixup_switch_prob, mode=args.mixup_mode, + label_smoothing=args.smoothing, num_classes=args.num_classes) + if verbose: _logger.info('Data processing configuration for current model + dataset:') for n, v in new_config.items(): diff --git a/timm/data/fetcher.py b/timm/data/fetcher.py new file mode 100644 index 0000000000..1cbc3fe5be --- /dev/null +++ b/timm/data/fetcher.py @@ -0,0 +1,69 @@ +import torch + +from .constants import * +from .random_erasing import RandomErasing +from. 
mixup import FastCollateMixup + + +class FetcherXla: + def __init__(self): + pass + + +class Fetcher: + + def __init__(self, + loader, + mean=IMAGENET_DEFAULT_MEAN, + std=IMAGENET_DEFAULT_STD, + device=None, + dtype=None, + re_prob=0., + re_mode='const', + re_count=1, + re_num_splits=0): + self.loader = loader + self.mean = torch.tensor([x * 255 for x in mean]).view(1, 3, 1, 1) + self.std = torch.tensor([x * 255 for x in std]).view(1, 3, 1, 1) + self.device = torch.device(device) + self.dtype = dtype or torch.float32 + if device: + self.mean.to(device=device, dtype=self.dtype) + self.std.to(device=device, dtype=self.dtype) + if re_prob > 0.: + self.random_erasing = RandomErasing( + probability=re_prob, mode=re_mode, max_count=re_count, num_splits=re_num_splits) + else: + self.random_erasing = None + + def __iter__(self): + for sample, target in self.loader: + sample = sample.to(device=self.device) + target = target.to(device=self.device) + sample = sample.to(dtype=self.dtype).sub_(self.mean).div_(self.std) + if self.random_erasing is not None: + sample = self.random_erasing(sample) + yield sample, target + + def __len__(self): + return len(self.loader) + + @property + def sampler(self): + return self.loader.sampler + + @property + def dataset(self): + return self.loader.dataset + + @property + def mixup_enabled(self): + if isinstance(self.loader.collate_fn, FastCollateMixup): + return self.loader.collate_fn.mixup_enabled + else: + return False + + @mixup_enabled.setter + def mixup_enabled(self, x): + if isinstance(self.loader.collate_fn, FastCollateMixup): + self.loader.collate_fn.mixup_enabled = x \ No newline at end of file diff --git a/timm/data/loader.py b/timm/data/loader.py index 7614466909..9b15eb02d6 100644 --- a/timm/data/loader.py +++ b/timm/data/loader.py @@ -7,122 +7,15 @@ """ import torch.utils.data -import numpy as np +from timm.bits import get_device + +from .fetcher import Fetcher +from .prefetcher_cuda import PrefetcherCuda +from .collate import fast_collate from .transforms_factory import create_transform from .constants import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD from .distributed_sampler import OrderedDistributedSampler -from .random_erasing import RandomErasing -from .mixup import FastCollateMixup - - -def fast_collate(batch): - """ A fast collation function optimized for uint8 images (np array or torch) and int64 targets (labels)""" - assert isinstance(batch[0], tuple) - batch_size = len(batch) - if isinstance(batch[0][0], tuple): - # This branch 'deinterleaves' and flattens tuples of input tensors into one tensor ordered by position - # such that all tuple of position n will end up in a torch.split(tensor, batch_size) in nth position - inner_tuple_size = len(batch[0][0]) - flattened_batch_size = batch_size * inner_tuple_size - targets = torch.zeros(flattened_batch_size, dtype=torch.int64) - tensor = torch.zeros((flattened_batch_size, *batch[0][0][0].shape), dtype=torch.uint8) - for i in range(batch_size): - assert len(batch[i][0]) == inner_tuple_size # all input tensor tuples must be same length - for j in range(inner_tuple_size): - targets[i + j * batch_size] = batch[i][1] - tensor[i + j * batch_size] += torch.from_numpy(batch[i][0][j]) - return tensor, targets - elif isinstance(batch[0][0], np.ndarray): - targets = torch.tensor([b[1] for b in batch], dtype=torch.int64) - assert len(targets) == batch_size - tensor = torch.zeros((batch_size, *batch[0][0].shape), dtype=torch.uint8) - for i in range(batch_size): - tensor[i] += torch.from_numpy(batch[i][0]) - 
return tensor, targets - elif isinstance(batch[0][0], torch.Tensor): - targets = torch.tensor([b[1] for b in batch], dtype=torch.int64) - assert len(targets) == batch_size - tensor = torch.zeros((batch_size, *batch[0][0].shape), dtype=torch.uint8) - for i in range(batch_size): - tensor[i].copy_(batch[i][0]) - return tensor, targets - else: - assert False - - -class PrefetchLoader: - - def __init__(self, - loader, - mean=IMAGENET_DEFAULT_MEAN, - std=IMAGENET_DEFAULT_STD, - fp16=False, - re_prob=0., - re_mode='const', - re_count=1, - re_num_splits=0): - self.loader = loader - self.mean = torch.tensor([x * 255 for x in mean]).cuda().view(1, 3, 1, 1) - self.std = torch.tensor([x * 255 for x in std]).cuda().view(1, 3, 1, 1) - self.fp16 = fp16 - if fp16: - self.mean = self.mean.half() - self.std = self.std.half() - if re_prob > 0.: - self.random_erasing = RandomErasing( - probability=re_prob, mode=re_mode, max_count=re_count, num_splits=re_num_splits) - else: - self.random_erasing = None - - def __iter__(self): - stream = torch.cuda.Stream() - first = True - - for next_input, next_target in self.loader: - with torch.cuda.stream(stream): - next_input = next_input.cuda(non_blocking=True) - next_target = next_target.cuda(non_blocking=True) - if self.fp16: - next_input = next_input.half().sub_(self.mean).div_(self.std) - else: - next_input = next_input.float().sub_(self.mean).div_(self.std) - if self.random_erasing is not None: - next_input = self.random_erasing(next_input) - - if not first: - yield input, target - else: - first = False - - torch.cuda.current_stream().wait_stream(stream) - input = next_input - target = next_target - - yield input, target - - def __len__(self): - return len(self.loader) - - @property - def sampler(self): - return self.loader.sampler - - @property - def dataset(self): - return self.loader.dataset - - @property - def mixup_enabled(self): - if isinstance(self.loader.collate_fn, FastCollateMixup): - return self.loader.collate_fn.mixup_enabled - else: - return False - - @mixup_enabled.setter - def mixup_enabled(self, x): - if isinstance(self.loader.collate_fn, FastCollateMixup): - self.loader.collate_fn.mixup_enabled = x def create_loader( @@ -130,7 +23,7 @@ def create_loader( input_size, batch_size, is_training=False, - use_prefetcher=True, + dev_env=None, no_aug=False, re_prob=0., re_mode='const', @@ -163,7 +56,7 @@ def create_loader( dataset.transform = create_transform( input_size, is_training=is_training, - use_prefetcher=use_prefetcher, + use_fetcher=True, no_aug=no_aug, scale=scale, ratio=ratio, @@ -183,6 +76,9 @@ def create_loader( separate=num_aug_splits > 0, ) + if dev_env is None: + dev_env = get_device() + sampler = None if distributed and not isinstance(dataset, torch.utils.data.IterableDataset): if is_training: @@ -193,10 +89,9 @@ def create_loader( sampler = OrderedDistributedSampler(dataset) if collate_fn is None: - collate_fn = fast_collate if use_prefetcher else torch.utils.data.dataloader.default_collate + collate_fn = fast_collate loader_class = torch.utils.data.DataLoader - if use_multi_epochs_loader: loader_class = MultiEpochsDataLoader @@ -214,18 +109,19 @@ def create_loader( except TypeError as e: loader_args.pop('persistent_workers') # only in Pytorch 1.7+ loader = loader_class(dataset, **loader_args) - if use_prefetcher: - prefetch_re_prob = re_prob if is_training and not no_aug else 0. 
- loader = PrefetchLoader( - loader, - mean=mean, - std=std, - fp16=fp16, - re_prob=prefetch_re_prob, - re_mode=re_mode, - re_count=re_count, - re_num_splits=re_num_splits - ) + + fetcher_kwargs = dict( + mean=mean, + std=std, + re_prob=re_prob if is_training and not no_aug else 0., + re_mode=re_mode, + re_count=re_count, + re_num_splits=re_num_splits + ) + if dev_env.type == 'cuda': + loader = PrefetcherCuda(loader, **fetcher_kwargs) + else: + loader = Fetcher(loader, device=dev_env.device, **fetcher_kwargs) return loader diff --git a/timm/data/prefetcher_cuda.py b/timm/data/prefetcher_cuda.py new file mode 100644 index 0000000000..4f1c4e10af --- /dev/null +++ b/timm/data/prefetcher_cuda.py @@ -0,0 +1,79 @@ +import torch.cuda + +from .constants import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD +from .mixup import FastCollateMixup +from .random_erasing import RandomErasing + + +class PrefetcherCuda: + + def __init__(self, + loader, + mean=IMAGENET_DEFAULT_MEAN, + std=IMAGENET_DEFAULT_STD, + fp16=False, + re_prob=0., + re_mode='const', + re_count=1, + re_num_splits=0): + self.loader = loader + self.mean = torch.tensor([x * 255 for x in mean]).cuda().view(1, 3, 1, 1) + self.std = torch.tensor([x * 255 for x in std]).cuda().view(1, 3, 1, 1) + self.fp16 = fp16 + if fp16: + self.mean = self.mean.half() + self.std = self.std.half() + if re_prob > 0.: + self.random_erasing = RandomErasing( + probability=re_prob, mode=re_mode, max_count=re_count, num_splits=re_num_splits) + else: + self.random_erasing = None + + def __iter__(self): + stream = torch.cuda.Stream() + first = True + + for next_input, next_target in self.loader: + with torch.cuda.stream(stream): + next_input = next_input.cuda(non_blocking=True) + next_target = next_target.cuda(non_blocking=True) + if self.fp16: + next_input = next_input.half().sub_(self.mean).div_(self.std) + else: + next_input = next_input.float().sub_(self.mean).div_(self.std) + if self.random_erasing is not None: + next_input = self.random_erasing(next_input) + + if not first: + yield input, target + else: + first = False + + torch.cuda.current_stream().wait_stream(stream) + input = next_input + target = next_target + + yield input, target + + def __len__(self): + return len(self.loader) + + @property + def sampler(self): + return self.loader.sampler + + @property + def dataset(self): + return self.loader.dataset + + @property + def mixup_enabled(self): + if isinstance(self.loader.collate_fn, FastCollateMixup): + return self.loader.collate_fn.mixup_enabled + else: + return False + + @mixup_enabled.setter + def mixup_enabled(self, x): + if isinstance(self.loader.collate_fn, FastCollateMixup): + self.loader.collate_fn.mixup_enabled = x \ No newline at end of file diff --git a/timm/data/tf_preprocessing.py b/timm/data/tf_preprocessing.py index 44b4a3af73..0e657a9a4d 100644 --- a/timm/data/tf_preprocessing.py +++ b/timm/data/tf_preprocessing.py @@ -22,7 +22,10 @@ # limitations under the License. 
# ============================================================================== """ImageNet preprocessing for MnasNet.""" -import tensorflow as tf +import tensorflow.compat.v1 as tf +tf.disable_v2_behavior() +tf.compat.v1.disable_eager_execution() + import numpy as np IMAGE_SIZE = 224 @@ -131,6 +134,39 @@ def _decode_and_center_crop(image_bytes, image_size, resize_method): return image +def crop(image_bytes, crop_window): + """Helper function to crop a jpeg or a decoded image.""" + if image_bytes.dtype == tf.dtypes.string: + image = tf.image.decode_and_crop_jpeg(image_bytes, + tf.stack(crop_window), + channels=3) + else: + image = tf.image.crop_to_bounding_box(image_bytes, *crop_window) + return image + + +def _decode_and_resize_then_crop( + image_bytes: tf.Tensor, + image_size, + crop_pct: float = 32, +) -> tf.Tensor: + """Rescales an image to image_size / crop_pct, then center crops.""" + image = tf.image.decode_jpeg(image_bytes, channels=3) + # Scale image to "scaled size" before taking a center crop + if crop_pct > 1.0: # If crop_pct is >1, treat it as num pad pixels (like VGG) + scale_size = tuple([int(x + crop_pct) for x in image_size]) + else: + scale_size = tuple([int(float(x) / crop_pct) for x in image_size]) + image = tf.image.resize(image, scale_size, tf.image.ResizeMethod.BICUBIC) + crop_height = tf.cast(image_size[0], tf.int32) + crop_width = tf.cast(image_size[1], tf.int32) + offset_height = ((scale_size[0] - crop_height) + 1) // 2 + offset_width = ((scale_size[1] - crop_width) + 1) // 2 + crop_window = [offset_height, offset_width, crop_height, crop_width] + image = crop(image, crop_window) + return image + + def _flip(image): """Random horizontal image flip.""" image = tf.image.random_flip_left_right(image) @@ -172,6 +208,7 @@ def preprocess_for_eval(image_bytes, use_bfloat16, image_size=IMAGE_SIZE, interp """ resize_method = tf.image.ResizeMethod.BICUBIC if interpolation == 'bicubic' else tf.image.ResizeMethod.BILINEAR image = _decode_and_center_crop(image_bytes, image_size, resize_method) + #image = _decode_and_resize_then_crop(image_bytes, (image_size, image_size), resize_method) image = tf.reshape(image, [image_size, image_size, 3]) image = tf.image.convert_image_dtype( image, dtype=tf.bfloat16 if use_bfloat16 else tf.float32) diff --git a/timm/data/transforms_factory.py b/timm/data/transforms_factory.py index df6e0de033..16e08a39f4 100644 --- a/timm/data/transforms_factory.py +++ b/timm/data/transforms_factory.py @@ -167,7 +167,7 @@ def transforms_imagenet_eval( def create_transform( input_size, is_training=False, - use_prefetcher=False, + use_fetcher=False, no_aug=False, scale=None, ratio=None, @@ -191,7 +191,7 @@ def create_transform( else: img_size = input_size - if tf_preprocessing and use_prefetcher: + if tf_preprocessing and use_fetcher: assert not separate, "Separate transforms not supported for TF preprocessing" from timm.data.tf_preprocessing import TfPreprocessTransform transform = TfPreprocessTransform( @@ -202,7 +202,7 @@ def create_transform( transform = transforms_noaug_train( img_size, interpolation=interpolation, - use_prefetcher=use_prefetcher, + use_prefetcher=use_fetcher, mean=mean, std=std) elif is_training: @@ -215,7 +215,7 @@ def create_transform( color_jitter=color_jitter, auto_augment=auto_augment, interpolation=interpolation, - use_prefetcher=use_prefetcher, + use_prefetcher=use_fetcher, mean=mean, std=std, re_prob=re_prob, @@ -228,7 +228,7 @@ def create_transform( transform = transforms_imagenet_eval( img_size, interpolation=interpolation, - 
use_prefetcher=use_prefetcher, + use_prefetcher=use_fetcher, mean=mean, std=std, crop_pct=crop_pct) diff --git a/timm/metrics/__init__.py b/timm/metrics/__init__.py new file mode 100644 index 0000000000..93a2773eb3 --- /dev/null +++ b/timm/metrics/__init__.py @@ -0,0 +1,4 @@ +from .accuracy import Accuracy, AccuracyTopK +from .precision_recall import PrecisionRecall +from .scalar_avg import ScalarAvgMinMax +from .tensor_avg import TensorAvg, TensorEma diff --git a/timm/metrics/accuracy.py b/timm/metrics/accuracy.py new file mode 100644 index 0000000000..98aa59eb22 --- /dev/null +++ b/timm/metrics/accuracy.py @@ -0,0 +1,112 @@ +import torch +from typing import Optional, Tuple, Dict + + +class Accuracy(torch.nn.Module): + + def __init__(self, threshold=0.5, multi_label=False): + self.threshold = threshold + self.eps = 1e-8 + self.multi_label = multi_label + + # statistics / counts + self._correct_sum = torch.tensor(0, dtype=torch.long) + self._total_sum = torch.tensor(0, dtype=torch.long) + + def update(self, predictions, target): + raise NotImplemented() + + def reset(self): + self._correct_sum = 0 + self._total_sum = 0 + + @property + def counts(self): + pass + + def compute(self): + raise NotImplemented() + + +class AccuracyTopK(torch.nn.Module): + + def __init__(self, topk=(1, 5), device=None): + super().__init__() + self.eps = 1e-8 + self.device = device + self.topk = topk + self.maxk = max(topk) + + # statistics / counts + self.reset() + + def update(self, predictions: torch.Tensor, target: torch.Tensor): + sorted_indices = predictions.topk(self.maxk, dim=1)[1] + sorted_indices.t_() + correct = sorted_indices.eq(target.reshape(1, -1).expand_as(sorted_indices)) + + batch_size = target.shape[0] + correct_k = {k: correct[:k].reshape(-1).float().sum(0) for k in self.topk} + for k, v in correct_k.items(): + attr = f'_correct_top{k}' + old_v = getattr(self, attr) + setattr(self, attr, old_v + v) + self._total_sum += batch_size + + def reset(self): + for k in self.topk: + setattr(self, f'_correct_top{k}', torch.tensor(0, dtype=torch.float32)) + self._total_sum = torch.tensor(0, dtype=torch.float32) + + @property + def counts(self): + pass + + def compute(self) -> Dict[str, torch.Tensor]: + return {f'top{k}': 100 * getattr(self, f'_correct_top{k}') / self._total_sum for k in self.topk} + + +# +# class AccuracyTopK: +# +# def __init__(self, topk=(1, 5), device=None): +# self.eps = 1e-8 +# self.device = device +# self.topk = topk +# self.maxk = max(topk) +# +# # statistics / counts +# self._correct_sum = None +# self._total_sum = None +# +# def _check_init(self, device): +# to_device = self.device if self.device else device +# if self._correct_sum is None: +# self._correct_sum = {f'top{k}': torch.tensor(0., device=to_device) for k in self.topk} +# if self._total_sum is None: +# self._total_sum = torch.tensor(0, dtype=torch.long, device=to_device) +# +# def update(self, predictions: torch.Tensor, target: torch.Tensor): +# sorted_indices = predictions.topk(self.maxk, dim=1)[1] +# sorted_indices.t_() +# correct = sorted_indices.eq(target.reshape(1, -1).expand_as(sorted_indices)) +# +# batch_size = target.shape[0] +# correct_k = {f'top{k}': correct[:k].reshape(-1).float().sum(0) for k in self.topk} +# self._check_init(device=predictions.device) +# for k, v in correct_k.items(): +# old_v = self._correct_sum[k] +# self._correct_sum[k] = old_v + v +# self._total_sum += batch_size +# +# def reset(self): +# self._correct_sum = None +# self._total_sum = None +# +# @property +# def counts(self): +# pass +# 
+# def compute(self) -> Dict[str, torch.Tensor]: +# assert self._correct_sum is not None and self._total_sum is not None +# return {k: 100 * v / self._total_sum for k, v in self._correct_sum.items()} diff --git a/timm/metrics/precision_recall.py b/timm/metrics/precision_recall.py new file mode 100644 index 0000000000..a5a38f91e0 --- /dev/null +++ b/timm/metrics/precision_recall.py @@ -0,0 +1,117 @@ +import torch +import torch.nn.functional as F + + +class PrecisionRecall: + + def __init__(self, threshold=0.5, multi_label=False, device=None): + self.threshold = threshold + self.device = device + self.multi_label = multi_label + + # statistics + + # the total number of true positive instances under each class + # Shape: (num_classes, ) + self._tp_sum = None + + # the total number of instances + # Shape: (num_classes, ) + self._total_sum = None + + # the total number of instances under each _predicted_ class, + # including true positives and false positives + # Shape: (num_classes, ) + self._pred_sum = None + + # the total number of instances under each _true_ class, + # including true positives and false negatives + # Shape: (num_classes, ) + self._true_sum = None + + self.reset() + + def reset(self): + self._tp_sum = None + self._total_sum = None + self._pred_sum = None + self._true_sum = None + + def update(self, predictions, target): + output_type = predictions.type() + num_classes = predictions.size(-1) + if self.multi_label: + if self.threshold is not None: + predictions = (predictions > self.threshold).type(output_type) + predictions = predictions.t().reshape(num_classes, -1) + target = target.t().reshape(num_classes, -1) + else: + target = F.one_hot(target.view(-1), num_classes=num_classes) + indices = torch.argmax(predictions, dim=1).view(-1) + predictions = F.one_hot(indices, num_classes=num_classes) + # FIXME make sure binary case works + + target = target.type(output_type) + correct = (target * predictions > 0).type(output_type) + pred_positives = predictions.sum(dim=0) + target_positives = target.sum(dim=0) + if correct.sum() == 0: + true_positives = torch.zeros_like(pred_positives) + else: + true_positives = correct.sum(dim=0) + + if self._tp_sum is None: + self._tp_sum = torch.zeros(num_classes, device=self.device) + self._true_sum = torch.zeros(num_classes, device=self.device) + self._pred_sum = torch.zeros(num_classes, device=self.device) + self._total_sum = torch.tensor(0, device=self.device) + + self._tp_sum += true_positives + self._pred_sum += pred_positives + self._true_sum += target_positives + self._total_sum += target.shape[0] + + def counts_as_tuple(self, reduce=False): + tp_sum = self._tp_sum + pred_sum = self._pred_sum + true_sum = self._true_sum + total_sum = self._total_sum + if reduce: + tp_sum = reduce_tensor_sum(tp_sum) + pred_sum = reduce_tensor_sum(pred_sum) + true_sum = reduce_tensor_sum(true_sum) + total_sum = reduce_tensor_sum(total_sum) + return tp_sum, pred_sum, true_sum, total_sum + + def counts(self, reduce=False): + tp_sum, pred_sum, true_sum, total_sum = self.counts_as_tuple(reduce=reduce) + return dict(tp_sum=tp_sum, pred_sum=pred_sum, true_sum=true_sum, total_sum=total_sum) + + def confusion(self, reduce=False): + tp_sum, pred_sum, true_sum, total_sum = self.counts_as_tuple(reduce=reduce) + fp = pred_sum - tp_sum + fn = true_sum - tp_sum + tp = tp_sum + tn = total_sum - tp - fp - fn + return dict(tp=tp, fp=fp, fn=fn, tn=tn) + + def compute(self, fscore_beta=1, average='micro', no_reduce=False, distributed=False): + tp_sum, pred_sum, true_sum, 
total_sum = self.counts_as_tuple(reduce=distributed) + if average == 'micro': + tp_sum = tp_sum.sum() + pred_sum = pred_sum.sum() + true_sum = true_sum.sum() + + precision = tp_sum / pred_sum + recall = tp_sum / true_sum + beta_sq = fscore_beta ** 2 + f1_denom = beta_sq * precision + recall + fscore = (1 + beta_sq) * precision * recall / f1_denom + + if average == 'macro' and not no_reduce: + precision = precision.mean() + recall = recall.mean() + fscore = fscore.mean() + return dict(fscore=fscore, precision=precision, recall=recall) + + return dict(fscore=fscore, precision=precision, recall=recall) diff --git a/timm/metrics/scalar_avg.py b/timm/metrics/scalar_avg.py new file mode 100644 index 0000000000..f5d958077b --- /dev/null +++ b/timm/metrics/scalar_avg.py @@ -0,0 +1,30 @@ +class ScalarAvgMinMax: + + """Computes and stores the average and current value""" + def __init__(self): + self.val = 0 + self.avg = 0 + self.min = None + self.max = None + self.sum = 0 + self.count = 0 + self.reset() + + def reset(self): + self.val = 0 + self.avg = 0 + self.min = None + self.max = None + self.sum = 0 + self.count = 0 + + def update(self, val, n=1): + self.val = val + self.min = val if self.min is None else min(self.min, val) + self.max = val if self.max is None else max(self.max, val) + self.sum += val * n + self.count += n + self.avg = self.sum / self.count + + + diff --git a/timm/metrics/tensor_avg.py b/timm/metrics/tensor_avg.py new file mode 100644 index 0000000000..ac2fb6ed0e --- /dev/null +++ b/timm/metrics/tensor_avg.py @@ -0,0 +1,42 @@ +import torch + + +class TensorAvg: + + """Computes and stores the average and current value""" + def __init__(self): + self.sum = None + self.count = None + self.reset() + + def reset(self): + self.sum = None + self.count = None + + def update(self, val: torch.Tensor, n=1): + if self.sum is None: + self.sum = torch.zeros_like(val) + self.count = torch.tensor(0, dtype=torch.long, device=val.device) + self.sum += (val * n) + self.count += n + + def compute(self): + return self.sum / self.count + + +class TensorEma: + + """Computes and stores the average and current value""" + def __init__(self, smoothing_factor=0.9, init_zero=False): + self.smoothing_factor = smoothing_factor + self.init_zero = init_zero + self.val = None + self.reset() + + def reset(self): + self.val = None + + def update(self, val): + if self.val is None: + self.val = torch.zeros_like(val) if self.init_zero else val.clone() + self.val = (1. 
- self.smoothing_factor) * val + self.smoothing_factor * self.val diff --git a/timm/scheduler/scheduler.py b/timm/scheduler/scheduler.py index 21d51509c8..79e9a5e9e6 100644 --- a/timm/scheduler/scheduler.py +++ b/timm/scheduler/scheduler.py @@ -65,14 +65,16 @@ def get_update_values(self, num_updates: int): return None def step(self, epoch: int, metric: float = None) -> None: - self.metric = metric + if metric is not None: + self.metric = metric values = self.get_epoch_values(epoch) if values is not None: values = self._add_noise(values, epoch) self.update_groups(values) def step_update(self, num_updates: int, metric: float = None): - self.metric = metric + if metric is not None: + self.metric = metric values = self.get_update_values(num_updates) if values is not None: values = self._add_noise(values, num_updates) diff --git a/train.py b/train.py index 85829fc151..f105e525b4 100755 --- a/train.py +++ b/train.py @@ -20,14 +20,14 @@ import os import logging from collections import OrderedDict -from contextlib import suppress from datetime import datetime import torch import torch.nn as nn import torchvision.utils -from torch.nn.parallel import DistributedDataParallel as NativeDDP +from timm.bits import initialize_device, DeviceEnv, create_updater, Updater, Logger, Tracker +from timm.metrics import TensorAvg, AccuracyTopK from timm.data import create_dataset, create_loader, resolve_data_config, Mixup, FastCollateMixup, AugMixDataset from timm.models import create_model, safe_model_name, resume_checkpoint, load_checkpoint,\ convert_splitbn_model, model_parameters @@ -35,32 +35,11 @@ from timm.loss import LabelSmoothingCrossEntropy, SoftTargetCrossEntropy, JsdCrossEntropy from timm.optim import create_optimizer_v2, optimizer_kwargs from timm.scheduler import create_scheduler -from timm.utils import ApexScaler, NativeScaler - -try: - from apex import amp - from apex.parallel import DistributedDataParallel as ApexDDP - from apex.parallel import convert_syncbn_model - has_apex = True -except ImportError: - has_apex = False - -has_native_amp = False -try: - if getattr(torch.cuda.amp, 'autocast') is not None: - has_native_amp = True -except AttributeError: - pass - -try: - import wandb - has_wandb = True -except ImportError: - has_wandb = False - -torch.backends.cudnn.benchmark = True + + _logger = logging.getLogger('train') + # The first arg parser parses out only the --config argument, this argument is used to # load a yaml file containing key-values that override the defaults for the main parser below config_parser = parser = argparse.ArgumentParser(description='Training Config', add_help=False) @@ -254,16 +233,10 @@ help='save images of input bathes every log interval for debugging') parser.add_argument('--amp', action='store_true', default=False, help='use NVIDIA Apex AMP or Native AMP for mixed precision training') -parser.add_argument('--apex-amp', action='store_true', default=False, - help='Use NVIDIA Apex AMP mixed precision') -parser.add_argument('--native-amp', action='store_true', default=False, - help='Use Native Torch AMP mixed precision') parser.add_argument('--channels-last', action='store_true', default=False, help='Use channels_last memory layout') parser.add_argument('--pin-mem', action='store_true', default=False, help='Pin CPU memory in DataLoader for more efficient (sometimes) transfer to GPU.') -parser.add_argument('--no-prefetcher', action='store_true', default=False, - help='disable fast prefetcher') parser.add_argument('--output', default='', type=str, metavar='PATH', 
help='path to output folder (default: none, current dir)') parser.add_argument('--experiment', default='', type=str, metavar='NAME', @@ -301,50 +274,15 @@ def _parse_args(): def main(): setup_default_logging() args, args_text = _parse_args() - - if args.log_wandb: - if has_wandb: - wandb.init(project=args.experiment, config=args) - else: - _logger.warning("You've requested to log metrics to wandb but package not found. " - "Metrics not being logged to wandb, try `pip install wandb`") - - args.prefetcher = not args.no_prefetcher - args.distributed = False - if 'WORLD_SIZE' in os.environ: - args.distributed = int(os.environ['WORLD_SIZE']) > 1 - args.device = 'cuda:0' - args.world_size = 1 - args.rank = 0 # global rank - if args.distributed: - args.device = 'cuda:%d' % args.local_rank - torch.cuda.set_device(args.local_rank) - torch.distributed.init_process_group(backend='nccl', init_method='env://') - args.world_size = torch.distributed.get_world_size() - args.rank = torch.distributed.get_rank() - _logger.info('Training in distributed mode with multiple processes, 1 GPU per process. Process %d, total %d.' - % (args.rank, args.world_size)) + + dev_env = initialize_device(amp=args.amp) + if dev_env.is_distributed: + _logger.info('Training in distributed mode with multiple processes, 1 device per process. Process %d, total %d.' + % (dev_env.global_rank, dev_env.world_size)) else: - _logger.info('Training with a single process on 1 GPUs.') - assert args.rank >= 0 - - # resolve AMP arguments based on PyTorch / Apex availability - use_amp = None - if args.amp: - # `--amp` chooses native amp before apex (APEX ver not actively maintained) - if has_native_amp: - args.native_amp = True - elif has_apex: - args.apex_amp = True - if args.apex_amp and has_apex: - use_amp = 'apex' - elif args.native_amp and has_native_amp: - use_amp = 'native' - elif args.apex_amp or args.native_amp: - _logger.warning("Neither APEX or native Torch AMP is available, using float32. " - "Install NVIDA apex or upgrade to PyTorch 1.6") - - random_seed(args.seed, args.rank) + _logger.info('Training with a single process on 1 device.') + + random_seed(args.seed, dev_env.global_rank) model = create_model( args.model, @@ -364,11 +302,11 @@ def main(): assert hasattr(model, 'num_classes'), 'Model must have `num_classes` attr if not set on cmd line/config.' 
args.num_classes = model.num_classes # FIXME handle model default vs config num_classes more elegantly - if args.local_rank == 0: + if dev_env.is_master: _logger.info( f'Model {safe_model_name(args.model)} created, param count:{sum([m.numel() for m in model.parameters()])}') - data_config = resolve_data_config(vars(args), model=model, verbose=args.local_rank == 0) + data_config = resolve_data_config(vars(args), model=model, verbose=dev_env.is_master) # setup augmentation batch splits for contrastive loss or split bn num_aug_splits = 0 @@ -382,55 +320,33 @@ def main(): model = convert_splitbn_model(model, max(num_aug_splits, 2)) # move model to GPU, enable channels last layout if set - model.cuda() - if args.channels_last: - model = model.to(memory_format=torch.channels_last) + dev_env.to_device(model) # setup synchronized BatchNorm for distributed training - if args.distributed and args.sync_bn: + if dev_env.is_distributed and args.sync_bn: assert not args.split_bn - if has_apex and use_amp != 'native': - # Apex SyncBN preferred unless native amp is activated - model = convert_syncbn_model(model) - else: - model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model) - if args.local_rank == 0: + model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model) + if dev_env.is_master: _logger.info( 'Converted model to use Synchronized BatchNorm. WARNING: You may have issues if using ' 'zero initialized BN layers (enabled by default for ResNets) while sync-bn enabled.') if args.torchscript: - assert not use_amp == 'apex', 'Cannot use APEX AMP with torchscripted model' assert not args.sync_bn, 'Cannot use SyncBatchNorm with torchscripted model' model = torch.jit.script(model) - optimizer = create_optimizer_v2(model, **optimizer_kwargs(cfg=args)) - - # setup automatic mixed-precision (AMP) loss scaling and op casting - amp_autocast = suppress # do nothing - loss_scaler = None - if use_amp == 'apex': - model, optimizer = amp.initialize(model, optimizer, opt_level='O1') - loss_scaler = ApexScaler() - if args.local_rank == 0: - _logger.info('Using NVIDIA APEX AMP. Training in mixed precision.') - elif use_amp == 'native': - amp_autocast = torch.cuda.amp.autocast - loss_scaler = NativeScaler() - if args.local_rank == 0: - _logger.info('Using native Torch AMP. Training in mixed precision.') - else: - if args.local_rank == 0: - _logger.info('AMP not enabled. 
Training in float32.') + updater = create_updater( + create_optimizer_v2(model, **optimizer_kwargs(cfg=args)), + clip_value=args.clip_grad, clip_mode=args.clip_mode) # optionally resume from a checkpoint resume_epoch = None if args.resume: resume_epoch = resume_checkpoint( model, args.resume, - optimizer=None if args.no_resume_opt else optimizer, - loss_scaler=None if args.no_resume_opt else loss_scaler, - log_info=args.local_rank == 0) + optimizer=None if args.no_resume_opt else updater.optimizer, + loss_scaler=None if args.no_resume_opt else updater.scaler, + log_info=dev_env.is_master) # setup exponential moving average of model weights, SWA could be used here too model_ema = None @@ -442,20 +358,14 @@ def main(): load_checkpoint(model_ema.module, args.resume, use_ema=True) # setup distributed training - if args.distributed: - if has_apex and use_amp != 'native': - # Apex DDP preferred unless native amp is activated - if args.local_rank == 0: - _logger.info("Using NVIDIA APEX DistributedDataParallel.") - model = ApexDDP(model, delay_allreduce=True) - else: - if args.local_rank == 0: - _logger.info("Using native Torch DistributedDataParallel.") - model = NativeDDP(model, device_ids=[args.local_rank]) # can use device str in Torch >= 1.1 - # NOTE: EMA model does not need to be wrapped by DDP + if dev_env.is_distributed: + if dev_env.is_master: + _logger.info("Distributing model.") + model = dev_env.wrap_distributed(model) + # NOTE: EMA model does not need to be wrapped by DDP # setup learning rate schedule and starting epoch - lr_scheduler, num_epochs = create_scheduler(args, optimizer) + lr_scheduler, num_epochs = create_scheduler(args, updater.optimizer) start_epoch = 0 if args.start_epoch is not None: # a specified start_epoch will always override the resume epoch @@ -465,7 +375,7 @@ def main(): if lr_scheduler is not None and start_epoch > 0: lr_scheduler.step(start_epoch) - if args.local_rank == 0: + if dev_env.is_master: _logger.info('Scheduled epochs: {}'.format(num_epochs)) # create the train and eval datasets @@ -478,18 +388,14 @@ def main(): # setup mixup / cutmix collate_fn = None - mixup_fn = None mixup_active = args.mixup > 0 or args.cutmix > 0. 
or args.cutmix_minmax is not None if mixup_active: mixup_args = dict( mixup_alpha=args.mixup, cutmix_alpha=args.cutmix, cutmix_minmax=args.cutmix_minmax, prob=args.mixup_prob, switch_prob=args.mixup_switch_prob, mode=args.mixup_mode, label_smoothing=args.smoothing, num_classes=args.num_classes) - if args.prefetcher: - assert not num_aug_splits # collate conflict (need to support deinterleaving in collate mixup) - collate_fn = FastCollateMixup(**mixup_args) - else: - mixup_fn = Mixup(**mixup_args) + assert not num_aug_splits # collate conflict (need to support deinterleaving in collate mixup) + collate_fn = FastCollateMixup(**mixup_args) # wrap dataset in AugMix helper if num_aug_splits > 1: @@ -504,7 +410,6 @@ def main(): input_size=data_config['input_size'], batch_size=args.batch_size, is_training=True, - use_prefetcher=args.prefetcher, no_aug=args.no_aug, re_prob=args.reprob, re_mode=args.remode, @@ -521,7 +426,7 @@ def main(): mean=data_config['mean'], std=data_config['std'], num_workers=args.workers, - distributed=args.distributed, + distributed=dev_env.is_distributed, collate_fn=collate_fn, pin_memory=args.pin_mem, use_multi_epochs_loader=args.use_multi_epochs_loader @@ -532,12 +437,11 @@ def main(): input_size=data_config['input_size'], batch_size=args.validation_batch_size_multiplier * args.batch_size, is_training=False, - use_prefetcher=args.prefetcher, interpolation=data_config['interpolation'], mean=data_config['mean'], std=data_config['std'], num_workers=args.workers, - distributed=args.distributed, + distributed=dev_env.is_distributed, crop_pct=data_config['crop_pct'], pin_memory=args.pin_mem, ) @@ -545,23 +449,24 @@ def main(): # setup loss function if args.jsd: assert num_aug_splits > 1 # JSD only valid with aug splits set - train_loss_fn = JsdCrossEntropy(num_splits=num_aug_splits, smoothing=args.smoothing).cuda() + train_loss_fn = JsdCrossEntropy(num_splits=num_aug_splits, smoothing=args.smoothing) elif mixup_active: # smoothing is handled with mixup target transform - train_loss_fn = SoftTargetCrossEntropy().cuda() + train_loss_fn = SoftTargetCrossEntropy() elif args.smoothing: - train_loss_fn = LabelSmoothingCrossEntropy(smoothing=args.smoothing).cuda() + train_loss_fn = LabelSmoothingCrossEntropy(smoothing=args.smoothing) else: - train_loss_fn = nn.CrossEntropyLoss().cuda() - validate_loss_fn = nn.CrossEntropyLoss().cuda() + train_loss_fn = nn.CrossEntropyLoss() + validate_loss_fn = nn.CrossEntropyLoss() + dev_env.to_device(train_loss_fn, validate_loss_fn) # setup checkpoint saver and eval metric tracking eval_metric = args.eval_metric best_metric = None best_epoch = None saver = None - output_dir = '' - if args.local_rank == 0: + output_dir = None + if dev_env.is_master: if args.experiment: exp_name = args.experiment else: @@ -573,42 +478,48 @@ def main(): output_dir = get_outdir(args.output if args.output else './output/train', exp_name) decreasing = True if eval_metric == 'loss' else False saver = CheckpointSaver( - model=model, optimizer=optimizer, args=args, model_ema=model_ema, amp_scaler=loss_scaler, + model=model, optimizer=updater.optimizer, args=args, model_ema=model_ema, amp_scaler=updater.scaler, checkpoint_dir=output_dir, recovery_dir=output_dir, decreasing=decreasing, max_history=args.checkpoint_hist) with open(os.path.join(output_dir, 'args.yaml'), 'w') as f: f.write(args_text) + logger = Logger(output_dir=output_dir, logger=_logger, hparams=vars(args)) + try: for epoch in range(start_epoch, num_epochs): - if args.distributed and 
hasattr(loader_train.sampler, 'set_epoch'): + if dev_env.is_distributed and hasattr(loader_train.sampler, 'set_epoch'): loader_train.sampler.set_epoch(epoch) + if args.mixup_off_epoch and epoch >= args.mixup_off_epoch: + if loader_train.mixup_enabled: + loader_train.mixup_enabled = False train_metrics = train_one_epoch( - epoch, model, loader_train, optimizer, train_loss_fn, args, - lr_scheduler=lr_scheduler, saver=saver, output_dir=output_dir, - amp_autocast=amp_autocast, loss_scaler=loss_scaler, model_ema=model_ema, mixup_fn=mixup_fn) + epoch, model, loader_train, updater, train_loss_fn, dev_env, + lr_scheduler=lr_scheduler, saver=saver, logger=logger, model_ema=model_ema, + log_interval=args.log_interval, recovery_interval=args.recovery_interval) - if args.distributed and args.dist_bn in ('broadcast', 'reduce'): - if args.local_rank == 0: + if dev_env.is_distributed and args.dist_bn in ('broadcast', 'reduce'): + if dev_env.is_master: _logger.info("Distributing BatchNorm running means and vars") - distribute_bn(model, args.world_size, args.dist_bn == 'reduce') + distribute_bn(model, dev_env.world_size, args.dist_bn == 'reduce') - eval_metrics = validate(model, loader_eval, validate_loss_fn, args, amp_autocast=amp_autocast) + eval_metrics = evaluate(model, loader_eval, validate_loss_fn, dev_env, logger=logger) if model_ema is not None and not args.model_ema_force_cpu: - if args.distributed and args.dist_bn in ('broadcast', 'reduce'): - distribute_bn(model_ema, args.world_size, args.dist_bn == 'reduce') - ema_eval_metrics = validate( - model_ema.module, loader_eval, validate_loss_fn, args, amp_autocast=amp_autocast, log_suffix=' (EMA)') + if dev_env.is_distributed and args.dist_bn in ('broadcast', 'reduce'): + distribute_bn(model_ema, dev_env.world_size, args.dist_bn == 'reduce') + + ema_eval_metrics = evaluate( + model_ema.module, loader_eval, validate_loss_fn, dev_env, + logger=logger, phase_suffix='EMA') eval_metrics = ema_eval_metrics if lr_scheduler is not None: # step LR for next epoch lr_scheduler.step(epoch + 1, eval_metrics[eval_metric]) - update_summary( - epoch, train_metrics, eval_metrics, os.path.join(output_dir, 'summary.csv'), - write_header=best_metric is None, log_wandb=args.log_wandb and has_wandb) + if logger is not None: + logger.write_summary(index=epoch, results=dict(train=train_metrics, eval=eval_metric)) if saver is not None: # save proper checkpoint with eval metric @@ -622,175 +533,128 @@ def main(): def train_one_epoch( - epoch, model, loader, optimizer, loss_fn, args, - lr_scheduler=None, saver=None, output_dir='', amp_autocast=suppress, - loss_scaler=None, model_ema=None, mixup_fn=None): - - if args.mixup_off_epoch and epoch >= args.mixup_off_epoch: - if args.prefetcher and loader.mixup_enabled: - loader.mixup_enabled = False - elif mixup_fn is not None: - mixup_fn.mixup_enabled = False - - second_order = hasattr(optimizer, 'is_second_order') and optimizer.is_second_order - batch_time_m = AverageMeter() - data_time_m = AverageMeter() - losses_m = AverageMeter() + epoch: int, + model: nn.Module, + loader, + updater: Updater, + loss_fn: nn.Module, + dev_env: DeviceEnv, + lr_scheduler=None, + saver: CheckpointSaver = None, + logger: Logger = None, + model_ema: nn.Module = None, + log_interval: int = 50, + recovery_interval: int = 0, +): + tracker = Tracker() + losses_m = TensorAvg() model.train() - end = time.time() - last_idx = len(loader) - 1 + end_idx = len(loader) - 1 num_updates = epoch * len(loader) - for batch_idx, (input, target) in enumerate(loader): - 
last_batch = batch_idx == last_idx - data_time_m.update(time.time() - end) - if not args.prefetcher: - input, target = input.cuda(), target.cuda() - if mixup_fn is not None: - input, target = mixup_fn(input, target) - if args.channels_last: - input = input.contiguous(memory_format=torch.channels_last) - - with amp_autocast(): - output = model(input) + batch_size = 0 + tracker.mark_iter() + for step_idx, (sample, target) in enumerate(loader): + tracker.mark_iter_data_end() + last_step = step_idx == end_idx + batch_size = max(batch_size, sample.shape[0]) + + with dev_env.autocast(): + output = model(sample) loss = loss_fn(output, target) - if not args.distributed: - losses_m.update(loss.item(), input.size(0)) - - optimizer.zero_grad() - if loss_scaler is not None: - loss_scaler( - loss, optimizer, - clip_grad=args.clip_grad, clip_mode=args.clip_mode, - parameters=model_parameters(model, exclude_head='agc' in args.clip_mode), - create_graph=second_order) - else: - loss.backward(create_graph=second_order) - if args.clip_grad is not None: - dispatch_clip_grad( - model_parameters(model, exclude_head='agc' in args.clip_mode), - value=args.clip_grad, mode=args.clip_mode) - optimizer.step() + updater.reset() + updater.apply(loss) + dev_env.mark_step() # FIXME + tracker.mark_iter_step_end() + losses_m.update(loss, sample.size(0)) if model_ema is not None: model_ema.update(model) - torch.cuda.synchronize() num_updates += 1 - batch_time_m.update(time.time() - end) - if last_batch or batch_idx % args.log_interval == 0: - lrl = [param_group['lr'] for param_group in optimizer.param_groups] + if last_step or (step_idx + 1) % log_interval == 0: + lrl = [param_group['lr'] for param_group in updater.optimizer.param_groups] lr = sum(lrl) / len(lrl) - if args.distributed: - reduced_loss = reduce_tensor(loss.data, args.world_size) - losses_m.update(reduced_loss.item(), input.size(0)) - - if args.local_rank == 0: - _logger.info( - 'Train: {} [{:>4d}/{} ({:>3.0f}%)] ' - 'Loss: {loss.val:>9.6f} ({loss.avg:>6.4f}) ' - 'Time: {batch_time.val:.3f}s, {rate:>7.2f}/s ' - '({batch_time.avg:.3f}s, {rate_avg:>7.2f}/s) ' - 'LR: {lr:.3e} ' - 'Data: {data_time.val:.3f} ({data_time.avg:.3f})'.format( - epoch, - batch_idx, len(loader), - 100. 
* batch_idx / last_idx, - loss=losses_m, - batch_time=batch_time_m, - rate=input.size(0) * args.world_size / batch_time_m.val, - rate_avg=input.size(0) * args.world_size / batch_time_m.avg, - lr=lr, - data_time=data_time_m)) - - if args.save_images and output_dir: - torchvision.utils.save_image( - input, - os.path.join(output_dir, 'train-batch-%d.jpg' % batch_idx), - padding=0, - normalize=True) - - if saver is not None and args.recovery_interval and ( - last_batch or (batch_idx + 1) % args.recovery_interval == 0): - saver.save_recovery(epoch, batch_idx=batch_idx) + if dev_env.is_master and logger is not None: + loss_avg = losses_m.compute() + logger.log_step( + 'Train', + step=step_idx, + end_step=end_idx, + loss=loss_avg.item(), + rate=(dev_env.world_size * batch_size) / tracker.iter_time.avg, + lr=lr, + ) + + if saver is not None and recovery_interval and (last_step or (step_idx + 1) % recovery_interval == 0): + saver.save_recovery(epoch, batch_idx=step_idx) if lr_scheduler is not None: - lr_scheduler.step_update(num_updates=num_updates, metric=losses_m.avg) + lr_scheduler.step_update(num_updates=num_updates) - end = time.time() + tracker.mark_iter() # end for - if hasattr(optimizer, 'sync_lookahead'): - optimizer.sync_lookahead() + if hasattr(updater.optimizer, 'sync_lookahead'): + updater.optimizer.sync_lookahead() + + return OrderedDict([('loss', losses_m.compute().item())]) - return OrderedDict([('loss', losses_m.avg)]) +def evaluate( + model: nn.Module, + loader, + loss_fn: nn.Module, + dev_env: DeviceEnv, + logger: Logger, + phase_suffix: str = '', + log_interval: int = 10, +): -def validate(model, loader, loss_fn, args, amp_autocast=suppress, log_suffix=''): - batch_time_m = AverageMeter() - losses_m = AverageMeter() - top1_m = AverageMeter() - top5_m = AverageMeter() + tracker = Tracker() + losses_m = TensorAvg() + accuracy_m = AccuracyTopK() model.eval() - end = time.time() - last_idx = len(loader) - 1 + end_idx = len(loader) - 1 + tracker.mark_iter() with torch.no_grad(): - for batch_idx, (input, target) in enumerate(loader): - last_batch = batch_idx == last_idx - if not args.prefetcher: - input = input.cuda() - target = target.cuda() - if args.channels_last: - input = input.contiguous(memory_format=torch.channels_last) - - with amp_autocast(): - output = model(input) - if isinstance(output, (tuple, list)): - output = output[0] - - # augmentation reduction - reduce_factor = args.tta - if reduce_factor > 1: - output = output.unfold(0, reduce_factor, reduce_factor).mean(dim=2) - target = target[0:target.size(0):reduce_factor] - - loss = loss_fn(output, target) - acc1, acc5 = accuracy(output, target, topk=(1, 5)) - - if args.distributed: - reduced_loss = reduce_tensor(loss.data, args.world_size) - acc1 = reduce_tensor(acc1, args.world_size) - acc5 = reduce_tensor(acc5, args.world_size) - else: - reduced_loss = loss.data - - torch.cuda.synchronize() - - losses_m.update(reduced_loss.item(), input.size(0)) - top1_m.update(acc1.item(), output.size(0)) - top5_m.update(acc5.item(), output.size(0)) - - batch_time_m.update(time.time() - end) - end = time.time() - if args.local_rank == 0 and (last_batch or batch_idx % args.log_interval == 0): - log_name = 'Test' + log_suffix - _logger.info( - '{0}: [{1:>4d}/{2}] ' - 'Time: {batch_time.val:.3f} ({batch_time.avg:.3f}) ' - 'Loss: {loss.val:>7.4f} ({loss.avg:>6.4f}) ' - 'Acc@1: {top1.val:>7.4f} ({top1.avg:>7.4f}) ' - 'Acc@5: {top5.val:>7.4f} ({top5.avg:>7.4f})'.format( - log_name, batch_idx, last_idx, batch_time=batch_time_m, - loss=losses_m, 
top1=top1_m, top5=top5_m)) - - metrics = OrderedDict([('loss', losses_m.avg), ('top1', top1_m.avg), ('top5', top5_m.avg)]) - - return metrics + for step_idx, (sample, target) in enumerate(loader): + tracker.mark_iter_data_end() + last_step = step_idx == end_idx + + with dev_env.autocast(): + output = model(sample) + if isinstance(output, (tuple, list)): + output = output[0] + loss = loss_fn(output, target) + + dev_env.mark_step() # FIXME + tracker.mark_iter_step_end() + losses_m.update(loss, output.size(0)) + accuracy_m.update(output, target) + + if dev_env.is_master and (last_step or step_idx % log_interval == 0): + top1, top5 = accuracy_m.compute().values() + loss_avg = losses_m.compute() + logger.log_step( + 'Eval', + step=step_idx, + num_steps=end_idx, + loss=loss_avg.item(), + top1=top1.item(), + top5=top5.item(), + phase_suffix=phase_suffix, + ) + tracker.mark_iter() + + top1, top5 = accuracy_m.compute().values() + results = OrderedDict([('loss', losses_m.compute().item()), ('top1', top1.item()), ('top5', top5.item())]) + return results if __name__ == '__main__': diff --git a/validate.py b/validate.py index 74f8f435c0..add2346968 100755 --- a/validate.py +++ b/validate.py @@ -17,27 +17,14 @@ import torch.nn as nn import torch.nn.parallel from collections import OrderedDict -from contextlib import suppress +from timm.bits import initialize_device, Tracker, Logger +from timm.metrics import AccuracyTopK, TensorAvg from timm.models import create_model, apply_test_time_pool, load_checkpoint, is_model, list_models from timm.data import create_dataset, create_loader, resolve_data_config, RealLabelsImagenet -from timm.utils import accuracy, AverageMeter, natural_key, setup_default_logging, set_jit_legacy - -has_apex = False -try: - from apex import amp - has_apex = True -except ImportError: - pass - -has_native_amp = False -try: - if getattr(torch.cuda.amp, 'autocast') is not None: - has_native_amp = True -except AttributeError: - pass - -torch.backends.cudnn.benchmark = True +from timm.utils import natural_key, setup_default_logging + + _logger = logging.getLogger('validate') @@ -72,36 +59,28 @@ help='path to class to idx mapping file (default: "")') parser.add_argument('--gp', default=None, type=str, metavar='POOL', help='Global pool type, one of (fast, avg, max, avgmax, avgmaxc). 
Model default if None.') -parser.add_argument('--log-freq', default=10, type=int, +parser.add_argument('--log-freq', default=20, type=int, metavar='N', help='batch logging frequency (default: 10)') parser.add_argument('--checkpoint', default='', type=str, metavar='PATH', help='path to latest checkpoint (default: none)') parser.add_argument('--pretrained', dest='pretrained', action='store_true', help='use pre-trained model') -parser.add_argument('--num-gpu', type=int, default=1, - help='Number of GPUS to use') -parser.add_argument('--no-test-pool', dest='no_test_pool', action='store_true', - help='disable test time pool') -parser.add_argument('--no-prefetcher', action='store_true', default=False, - help='disable fast prefetcher') +# parser.add_argument('--num-gpu', type=int, default=1, +# help='Number of GPUS to use') +parser.add_argument('--test-pool', dest='test_pool', action='store_true', + help='enable test time pool') parser.add_argument('--pin-mem', action='store_true', default=False, help='Pin CPU memory in DataLoader for more efficient (sometimes) transfer to GPU.') parser.add_argument('--channels-last', action='store_true', default=False, help='Use channels_last memory layout') parser.add_argument('--amp', action='store_true', default=False, help='Use AMP mixed precision. Defaults to Apex, fallback to native Torch AMP.') -parser.add_argument('--apex-amp', action='store_true', default=False, - help='Use NVIDIA Apex AMP mixed precision') -parser.add_argument('--native-amp', action='store_true', default=False, - help='Use Native Torch AMP mixed precision') parser.add_argument('--tf-preprocessing', action='store_true', default=False, help='Use Tensorflow preprocessing pipeline (require CPU TF installed') parser.add_argument('--use-ema', dest='use_ema', action='store_true', help='use ema version of weights if present') parser.add_argument('--torchscript', dest='torchscript', action='store_true', help='convert model torchscript for inference') -parser.add_argument('--legacy-jit', dest='legacy_jit', action='store_true', - help='use legacy jit mode for pytorch 1.5/1.5.1/1.6 to get back fusion performance') parser.add_argument('--results-file', default='', type=str, metavar='FILENAME', help='Output csv file for validation results (summary)') parser.add_argument('--real-labels', default='', type=str, metavar='FILENAME', @@ -113,26 +92,8 @@ def validate(args): # might as well try to validate something args.pretrained = args.pretrained or not args.checkpoint - args.prefetcher = not args.no_prefetcher - amp_autocast = suppress # do nothing - if args.amp: - if has_native_amp: - args.native_amp = True - elif has_apex: - args.apex_amp = True - else: - _logger.warning("Neither APEX or Native Torch AMP is available.") - assert not args.apex_amp or not args.native_amp, "Only one AMP mode should be set." - if args.native_amp: - amp_autocast = torch.cuda.amp.autocast - _logger.info('Validating in mixed precision with native PyTorch AMP.') - elif args.apex_amp: - _logger.info('Validating in mixed precision with NVIDIA APEX AMP.') - else: - _logger.info('Validating in float32. 
AMP not enabled.') - if args.legacy_jit: - set_jit_legacy() + dev_env = initialize_device(amp=args.amp) # create model model = create_model( @@ -154,24 +115,16 @@ def validate(args): data_config = resolve_data_config(vars(args), model=model, use_test_size=True, verbose=True) test_time_pool = False - if not args.no_test_pool: + if args.test_pool: model, test_time_pool = apply_test_time_pool(model, data_config, use_test_size=True) if args.torchscript: torch.jit.optimized_execution(True) model = torch.jit.script(model) - model = model.cuda() - if args.apex_amp: - model = amp.initialize(model, opt_level='O1') - - if args.channels_last: - model = model.to(memory_format=torch.channels_last) - - if args.num_gpu > 1: - model = torch.nn.DataParallel(model, device_ids=list(range(args.num_gpu))) - - criterion = nn.CrossEntropyLoss().cuda() + # FIXME device + model, criterion = dev_env.to_device(model, nn.CrossEntropyLoss()) + model.to(dev_env.device) dataset = create_dataset( root=args.data, name=args.dataset, split=args.split, @@ -194,7 +147,6 @@ def validate(args): dataset, input_size=data_config['input_size'], batch_size=args.batch_size, - use_prefetcher=args.prefetcher, interpolation=data_config['interpolation'], mean=data_config['mean'], std=data_config['std'], @@ -203,63 +155,61 @@ def validate(args): pin_memory=args.pin_mem, tf_preprocessing=args.tf_preprocessing) - batch_time = AverageMeter() - losses = AverageMeter() - top1 = AverageMeter() - top5 = AverageMeter() + logger = Logger(logger=_logger) + tracker = Tracker() + losses = TensorAvg() + accuracy = AccuracyTopK().to(dev_env.device) model.eval() + num_steps = len(loader) with torch.no_grad(): - # warmup, reduce variability of first batch time, especially for comparing torchscript vs non - input = torch.randn((args.batch_size,) + tuple(data_config['input_size'])).cuda() - if args.channels_last: - input = input.contiguous(memory_format=torch.channels_last) - model(input) - end = time.time() - for batch_idx, (input, target) in enumerate(loader): - if args.no_prefetcher: - target = target.cuda() - input = input.cuda() - if args.channels_last: - input = input.contiguous(memory_format=torch.channels_last) + tracker.mark_iter() + for step_idx, (sample, target) in enumerate(loader): + tracker.mark_iter_data_end() # compute output - with amp_autocast(): - output = model(input) + with dev_env.autocast(): + output = model(sample) if valid_labels is not None: output = output[:, valid_labels] loss = criterion(output, target) + if dev_env.type == 'cuda': + torch.cuda.synchronize() + #elif dev_env.type == 'xla': + # dev_env.mark_step() + tracker.mark_iter_step_end() + + losses.update(loss.detach(), sample.size(0)) if real_labels is not None: real_labels.add_result(output) - - # measure accuracy and record loss - acc1, acc5 = accuracy(output.detach(), target, topk=(1, 5)) - losses.update(loss.item(), input.size(0)) - top1.update(acc1.item(), input.size(0)) - top5.update(acc5.item(), input.size(0)) - - # measure elapsed time - batch_time.update(time.time() - end) - end = time.time() - - if batch_idx % args.log_freq == 0: - _logger.info( - 'Test: [{0:>4d}/{1}] ' - 'Time: {batch_time.val:.3f}s ({batch_time.avg:.3f}s, {rate_avg:>7.2f}/s) ' - 'Loss: {loss.val:>7.4f} ({loss.avg:>6.4f}) ' - 'Acc@1: {top1.val:>7.3f} ({top1.avg:>7.3f}) ' - 'Acc@5: {top5.val:>7.3f} ({top5.avg:>7.3f})'.format( - batch_idx, len(loader), batch_time=batch_time, - rate_avg=input.size(0) / batch_time.avg, - loss=losses, top1=top1, top5=top5)) + accuracy.update(output.detach(), target) 
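# NOTE: torch_xla evaluates tensors lazily, so the dev_env.mark_step() on the XLA path just below
# flushes the queued graph (forward pass and metric updates) before the next iteration is timed,
# playing the same role as the torch.cuda.synchronize() call on the CUDA path above.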
+ + if dev_env.type == 'xla': + dev_env.mark_step() + + tracker.mark_iter() + if step_idx % args.log_freq == 0: + top1, top5 = accuracy.compute().values() + loss_avg = losses.compute() + logger.log_step( + phase='eval', + step=step_idx, + num_steps=num_steps, + rate=args.batch_size / tracker.iter_time.avg, + loss=loss_avg.item(), + top1=top1.item(), + top5=top5.item(), + ) if real_labels is not None: # real labels mode replaces topk values at the end top1a, top5a = real_labels.get_accuracy(k=1), real_labels.get_accuracy(k=5) else: - top1a, top5a = top1.avg, top5.avg + top1a, top5a = accuracy.compute().values() + top1a, top5a = top1a.item(), top5a.item() + results = OrderedDict( top1=round(top1a, 4), top1_err=round(100 - top1a, 4), top5=round(top5a, 4), top5_err=round(100 - top5a, 4), @@ -267,9 +217,7 @@ def validate(args): img_size=data_config['input_size'][-1], cropt_pct=crop_pct, interpolation=data_config['interpolation']) - - _logger.info(' * Acc@1 {:.3f} ({:.3f}) Acc@5 {:.3f} ({:.3f})'.format( - results['top1'], results['top1_err'], results['top5'], results['top5_err'])) + logger.log_phase(phase='eval', name_map={'top1': 'Acc@1', 'top5': 'Acc@5'}, **results) return results @@ -309,7 +257,6 @@ def main(): result = OrderedDict(model=args.model) r = {} while not r and batch_size >= args.num_gpu: - torch.cuda.empty_cache() try: args.batch_size = batch_size print('Validating with batch size: %d' % args.batch_size) From 76de984a5fd2894a21b6dae270548b09f2e3f602 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Wed, 21 Apr 2021 13:02:53 -0700 Subject: [PATCH 02/61] Fix some bugs with XLA support, logger, add hacky xla dist launch script since torch.dist.launch doesn't work --- launch_xla.py | 66 ++++++++++++++++++++++++++++++++++++ timm/bits/device_env_cuda.py | 6 ++-- timm/bits/device_env_xla.py | 14 ++++++-- timm/bits/logger.py | 7 ++-- timm/bits/updater_xla.py | 8 ++++- timm/data/loader.py | 9 +++-- timm/metrics/accuracy.py | 2 ++ timm/metrics/tensor_avg.py | 2 ++ train.py | 8 +++-- 9 files changed, 104 insertions(+), 18 deletions(-) create mode 100644 launch_xla.py diff --git a/launch_xla.py b/launch_xla.py new file mode 100644 index 0000000000..9e60556cc5 --- /dev/null +++ b/launch_xla.py @@ -0,0 +1,66 @@ +""" +Adapatation of (pre-elastic) torch.distributed.launch for pytorch xla. + +`torch.distributed.launch` is a module that spawns up multiple distributed +training processes on each of the training nodes. 
+ +""" + + +import sys +import subprocess +import importlib +import os +from argparse import ArgumentParser, REMAINDER +from typing import Optional, IO + +import torch_xla.distributed.xla_multiprocessing as xmp + + +def parse_args(): + """ + Helper function parsing the command line options + @retval ArgumentParser + """ + parser = ArgumentParser( + description="PyTorch distributed training launch helper utility" + "that will spawn up multiple distributed processes") + + # Optional arguments for the launch helper + parser.add_argument("--num-devices", type=int, default=1, + help="The number of XLA devices to use for distributed training") + + # positional + parser.add_argument( + "script", type=str, + help="The full path to the single device training script to be launched" + "in parallel, followed by all the arguments for the training script") + + # rest from the training program + parser.add_argument('script_args', nargs=REMAINDER) + return parser.parse_args() + + +def main(): + args = parse_args() + + # set PyTorch distributed related environmental variables + # current_env = os.environ.copy() + # current_env["MASTER_ADDR"] = args.master_addr + # current_env["MASTER_PORT"] = str(args.master_port) + # current_env["WORLD_SIZE"] = str(dist_world_size) + # if 'OMP_NUM_THREADS' not in os.environ and args.nproc_per_node > 1: + # current_env["OMP_NUM_THREADS"] = str(1) + + script_abs = os.path.abspath(args.script) + script_base, script_rel = os.path.split(script_abs) + sys.path.append(script_base) + mod = importlib.import_module(os.path.splitext(script_rel)[0]) + + sys.argv = [args.script] + args.script_args + + xmp.spawn(mod._mp_entry, args=(), nprocs=args.num_devices) + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/timm/bits/device_env_cuda.py b/timm/bits/device_env_cuda.py index 29c4d8f6d7..d609bd2a51 100644 --- a/timm/bits/device_env_cuda.py +++ b/timm/bits/device_env_cuda.py @@ -83,8 +83,10 @@ def autocast(self): return self._autocast def wrap_distributed(self, *modules, **kwargs): - return [DistributedDataParallel(m, device_ids=[self._local_rank], **kwargs) for m in modules] + wrapped = [DistributedDataParallel(m, device_ids=[self._local_rank], **kwargs) for m in modules] + return wrapped[0] if len(wrapped) == 1 else wrapped def to_device(self, *modules: torch.nn.Module): # FIXME handling dtype / memformat... disable flags, enable flags, diff fn? 
- return [m.to(device=self._device, memory_format=self._memory_format) for m in modules] + moved = [m.to(device=self._device, memory_format=self._memory_format) for m in modules] + return moved[0] if len(moved) == 1 else moved diff --git a/timm/bits/device_env_xla.py b/timm/bits/device_env_xla.py index 385b862602..18e0fd3b4e 100644 --- a/timm/bits/device_env_xla.py +++ b/timm/bits/device_env_xla.py @@ -10,6 +10,12 @@ xm = None _HAS_XLA = False +try: + # only the very latest XLA builds have AMP + import torch_xla.amp as xa +except ImportError as e: + xa = None + from .device_env import DeviceEnv @@ -25,7 +31,6 @@ class DeviceEnvXla(DeviceEnv): def __init__(self, xla_device_type=None, device_idx=None, local_rank=0, amp=False): self._device = xm.xla_device(n=device_idx, devkind=xla_device_type) - print(self._device) self._local_rank = xm.get_local_ordinal(local_rank) self._world_size = xm.xrt_world_size() self._distributed = self._world_size > 1 @@ -33,6 +38,7 @@ def __init__(self, xla_device_type=None, device_idx=None, local_rank=0, amp=Fals if self._distributed: self._global_rank = xm.get_ordinal() if amp: + assert xa is not None, 'XLA AMP is not present on this build' self._autocast = xa.autocast else: self._autocast = suppress @@ -76,10 +82,12 @@ def autocast(self): def wrap_distributed(self, *modules): # NO-OP - return tuple([m for m in modules]) + wrapped = [m for m in modules] + return wrapped[0] if len(wrapped) == 1 else wrapped def to_device(self, *modules: torch.nn.Module): - return [m.to(device=self._device, memory_format=self._memory_format) for m in modules] + moved = [m.to(device=self._device, memory_format=self._memory_format) for m in modules] + return moved[0] if len(moved) == 1 else moved def mark_step(self): xm.mark_step() diff --git a/timm/bits/logger.py b/timm/bits/logger.py index 2e2cd9da7c..d9ad41afda 100644 --- a/timm/bits/logger.py +++ b/timm/bits/logger.py @@ -61,8 +61,8 @@ def summary_row_dict(results, index=None, index_name='epoch'): return row_dict if isinstance(next(iter(results.values())), dict): # each key in results is a per-phase results dict, flatten by prefixing with phase name - for p, pr in results.keys(): - assert isinstance(dict, pr) + for p, pr in results.items(): + assert isinstance(pr, dict) row_dict.update([('_'.join([p, k]), v) for k, v in pr.items()]) else: row_dict.update(results) @@ -81,7 +81,7 @@ def update(self, row_dict): if self.needs_header: # first iteration (epoch == 1 can't be used) dw.writeheader() self.needs_header = False - dw.writerow(row_dict) + dw.writerow(row_dict) def _add_kwargs(text_update, name_map=None, **kwargs): @@ -212,7 +212,6 @@ def write_summary( index: value for row index (typically epoch #) index_name: name for row index header (typically 'epoch') """ - row_dict = summary_row_dict(index=index, index_name=index_name, results=results) if self.csv_writer: self.csv_writer.update(row_dict) diff --git a/timm/bits/updater_xla.py b/timm/bits/updater_xla.py index 0789f06fc0..25287ad9fb 100644 --- a/timm/bits/updater_xla.py +++ b/timm/bits/updater_xla.py @@ -4,12 +4,17 @@ try: import torch_xla.core.xla_model as xm - import torch_xla.amp as xa _HAS_XLA = True except ImportError as e: xm = None _HAS_XLA = False +try: + # only the very latest XLA builds have AMP + import torch_xla.amp as xa +except ImportError as e: + xa = None + from .updater import Updater @@ -26,6 +31,7 @@ def __init__( super().__init__(optimizer=optimizer, clip_value=clip_value, clip_mode=clip_mode) self.after_step_closure = True if use_scaler: + assert xa is 
not None, 'XLA AMP not present in this build' self.scaler = xa.GradScaler(**scaler_kwargs) def apply(self, loss: torch.Tensor, accumulate: bool = False): diff --git a/timm/data/loader.py b/timm/data/loader.py index 9b15eb02d6..45d40908c2 100644 --- a/timm/data/loader.py +++ b/timm/data/loader.py @@ -40,11 +40,9 @@ def create_loader( mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_workers=1, - distributed=False, crop_pct=None, collate_fn=None, pin_memory=False, - fp16=False, tf_preprocessing=False, use_multi_epochs_loader=False, persistent_workers=True, @@ -80,13 +78,14 @@ def create_loader( dev_env = get_device() sampler = None - if distributed and not isinstance(dataset, torch.utils.data.IterableDataset): + if dev_env.is_distributed and not isinstance(dataset, torch.utils.data.IterableDataset): if is_training: - sampler = torch.utils.data.distributed.DistributedSampler(dataset) + sampler = torch.utils.data.distributed.DistributedSampler( + dataset, num_replicas=dev_env.world_size, rank=dev_env.global_rank) else: # This will add extra duplicate entries to result in equal num # of samples per-process, will slightly alter validation results - sampler = OrderedDistributedSampler(dataset) + sampler = OrderedDistributedSampler(dataset, num_replicas=dev_env.world_size, rank=dev_env.global_rank) if collate_fn is None: collate_fn = fast_collate diff --git a/timm/metrics/accuracy.py b/timm/metrics/accuracy.py index 98aa59eb22..b58a3781ae 100644 --- a/timm/metrics/accuracy.py +++ b/timm/metrics/accuracy.py @@ -36,6 +36,7 @@ def __init__(self, topk=(1, 5), device=None): self.device = device self.topk = topk self.maxk = max(topk) + # FIXME handle distributed operation # statistics / counts self.reset() @@ -63,6 +64,7 @@ def counts(self): pass def compute(self) -> Dict[str, torch.Tensor]: + # FIXME handle distributed reduction return {f'top{k}': 100 * getattr(self, f'_correct_top{k}') / self._total_sum for k in self.topk} diff --git a/timm/metrics/tensor_avg.py b/timm/metrics/tensor_avg.py index ac2fb6ed0e..c9a3489b00 100644 --- a/timm/metrics/tensor_avg.py +++ b/timm/metrics/tensor_avg.py @@ -8,6 +8,7 @@ def __init__(self): self.sum = None self.count = None self.reset() + # FIXME handle distributed operation def reset(self): self.sum = None @@ -32,6 +33,7 @@ def __init__(self, smoothing_factor=0.9, init_zero=False): self.init_zero = init_zero self.val = None self.reset() + # FIXME handle distributed operation def reset(self): self.val = None diff --git a/train.py b/train.py index f105e525b4..de62792923 100755 --- a/train.py +++ b/train.py @@ -426,7 +426,6 @@ def main(): mean=data_config['mean'], std=data_config['std'], num_workers=args.workers, - distributed=dev_env.is_distributed, collate_fn=collate_fn, pin_memory=args.pin_mem, use_multi_epochs_loader=args.use_multi_epochs_loader @@ -441,7 +440,6 @@ def main(): mean=data_config['mean'], std=data_config['std'], num_workers=args.workers, - distributed=dev_env.is_distributed, crop_pct=data_config['crop_pct'], pin_memory=args.pin_mem, ) @@ -519,7 +517,7 @@ def main(): lr_scheduler.step(epoch + 1, eval_metrics[eval_metric]) if logger is not None: - logger.write_summary(index=epoch, results=dict(train=train_metrics, eval=eval_metric)) + logger.write_summary(index=epoch, results=dict(train=train_metrics, eval=eval_metrics)) if saver is not None: # save proper checkpoint with eval metric @@ -657,5 +655,9 @@ def evaluate( return results +def _mp_entry(*args): + main() + + if __name__ == '__main__': main() From 
938716c753cbdb4501d1b406cd212983999bc0c3 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Wed, 21 Apr 2021 13:16:11 -0700
Subject: [PATCH 03/61] Fix import issue, use devenv for dist info in parser_tfds

---
 timm/bits/device_env_xla.py | 1 -
 timm/data/parsers/parser_tfds.py | 12 +++++++++---
 2 files changed, 9 insertions(+), 4 deletions(-)

diff --git a/timm/bits/device_env_xla.py b/timm/bits/device_env_xla.py
index 18e0fd3b4e..518cd99311 100644
--- a/timm/bits/device_env_xla.py
+++ b/timm/bits/device_env_xla.py
@@ -4,7 +4,6 @@
 
 try:
     import torch_xla.core.xla_model as xm
-    import torch_xla.amp as xa
     _HAS_XLA = True
 except ImportError as e:
     xm = None
diff --git a/timm/data/parsers/parser_tfds.py b/timm/data/parsers/parser_tfds.py
index 0b12a4db1f..92495d12e4 100644
--- a/timm/data/parsers/parser_tfds.py
+++ b/timm/data/parsers/parser_tfds.py
@@ -23,6 +23,7 @@
     exit(1)
 
 from .parser import Parser
+from timm.bits import get_device
 
 MAX_TP_SIZE = 8 # maximum TF threadpool size, only doing jpeg decodes and queuing activities
 SHUFFLE_SIZE = 16834 # samples to shuffle in DS queue
@@ -79,9 +80,14 @@ def __init__(self, root, name, split='train', shuffle=False, is_training=False,
         self.worker_info = None
         self.dist_rank = 0
         self.dist_num_replicas = 1
-        if dist.is_available() and dist.is_initialized() and dist.get_world_size() > 1:
-            self.dist_rank = dist.get_rank()
-            self.dist_num_replicas = dist.get_world_size()
+        dev_env = get_device()
+        # FIXME allow to work without devenv usage?
+        if dev_env.is_distributed and dev_env.world_size > 1:
+            self.dist_rank = dev_env.global_rank
+            self.dist_num_replicas = dev_env.world_size
+        # if dist.is_available() and dist.is_initialized() and dist.get_world_size() > 1:
+        #     self.dist_rank = dist.get_rank()
+        #     self.dist_num_replicas = dist.get_world_size()
 
     def _lazy_init(self):
         """ Lazily initialize the dataset.

From aa92d7b1c50d4c5b99a6f15765ce666ec45aeec5 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Mon, 17 May 2021 15:56:24 -0700
Subject: [PATCH 04/61] Major timm.bits update. Updater and DeviceEnv now dataclasses, after_step closure used, metrics base impl w/ distributed reduce, many tweaks/fixes.
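A rough sketch of how the bits components are meant to compose after this change (illustrative only: the toy model, loader and SGD optimizer below are placeholders, and the exact bits interfaces are still WIP, so details may differ from the diffs that follow):

    import torch
    import torch.nn as nn
    from timm.bits import initialize_device, create_updater, AvgTensor

    # toy stand-ins; a real setup would use timm.models.create_model / timm.data.create_loader
    model = nn.Linear(8, 2)
    loss_fn = nn.CrossEntropyLoss()
    loader = [(torch.randn(4, 8), torch.randint(0, 2, (4,))) for _ in range(10)]

    dev_env = initialize_device(amp=True)       # XLA if available, else CUDA, else CPU
    model, loss_fn = dev_env.to_device(model, loss_fn)
    if dev_env.distributed:
        model = dev_env.wrap_distributed(model)

    updater = create_updater(torch.optim.SGD(model.parameters(), lr=0.1))
    losses = AvgTensor()

    model.train()
    for sample, target in loader:
        sample, target = sample.to(dev_env.device), target.to(dev_env.device)
        with dev_env.autocast():
            loss = loss_fn(model(sample), target)
        updater.reset()        # zero grads
        updater.apply(loss)    # backward (+ optional scaling / clipping) and optimizer step
        dev_env.mark_step()    # no-op on non-XLA devices
        losses.update(loss.detach(), sample.size(0))

    print(losses.compute().item())

The train.py and validate.py changes in this series follow the same pattern, with Tracker and Logger layered on top for step timing and output.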
--- timm/bits/__init__.py | 27 +- .../scalar_avg.py => bits/avg_scalar.py} | 2 +- .../tensor_avg.py => bits/avg_tensor.py} | 2 +- timm/bits/checkpoint.py | 58 +++ timm/bits/device_env.py | 140 ++++-- timm/bits/device_env_cuda.py | 96 ++-- timm/bits/device_env_factory.py | 14 +- timm/bits/device_env_xla.py | 136 +++--- timm/bits/distributed.py | 151 ++++++ timm/bits/distributed_torch.py | 190 ++++++++ timm/bits/{grad_clipper.py => grad_clip.py} | 20 +- timm/bits/logger.py | 45 +- timm/bits/metric.py | 142 ++++++ timm/bits/metric_accuracy.py | 98 ++++ .../metric_precision_recall.py} | 0 timm/bits/tracker.py | 19 +- timm/bits/train_cfg.py | 12 + timm/bits/train_services.py | 13 + timm/bits/train_setup.py | 153 ++++++ timm/bits/train_state.py | 33 ++ timm/bits/updater.py | 76 +-- timm/bits/updater_cuda.py | 48 +- timm/bits/updater_deepspeed.py | 26 + timm/bits/updater_factory.py | 39 +- timm/bits/updater_xla.py | 72 +-- timm/data/fetcher.py | 12 +- timm/data/loader.py | 6 +- timm/data/parsers/parser_tfds.py | 8 +- timm/metrics/__init__.py | 4 - timm/metrics/accuracy.py | 114 ----- timm/scheduler/__init__.py | 1 + timm/utils/checkpoint_saver.py | 3 +- timm/utils/clip_grad.py | 6 +- timm/utils/distributed.py | 4 +- train.py | 451 ++++++++++-------- validate.py | 15 +- 36 files changed, 1577 insertions(+), 659 deletions(-) rename timm/{metrics/scalar_avg.py => bits/avg_scalar.py} (96%) rename timm/{metrics/tensor_avg.py => bits/avg_tensor.py} (98%) create mode 100644 timm/bits/checkpoint.py create mode 100644 timm/bits/distributed.py create mode 100644 timm/bits/distributed_torch.py rename timm/bits/{grad_clipper.py => grad_clip.py} (58%) create mode 100644 timm/bits/metric.py create mode 100644 timm/bits/metric_accuracy.py rename timm/{metrics/precision_recall.py => bits/metric_precision_recall.py} (100%) create mode 100644 timm/bits/train_cfg.py create mode 100644 timm/bits/train_services.py create mode 100644 timm/bits/train_setup.py create mode 100644 timm/bits/train_state.py create mode 100644 timm/bits/updater_deepspeed.py delete mode 100644 timm/metrics/__init__.py delete mode 100644 timm/metrics/accuracy.py diff --git a/timm/bits/__init__.py b/timm/bits/__init__.py index 33080c7324..c99603414c 100644 --- a/timm/bits/__init__.py +++ b/timm/bits/__init__.py @@ -1,10 +1,25 @@ +from .avg_scalar import AvgMinMaxScalar +from .avg_tensor import AvgTensor +from .device_env import DeviceEnv, DeviceEnvType +from .device_env_cuda import DeviceEnvCuda from .device_env_factory import initialize_device, get_device -from .device_env import DeviceEnv -#from .evaluate import evaluate, eval_step +from .device_env_xla import DeviceEnvXla +from .distributed import distribute_bn, all_gather_recursive, all_reduce_recursive, broadcast_recursive,\ + all_reduce_sequence, all_gather_sequence +# from .evaluate import evaluate, eval_step from .logger import Logger -#from .task import TaskClassify +from .metric import Metric, MetricValue +from .metric_accuracy import AccuracyTopK +from .tracker import Tracker +# from .task_metrics import TaskMetrics, TaskMetricsClassify +from .train_cfg import TrainCfg +from .train_services import TrainServices +from .train_setup import setup_model_and_optimizer +from .train_state import TrainState +# from .task import TaskClassify from .updater import Updater +from .updater_cuda import UpdaterCudaWithScaler +from .updater_deepspeed import UpdaterDeepSpeed from .updater_factory import create_updater -from .tracker import Tracker -#from .task_metrics import TaskMetrics, 
TaskMetricsClassify -#from .train import train_one_epoch, TrainServices, TrainState, TrainCfg, Experiment \ No newline at end of file +from .updater_xla import UpdaterXla, UpdaterXlaWithScaler +# from .train import train_one_epoch, Experiment diff --git a/timm/metrics/scalar_avg.py b/timm/bits/avg_scalar.py similarity index 96% rename from timm/metrics/scalar_avg.py rename to timm/bits/avg_scalar.py index f5d958077b..04d41c8e18 100644 --- a/timm/metrics/scalar_avg.py +++ b/timm/bits/avg_scalar.py @@ -1,4 +1,4 @@ -class ScalarAvgMinMax: +class AvgMinMaxScalar: """Computes and stores the average and current value""" def __init__(self): diff --git a/timm/metrics/tensor_avg.py b/timm/bits/avg_tensor.py similarity index 98% rename from timm/metrics/tensor_avg.py rename to timm/bits/avg_tensor.py index c9a3489b00..0aaf92e305 100644 --- a/timm/metrics/tensor_avg.py +++ b/timm/bits/avg_tensor.py @@ -1,7 +1,7 @@ import torch -class TensorAvg: +class AvgTensor: """Computes and stores the average and current value""" def __init__(self): diff --git a/timm/bits/checkpoint.py b/timm/bits/checkpoint.py new file mode 100644 index 0000000000..3c191b0a94 --- /dev/null +++ b/timm/bits/checkpoint.py @@ -0,0 +1,58 @@ +import logging +import os +from collections import OrderedDict + +import torch + +from .train_state import TrainState, serialize_train_state, deserialize_train_state + +_logger = logging.getLogger(__name__) + + +def resume_train_checkpoint( + train_state, + checkpoint_path, + resume_opt=True, + deserialize_fn=deserialize_train_state, + log_info=True): + + raise NotImplementedError + + # resume_epoch = None + # if os.path.isfile(checkpoint_path): + # checkpoint = torch.load(checkpoint_path, map_location='cpu') + # + # if isinstance(checkpoint, dict) and 'state_dict' in checkpoint: + # if log_info: + # _logger.info('Restoring model state from checkpoint...') + # new_state_dict = OrderedDict() + # for k, v in checkpoint['state_dict'].items(): + # name = k[7:] if k.startswith('module') else k + # new_state_dict[name] = v + # model.load_state_dict(new_state_dict) + # + # if optimizer is not None and 'optimizer' in checkpoint: + # if log_info: + # _logger.info('Restoring optimizer state from checkpoint...') + # optimizer.load_state_dict(checkpoint['optimizer']) + # + # if loss_scaler is not None and loss_scaler.state_dict_key in checkpoint: + # if log_info: + # _logger.info('Restoring AMP loss scaler state from checkpoint...') + # loss_scaler.load_state_dict(checkpoint[loss_scaler.state_dict_key]) + # + # if 'epoch' in checkpoint: + # resume_epoch = checkpoint['epoch'] + # if 'version' in checkpoint and checkpoint['version'] > 1: + # resume_epoch += 1 # start at the next epoch, old checkpoints incremented before save + # + # if log_info: + # _logger.info("Loaded checkpoint '{}' (epoch {})".format(checkpoint_path, checkpoint['epoch'])) + # else: + # model.load_state_dict(checkpoint) + # if log_info: + # _logger.info("Loaded checkpoint '{}'".format(checkpoint_path)) + # return resume_epoch + # else: + # _logger.error("No checkpoint found at '{}'".format(checkpoint_path)) + # raise FileNotFoundError() diff --git a/timm/bits/device_env.py b/timm/bits/device_env.py index 646d64f478..7307823e50 100644 --- a/timm/bits/device_env.py +++ b/timm/bits/device_env.py @@ -1,58 +1,130 @@ -import torch import abc +from contextlib import suppress +from enum import Enum +from typing import Callable, Union, Optional, List, Tuple +from dataclasses import dataclass, field, InitVar +import torch +import torch.distributed as 
dist -class DeviceEnv(abc.ABC): +TensorList = Union[torch.Tensor, List[torch.Tensor], Tuple[torch.Tensor]] - @property - @abc.abstractmethod - def device(self) -> torch.device: - pass - @property - @abc.abstractmethod - def local_rank(self) -> int: - pass +class DeviceEnvType(Enum): + """ Device Environment Types + """ + CPU = "cpu" + CUDA = "cuda" + XLA = "xla" + + +@dataclass +class DeviceEnv: + device_type: InitVar[Optional[str]] = None + device_index: InitVar[Optional[int]] = None + + device: torch.device = field(init=False) # set from device_type + device_index or post_init logic + world_size: Optional[int] = None # set by post_init from env when None + local_rank: Optional[int] = None # set by post_init from env when None + global_rank: Optional[int] = None # set by post_init from env when None + amp: bool = False + autocast: Optional[Callable] = None # set by post_init from env when None + memory_format: Optional[torch.memory_format] = None + dtype: Optional[torch.dtype] = None + + def __post_init__(self, device_type: Optional[str], device_index: Optional[int]): + device_type = device_type or 'cpu' + self.device = torch.device(device_type) if device_index is None \ + else torch.device(device_type, device_index) + self.world_size = 1 if self.world_size is None else self.world_size + self.local_rank = 0 if self.local_rank is None else self.local_rank + self.global_rank = 0 if self.global_rank is None else self.global_rank + if self.autocast is None: + self.autocast = suppress @property - @abc.abstractmethod - def global_rank(self) -> int: - pass + def type(self) -> DeviceEnvType: + if self.device.type == 'cpu': + return DeviceEnvType.CPU + elif self.device.type == 'cuda': + return DeviceEnvType.CUDA + elif self.device.type == 'xla': + return DeviceEnvType.XLA + else: + assert False, "Unexpected device type for base DevEnv impl." @property - @abc.abstractmethod - def is_distributed(self) -> bool: - pass + def type_cuda(self): + # shortcut for common cuda device type + return self.type == DeviceEnvType.CUDA @property - @abc.abstractmethod - def world_size(self) -> int: - pass + def type_xla(self): + # shortcut for common xla device type + return self.type == DeviceEnvType.XLA @property - @abc.abstractmethod - def is_master(self) -> bool: - pass + def distributed(self): + return self.world_size > 1 @property - @abc.abstractmethod - def type(self) -> str: - pass + def primary(self): + return self.local_rank == 0 @property - @abc.abstractmethod - def autocast(self): - pass + def global_primary(self): + return self.global_rank == 0 - @abc.abstractmethod def wrap_distributed(self, *modules): pass - @abc.abstractmethod - def to_device(self, *modules: torch.nn.Module): + def wrap_parallel(self, *modules): pass - #@abc.abstractmethod + def to_device(self, *modules: torch.nn.Module): + # FIXME handling dtype / memformat... disable flags, enable flags, diff fn? + moved = [m.to(device=self.device, memory_format=self.memory_format) for m in modules] + return moved[0] if len(moved) == 1 else moved + def mark_step(self): - # FIXME this is for XLA only, make it common to all devices w/ appropriate no-ops? 
- pass \ No newline at end of file + pass # NO-OP for non-XLA devices + + def all_reduce_(self, tensor: TensorList, op=dist.ReduceOp.SUM, average=False): + print(len(tensor), type(tensor)) + print(tensor.shape) + dist.all_reduce(tensor, op=op) + if average: + tensor.div_(self.world_size) + return tensor + + def all_reduce(self, tensor: torch.Tensor, op=dist.ReduceOp.SUM, average=False): + reduce_tensor = tensor.clone() + dist.all_reduce(reduce_tensor, op=op) + if average: + reduce_tensor = reduce_tensor / self.world_size + return reduce_tensor + + def all_gather(self, tensor: torch.Tensor, cat_dim=0): + output_tensors = [torch.empty_like(tensor) for _ in range(self.world_size)] + dist.all_gather(output_tensors, tensor) + return torch.cat(output_tensors, cat_dim) + + def all_to_all(self, tensor: torch.Tensor, num_splits, split_dim, cat_dim=0): + input_tensors = torch.chunk(tensor, num_splits, split_dim) + output_tensors = [torch.empty_like(input_tensors[0]) for _ in range(self.world_size)] + dist.all_to_all(output_tensors, input_tensors) + return torch.cat(output_tensors, cat_dim) + + def broadcast_(self, tensor: torch.Tensor, src_rank=0): + dist.broadcast(tensor, src=src_rank) + return tensor + + def broadcast(self, tensor: Optional[torch.Tensor] = None, src_rank=0): + if self.global_rank != src_rank: + tensor = torch.empty_like(tensor) + assert tensor is not None + dist.broadcast(tensor, src=src_rank) + return tensor + + def barrier(self): + dist.barrier() diff --git a/timm/bits/device_env_cuda.py b/timm/bits/device_env_cuda.py index d609bd2a51..7358e405af 100644 --- a/timm/bits/device_env_cuda.py +++ b/timm/bits/device_env_cuda.py @@ -1,92 +1,58 @@ import os from contextlib import suppress +from dataclasses import dataclass, field, InitVar +from typing import Optional import torch -from torch.nn.parallel import DistributedDataParallel +from torch.nn.parallel import DistributedDataParallel, DataParallel -from .device_env import DeviceEnv +from .device_env import DeviceEnv, DeviceEnvType def is_cuda_available(): return torch.cuda.is_available() +@dataclass class DeviceEnvCuda(DeviceEnv): - def __init__(self, device_idx=None, local_rank=None, amp=False, memory_format=None): + def __post_init__(self, device_type: str, device_index: Optional[int]): assert torch.cuda.device_count() torch.backends.cudnn.benchmark = True - self._local_rank = 0 - self._distributed = False - self._world_size = 1 - self._global_rank = 0 - if 'WORLD_SIZE' in os.environ: - self._distributed = int(os.environ['WORLD_SIZE']) > 1 - if self._distributed: - if local_rank is None: + setup_world_size = self.world_size or int(os.environ.get('WORLD_SIZE', 1)) + assert setup_world_size + if setup_world_size > 1: + # setup distributed + assert device_index is None + if self.local_rank is None: lr = os.environ.get('LOCAL_RANK', None) if lr is None: raise RuntimeError( 'At least one of LOCAL_RANK env variable or local_rank arg must be set to valid integer.') - self._local_rank = lr - else: - self._local_rank = int(local_rank) - self._device = torch.device('cuda:%d' % self._local_rank) - torch.cuda.set_device(self._local_rank) + self.local_rank = int(lr) + self.device = torch.device('cuda:%d' % self.local_rank) + torch.cuda.set_device(self.local_rank) torch.distributed.init_process_group(backend='nccl', init_method='env://') - self._world_size = torch.distributed.get_world_size() - self._global_rank = torch.distributed.get_rank() + self.world_size = torch.distributed.get_world_size() + assert self.world_size == setup_world_size + 
self.global_rank = torch.distributed.get_rank() else: - self._device = torch.device('cuda' if device_idx is None else f'cuda:{device_idx}') - self._memory_format = memory_format - if amp: - self._amp = amp - self._autocast = torch.cuda.amp.autocast - else: - self._amp = amp - self._autocast = suppress - - @property - def device(self): - return self._device - - @property - def local_rank(self): - return self._local_rank - - @property - def global_rank(self): - return self._global_rank - - @property - def is_distributed(self): - return self._distributed + self.device = torch.device('cuda' if device_index is None else f'cuda:{device_index}') + self.local_rank = 0 + self.world_size = 1 + self.global_rank = 0 + if self.autocast is None: + self.autocast = torch.cuda.amp.autocast if self.amp else suppress @property - def world_size(self): - return self._world_size - - @property - def is_master(self): - return self._local_rank == 0 - - @property - def type(self) -> str: - return 'cuda' - - @property - def amp(self) -> bool: - return self._amp - - @property - def autocast(self): - return self._autocast + def type(self) -> DeviceEnvType: + return DeviceEnvType.CUDA def wrap_distributed(self, *modules, **kwargs): - wrapped = [DistributedDataParallel(m, device_ids=[self._local_rank], **kwargs) for m in modules] + wrapped = [DistributedDataParallel(m, device_ids=[self.local_rank], **kwargs) for m in modules] return wrapped[0] if len(wrapped) == 1 else wrapped - def to_device(self, *modules: torch.nn.Module): - # FIXME handling dtype / memformat... disable flags, enable flags, diff fn? - moved = [m.to(device=self._device, memory_format=self._memory_format) for m in modules] - return moved[0] if len(moved) == 1 else moved + def wrap_parallel(self, *modules, **kwargs): + assert not self.distributed + wrapped = [DataParallel(m, **kwargs) for m in modules] + return wrapped[0] if len(wrapped) == 1 else wrapped diff --git a/timm/bits/device_env_factory.py b/timm/bits/device_env_factory.py index f6dc14f3c9..2037a39e31 100644 --- a/timm/bits/device_env_factory.py +++ b/timm/bits/device_env_factory.py @@ -1,10 +1,11 @@ +from .device_env import DeviceEnv from .device_env_cuda import DeviceEnvCuda, is_cuda_available from .device_env_xla import DeviceEnvXla, is_xla_available _device_env = None -def initialize_device(force_cpu: bool = False, xla_device_type=None, **kwargs): +def initialize_device(force_cpu: bool = False, **kwargs) -> DeviceEnv: global _device_env if _device_env is not None: # warning @@ -12,21 +13,22 @@ def initialize_device(force_cpu: bool = False, xla_device_type=None, **kwargs): denv = None if not force_cpu: + xla_device_type = kwargs.get('xla_device_type', None) if is_xla_available(xla_device_type): - # XLA supports more than just TPU, but by default will only look at TPU - denv = DeviceEnvXla(**kwargs, xla_device_type=xla_device_type) + # XLA supports more than just TPU, will search in order TPU, GPU, CPU + denv = DeviceEnvXla(**kwargs) elif is_cuda_available(): denv = DeviceEnvCuda(**kwargs) if denv is None: - # FIXME implement CPU support - raise NotImplementedError() + denv = DeviceEnv() + print(denv) # FIXME DEBUG _device_env = denv return denv -def get_device(): +def get_device() -> DeviceEnv: if _device_env is None: raise RuntimeError('Please initialize device environment by calling initialize_device first.') return _device_env diff --git a/timm/bits/device_env_xla.py b/timm/bits/device_env_xla.py index 518cd99311..cc9ea3ddfe 100644 --- a/timm/bits/device_env_xla.py +++ 
b/timm/bits/device_env_xla.py @@ -1,6 +1,10 @@ import os from contextlib import suppress +from dataclasses import dataclass, field, InitVar +from typing import Optional + import torch +from torch.distributed import ReduceOp try: import torch_xla.core.xla_model as xm @@ -15,78 +19,102 @@ except ImportError as e: xa = None -from .device_env import DeviceEnv +from .device_env import DeviceEnv, DeviceEnvType, TensorList + + +_PT_TO_XM_OP = { + ReduceOp.SUM: 'sum', + ReduceOp.PRODUCT: 'prod', + ReduceOp.MIN: 'min', + ReduceOp.MAX: 'max', + ReduceOp.BAND: 'and', + ReduceOp.BOR: 'or', +} def is_xla_available(xla_device_type=None): if not _HAS_XLA: return False supported_devs = xm.get_xla_supported_devices(devkind=xla_device_type) - print(supported_devs) return len(supported_devs) >= 1 +@dataclass class DeviceEnvXla(DeviceEnv): - def __init__(self, xla_device_type=None, device_idx=None, local_rank=0, amp=False): - self._device = xm.xla_device(n=device_idx, devkind=xla_device_type) - self._local_rank = xm.get_local_ordinal(local_rank) - self._world_size = xm.xrt_world_size() - self._distributed = self._world_size > 1 - self._global_rank = 0 - if self._distributed: - self._global_rank = xm.get_ordinal() - if amp: - assert xa is not None, 'XLA AMP is not present on this build' - self._autocast = xa.autocast + def __post_init__(self, device_type: Optional[str], device_idx: Optional[int]): + if device_type is not None: + device_type = device_type.upper() + assert device_type in ('TPU', 'GPU', 'CPU'), "XLA device type must be one of ('TPU', 'GPU', 'CPU')" + self.device = xm.xla_device(n=device_idx, devkind=device_type) + self.world_size = xm.xrt_world_size() + if self.distributed: + assert device_idx is None, "device_index is based on local rank for distributed XLA mode" + self.local_rank = xm.get_local_ordinal() + self.global_rank = xm.get_ordinal() else: - self._autocast = suppress - self._memory_format = None - - @property - def device(self): - return self._device - - @property - def local_rank(self): - return self._local_rank - - @property - def global_rank(self): - return self._global_rank - - @property - def is_distributed(self): - return self._distributed - - @property - def world_size(self): - return self._world_size - - @property - def is_master(self): - return self._global_rank == 0 - - @property - def type(self) -> str: - return 'xla' - - @property - def amp(self) -> bool: - return False + self.local_rank = 0 + self.global_rank = 0 + if self.amp: + assert xa is not None, 'XLA AMP is not present on this build' + if self.autocast is None: + self.autocast = xa.autocast if self.amp else suppress @property - def autocast(self): - return self._autocast + def type(self) -> DeviceEnvType: + return DeviceEnvType.XLA def wrap_distributed(self, *modules): - # NO-OP - wrapped = [m for m in modules] + wrapped = [m for m in modules] # NO-OP return wrapped[0] if len(wrapped) == 1 else wrapped - def to_device(self, *modules: torch.nn.Module): - moved = [m.to(device=self._device, memory_format=self._memory_format) for m in modules] - return moved[0] if len(moved) == 1 else moved + def wrap_parallel(self, *modules): + assert False, "Not implemented" def mark_step(self): xm.mark_step() + + def all_reduce(self, tensor: torch.Tensor, op=ReduceOp.SUM, average=False): + assert isinstance(tensor, torch.Tensor) # unlike in-place variant, lists/tuples not allowed + op = _PT_TO_XM_OP[op] + scale = 1.0 + if average: + scale /= self.world_size + return xm.all_reduce(op, tensor, scale=scale) + + def all_reduce_(self, 
tensor: TensorList, op=ReduceOp.SUM, average=False): + op = _PT_TO_XM_OP[op] + scale = 1.0 + wrapped = False + if isinstance(tensor, torch.Tensor): + tensor = [tensor] # bare tensors are not operated on in-place + wrapped = True + if average: + scale /= self.world_size + xm.all_reduce(op, tensor, scale=scale) + if wrapped: + tensor = tensor[0] + return tensor + + def all_gather(self, tensor: torch.Tensor, cat_dim=0): + output = xm.all_gather(tensor, cat_dim) + return output + + def all_to_all(self, tensor, num_splits, split_dim, cat_dim=0): + output = xm.all_to_all(tensor, split_dim, cat_dim, num_splits) + return output + + def broadcast(self, tensor: torch.Tensor, src_rank=0): + if self.global_rank != src_rank: + reduce_tensor = torch.zeros_like(tensor) + xm.all_reduce('sum', reduce_tensor) + else: + xm.all_reduce('sum', tensor) + return tensor + + def broadcast_(self, tensor: torch.Tensor, src_rank=0): + out_tensor = self.broadcast(tensor, src_rank) + return tensor.copy_(out_tensor) + + def barrier(self): + xm.rendezvous('timm.bits.dist_barrier') diff --git a/timm/bits/distributed.py b/timm/bits/distributed.py new file mode 100644 index 0000000000..55f9adf502 --- /dev/null +++ b/timm/bits/distributed.py @@ -0,0 +1,151 @@ +from typing import Dict, Tuple, List, Union, Any, Callable + +import torch +from torch.distributed import ReduceOp + +from timm.utils import unwrap_model + +from .device_env import DeviceEnv, DeviceEnvType +from .device_env_factory import get_device + + +TensorSeq = Union[torch.Tensor, Tuple[torch.Tensor, ...], List[torch.Tensor], Dict[Any, torch.Tensor]] + + +def _validate_type(tensor: TensorSeq): + if isinstance(tensor, (dict, list, tuple)): + if not tensor: + return + else: + assert isinstance(tensor, torch.Tensor) + + +def distribute_bn(model: torch.nn.Module, reduce: bool = False, dev_env: DeviceEnv = None): + if dev_env is None: + dev_env = get_device() + # ensure every node has the same running bn stats + for bn_name, bn_buf in unwrap_model(model).named_buffers(recurse=True): + if ('running_mean' in bn_name) or ('running_var' in bn_name): + if reduce: + # average bn stats across whole group + dev_env.all_reduce_(bn_buf, average=True) + else: + # broadcast bn stats from rank 0 to whole group + dev_env.broadcast_(bn_buf, 0) + + +def all_gather_recursive(tensor: TensorSeq, cat_dim=0, dev_env: DeviceEnv = None): + """ Recursive all gather via DeviceEnv distributed primitives + FIXME add group support + """ + _validate_type(tensor) + if dev_env is None: + dev_env = get_device() + if isinstance(tensor, torch.Tensor): + return dev_env.all_gather(tensor, cat_dim=cat_dim) + elif isinstance(tensor, dict): + return {k: all_gather_recursive(v, dev_env=dev_env) for k, v in tensor.items()} + elif isinstance(tensor, (tuple, list)): + return type(tensor)(all_gather_recursive(v, dev_env=dev_env) for v in tensor) + + +def all_reduce_recursive(tensor: TensorSeq, op=ReduceOp.SUM, average=False, dev_env: DeviceEnv = None): + """ Recursive all reduce via DeviceEnv distributed primitives + FIXME add group support + """ + _validate_type(tensor) + if dev_env is None: + dev_env = get_device() + if isinstance(tensor, torch.Tensor): + return dev_env.all_reduce_(tensor, op=op, average=average) + elif isinstance(tensor, dict): + return {k: all_reduce_recursive(v, op=op, average=average, dev_env=dev_env) for k, v in tensor.items()} + elif isinstance(tensor, (tuple, list)): + return type(tensor)(all_reduce_recursive(v, op=op, average=average, dev_env=dev_env) for v in tensor) + + +def 
broadcast_recursive(tensor: TensorSeq, src_rank: int, dev_env: DeviceEnv = None): + """ Recursive broadcast via DeviceEnv distributed primitives + FIXME add group support + """ + _validate_type(tensor) + if dev_env is None: + dev_env = get_device() + if isinstance(tensor, torch.Tensor): + return dev_env.broadcast_(tensor, src_rank=src_rank) + elif isinstance(tensor, dict): + return {k: broadcast_recursive(v, src_rank=src_rank, dev_env=dev_env) for k, v in tensor.items()} + elif isinstance(tensor, (tuple, list)): + return type(tensor)(broadcast_recursive(v, src_rank=src_rank, dev_env=dev_env) for v in tensor) + + +def all_gather_sequence(tensor: TensorSeq, cat_dim: int = 0, dev_env: DeviceEnv = None): + """ All gather a flat Tensor sequence (dict, list, tuple) of same shape + + """ + _validate_type(tensor) + if dev_env is None: + dev_env = get_device() + + with torch.no_grad(): + names = None + # merge values into one tensor for reduction + if isinstance(tensor, dict): + names = tensor.keys() + gather_values = tuple(tensor.values()) + elif isinstance(tensor, (tuple, list)): + gather_values = tensor + else: + gather_values = (tensor,) + + gather_values = torch.stack(gather_values, dim=0) + gather_values = dev_env.all_gather(gather_values, cat_dim=cat_dim + 1).unbind(dim=0) + + # separate reduced values into original structure + if isinstance(tensor, dict): + gather_values = {k: v for k, v in zip(names, gather_values)} + elif isinstance(tensor, (tuple, list)): + gather_values = type(tensor)(v for v in gather_values) + else: + gather_values = gather_values[0] + + return gather_values + + +def all_reduce_sequence(tensor: TensorSeq, op=ReduceOp.SUM, average=False, dev_env: DeviceEnv = None): + """ + All reduce the tensors in a flat Tensor sequence (dict, list, tuple) of same tensor shape + + Args: + tensor (dict): inputs to be reduced. All the values must be scalar Tensor. + average (bool): whether to do average or sum + Returns: + a sequence with the same type as input (dict, list, tuple) + """ + _validate_type(tensor) + if dev_env is None: + dev_env = get_device() + + with torch.no_grad(): + names = None + # merge values into one tensor for reduction + if isinstance(tensor, dict): + names = tensor.keys() + reduce_values = tuple(tensor.values()) + elif isinstance(tensor, (tuple, list)): + reduce_values = tensor + else: + reduce_values = (tensor,) + + reduce_values = torch.stack(reduce_values, dim=0) + dev_env.all_reduce_(reduce_values, op=op, average=average) + reduce_values = reduce_values.unbind(dim=0) + # separate reduced values into original structure + if isinstance(tensor, dict): + reduce_values = {k: v for k, v in zip(names, reduce_values)} + elif isinstance(tensor, (tuple, list)): + reduce_values = type(tensor)(v for v in reduce_values) + else: + reduce_values = reduce_values[0] + + return reduce_values \ No newline at end of file diff --git a/timm/bits/distributed_torch.py b/timm/bits/distributed_torch.py new file mode 100644 index 0000000000..20f7036c17 --- /dev/null +++ b/timm/bits/distributed_torch.py @@ -0,0 +1,190 @@ +""" PyTorch distributed helpers + +Some of this lifted from Detectron2 with other fns added by myself. 
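For context, a hypothetical usage sketch of the timm.bits.distributed sequence helpers introduced above (import paths follow this patch's layout; the statistic names and values are illustrative only):

    import torch
    from torch.distributed import ReduceOp

    from timm.bits import initialize_device
    from timm.bits.distributed import all_reduce_sequence

    dev_env = initialize_device()  # picks XLA or CUDA when available

    # per-rank scalar statistics accumulated over an epoch (illustrative values)
    stats = dict(
        loss_sum=torch.tensor(123.4, device=dev_env.device),
        sample_count=torch.tensor(512., device=dev_env.device),
    )

    if dev_env.distributed:
        # merges the scalars into one tensor, all-reduces once, then splits them back out
        stats = all_reduce_sequence(stats, op=ReduceOp.SUM, dev_env=dev_env)

    mean_loss = (stats['loss_sum'] / stats['sample_count']).item()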
+ +FIXME many functions remain unfinished/untested +""" +from typing import Dict, Tuple, List, Union, Any, Callable + +import torch +import torch.distributed as dist +from torch.distributed import ReduceOp + +TensorSeq = Union[torch.Tensor, Tuple[torch.Tensor, ...], List[torch.Tensor], Dict[Any, torch.Tensor]] + + +def synchronize_torch(): + """ + Helper function to synchronize (barrier) among all processes when + using distributed training + """ + if not dist.is_available(): + return + if not dist.is_initialized(): + return + world_size = dist.get_world_size() + if world_size == 1: + return + dist.barrier() + + +def all_reduce_sequence_torch(values: TensorSeq, op=ReduceOp.SUM, average=False, group=None): + """ + All reduce the tensors in a sequence (dict, list, tuple) + + Args: + values (dict): inputs to be reduced. All the values must be scalar Tensor. + average (bool): whether to do average or sum + Returns: + a sequence with the same type as input (dict, list, tuple) + """ + world_size = dist.get_world_size(group) + if world_size <= 1: + return values + + with torch.no_grad(): + names = None + if isinstance(values, dict): + names = values.keys() + reduce_values = torch.stack(tuple(values.values()), dim=0) + elif isinstance(values, (tuple, list)): + reduce_values = torch.stack(values, dim=0) + else: + reduce_values = values + dist.all_reduce(reduce_values, op=op, group=group) + if average: + reduce_values /= world_size + if isinstance(values, dict): + reduce_values = {k: v for k, v in zip(names, reduce_values)} + elif isinstance(values, (tuple, list)): + reduce_values = type(values)(v for v in reduce_values) + return reduce_values + + +def reduce_sequence_torch(values: TensorSeq, dst_rank=0, op=ReduceOp.SUM, average=False, group=None): + """ + All reduce the tensors in a sequence (dict, list, tuple) + + Args: + values (dict): inputs to be reduced. All the values must be scalar Tensor. 
+ average (bool): whether to do average or sum + Returns: + a sequence with the same type as input (dict, list, tuple) + """ + world_size = dist.get_world_size(group) + this_rank = dist.get_rank() + if world_size <= 1: + return values + + with torch.no_grad(): + names = None + if isinstance(values, dict): + names = values.keys() + reduce_values = torch.stack(tuple(values.values()), dim=0) + elif isinstance(values, (tuple, list)): + reduce_values = torch.stack(values, dim=0) + else: + reduce_values = values + reduce_values = torch.stack(reduce_values, dim=0) + dist.reduce(reduce_values, dst=dst_rank, op=op, group=group) + if average and this_rank == dst_rank: + reduce_values /= world_size + if isinstance(values, dict): + reduce_values = {k: v for k, v in zip(names, reduce_values)} + elif isinstance(values, (tuple, list)): + reduce_values = type(values)(v for v in reduce_values) + return reduce_values + + +def all_gather_sequence_torch(values: TensorSeq, group=None, join_fn=torch.cat, join_dim=0): + world_size = dist.get_world_size(group) + + def _do_gather(tensor): + tensor_list = [torch.empty_like(tensor) for _ in range(world_size)] + dist.all_gather(tensor_list, tensor, group=group) + return join_fn(tensor_list, dim=join_dim) + + if isinstance(values, dict): + gathered = {k: _do_gather(v) for k, v in values.items()} + return gathered + elif isinstance(values, (list, tuple)): + gathered = type(values)(_do_gather(v) for v in values) + return gathered + else: + # if not a dict, list, tuple, expect a singular tensor + assert isinstance(values, torch.Tensor) + return _do_gather(values) + + +def gather_sequence_torch(values: TensorSeq, dst_rank, group=None, join_fn=torch.cat, join_dim=0): + world_size = dist.get_world_size(group) + this_rank = dist.get_rank(group) + + def _do_gather(tensor): + tensor_list = None + if this_rank == dst_rank: + tensor_list = [torch.empty_like(tensor) for _ in range(world_size)] + dist.gather(tensor, tensor_list, dst=dst_rank, group=group) + return join_fn(tensor_list, dim=join_dim) + + if isinstance(values, dict): + gathered = {k: _do_gather(v) for k, v in values.items()} + return gathered + elif isinstance(values, (list, tuple)): + gathered = type(values)(_do_gather(v) for v in values) + return gathered + else: + # if not a dict, list, tuple, expect a singular tensor + assert isinstance(values, torch.Tensor) + return _do_gather(values) + + +def all_gather_torch(value: TensorSeq, group=None, join_fn: Callable = None, join_dim=0): + if isinstance(value, torch.Tensor): + world_size = dist.get_world_size(group) + out_tensors = [torch.empty_like(value) for _ in range(world_size)] + dist.all_gather(out_tensors, value, group=group) + if join_fn is not None: + out_tensors = join_fn(out_tensors, dim=join_dim) + return out_tensors + elif isinstance(value, dict): + return {k: all_gather_torch(v, group, join_fn, join_dim) for k, v in value.items()} + elif isinstance(value, (tuple, list)): + return type(value)(all_gather_torch(v, group, join_fn, join_dim) for v in value) + + +def gather_torch(value: TensorSeq, dst_rank=0, group=None, join_fn: Callable = None, join_dim=0): + if isinstance(value, torch.Tensor): + world_size = dist.get_world_size(group) + this_rank = dist.get_rank() + out_tensors = None + if this_rank == dst_rank: + out_tensors = [torch.empty_like(value) for _ in range(world_size)] + dist.gather(value, out_tensors, dst=dst_rank, group=group) + if join_fn is not None: + out_tensors = join_fn(out_tensors, dim=join_dim) + return out_tensors + elif isinstance(value, 
dict): + return {k: gather_torch(v, dst_rank, group, join_fn, join_dim) for k, v in value.items()} + elif isinstance(value, (tuple, list)): + return type(value)(gather_torch(v, dst_rank, group, join_fn, join_dim) for v in value) + + +def all_reduce_torch(value: TensorSeq, op=ReduceOp.SUM, average=False, group=None): + if isinstance(value, torch.Tensor): + dist.all_reduce(value, op=op, group=group) + if average: + value /= dist.get_world_size(group) + elif isinstance(value, dict): + return {k: all_reduce_torch(v, op=op, average=average, group=group) for k, v in value.items()} + elif isinstance(value, (tuple, list)): + return type(value)(all_reduce_torch(v, op=op, average=average, group=group) for v in value) + + +def broadcast_torch(value: TensorSeq, src_rank: int = 0, group=None): + if isinstance(value, torch.Tensor): + return dist.broadcast(value, src=src_rank, group=group) + elif isinstance(value, dict): + return {k: broadcast_torch(v, src_rank=src_rank, group=group) for k, v in value.items()} + elif isinstance(value, (tuple, list)): + return type(value)(broadcast_torch(v, src_rank=src_rank, group=group) for v in value) \ No newline at end of file diff --git a/timm/bits/grad_clipper.py b/timm/bits/grad_clip.py similarity index 58% rename from timm/bits/grad_clipper.py rename to timm/bits/grad_clip.py index 232f5fc0bb..ba1d846c92 100644 --- a/timm/bits/grad_clipper.py +++ b/timm/bits/grad_clip.py @@ -16,21 +16,11 @@ def get_clip_grad_fn(mode: str = 'norm', norm_type: float = 2.0): assert False, f"Unknown clip mode ({mode})." -def get_clip_parameters(model): +def get_clip_parameters(model, skip_last=0): if hasattr(model, 'get_clip_parameters'): return model.get_clip_parameters() else: - return model.parameters() - - -class GradClipper: - - def __init__(self, model, clip_value, clip_mode='norm'): - self.model = model - self.clip_fn = get_clip_grad_fn(clip_mode) - self.clip_value = clip_value - self.enabled = True - - def __call__(self): - if self.enabled: - self.clip_fn(get_clip_parameters(self.model), self.clip_value) \ No newline at end of file + if skip_last: + return list(model.parameters())[::-skip_last] + else: + return model.parameters() diff --git a/timm/bits/logger.py b/timm/bits/logger.py index d9ad41afda..a7948a8bb7 100644 --- a/timm/bits/logger.py +++ b/timm/bits/logger.py @@ -21,6 +21,8 @@ HAS_WANDB = False +from .device_env_factory import get_device + # FIXME old formatting for reference, to remove # # def log_eval(batch_idx, last_idx, batch_time, loss, top1, top5, log_suffix=''): @@ -84,10 +86,16 @@ def update(self, row_dict): dw.writerow(row_dict) +_sci_keys = {'lr'} + + def _add_kwargs(text_update, name_map=None, **kwargs): def _to_str(key, val): if isinstance(val, float): - return f'{key}: {val:.4f}' + if key.lower() in _sci_keys: + return f'{key}: {val:.3e} ' + else: + return f'{key}: {val:.4f}' else: return f'{key}: {val}' @@ -120,12 +128,13 @@ def __init__( self, experiment_name=None, output_dir=None, - logger=None, + python_logger=None, + hparams=None, log_wandb=False, - hparams=None): - - self.output_dir = output_dir # for tensorboard, csv, console logging to file? - self.logger = logger or logging.getLogger('log') + output_enabled=True, + ): + self.output_dir = output_dir # for tensorboard, csv, text file (TODO) logging + self.logger = python_logger or logging.getLogger('log') hparams = hparams or {} # Setup CSV writer(s) @@ -146,28 +155,32 @@ def __init__( _logger.warning("You've requested to log metrics to wandb but package not found. 
" "Metrics not being logged to wandb, try `pip install wandb`") + self.output_enabled = output_enabled # FIXME image save def log_step( self, phase: str, step: int, - end_step: Optional[int] = None, + step_end: Optional[int] = None, + epoch: Optional[int] = None, loss: Optional[float] = None, rate: Optional[float] = None, - epoch: Optional[int] = None, phase_suffix: str = '', **kwargs, ): """ log train/eval step """ - phase_title = f'{phase.capitalize()} ({phase_suffix})' if phase_suffix else f'{phase.capitalize()}' - progress = 100. * step / end_step if end_step else 0. + if not self.output_enabled: + return + + phase_title = f'{phase.capitalize()} ({phase_suffix})' if phase_suffix else f'{phase.capitalize()}:' + progress = 100. * step / step_end if step_end else 0. text_update = [ phase_title, - f'Epoch: {epoch}' if epoch is not None else None, - f'Step: {step}' if end_step is None else None, - f'Step: [{step}/{end_step} ({progress:>3.0f}%)]' if end_step is not None else None, + f'{epoch}' if epoch is not None else None, + f'[{step}]' if step_end is None else None, + f'[{step}/{step_end} ({progress:>3.0f}%)]' if step_end is not None else None, f'Rate: {rate:.2f}/s' if rate is not None else None, f'Loss: {loss:.5f}' if loss is not None else None, ] @@ -187,6 +200,9 @@ def log_phase( ): """log completion of evaluation or training phase """ + if not self.output_enabled: + return + title = [ f'{phase.capitalize()}', f'epoch: {epoch}' if epoch is not None else None, @@ -212,6 +228,9 @@ def write_summary( index: value for row index (typically epoch #) index_name: name for row index header (typically 'epoch') """ + if not self.output_enabled: + return + row_dict = summary_row_dict(index=index, index_name=index_name, results=results) if self.csv_writer: self.csv_writer.update(row_dict) diff --git a/timm/bits/metric.py b/timm/bits/metric.py new file mode 100644 index 0000000000..7a5cc997ad --- /dev/null +++ b/timm/bits/metric.py @@ -0,0 +1,142 @@ +import abc +from typing import Callable, Union, Optional, List, Tuple, Dict +from dataclasses import dataclass + +import torch +from torch.distributed import ReduceOp + +from .device_env import DeviceEnv +from .device_env_factory import get_device +from .distributed import all_gather_sequence, all_reduce_sequence + +MetricValue = Union[float, torch.Tensor, List[float], List[torch.Tensor]] + +@dataclass +class ValueInfo: + initial: Optional[MetricValue] = 0. 
+ dtype: torch.dtype = torch.float32 + dist_reduce: str = 'sum' + dist_average: bool = False + + +class Metric(abc.ABC): + + def __init__(self, dev_env: DeviceEnv = None): + self._infos: Dict[str, ValueInfo] = {} + self._values: Dict[str, Optional[MetricValue]] = {} + self._values_dist: Dict[str, Optional[MetricValue]] = {} + if dev_env is None: + dev_env = get_device() + self._dev_env = dev_env + + def _register_value(self, name: str, info: Optional[ValueInfo] = None): + info = info or ValueInfo() + self._infos[name] = info + + # def get_value(self, name: str, use_dist=True): + # if use_dist: + # return self._values_dist.get(name, self._values.get(name)) + # else: + # return self._values.get(name) + + def __getattr__(self, item): + if item not in self._infos: + raise AttributeError + value = self._values_dist.get(item, self._values.get(item, None)) + return value + + def __setattr__(self, key, value): + if '_infos' in self.__dict__ and key in self._infos: + self._values[key] = value + else: + super().__setattr__(key, value) + + def update( + self, + predictions: Union[torch.Tensor, Dict[str, torch.Tensor]], + target: Union[torch.Tensor, Dict[str, torch.Tensor]]): + self._update(predictions, target) + + def _update( + self, + predictions: Union[torch.Tensor, Dict[str, torch.Tensor]], + target: Union[torch.Tensor, Dict[str, torch.Tensor]]): + pass + + def reset(self): + self._values = {} + self._values_dist = {} + for name, info in self._infos.items(): + # if info specifies an initial value, we reset here, otherwise set to None and leave it to child class + if info.initial is not None: + if isinstance(info.initial, torch.Tensor): + tensor = info.initial.detach().clone() + else: + tensor = torch.ones([], dtype=info.dtype) * info.initial # scalar + self._values[name] = tensor.to(device=self._dev_env.device, dtype=info.dtype) + else: + self._values[name] = None + self._reset() + + def _reset(self): + pass + + def compute(self) -> Union[torch.Tensor, Tuple[torch.Tensor, ...], Dict[str, torch.Tensor]]: + if self._dev_env.distributed: + self._distribute_values() + results = self._compute() + self._values_dist = {} + return results + + @abc.abstractmethod + def _compute(self) -> Union[torch.Tensor, Tuple[torch.Tensor, ...], Dict[str, torch.Tensor]]: + pass + + def _distribute_values(self): + if not self._infos or not self._values: + return + + def _args(op: str): + if op == 'cat': + return True, dict(cat_dim=0) + else: + return False, dict(op=ReduceOp.SUM) + + prev_dsr = None + same_dsr = True + names = [] + values = [] + reductions = [] + for name, value in self._values.items(): + if value is not None: + info = self._infos[name] + dsr = (value.dtype, value.shape, info.dist_reduce) + if prev_dsr is not None and prev_dsr != dsr: + same_dsr = False + prev_dsr = dsr + names.append(name) + values.append(value) + reductions.append(_args(info.dist_reduce)) + same_dsr = False + if same_dsr: + do_gather, reduce_kwargs = reductions[0] + if do_gather: + reduced_values = all_gather_sequence(values, dev_env=self._dev_env, **reduce_kwargs) + else: + reduced_values = all_reduce_sequence(values, dev_env=self._dev_env, **reduce_kwargs) + for name, reduced_value in zip(names, reduced_values): + info = self._infos[name] + if info.dist_average: + reduced_value /= self._dev_env.world_size + self._values_dist[name] = reduced_value + else: + for n, v, r in zip(names, values, reductions): + info = self._infos[n] + do_gather, reduce_kwargs = r + if do_gather: + reduced_value = self._dev_env.all_gather(v, **reduce_kwargs) 
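To illustrate the pattern this base class encodes (register named values, accumulate them in _update, reduce across ranks before _compute), a minimal hypothetical subclass could look like the following; the class and value names are examples only, while the Metric/ValueInfo API is used as defined above:

    import torch

    from timm.bits.metric import Metric, ValueInfo

    class AvgLossMetric(Metric):
        """Running mean of per-sample loss values (example only)."""

        def __init__(self, dev_env=None):
            super().__init__(dev_env=dev_env)
            # both counters are summed across ranks before compute()
            self._register_value('loss_sum', ValueInfo(dist_reduce='sum'))
            self._register_value('count', ValueInfo(dist_reduce='sum'))
            self.reset()

        def _update(self, predictions: torch.Tensor, target: torch.Tensor):
            # treat `predictions` as a per-sample loss tensor for this example
            self.loss_sum = self.loss_sum + predictions.detach().sum()
            self.count = self.count + predictions.numel()

        def _compute(self) -> torch.Tensor:
            return self.loss_sum / self.count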
+ else: + reduced_value = self._dev_env.all_reduce(v, **reduce_kwargs) + if info.dist_average: + reduced_value /= self._dev_env.world_size + self._values_dist[n] = reduced_value diff --git a/timm/bits/metric_accuracy.py b/timm/bits/metric_accuracy.py new file mode 100644 index 0000000000..0db72c6dce --- /dev/null +++ b/timm/bits/metric_accuracy.py @@ -0,0 +1,98 @@ +import torch +from typing import Optional, Tuple, Dict + +from .device_env import DeviceEnv +from .metric import Metric, ValueInfo + + +class Accuracy(Metric): + + def __init__(self, threshold=0.5, multi_label=False, dev_env=None): + super().__init__(dev_env=dev_env) + self.threshold = threshold + self.eps = 1e-8 + self.multi_label = multi_label + + # statistics / counts + self._register_value('correct') + self._register_value('total') + + def _update(self, predictions, target): + raise NotImplemented() + + def _compute(self): + raise NotImplemented() + + +# class AccuracyTopK(torch.nn.Module): +# +# def __init__(self, topk=(1, 5), device=None): +# super().__init__() +# self.eps = 1e-8 +# self.device = device +# self.topk = topk +# self.maxk = max(topk) +# # FIXME handle distributed operation +# +# # statistics / counts +# self.reset() +# +# def update(self, predictions: torch.Tensor, target: torch.Tensor): +# sorted_indices = predictions.topk(self.maxk, dim=1)[1] +# sorted_indices.t_() +# correct = sorted_indices.eq(target.reshape(1, -1).expand_as(sorted_indices)) +# +# batch_size = target.shape[0] +# correct_k = {k: correct[:k].reshape(-1).float().sum(0) for k in self.topk} +# for k, v in correct_k.items(): +# attr = f'_correct_top{k}' +# old_v = getattr(self, attr) +# setattr(self, attr, old_v + v) +# self._total_sum += batch_size +# +# def reset(self): +# for k in self.topk: +# setattr(self, f'_correct_top{k}', torch.tensor(0, dtype=torch.float32)) +# self._total_sum = torch.tensor(0, dtype=torch.float32) +# +# @property +# def counts(self): +# pass +# +# def compute(self) -> Dict[str, torch.Tensor]: +# # FIXME handle distributed reduction +# return {f'top{k}': 100 * getattr(self, f'_correct_top{k}') / self._total_sum for k in self.topk} + + +class AccuracyTopK(Metric): + + def __init__(self, topk=(1, 5), dev_env: DeviceEnv = None): + super().__init__(dev_env=dev_env) + self.eps = 1e-8 + self.topk = topk + self.maxk = max(topk) + + # statistics / counts + for k in self.topk: + self._register_value(f'top{k}') + self._register_value('total') + self.reset() + + def _update(self, predictions: torch.Tensor, target: torch.Tensor): + batch_size = predictions.shape[0] + sorted_indices = predictions.topk(self.maxk, dim=1)[1] + target_reshape = target.reshape(-1, 1).expand_as(sorted_indices) + correct = sorted_indices.eq(target_reshape).float().sum(0) + for k in self.topk: + attr_name = f'top{k}' + correct_at_k = correct[:k].sum() + setattr(self, attr_name, getattr(self, attr_name) + correct_at_k) + self.total += batch_size + + def _compute(self) -> Dict[str, torch.Tensor]: + assert self.total is not None + output = {} + for k in self.topk: + attr_name = f'top{k}' + output[attr_name] = 100 * getattr(self, attr_name) / self.total + return output diff --git a/timm/metrics/precision_recall.py b/timm/bits/metric_precision_recall.py similarity index 100% rename from timm/metrics/precision_recall.py rename to timm/bits/metric_precision_recall.py diff --git a/timm/bits/tracker.py b/timm/bits/tracker.py index 12e0106b22..7abbf95eee 100644 --- a/timm/bits/tracker.py +++ b/timm/bits/tracker.py @@ -1,16 +1,16 @@ import time from typing import 
Optional -from timm.metrics import ScalarAvgMinMax +from .avg_scalar import AvgMinMaxScalar class Tracker: def __init__(self): - self.data_time = ScalarAvgMinMax() # time for data loader to produce batch of samples - self.step_time = ScalarAvgMinMax() # time for model step - self.iter_time = ScalarAvgMinMax() # full iteration time incl. data, step, and book-keeping - self.epoch_time = ScalarAvgMinMax() + self.data_time = AvgMinMaxScalar() # time for data loader to produce batch of samples + self.step_time = AvgMinMaxScalar() # time for model step + self.iter_time = AvgMinMaxScalar() # full iteration time incl. data, step, and book-keeping + self.epoch_time = AvgMinMaxScalar() self.iter_timestamp: Optional[float] = None self.prev_timestamp: Optional[float] = None @@ -48,3 +48,12 @@ def mark_epoch(self): self.epoch_time.update(epoch_time) self.epoch_timestamp = timestamp + def get_avg_iter_rate(self, num_per_iter: int): + if num_per_iter == 0 or self.iter_time.avg == 0: + return 0 + return num_per_iter / self.iter_time.avg + + def get_last_iter_rate(self, num_per_iter: int): + if num_per_iter == 0 or self.iter_time.val == 0: + return 0 + return num_per_iter / self.iter_time.val diff --git a/timm/bits/train_cfg.py b/timm/bits/train_cfg.py new file mode 100644 index 0000000000..d7b35faf5e --- /dev/null +++ b/timm/bits/train_cfg.py @@ -0,0 +1,12 @@ +from dataclasses import dataclass + + +@dataclass +class TrainCfg: + """ Train Loop Configuration + Dataclass to propagate training configuration values + """ + num_epochs: int = 0 + log_interval: int = 50 + recovery_interval: int = 0 + accumulate_steps: int = 0 diff --git a/timm/bits/train_services.py b/timm/bits/train_services.py new file mode 100644 index 0000000000..286a4afc61 --- /dev/null +++ b/timm/bits/train_services.py @@ -0,0 +1,13 @@ +from dataclasses import dataclass + +from .logger import Logger +from timm.utils.checkpoint_saver import CheckpointSaver + + +@dataclass +class TrainServices: + """ Train Loop Services + """ + logger: Logger = None + saver: CheckpointSaver = None + diff --git a/timm/bits/train_setup.py b/timm/bits/train_setup.py new file mode 100644 index 0000000000..992546a789 --- /dev/null +++ b/timm/bits/train_setup.py @@ -0,0 +1,153 @@ +import dataclasses +from typing import Callable, Union, Optional +import logging + +import torch +import torch.nn as nn + +from timm.optim import create_optimizer_v2 +from timm.utils import ModelEmaV2 + +try: + import deepspeed as ds +except ImportError: + ds = None + +from .checkpoint import resume_train_checkpoint +from .device_env import DeviceEnv +from .train_cfg import TrainCfg +from .train_state import TrainState +from .updater_factory import create_updater + + +_logger = logging.getLogger(__name__) + + +def setup_model_and_optimizer( + dev_env: DeviceEnv, + model: nn.Module, + optimizer: Union[Callable, str], + optimizer_cfg, + clip_fn: Optional[Union[Callable, str]] = None, + clip_value: Optional[float] = None, + model_ema: bool = False, + model_ema_decay: float = 0.9999, + use_syncbn: bool = False, + resume_path: str = '', + resume_opt: bool = True, + deepspeed: bool = False, +): + """ + + Args: + dev_env: + model: + optimizer: + optimizer_cfg: + clip_value: + clip_fn: + model_ema: + model_ema_decay: + use_syncbn: + resume_path: + resume_opt: + + Returns: + + """ + if deepspeed: + return setup_model_and_optimizer_deepspeed( + dev_env=dev_env, model=model, optimizer=optimizer, optimizer_cfg=optimizer_cfg, + clip_fn=clip_fn, clip_value=clip_value, model_ema=model_ema, 
model_ema_decay=model_ema_decay, + resume_path=resume_path, resume_opt=resume_opt, + ) + + dev_env.to_device(model) + + if use_syncbn and dev_env.distributed: + model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model) + if dev_env.primary: + _logger.info( + 'Converted model to use Synchronized BatchNorm. WARNING: You may have issues if using ' + 'zero initialized BN layers (enabled by default for ResNets) while sync-bn enabled.') + + if isinstance(optimizer, Callable): + optimizer = optimizer(model=model, **optimizer_cfg) + else: + optimizer = create_optimizer_v2(model=model, **optimizer_cfg) + + updater = create_updater( + model=model, + optimizer=optimizer, + clip_fn=clip_fn, + clip_value=clip_value, + ) + + # ema model + model_ema = ModelEmaV2(model, decay=model_ema_decay) if model_ema else None + + train_state = TrainState(model=model, updater=updater, model_ema=model_ema) + + if resume_path: + resume_train_checkpoint( + train_state, + resume_path, + resume_opt=resume_opt, + log_info=dev_env.primary) + + if dev_env.distributed: + train_state = dataclasses.replace( + train_state, model=dev_env.wrap_distributed(train_state.model)) + + return train_state + + +def setup_model_and_optimizer_deepspeed( + dev_env: DeviceEnv, + model: nn.Module, + optimizer: Union[Callable, str], + optimizer_cfg, + clip_fn: Optional[Union[Callable, str]] = None, + clip_value: Optional[float] = None, + model_ema: bool = False, + model_ema_decay: float = 0.9999, + use_syncbn: bool = False, + resume_path: str = '', + resume_opt: bool = True, +): + dev_env.to_device(model) + + if isinstance(optimizer, Callable): + optimizer = optimizer(model=model, **optimizer_cfg) + else: + optimizer = create_optimizer_v2(model=model, **optimizer_cfg) + + model = ds.initialize(model=model, optimizer=optimizer, dist_init_required=False) + + updater = create_updater( + model=model, + optimizer=optimizer, + clip_fn=clip_fn, + clip_value=clip_value, + deepspeed=True, + ) + + # ema model + # FIXME how to do EMA w/ deepspeed? 
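A hypothetical end-to-end sketch of the non-deepspeed path above (the model name, optimizer settings, and values are placeholders; the helper and its arguments mirror how train.py calls it later in this patch):

    from timm.bits import initialize_device, setup_model_and_optimizer
    from timm.models import create_model

    dev_env = initialize_device(amp=True)
    model = create_model('resnet50', pretrained=False, num_classes=1000)

    train_state = setup_model_and_optimizer(
        dev_env=dev_env,
        model=model,
        optimizer='sgd',  # string form; optimizer_cfg is forwarded to create_optimizer_v2
        optimizer_cfg=dict(opt='sgd', lr=0.1, momentum=0.9, weight_decay=1e-4),
        clip_fn='norm',
        clip_value=1.0,
        model_ema=True,
        use_syncbn=dev_env.distributed,
    )
    # train_state.model is on-device (DDP-wrapped when distributed),
    # train_state.updater wraps the optimizer, train_state.model_ema holds the EMA copy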
+ model_ema = ModelEmaV2(model, decay=model_ema_decay) if model_ema else None + + train_state = TrainState(model=model, updater=updater, model_ema=model_ema) + + if resume_path: + # FIXME deepspeed resumes differently + resume_train_checkpoint( + train_state, + resume_path, + resume_opt=resume_opt, + log_info=dev_env.primary) + + if dev_env.distributed: + train_state = dataclasses.replace( + train_state, model=dev_env.wrap_distributed(train_state.model)) + + return train_state diff --git a/timm/bits/train_state.py b/timm/bits/train_state.py new file mode 100644 index 0000000000..9a9a0d9221 --- /dev/null +++ b/timm/bits/train_state.py @@ -0,0 +1,33 @@ +from dataclasses import dataclass +from typing import Dict, Any + +from torch import nn as nn + +from timm.scheduler import Scheduler +from .updater import Updater + + +@dataclass +class TrainState: + model: nn.Module = None + train_loss: nn.Module = None + eval_loss: nn.Module = None + updater: Updater = None + lr_scheduler: Scheduler = None + model_ema: nn.Module = None + + step_count_epoch: int = 0 + step_count_global: int = 0 + epoch: int = 0 + + def __post_init__(self): + assert self.model is not None + assert self.updater is not None + + +def serialize_train_state(train_state: TrainState): + pass + + +def deserialize_train_state(checkpoint: Dict[str, Any]): + pass \ No newline at end of file diff --git a/timm/bits/updater.py b/timm/bits/updater.py index 6612c8eafd..422d12ec7f 100644 --- a/timm/bits/updater.py +++ b/timm/bits/updater.py @@ -1,54 +1,68 @@ +from dataclasses import dataclass, field, InitVar +from functools import partial from typing import Callable, Optional, Union import torch +import torch.nn as nn -from .grad_clipper import GradClipper +from .grad_clip import get_clip_grad_fn, get_clip_parameters +@dataclass class Updater: - def __init__( - self, - optimizer: torch.optim.Optimizer, - clip_value: Optional[Union[Callable, float]] = None, - clip_mode: str = 'norm'): - - self.optimizer = optimizer - self.clipper: Optional[GradClipper] = None - if clip_value is not None: - if isinstance(clip_value, Callable): - self.clipper = clip_value + model: nn.Module = None + optimizer: torch.optim.Optimizer = None # FIXME handle multiple optimizers per-model + clip_fn: Optional[Union[Callable, str]] = None + clip_value: Optional[float] = None + clip_params_fn: Optional[Callable] = None + grad_scaler: Optional[Callable] = None + create_graph: Optional[bool] = None + after_step_closure: bool = False + + def __post_init__(self): + assert self.model is not None + assert self.optimizer is not None + if self.clip_fn is not None: + if isinstance(self.clip_fn, Callable): + skip_last = 0 else: - GradClipper(clip_value, clip_mode) - self.scaler = None - self.create_graph = getattr(self.optimizer, 'second_order', False) - self.num_accumulated = 0 + assert isinstance(self.clip_fn, str) + skip_last = 2 if 'agc' in self.clip_fn else 0 + self.clip_fn = get_clip_grad_fn(self.clip_fn) + assert self.clip_value is not None + self.clip_params_fn = partial(get_clip_parameters, model=self.model, skip_last=skip_last) + if self.create_graph is None: + self.create_graph = getattr(self.optimizer, 'second_order', False) self.after_step_closure = False + def reset(self): + self.optimizer.zero_grad() + def apply(self, loss: torch.Tensor, accumulate=False): loss.backward(create_graph=self.create_graph) - if self.clipper is not None: - self.clipper() - if not accumulate: - self.optimizer.step() - self.reset() - else: - self.num_accumulated += 1 + if accumulate: + 
return + if self.clip_fn is not None: + self.clip_fn(self.clip_params_fn(), self.clip_value) + self.optimizer.step() + self.reset() - def reset(self): - self.optimizer.zero_grad() - self.num_accumulated = 0 + def get_average_lr(self): + lrl = [param_group['lr'] for param_group in self.optimizer.param_groups if param_group['lr'] > 0] + return sum(lrl) / len(lrl) def state_dict(self): state_dict = dict(optimizer=self.optimizer.state_dict()) - if self.scaler is not None: - state_dict['scaler'] = self.scaler.state_dict() + if self.grad_scaler is not None: + state_dict['grad_scaler'] = self.grad_scaler.state_dict() def load_state_dict(self, state_dict): if 'optimizer' in state_dict: self.optimizer.load_state_dict(state_dict['optimizer']) - if 'scaler' in state_dict and self.scaler is not None: - self.scaler.load_state_dict(state_dict['scaler']) - + if 'grad_scaler' in state_dict and self.grad_scaler is not None: + self.grad_scaler.load_state_dict(state_dict['grad_scaler']) + def after_step(self, after_step_fn, *args): + after_step_fn(*args) diff --git a/timm/bits/updater_cuda.py b/timm/bits/updater_cuda.py index 799aef00c3..33f984db0d 100644 --- a/timm/bits/updater_cuda.py +++ b/timm/bits/updater_cuda.py @@ -1,36 +1,30 @@ -from typing import Callable, Optional, Union, Any +from dataclasses import dataclass, field, InitVar +from typing import Dict, Any import torch from .updater import Updater -class UpdaterCuda(Updater): - def __init__( - self, - optimizer: torch.optim.Optimizer, - clip_value: Optional[Union[Callable, float]] = None, - clip_mode: str = 'norm', - use_scaler: bool = False, - scaler_kwargs: Any = None, - ): - super().__init__(optimizer=optimizer, clip_value=clip_value, clip_mode=clip_mode) +@dataclass +class UpdaterCudaWithScaler(Updater): + + scaler_kwargs: InitVar[Dict[str, Any]] = None + + def __post_init__(self, scaler_kwargs: Dict[str, Any]): + super().__post_init__() scaler_kwargs = scaler_kwargs or {} - if use_scaler: - self.scaler = torch.cuda.amp.GradScaler(**scaler_kwargs) + self.grad_scaler = torch.cuda.amp.GradScaler(**scaler_kwargs) def apply(self, loss: torch.Tensor, accumulate=False): - if self.scaler is not None: - self.scaler.scale(loss).backward(create_graph=self.create_graph) - if self.clipper is not None: - self.scaler.unscale_(self.optimizer) # unscale the gradients of optimizer's assigned params in-place - self.clipper() - if not accumulate: - self.scaler.step(self.optimizer) - self.reset() - else: - self.num_accumulated += 1 - self.scaler.update() - else: - Updater.apply(self, loss, accumulate) - + self.grad_scaler.scale(loss).backward(create_graph=self.create_graph) + if accumulate: + # unscale first? 
+ return + if self.clip_fn is not None: + # unscale the gradients of optimizer's assigned params in-place + self.grad_scaler.unscale_(self.optimizer) + self.clip_fn(self.clip_params_fn(), self.clip_value) + self.grad_scaler.step(self.optimizer) + self.grad_scaler.update() + self.reset() diff --git a/timm/bits/updater_deepspeed.py b/timm/bits/updater_deepspeed.py new file mode 100644 index 0000000000..e080a7deb4 --- /dev/null +++ b/timm/bits/updater_deepspeed.py @@ -0,0 +1,26 @@ +from dataclasses import dataclass, field, InitVar + +import torch +try: + import deepspeed as ds +except ImportError as e: + ds = None + +from .updater import Updater + + +@dataclass +class UpdaterDeepSpeed(Updater): + + def __post_init__(self): + super().__post_init__() + # FIXME not sure how to deal with model.module / grad clipping w/ DS engine interface + assert isinstance(self.model, ds.DeepSpeedEngine) + + def reset(self): + self.model.zero_grad() + + def apply(self, loss: torch.Tensor, accumulate=False): + self.model.backward(loss) + self.model.step() + self.reset() diff --git a/timm/bits/updater_factory.py b/timm/bits/updater_factory.py index aba008d2bb..24ef76c0f8 100644 --- a/timm/bits/updater_factory.py +++ b/timm/bits/updater_factory.py @@ -2,29 +2,38 @@ import torch -from .device_env import DeviceEnv +from .device_env import DeviceEnv, DeviceEnvType from .device_env_factory import get_device from .updater import Updater -from .updater_cuda import UpdaterCuda -from .updater_xla import UpdaterXla +from .updater_cuda import UpdaterCudaWithScaler +from .updater_deepspeed import UpdaterDeepSpeed +from .updater_xla import UpdaterXla, UpdaterXlaWithScaler def create_updater( + model: torch.nn.Module, optimizer: torch.optim.Optimizer, + clip_fn: Optional[Union[Callable, str]] = None, + clip_value: Optional[float] = None, + scaler_kwargs: Any = None, dev_env: Optional[DeviceEnv] = None, - clip_value: Optional[Union[Callable, float]] = None, - clip_mode: str = 'norm', - scaler_kwargs: Any = None) -> Updater: + deepspeed: bool = False, +) -> Updater: if not dev_env: dev_env = get_device() - updater_kwargs = dict( - optimizer=optimizer, clip_value=clip_value, clip_mode=clip_mode, scaler_kwargs=scaler_kwargs) - if dev_env.type == 'xla': - return UpdaterXla(**updater_kwargs, use_scaler=dev_env.amp) - elif dev_env.type == 'cuda': - return UpdaterCuda(**updater_kwargs, use_scaler=dev_env.amp) - else: - updater_kwargs.pop('scaler_kwargs', None) - return Updater(**updater_kwargs) + updater_kwargs = dict(model=model, optimizer=optimizer, clip_fn=clip_fn, clip_value=clip_value) + use_scaler = dev_env.amp + if use_scaler: + updater_kwargs['scaler_kwargs'] = scaler_kwargs + updater_cls = Updater + if dev_env.type == DeviceEnvType.XLA: + updater_cls = UpdaterXlaWithScaler if use_scaler else UpdaterXla + elif dev_env.type == DeviceEnvType.CUDA and use_scaler: + updater_cls = UpdaterCudaWithScaler + elif deepspeed: + del updater_kwargs['scaler_kwargs'] + updater_cls = UpdaterDeepSpeed + + return updater_cls(**updater_kwargs) diff --git a/timm/bits/updater_xla.py b/timm/bits/updater_xla.py index 25287ad9fb..935e199438 100644 --- a/timm/bits/updater_xla.py +++ b/timm/bits/updater_xla.py @@ -1,6 +1,8 @@ -from typing import Callable, Optional, Union, Any +from dataclasses import dataclass, field, InitVar +from typing import Any, Dict import torch +import torch.nn as nn try: import torch_xla.core.xla_model as xm @@ -18,41 +20,49 @@ from .updater import Updater +@dataclass class UpdaterXla(Updater): - def __init__( - self, - 
optimizer: torch.optim.Optimizer, - clip_value: Optional[Union[Callable, float]] = None, - clip_mode: str = 'norm', - use_scaler: bool = False, - scaler_kwargs: Any = None, - ): - super().__init__(optimizer=optimizer, clip_value=clip_value, clip_mode=clip_mode) + def __post_init__(self): + super().__post_init__() self.after_step_closure = True - if use_scaler: - assert xa is not None, 'XLA AMP not present in this build' - self.scaler = xa.GradScaler(**scaler_kwargs) def apply(self, loss: torch.Tensor, accumulate: bool = False): - if self.scaler is None: - loss.backward(create_graph=self.create_graph) - gradients = xm._fetch_gradients(self.optimizer) - xm.all_reduce('sum', gradients, scale=1.0 / xm.xrt_world_size()) - if self.clipper is not None: - self.clipper() - if not accumulate: - xm.optimizer_step(self.optimizer) - else: - self.scaler.scale(loss).backward(create_graph=self.create_graph) - if self.clipper is not None: - self.scaler.unscale_(self.optimizer) # unscale the gradients of optimizer's assigned params in-place - self.clipper() - if not accumulate: - self.scaler.step(self.optimizer) - self.reset() - self.scaler.update() + loss.backward(create_graph=self.create_graph) + if accumulate: + return + xm.reduce_gradients(self.optimizer) + if self.clip_fn is not None: + self.clip_fn(self.clip_params_fn(), self.clip_value) + self.optimizer.step() + xm.mark_step() + self.reset() def after_step(self, after_step_fn, *args): - xm.add_step_closure(after_step_fn, *args) + xm.add_step_closure(after_step_fn, args) + +@dataclass +class UpdaterXlaWithScaler(UpdaterXla): + + scaler_kwargs: InitVar[Dict[str, Any]] = None + + def __post_init__(self, scaler_kwargs: Dict[str, Any]): + super().__post_init__() + scaler_kwargs = scaler_kwargs or {} + assert xa is not None, 'XLA AMP not present in this build' + self.scaler = xa.GradScaler(**scaler_kwargs) + + def apply(self, loss: torch.Tensor, accumulate: bool = False): + self.scaler.scale(loss).backward(create_graph=self.create_graph) + if accumulate: + # unscale first? 
+ return + xm.reduce_gradients(self.optimizer) + if self.clip_fn is not None: + self.scaler.unscale_(self.optimizer) # unscale the gradients of optimizer's assigned params in-place + self.clip_fn(self.clip_params_fn(), self.clip_value) + self.scaler.step(self.optimizer) + self.scaler.update() + xm.mark_step() + self.reset() diff --git a/timm/data/fetcher.py b/timm/data/fetcher.py index 1cbc3fe5be..ec5afe8a65 100644 --- a/timm/data/fetcher.py +++ b/timm/data/fetcher.py @@ -23,24 +23,20 @@ def __init__(self, re_count=1, re_num_splits=0): self.loader = loader - self.mean = torch.tensor([x * 255 for x in mean]).view(1, 3, 1, 1) - self.std = torch.tensor([x * 255 for x in std]).view(1, 3, 1, 1) self.device = torch.device(device) self.dtype = dtype or torch.float32 - if device: - self.mean.to(device=device, dtype=self.dtype) - self.std.to(device=device, dtype=self.dtype) + self.mean = torch.tensor([x * 255 for x in mean], dtype=self.dtype, device=self.device).view(1, 3, 1, 1) + self.std = torch.tensor([x * 255 for x in std], dtype=self.dtype, device=self.device).view(1, 3, 1, 1) if re_prob > 0.: self.random_erasing = RandomErasing( - probability=re_prob, mode=re_mode, max_count=re_count, num_splits=re_num_splits) + probability=re_prob, mode=re_mode, max_count=re_count, num_splits=re_num_splits, device=device) else: self.random_erasing = None def __iter__(self): for sample, target in self.loader: - sample = sample.to(device=self.device) + sample = sample.to(device=self.device, dtype=self.dtype).sub_(self.mean).div_(self.std) target = target.to(device=self.device) - sample = sample.to(dtype=self.dtype).sub_(self.mean).div_(self.std) if self.random_erasing is not None: sample = self.random_erasing(sample) yield sample, target diff --git a/timm/data/loader.py b/timm/data/loader.py index 45d40908c2..5ddcc6d200 100644 --- a/timm/data/loader.py +++ b/timm/data/loader.py @@ -8,7 +8,7 @@ import torch.utils.data -from timm.bits import get_device +from timm.bits import get_device, DeviceEnvType from .fetcher import Fetcher from .prefetcher_cuda import PrefetcherCuda @@ -78,7 +78,7 @@ def create_loader( dev_env = get_device() sampler = None - if dev_env.is_distributed and not isinstance(dataset, torch.utils.data.IterableDataset): + if dev_env.distributed and not isinstance(dataset, torch.utils.data.IterableDataset): if is_training: sampler = torch.utils.data.distributed.DistributedSampler( dataset, num_replicas=dev_env.world_size, rank=dev_env.global_rank) @@ -117,7 +117,7 @@ def create_loader( re_count=re_count, re_num_splits=re_num_splits ) - if dev_env.type == 'cuda': + if dev_env.type_cuda: loader = PrefetcherCuda(loader, **fetcher_kwargs) else: loader = Fetcher(loader, device=dev_env.device, **fetcher_kwargs) diff --git a/timm/data/parsers/parser_tfds.py b/timm/data/parsers/parser_tfds.py index 92495d12e4..519be03d43 100644 --- a/timm/data/parsers/parser_tfds.py +++ b/timm/data/parsers/parser_tfds.py @@ -82,7 +82,7 @@ def __init__(self, root, name, split='train', shuffle=False, is_training=False, self.dist_num_replicas = 1 dev_env = get_device() # FIXME allow to work without devenv usage? 
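A hypothetical sketch of building a loader against the device environment, per the loader/fetcher changes above (dataset name and root are placeholders; create_loader resolves the sampler and the CUDA prefetcher vs. plain Fetcher via get_device() internally):

    from timm.bits import initialize_device
    from timm.data import create_dataset, create_loader

    dev_env = initialize_device()  # must run before create_loader calls get_device()

    dataset_train = create_dataset('', root='/path/to/imagenet', split='train', is_training=True)

    loader_train = create_loader(
        dataset_train,
        input_size=(3, 224, 224),
        batch_size=128,
        is_training=True,
    )
    # the fetcher yields (sample, target) already moved to dev_env.device and normalized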
- if dev_env.is_distributed and dev_env.world_size > 1: + if dev_env.distributed and dev_env.world_size > 1: self.dist_rank = dev_env.global_rank self.dist_num_replicas = dev_env.world_size # if dist.is_available() and dist.is_initialized() and dist.get_world_size() > 1: @@ -150,8 +150,10 @@ def _lazy_init(self): ds = self.builder.as_dataset( split=self.subsplit or self.split, shuffle_files=self.shuffle, read_config=read_config) # avoid overloading threading w/ combo fo TF ds threads + PyTorch workers - ds.options().experimental_threading.private_threadpool_size = max(1, MAX_TP_SIZE // num_workers) - ds.options().experimental_threading.max_intra_op_parallelism = 1 + options = tf.data.Options() + options.experimental_threading.private_threadpool_size = max(1, MAX_TP_SIZE // num_workers) + options.experimental_threading.max_intra_op_parallelism = 1 + ds = ds.with_options(options) if self.is_training or self.repeats > 1: # to prevent excessive drop_last batch behaviour w/ IterableDatasets # see warnings at https://pytorch.org/docs/stable/data.html#multi-process-data-loading diff --git a/timm/metrics/__init__.py b/timm/metrics/__init__.py deleted file mode 100644 index 93a2773eb3..0000000000 --- a/timm/metrics/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .accuracy import Accuracy, AccuracyTopK -from .precision_recall import PrecisionRecall -from .scalar_avg import ScalarAvgMinMax -from .tensor_avg import TensorAvg, TensorEma diff --git a/timm/metrics/accuracy.py b/timm/metrics/accuracy.py deleted file mode 100644 index b58a3781ae..0000000000 --- a/timm/metrics/accuracy.py +++ /dev/null @@ -1,114 +0,0 @@ -import torch -from typing import Optional, Tuple, Dict - - -class Accuracy(torch.nn.Module): - - def __init__(self, threshold=0.5, multi_label=False): - self.threshold = threshold - self.eps = 1e-8 - self.multi_label = multi_label - - # statistics / counts - self._correct_sum = torch.tensor(0, dtype=torch.long) - self._total_sum = torch.tensor(0, dtype=torch.long) - - def update(self, predictions, target): - raise NotImplemented() - - def reset(self): - self._correct_sum = 0 - self._total_sum = 0 - - @property - def counts(self): - pass - - def compute(self): - raise NotImplemented() - - -class AccuracyTopK(torch.nn.Module): - - def __init__(self, topk=(1, 5), device=None): - super().__init__() - self.eps = 1e-8 - self.device = device - self.topk = topk - self.maxk = max(topk) - # FIXME handle distributed operation - - # statistics / counts - self.reset() - - def update(self, predictions: torch.Tensor, target: torch.Tensor): - sorted_indices = predictions.topk(self.maxk, dim=1)[1] - sorted_indices.t_() - correct = sorted_indices.eq(target.reshape(1, -1).expand_as(sorted_indices)) - - batch_size = target.shape[0] - correct_k = {k: correct[:k].reshape(-1).float().sum(0) for k in self.topk} - for k, v in correct_k.items(): - attr = f'_correct_top{k}' - old_v = getattr(self, attr) - setattr(self, attr, old_v + v) - self._total_sum += batch_size - - def reset(self): - for k in self.topk: - setattr(self, f'_correct_top{k}', torch.tensor(0, dtype=torch.float32)) - self._total_sum = torch.tensor(0, dtype=torch.float32) - - @property - def counts(self): - pass - - def compute(self) -> Dict[str, torch.Tensor]: - # FIXME handle distributed reduction - return {f'top{k}': 100 * getattr(self, f'_correct_top{k}') / self._total_sum for k in self.topk} - - -# -# class AccuracyTopK: -# -# def __init__(self, topk=(1, 5), device=None): -# self.eps = 1e-8 -# self.device = device -# self.topk = topk -# self.maxk 
= max(topk) -# -# # statistics / counts -# self._correct_sum = None -# self._total_sum = None -# -# def _check_init(self, device): -# to_device = self.device if self.device else device -# if self._correct_sum is None: -# self._correct_sum = {f'top{k}': torch.tensor(0., device=to_device) for k in self.topk} -# if self._total_sum is None: -# self._total_sum = torch.tensor(0, dtype=torch.long, device=to_device) -# -# def update(self, predictions: torch.Tensor, target: torch.Tensor): -# sorted_indices = predictions.topk(self.maxk, dim=1)[1] -# sorted_indices.t_() -# correct = sorted_indices.eq(target.reshape(1, -1).expand_as(sorted_indices)) -# -# batch_size = target.shape[0] -# correct_k = {f'top{k}': correct[:k].reshape(-1).float().sum(0) for k in self.topk} -# self._check_init(device=predictions.device) -# for k, v in correct_k.items(): -# old_v = self._correct_sum[k] -# self._correct_sum[k] = old_v + v -# self._total_sum += batch_size -# -# def reset(self): -# self._correct_sum = None -# self._total_sum = None -# -# @property -# def counts(self): -# pass -# -# def compute(self) -> Dict[str, torch.Tensor]: -# assert self._correct_sum is not None and self._total_sum is not None -# return {k: 100 * v / self._total_sum for k, v in self._correct_sum.items()} diff --git a/timm/scheduler/__init__.py b/timm/scheduler/__init__.py index 6a77898262..60f5e3dffd 100644 --- a/timm/scheduler/__init__.py +++ b/timm/scheduler/__init__.py @@ -3,3 +3,4 @@ from .step_lr import StepLRScheduler from .tanh_lr import TanhLRScheduler from .scheduler_factory import create_scheduler +from .scheduler import Scheduler \ No newline at end of file diff --git a/timm/utils/checkpoint_saver.py b/timm/utils/checkpoint_saver.py index 6aad74ee52..7a13306edc 100644 --- a/timm/utils/checkpoint_saver.py +++ b/timm/utils/checkpoint_saver.py @@ -108,7 +108,8 @@ def _save(self, save_path, epoch, metric=None): save_state['arch'] = self.args.model save_state['args'] = self.args if self.amp_scaler is not None: - save_state[self.amp_scaler.state_dict_key] = self.amp_scaler.state_dict() + amp_key = getattr(self.amp_scaler, 'state_dict_key', 'amp_scaler') + save_state[amp_key] = self.amp_scaler.state_dict() if self.model_ema is not None: save_state['state_dict_ema'] = get_state_dict(self.model_ema, self.unwrap_fn) if metric is not None: diff --git a/timm/utils/clip_grad.py b/timm/utils/clip_grad.py index 7eb40697a2..d1279ac903 100644 --- a/timm/utils/clip_grad.py +++ b/timm/utils/clip_grad.py @@ -3,7 +3,11 @@ from timm.utils.agc import adaptive_clip_grad -def dispatch_clip_grad(parameters, value: float, mode: str = 'norm', norm_type: float = 2.0): +def dispatch_clip_grad( + parameters, + value: float, + mode: str = 'norm', + norm_type: float = 2.0): """ Dispatch to gradient clipping method Args: diff --git a/timm/utils/distributed.py b/timm/utils/distributed.py index 3c5dba8c1d..528b7d42da 100644 --- a/timm/utils/distributed.py +++ b/timm/utils/distributed.py @@ -21,8 +21,8 @@ def distribute_bn(model, world_size, reduce=False): if ('running_mean' in bn_name) or ('running_var' in bn_name): if reduce: # average bn stats across whole group - torch.distributed.all_reduce(bn_buf, op=dist.ReduceOp.SUM) + torch.distributed.all_reduce_recursive(bn_buf, op=dist.ReduceOp.SUM) bn_buf /= float(world_size) else: # broadcast bn stats from rank 0 to whole group - torch.distributed.broadcast(bn_buf, 0) + torch.distributed.broadcast_recursive(bn_buf, 0) diff --git a/train.py b/train.py index de62792923..05da82e299 100755 --- a/train.py +++ b/train.py @@ 
-21,13 +21,15 @@ import logging from collections import OrderedDict from datetime import datetime +from dataclasses import replace +from typing import Tuple import torch import torch.nn as nn import torchvision.utils -from timm.bits import initialize_device, DeviceEnv, create_updater, Updater, Logger, Tracker -from timm.metrics import TensorAvg, AccuracyTopK +from timm.bits import initialize_device, setup_model_and_optimizer, DeviceEnv, Logger, Tracker,\ + TrainState, TrainServices, TrainCfg, AccuracyTopK, AvgTensor from timm.data import create_dataset, create_loader, resolve_data_config, Mixup, FastCollateMixup, AugMixDataset from timm.models import create_model, safe_model_name, resume_checkpoint, load_checkpoint,\ convert_splitbn_model, model_parameters @@ -276,7 +278,7 @@ def main(): args, args_text = _parse_args() dev_env = initialize_device(amp=args.amp) - if dev_env.is_distributed: + if dev_env.distributed: _logger.info('Training in distributed mode with multiple processes, 1 device per process. Process %d, total %d.' % (dev_env.global_rank, dev_env.world_size)) else: @@ -284,6 +286,111 @@ def main(): random_seed(args.seed, dev_env.global_rank) + mixup_active = args.mixup > 0 or args.cutmix > 0. or args.cutmix_minmax is not None + + train_state, train_cfg = setup_train_task(args, dev_env, mixup_active) + + data_config, loader_eval, loader_train = setup_data(args, dev_env, mixup_active) + + # setup checkpoint saver + eval_metric = args.eval_metric + best_metric = None + best_epoch = None + saver = None + output_dir = None + if dev_env.primary: + if args.experiment: + exp_name = args.experiment + else: + exp_name = '-'.join([ + datetime.now().strftime("%Y%m%d-%H%M%S"), + safe_model_name(args.model), + str(data_config['input_size'][-1]) + ]) + output_dir = get_outdir(args.output if args.output else './output/train', exp_name) + decreasing = True if eval_metric == 'loss' else False + saver = CheckpointSaver( # TODO CheckpointSaverV2 + model=train_state.model, + optimizer=train_state.updater.optimizer, + args=args, + model_ema=train_state.model_ema, + amp_scaler=train_state.updater.grad_scaler, + checkpoint_dir=output_dir, + recovery_dir=output_dir, + decreasing=decreasing, + max_history=args.checkpoint_hist) + with open(os.path.join(output_dir, 'args.yaml'), 'w') as f: + f.write(args_text) + + services = TrainServices( + logger=Logger( + output_dir=output_dir, python_logger=_logger, hparams=vars(args), output_enabled=dev_env.primary), + saver=saver, + ) + + try: + for epoch in range(train_state.epoch, train_cfg.num_epochs): + if dev_env.distributed and hasattr(loader_train.sampler, 'set_epoch'): + loader_train.sampler.set_epoch(epoch) + if args.mixup_off_epoch and epoch >= args.mixup_off_epoch: + if loader_train.mixup_enabled: + loader_train.mixup_enabled = False + + train_metrics = train_one_epoch( + dev_env=dev_env, + state=train_state, + services=services, + cfg=train_cfg, + loader=loader_train + ) + + if dev_env.distributed and args.dist_bn in ('broadcast', 'reduce'): + if dev_env.primary: + _logger.info("Distributing BatchNorm running means and vars") + distribute_bn(model, dev_env.world_size, args.dist_bn == 'reduce') + + eval_metrics = evaluate( + train_state.model, + train_state.eval_loss, + loader_eval, + dev_env, + logger=services.logger) + + if train_state.model_ema is not None and not args.model_ema_force_cpu: + if dev_env.distributed and args.dist_bn in ('broadcast', 'reduce'): + distribute_bn(train_state.model_ema, dev_env.world_size, args.dist_bn == 'reduce') + + 
ema_eval_metrics = evaluate( + train_state.model_ema.module, + train_state.eval_loss, + loader_eval, + dev_env, + logger=services.logger, + phase_suffix='EMA') + eval_metrics = ema_eval_metrics + + if train_state.lr_scheduler is not None: + # step LR for next epoch + train_state.lr_scheduler.step(epoch + 1, eval_metrics[eval_metric]) + + if services.logger is not None: + services.logger.write_summary(index=epoch, results=dict(train=train_metrics, eval=eval_metrics)) + + if saver is not None: + # save proper checkpoint with eval metric + save_metric = eval_metrics[eval_metric] + best_metric, best_epoch = saver.save_checkpoint(epoch, metric=save_metric) + + train_state = replace(train_state, epoch=epoch + 1) + + except KeyboardInterrupt: + pass + if best_metric is not None: + _logger.info('*** Best metric: {0} (epoch {1})'.format(best_metric, best_epoch)) + + +def setup_train_task(args, dev_env: DeviceEnv, mixup_active: bool): + model = create_model( args.model, pretrained=args.pretrained, @@ -302,82 +409,69 @@ def main(): assert hasattr(model, 'num_classes'), 'Model must have `num_classes` attr if not set on cmd line/config.' args.num_classes = model.num_classes # FIXME handle model default vs config num_classes more elegantly - if dev_env.is_master: + if dev_env.primary: _logger.info( f'Model {safe_model_name(args.model)} created, param count:{sum([m.numel() for m in model.parameters()])}') - data_config = resolve_data_config(vars(args), model=model, verbose=dev_env.is_master) - # setup augmentation batch splits for contrastive loss or split bn - num_aug_splits = 0 - if args.aug_splits > 0: - assert args.aug_splits > 1, 'A split of 1 makes no sense' - num_aug_splits = args.aug_splits - + assert args.aug_splits == 0 or args.aug_splits > 1, 'A split of 1 makes no sense' # enable split bn (separate bn stats per batch-portion) if args.split_bn: - assert num_aug_splits > 1 or args.resplit - model = convert_splitbn_model(model, max(num_aug_splits, 2)) - - # move model to GPU, enable channels last layout if set - dev_env.to_device(model) - - # setup synchronized BatchNorm for distributed training - if dev_env.is_distributed and args.sync_bn: - assert not args.split_bn - model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model) - if dev_env.is_master: - _logger.info( - 'Converted model to use Synchronized BatchNorm. 
WARNING: You may have issues if using ' - 'zero initialized BN layers (enabled by default for ResNets) while sync-bn enabled.') - - if args.torchscript: - assert not args.sync_bn, 'Cannot use SyncBatchNorm with torchscripted model' - model = torch.jit.script(model) - - updater = create_updater( - create_optimizer_v2(model, **optimizer_kwargs(cfg=args)), - clip_value=args.clip_grad, clip_mode=args.clip_mode) - - # optionally resume from a checkpoint - resume_epoch = None - if args.resume: - resume_epoch = resume_checkpoint( - model, args.resume, - optimizer=None if args.no_resume_opt else updater.optimizer, - loss_scaler=None if args.no_resume_opt else updater.scaler, - log_info=dev_env.is_master) - - # setup exponential moving average of model weights, SWA could be used here too - model_ema = None - if args.model_ema: - # Important to create EMA model after cuda(), DP wrapper, and AMP but before SyncBN and DDP wrapper - model_ema = ModelEmaV2( - model, decay=args.model_ema_decay, device='cpu' if args.model_ema_force_cpu else None) - if args.resume: - load_checkpoint(model_ema.module, args.resume, use_ema=True) - - # setup distributed training - if dev_env.is_distributed: - if dev_env.is_master: - _logger.info("Distributing model.") - model = dev_env.wrap_distributed(model) - # NOTE: EMA model does not need to be wrapped by DDP + assert args.aug_splits > 1 or args.resplit + model = convert_splitbn_model(model, max(args.aug_splits, 2)) + + train_state = setup_model_and_optimizer( + dev_env=dev_env, + model=model, + optimizer=args.opt, + optimizer_cfg=optimizer_kwargs(cfg=args), + clip_fn=args.clip_mode if args.clip_grad is not None else None, + clip_value=args.clip_grad, + model_ema=args.model_ema, + model_ema_decay=args.model_ema_decay, + use_syncbn=args.sync_bn, + ) # setup learning rate schedule and starting epoch - lr_scheduler, num_epochs = create_scheduler(args, updater.optimizer) - start_epoch = 0 - if args.start_epoch is not None: - # a specified start_epoch will always override the resume epoch - start_epoch = args.start_epoch - elif resume_epoch is not None: - start_epoch = resume_epoch - if lr_scheduler is not None and start_epoch > 0: - lr_scheduler.step(start_epoch) - - if dev_env.is_master: + # FIXME move into updater? 
+ lr_scheduler, num_epochs = create_scheduler(args, train_state.updater.optimizer) + if lr_scheduler is not None and train_state.epoch > 0: + lr_scheduler.step(train_state.epoch) + + # setup loss function + if args.jsd: + assert args.aug_splits > 1 # JSD only valid with aug splits set + train_loss_fn = JsdCrossEntropy(num_splits=args.aug_splits, smoothing=args.smoothing) + elif mixup_active: + # smoothing is handled with mixup target transform + train_loss_fn = SoftTargetCrossEntropy() + elif args.smoothing: + train_loss_fn = LabelSmoothingCrossEntropy(smoothing=args.smoothing) + else: + train_loss_fn = nn.CrossEntropyLoss() + eval_loss_fn = nn.CrossEntropyLoss() + dev_env.to_device(train_loss_fn, eval_loss_fn) + + if dev_env.primary: _logger.info('Scheduled epochs: {}'.format(num_epochs)) + train_state = replace( + train_state, + lr_scheduler=lr_scheduler, + train_loss=train_loss_fn, + eval_loss=eval_loss_fn) + + train_cfg = TrainCfg( + num_epochs=num_epochs, + log_interval=args.log_interval, + recovery_interval=args.recovery_interval) + + return train_state, train_cfg + + +def setup_data(args, dev_env, mixup_active): + data_config = resolve_data_config(vars(args), model=model, verbose=dev_env.primary) + # create the train and eval datasets dataset_train = create_dataset( args.dataset, @@ -388,18 +482,17 @@ def main(): # setup mixup / cutmix collate_fn = None - mixup_active = args.mixup > 0 or args.cutmix > 0. or args.cutmix_minmax is not None if mixup_active: mixup_args = dict( mixup_alpha=args.mixup, cutmix_alpha=args.cutmix, cutmix_minmax=args.cutmix_minmax, prob=args.mixup_prob, switch_prob=args.mixup_switch_prob, mode=args.mixup_mode, label_smoothing=args.smoothing, num_classes=args.num_classes) - assert not num_aug_splits # collate conflict (need to support deinterleaving in collate mixup) + assert not args.aug_splits # collate conflict (need to support deinterleaving in collate mixup) collate_fn = FastCollateMixup(**mixup_args) # wrap dataset in AugMix helper - if num_aug_splits > 1: - dataset_train = AugMixDataset(dataset_train, num_splits=num_aug_splits) + if args.aug_splits > 1: + dataset_train = AugMixDataset(dataset_train, num_splits=args.aug_splits) # create data loaders w/ augmentation pipeiine train_interpolation = args.train_interpolation @@ -421,7 +514,7 @@ def main(): vflip=args.vflip, color_jitter=args.color_jitter, auto_augment=args.aa, - num_aug_splits=num_aug_splits, + num_aug_splits=args.aug_splits, interpolation=train_interpolation, mean=data_config['mean'], std=data_config['std'], @@ -443,169 +536,107 @@ def main(): crop_pct=data_config['crop_pct'], pin_memory=args.pin_mem, ) + return data_config, loader_eval, loader_train - # setup loss function - if args.jsd: - assert num_aug_splits > 1 # JSD only valid with aug splits set - train_loss_fn = JsdCrossEntropy(num_splits=num_aug_splits, smoothing=args.smoothing) - elif mixup_active: - # smoothing is handled with mixup target transform - train_loss_fn = SoftTargetCrossEntropy() - elif args.smoothing: - train_loss_fn = LabelSmoothingCrossEntropy(smoothing=args.smoothing) - else: - train_loss_fn = nn.CrossEntropyLoss() - validate_loss_fn = nn.CrossEntropyLoss() - dev_env.to_device(train_loss_fn, validate_loss_fn) - - # setup checkpoint saver and eval metric tracking - eval_metric = args.eval_metric - best_metric = None - best_epoch = None - saver = None - output_dir = None - if dev_env.is_master: - if args.experiment: - exp_name = args.experiment - else: - exp_name = '-'.join([ - 
datetime.now().strftime("%Y%m%d-%H%M%S"), - safe_model_name(args.model), - str(data_config['input_size'][-1]) - ]) - output_dir = get_outdir(args.output if args.output else './output/train', exp_name) - decreasing = True if eval_metric == 'loss' else False - saver = CheckpointSaver( - model=model, optimizer=updater.optimizer, args=args, model_ema=model_ema, amp_scaler=updater.scaler, - checkpoint_dir=output_dir, recovery_dir=output_dir, decreasing=decreasing, max_history=args.checkpoint_hist) - with open(os.path.join(output_dir, 'args.yaml'), 'w') as f: - f.write(args_text) - - logger = Logger(output_dir=output_dir, logger=_logger, hparams=vars(args)) - try: - for epoch in range(start_epoch, num_epochs): - if dev_env.is_distributed and hasattr(loader_train.sampler, 'set_epoch'): - loader_train.sampler.set_epoch(epoch) - if args.mixup_off_epoch and epoch >= args.mixup_off_epoch: - if loader_train.mixup_enabled: - loader_train.mixup_enabled = False +def train_one_epoch( + dev_env: DeviceEnv, + state: TrainState, + cfg: TrainCfg, + services: TrainServices, + loader, +): + tracker = Tracker() + loss_meter = AvgTensor() - train_metrics = train_one_epoch( - epoch, model, loader_train, updater, train_loss_fn, dev_env, - lr_scheduler=lr_scheduler, saver=saver, logger=logger, model_ema=model_ema, - log_interval=args.log_interval, recovery_interval=args.recovery_interval) + state.model.train() + state.updater.reset() # zero-grad - if dev_env.is_distributed and args.dist_bn in ('broadcast', 'reduce'): - if dev_env.is_master: - _logger.info("Distributing BatchNorm running means and vars") - distribute_bn(model, dev_env.world_size, args.dist_bn == 'reduce') + step_end_idx = len(loader) - 1 + tracker.mark_iter() + for step_idx, (sample, target) in enumerate(loader): + tracker.mark_iter_data_end() - eval_metrics = evaluate(model, loader_eval, validate_loss_fn, dev_env, logger=logger) + # FIXME move forward + loss into model 'task' wrapper + with dev_env.autocast(): + output = state.model(sample) + loss = state.train_loss(output, target) - if model_ema is not None and not args.model_ema_force_cpu: - if dev_env.is_distributed and args.dist_bn in ('broadcast', 'reduce'): - distribute_bn(model_ema, dev_env.world_size, args.dist_bn == 'reduce') + state.updater.apply(loss) - ema_eval_metrics = evaluate( - model_ema.module, loader_eval, validate_loss_fn, dev_env, - logger=logger, phase_suffix='EMA') - eval_metrics = ema_eval_metrics + tracker.mark_iter_step_end() - if lr_scheduler is not None: - # step LR for next epoch - lr_scheduler.step(epoch + 1, eval_metrics[eval_metric]) + state.updater.after_step( + after_train_step, + dev_env, + state, + services, + cfg, + step_idx, + step_end_idx, + tracker, + loss_meter, + (output, target, loss), + ) - if logger is not None: - logger.write_summary(index=epoch, results=dict(train=train_metrics, eval=eval_metrics)) + tracker.mark_iter() + # end for - if saver is not None: - # save proper checkpoint with eval metric - save_metric = eval_metrics[eval_metric] - best_metric, best_epoch = saver.save_checkpoint(epoch, metric=save_metric) + if hasattr(state.updater.optimizer, 'sync_lookahead'): + state.updater.optimizer.sync_lookahead() - except KeyboardInterrupt: - pass - if best_metric is not None: - _logger.info('*** Best metric: {0} (epoch {1})'.format(best_metric, best_epoch)) + return OrderedDict([('loss', loss_meter.compute().item())]) -def train_one_epoch( - epoch: int, - model: nn.Module, - loader, - updater: Updater, - loss_fn: nn.Module, +def after_train_step( 
dev_env: DeviceEnv, - lr_scheduler=None, - saver: CheckpointSaver = None, - logger: Logger = None, - model_ema: nn.Module = None, - log_interval: int = 50, - recovery_interval: int = 0, + state: TrainState, + services: TrainServices, + cfg: TrainCfg, + step_idx: int, + step_end_idx: int, + tracker: Tracker, + loss_meter: AvgTensor, + tensors: Tuple[torch.Tensor, ...], ): - tracker = Tracker() - losses_m = TensorAvg() + end_step = step_idx == step_end_idx - model.train() - - end_idx = len(loader) - 1 - num_updates = epoch * len(loader) - batch_size = 0 - tracker.mark_iter() - for step_idx, (sample, target) in enumerate(loader): - tracker.mark_iter_data_end() - last_step = step_idx == end_idx - batch_size = max(batch_size, sample.shape[0]) - - with dev_env.autocast(): - output = model(sample) - loss = loss_fn(output, target) - - updater.reset() - updater.apply(loss) + with torch.no_grad(): + output, target, loss = tensors + loss_meter.update(loss, output.shape[0]) - dev_env.mark_step() # FIXME - tracker.mark_iter_step_end() - losses_m.update(loss, sample.size(0)) - if model_ema is not None: - model_ema.update(model) + if state.model_ema is not None: + state.model_ema.update(model) - num_updates += 1 - if last_step or (step_idx + 1) % log_interval == 0: - lrl = [param_group['lr'] for param_group in updater.optimizer.param_groups] - lr = sum(lrl) / len(lrl) + state = replace(state, step_count_global=state.step_count_global + 1) - if dev_env.is_master and logger is not None: - loss_avg = losses_m.compute() - logger.log_step( + if services.logger is not None and end_step or (step_idx + 1) % cfg.log_interval == 0: + global_batch_size = dev_env.world_size * output.shape[0] + loss_avg = loss_meter.compute() + if services.logger is not None: + lr_avg = state.updater.get_average_lr() + services.logger.log_step( 'Train', step=step_idx, - end_step=end_idx, + step_end=step_end_idx, + epoch=state.epoch, loss=loss_avg.item(), - rate=(dev_env.world_size * batch_size) / tracker.iter_time.avg, - lr=lr, + rate=tracker.get_avg_iter_rate(global_batch_size), + lr=lr_avg, ) - if saver is not None and recovery_interval and (last_step or (step_idx + 1) % recovery_interval == 0): - saver.save_recovery(epoch, batch_idx=step_idx) - - if lr_scheduler is not None: - lr_scheduler.step_update(num_updates=num_updates) + if services.saver is not None and cfg.recovery_interval and ( + end_step or (step_idx + 1) % cfg.recovery_interval == 0): + services.saver.save_recovery(state.epoch, batch_idx=step_idx) - tracker.mark_iter() - # end for - - if hasattr(updater.optimizer, 'sync_lookahead'): - updater.optimizer.sync_lookahead() - - return OrderedDict([('loss', losses_m.compute().item())]) + if state.lr_scheduler is not None: + state.lr_scheduler.step_update(num_updates=state.step_count_global) def evaluate( model: nn.Module, - loader, loss_fn: nn.Module, + loader, dev_env: DeviceEnv, logger: Logger, phase_suffix: str = '', @@ -613,7 +644,7 @@ def evaluate( ): tracker = Tracker() - losses_m = TensorAvg() + losses_m = AvgTensor() accuracy_m = AccuracyTopK() model.eval() @@ -636,13 +667,13 @@ def evaluate( losses_m.update(loss, output.size(0)) accuracy_m.update(output, target) - if dev_env.is_master and (last_step or step_idx % log_interval == 0): + if last_step or step_idx % log_interval == 0: top1, top5 = accuracy_m.compute().values() loss_avg = losses_m.compute() logger.log_step( 'Eval', step=step_idx, - num_steps=end_idx, + step_end=end_idx, loss=loss_avg.item(), top1=top1.item(), top5=top5.item(), diff --git a/validate.py 
b/validate.py index add2346968..b7538d9f41 100755 --- a/validate.py +++ b/validate.py @@ -18,8 +18,7 @@ import torch.nn.parallel from collections import OrderedDict -from timm.bits import initialize_device, Tracker, Logger -from timm.metrics import AccuracyTopK, TensorAvg +from timm.bits import initialize_device, Tracker, Logger, AccuracyTopK, AvgTensor from timm.models import create_model, apply_test_time_pool, load_checkpoint, is_model, list_models from timm.data import create_dataset, create_loader, resolve_data_config, RealLabelsImagenet from timm.utils import natural_key, setup_default_logging @@ -155,10 +154,10 @@ def validate(args): pin_memory=args.pin_mem, tf_preprocessing=args.tf_preprocessing) - logger = Logger(logger=_logger) + logger = Logger(python_logger=_logger) tracker = Tracker() - losses = TensorAvg() - accuracy = AccuracyTopK().to(dev_env.device) + losses = AvgTensor() + accuracy = AccuracyTopK(dev_env=dev_env) model.eval() num_steps = len(loader) @@ -175,10 +174,8 @@ def validate(args): output = output[:, valid_labels] loss = criterion(output, target) - if dev_env.type == 'cuda': + if dev_env.type_cuda: torch.cuda.synchronize() - #elif dev_env.type == 'xla': - # dev_env.mark_step() tracker.mark_iter_step_end() losses.update(loss.detach(), sample.size(0)) @@ -186,7 +183,7 @@ def validate(args): real_labels.add_result(output) accuracy.update(output.detach(), target) - if dev_env.type == 'xla': + if dev_env.type_xla: dev_env.mark_step() tracker.mark_iter() From 6d90fcf2821d3b948f82d6af22a7c351b8fd5787 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Tue, 18 May 2021 11:34:31 -0700 Subject: [PATCH 05/61] Fix distribute_bn and model_ema --- train.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/train.py b/train.py index 05da82e299..3f18c8e5c4 100755 --- a/train.py +++ b/train.py @@ -29,14 +29,13 @@ import torchvision.utils from timm.bits import initialize_device, setup_model_and_optimizer, DeviceEnv, Logger, Tracker,\ - TrainState, TrainServices, TrainCfg, AccuracyTopK, AvgTensor + TrainState, TrainServices, TrainCfg, AccuracyTopK, AvgTensor, distribute_bn from timm.data import create_dataset, create_loader, resolve_data_config, Mixup, FastCollateMixup, AugMixDataset -from timm.models import create_model, safe_model_name, resume_checkpoint, load_checkpoint,\ - convert_splitbn_model, model_parameters -from timm.utils import * +from timm.models import create_model, safe_model_name, convert_splitbn_model from timm.loss import LabelSmoothingCrossEntropy, SoftTargetCrossEntropy, JsdCrossEntropy from timm.optim import create_optimizer_v2, optimizer_kwargs from timm.scheduler import create_scheduler +from timm.utils import setup_default_logging, random_seed, get_outdir, CheckpointSaver _logger = logging.getLogger('train') @@ -290,7 +289,7 @@ def main(): train_state, train_cfg = setup_train_task(args, dev_env, mixup_active) - data_config, loader_eval, loader_train = setup_data(args, dev_env, mixup_active) + data_config, loader_eval, loader_train = setup_data(args, train_state.model.default_cfg, dev_env, mixup_active) # setup checkpoint saver eval_metric = args.eval_metric @@ -347,7 +346,7 @@ def main(): if dev_env.distributed and args.dist_bn in ('broadcast', 'reduce'): if dev_env.primary: _logger.info("Distributing BatchNorm running means and vars") - distribute_bn(model, dev_env.world_size, args.dist_bn == 'reduce') + distribute_bn(train_state.model, args.dist_bn == 'reduce', dev_env) eval_metrics = evaluate( train_state.model, @@ -358,7 
+357,7 @@ def main(): if train_state.model_ema is not None and not args.model_ema_force_cpu: if dev_env.distributed and args.dist_bn in ('broadcast', 'reduce'): - distribute_bn(train_state.model_ema, dev_env.world_size, args.dist_bn == 'reduce') + distribute_bn(train_state.model_ema, args.dist_bn == 'reduce', dev_env) ema_eval_metrics = evaluate( train_state.model_ema.module, @@ -469,8 +468,8 @@ def setup_train_task(args, dev_env: DeviceEnv, mixup_active: bool): return train_state, train_cfg -def setup_data(args, dev_env, mixup_active): - data_config = resolve_data_config(vars(args), model=model, verbose=dev_env.primary) +def setup_data(args, default_cfg, dev_env, mixup_active): + data_config = resolve_data_config(vars(args), default_cfg=default_cfg, verbose=dev_env.primary) # create the train and eval datasets dataset_train = create_dataset( @@ -606,7 +605,7 @@ def after_train_step( loss_meter.update(loss, output.shape[0]) if state.model_ema is not None: - state.model_ema.update(model) + state.model_ema.update(state.model) state = replace(state, step_count_global=state.step_count_global + 1) From cbd4ee737fb162a324a6de6b199c8081773a5748 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Tue, 18 May 2021 16:54:36 -0700 Subject: [PATCH 06/61] Fix model init for XLA, remove some prints. --- timm/bits/device_env.py | 2 -- timm/bits/device_env_xla.py | 20 ++++++++------------ timm/bits/train_setup.py | 1 + train.py | 8 ++++++-- 4 files changed, 15 insertions(+), 16 deletions(-) diff --git a/timm/bits/device_env.py b/timm/bits/device_env.py index 7307823e50..bac9b0ab96 100644 --- a/timm/bits/device_env.py +++ b/timm/bits/device_env.py @@ -90,8 +90,6 @@ def mark_step(self): pass # NO-OP for non-XLA devices def all_reduce_(self, tensor: TensorList, op=dist.ReduceOp.SUM, average=False): - print(len(tensor), type(tensor)) - print(tensor.shape) dist.all_reduce(tensor, op=op) if average: tensor.div_(self.world_size) diff --git a/timm/bits/device_env_xla.py b/timm/bits/device_env_xla.py index cc9ea3ddfe..71d350fd75 100644 --- a/timm/bits/device_env_xla.py +++ b/timm/bits/device_env_xla.py @@ -23,12 +23,12 @@ _PT_TO_XM_OP = { - ReduceOp.SUM: 'sum', - ReduceOp.PRODUCT: 'prod', - ReduceOp.MIN: 'min', - ReduceOp.MAX: 'max', - ReduceOp.BAND: 'and', - ReduceOp.BOR: 'or', + ReduceOp.SUM: xm.REDUCE_SUM, + ReduceOp.PRODUCT: xm.REDUCE_MUL, + ReduceOp.MIN: xm.REDUCE_MIN, + ReduceOp.MAX: xm.REDUCE_MAX, + ReduceOp.BAND: xm.REDUCE_AND, + ReduceOp.BOR: xm.REDUCE_OR, } @@ -77,20 +77,16 @@ def mark_step(self): def all_reduce(self, tensor: torch.Tensor, op=ReduceOp.SUM, average=False): assert isinstance(tensor, torch.Tensor) # unlike in-place variant, lists/tuples not allowed op = _PT_TO_XM_OP[op] - scale = 1.0 - if average: - scale /= self.world_size + scale = 1.0 / self.world_size if average else 1.0 return xm.all_reduce(op, tensor, scale=scale) def all_reduce_(self, tensor: TensorList, op=ReduceOp.SUM, average=False): op = _PT_TO_XM_OP[op] - scale = 1.0 + scale = 1.0 / self.world_size if average else 1.0 wrapped = False if isinstance(tensor, torch.Tensor): tensor = [tensor] # bare tensors are not operated on in-place wrapped = True - if average: - scale /= self.world_size xm.all_reduce(op, tensor, scale=scale) if wrapped: tensor = tensor[0] diff --git a/timm/bits/train_setup.py b/timm/bits/train_setup.py index 992546a789..3884958b05 100644 --- a/timm/bits/train_setup.py +++ b/timm/bits/train_setup.py @@ -89,6 +89,7 @@ def setup_model_and_optimizer( train_state = TrainState(model=model, updater=updater, 
model_ema=model_ema) if resume_path: + # FIXME this is not implemented yet, do a hack job before proper TrainState serialization? resume_train_checkpoint( train_state, resume_path, diff --git a/train.py b/train.py index 3f18c8e5c4..95f5cb7ec8 100755 --- a/train.py +++ b/train.py @@ -283,12 +283,16 @@ def main(): else: _logger.info('Training with a single process on 1 device.') - random_seed(args.seed, dev_env.global_rank) - mixup_active = args.mixup > 0 or args.cutmix > 0. or args.cutmix_minmax is not None + random_seed(args.seed, 0) # Set all random seeds the same for model/state init (mandatory for XLA) + train_state, train_cfg = setup_train_task(args, dev_env, mixup_active) + # Set random seeds across ranks differently for train + # FIXME perhaps keep the same and just set diff seeds for dataloader worker process? what about TFDS? + random_seed(args.seed, dev_env.global_rank) + data_config, loader_eval, loader_train = setup_data(args, train_state.model.default_cfg, dev_env, mixup_active) # setup checkpoint saver From 72ca831dd483a930841be7ab99b87f08ef83cde7 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Wed, 19 May 2021 12:01:53 -0700 Subject: [PATCH 07/61] Back to using strings for the enum translation, forgot about import dep --- timm/bits/device_env_xla.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/timm/bits/device_env_xla.py b/timm/bits/device_env_xla.py index 71d350fd75..a565c1c87f 100644 --- a/timm/bits/device_env_xla.py +++ b/timm/bits/device_env_xla.py @@ -23,12 +23,12 @@ _PT_TO_XM_OP = { - ReduceOp.SUM: xm.REDUCE_SUM, - ReduceOp.PRODUCT: xm.REDUCE_MUL, - ReduceOp.MIN: xm.REDUCE_MIN, - ReduceOp.MAX: xm.REDUCE_MAX, - ReduceOp.BAND: xm.REDUCE_AND, - ReduceOp.BOR: xm.REDUCE_OR, + ReduceOp.SUM: 'sum', + ReduceOp.PRODUCT: 'mul', + ReduceOp.MIN: 'min', + ReduceOp.MAX: 'max', + ReduceOp.BAND: 'and', + ReduceOp.BOR: 'or', } From 5b9c69e80a5ff4ecfd62429246e51a1de0f834fe Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 21 May 2021 18:08:06 -0700 Subject: [PATCH 08/61] Add basic training resume based on legacy code --- timm/bits/checkpoint.py | 90 ++++++++++++++++++++++------------------- train.py | 25 ++++++------ 2 files changed, 62 insertions(+), 53 deletions(-) diff --git a/timm/bits/checkpoint.py b/timm/bits/checkpoint.py index 3c191b0a94..b7ff19096c 100644 --- a/timm/bits/checkpoint.py +++ b/timm/bits/checkpoint.py @@ -4,55 +4,63 @@ import torch +from timm.utils import unwrap_model + from .train_state import TrainState, serialize_train_state, deserialize_train_state _logger = logging.getLogger(__name__) +def _load_state_dict(checkpoint, state_dict_key='state_dict'): + new_state_dict = OrderedDict() + for k, v in checkpoint[state_dict_key].items(): + name = k[7:] if k.startswith('module') else k + new_state_dict[name] = v + return new_state_dict + + def resume_train_checkpoint( - train_state, + train_state: TrainState, checkpoint_path, resume_opt=True, deserialize_fn=deserialize_train_state, log_info=True): - raise NotImplementedError - - # resume_epoch = None - # if os.path.isfile(checkpoint_path): - # checkpoint = torch.load(checkpoint_path, map_location='cpu') - # - # if isinstance(checkpoint, dict) and 'state_dict' in checkpoint: - # if log_info: - # _logger.info('Restoring model state from checkpoint...') - # new_state_dict = OrderedDict() - # for k, v in checkpoint['state_dict'].items(): - # name = k[7:] if k.startswith('module') else k - # new_state_dict[name] = v - # model.load_state_dict(new_state_dict) - # - # if optimizer is 
not None and 'optimizer' in checkpoint: - # if log_info: - # _logger.info('Restoring optimizer state from checkpoint...') - # optimizer.load_state_dict(checkpoint['optimizer']) - # - # if loss_scaler is not None and loss_scaler.state_dict_key in checkpoint: - # if log_info: - # _logger.info('Restoring AMP loss scaler state from checkpoint...') - # loss_scaler.load_state_dict(checkpoint[loss_scaler.state_dict_key]) - # - # if 'epoch' in checkpoint: - # resume_epoch = checkpoint['epoch'] - # if 'version' in checkpoint and checkpoint['version'] > 1: - # resume_epoch += 1 # start at the next epoch, old checkpoints incremented before save - # - # if log_info: - # _logger.info("Loaded checkpoint '{}' (epoch {})".format(checkpoint_path, checkpoint['epoch'])) - # else: - # model.load_state_dict(checkpoint) - # if log_info: - # _logger.info("Loaded checkpoint '{}'".format(checkpoint_path)) - # return resume_epoch - # else: - # _logger.error("No checkpoint found at '{}'".format(checkpoint_path)) - # raise FileNotFoundError() + # FIXME this is a hacky adaptation of pre-bits resume to get up and running quickly + resume_epoch = None + if os.path.isfile(checkpoint_path): + checkpoint = torch.load(checkpoint_path, map_location='cpu') + assert isinstance(checkpoint, dict) and 'state_dict' in checkpoint + if log_info: + _logger.info('Restoring model state from checkpoint...') + + train_state.model.load_state_dict(_load_state_dict(checkpoint)) + + if train_state.model_ema is not None and 'state_dict_ema' in checkpoint: + if log_info: + _logger.info('Restoring model (EMA) state from checkpoint...') + unwrap_model(train_state.model_ema).load_state_dict(_load_state_dict(checkpoint, 'state_dict_ema')) + + if resume_opt: + if train_state.updater.optimizer is not None and 'optimizer' in checkpoint: + if log_info: + _logger.info('Restoring optimizer state from checkpoint...') + train_state.updater.optimizer.load_state_dict(checkpoint['optimizer']) + + scaler_state_dict_key = 'amp_scaler' + if train_state.updater.grad_scaler is not None and scaler_state_dict_key in checkpoint: + if log_info: + _logger.info('Restoring AMP loss scaler state from checkpoint...') + train_state.updater.grad_scaler.load_state_dict(checkpoint[scaler_state_dict_key]) + + if 'epoch' in checkpoint: + resume_epoch = checkpoint['epoch'] + if 'version' in checkpoint and checkpoint['version'] > 1: + resume_epoch += 1 # start at the next epoch, old checkpoints incremented before save + train_state.epoch = resume_epoch # FIXME use replace if we make train_state read-only + + if log_info: + _logger.info("Loaded checkpoint '{}' (epoch {})".format(checkpoint_path, checkpoint['epoch'])) + else: + _logger.error("No valid resume checkpoint found at '{}'".format(checkpoint_path)) + raise FileNotFoundError() diff --git a/train.py b/train.py index 95f5cb7ec8..51645e4db8 100755 --- a/train.py +++ b/train.py @@ -340,11 +340,11 @@ def main(): loader_train.mixup_enabled = False train_metrics = train_one_epoch( - dev_env=dev_env, state=train_state, - services=services, cfg=train_cfg, - loader=loader_train + services=services, + loader=loader_train, + dev_env=dev_env, ) if dev_env.distributed and args.dist_bn in ('broadcast', 'reduce'): @@ -356,8 +356,8 @@ def main(): train_state.model, train_state.eval_loss, loader_eval, - dev_env, - logger=services.logger) + services.logger, + dev_env) if train_state.model_ema is not None and not args.model_ema_force_cpu: if dev_env.distributed and args.dist_bn in ('broadcast', 'reduce'): @@ -367,8 +367,8 @@ def main(): 
train_state.model_ema.module, train_state.eval_loss, loader_eval, + services.logger, dev_env, - logger=services.logger, phase_suffix='EMA') eval_metrics = ema_eval_metrics @@ -432,6 +432,7 @@ def setup_train_task(args, dev_env: DeviceEnv, mixup_active: bool): clip_value=args.clip_grad, model_ema=args.model_ema, model_ema_decay=args.model_ema_decay, + resume_path=args.resume, use_syncbn=args.sync_bn, ) @@ -543,11 +544,11 @@ def setup_data(args, default_cfg, dev_env, mixup_active): def train_one_epoch( - dev_env: DeviceEnv, state: TrainState, cfg: TrainCfg, services: TrainServices, loader, + dev_env: DeviceEnv, ): tracker = Tracker() loss_meter = AvgTensor() @@ -571,10 +572,10 @@ def train_one_epoch( state.updater.after_step( after_train_step, - dev_env, state, - services, cfg, + services, + dev_env, step_idx, step_end_idx, tracker, @@ -592,10 +593,10 @@ def train_one_epoch( def after_train_step( - dev_env: DeviceEnv, state: TrainState, - services: TrainServices, cfg: TrainCfg, + services: TrainServices, + dev_env: DeviceEnv, step_idx: int, step_end_idx: int, tracker: Tracker, @@ -640,8 +641,8 @@ def evaluate( model: nn.Module, loss_fn: nn.Module, loader, - dev_env: DeviceEnv, logger: Logger, + dev_env: DeviceEnv, phase_suffix: str = '', log_interval: int = 10, ): From 91ab0b6ce5a1dbe22600132bb4fa9ededc96b2ab Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Thu, 3 Jun 2021 17:49:40 -0700 Subject: [PATCH 09/61] Add proper TrainState checkpoint save/load. Some reorg/refactoring and other cleanup. More to go... --- timm/bits/__init__.py | 9 +- timm/bits/checkpoint.py | 121 ++++++++++----- timm/bits/checkpoint_manager.py | 219 ++++++++++++++++++++++++++++ timm/bits/device_env.py | 66 ++++++++- timm/bits/device_env_cuda.py | 9 +- timm/bits/device_env_factory.py | 26 ++-- timm/bits/device_env_xla.py | 19 ++- timm/bits/distributed.py | 15 +- timm/bits/metric.py | 13 +- timm/bits/{logger.py => monitor.py} | 8 +- timm/bits/train_services.py | 8 +- timm/bits/train_setup.py | 10 +- timm/bits/train_state.py | 35 +++-- timm/bits/updater.py | 4 + timm/bits/updater_deepspeed.py | 4 + timm/bits/updater_factory.py | 3 +- timm/data/loader.py | 4 +- timm/data/parsers/parser_tfds.py | 4 +- timm/utils/model.py | 47 +++--- train.py | 50 +++---- validate.py | 4 +- 21 files changed, 522 insertions(+), 156 deletions(-) create mode 100644 timm/bits/checkpoint_manager.py rename timm/bits/{logger.py => monitor.py} (98%) diff --git a/timm/bits/__init__.py b/timm/bits/__init__.py index c99603414c..940e9e3e17 100644 --- a/timm/bits/__init__.py +++ b/timm/bits/__init__.py @@ -1,14 +1,15 @@ from .avg_scalar import AvgMinMaxScalar from .avg_tensor import AvgTensor -from .device_env import DeviceEnv, DeviceEnvType +from .checkpoint_manager import CheckpointManager +from .device_env import DeviceEnv, DeviceEnvType, get_global_device, set_global_device, is_global_device from .device_env_cuda import DeviceEnvCuda -from .device_env_factory import initialize_device, get_device +from .device_env_factory import initialize_device from .device_env_xla import DeviceEnvXla from .distributed import distribute_bn, all_gather_recursive, all_reduce_recursive, broadcast_recursive,\ all_reduce_sequence, all_gather_sequence # from .evaluate import evaluate, eval_step -from .logger import Logger -from .metric import Metric, MetricValue +from .monitor import Monitor +from .metric import Metric, MetricValueT from .metric_accuracy import AccuracyTopK from .tracker import Tracker # from .task_metrics import TaskMetrics, TaskMetricsClassify diff 
--git a/timm/bits/checkpoint.py b/timm/bits/checkpoint.py index b7ff19096c..df21ab5eee 100644 --- a/timm/bits/checkpoint.py +++ b/timm/bits/checkpoint.py @@ -1,17 +1,73 @@ import logging import os from collections import OrderedDict +from typing import Dict, Any, Callable import torch from timm.utils import unwrap_model -from .train_state import TrainState, serialize_train_state, deserialize_train_state +from .device_env import DeviceEnv +from .train_state import TrainState _logger = logging.getLogger(__name__) -def _load_state_dict(checkpoint, state_dict_key='state_dict'): +def save_train_state( + checkpoint_path: str, # FIXME pass base path + file pattern + epoch / step separately for DS? + train_state: TrainState, + extra_state: Dict[str, Any] = None, + unwrap_fn: Callable = unwrap_model, + dev_env: DeviceEnv = None, + log_info: bool = True): + + assert not train_state.updater.deepspeed + # DeepSpeed has a fully custom checkpoint saving setup, it is not possible + # specify a filename, checkpoints needed to be saved from all ranks, etc + # if train_state.updater.deepspeed: + # save_train_state_deepspeed(train_state, checkpoint_path) + + dev_env = dev_env or DeviceEnv.instance() + state_dict = train_state.state_dict(unwrap_fn=unwrap_fn) + if extra_state: + state_dict.update(extra_state) + if dev_env.type_xla: + # XLA state dict needs to be moved to CPU before save, this is normally done by xm.save + state_dict = dev_env.state_dict_to_cpu(state_dict) + torch.save(state_dict, checkpoint_path) + + +def load_train_state( + train_state: TrainState, + checkpoint_path: str, # FIXME pass base path + file pattern + epoch / step separately for DS + unwrap_fn: Callable = None, + load_opt: bool = True, + dev_env: DeviceEnv = None, + log_info: bool = True +): + unwrap_fn = unwrap_fn or unwrap_model + if not os.path.isfile(checkpoint_path): + _logger.error("No valid resume checkpoint found at '{}'".format(checkpoint_path)) + raise FileNotFoundError() + + if log_info: + _logger.info('Restoring training state from checkpoint...') + + checkpoint = torch.load(checkpoint_path, map_location='cpu') + assert isinstance(checkpoint, dict) + + if not checkpoint.get('version', 0) > 2: + load_legacy_checkpoint(train_state, checkpoint=checkpoint, load_opt=load_opt, log_info=log_info) + if log_info: + _logger.info("Loaded legacy checkpoint '{}' (epoch {})".format(checkpoint_path, train_state.epoch)) + return + + train_state.load_state_dict(checkpoint, unwrap_fn=unwrap_fn) + if log_info: + _logger.info("Loaded checkpoint '{}' (epoch {})".format(checkpoint_path, train_state.epoch)) + + +def _get_state_dict(checkpoint, state_dict_key='state_dict'): new_state_dict = OrderedDict() for k, v in checkpoint[state_dict_key].items(): name = k[7:] if k.startswith('module') else k @@ -19,48 +75,35 @@ def _load_state_dict(checkpoint, state_dict_key='state_dict'): return new_state_dict -def resume_train_checkpoint( +def load_legacy_checkpoint( train_state: TrainState, - checkpoint_path, - resume_opt=True, - deserialize_fn=deserialize_train_state, + checkpoint, + load_opt=True, log_info=True): - # FIXME this is a hacky adaptation of pre-bits resume to get up and running quickly - resume_epoch = None - if os.path.isfile(checkpoint_path): - checkpoint = torch.load(checkpoint_path, map_location='cpu') - assert isinstance(checkpoint, dict) and 'state_dict' in checkpoint + assert isinstance(checkpoint, dict) and 'state_dict' in checkpoint + train_state.model.load_state_dict(_get_state_dict(checkpoint)) + + if train_state.model_ema is 
not None and 'state_dict_ema' in checkpoint: if log_info: - _logger.info('Restoring model state from checkpoint...') + _logger.info('Restoring model (EMA) state from checkpoint...') + unwrap_model(train_state.model_ema).load_state_dict(_get_state_dict(checkpoint, 'state_dict_ema')) - train_state.model.load_state_dict(_load_state_dict(checkpoint)) + if load_opt: + if train_state.updater.optimizer is not None and 'optimizer' in checkpoint: + if log_info: + _logger.info('Restoring optimizer state from checkpoint...') + train_state.updater.optimizer.load_state_dict(checkpoint['optimizer']) - if train_state.model_ema is not None and 'state_dict_ema' in checkpoint: + scaler_state_dict_key = 'amp_scaler' + if train_state.updater.grad_scaler is not None and scaler_state_dict_key in checkpoint: if log_info: - _logger.info('Restoring model (EMA) state from checkpoint...') - unwrap_model(train_state.model_ema).load_state_dict(_load_state_dict(checkpoint, 'state_dict_ema')) - - if resume_opt: - if train_state.updater.optimizer is not None and 'optimizer' in checkpoint: - if log_info: - _logger.info('Restoring optimizer state from checkpoint...') - train_state.updater.optimizer.load_state_dict(checkpoint['optimizer']) - - scaler_state_dict_key = 'amp_scaler' - if train_state.updater.grad_scaler is not None and scaler_state_dict_key in checkpoint: - if log_info: - _logger.info('Restoring AMP loss scaler state from checkpoint...') - train_state.updater.grad_scaler.load_state_dict(checkpoint[scaler_state_dict_key]) - - if 'epoch' in checkpoint: - resume_epoch = checkpoint['epoch'] - if 'version' in checkpoint and checkpoint['version'] > 1: - resume_epoch += 1 # start at the next epoch, old checkpoints incremented before save - train_state.epoch = resume_epoch # FIXME use replace if we make train_state read-only + _logger.info('Restoring AMP loss scaler state from checkpoint...') + train_state.updater.grad_scaler.load_state_dict(checkpoint[scaler_state_dict_key]) + + if 'epoch' in checkpoint: + resume_epoch = checkpoint['epoch'] + if 'version' in checkpoint and checkpoint['version'] > 1: + resume_epoch += 1 # start at the next epoch, old checkpoints incremented before save + train_state.epoch = resume_epoch # FIXME use replace if we make train_state read-only - if log_info: - _logger.info("Loaded checkpoint '{}' (epoch {})".format(checkpoint_path, checkpoint['epoch'])) - else: - _logger.error("No valid resume checkpoint found at '{}'".format(checkpoint_path)) - raise FileNotFoundError() diff --git a/timm/bits/checkpoint_manager.py b/timm/bits/checkpoint_manager.py new file mode 100644 index 0000000000..b051e126ea --- /dev/null +++ b/timm/bits/checkpoint_manager.py @@ -0,0 +1,219 @@ +""" Checkpoint Manager + +Track top-n training checkpoints and maintain recovery checkpoints on specified intervals. 
+ +Hacked together by / Copyright 2021 Ross Wightman +""" +import glob +import logging +import operator +import os +import shutil +from typing import Optional, Dict, Callable, List +from dataclasses import dataclass, replace + + +from .checkpoint import save_train_state +from .train_state import TrainState + +_logger = logging.getLogger(__name__) + + +@dataclass +class CheckpointInfo: + path: str = '' + metrics: Dict[str, float] = None # all metrics at time of checkpoint save + metric_name: str = 'loss' + metric_decreasing: bool = True + epoch: int = 0 + global_step: int = 0 + + @property + def valid_key(self): + return self.metric_name and self.metrics and self.metric_name in self.metrics + + @property + def sort_key(self): + return self.metrics[self.metric_name] if self.valid_key else self.epoch + + @property + def decreasing_key(self): + return self.metric_decreasing if self.valid_key else False + + +class CheckpointManager: + def __init__( + self, + hparams=None, + save_state_fn=None, + checkpoint_dir='', + recovery_dir='', + checkpoint_tmpl=None, + recovery_tmpl=None, + metric_name='loss', + metric_decreasing=True, + max_history=10): + + # extra items to include in checkpoint + self.hparams = hparams # train arguments (config / hparams) # FIXME this will change with new config system + + # state + self.checkpoint_files: List[CheckpointInfo] = [] # (filename, metric) tuples in order of decreasing betterness + self.best_checkpoint = None + self.curr_recovery_file = '' + self.prev_recovery_file = '' + self.can_hardlink = True + + # util / helper fn + self.save_state_fn = save_state_fn or save_train_state + + # file / folder config + self.extension = '.pth.tar' + self.checkpoint_dir = checkpoint_dir + self.recovery_dir = recovery_dir + self.checkpoint_tmpl = (checkpoint_tmpl or 'checkpoint-{index}') + self.extension + self.recovery_tmpl = (recovery_tmpl or 'recovery-{index}') + self.extension + + # ordering / history config + self.metric_name = metric_name + self.metric_decreasing = metric_decreasing + self.metric_cmp_fn = operator.lt if metric_decreasing else operator.gt + self.max_history = max_history + assert self.max_history >= 1 + + def _replace(self, src, dst): + if self.can_hardlink: + try: + if os.path.exists(dst): + os.unlink(dst) # required for Windows support. 
+ except Exception as e: + self.can_hardlink = False + os.replace(src, dst) + + def _duplicate(self, src, dst): + if self.can_hardlink: + try: + if os.path.exists(dst): + # for Windows + os.unlink(dst) + os.link(src, dst) + return + except Exception as e: + self.can_hardlink = False + shutil.copy2(src, dst) + + def _save(self, save_path, train_state: TrainState, metrics: Optional[Dict[str, float]] = None): + extra_state = dict( + # version < 2 increments epoch before save + # version < 3, pre timm bits + # version 3, first timm bits checkpoitns + version=3, + ) + if self.hparams is not None: + extra_state.update(dict(arch=self.hparams['model'], hparams=self.hparams)) + else: + arch = getattr(train_state.model, 'default_cfg', dict()).get('architecture', None) + if arch is None: + arch = type(train_state.model).__name__.lower() + extra_state.update(dict(arch=arch)) + if metrics is not None: + # save the metrics and how we originally sorted them in the checkpoint for future comparisons + extra_state.update(dict( + metrics=metrics, + metric_name=self.metric_name, + metric_decreasing=self.metric_decreasing + )) + + self.save_state_fn(save_path, train_state, extra_state) + + checkpoint_info = CheckpointInfo( + path=save_path, + metrics=metrics, + metric_name=self.metric_name, + metric_decreasing=self.metric_decreasing, + epoch=train_state.epoch, + global_step=train_state.step_count_global, + ) + return checkpoint_info + + def _udpate_checkpoints(self, info: CheckpointInfo): + self.checkpoint_files.append(info) + self.checkpoint_files = sorted( + self.checkpoint_files, + key=lambda x: x.sort_key, + reverse=not info.decreasing_key, # sort in descending order if a lower metric is not better + ) + + def _cleanup_checkpoints(self, trim=0): + trim = min(len(self.checkpoint_files), trim) + delete_index = self.max_history - trim + if delete_index < 0 or len(self.checkpoint_files) <= delete_index: + return + to_delete = self.checkpoint_files[delete_index:] + for d in to_delete: + try: + _logger.debug("Cleaning checkpoint: {}".format(d)) + os.remove(d[0]) + except Exception as e: + _logger.error("Exception '{}' while deleting checkpoint".format(e)) + self.checkpoint_files = self.checkpoint_files[:delete_index] + + def _compare_metric(self, lhs: CheckpointInfo, rhs: CheckpointInfo): + # compare metrics against an existing checkpoint + if not lhs or not lhs.valid_key or not rhs or not rhs.valid_key: + # always assume lhs metrics are better if there are no usable metrics to compare + return True + return self.metric_cmp_fn(lhs.sort_key, rhs.sort_key) + + def save_checkpoint(self, train_state: TrainState, metrics: Optional[Dict[str, float]] = None): + assert train_state.epoch >= 0 + tmp_save_path = os.path.join(self.checkpoint_dir, 'tmp' + self.extension) + last_save_path = os.path.join(self.checkpoint_dir, 'last' + self.extension) + curr_checkpoint = self._save(tmp_save_path, train_state, metrics) + self._replace(tmp_save_path, last_save_path) + + worst_checkpoint = self.checkpoint_files[-1] if self.checkpoint_files else None + if len(self.checkpoint_files) < self.max_history or self._compare_metric(curr_checkpoint, worst_checkpoint): + if len(self.checkpoint_files) >= self.max_history: + self._cleanup_checkpoints(1) + + filename = self.checkpoint_tmpl.format(index=train_state.epoch) + save_path = os.path.join(self.checkpoint_dir, filename) + curr_checkpoint = replace(curr_checkpoint, path=save_path) + self._duplicate(last_save_path, save_path) + self._udpate_checkpoints(curr_checkpoint) + + checkpoints_str 
= "Current checkpoints:\n" + for c in self.checkpoint_files: + checkpoints_str += f' {c.path}, {c.sort_key}\n'.format(c) + _logger.info(checkpoints_str) + + if curr_checkpoint.valid_key and self._compare_metric(curr_checkpoint, self.best_checkpoint): + self.best_checkpoint = curr_checkpoint + best_save_path = os.path.join(self.checkpoint_dir, 'best' + self.extension) + self._duplicate(last_save_path, best_save_path) + + return None if self.best_checkpoint is None else curr_checkpoint + + def save_recovery(self, train_state: TrainState): + tmp_save_path = os.path.join(self.recovery_dir, 'recovery_tmp' + self.extension) + self._save(tmp_save_path, train_state) + + filename = self.recovery_tmpl.format(index=train_state.step_count_global) + save_path = os.path.join(self.recovery_dir, filename) + self._replace(tmp_save_path, save_path) + + if os.path.exists(self.prev_recovery_file): + try: + _logger.debug("Cleaning recovery: {}".format(self.prev_recovery_file)) + os.remove(self.prev_recovery_file) + except Exception as e: + _logger.error("Exception '{}' while removing {}".format(e, self.prev_recovery_file)) + self.prev_recovery_file = self.curr_recovery_file + self.curr_recovery_file = save_path + + def find_recovery(self): + recovery_path = os.path.join(self.recovery_dir, self.recovery_prefix) + files = glob.glob(recovery_path + '*' + self.extension) + files = sorted(files) + return files[0] if len(files) else '' diff --git a/timm/bits/device_env.py b/timm/bits/device_env.py index bac9b0ab96..0a926e6961 100644 --- a/timm/bits/device_env.py +++ b/timm/bits/device_env.py @@ -1,7 +1,7 @@ import abc from contextlib import suppress from enum import Enum -from typing import Callable, Union, Optional, List, Tuple +from typing import Callable, Union, Optional, List, Tuple, Dict, Any from dataclasses import dataclass, field, InitVar import torch @@ -18,10 +18,21 @@ class DeviceEnvType(Enum): XLA = "xla" +def state_dict_apply(state_dict: Dict[str, Any], apply_fn, select_fn=lambda x: x.isinstance(torch.Tensor)): + out_dict = {} + for k, v in state_dict.items(): + if isinstance(v, dict): + out_dict[k] = state_dict_apply(v, apply_fn, select_fn) + else: + out_dict[k] = apply_fn(v) if select_fn(v) else v + return out_dict + + @dataclass class DeviceEnv: device_type: InitVar[Optional[str]] = None device_index: InitVar[Optional[int]] = None + channels_last: InitVar[bool] = False device: torch.device = field(init=False) # set from device_type + device_index or post_init logic world_size: Optional[int] = None # set by post_init from env when None @@ -32,7 +43,12 @@ class DeviceEnv: memory_format: Optional[torch.memory_format] = None dtype: Optional[torch.dtype] = None - def __post_init__(self, device_type: Optional[str], device_index: Optional[int]): + def __post_init__( + self, + device_type: Optional[str], + device_index: Optional[int], + channels_last: bool, + ): device_type = device_type or 'cpu' self.device = torch.device(device_type) if device_index is None \ else torch.device(device_type, device_index) @@ -41,6 +57,17 @@ def __post_init__(self, device_type: Optional[str], device_index: Optional[int]) self.global_rank = 0 if self.global_rank is None else self.global_rank if self.autocast is None: self.autocast = suppress + if channels_last: + self.memory_format = torch.channels_last + + @staticmethod + def is_instance(): + return is_global_device() + + @staticmethod + def instance(): + # throws if called before global device is set / initialized + return get_global_device() @property def type(self) -> 
DeviceEnvType: @@ -81,11 +108,23 @@ def wrap_distributed(self, *modules): def wrap_parallel(self, *modules): pass + def to_cpu(self, *modules: torch.nn.Module): + moved = [m.cpu() for m in modules] + return moved[0] if len(moved) == 1 else moved + def to_device(self, *modules: torch.nn.Module): - # FIXME handling dtype / memformat... disable flags, enable flags, diff fn? + # FIXME handling dtype? Do we want separate dtype for data vs model? moved = [m.to(device=self.device, memory_format=self.memory_format) for m in modules] return moved[0] if len(moved) == 1 else moved + def state_dict_to_cpu(self, state: Dict[str, Any]): + cpu_state = state_dict_apply(state, apply_fn=lambda x: x.cpu()) + return cpu_state + + def state_dict_to_device(self, state: Dict[str, Any]): + cpu_state = state_dict_apply(state, apply_fn=lambda x: x.to(self.device)) + return cpu_state + def mark_step(self): pass # NO-OP for non-XLA devices @@ -126,3 +165,24 @@ def broadcast(self, tensor: Optional[torch.Tensor] = None, src_rank=0): def barrier(self): dist.barrier() + + +# Global device environment singleton instance +_global_device_env: Optional[DeviceEnv] = None + + +def is_global_device(): + return _global_device_env is not None + + +def get_global_device() -> DeviceEnv: + if not is_global_device(): + raise RuntimeError('Please initialize device environment by calling initialize_device / set_global_device.') + return _global_device_env + + +def set_global_device(device: DeviceEnv): + global _global_device_env + if _global_device_env is not None: + raise RuntimeError('Global device is already set, it should NOT be set again.') + _global_device_env = device diff --git a/timm/bits/device_env_cuda.py b/timm/bits/device_env_cuda.py index 7358e405af..c57dfda5c5 100644 --- a/timm/bits/device_env_cuda.py +++ b/timm/bits/device_env_cuda.py @@ -16,7 +16,12 @@ def is_cuda_available(): @dataclass class DeviceEnvCuda(DeviceEnv): - def __post_init__(self, device_type: str, device_index: Optional[int]): + def __post_init__( + self, + device_type: Optional[str], + device_index: Optional[int], + channels_last: bool, + ): assert torch.cuda.device_count() torch.backends.cudnn.benchmark = True setup_world_size = self.world_size or int(os.environ.get('WORLD_SIZE', 1)) @@ -43,6 +48,8 @@ def __post_init__(self, device_type: str, device_index: Optional[int]): self.global_rank = 0 if self.autocast is None: self.autocast = torch.cuda.amp.autocast if self.amp else suppress + if channels_last: + self.memory_format = torch.channels_last @property def type(self) -> DeviceEnvType: diff --git a/timm/bits/device_env_factory.py b/timm/bits/device_env_factory.py index 2037a39e31..bb92daab3a 100644 --- a/timm/bits/device_env_factory.py +++ b/timm/bits/device_env_factory.py @@ -1,15 +1,15 @@ -from .device_env import DeviceEnv +import logging + +from .device_env import DeviceEnv, is_global_device, get_global_device, set_global_device from .device_env_cuda import DeviceEnvCuda, is_cuda_available from .device_env_xla import DeviceEnvXla, is_xla_available -_device_env = None +_logger = logging.getLogger(__name__) def initialize_device(force_cpu: bool = False, **kwargs) -> DeviceEnv: - global _device_env - if _device_env is not None: - # warning - return _device_env + if is_global_device(): + return get_global_device() denv = None if not force_cpu: @@ -23,14 +23,10 @@ def initialize_device(force_cpu: bool = False, **kwargs) -> DeviceEnv: if denv is None: denv = DeviceEnv() - print(denv) # FIXME DEBUG - _device_env = denv - return denv - - -def get_device() 
-> DeviceEnv: - if _device_env is None: - raise RuntimeError('Please initialize device environment by calling initialize_device first.') - return _device_env + _logger.info(f'Initialized device {denv.device}. ' + f'Rank: {denv.global_rank} ({denv.local_rank}) of {denv.world_size}.') + print(denv) # FIXME temporary print for debugging + set_global_device(denv) + return denv diff --git a/timm/bits/device_env_xla.py b/timm/bits/device_env_xla.py index a565c1c87f..46517f7a91 100644 --- a/timm/bits/device_env_xla.py +++ b/timm/bits/device_env_xla.py @@ -1,7 +1,7 @@ import os from contextlib import suppress from dataclasses import dataclass, field, InitVar -from typing import Optional +from typing import Optional, Dict import torch from torch.distributed import ReduceOp @@ -42,7 +42,12 @@ def is_xla_available(xla_device_type=None): @dataclass class DeviceEnvXla(DeviceEnv): - def __post_init__(self, device_type: Optional[str], device_idx: Optional[int]): + def __post_init__( + self, + device_type: Optional[str], + device_idx: Optional[int], + channels_last: bool, + ): if device_type is not None: device_type = device_type.upper() assert device_type in ('TPU', 'GPU', 'CPU'), "XLA device type must be one of ('TPU', 'GPU', 'CPU')" @@ -59,6 +64,8 @@ def __post_init__(self, device_type: Optional[str], device_idx: Optional[int]): assert xa is not None, 'XLA AMP is not present on this build' if self.autocast is None: self.autocast = xa.autocast if self.amp else suppress + if channels_last: + self.memory_format = torch.channels_last @property def type(self) -> DeviceEnvType: @@ -114,3 +121,11 @@ def broadcast_(self, tensor: torch.Tensor, src_rank=0): def barrier(self): xm.rendezvous('timm.bits.dist_barrier') + + def state_dict_to_cpu(self, state: Dict[str, torch.Tensor]): + cpu_state = xm._maybe_convert_to_cpu(state, convert=True) + return cpu_state + + def state_dict_to_device(self, state: Dict[str, torch.Tensor]): + device_state = xm.send_cpu_data_to_device(state, device=self.device) + return device_state diff --git a/timm/bits/distributed.py b/timm/bits/distributed.py index 55f9adf502..0b5df830d4 100644 --- a/timm/bits/distributed.py +++ b/timm/bits/distributed.py @@ -5,8 +5,7 @@ from timm.utils import unwrap_model -from .device_env import DeviceEnv, DeviceEnvType -from .device_env_factory import get_device +from .device_env import DeviceEnv TensorSeq = Union[torch.Tensor, Tuple[torch.Tensor, ...], List[torch.Tensor], Dict[Any, torch.Tensor]] @@ -22,7 +21,7 @@ def _validate_type(tensor: TensorSeq): def distribute_bn(model: torch.nn.Module, reduce: bool = False, dev_env: DeviceEnv = None): if dev_env is None: - dev_env = get_device() + dev_env = DeviceEnv.instance() # ensure every node has the same running bn stats for bn_name, bn_buf in unwrap_model(model).named_buffers(recurse=True): if ('running_mean' in bn_name) or ('running_var' in bn_name): @@ -40,7 +39,7 @@ def all_gather_recursive(tensor: TensorSeq, cat_dim=0, dev_env: DeviceEnv = None """ _validate_type(tensor) if dev_env is None: - dev_env = get_device() + dev_env = DeviceEnv.instance() if isinstance(tensor, torch.Tensor): return dev_env.all_gather(tensor, cat_dim=cat_dim) elif isinstance(tensor, dict): @@ -55,7 +54,7 @@ def all_reduce_recursive(tensor: TensorSeq, op=ReduceOp.SUM, average=False, dev_ """ _validate_type(tensor) if dev_env is None: - dev_env = get_device() + dev_env = DeviceEnv.instance() if isinstance(tensor, torch.Tensor): return dev_env.all_reduce_(tensor, op=op, average=average) elif isinstance(tensor, dict): @@ -70,7 +69,7 
@@ def broadcast_recursive(tensor: TensorSeq, src_rank: int, dev_env: DeviceEnv = N """ _validate_type(tensor) if dev_env is None: - dev_env = get_device() + dev_env = DeviceEnv.instance() if isinstance(tensor, torch.Tensor): return dev_env.broadcast_(tensor, src_rank=src_rank) elif isinstance(tensor, dict): @@ -85,7 +84,7 @@ def all_gather_sequence(tensor: TensorSeq, cat_dim: int = 0, dev_env: DeviceEnv """ _validate_type(tensor) if dev_env is None: - dev_env = get_device() + dev_env = DeviceEnv.instance() with torch.no_grad(): names = None @@ -124,7 +123,7 @@ def all_reduce_sequence(tensor: TensorSeq, op=ReduceOp.SUM, average=False, dev_e """ _validate_type(tensor) if dev_env is None: - dev_env = get_device() + dev_env = DeviceEnv.instance() with torch.no_grad(): names = None diff --git a/timm/bits/metric.py b/timm/bits/metric.py index 7a5cc997ad..b18282b8be 100644 --- a/timm/bits/metric.py +++ b/timm/bits/metric.py @@ -6,14 +6,13 @@ from torch.distributed import ReduceOp from .device_env import DeviceEnv -from .device_env_factory import get_device from .distributed import all_gather_sequence, all_reduce_sequence -MetricValue = Union[float, torch.Tensor, List[float], List[torch.Tensor]] +MetricValueT = Union[float, torch.Tensor, List[float], List[torch.Tensor]] @dataclass class ValueInfo: - initial: Optional[MetricValue] = 0. + initial: Optional[MetricValueT] = 0. dtype: torch.dtype = torch.float32 dist_reduce: str = 'sum' dist_average: bool = False @@ -23,10 +22,10 @@ class Metric(abc.ABC): def __init__(self, dev_env: DeviceEnv = None): self._infos: Dict[str, ValueInfo] = {} - self._values: Dict[str, Optional[MetricValue]] = {} - self._values_dist: Dict[str, Optional[MetricValue]] = {} + self._values: Dict[str, Optional[MetricValueT]] = {} + self._values_dist: Dict[str, Optional[MetricValueT]] = {} if dev_env is None: - dev_env = get_device() + dev_env = DeviceEnv.instance() self._dev_env = dev_env def _register_value(self, name: str, info: Optional[ValueInfo] = None): @@ -117,7 +116,7 @@ def _args(op: str): names.append(name) values.append(value) reductions.append(_args(info.dist_reduce)) - same_dsr = False + if same_dsr: do_gather, reduce_kwargs = reductions[0] if do_gather: diff --git a/timm/bits/logger.py b/timm/bits/monitor.py similarity index 98% rename from timm/bits/logger.py rename to timm/bits/monitor.py index a7948a8bb7..af397e1aed 100644 --- a/timm/bits/logger.py +++ b/timm/bits/monitor.py @@ -21,8 +21,6 @@ HAS_WANDB = False -from .device_env_factory import get_device - # FIXME old formatting for reference, to remove # # def log_eval(batch_idx, last_idx, batch_time, loss, top1, top5, log_suffix=''): @@ -122,19 +120,19 @@ def _map_name(key, name_map, capitalize=True): text_update += [_to_str(name, v)] -class Logger: +class Monitor: def __init__( self, experiment_name=None, output_dir=None, - python_logger=None, + logger=None, hparams=None, log_wandb=False, output_enabled=True, ): self.output_dir = output_dir # for tensorboard, csv, text file (TODO) logging - self.logger = python_logger or logging.getLogger('log') + self.logger = logger or logging.getLogger('log') hparams = hparams or {} # Setup CSV writer(s) diff --git a/timm/bits/train_services.py b/timm/bits/train_services.py index 286a4afc61..5ead002db3 100644 --- a/timm/bits/train_services.py +++ b/timm/bits/train_services.py @@ -1,13 +1,13 @@ from dataclasses import dataclass -from .logger import Logger -from timm.utils.checkpoint_saver import CheckpointSaver +from .monitor import Monitor +from .checkpoint_manager import 
CheckpointManager @dataclass class TrainServices: """ Train Loop Services """ - logger: Logger = None - saver: CheckpointSaver = None + logger: Monitor = None + checkpoint_manager: CheckpointManager = None diff --git a/timm/bits/train_setup.py b/timm/bits/train_setup.py index 3884958b05..1480de6384 100644 --- a/timm/bits/train_setup.py +++ b/timm/bits/train_setup.py @@ -13,7 +13,7 @@ except ImportError: ds = None -from .checkpoint import resume_train_checkpoint +from .checkpoint import load_train_state from .device_env import DeviceEnv from .train_cfg import TrainCfg from .train_state import TrainState @@ -90,10 +90,10 @@ def setup_model_and_optimizer( if resume_path: # FIXME this is not implemented yet, do a hack job before proper TrainState serialization? - resume_train_checkpoint( + load_train_state( train_state, resume_path, - resume_opt=resume_opt, + load_opt=resume_opt, log_info=dev_env.primary) if dev_env.distributed: @@ -141,10 +141,10 @@ def setup_model_and_optimizer_deepspeed( if resume_path: # FIXME deepspeed resumes differently - resume_train_checkpoint( + load_legacy_checkpoint( train_state, resume_path, - resume_opt=resume_opt, + load_opt=resume_opt, log_info=dev_env.primary) if dev_env.distributed: diff --git a/timm/bits/train_state.py b/timm/bits/train_state.py index 9a9a0d9221..9c47b5fd4d 100644 --- a/timm/bits/train_state.py +++ b/timm/bits/train_state.py @@ -4,6 +4,8 @@ from torch import nn as nn from timm.scheduler import Scheduler +from timm.utils import get_state_dict, unwrap_model + from .updater import Updater @@ -16,18 +18,33 @@ class TrainState: lr_scheduler: Scheduler = None model_ema: nn.Module = None - step_count_epoch: int = 0 - step_count_global: int = 0 epoch: int = 0 + step_count: int = 0 + step_count_global: int = 0 def __post_init__(self): assert self.model is not None assert self.updater is not None - -def serialize_train_state(train_state: TrainState): - pass - - -def deserialize_train_state(checkpoint: Dict[str, Any]): - pass \ No newline at end of file + def state_dict(self, unwrap_fn=unwrap_model): + state = dict( + epoch=self.epoch, + step_count=self.step_count, + step_count_global=self.step_count_global, + model=get_state_dict(self.model, unwrap_fn), + model_ema=None if self.model_ema is None else get_state_dict(self.model_ema, unwrap_fn), + ) + # FIXME lr_scheduler state save? 
+ state.update(self.updater.state_dict()) + return state + + def load_state_dict(self, state_dict, unwrap_fn=unwrap_model): + self.epoch = state_dict['epoch'] + self.step_count = state_dict['step_count'] + self.step_count_global = state_dict['step_count_global'] + + unwrap_fn(self.model).load_state_dict(state_dict.get('model')) + if 'model_ema' in state_dict and self.model_ema is not None: + unwrap_fn(self.model_ema).load_state_dict(state_dict.get('model_ema')) + + self.updater.load_state_dict(state_dict) diff --git a/timm/bits/updater.py b/timm/bits/updater.py index 422d12ec7f..0bf1c451d4 100644 --- a/timm/bits/updater.py +++ b/timm/bits/updater.py @@ -56,6 +56,7 @@ def state_dict(self): state_dict = dict(optimizer=self.optimizer.state_dict()) if self.grad_scaler is not None: state_dict['grad_scaler'] = self.grad_scaler.state_dict() + return state_dict def load_state_dict(self, state_dict): if 'optimizer' in state_dict: @@ -66,3 +67,6 @@ def load_state_dict(self, state_dict): def after_step(self, after_step_fn, *args): after_step_fn(*args) + @property + def deepspeed(self): + return False diff --git a/timm/bits/updater_deepspeed.py b/timm/bits/updater_deepspeed.py index e080a7deb4..f3c4b3b0fc 100644 --- a/timm/bits/updater_deepspeed.py +++ b/timm/bits/updater_deepspeed.py @@ -24,3 +24,7 @@ def apply(self, loss: torch.Tensor, accumulate=False): self.model.backward(loss) self.model.step() self.reset() + + @property + def deepspeed(self): + return True diff --git a/timm/bits/updater_factory.py b/timm/bits/updater_factory.py index 24ef76c0f8..c3fd9e451e 100644 --- a/timm/bits/updater_factory.py +++ b/timm/bits/updater_factory.py @@ -3,7 +3,6 @@ import torch from .device_env import DeviceEnv, DeviceEnvType -from .device_env_factory import get_device from .updater import Updater from .updater_cuda import UpdaterCudaWithScaler from .updater_deepspeed import UpdaterDeepSpeed @@ -21,7 +20,7 @@ def create_updater( ) -> Updater: if not dev_env: - dev_env = get_device() + dev_env = DeviceEnv.instance() updater_kwargs = dict(model=model, optimizer=optimizer, clip_fn=clip_fn, clip_value=clip_value) use_scaler = dev_env.amp diff --git a/timm/data/loader.py b/timm/data/loader.py index 5ddcc6d200..e8722b2948 100644 --- a/timm/data/loader.py +++ b/timm/data/loader.py @@ -8,7 +8,7 @@ import torch.utils.data -from timm.bits import get_device, DeviceEnvType +from timm.bits import DeviceEnv from .fetcher import Fetcher from .prefetcher_cuda import PrefetcherCuda @@ -75,7 +75,7 @@ def create_loader( ) if dev_env is None: - dev_env = get_device() + dev_env = DeviceEnv.instance() sampler = None if dev_env.distributed and not isinstance(dataset, torch.utils.data.IterableDataset): diff --git a/timm/data/parsers/parser_tfds.py b/timm/data/parsers/parser_tfds.py index 519be03d43..32dac26dfc 100644 --- a/timm/data/parsers/parser_tfds.py +++ b/timm/data/parsers/parser_tfds.py @@ -23,7 +23,7 @@ exit(1) from .parser import Parser -from timm.bits import get_device +from timm.bits import get_global_device MAX_TP_SIZE = 8 # maximum TF threadpool size, only doing jpeg decodes and queuing activities SHUFFLE_SIZE = 16834 # samples to shuffle in DS queue @@ -80,7 +80,7 @@ def __init__(self, root, name, split='train', shuffle=False, is_training=False, self.worker_info = None self.dist_rank = 0 self.dist_num_replicas = 1 - dev_env = get_device() + dev_env = get_global_device() # FIXME allow to work without devenv usage? 
if dev_env.distributed and dev_env.world_size > 1: self.dist_rank = dev_env.global_rank diff --git a/timm/utils/model.py b/timm/utils/model.py index bd46e2f49c..66f7480e55 100644 --- a/timm/utils/model.py +++ b/timm/utils/model.py @@ -3,33 +3,38 @@ Hacked together by / Copyright 2020 Ross Wightman """ from .model_ema import ModelEma -import torch +import torch import fnmatch -def unwrap_model(model): - if isinstance(model, ModelEma): - return unwrap_model(model.ema) - else: - return model.module if hasattr(model, 'module') else model +_SUB_MODULE_ATTR = ('module', 'model') + + +def unwrap_model(model, recursive=True): + for attr in _SUB_MODULE_ATTR: + sub_module = getattr(model, attr, None) + if sub_module is not None: + return unwrap_model(sub_module) if recursive else sub_module + return model def get_state_dict(model, unwrap_fn=unwrap_model): return unwrap_fn(model).state_dict() -def avg_sq_ch_mean(model, input, output): - "calculate average channel square mean of output activations" - return torch.mean(output.mean(axis=[0,2,3])**2).item() +def avg_sq_ch_mean(model, input, output): + """calculate average channel square mean of output activations + """ + return torch.mean(output.mean(axis=[0, 2, 3]) ** 2).item() -def avg_ch_var(model, input, output): - "calculate average channel variance of output activations" - return torch.mean(output.var(axis=[0,2,3])).item()\ +def avg_ch_var(model, input, output): + """calculate average channel variance of output activations""" + return torch.mean(output.var(axis=[0, 2, 3])).item() -def avg_ch_var_residual(model, input, output): - "calculate average channel variance of output activations" - return torch.mean(output.var(axis=[0,2,3])).item() +def avg_ch_var_residual(model, input, output): + """calculate average channel variance of output activations""" + return torch.mean(output.var(axis=[0, 2, 3])).item() class ActivationStatsHook: @@ -58,15 +63,16 @@ def __init__(self, model, hook_fn_locs, hook_fns): raise ValueError("Please provide `hook_fns` for each `hook_fn_locs`, \ their lengths are different.") self.stats = dict((hook_fn.__name__, []) for hook_fn in hook_fns) - for hook_fn_loc, hook_fn in zip(hook_fn_locs, hook_fns): + for hook_fn_loc, hook_fn in zip(hook_fn_locs, hook_fns): self.register_hook(hook_fn_loc, hook_fn) def _create_hook(self, hook_fn): def append_activation_stats(module, input, output): out = hook_fn(module, input, output) self.stats[hook_fn.__name__].append(out) + return append_activation_stats - + def register_hook(self, hook_fn_loc, hook_fn): for name, module in self.model.named_modules(): if not fnmatch.fnmatch(name, hook_fn_loc): @@ -74,9 +80,9 @@ def register_hook(self, hook_fn_loc, hook_fn): module.register_forward_hook(self._create_hook(hook_fn)) -def extract_spp_stats(model, +def extract_spp_stats(model, hook_fn_locs, - hook_fns, + hook_fns, input_shape=[8, 3, 224, 224]): """Extract average square channel mean and variance of activations during forward pass to plot Signal Propogation Plots (SPP). 
@@ -84,9 +90,8 @@ def extract_spp_stats(model, Paper: https://arxiv.org/abs/2101.08692 Example Usage: https://gist.github.com/amaarora/6e56942fcb46e67ba203f3009b30d950 - """ + """ x = torch.normal(0., 1., input_shape) hook = ActivationStatsHook(model, hook_fn_locs=hook_fn_locs, hook_fns=hook_fns) _ = model(x) return hook.stats - \ No newline at end of file diff --git a/train.py b/train.py index 51645e4db8..c61425423c 100755 --- a/train.py +++ b/train.py @@ -28,14 +28,14 @@ import torch.nn as nn import torchvision.utils -from timm.bits import initialize_device, setup_model_and_optimizer, DeviceEnv, Logger, Tracker,\ - TrainState, TrainServices, TrainCfg, AccuracyTopK, AvgTensor, distribute_bn +from timm.bits import initialize_device, setup_model_and_optimizer, DeviceEnv, Monitor, Tracker,\ + TrainState, TrainServices, TrainCfg, CheckpointManager, AccuracyTopK, AvgTensor, distribute_bn from timm.data import create_dataset, create_loader, resolve_data_config, Mixup, FastCollateMixup, AugMixDataset from timm.models import create_model, safe_model_name, convert_splitbn_model from timm.loss import LabelSmoothingCrossEntropy, SoftTargetCrossEntropy, JsdCrossEntropy -from timm.optim import create_optimizer_v2, optimizer_kwargs +from timm.optim import optimizer_kwargs from timm.scheduler import create_scheduler -from timm.utils import setup_default_logging, random_seed, get_outdir, CheckpointSaver +from timm.utils import setup_default_logging, random_seed, get_outdir, unwrap_model _logger = logging.getLogger('train') @@ -276,7 +276,7 @@ def main(): setup_default_logging() args, args_text = _parse_args() - dev_env = initialize_device(amp=args.amp) + dev_env = initialize_device(amp=args.amp, channels_last=args.channels_last) if dev_env.distributed: _logger.info('Training in distributed mode with multiple processes, 1 device per process. Process %d, total %d.' % (dev_env.global_rank, dev_env.world_size)) @@ -293,13 +293,17 @@ def main(): # FIXME perhaps keep the same and just set diff seeds for dataloader worker process? what about TFDS? 
random_seed(args.seed, dev_env.global_rank) - data_config, loader_eval, loader_train = setup_data(args, train_state.model.default_cfg, dev_env, mixup_active) + data_config, loader_eval, loader_train = setup_data( + args, + unwrap_model(train_state.model).default_cfg, + dev_env, + mixup_active) - # setup checkpoint saver + # setup checkpoint manager eval_metric = args.eval_metric best_metric = None best_epoch = None - saver = None + checkpoint_manager = None output_dir = None if dev_env.primary: if args.experiment: @@ -311,24 +315,20 @@ def main(): str(data_config['input_size'][-1]) ]) output_dir = get_outdir(args.output if args.output else './output/train', exp_name) - decreasing = True if eval_metric == 'loss' else False - saver = CheckpointSaver( # TODO CheckpointSaverV2 - model=train_state.model, - optimizer=train_state.updater.optimizer, - args=args, - model_ema=train_state.model_ema, - amp_scaler=train_state.updater.grad_scaler, + checkpoint_manager = CheckpointManager( + hparams=vars(args), checkpoint_dir=output_dir, recovery_dir=output_dir, - decreasing=decreasing, + metric_name=eval_metric, + metric_decreasing=True if eval_metric == 'loss' else False, max_history=args.checkpoint_hist) with open(os.path.join(output_dir, 'args.yaml'), 'w') as f: f.write(args_text) services = TrainServices( - logger=Logger( - output_dir=output_dir, python_logger=_logger, hparams=vars(args), output_enabled=dev_env.primary), - saver=saver, + logger=Monitor( + output_dir=output_dir, logger=_logger, hparams=vars(args), output_enabled=dev_env.primary), + checkpoint_manager=checkpoint_manager, ) try: @@ -379,10 +379,10 @@ def main(): if services.logger is not None: services.logger.write_summary(index=epoch, results=dict(train=train_metrics, eval=eval_metrics)) - if saver is not None: + if checkpoint_manager is not None: # save proper checkpoint with eval metric - save_metric = eval_metrics[eval_metric] - best_metric, best_epoch = saver.save_checkpoint(epoch, metric=save_metric) + best_checkpoint = checkpoint_manager.save_checkpoint(train_state, eval_metrics) + best_metric, best_epoch = best_checkpoint.sort_key, best_checkpoint.epoch train_state = replace(train_state, epoch=epoch + 1) @@ -629,9 +629,9 @@ def after_train_step( lr=lr_avg, ) - if services.saver is not None and cfg.recovery_interval and ( + if services.checkpoint_manager is not None and cfg.recovery_interval and ( end_step or (step_idx + 1) % cfg.recovery_interval == 0): - services.saver.save_recovery(state.epoch, batch_idx=step_idx) + services.checkpoint_manager.save_recovery(state.epoch, batch_idx=step_idx) if state.lr_scheduler is not None: state.lr_scheduler.step_update(num_updates=state.step_count_global) @@ -641,7 +641,7 @@ def evaluate( model: nn.Module, loss_fn: nn.Module, loader, - logger: Logger, + logger: Monitor, dev_env: DeviceEnv, phase_suffix: str = '', log_interval: int = 10, diff --git a/validate.py b/validate.py index 89d7098205..cee359c395 100755 --- a/validate.py +++ b/validate.py @@ -18,7 +18,7 @@ import torch.nn.parallel from collections import OrderedDict -from timm.bits import initialize_device, Tracker, Logger, AccuracyTopK, AvgTensor +from timm.bits import initialize_device, Tracker, Monitor, AccuracyTopK, AvgTensor from timm.models import create_model, apply_test_time_pool, load_checkpoint, is_model, list_models from timm.data import create_dataset, create_loader, resolve_data_config, RealLabelsImagenet from timm.utils import natural_key, setup_default_logging @@ -154,7 +154,7 @@ def validate(args): 
pin_memory=args.pin_mem, tf_preprocessing=args.tf_preprocessing) - logger = Logger(python_logger=_logger) + logger = Monitor(logger=_logger) tracker = Tracker() losses = AvgTensor() accuracy = AccuracyTopK(dev_env=dev_env) From f411724de475989bec2873572fe9d2f0a7b7740e Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 4 Jun 2021 12:49:53 -0700 Subject: [PATCH 10/61] Fix checkpoint delete issue. Add README about bits and initial Pytorch XLA usage on TPU-VM. Add some FIXMEs and fold train_cfg into train_state by default. --- timm/bits/README.md | 102 +++++++++++++++++++++++++++++++- timm/bits/checkpoint_manager.py | 8 +-- timm/bits/train_cfg.py | 4 +- timm/bits/train_services.py | 4 +- timm/bits/train_state.py | 22 +++++-- train.py | 85 +++++++++++++++++--------- 6 files changed, 183 insertions(+), 42 deletions(-) diff --git a/timm/bits/README.md b/timm/bits/README.md index 02ba6dc614..76071164e4 100644 --- a/timm/bits/README.md +++ b/timm/bits/README.md @@ -1,8 +1,104 @@ # Timm Bits -A collection of reusable components and lightweight abstractions for training and evaluating NN. +## Intro +A collection of reusable components and lightweight abstractions for training and evaluating NN with PyTorch. -This is an early WIP with the primary goal to get up and running on TPUs first. Expect significant changes, rewrites, additions... +This is an early WIP (consider it pre-alpha) with the primary goal to get up and running on TPUs w/ PyTorch XLA as the first priority. Expect significant changes, rewrites, additions, and of course bugs. -The current train.py and validate.py scipts are evolving to use the timm.bits components, they will also change significantly. +The current train.py and validate.py scripts are evolving to use the timm.bits components, they will also change significantly. +## Bits Design Brief + +`bits` is designed to be a lightweight and modular set of training abstractions. It certainly shares concepts with other libraries (fastai, ignite, lightning, keras, etc, etc) but is not modeled after any specific one. It is supposed to be a 'bit different', hackable, and not everything to everyone. + +`timm` models will always be useable in pure PyTorch w/o `bits` or anything besides the utils / helpers for pretrained models, feature extraction, default data config. I may breakout bits into a diff project if there is any interest besides my own use for timm image and video model training. + +The layers: +* Device - DeviceEnv dataclass abstraction deals with PyTorch CPU, GPU and XLA device differences, incl distributed helpers, wrappers, etc. There is more than a passing similarity to HuggingFace Accelerate, but developed in parallel and with some difference in the detail. +* Updater - Dataclass that combines the backward pass, optimizer step, grad scaling, grad accumulation is a possibly device specific abstraction. + * Currently basic single optimizer, single forward/backward Updaters are included for GPU, XLA. + * Deepseed will need its own Updater(s) since its Engine is a monolith of epic proportions that breaks all separations of concern in PyTorch (UGH!). NOTE Deepspeed not working yet nor is it a priority. +* Monitor - pull together all console logging, csv summaries, tensorboard, and WandB summaries into one module for monitoring your training. +* Checkpoint Manager - keeps track of your checkpoints +* Metrics - yet another set of metrics, although this may be replaced w/ an external set of classes. 
Uses same update / reset / compute interface as Ignite and Lightning (in theory interchangeable w/ an adapter). Metrics keep state on GPU / TPU to avoid device -> cpu transfers (esp for XLA). +* Task (not implemented yet) - combine your model(s) w/ losses in a task specific module, will also allow task factory for easy build of related metrics +* Train State - dataclasses to hold your tasks (models), updater state, etc +* Train Loop Functions (still in train.py script, not refined) - set of functions for train step, 'after step', evaluate using all of the components mentioned + +How is this different than other options? +* I'm very much trying to avoid a monolithic trainer / learner / model wrapping type class with billions of hooks (avoiding granular inversion of control!). +* The goal is to provide reusable modules that can (hopefully) be mixed and matched w/ other code. +* Many of the components are based on Python dataclasses to reduce boilerplate. +* The train loop components are (will be) functional with easy to follow flow control, and are intended to be replaced when something different is needed, not augmented with extremely granular hooks. + + +## Quick Start + +Most initial users will likely be interested in training timm models w/ PyTorch XLA on TPU-VM instances, this quick start will get you moving. + +If you haven't noticed, this code is on a branch, make sure you checkout the `bits_and_tpu` branch on `timm` before doing this. You can test locally on your GPU too, in either XLA + GPU in a container or the usual PyTorch w/ GPU. + +## Setup Python environment + +This setup assumes you've SSH'd into your TPU-VM after setting it up (https://cloud.google.com/tpu/docs/users-guide-tpu-vm). Don't forget to do this in a TMUX session or you'll be sad if you lose your connection! + +The TPU-VM instances I've been using have a usable version of PyTorch XLA 1.8.1 installed in the python3 environment, we will be using that. + +I've found that leveraging TFDS w/ datasets in TFRecord format, streamed from Google Storage buckets is the most practical / cost-effective solution. I've written a PyTorch IterabeDataset wrapper around TFDS so we will install Tensorflow datasets and use that. + +One thing to watch, be very careful that you don't use a GS based dataset in a different continent from you TPU-VM instances. I burned through a few thousand USD leaving some wires crossed for 1 day. Otherwise the cost of training w/ buckets in same region are quite low. + +### Install TFDS (if using GS buckets) + +``` + pip3 install tensorflow-datasets +``` + +In some earlier tpu-vm instances the installed tensorflow version had issues with the GS bucket reading support and I often ended up installing a diff version. This could conflict with other use cases so only do it if needed. + +``` + pip3 install --upgrade tensorflow-cpu +``` + +You may run into some numpy / pytorch version dependency issues here, try capping the version of tensorflow at 2.4.1 in above command. + + +### Get your dataset into buckets + +You will need to host your dataset in buckets. I have tried creating custom datasets for this setup, but have used a number of TFDS datasets such as ImageNet, Flowers, caltech Birds, Oxford Pets that are available in TFDS. + +The TFDS dataset pages (https://www.tensorflow.org/datasets/catalog/imagenet2012) have directions for the various datasets, I recommend building them in a different VM or local machine and then uploading to your training bucket. 
Many of them will auto-download and build the tfrecord shards for you. ImageNet needs to be downloaded manually. + +### Use a custom allocator + +With PyTorch XLA on a TPU-VM and TFDS you'll end up with a lot of processes and buffering. The instance memory will be used up quickly. I highly recommend using a custom allocator via `LD_PRELOAD`. tcmalloc may now be a default in the tpu-vm instances (check first). jemalloc also worked well for me. If LD_PRELOAD is not set in your env, do the following + +``` + sudo apt update + sudo apt install google-perftools + export LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libtcmalloc.so.4 +``` + +# Train, train, train + +With all the above done, you should be ready to train... below is one particular train command I've just recently been using for some trials on vision MLP models... + +``` + python3 launch_xla.py --num-devices 8 train.py gs://my-imagenet-bucket --dataset tfds/imagenet2012:5.0.0 --model resmlp_24_224 --opt adamw --opt-eps 1e-6 --clip-grad 1.0 --drop-path 0.1 --mixup 0.5 --cutmix 1.0 --aa rand-m6-n4-mstd1.0-inc1 --weight-decay .08 --model-ema --model-ema-decay 0.99993 --sched cosine -j 4 --warmup-lr 1e-6 --warmup-epochs 20 --lr 8.8e-4 -b 256 +``` + +NOTE: I built my TFDS dataset at ver 5.0.0 and it defaults to a newer version now. Change accordingly. + +# Gotchas and Known Issues +* When PyTorch XLA crashes, you hit a TPU OOM etc, lots of processes get orphaned. Get in the habit of killing all python processes before starting a new train run. + * `alias fml='pkill -f python3'` +* For TFDS use, due to the way PyTorch IterableDatasets work at the loader level, each worker process builds batches independently -- they are not dequeued and collated across workers. For validation especially, getting all the samples evenly divided across BOTH the distributed processes AND the dataset workers is a bit annoying. For now keeping the num_workers arg (j) low is advisable, especially for very small validation sets. This can limit your throughput though. +* Random erasing for on-device XLA tensors doesn't work. XLA isn't compatible with the array slicing approach to my RE impl, currently it's done by default after moving tensors to device. I need to fix this. +* There are a number of models using ops that aren't lowered to XLA, this will REALLY slow things down to the point of being unusable. There are flags you can set to debug this, see PyTorch XLA troubleshooting page (https://github.com/pytorch/xla/blob/master/TROUBLESHOOTING.md) + * For NFNet models, force the ScaledStdConv `use_layernorm` arg to True, it is lowered, `std_mean` op is not +* This code doesn't currently work when bfloat16 is forced via the `XLA_USE_BF16=1` env var, it will mess up metrics tensors that overflow in bfloat16. Better controlling model activation vs weight precision vs other tensors is a TODO. +* Your first batch, and generally first epoch will be slow with PyTorch XLA, after that things pick up and move along quickly. Be patient. + +# Bugs and Discussion + +If you find bugs, there are likely many. Feel free to file an issue with `[BITS]` as the prefix. Open a discussion if you have design ideas, again use `[BITS]` in the title. \ No newline at end of file diff --git a/timm/bits/checkpoint_manager.py b/timm/bits/checkpoint_manager.py index b051e126ea..b2c692cb44 100644 --- a/timm/bits/checkpoint_manager.py +++ b/timm/bits/checkpoint_manager.py @@ -86,7 +86,7 @@ def _replace(self, src, dst): try: if os.path.exists(dst): os.unlink(dst) # required for Windows support.
- except Exception as e: + except (OSError, NotImplementedError) as e: self.can_hardlink = False os.replace(src, dst) @@ -98,7 +98,7 @@ def _duplicate(self, src, dst): os.unlink(dst) os.link(src, dst) return - except Exception as e: + except (OSError, NotImplementedError) as e: self.can_hardlink = False shutil.copy2(src, dst) @@ -153,8 +153,8 @@ def _cleanup_checkpoints(self, trim=0): for d in to_delete: try: _logger.debug("Cleaning checkpoint: {}".format(d)) - os.remove(d[0]) - except Exception as e: + os.remove(d.path) + except OSError as e: _logger.error("Exception '{}' while deleting checkpoint".format(e)) self.checkpoint_files = self.checkpoint_files[:delete_index] diff --git a/timm/bits/train_cfg.py b/timm/bits/train_cfg.py index d7b35faf5e..df627809cc 100644 --- a/timm/bits/train_cfg.py +++ b/timm/bits/train_cfg.py @@ -4,9 +4,9 @@ @dataclass class TrainCfg: """ Train Loop Configuration - Dataclass to propagate training configuration values + Dataclass to hold training configuration values """ - num_epochs: int = 0 + num_epochs: int = 100 log_interval: int = 50 recovery_interval: int = 0 accumulate_steps: int = 0 diff --git a/timm/bits/train_services.py b/timm/bits/train_services.py index 5ead002db3..d36d8c2266 100644 --- a/timm/bits/train_services.py +++ b/timm/bits/train_services.py @@ -8,6 +8,6 @@ class TrainServices: """ Train Loop Services """ - logger: Monitor = None - checkpoint_manager: CheckpointManager = None + monitor: Monitor = None + checkpoint: CheckpointManager = None diff --git a/timm/bits/train_state.py b/timm/bits/train_state.py index 9c47b5fd4d..91fcf76fb2 100644 --- a/timm/bits/train_state.py +++ b/timm/bits/train_state.py @@ -6,6 +6,7 @@ from timm.scheduler import Scheduler from timm.utils import get_state_dict, unwrap_model +from .train_cfg import TrainCfg from .updater import Updater @@ -18,6 +19,9 @@ class TrainState: lr_scheduler: Scheduler = None model_ema: nn.Module = None + train_cfg: TrainCfg = TrainCfg() + # FIXME collect & include other cfg like data & model so it's in one spot for checkpoints / logging / debugging? + epoch: int = 0 step_count: int = 0 step_count_global: int = 0 @@ -28,23 +32,33 @@ def __post_init__(self): def state_dict(self, unwrap_fn=unwrap_model): state = dict( + # train loop state (counters, etc), saved and restored epoch=self.epoch, step_count=self.step_count, step_count_global=self.step_count_global, + + # model params / state, saved and restored model=get_state_dict(self.model, unwrap_fn), model_ema=None if self.model_ema is None else get_state_dict(self.model_ema, unwrap_fn), + + # configuration, saved but currently not restored, determined by args / config file for each run + train_cfg=vars(self.train_cfg) ) - # FIXME lr_scheduler state save? - state.update(self.updater.state_dict()) + # FIXME include lr_scheduler state? 
+ state.update(self.updater.state_dict()) # updater (optimizer, scaler,e tc) state added to state return state - def load_state_dict(self, state_dict, unwrap_fn=unwrap_model): + def load_state_dict(self, state_dict, unwrap_fn=unwrap_model, load_opt=True): + # restore train loop state self.epoch = state_dict['epoch'] self.step_count = state_dict['step_count'] self.step_count_global = state_dict['step_count_global'] + # restore model params / state unwrap_fn(self.model).load_state_dict(state_dict.get('model')) if 'model_ema' in state_dict and self.model_ema is not None: unwrap_fn(self.model_ema).load_state_dict(state_dict.get('model_ema')) - self.updater.load_state_dict(state_dict) + # restore optimizer state + if load_opt: + self.updater.load_state_dict(state_dict) diff --git a/train.py b/train.py index c61425423c..c484ad0dbd 100755 --- a/train.py +++ b/train.py @@ -287,7 +287,8 @@ def main(): random_seed(args.seed, 0) # Set all random seeds the same for model/state init (mandatory for XLA) - train_state, train_cfg = setup_train_task(args, dev_env, mixup_active) + train_state = setup_train_task(args, dev_env, mixup_active) + train_cfg = train_state.train_cfg # Set random seeds across ranks differently for train # FIXME perhaps keep the same and just set diff seeds for dataloader worker process? what about TFDS? @@ -326,9 +327,12 @@ def main(): f.write(args_text) services = TrainServices( - logger=Monitor( - output_dir=output_dir, logger=_logger, hparams=vars(args), output_enabled=dev_env.primary), - checkpoint_manager=checkpoint_manager, + monitor=Monitor( + output_dir=output_dir, + logger=_logger, + hparams=vars(args), + output_enabled=dev_env.primary), + checkpoint=checkpoint_manager, ) try: @@ -341,7 +345,6 @@ def main(): train_metrics = train_one_epoch( state=train_state, - cfg=train_cfg, services=services, loader=loader_train, dev_env=dev_env, @@ -356,7 +359,7 @@ def main(): train_state.model, train_state.eval_loss, loader_eval, - services.logger, + services.monitor, dev_env) if train_state.model_ema is not None and not args.model_ema_force_cpu: @@ -367,7 +370,7 @@ def main(): train_state.model_ema.module, train_state.eval_loss, loader_eval, - services.logger, + services.monitor, dev_env, phase_suffix='EMA') eval_metrics = ema_eval_metrics @@ -376,8 +379,10 @@ def main(): # step LR for next epoch train_state.lr_scheduler.step(epoch + 1, eval_metrics[eval_metric]) - if services.logger is not None: - services.logger.write_summary(index=epoch, results=dict(train=train_metrics, eval=eval_metrics)) + if services.monitor is not None: + services.monitor.write_summary( + index=epoch, + results=dict(train=train_metrics, eval=eval_metrics)) if checkpoint_manager is not None: # save proper checkpoint with eval metric @@ -459,18 +464,21 @@ def setup_train_task(args, dev_env: DeviceEnv, mixup_active: bool): if dev_env.primary: _logger.info('Scheduled epochs: {}'.format(num_epochs)) + train_cfg = TrainCfg( + num_epochs=num_epochs, + log_interval=args.log_interval, + recovery_interval=args.recovery_interval, + ) + train_state = replace( train_state, lr_scheduler=lr_scheduler, train_loss=train_loss_fn, - eval_loss=eval_loss_fn) - - train_cfg = TrainCfg( - num_epochs=num_epochs, - log_interval=args.log_interval, - recovery_interval=args.recovery_interval) + eval_loss=eval_loss_fn, + train_cfg=train_cfg, + ) - return train_state, train_cfg + return train_state def setup_data(args, default_cfg, dev_env, mixup_active): @@ -545,13 +553,12 @@ def setup_data(args, default_cfg, dev_env, mixup_active): def 
train_one_epoch( state: TrainState, - cfg: TrainCfg, services: TrainServices, loader, dev_env: DeviceEnv, ): tracker = Tracker() - loss_meter = AvgTensor() + loss_meter = AvgTensor() # FIXME move loss meter into task specific TaskMetric state.model.train() state.updater.reset() # zero-grad @@ -573,7 +580,6 @@ def train_one_epoch( state.updater.after_step( after_train_step, state, - cfg, services, dev_env, step_idx, @@ -594,7 +600,6 @@ def train_one_epoch( def after_train_step( state: TrainState, - cfg: TrainCfg, services: TrainServices, dev_env: DeviceEnv, step_idx: int, @@ -603,6 +608,27 @@ def after_train_step( loss_meter: AvgTensor, tensors: Tuple[torch.Tensor, ...], ): + """ + After the core loss / backward / gradient apply step, we perform all non-gradient related + activities here, including updating meters and metrics, logging, and writing checkpoints. + + Many / most of these operations require tensors to be moved to CPU, they should not be done + every step and, for XLA use, they should be done via the optimizer step_closure. This function includes + everything that should be executed within the step closure. + + Args: + state: + services: + dev_env: + step_idx: + step_end_idx: + tracker: + loss_meter: + tensors: + + Returns: + + """ end_step = step_idx == step_end_idx with torch.no_grad(): @@ -610,16 +636,18 @@ def after_train_step( loss_meter.update(loss, output.shape[0]) if state.model_ema is not None: + # FIXME should ema update be included here or in train / updater step? does it matter? state.model_ema.update(state.model) state = replace(state, step_count_global=state.step_count_global + 1) + cfg = state.train_cfg - if services.logger is not None and end_step or (step_idx + 1) % cfg.log_interval == 0: + if services.monitor is not None and end_step or (step_idx + 1) % cfg.log_interval == 0: global_batch_size = dev_env.world_size * output.shape[0] loss_avg = loss_meter.compute() - if services.logger is not None: + if services.monitor is not None: lr_avg = state.updater.get_average_lr() - services.logger.log_step( + services.monitor.log_step( 'Train', step=step_idx, step_end=step_end_idx, @@ -629,11 +657,12 @@ def after_train_step( lr=lr_avg, ) - if services.checkpoint_manager is not None and cfg.recovery_interval and ( + if services.checkpoint is not None and cfg.recovery_interval and ( end_step or (step_idx + 1) % cfg.recovery_interval == 0): - services.checkpoint_manager.save_recovery(state.epoch, batch_idx=step_idx) + services.checkpoint.save_recovery(state.epoch, batch_idx=step_idx) if state.lr_scheduler is not None: + # FIXME perform scheduler update here or via updater after_step call? state.lr_scheduler.step_update(num_updates=state.step_count_global) @@ -649,7 +678,7 @@ def evaluate( tracker = Tracker() losses_m = AvgTensor() - accuracy_m = AccuracyTopK() + accuracy_m = AccuracyTopK() # FIXME move loss and accuracy modules into task specific TaskMetric obj model.eval() @@ -666,7 +695,9 @@ def evaluate( output = output[0] loss = loss_fn(output, target) - dev_env.mark_step() # FIXME + # FIXME, explicitly marking step for XLA use since I'm not using the parallel xm loader + # need to investigate whether parallel loader wrapper is helpful on tpu-vm or only useful for 2-vm setup.
+ dev_env.mark_step() tracker.mark_iter_step_end() losses_m.update(loss, output.size(0)) accuracy_m.update(output, target) From c3db5f5801aca772d16364c4f32543530c9c8c98 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 4 Jun 2021 13:00:52 -0700 Subject: [PATCH 11/61] Worker hack for TFDS eval, add TPU env var setting. --- timm/bits/README.md | 9 ++++++++- train.py | 6 +++++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/timm/bits/README.md b/timm/bits/README.md index 76071164e4..c51d4348bf 100644 --- a/timm/bits/README.md +++ b/timm/bits/README.md @@ -83,8 +83,15 @@ With PyTorch XLA on a TPU-VM and TFDS you'll end up with a lot of processes and With all the above done, you should be ready to train... below is one particular train command I've just recently been using for some trials on vision MLP models... +Make sure the TPU config for PyTorch XLA on TPU-VM is set: ``` - python3 launch_xla.py --num-devices 8 train.py gs://my-imagenet-bucket --dataset tfds/imagenet2012:5.0.0 --model resmlp_24_224 --opt adamw --opt-eps 1e-6 --clip-grad 1.0 --drop-path 0.1 --mixup 0.5 --cutmix 1.0 --aa rand-m6-n4-mstd1.0-inc1 --weight-decay .08 --model-ema --model-ema-decay 0.99993 --sched cosine -j 4 --warmup-lr 1e-6 --warmup-epochs 20 --lr 8.8e-4 -b 256 + export XRT_TPU_CONFIG="localservice;0;localhost:51011" +``` + +Then, launch fighters! + +``` + python3 launch_xla.py --num-devices 8 train.py gs://my-imagenet-bucket --dataset tfds/imagenet2012:5.0.0 --model resmlp_24_224 --opt adamw --opt-eps 1e-6 --clip-grad 1.0 --drop-path 0.1 --mixup 0.5 --cutmix 1.0 --aa rand-m6-n4-mstd1.0-inc1 --weight-decay .08 --model-ema --model-ema-decay 0.99993 --sched cosine -j 4 --warmup-lr 1e-6 --warmup-epochs 20 --epochs 500 --lr 8.8e-4 -b 256 ``` NOTE: build my TFDS dataset at ver 5.0.0 and it defaults to a newer version now. Change accordingly. diff --git a/train.py b/train.py index c484ad0dbd..cca814fd65 100755 --- a/train.py +++ b/train.py @@ -536,6 +536,10 @@ def setup_data(args, default_cfg, dev_env, mixup_active): use_multi_epochs_loader=args.use_multi_epochs_loader ) + eval_workers = args.workers + if 'tfds' in args.dataset: + # FIXME reduce validation issues when using TFDS w/ workers and distributed training + eval_workers = min(2, args.workers) loader_eval = create_loader( dataset_eval, input_size=data_config['input_size'], @@ -544,7 +548,7 @@ def setup_data(args, default_cfg, dev_env, mixup_active): interpolation=data_config['interpolation'], mean=data_config['mean'], std=data_config['std'], - num_workers=args.workers, + num_workers=eval_workers, crop_pct=data_config['crop_pct'], pin_memory=args.pin_mem, ) From 6b2d9c2660cde4fce1ef9b86ae1806c07b8368e7 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 4 Jun 2021 13:08:29 -0700 Subject: [PATCH 12/61] Another bits/README.md update --- timm/bits/README.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/timm/bits/README.md b/timm/bits/README.md index c51d4348bf..eeb8297e3a 100644 --- a/timm/bits/README.md +++ b/timm/bits/README.md @@ -104,8 +104,13 @@ NOTE: build my TFDS dataset at ver 5.0.0 and it defaults to a newer version now. * There are a number of models using ops that aren't lowered to XLA, this will REALLY slow things down to the point of being unusable. 
There are flags you can set to debug this, see PyTorch XLA troubleshooting page (https://github.com/pytorch/xla/blob/master/TROUBLESHOOTING.md) * For NFNet models, force the ScaledStdConv `use_layernorm` arg to True, it is lowered, `std_mean` op is not * This code doesn't currently work when float16 is forced via `XLA_USE_BF16=1` env arg, it will mess up metrics tensors that overflow in bfloat16. Better controlling model activation vs weight precision vs other tensors is a TODO. +* I haven't tested this code with pre TPU-VM (2-VM) setups, but it should work w/ correct config. I intend to make it work with Colab and Kaggle TPU notebooks soon. * Your first batch, and generally first epoch will be slow with Pytorch XLA, after that things pick up and move along quickly. Be patient. # Bugs and Discussion -If you find bugs, there are likely many. Feel free to file an issue with `[BITS]` as the prefix. Open a discussion if you have design ideas, again use `[BITS]` in the title. \ No newline at end of file +If you find bugs (there are likely many), feel free to file an issue with `[BITS]` as the title prefix. Open a discussion if you have design ideas, again use `[BITS]` in the title. + +# Acknowledgements + +The TPU-VMs I've used for creating and testing this code, and that I hope to use for many future `timm` models were made available by the TPU Research Cloud (https://sites.research.google/trc/). \ No newline at end of file From cc870df7b8a4029ced7037500e9b253fec11cc10 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 4 Jun 2021 14:23:34 -0700 Subject: [PATCH 13/61] Update README.md --- timm/bits/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/timm/bits/README.md b/timm/bits/README.md index eeb8297e3a..f17418ac44 100644 --- a/timm/bits/README.md +++ b/timm/bits/README.md @@ -14,7 +14,7 @@ The current train.py and validate.py scripts are evolving to use the timm.bits c `timm` models will always be useable in pure PyTorch w/o `bits` or anything besides the utils / helpers for pretrained models, feature extraction, default data config. I may breakout bits into a diff project if there is any interest besides my own use for timm image and video model training. The layers: -* Device - DeviceEnv dataclass abstraction deals with PyTorch CPU, GPU and XLA device differences, incl distributed helpers, wrappers, etc. There is more than a passing similarity to HuggingFace Accelerate, but developed in parallel and with some difference in the detail. +* DeviceEnv - DeviceEnv dataclass abstraction deals with PyTorch CPU, GPU and XLA device differences, incl distributed helpers, wrappers, etc. There is more than a passing similarity to HuggingFace Accelerate, but developed in parallel and with some difference in the detail. * Updater - Dataclass that combines the backward pass, optimizer step, grad scaling, grad accumulation is a possibly device specific abstraction. * Currently basic single optimizer, single forward/backward Updaters are included for GPU, XLA. * Deepseed will need its own Updater(s) since its Engine is a monolith of epic proportions that breaks all separations of concern in PyTorch (UGH!). NOTE Deepspeed not working yet nor is it a priority. @@ -113,4 +113,4 @@ If you find bugs (there are likely many), feel free to file an issue with `[BITS # Acknowledgements -The TPU-VMs I've used for creating and testing this code, and that I hope to use for many future `timm` models were made available by the TPU Research Cloud (https://sites.research.google/trc/). 
\ No newline at end of file +The TPU-VMs I've used for creating and testing this code, and that I hope to use for many future `timm` models were made available by the TPU Research Cloud (https://sites.research.google/trc/). From ee2b8f49ee96871771a8800e0efba5306c4b9a7f Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 4 Jun 2021 14:41:29 -0700 Subject: [PATCH 14/61] Update README.md --- timm/bits/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/timm/bits/README.md b/timm/bits/README.md index f17418ac44..941e471a30 100644 --- a/timm/bits/README.md +++ b/timm/bits/README.md @@ -29,7 +29,7 @@ How is this different than other options? * I'm very much trying to avoid a monolithic trainer / learner / model wrapping type class with billions of hooks (avoiding granular inversion of control!). * The goal is to provide reusable modules that can (hopefully) be mixed and matched w/ other code. * Many of the components are based on Python dataclasses to reduce boilerplate. -* The train loop components are (will be) functional with easy to follow flow control, and are intended to be replaced when something different is needed, not augmented with extremely granular hooks. +* The train loop components are (will be) functional with easy to follow flow control, and are intended to be replaced when something different is needed, not augmented with hooks via callbacks or inheritence at every conceivable touch point. ## Quick Start @@ -44,7 +44,7 @@ This setup assumes you've SSH'd into your TPU-VM after setting it up (https://cl The TPU-VM instances I've been using have a usable version of PyTorch XLA 1.8.1 installed in the python3 environment, we will be using that. -I've found that leveraging TFDS w/ datasets in TFRecord format, streamed from Google Storage buckets is the most practical / cost-effective solution. I've written a PyTorch IterabeDataset wrapper around TFDS so we will install Tensorflow datasets and use that. +I've found that leveraging TFDS w/ datasets in TFRecord format, streamed from Google Storage buckets is the most practical / cost-effective solution. I've written a PyTorch IterabeDataset wrapper around TFDS so we will install Tensorflow datasets and use that. Note that traditionaly PyTorch datasets on local disks do work both on TPU-VM, GPU cloud instances, or you local machine. Setting up persistent disks wasn't the easiest thing to do on TPU-VM for awhile so TFDS was my default. One thing to watch, be very careful that you don't use a GS based dataset in a different continent from you TPU-VM instances. I burned through a few thousand USD leaving some wires crossed for 1 day. Otherwise the cost of training w/ buckets in same region are quite low. From 5c5cadfe4c2d14a5f35a71ec73082469fbc03729 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 4 Jun 2021 14:44:07 -0700 Subject: [PATCH 15/61] Update README.md --- timm/bits/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/timm/bits/README.md b/timm/bits/README.md index 941e471a30..9ce2c63dd7 100644 --- a/timm/bits/README.md +++ b/timm/bits/README.md @@ -44,7 +44,7 @@ This setup assumes you've SSH'd into your TPU-VM after setting it up (https://cl The TPU-VM instances I've been using have a usable version of PyTorch XLA 1.8.1 installed in the python3 environment, we will be using that. -I've found that leveraging TFDS w/ datasets in TFRecord format, streamed from Google Storage buckets is the most practical / cost-effective solution. 
I've written a PyTorch IterabeDataset wrapper around TFDS so we will install Tensorflow datasets and use that. Note that traditionaly PyTorch datasets on local disks do work both on TPU-VM, GPU cloud instances, or you local machine. Setting up persistent disks wasn't the easiest thing to do on TPU-VM for awhile so TFDS was my default. +I've found that leveraging TFDS w/ datasets in TFRecord format, streamed from Google Storage buckets is the most practical / cost-effective solution. I've written a PyTorch IterableDataset wrapper around TFDS so we will install Tensorflow datasets and use that. Traditional PyTorch datasets on local disks do work w/ bits for all of TPU-VM, GPU cloud instances, and your local machine. Setting up persistent disks wasn't the easiest thing to do on TPU-VMs so TFDS was my default in that context. One thing to watch, be very careful that you don't use a GS based dataset in a different continent from your TPU-VM instances. I burned through a few thousand USD leaving some wires crossed for 1 day. Otherwise the cost of training w/ buckets in same region is quite low. From 847b4af1442f9b18a3dbe50903778d0b410f52b0 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Sun, 6 Jun 2021 12:35:54 -0700 Subject: [PATCH 16/61] Update README.md --- timm/bits/README.md | 58 +++++++++++++++++++++++++++++---------------- 1 file changed, 38 insertions(+), 20 deletions(-) diff --git a/timm/bits/README.md b/timm/bits/README.md index 9ce2c63dd7..feacb52335 100644 --- a/timm/bits/README.md +++ b/timm/bits/README.md @@ -1,7 +1,7 @@ # Timm Bits ## Intro -A collection of reusable components and lightweight abstractions for training and evaluating NN with PyTorch. +A collection of reusable components and lightweight abstractions for training and evaluating NN with PyTorch and PyTorch XLA. This is an early WIP (consider it pre-alpha) with the primary goal to get up and running on TPUs w/ PyTorch XLA as the first priority. Expect significant changes, rewrites, additions, and of course bugs. @@ -9,30 +9,32 @@ The current train.py and validate.py scripts are evolving to use the timm.bits c ## Bits Design Brief -`bits` is designed to be a lightweight and modular set of training abstractions. It certainly shares concepts with other libraries (fastai, ignite, lightning, keras, etc, etc) but is not modeled after any specific one. It is supposed to be a 'bit different', hackable, and not everything to everyone. +`bits` is designed to be a lightweight and modular set of training abstractions. It shares concepts with other libraries (fastai, ignite, lightning, keras, etc, etc) but is not modeled after any specific one. It is supposed to be a 'bit different', hackable, and is purposely not trying to serve every use case or be everything to everyone. -`timm` models will always be useable in pure PyTorch w/o `bits` or anything besides the utils / helpers for pretrained models, feature extraction, default data config. I may breakout bits into a diff project if there is any interest besides my own use for timm image and video model training. +`timm` models will always be usable in pure PyTorch w/o `bits` or any dependencies besides the model utils and helpers for pretrained models, feature extraction, default data config. + +I may breakout bits into a diff project if there is any interest besides my own use for timm image and video model training. The layers: -* DeviceEnv - DeviceEnv dataclass abstraction deals with PyTorch CPU, GPU and XLA device differences, incl distributed helpers, wrappers, etc.
There is more than a passing similarity to HuggingFace Accelerate, but developed in parallel and with some difference in the detail. -* Updater - Dataclass that combines the backward pass, optimizer step, grad scaling, grad accumulation is a possibly device specific abstraction. - * Currently basic single optimizer, single forward/backward Updaters are included for GPU, XLA. +* DeviceEnv - DeviceEnv dataclass abstraction handles PyTorch CPU, GPU and XLA device differences, incl distributed functions, parallel wrappers, etc. There is more than a passing similarity to HuggingFace Accelerate, but developed in parallel and with some difference in the detail and separation of concerns. +* Updater - A dataclass that combines the backward pass, optimizer step, grad scaling, grad accumulation into a device specific abstraction. + * Currently, basic single optimizer, single forward/backward Updaters are included for GPU, XLA. * Deepspeed will need its own Updater(s) since its Engine is a monolith of epic proportions that breaks all separations of concern in PyTorch (UGH!). NOTE Deepspeed not working yet nor is it a priority. * Monitor - pull together all console logging, csv summaries, tensorboard, and WandB summaries into one module for monitoring your training. * Checkpoint Manager - keeps track of your checkpoints -* Metrics - yet another set of metrics, although this may be replaced w/ an external set of classes. Uses same update / reset / compute interface as Ignite and Lightning (in theory interchangeable w/ an adapter). Metrics keep state on GPU / TPU to avoid device -> cpu transfers (esp for XLA). -* Task (not implemented yet) - combine your model(s) w/ losses in a task specific module, will also allow task factory for easy build of related metrics -* Train State - dataclasses to hold your tasks (models), updater state, etc +* Metrics - yet another set of metrics, although this may be replaced w/ an external set of classes. Uses same update / reset / compute interface as Ignite and Lightning (in theory interchangeable w/ a thin adapter). Metrics keep state on GPU / TPU to avoid device -> cpu transfers (esp for XLA). +* Task (not implemented yet) - combine your model(s) w/ losses in a task specific module, will also allow task factory for easy build of appropriate metrics +* TrainState - dataclasses to hold your tasks (models), updater state, etc * Train Loop Functions (still in train.py script, not refined) - set of functions for train step, 'after step', evaluate using all of the components mentioned How is this different than other options? -* I'm very much trying to avoid a monolithic trainer / learner / model wrapping type class with billions of hooks (avoiding granular inversion of control!). +* I'm very much trying to avoid a monolithic trainer / learner / model wrapping type class with numerous hooks and overrides to keep track of (avoiding granular inversion of control!). * The goal is to provide reusable modules that can (hopefully) be mixed and matched w/ other code. * Many of the components are based on Python dataclasses to reduce boilerplate. * The train loop components are (will be) functional with easy to follow flow control, and are intended to be replaced when something different is needed, not augmented with hooks via callbacks or inheritance at every conceivable touch point. -## Quick Start +## Quick Start for PyTorch XLA on TPU-VM Most initial users will likely be interested in training timm models w/ PyTorch XLA on TPU-VM instances, this quick start will get you moving.
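Before getting into the TPU specific setup, here is a rough sketch of how the pieces described above are meant to compose. This is illustrative only, not lifted from the current scripts: the exact default arguments of `initialize_device`, the return format of the metric `compute()` calls, and the synthetic batches standing in for a real loader are all assumptions and may not match the code exactly.

```python
import torch

from timm.bits import initialize_device, AccuracyTopK, AvgTensor
from timm.models import create_model

dev_env = initialize_device()                       # resolves CPU / CUDA / XLA, distributed env, AMP
model = create_model('resnet50', pretrained=True).to(dev_env.device)
model.eval()
loss_fn = torch.nn.CrossEntropyLoss().to(dev_env.device)

losses = AvgTensor()                                # update / reset / compute style meters
accuracy = AccuracyTopK(dev_env=dev_env)            # metric state stays on the device

with torch.no_grad():
    for _ in range(4):                              # synthetic batches, stand-in for a real loader
        sample = torch.randn(8, 3, 224, 224, device=dev_env.device)
        target = torch.randint(0, 1000, (8,), device=dev_env.device)
        output = model(sample)
        loss = loss_fn(output, target)
        dev_env.mark_step()                         # no-op on CUDA / CPU, steps the lazy graph on XLA
        losses.update(loss, output.size(0))
        accuracy.update(output, target)

print(losses.compute(), accuracy.compute())         # exact return format may still change
```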
@@ -51,16 +53,17 @@ One thing to watch, be very careful that you don't use a GS based dataset in a d ### Install TFDS (if using GS buckets) ``` - pip3 install tensorflow-datasets +pip3 install tensorflow-datasets ``` -In some earlier tpu-vm instances the installed tensorflow version had issues with the GS bucket reading support and I often ended up installing a diff version. This could conflict with other use cases so only do it if needed. +Some tpu-vm instances may have a tensorflow version pre-installed that conflicts with tensorflow-datasets, especially the bucket reading support. If training crashes with errors about inability to read from buckets, tensorflow symbol errors, tensorflow datasets missing functions, etc, you should try removing the pre-installed tensorflow and installing one from pypi. ``` - pip3 install --upgrade tensorflow-cpu +sudo pip3 uninstall tf-nightly +pip3 install tensorflow-cpu ``` -You may run into some numpy / pytorch version dependency issues here, try capping the version of tensorflow at 2.4.1 in above command. +You may run into some numpy / pytorch version dependency issues here, try capping the version of tensorflow at `==2.4.1` in the above command. ### Get your dataset into buckets @@ -74,9 +77,9 @@ The TFDS dataset pages (https://www.tensorflow.org/datasets/catalog/imagenet2012 With PyTorch XLA on a TPU-VM and TFDS you'll end up with a lot of processes and buffering. The instance memory will be used up quickly. I highly recommend using a custom allocator via `LD_PRELOAD`. tcmalloc may now be a default in the tpu-vm instances (check first). jemalloc also worked well for me. If LD_PRELOAD is not set in your env, do the following ``` - sudo apt update - sudo apt install google-perftools - export LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libtcmalloc.so.4 +sudo apt update +sudo apt install google-perftools +export LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libtcmalloc.so.4 ``` # Train, train, train With all the above done, you should be ready to train... below is one particular train command I've just recently been using for some trials on vision MLP models... Make sure the TPU config for PyTorch XLA on TPU-VM is set: ``` -export XRT_TPU_CONFIG="localservice;0;localhost:51011" +export XRT_TPU_CONFIG="localservice;0;localhost:51011" ``` Then, launch fighters! ``` -python3 launch_xla.py --num-devices 8 train.py gs://my-imagenet-bucket --dataset tfds/imagenet2012:5.0.0 --model resmlp_24_224 --opt adamw --opt-eps 1e-6 --clip-grad 1.0 --drop-path 0.1 --mixup 0.5 --cutmix 1.0 --aa rand-m6-n4-mstd1.0-inc1 --weight-decay .08 --model-ema --model-ema-decay 0.99993 --sched cosine -j 4 --warmup-lr 1e-6 --warmup-epochs 20 --epochs 500 --lr 8.8e-4 -b 256 +python3 launch_xla.py --num-devices 8 train.py gs://my-imagenet-bucket --dataset tfds/imagenet2012:5.0.0 --model resmlp_24_224 --opt adamw --opt-eps 1e-6 --clip-grad 1.0 --drop-path 0.1 --mixup 0.5 --cutmix 1.0 --aa rand-m6-n4-mstd1.0-inc1 --weight-decay .08 --model-ema --model-ema-decay 0.99993 --sched cosine -j 4 --warmup-lr 1e-6 --warmup-epochs 20 --epochs 500 --lr 8.8e-4 -b 256 ``` NOTE: I built my TFDS dataset at ver 5.0.0 and it defaults to a newer version now. Change accordingly. +# Quick Start w/ GPU + +`timm bits` should work great on your multi-GPU setups just like the old `timm` training script with either TFDS based datasets or a local folder.
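For a quick single-GPU, single process sanity check you can also skip the launcher and invoke train.py directly. Something along these lines should work (the path, model, and batch size here are just placeholder examples, adjust to taste):

```
python3 train.py /path/to/imagenet --model resnet50 --amp -b 128 -j 4
```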
+ +The equivalent training command of the XLA setup above if you were on an 8-GPU machine and using TFDS would be, + +``` +./distributed_train.sh 8 train.py gs://my-imagenet-bucket --dataset tfds/imagenet2012:5.0.0 --model resmlp_24_224 --opt adamw --opt-eps 1e-6 --clip-grad 1.0 --drop-path 0.1 --mixup 0.5 --cutmix 1.0 --aa rand-m6-n4-mstd1.0-inc1 --weight-decay .08 --model-ema --model-ema-decay 0.99993 --sched cosine -j 4 --warmup-lr 1e-6 --warmup-epochs 20 --epochs 500 --lr 8.8e-4 -b 256 +``` + +Or this for ImageNet in a local folder, +``` +./distributed_train.sh 8 train.py /path/to/imagenet --model resmlp_24_224 --opt adamw --opt-eps 1e-6 --clip-grad 1.0 --drop-path 0.1 --mixup 0.5 --cutmix 1.0 --aa rand-m6-n4-mstd1.0-inc1 --weight-decay .08 --model-ema --model-ema-decay 0.99993 --sched cosine -j 4 --warmup-lr 1e-6 --warmup-epochs 20 --epochs 500 --lr 8.8e-4 -b 256 +``` + # Gotchas and Known Issues * When PyTorch XLA crashes, you hit a TPU OOM etc, lots of processes get orphaned. Get in the habit of killing all python processes before starting a new train run. * `alias fml='pkill -f python3'` * For TFDS use, due to the way PyTorch IterableDatasets work at the loader level, each worker process builds batches independently -- they are not dequeued and collated across workers. For validation especially, getting all the samples evenly divided across BOTH the distributed processes AND the dataset workers is a bit annoying. For now keeping the num_workers arg (j) low is advisable, especially for very small validation sets. This can limit your throughput though. * Random erasing for on-device XLA tensors doesn't work. XLA isn't compatible with the array slicing approach to my RE impl, currently it's done by default after moving tensors to device. I need to fix this. * There are a number of models using ops that aren't lowered to XLA, this will REALLY slow things down to the point of being unusable. There are flags you can set to debug this, see PyTorch XLA troubleshooting page (https://github.com/pytorch/xla/blob/master/TROUBLESHOOTING.md) * For NFNet models, force the ScaledStdConv `use_layernorm` arg to True, it is lowered, `std_mean` op is not * This code doesn't currently work when bfloat16 is forced via the `XLA_USE_BF16=1` env var, it will mess up metrics tensors that overflow in bfloat16. Better controlling model activation vs weight precision vs other tensors is a TODO. * Your first batch, and generally first epoch will be slow with PyTorch XLA, after that things pick up and move along quickly. Be patient. # Bugs and Discussion If you find bugs (there are likely many), feel free to file an issue with `[BITS]` as the title prefix. Open a discussion if you have design ideas, again use `[BITS]` in the title. # Acknowledgements The TPU-VMs I've used for creating and testing this code, and that I hope to use for many future `timm` models were made available by the TPU Research Cloud (https://sites.research.google/trc/). From 5e95ced5a7763541f7219f35fd155e3fbfe66e8b Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Wed, 23 Jun 2021 11:10:05 -0700 Subject: [PATCH 17/61] timm bits checkpoint support for avg_checkpoints.py --- avg_checkpoints.py | 4 ++++ timm/models/helpers.py | 13 ++++++++++--- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/avg_checkpoints.py b/avg_checkpoints.py index 1f7604b05f..ea8bbe8476 100755 --- a/avg_checkpoints.py +++ b/avg_checkpoints.py @@ -41,6 +41,10 @@ def checkpoint_metric(checkpoint_path): metric = None if 'metric' in checkpoint: metric = checkpoint['metric'] + elif 'metrics' in checkpoint and 'metric_name' in checkpoint: + metrics = checkpoint['metrics'] + print(metrics) + metric = metrics[checkpoint['metric_name']] return metric diff --git a/timm/models/helpers.py b/timm/models/helpers.py index 662a7a483b..9dafeefa1e 100644 --- a/timm/models/helpers.py +++ b/timm/models/helpers.py @@ -24,13 +24,20 @@ def load_state_dict(checkpoint_path, use_ema=False): if checkpoint_path and os.path.isfile(checkpoint_path): checkpoint = torch.load(checkpoint_path, map_location='cpu') - state_dict_key = 'state_dict' + state_dict_key = '' if isinstance(checkpoint, dict): if use_ema and 'state_dict_ema' in checkpoint: state_dict_key = 'state_dict_ema' + elif use_ema and 'model_ema' in checkpoint: + state_dict_key = 'model_ema' + elif 'state_dict' in checkpoint: + state_dict_key = 'state_dict' + elif 'model' in checkpoint: + state_dict_key = 'model' + if state_dict_key: + state_dict = checkpoint[state_dict_key] new_state_dict = OrderedDict() - for k, v in checkpoint[state_dict_key].items(): + for k, v in state_dict.items(): # strip `module.` prefix name = k[7:] if k.startswith('module') else k new_state_dict[name] = v From 40457e569179642e21f8120c8854532b395ce1ca Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 13 Aug 2021 12:45:43 -0700 Subject: [PATCH 18/61] Transforms, augmentation work for bits, add RandomErasing support for XLA (pushing into transforms), revamp of transform/preproc config, etc ongoing...
--- clean_checkpoint.py | 15 +- inference.py | 4 +- timm/bits/device_env.py | 3 + timm/bits/device_env_cuda.py | 5 +- timm/bits/device_env_xla.py | 5 + timm/bits/train_setup.py | 7 +- timm/data/__init__.py | 6 +- timm/data/auto_augment.py | 18 ++- timm/data/collate.py | 2 +- timm/data/config.py | 43 ++++++ timm/data/fetcher.py | 60 +++++--- timm/data/loader.py | 131 +++++++++-------- timm/data/mixup.py | 70 +++++++--- timm/data/prefetcher_cuda.py | 54 ++++--- timm/data/random_erasing.py | 87 ++++++++++-- timm/data/transforms.py | 57 +++++--- timm/data/transforms_factory.py | 241 ++++++++++++++++++++------------ timm/models/helpers.py | 13 +- train.py | 106 +++++++++----- validate.py | 34 +++-- 20 files changed, 628 insertions(+), 333 deletions(-) diff --git a/clean_checkpoint.py b/clean_checkpoint.py index a8edcc915a..1553fc4b1c 100755 --- a/clean_checkpoint.py +++ b/clean_checkpoint.py @@ -14,6 +14,9 @@ import shutil from collections import OrderedDict +from timm.models.helpers import load_state_dict +from timm.utils import setup_default_logging + parser = argparse.ArgumentParser(description='PyTorch Checkpoint Cleaner') parser.add_argument('--checkpoint', default='', type=str, metavar='PATH', help='path to latest checkpoint (default: none)') @@ -29,6 +32,7 @@ def main(): args = parser.parse_args() + setup_default_logging() if os.path.exists(args.output): print("Error: Output filename ({}) already exists.".format(args.output)) @@ -37,17 +41,8 @@ def main(): # Load an existing checkpoint to CPU, strip everything but the state_dict and re-save if args.checkpoint and os.path.isfile(args.checkpoint): print("=> Loading checkpoint '{}'".format(args.checkpoint)) - checkpoint = torch.load(args.checkpoint, map_location='cpu') - + state_dict = load_state_dict(args.checkpoint, use_ema=args.use_ema) new_state_dict = OrderedDict() - if isinstance(checkpoint, dict): - state_dict_key = 'state_dict_ema' if args.use_ema else 'state_dict' - if state_dict_key in checkpoint: - state_dict = checkpoint[state_dict_key] - else: - state_dict = checkpoint - else: - assert False for k, v in state_dict.items(): if args.clean_aux_bn and 'aux_bn' in k: # If all aux_bn keys are removed, the SplitBN layers will end up as normal and diff --git a/inference.py b/inference.py index 5fcf1e6024..1f248dc71b 100755 --- a/inference.py +++ b/inference.py @@ -13,7 +13,7 @@ import torch from timm.models import create_model, apply_test_time_pool -from timm.data import ImageDataset, create_loader, resolve_data_config +from timm.data import ImageDataset, create_loader_v2, resolve_data_config from timm.utils import AverageMeter, setup_default_logging torch.backends.cudnn.benchmark = True @@ -82,7 +82,7 @@ def main(): else: model = model.cuda() - loader = create_loader( + loader = create_loader_v2( ImageDataset(args.data), input_size=config['input_size'], batch_size=args.batch_size, diff --git a/timm/bits/device_env.py b/timm/bits/device_env.py index 0a926e6961..e992ee80a9 100644 --- a/timm/bits/device_env.py +++ b/timm/bits/device_env.py @@ -128,6 +128,9 @@ def state_dict_to_device(self, state: Dict[str, Any]): def mark_step(self): pass # NO-OP for non-XLA devices + def synchronize(self, tensors: Optional[TensorList] = None): + pass + def all_reduce_(self, tensor: TensorList, op=dist.ReduceOp.SUM, average=False): dist.all_reduce(tensor, op=op) if average: diff --git a/timm/bits/device_env_cuda.py b/timm/bits/device_env_cuda.py index c57dfda5c5..33760d97da 100644 --- a/timm/bits/device_env_cuda.py +++ b/timm/bits/device_env_cuda.py @@ 
-6,7 +6,7 @@ import torch from torch.nn.parallel import DistributedDataParallel, DataParallel -from .device_env import DeviceEnv, DeviceEnvType +from .device_env import DeviceEnv, DeviceEnvType, TensorList def is_cuda_available(): @@ -63,3 +63,6 @@ def wrap_parallel(self, *modules, **kwargs): assert not self.distributed wrapped = [DataParallel(m, **kwargs) for m in modules] return wrapped[0] if len(wrapped) == 1 else wrapped + + def synchronize(self, tensors: Optional[TensorList] = None): + torch.cuda.synchronize(self.device) diff --git a/timm/bits/device_env_xla.py b/timm/bits/device_env_xla.py index 46517f7a91..2dad927343 100644 --- a/timm/bits/device_env_xla.py +++ b/timm/bits/device_env_xla.py @@ -8,9 +8,11 @@ try: import torch_xla.core.xla_model as xm + import torch_xla _HAS_XLA = True except ImportError as e: xm = None + torch_xla = None _HAS_XLA = False try: @@ -81,6 +83,9 @@ def wrap_parallel(self, *modules): def mark_step(self): xm.mark_step() + def synchronize(self, tensors: Optional[TensorList] = None): + torch_xla._XLAC._xla_sync_multi(tensors, devices=[], wait=True, sync_xla_data=True) + def all_reduce(self, tensor: torch.Tensor, op=ReduceOp.SUM, average=False): assert isinstance(tensor, torch.Tensor) # unlike in-place variant, lists/tuples not allowed op = _PT_TO_XM_OP[op] diff --git a/timm/bits/train_setup.py b/timm/bits/train_setup.py index 1480de6384..5aca908f54 100644 --- a/timm/bits/train_setup.py +++ b/timm/bits/train_setup.py @@ -89,7 +89,6 @@ def setup_model_and_optimizer( train_state = TrainState(model=model, updater=updater, model_ema=model_ema) if resume_path: - # FIXME this is not implemented yet, do a hack job before proper TrainState serialization? load_train_state( train_state, resume_path, @@ -141,11 +140,7 @@ def setup_model_and_optimizer_deepspeed( if resume_path: # FIXME deepspeed resumes differently - load_legacy_checkpoint( - train_state, - resume_path, - load_opt=resume_opt, - log_info=dev_env.primary) + assert False if dev_env.distributed: train_state = dataclasses.replace( diff --git a/timm/data/__init__.py b/timm/data/__init__.py index 7d3cb2b4d7..163bcea737 100644 --- a/timm/data/__init__.py +++ b/timm/data/__init__.py @@ -4,9 +4,9 @@ from .constants import * from .dataset import ImageDataset, IterableImageDataset, AugMixDataset from .dataset_factory import create_dataset -from .loader import create_loader +from .loader import create_loader_v2, PreprocessCfg, AugCfg, MixupCfg from .mixup import Mixup, FastCollateMixup from .parsers import create_parser from .real_labels import RealLabelsImagenet -from .transforms import * -from .transforms_factory import create_transform \ No newline at end of file +from .transforms import RandomResizedCropAndInterpolation, ToTensor, ToNumpy +from .transforms_factory import create_transform_v2, create_transform diff --git a/timm/data/auto_augment.py b/timm/data/auto_augment.py index 7cbd2dee0a..46c36531bc 100644 --- a/timm/data/auto_augment.py +++ b/timm/data/auto_augment.py @@ -41,6 +41,22 @@ _RANDOM_INTERPOLATION = (Image.BILINEAR, Image.BICUBIC) +def _pil_interp(method): + def _convert(m): + if method == 'bicubic': + return Image.BICUBIC + elif method == 'lanczos': + return Image.LANCZOS + elif method == 'hamming': + return Image.HAMMING + else: + return Image.BILINEAR + if isinstance(method, (list, tuple)): + return [_convert(m) if isinstance(m, str) else m for m in method] + else: + return _convert(method) if isinstance(method, str) else method + + def _interpolation(kwargs): interpolation = 
kwargs.pop('resample', Image.BILINEAR) if isinstance(interpolation, (list, tuple)): @@ -325,7 +341,7 @@ def __init__(self, name, prob=0.5, magnitude=10, hparams=None): self.hparams = hparams.copy() self.kwargs = dict( fillcolor=hparams['img_mean'] if 'img_mean' in hparams else _FILL, - resample=hparams['interpolation'] if 'interpolation' in hparams else _RANDOM_INTERPOLATION, + resample=_pil_interp(hparams['interpolation']) if 'interpolation' in hparams else _RANDOM_INTERPOLATION, ) # If magnitude_std is > 0, we introduce some randomness diff --git a/timm/data/collate.py b/timm/data/collate.py index a1e37e1f80..28f2af2a77 100644 --- a/timm/data/collate.py +++ b/timm/data/collate.py @@ -30,7 +30,7 @@ def fast_collate(batch): elif isinstance(batch[0][0], torch.Tensor): targets = torch.tensor([b[1] for b in batch], dtype=torch.int64) assert len(targets) == batch_size - tensor = torch.zeros((batch_size, *batch[0][0].shape), dtype=torch.uint8) + tensor = torch.zeros((batch_size, *batch[0][0].shape), dtype=batch[0][0].dtype) for i in range(batch_size): tensor[i].copy_(batch[i][0]) return tensor, targets diff --git a/timm/data/config.py b/timm/data/config.py index 06920d7df8..f9ed7b6c05 100644 --- a/timm/data/config.py +++ b/timm/data/config.py @@ -1,10 +1,53 @@ import logging +from dataclasses import dataclass +from typing import Tuple, Optional, Union + from .constants import * _logger = logging.getLogger(__name__) +@dataclass +class AugCfg: + scale_range: Tuple[float, float] = (0.08, 1.0) + ratio_range: Tuple[float, float] = (3 / 4, 4 / 3) + hflip_prob: float = 0.5 + vflip_prob: float = 0. + + color_jitter: float = 0.4 + auto_augment: Optional[str] = None + + re_prob: float = 0. + re_mode: str = 'const' + re_count: int = 1 + + num_aug_splits: int = 0 + + +@dataclass +class PreprocessCfg: + input_size: Tuple[int, int, int] = (3, 224, 224) + mean: Tuple[float, ...] = IMAGENET_DEFAULT_MEAN + std: Tuple[float, ...] = IMAGENET_DEFAULT_STD + interpolation: str = 'bilinear' + crop_pct: float = 0.875 + aug: AugCfg = None + + +@dataclass +class MixupCfg: + prob: float = 1.0 + switch_prob: float = 0.5 + mixup_alpha: float = 1. + cutmix_alpha: float = 0. + cutmix_minmax: Optional[Tuple[float, float]] = None + mode: str = 'batch' + correct_lam: bool = True + label_smoothing: float = 0.1 + num_classes: int = 0 + + def resolve_data_config(args, default_cfg={}, model=None, use_test_size=False, verbose=False): new_config = {} default_cfg = default_cfg diff --git a/timm/data/fetcher.py b/timm/data/fetcher.py index ec5afe8a65..c833b5964c 100644 --- a/timm/data/fetcher.py +++ b/timm/data/fetcher.py @@ -2,7 +2,7 @@ from .constants import * from .random_erasing import RandomErasing -from. 
mixup import FastCollateMixup +from .mixup import FastCollateMixup class FetcherXla: @@ -12,31 +12,55 @@ def __init__(self): class Fetcher: - def __init__(self, - loader, - mean=IMAGENET_DEFAULT_MEAN, - std=IMAGENET_DEFAULT_STD, - device=None, - dtype=None, - re_prob=0., - re_mode='const', - re_count=1, - re_num_splits=0): + def __init__( + self, + loader, + device: torch.device, + dtype=torch.float32, + normalize=True, + normalize_shape=(1, 3, 1, 1), + mean=IMAGENET_DEFAULT_MEAN, + std=IMAGENET_DEFAULT_STD, + re_prob=0., + re_mode='const', + re_count=1, + num_aug_splits=0, + use_mp_loader=False, + ): self.loader = loader self.device = torch.device(device) - self.dtype = dtype or torch.float32 - self.mean = torch.tensor([x * 255 for x in mean], dtype=self.dtype, device=self.device).view(1, 3, 1, 1) - self.std = torch.tensor([x * 255 for x in std], dtype=self.dtype, device=self.device).view(1, 3, 1, 1) + self.dtype = dtype + if normalize: + self.mean = torch.tensor( + [x * 255 for x in mean], dtype=self.dtype, device=self.device).view(normalize_shape) + self.std = torch.tensor( + [x * 255 for x in std], dtype=self.dtype, device=self.device).view(normalize_shape) + else: + self.mean = None + self.std = None if re_prob > 0.: + # NOTE RandomErasing shouldn't be used here w/ XLA devices self.random_erasing = RandomErasing( - probability=re_prob, mode=re_mode, max_count=re_count, num_splits=re_num_splits, device=device) + probability=re_prob, mode=re_mode, count=re_count, num_splits=num_aug_splits) else: self.random_erasing = None + self.use_mp_loader = use_mp_loader + if use_mp_loader: + # FIXME testing for TPU use + import torch_xla.distributed.parallel_loader as pl + self._loader = pl.MpDeviceLoader(loader, device) + else: + self._loader = loader + print('re', self.random_erasing, self.mean, self.std) def __iter__(self): - for sample, target in self.loader: - sample = sample.to(device=self.device, dtype=self.dtype).sub_(self.mean).div_(self.std) - target = target.to(device=self.device) + for sample, target in self._loader: + if not self.use_mp_loader: + sample = sample.to(device=self.device) + target = target.to(device=self.device) + sample = sample.to(dtype=self.dtype) + if self.mean is not None: + sample.sub_(self.mean).div_(self.std) if self.random_erasing is not None: sample = self.random_erasing(sample) yield sample, target diff --git a/timm/data/loader.py b/timm/data/loader.py index e8722b2948..9d60cd5997 100644 --- a/timm/data/loader.py +++ b/timm/data/loader.py @@ -6,74 +6,52 @@ Hacked together by / Copyright 2020 Ross Wightman """ +from typing import Tuple, Optional, Union, Callable + import torch.utils.data from timm.bits import DeviceEnv - -from .fetcher import Fetcher -from .prefetcher_cuda import PrefetcherCuda from .collate import fast_collate -from .transforms_factory import create_transform -from .constants import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD +from .config import PreprocessCfg, AugCfg, MixupCfg from .distributed_sampler import OrderedDistributedSampler +from .fetcher import Fetcher +from .mixup import FastCollateMixup +from .prefetcher_cuda import PrefetcherCuda -def create_loader( - dataset, - input_size, - batch_size, - is_training=False, - dev_env=None, - no_aug=False, - re_prob=0., - re_mode='const', - re_count=1, - re_split=False, - scale=None, - ratio=None, - hflip=0.5, - vflip=0., - color_jitter=0.4, - auto_augment=None, - num_aug_splits=0, - interpolation='bilinear', - mean=IMAGENET_DEFAULT_MEAN, - std=IMAGENET_DEFAULT_STD, - num_workers=1, - 
crop_pct=None, - collate_fn=None, - pin_memory=False, - tf_preprocessing=False, - use_multi_epochs_loader=False, - persistent_workers=True, +def create_loader_v2( + dataset: torch.utils.data.Dataset, + batch_size: int, + is_training: bool = False, + dev_env: Optional[DeviceEnv] = None, + normalize=True, + pp_cfg: PreprocessCfg = PreprocessCfg(), + mix_cfg: MixupCfg = None, + num_workers: int = 1, + collate_fn: Optional[Callable] = None, + pin_memory: bool = False, + use_multi_epochs_loader: bool = False, + persistent_workers: bool = True, ): - re_num_splits = 0 - if re_split: - # apply RE to second half of batch if no aug split otherwise line up with aug split - re_num_splits = num_aug_splits or 2 - dataset.transform = create_transform( - input_size, - is_training=is_training, - use_fetcher=True, - no_aug=no_aug, - scale=scale, - ratio=ratio, - hflip=hflip, - vflip=vflip, - color_jitter=color_jitter, - auto_augment=auto_augment, - interpolation=interpolation, - mean=mean, - std=std, - crop_pct=crop_pct, - tf_preprocessing=tf_preprocessing, - re_prob=re_prob, - re_mode=re_mode, - re_count=re_count, - re_num_splits=re_num_splits, - separate=num_aug_splits > 0, - ) + """ + + Args: + dataset: + batch_size: + is_training: + dev_env: + normalize: + pp_cfg: + mix_cfg: + num_workers: + collate_fn: + pin_memory: + use_multi_epochs_loader: + persistent_workers: + + Returns: + """ if dev_env is None: dev_env = DeviceEnv.instance() @@ -85,10 +63,24 @@ def create_loader( else: # This will add extra duplicate entries to result in equal num # of samples per-process, will slightly alter validation results - sampler = OrderedDistributedSampler(dataset, num_replicas=dev_env.world_size, rank=dev_env.global_rank) + sampler = OrderedDistributedSampler( + dataset, num_replicas=dev_env.world_size, rank=dev_env.global_rank) if collate_fn is None: - collate_fn = fast_collate + if mix_cfg is not None and mix_cfg.prob > 0: + collate_fn = FastCollateMixup( + mixup_alpha=mix_cfg.mixup_alpha, + cutmix_alpha=mix_cfg.cutmix_alpha, + cutmix_minmax=mix_cfg.cutmix_minmax, + prob=mix_cfg.prob, + switch_prob=mix_cfg.switch_prob, + mode=mix_cfg.mode, + correct_lam=mix_cfg.correct_lam, + label_smoothing=mix_cfg.label_smoothing, + num_classes=mix_cfg.num_classes, + ) + else: + collate_fn = fast_collate loader_class = torch.utils.data.DataLoader if use_multi_epochs_loader: @@ -110,13 +102,18 @@ def create_loader( loader = loader_class(dataset, **loader_args) fetcher_kwargs = dict( - mean=mean, - std=std, - re_prob=re_prob if is_training and not no_aug else 0., - re_mode=re_mode, - re_count=re_count, - re_num_splits=re_num_splits + normalize=normalize, + mean=pp_cfg.mean, + std=pp_cfg.std, ) + if normalize and is_training and pp_cfg.aug is not None: + fetcher_kwargs.update(dict( + re_prob=pp_cfg.aug.re_prob, + re_mode=pp_cfg.aug.re_mode, + re_count=pp_cfg.aug.re_count, + num_aug_splits=pp_cfg.aug.num_aug_splits, + )) + if dev_env.type_cuda: loader = PrefetcherCuda(loader, **fetcher_kwargs) else: diff --git a/timm/data/mixup.py b/timm/data/mixup.py index 38477548a0..b618bb7c93 100644 --- a/timm/data/mixup.py +++ b/timm/data/mixup.py @@ -102,7 +102,7 @@ class Mixup: num_classes (int): number of classes for target """ def __init__(self, mixup_alpha=1., cutmix_alpha=0., cutmix_minmax=None, prob=1.0, switch_prob=0.5, - mode='batch', correct_lam=True, label_smoothing=0.1, num_classes=1000): + mode='batch', correct_lam=True, label_smoothing=0., num_classes=0): self.mixup_alpha = mixup_alpha self.cutmix_alpha = cutmix_alpha 
self.cutmix_minmax = cutmix_minmax @@ -113,6 +113,8 @@ def __init__(self, mixup_alpha=1., cutmix_alpha=0., cutmix_minmax=None, prob=1.0 self.mix_prob = prob self.switch_prob = switch_prob self.label_smoothing = label_smoothing + if label_smoothing > 0.: + assert num_classes > 0 self.num_classes = num_classes self.mode = mode self.correct_lam = correct_lam # correct lambda based on clipped area for cutmix @@ -218,17 +220,30 @@ def __call__(self, x, target): return x, target +def blend(a, b, lam, is_tensor=False, round_output=True): + if is_tensor: + blend = a.to(dtype=torch.float32) * lam + b.to(dtype=torch.float32) * (1 - lam) + if round_output: + torch.round(blend, out=blend) + else: + blend = a.astype(np.float32) * lam + b.astype(np.float32) * (1 - lam) + if round_output: + np.rint(blend, out=blend) + return blend + + class FastCollateMixup(Mixup): """ Fast Collate w/ Mixup/Cutmix that applies different params to each element or whole batch A Mixup impl that's performed while collating the batches. """ - def _mix_elem_collate(self, output, batch, half=False): + def _mix_elem_collate(self, output, batch, half=False, is_tensor=False): batch_size = len(batch) num_elem = batch_size // 2 if half else batch_size assert len(output) == num_elem lam_batch, use_cutmix = self._params_per_elem(num_elem) + round_output = output.dtype == torch.uint8 for i in range(num_elem): j = batch_size - i - 1 lam = lam_batch[i] @@ -236,22 +251,23 @@ def _mix_elem_collate(self, output, batch, half=False): if lam != 1.: if use_cutmix[i]: if not half: - mixed = mixed.copy() + mixed = mixed.clone() if is_tensor else mixed.copy() # don't want to modify while iterating (yl, yh, xl, xh), lam = cutmix_bbox_and_lam( output.shape, lam, ratio_minmax=self.cutmix_minmax, correct_lam=self.correct_lam) mixed[:, yl:yh, xl:xh] = batch[j][0][:, yl:yh, xl:xh] lam_batch[i] = lam else: - mixed = mixed.astype(np.float32) * lam + batch[j][0].astype(np.float32) * (1 - lam) - np.rint(mixed, out=mixed) - output[i] += torch.from_numpy(mixed.astype(np.uint8)) + mixed = blend(mixed, batch[j][0], lam, is_tensor, round_output) + mixed = mixed.to(dtype=output.dtype) if is_tensor else torch.from_numpy(mixed.astype(np.uint8)) + output[i].copy_(mixed) if half: lam_batch = np.concatenate((lam_batch, np.ones(num_elem))) return torch.tensor(lam_batch).unsqueeze(1) - def _mix_pair_collate(self, output, batch): + def _mix_pair_collate(self, output, batch, is_tensor=False): batch_size = len(batch) lam_batch, use_cutmix = self._params_per_elem(batch_size // 2) + round_output = output.dtype == torch.uint8 for i in range(batch_size // 2): j = batch_size - i - 1 lam = lam_batch[i] @@ -262,24 +278,30 @@ def _mix_pair_collate(self, output, batch): if use_cutmix[i]: (yl, yh, xl, xh), lam = cutmix_bbox_and_lam( output.shape, lam, ratio_minmax=self.cutmix_minmax, correct_lam=self.correct_lam) - patch_i = mixed_i[:, yl:yh, xl:xh].copy() + patch_i = mixed_i[:, yl:yh, xl:xh] + patch_i = patch_i.clone() if is_tensor else patch_i.copy() # don't want to modify while iterating mixed_i[:, yl:yh, xl:xh] = mixed_j[:, yl:yh, xl:xh] mixed_j[:, yl:yh, xl:xh] = patch_i lam_batch[i] = lam else: - mixed_temp = mixed_i.astype(np.float32) * lam + mixed_j.astype(np.float32) * (1 - lam) - mixed_j = mixed_j.astype(np.float32) * lam + mixed_i.astype(np.float32) * (1 - lam) + mixed_temp = blend(mixed_i, mixed_j, lam, is_tensor, round_output) + mixed_j = blend(mixed_j, mixed_i, lam, is_tensor, round_output) mixed_i = mixed_temp - np.rint(mixed_j, out=mixed_j) - np.rint(mixed_i, 
out=mixed_i) - output[i] += torch.from_numpy(mixed_i.astype(np.uint8)) - output[j] += torch.from_numpy(mixed_j.astype(np.uint8)) + if is_tensor: + mixed_i = mixed_i.to(dtype=output.dtype) + mixed_j = mixed_j.to(dtype=output.dtype) + else: + mixed_i = torch.from_numpy(mixed_i.astype(np.uint8)) + mixed_j = torch.from_numpy(mixed_j.astype(np.uint8)) + output[i].copy_(mixed_i) + output[j].copy_(mixed_j) lam_batch = np.concatenate((lam_batch, lam_batch[::-1])) return torch.tensor(lam_batch).unsqueeze(1) - def _mix_batch_collate(self, output, batch): + def _mix_batch_collate(self, output, batch, is_tensor=False): batch_size = len(batch) lam, use_cutmix = self._params_per_batch() + round_output = output.dtype == torch.uint8 if use_cutmix: (yl, yh, xl, xh), lam = cutmix_bbox_and_lam( output.shape, lam, ratio_minmax=self.cutmix_minmax, correct_lam=self.correct_lam) @@ -288,12 +310,12 @@ def _mix_batch_collate(self, output, batch): mixed = batch[i][0] if lam != 1.: if use_cutmix: - mixed = mixed.copy() # don't want to modify the original while iterating + mixed = mixed.clone() if is_tensor else mixed.copy() # don't want to modify while iterating mixed[:, yl:yh, xl:xh] = batch[j][0][:, yl:yh, xl:xh] else: - mixed = mixed.astype(np.float32) * lam + batch[j][0].astype(np.float32) * (1 - lam) - np.rint(mixed, out=mixed) - output[i] += torch.from_numpy(mixed.astype(np.uint8)) + mixed = blend(mixed, batch[j][0], lam, is_tensor, round_output) + mixed = mixed.to(dtype=output.dtype) if is_tensor else torch.from_numpy(mixed.astype(np.uint8)) + output[i].copy_(mixed) return lam def __call__(self, batch, _=None): @@ -302,13 +324,15 @@ def __call__(self, batch, _=None): half = 'half' in self.mode if half: batch_size //= 2 - output = torch.zeros((batch_size, *batch[0][0].shape), dtype=torch.uint8) + is_tensor = isinstance(batch[0][0], torch.Tensor) + output_dtype = batch[0][0].dtype if is_tensor else torch.uint8 # always uint8 if numpy src + output = torch.zeros((batch_size, *batch[0][0].shape), dtype=output_dtype) if self.mode == 'elem' or self.mode == 'half': - lam = self._mix_elem_collate(output, batch, half=half) + lam = self._mix_elem_collate(output, batch, half=half, is_tensor=is_tensor) elif self.mode == 'pair': - lam = self._mix_pair_collate(output, batch) + lam = self._mix_pair_collate(output, batch, is_tensor=is_tensor) else: - lam = self._mix_batch_collate(output, batch) + lam = self._mix_batch_collate(output, batch, is_tensor=is_tensor) target = torch.tensor([b[1] for b in batch], dtype=torch.int64) target = mixup_target(target, self.num_classes, lam, self.label_smoothing, device='cpu') target = target[:batch_size] diff --git a/timm/data/prefetcher_cuda.py b/timm/data/prefetcher_cuda.py index 4f1c4e10af..9432df5949 100644 --- a/timm/data/prefetcher_cuda.py +++ b/timm/data/prefetcher_cuda.py @@ -7,25 +7,34 @@ class PrefetcherCuda: - def __init__(self, - loader, - mean=IMAGENET_DEFAULT_MEAN, - std=IMAGENET_DEFAULT_STD, - fp16=False, - re_prob=0., - re_mode='const', - re_count=1, - re_num_splits=0): + def __init__( + self, + loader, + device: torch.device = torch.device('cuda'), + dtype=torch.float32, + normalize=True, + normalize_shape=(1, 3, 1, 1), + mean=IMAGENET_DEFAULT_MEAN, + std=IMAGENET_DEFAULT_STD, + num_aug_splits=0, + re_prob=0., + re_mode='const', + re_count=1 + ): self.loader = loader - self.mean = torch.tensor([x * 255 for x in mean]).cuda().view(1, 3, 1, 1) - self.std = torch.tensor([x * 255 for x in std]).cuda().view(1, 3, 1, 1) - self.fp16 = fp16 - if fp16: - self.mean = self.mean.half() 
- self.std = self.std.half() + self.device = device + self.dtype = dtype + if normalize: + self.mean = torch.tensor( + [x * 255 for x in mean], dtype=self.dtype, device=self.device).view(normalize_shape) + self.std = torch.tensor( + [x * 255 for x in std], dtype=self.dtype, device=self.device).view(normalize_shape) + else: + self.mean = None + self.std = None if re_prob > 0.: self.random_erasing = RandomErasing( - probability=re_prob, mode=re_mode, max_count=re_count, num_splits=re_num_splits) + probability=re_prob, mode=re_mode, count=re_count, num_splits=num_aug_splits, device=device) else: self.random_erasing = None @@ -35,12 +44,11 @@ def __iter__(self): for next_input, next_target in self.loader: with torch.cuda.stream(stream): - next_input = next_input.cuda(non_blocking=True) - next_target = next_target.cuda(non_blocking=True) - if self.fp16: - next_input = next_input.half().sub_(self.mean).div_(self.std) - else: - next_input = next_input.float().sub_(self.mean).div_(self.std) + next_input = next_input.to(device=self.device, non_blocking=True) + next_input = next_input.to(dtype=self.dtype) + if self.mean is not None: + next_input.sub_(self.mean).div_(self.std) + next_target = next_target.to(device=self.device, non_blocking=True) if self.random_erasing is not None: next_input = self.random_erasing(next_input) @@ -76,4 +84,4 @@ def mixup_enabled(self): @mixup_enabled.setter def mixup_enabled(self, x): if isinstance(self.loader.collate_fn, FastCollateMixup): - self.loader.collate_fn.mixup_enabled = x \ No newline at end of file + self.loader.collate_fn.mixup_enabled = x diff --git a/timm/data/random_erasing.py b/timm/data/random_erasing.py index 78967d105d..65d085a988 100644 --- a/timm/data/random_erasing.py +++ b/timm/data/random_erasing.py @@ -38,21 +38,20 @@ class RandomErasing: 'const' - erase block is constant color of 0 for all channels 'rand' - erase block is same per-channel random (normal) color 'pixel' - erase block is per-pixel random (normal) color - max_count: maximum number of erasing blocks per image, area per box is scaled by count. + count: maximum number of erasing blocks per image, area per box is scaled by count. per-image count is randomly chosen between 1 and this value. 
""" def __init__( self, probability=0.5, min_area=0.02, max_area=1/3, min_aspect=0.3, max_aspect=None, - mode='const', min_count=1, max_count=None, num_splits=0, device='cuda'): + mode='const', count=1, num_splits=0): self.probability = probability self.min_area = min_area self.max_area = max_area max_aspect = max_aspect or 1 / min_aspect self.log_aspect_ratio = (math.log(min_aspect), math.log(max_aspect)) - self.min_count = min_count - self.max_count = max_count or min_count + self.count = count self.num_splits = num_splits mode = mode.lower() self.rand_color = False @@ -63,14 +62,13 @@ def __init__( self.per_pixel = True # per pixel random normal else: assert not mode or mode == 'const' - self.device = device def _erase(self, img, chan, img_h, img_w, dtype): + device = img.device if random.random() > self.probability: return area = img_h * img_w - count = self.min_count if self.min_count == self.max_count else \ - random.randint(self.min_count, self.max_count) + count = random.randint(1, self.count) if self.count > 1 else self.count for _ in range(count): for attempt in range(10): target_area = random.uniform(self.min_area, self.max_area) * area / count @@ -81,17 +79,76 @@ def _erase(self, img, chan, img_h, img_w, dtype): top = random.randint(0, img_h - h) left = random.randint(0, img_w - w) img[:, top:top + h, left:left + w] = _get_pixels( - self.per_pixel, self.rand_color, (chan, h, w), - dtype=dtype, device=self.device) + self.per_pixel, self.rand_color, (chan, h, w), dtype=dtype, device=device) break - def __call__(self, input): - if len(input.size()) == 3: - self._erase(input, *input.size(), input.dtype) + def __call__(self, x): + if len(x.size()) == 3: + self._erase(x, *x.shape, x.dtype) else: - batch_size, chan, img_h, img_w = input.size() + batch_size, chan, img_h, img_w = x.shape # skip first slice of batch if num_splits is set (for clean portion of samples) batch_start = batch_size // self.num_splits if self.num_splits > 1 else 0 for i in range(batch_start, batch_size): - self._erase(input[i], chan, img_h, img_w, input.dtype) - return input + self._erase(x[i], chan, img_h, img_w, x.dtype) + return x + + +class RandomErasingMasked: + """ Randomly selects a rectangle region in an image and erases its pixels. + 'Random Erasing Data Augmentation' by Zhong et al. + See https://arxiv.org/pdf/1708.04896.pdf + + This variant of RandomErasing is intended to be applied to either a batch + or single image tensor after it has been normalized by dataset mean and std. + Args: + probability: Probability that the Random Erasing operation will be performed for each box (count) + min_area: Minimum percentage of erased area wrt input image area. + max_area: Maximum percentage of erased area wrt input image area. + min_aspect: Minimum aspect ratio of erased area. + count: maximum number of erasing blocks per image, area per box is scaled by count. + per-image count is between 0 and this value. + """ + + def __init__( + self, + probability=0.5, min_area=0.02, max_area=1/3, min_aspect=0.3, max_aspect=None, + mode='const', count=1, num_splits=0): + self.probability = probability + self.min_area = min_area + self.max_area = max_area + max_aspect = max_aspect or 1 / min_aspect + self.log_aspect_ratio = (math.log(min_aspect), math.log(max_aspect)) + self.mode = mode # FIXME currently ignored, add back options besides normal mean=0, std=1 noise? 
+ self.count = count + self.num_splits = num_splits + + @torch.no_grad() + def __call__(self, x: torch.Tensor) -> torch.Tensor: + device = x.device + batch_size, _, img_h, img_w = x.shape + batch_start = batch_size // self.num_splits if self.num_splits > 1 else 0 + + # NOTE simplified from v1 with with one count value and same prob applied for all + enable = (torch.empty((batch_size, self.count), device=device).uniform_() < self.probability).float() + enable = enable / enable.sum(dim=1, keepdim=True).clamp(min=1) + target_area = torch.empty( + (batch_size, self.count), device=device).uniform_(self.min_area, self.max_area) * enable + aspect_ratio = torch.empty((batch_size, self.count), device=device).uniform_(*self.log_aspect_ratio).exp() + h_coord = torch.arange(0, img_h, device=device).unsqueeze(-1).expand(-1, self.count).float() + w_coord = torch.arange(0, img_w, device=device).unsqueeze(-1).expand(-1, self.count).float() + h_mid = torch.rand((batch_size, self.count), device=device) * img_h + w_mid = torch.rand((batch_size, self.count), device=device) * img_w + noise = torch.empty_like(x[0]).normal_() + + for i in range(batch_start, batch_size): + h_half = (img_h / 2) * torch.sqrt(target_area[i] * aspect_ratio[i]) # 1/2 box h + h_mask = (h_coord > (h_mid[i] - h_half)) & (h_coord < (h_mid[i] + h_half)) + w_half = (img_w / 2) * torch.sqrt(target_area[i] / aspect_ratio[i]) # 1/2 box w + w_mask = (w_coord > (w_mid[i] - w_half)) & (w_coord < (w_mid[i] + w_half)) + #mask = (h_mask.unsqueeze(1) & w_mask.unsqueeze(0)).any(dim=-1) + #x[i].copy_(torch.where(mask, noise, x[i])) + mask = ~(h_mask.unsqueeze(1) & w_mask.unsqueeze(0)).any(dim=-1) + x[i] = x[i].where(mask, noise) + #x[i].masked_scatter_(mask, noise) + return x diff --git a/timm/data/transforms.py b/timm/data/transforms.py index 4220304f66..03f0e8259c 100644 --- a/timm/data/transforms.py +++ b/timm/data/transforms.py @@ -1,5 +1,7 @@ import torch import torchvision.transforms.functional as F +from torchvision.transforms import InterpolationMode + from PIL import Image import warnings import math @@ -30,29 +32,40 @@ def __call__(self, pil_img): return torch.from_numpy(np_img).to(dtype=self.dtype) -_pil_interpolation_to_str = { - Image.NEAREST: 'PIL.Image.NEAREST', - Image.BILINEAR: 'PIL.Image.BILINEAR', - Image.BICUBIC: 'PIL.Image.BICUBIC', - Image.LANCZOS: 'PIL.Image.LANCZOS', - Image.HAMMING: 'PIL.Image.HAMMING', - Image.BOX: 'PIL.Image.BOX', -} +class ToTensorNormalize: + def __init__(self, mean, std, dtype=torch.float32, device=torch.device('cpu')): + self.dtype = dtype + mean = torch.as_tensor(mean, dtype=dtype, device=device) + std = torch.as_tensor(std, dtype=dtype, device=device) + if (std == 0).any(): + raise ValueError('std evaluated to zero after conversion to {}, leading to division by zero.'.format(dtype)) + if mean.ndim == 1: + mean = mean.view(-1, 1, 1) + if std.ndim == 1: + std = std.view(-1, 1, 1) + self.mean = mean + self.std = std -def _pil_interp(method): - if method == 'bicubic': - return Image.BICUBIC - elif method == 'lanczos': - return Image.LANCZOS - elif method == 'hamming': - return Image.HAMMING - else: - # default bilinear, do we want to allow nearest? 
- return Image.BILINEAR + def __call__(self, pil_img): + mode_to_nptype = {'I': np.int32, 'I;16': np.int16, 'F': np.float32} + img = torch.from_numpy( + np.array(pil_img, mode_to_nptype.get(pil_img.mode, np.uint8)) + ) + if pil_img.mode == '1': + img = 255 * img + img = img.view(pil_img.size[1], pil_img.size[0], len(pil_img.getbands())) + img = img.permute((2, 0, 1)) + if isinstance(img, torch.ByteTensor): + img = img.to(self.dtype) + img.sub_(self.mean * 255.).div_(self.std * 255.) + else: + img = img.to(self.dtype) + img.sub_(self.mean).div_(self.std) + return img -_RANDOM_INTERPOLATION = (Image.BILINEAR, Image.BICUBIC) +_RANDOM_INTERPOLATION = (InterpolationMode.BILINEAR, InterpolationMode.BICUBIC) class RandomResizedCropAndInterpolation: @@ -82,7 +95,7 @@ def __init__(self, size, scale=(0.08, 1.0), ratio=(3. / 4., 4. / 3.), if interpolation == 'random': self.interpolation = _RANDOM_INTERPOLATION else: - self.interpolation = _pil_interp(interpolation) + self.interpolation = InterpolationMode(interpolation) self.scale = scale self.ratio = ratio @@ -146,9 +159,9 @@ def __call__(self, img): def __repr__(self): if isinstance(self.interpolation, (tuple, list)): - interpolate_str = ' '.join([_pil_interpolation_to_str[x] for x in self.interpolation]) + interpolate_str = ' '.join([x.value for x in self.interpolation]) else: - interpolate_str = _pil_interpolation_to_str[self.interpolation] + interpolate_str = self.interpolation.value format_string = self.__class__.__name__ + '(size={0}'.format(self.size) format_string += ', scale={0}'.format(tuple(round(s, 4) for s in self.scale)) format_string += ', ratio={0}'.format(tuple(round(r, 4) for r in self.ratio)) diff --git a/timm/data/transforms_factory.py b/timm/data/transforms_factory.py index 16e08a39f4..1c8d15e2d4 100644 --- a/timm/data/transforms_factory.py +++ b/timm/data/transforms_factory.py @@ -4,59 +4,50 @@ Hacked together by / Copyright 2020 Ross Wightman """ import math +from typing import Union, Tuple import torch from torchvision import transforms -from timm.data.constants import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, DEFAULT_CROP_PCT from timm.data.auto_augment import rand_augment_transform, augment_and_mix_transform, auto_augment_transform -from timm.data.transforms import _pil_interp, RandomResizedCropAndInterpolation, ToNumpy, ToTensor +from timm.data.config import PreprocessCfg, AugCfg +from timm.data.constants import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, DEFAULT_CROP_PCT from timm.data.random_erasing import RandomErasing +from timm.data.transforms import RandomResizedCropAndInterpolation, ToNumpy, ToTensorNormalize def transforms_noaug_train( - img_size=224, + img_size: Union[int, Tuple[int]] = 224, interpolation='bilinear', - use_prefetcher=False, mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, + normalize=False, ): if interpolation == 'random': # random interpolation not supported with no-aug interpolation = 'bilinear' tfl = [ - transforms.Resize(img_size, _pil_interp(interpolation)), + transforms.Resize(img_size, transforms.InterpolationMode(interpolation)), transforms.CenterCrop(img_size) ] - if use_prefetcher: - # prefetcher and collate will handle tensor conversion and norm - tfl += [ToNumpy()] - else: + if normalize: tfl += [ transforms.ToTensor(), - transforms.Normalize( - mean=torch.tensor(mean), - std=torch.tensor(std)) + transforms.Normalize(mean=torch.tensor(mean), std=torch.tensor(std)) ] + else: + # (pre)fetcher and collate will handle tensor conversion and normalize + tfl += [ToNumpy()] return 
transforms.Compose(tfl) def transforms_imagenet_train( - img_size=224, - scale=None, - ratio=None, - hflip=0.5, - vflip=0., - color_jitter=0.4, - auto_augment=None, + img_size: Union[int, Tuple[int]] = 224, interpolation='random', - use_prefetcher=False, mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, - re_prob=0., - re_mode='const', - re_count=1, - re_num_splits=0, + aug_cfg=AugCfg(), + normalize=False, separate=False, ): """ @@ -66,18 +57,24 @@ def transforms_imagenet_train( * a portion of the data through the secondary transform * normalizes and converts the branches above with the third, final transform """ - scale = tuple(scale or (0.08, 1.0)) # default imagenet scale range - ratio = tuple(ratio or (3./4., 4./3.)) # default imagenet ratio range + scale_range = tuple(aug_cfg.scale_range or (0.08, 1.0)) # default imagenet scale range + ratio_range = tuple(aug_cfg.ratio_range or (3. / 4., 4. / 3.)) # default imagenet ratio range + + # 'primary' train transforms include random resize + crop w/ optional horizontal and vertical flipping aug. + # This is the core of standard ImageNet ResNet and Inception pre-processing primary_tfl = [ - RandomResizedCropAndInterpolation(img_size, scale=scale, ratio=ratio, interpolation=interpolation)] - if hflip > 0.: - primary_tfl += [transforms.RandomHorizontalFlip(p=hflip)] - if vflip > 0.: - primary_tfl += [transforms.RandomVerticalFlip(p=vflip)] + RandomResizedCropAndInterpolation(img_size, scale=scale_range, ratio=ratio_range, interpolation=interpolation)] + if aug_cfg.hflip_prob > 0.: + primary_tfl += [transforms.RandomHorizontalFlip(p=aug_cfg.hflip_prob)] + if aug_cfg.vflip_prob > 0.: + primary_tfl += [transforms.RandomVerticalFlip(p=aug_cfg.vflip_prob)] + # 'secondary' transform stage includes either color jitter (could add lighting too) or auto-augmentations + # such as AutoAugment, RandAugment, AugMix, etc secondary_tfl = [] - if auto_augment: - assert isinstance(auto_augment, str) + if aug_cfg.auto_augment: + aa = aug_cfg.auto_augment + assert isinstance(aa, str) if isinstance(img_size, (tuple, list)): img_size_min = min(img_size) else: @@ -87,58 +84,63 @@ def transforms_imagenet_train( img_mean=tuple([min(255, round(255 * x)) for x in mean]), ) if interpolation and interpolation != 'random': - aa_params['interpolation'] = _pil_interp(interpolation) - if auto_augment.startswith('rand'): - secondary_tfl += [rand_augment_transform(auto_augment, aa_params)] - elif auto_augment.startswith('augmix'): + aa_params['interpolation'] = interpolation + if aa.startswith('rand'): + secondary_tfl += [rand_augment_transform(aa, aa_params)] + elif aa.startswith('augmix'): aa_params['translate_pct'] = 0.3 - secondary_tfl += [augment_and_mix_transform(auto_augment, aa_params)] + secondary_tfl += [augment_and_mix_transform(aa, aa_params)] else: - secondary_tfl += [auto_augment_transform(auto_augment, aa_params)] - elif color_jitter is not None: + secondary_tfl += [auto_augment_transform(aa, aa_params)] + elif aug_cfg.color_jitter is not None: # color jitter is enabled when not using AA - if isinstance(color_jitter, (list, tuple)): + cj = aug_cfg.color_jitter + if isinstance(cj, (list, tuple)): # color jitter should be a 3-tuple/list if spec brightness/contrast/saturation # or 4 if also augmenting hue - assert len(color_jitter) in (3, 4) + assert len(cj) in (3, 4) else: # if it's a scalar, duplicate for brightness, contrast, and saturation, no hue - color_jitter = (float(color_jitter),) * 3 - secondary_tfl += [transforms.ColorJitter(*color_jitter)] + cj = 
(float(cj),) * 3 + secondary_tfl += [transforms.ColorJitter(*cj)] + # 'final' transform stage includes normalization, followed by optional random erasing and tensor conversion final_tfl = [] - if use_prefetcher: - # prefetcher and collate will handle tensor conversion and norm - final_tfl += [ToNumpy()] - else: + if normalize: final_tfl += [ - transforms.ToTensor(), - transforms.Normalize( - mean=torch.tensor(mean), - std=torch.tensor(std)) + ToTensorNormalize(mean=mean, std=std) ] - if re_prob > 0.: - final_tfl.append( - RandomErasing(re_prob, mode=re_mode, max_count=re_count, num_splits=re_num_splits, device='cpu')) + if aug_cfg.re_prob > 0.: + final_tfl.append(RandomErasing( + aug_cfg.re_prob, + mode=aug_cfg.re_mode, + count=aug_cfg.re_count, + num_splits=aug_cfg.num_aug_splits)) + else: + # when normalize disabled, (pre)fetcher and collate will handle tensor conversion and normalize + final_tfl += [ToNumpy()] if separate: + # return each transform stage separately return transforms.Compose(primary_tfl), transforms.Compose(secondary_tfl), transforms.Compose(final_tfl) else: return transforms.Compose(primary_tfl + secondary_tfl + final_tfl) def transforms_imagenet_eval( - img_size=224, + img_size: Union[int, Tuple[int]] = 224, crop_pct=None, interpolation='bilinear', - use_prefetcher=False, mean=IMAGENET_DEFAULT_MEAN, - std=IMAGENET_DEFAULT_STD): + std=IMAGENET_DEFAULT_STD, + normalize=False, +): crop_pct = crop_pct or DEFAULT_CROP_PCT if isinstance(img_size, (tuple, list)): assert len(img_size) == 2 if img_size[-1] == img_size[-2]: + # FIXME handle case where img is square and we want non aspect preserving resize # fall-back to older behaviour so Resize scales to shortest edge if target is square scale_size = int(math.floor(img_size[0] / crop_pct)) else: @@ -147,27 +149,87 @@ def transforms_imagenet_eval( scale_size = int(math.floor(img_size / crop_pct)) tfl = [ - transforms.Resize(scale_size, _pil_interp(interpolation)), + transforms.Resize(scale_size, transforms.InterpolationMode(interpolation)), transforms.CenterCrop(img_size), ] - if use_prefetcher: - # prefetcher and collate will handle tensor conversion and norm - tfl += [ToNumpy()] - else: + if normalize: tfl += [ - transforms.ToTensor(), - transforms.Normalize( - mean=torch.tensor(mean), - std=torch.tensor(std)) + ToTensorNormalize(mean=mean, std=std) ] + else: + # (pre)fetcher and collate will handle tensor conversion and normalize + tfl += [ToNumpy()] return transforms.Compose(tfl) +def create_transform_v2( + cfg=PreprocessCfg(), + is_training=False, + normalize=False, + separate=False, + tf_preprocessing=False, +): + """ + + Args: + cfg: Pre-processing configuration + is_training (bool): Create transform for training pre-processing + tf_preprocessing (bool): Use Tensorflow pre-processing (for validation) + normalize (bool): Enable normalization in transforms (otherwise handled by fetcher/pre-fetcher) + separate (bool): Return transforms separated into stages (for train) + + Returns: + + """ + input_size = cfg.input_size + if isinstance(input_size, (tuple, list)): + img_size = input_size[-2:] + else: + img_size = input_size + + if tf_preprocessing: + assert not normalize, "Expecting normalization to be handled in (pre)fetcher w/ TF preprocessing" + assert not separate, "Separate transforms not supported for TF preprocessing" + from timm.data.tf_preprocessing import TfPreprocessTransform + transform = TfPreprocessTransform( + is_training=is_training, size=img_size, interpolation=cfg.interpolation) + else: + if is_training and 
cfg.aug is None: + assert not separate, "Cannot perform split augmentation with no_aug" + transform = transforms_noaug_train( + img_size, + interpolation=cfg.interpolation, + normalize=normalize, + mean=cfg.mean, + std=cfg.std) + elif is_training: + transform = transforms_imagenet_train( + img_size, + interpolation=cfg.interpolation, + mean=cfg.mean, + std=cfg.std, + aug_cfg=cfg.aug, + normalize=normalize, + separate=separate) + else: + assert not separate, "Separate transforms not supported for validation preprocessing" + transform = transforms_imagenet_eval( + img_size, + interpolation=cfg.interpolation, + crop_pct=cfg.crop_pct, + mean=cfg.mean, + std=cfg.std, + normalize=normalize, + ) + + return transform + + def create_transform( input_size, is_training=False, - use_fetcher=False, + use_prefetcher=False, no_aug=False, scale=None, ratio=None, @@ -191,7 +253,8 @@ def create_transform( else: img_size = input_size - if tf_preprocessing and use_fetcher: + normalize_in_transform = not use_prefetcher + if tf_preprocessing and use_prefetcher: assert not separate, "Separate transforms not supported for TF preprocessing" from timm.data.tf_preprocessing import TfPreprocessTransform transform = TfPreprocessTransform( @@ -202,35 +265,41 @@ def create_transform( transform = transforms_noaug_train( img_size, interpolation=interpolation, - use_prefetcher=use_fetcher, mean=mean, - std=std) + std=std, + normalize=normalize_in_transform, + ) elif is_training: - transform = transforms_imagenet_train( - img_size, - scale=scale, - ratio=ratio, - hflip=hflip, - vflip=vflip, + aug_cfg = AugCfg( + scale_range=scale, + ratio_range=ratio, + hflip_prob=hflip, + vflip_prob=vflip, color_jitter=color_jitter, auto_augment=auto_augment, - interpolation=interpolation, - use_prefetcher=use_fetcher, - mean=mean, - std=std, re_prob=re_prob, re_mode=re_mode, re_count=re_count, - re_num_splits=re_num_splits, - separate=separate) + num_aug_splits=re_num_splits, + ) + transform = transforms_imagenet_train( + img_size, + interpolation=interpolation, + mean=mean, + std=std, + aug_cfg=aug_cfg, + normalize=normalize_in_transform, + separate=separate + ) else: - assert not separate, "Separate transforms not supported for validation preprocessing" + assert not separate, "Separate transforms not supported for validation pre-processing" transform = transforms_imagenet_eval( img_size, interpolation=interpolation, - use_prefetcher=use_fetcher, mean=mean, std=std, - crop_pct=crop_pct) + crop_pct=crop_pct, + normalize=normalize_in_transform, + ) return transform diff --git a/timm/models/helpers.py b/timm/models/helpers.py index adfef550dc..39f44c8799 100644 --- a/timm/models/helpers.py +++ b/timm/models/helpers.py @@ -24,13 +24,20 @@ def load_state_dict(checkpoint_path, use_ema=False): if checkpoint_path and os.path.isfile(checkpoint_path): checkpoint = torch.load(checkpoint_path, map_location='cpu') - state_dict_key = 'state_dict' + state_dict_key = '' if isinstance(checkpoint, dict): if use_ema and 'state_dict_ema' in checkpoint: state_dict_key = 'state_dict_ema' - if state_dict_key and state_dict_key in checkpoint: + elif use_ema and 'model_ema' in checkpoint: + state_dict_key = 'model_ema' + elif 'state_dict' in checkpoint: + state_dict_key = 'state_dict' + elif 'model' in checkpoint: + state_dict_key = 'model' + if state_dict_key: + state_dict = checkpoint[state_dict_key] new_state_dict = OrderedDict() - for k, v in checkpoint[state_dict_key].items(): + for k, v in state_dict.items(): # strip `module.` prefix name = k[7:] if 
k.startswith('module') else k new_state_dict[name] = v diff --git a/train.py b/train.py index cca814fd65..1e95c83101 100755 --- a/train.py +++ b/train.py @@ -30,7 +30,8 @@ from timm.bits import initialize_device, setup_model_and_optimizer, DeviceEnv, Monitor, Tracker,\ TrainState, TrainServices, TrainCfg, CheckpointManager, AccuracyTopK, AvgTensor, distribute_bn -from timm.data import create_dataset, create_loader, resolve_data_config, Mixup, FastCollateMixup, AugMixDataset +from timm.data import create_dataset, create_transform_v2, create_loader_v2, resolve_data_config,\ + PreprocessCfg, AugCfg, MixupCfg, AugMixDataset from timm.models import create_model, safe_model_name, convert_splitbn_model from timm.loss import LabelSmoothingCrossEntropy, SoftTargetCrossEntropy, JsdCrossEntropy from timm.optim import optimizer_kwargs @@ -283,10 +284,11 @@ def main(): else: _logger.info('Training with a single process on 1 device.') - mixup_active = args.mixup > 0 or args.cutmix > 0. or args.cutmix_minmax is not None - random_seed(args.seed, 0) # Set all random seeds the same for model/state init (mandatory for XLA) + mixup_active = args.mixup > 0 or args.cutmix > 0. or args.cutmix_minmax is not None + assert args.aug_splits == 0 or args.aug_splits > 1, 'A split of 1 makes no sense' + train_state = setup_train_task(args, dev_env, mixup_active) train_cfg = train_state.train_cfg @@ -421,11 +423,9 @@ def setup_train_task(args, dev_env: DeviceEnv, mixup_active: bool): _logger.info( f'Model {safe_model_name(args.model)} created, param count:{sum([m.numel() for m in model.parameters()])}') - # setup augmentation batch splits for contrastive loss or split bn - assert args.aug_splits == 0 or args.aug_splits > 1, 'A split of 1 makes no sense' # enable split bn (separate bn stats per batch-portion) if args.split_bn: - assert args.aug_splits > 1 or args.resplit + assert args.aug_splits > 1 model = convert_splitbn_model(model, max(args.aug_splits, 2)) train_state = setup_model_and_optimizer( @@ -481,7 +481,7 @@ def setup_train_task(args, dev_env: DeviceEnv, mixup_active: bool): return train_state -def setup_data(args, default_cfg, dev_env, mixup_active): +def setup_data(args, default_cfg, dev_env: DeviceEnv, mixup_active: bool): data_config = resolve_data_config(vars(args), default_cfg=default_cfg, verbose=dev_env.primary) # create the train and eval datasets @@ -489,18 +489,18 @@ def setup_data(args, default_cfg, dev_env, mixup_active): args.dataset, root=args.data_dir, split=args.train_split, is_training=True, batch_size=args.batch_size, repeats=args.epoch_repeats) + dataset_eval = create_dataset( - args.dataset, root=args.data_dir, split=args.val_split, is_training=False, batch_size=args.batch_size) + args.dataset, + root=args.data_dir, split=args.val_split, is_training=False, batch_size=args.batch_size) # setup mixup / cutmix - collate_fn = None + mixup_cfg = None if mixup_active: - mixup_args = dict( - mixup_alpha=args.mixup, cutmix_alpha=args.cutmix, cutmix_minmax=args.cutmix_minmax, + mixup_cfg = MixupCfg( prob=args.mixup_prob, switch_prob=args.mixup_switch_prob, mode=args.mixup_mode, + mixup_alpha=args.mixup, cutmix_alpha=args.cutmix, cutmix_minmax=args.cutmix_minmax, label_smoothing=args.smoothing, num_classes=args.num_classes) - assert not args.aug_splits # collate conflict (need to support deinterleaving in collate mixup) - collate_fn = FastCollateMixup(**mixup_args) # wrap dataset in AugMix helper if args.aug_splits > 1: @@ -510,46 +510,72 @@ def setup_data(args, default_cfg, dev_env, mixup_active): 
train_interpolation = args.train_interpolation if args.no_aug or not train_interpolation: train_interpolation = data_config['interpolation'] - loader_train = create_loader( - dataset_train, + + if args.no_aug: + train_aug_cfg = None + else: + train_aug_cfg = AugCfg( + re_prob=args.reprob, + re_mode=args.remode, + re_count=args.recount, + ratio_range=args.ratio, + scale_range=args.scale, + hflip_prob=args.hflip, + vflip_prob=args.vflip, + color_jitter=args.color_jitter, + auto_augment=args.aa, + num_aug_splits=args.aug_splits, + ) + + train_pp_cfg = PreprocessCfg( input_size=data_config['input_size'], - batch_size=args.batch_size, - is_training=True, - no_aug=args.no_aug, - re_prob=args.reprob, - re_mode=args.remode, - re_count=args.recount, - re_split=args.resplit, - scale=args.scale, - ratio=args.ratio, - hflip=args.hflip, - vflip=args.vflip, - color_jitter=args.color_jitter, - auto_augment=args.aa, - num_aug_splits=args.aug_splits, interpolation=train_interpolation, + crop_pct=data_config['crop_pct'], mean=data_config['mean'], std=data_config['std'], + aug=train_aug_cfg, + ) + + # if using PyTorch XLA and RandomErasing is enabled, we must normalize and do RE in transforms on CPU + normalize_in_transform = dev_env.type_xla and args.reprob > 0 + + dataset_train.transform = create_transform_v2( + cfg=train_pp_cfg, is_training=True, normalize=normalize_in_transform) + + loader_train = create_loader_v2( + dataset_train, + batch_size=args.batch_size, + is_training=True, + normalize=not normalize_in_transform, + pp_cfg=train_pp_cfg, + mix_cfg=mixup_cfg, num_workers=args.workers, - collate_fn=collate_fn, pin_memory=args.pin_mem, use_multi_epochs_loader=args.use_multi_epochs_loader ) + eval_pp_cfg = PreprocessCfg( + input_size=data_config['input_size'], + interpolation=data_config['interpolation'], + crop_pct=data_config['crop_pct'], + mean=data_config['mean'], + std=data_config['std'], + ) + + dataset_eval.transform = create_transform_v2( + cfg=eval_pp_cfg, is_training=False, normalize=normalize_in_transform) + eval_workers = args.workers if 'tfds' in args.dataset: # FIXME reduce validation issues when using TFDS w/ workers and distributed training eval_workers = min(2, args.workers) - loader_eval = create_loader( + loader_eval = create_loader_v2( dataset_eval, - input_size=data_config['input_size'], batch_size=args.validation_batch_size_multiplier * args.batch_size, is_training=False, - interpolation=data_config['interpolation'], - mean=data_config['mean'], - std=data_config['std'], + normalize=not normalize_in_transform, + pp_cfg=eval_pp_cfg, num_workers=eval_workers, - crop_pct=data_config['crop_pct'], pin_memory=args.pin_mem, ) return data_config, loader_eval, loader_train @@ -700,8 +726,12 @@ def evaluate( loss = loss_fn(output, target) # FIXME, explictly marking step for XLA use since I'm not using the parallel xm loader - # need to investigate whether parallel loader wrapper is helpful on tpu-vm or only usefor for 2-vm setup. - dev_env.mark_step() + # need to investigate whether parallel loader wrapper is helpful on tpu-vm or only use for 2-vm setup. 
+ if dev_env.type_xla: + dev_env.mark_step() + elif dev_env.type_cuda: + dev_env.synchronize() + tracker.mark_iter_step_end() losses_m.update(loss, output.size(0)) accuracy_m.update(output, target) diff --git a/validate.py b/validate.py index cee359c395..f4dc84e898 100755 --- a/validate.py +++ b/validate.py @@ -20,7 +20,8 @@ from timm.bits import initialize_device, Tracker, Monitor, AccuracyTopK, AvgTensor from timm.models import create_model, apply_test_time_pool, load_checkpoint, is_model, list_models -from timm.data import create_dataset, create_loader, resolve_data_config, RealLabelsImagenet +from timm.data import create_dataset, create_transform_v2, create_loader_v2, resolve_data_config, RealLabelsImagenet, \ + PreprocessCfg from timm.utils import natural_key, setup_default_logging @@ -141,18 +142,22 @@ def validate(args): else: real_labels = None - crop_pct = 1.0 if test_time_pool else data_config['crop_pct'] - loader = create_loader( - dataset, + eval_pp_cfg = PreprocessCfg( input_size=data_config['input_size'], - batch_size=args.batch_size, interpolation=data_config['interpolation'], + crop_pct=1.0 if test_time_pool else data_config['crop_pct'], mean=data_config['mean'], std=data_config['std'], + ) + + dataset.transform = create_transform_v2(cfg=eval_pp_cfg, is_training=False) + + loader = create_loader_v2( + dataset, + batch_size=args.batch_size, + pp_cfg=eval_pp_cfg, num_workers=args.workers, - crop_pct=crop_pct, - pin_memory=args.pin_mem, - tf_preprocessing=args.tf_preprocessing) + pin_memory=args.pin_mem) logger = Monitor(logger=_logger) tracker = Tracker() @@ -175,16 +180,17 @@ def validate(args): loss = criterion(output, target) if dev_env.type_cuda: - torch.cuda.synchronize() + dev_env.synchronize() tracker.mark_iter_step_end() - losses.update(loss.detach(), sample.size(0)) + if dev_env.type_xla: + dev_env.mark_step() + if real_labels is not None: real_labels.add_result(output) - accuracy.update(output.detach(), target) - if dev_env.type_xla: - dev_env.mark_step() + losses.update(loss.detach(), sample.size(0)) + accuracy.update(output.detach(), target) tracker.mark_iter() if step_idx % args.log_freq == 0: @@ -212,7 +218,7 @@ def validate(args): top5=round(top5a, 4), top5_err=round(100 - top5a, 4), param_count=round(param_count / 1e6, 2), img_size=data_config['input_size'][-1], - cropt_pct=crop_pct, + cropt_pct=eval_pp_cfg.crop_pct, interpolation=data_config['interpolation']) logger.log_phase(phase='eval', name_map={'top1': 'Acc@1', 'top5': 'Acc@5'}, **results) From cb621e0f007c0d8e67ca5915fe5ad2e133b66114 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 13 Aug 2021 12:54:12 -0700 Subject: [PATCH 19/61] Remove print, arg order --- timm/data/fetcher.py | 1 - timm/data/prefetcher_cuda.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/timm/data/fetcher.py b/timm/data/fetcher.py index c833b5964c..42cf791260 100644 --- a/timm/data/fetcher.py +++ b/timm/data/fetcher.py @@ -51,7 +51,6 @@ def __init__( self._loader = pl.MpDeviceLoader(loader, device) else: self._loader = loader - print('re', self.random_erasing, self.mean, self.std) def __iter__(self): for sample, target in self._loader: diff --git a/timm/data/prefetcher_cuda.py b/timm/data/prefetcher_cuda.py index 9432df5949..0b36c027f2 100644 --- a/timm/data/prefetcher_cuda.py +++ b/timm/data/prefetcher_cuda.py @@ -16,10 +16,10 @@ def __init__( normalize_shape=(1, 3, 1, 1), mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, - num_aug_splits=0, re_prob=0., re_mode='const', - re_count=1 + re_count=1, 
+ num_aug_splits=0, ): self.loader = loader self.device = device From b76b48e8e9071061ffbf9852137b99ddc0cc33e2 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Wed, 18 Aug 2021 11:14:51 -0700 Subject: [PATCH 20/61] Update optimizer creation for master optimizer changes --- timm/bits/train_setup.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/timm/bits/train_setup.py b/timm/bits/train_setup.py index 5aca908f54..87c1b5c519 100644 --- a/timm/bits/train_setup.py +++ b/timm/bits/train_setup.py @@ -72,9 +72,10 @@ def setup_model_and_optimizer( 'zero initialized BN layers (enabled by default for ResNets) while sync-bn enabled.') if isinstance(optimizer, Callable): - optimizer = optimizer(model=model, **optimizer_cfg) + # FIXME this interface needs to be figured out, model, model and/or parameters, or just parameters? + optimizer = optimizer(model, **optimizer_cfg) else: - optimizer = create_optimizer_v2(model=model, **optimizer_cfg) + optimizer = create_optimizer_v2(model, **optimizer_cfg) updater = create_updater( model=model, From 0d82876132846311f75fa752560c43ad4bd7f34b Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Wed, 18 Aug 2021 11:15:32 -0700 Subject: [PATCH 21/61] Add comment for reference re PyTorch XLA 'race' issue --- train.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/train.py b/train.py index 1e95c83101..217f9a8856 100755 --- a/train.py +++ b/train.py @@ -732,6 +732,11 @@ def evaluate( elif dev_env.type_cuda: dev_env.synchronize() + # FIXME uncommenting this fixes race btw model `output`/`loss` and loss_m/accuracy_m meter input + # for PyTorch XLA GPU use. + # This issue does not exist for normal PyTorch w/ GPU (CUDA) or PyTorch XLA w/ TPU. + # loss.item() + tracker.mark_iter_step_end() losses_m.update(loss, output.size(0)) accuracy_m.update(output, target) From f2e14685a8729f009ebf4d6128ceb4bcd5b95d22 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Sun, 22 Aug 2021 13:15:27 -0700 Subject: [PATCH 22/61] Add force-cpu flag for train/validate, fix CPU fallback for device init, remove old force cpu flag for EMA model weights --- timm/bits/device_env_factory.py | 6 +++++- train.py | 12 ++++++------ validate.py | 8 +++++--- 3 files changed, 16 insertions(+), 10 deletions(-) diff --git a/timm/bits/device_env_factory.py b/timm/bits/device_env_factory.py index bb92daab3a..620e400d82 100644 --- a/timm/bits/device_env_factory.py +++ b/timm/bits/device_env_factory.py @@ -20,8 +20,12 @@ def initialize_device(force_cpu: bool = False, **kwargs) -> DeviceEnv: elif is_cuda_available(): denv = DeviceEnvCuda(**kwargs) + # CPU fallback if denv is None: - denv = DeviceEnv() + if is_xla_available('CPU'): + denv = DeviceEnvXla(device_type='CPU', **kwargs) + else: + denv = DeviceEnv() _logger.info(f'Initialized device {denv.device}. 
' f'Rank: {denv.global_rank} ({denv.local_rank}) of {denv.world_size}.') diff --git a/train.py b/train.py index 217f9a8856..cad41bca5f 100755 --- a/train.py +++ b/train.py @@ -60,8 +60,8 @@ help='dataset train split (default: train)') parser.add_argument('--val-split', metavar='NAME', default='validation', help='dataset validation split (default: validation)') -parser.add_argument('--model', default='resnet101', type=str, metavar='MODEL', - help='Name of model to train (default: "countception"') +parser.add_argument('--model', default='resnet50', type=str, metavar='MODEL', + help='Name of model to train (default: "resnet50"') parser.add_argument('--pretrained', action='store_true', default=False, help='Start with pretrained version of specified network (if avail)') parser.add_argument('--initial-checkpoint', default='', type=str, metavar='PATH', @@ -215,8 +215,6 @@ # Model Exponential Moving Average parser.add_argument('--model-ema', action='store_true', default=False, help='Enable tracking moving average of model weights') -parser.add_argument('--model-ema-force-cpu', action='store_true', default=False, - help='Force ema to be tracked on CPU, rank=0 node only. Disables EMA validation.') parser.add_argument('--model-ema-decay', type=float, default=0.9998, help='decay factor for model weights moving average (default: 0.9998)') @@ -252,6 +250,8 @@ help='use the multi-epochs-loader to save time at the beginning of every epoch') parser.add_argument('--torchscript', dest='torchscript', action='store_true', help='convert model torchscript for inference') +parser.add_argument('--force-cpu', action='store_true', default=False, + help='Force CPU to be used even if HW accelerator exists.') parser.add_argument('--log-wandb', action='store_true', default=False, help='log training and validation metrics to wandb') @@ -277,7 +277,7 @@ def main(): setup_default_logging() args, args_text = _parse_args() - dev_env = initialize_device(amp=args.amp, channels_last=args.channels_last) + dev_env = initialize_device(force_cpu=args.force_cpu, amp=args.amp, channels_last=args.channels_last) if dev_env.distributed: _logger.info('Training in distributed mode with multiple processes, 1 device per process. Process %d, total %d.' 
% (dev_env.global_rank, dev_env.world_size)) @@ -364,7 +364,7 @@ def main(): services.monitor, dev_env) - if train_state.model_ema is not None and not args.model_ema_force_cpu: + if train_state.model_ema is not None: if dev_env.distributed and args.dist_bn in ('broadcast', 'reduce'): distribute_bn(train_state.model_ema, args.dist_bn == 'reduce', dev_env) diff --git a/validate.py b/validate.py index f4dc84e898..f91891718e 100755 --- a/validate.py +++ b/validate.py @@ -35,8 +35,8 @@ help='dataset type (default: ImageFolder/ImageTar if empty)') parser.add_argument('--split', metavar='NAME', default='validation', help='dataset split (default: validation)') -parser.add_argument('--model', '-m', metavar='NAME', default='dpn92', - help='model architecture (default: dpn92)') +parser.add_argument('--model', '-m', metavar='NAME', default='resnet50', + help='model architecture (default: resnet50)') parser.add_argument('-j', '--workers', default=4, type=int, metavar='N', help='number of data loading workers (default: 2)') parser.add_argument('-b', '--batch-size', default=256, type=int, @@ -87,13 +87,15 @@ help='Real labels JSON file for imagenet evaluation') parser.add_argument('--valid-labels', default='', type=str, metavar='FILENAME', help='Valid label indices txt file for validation of partial label space') +parser.add_argument('--force-cpu', action='store_true', default=False, + help='Force CPU to be used even if HW accelerator exists.') def validate(args): # might as well try to validate something args.pretrained = args.pretrained or not args.checkpoint - dev_env = initialize_device(amp=args.amp) + dev_env = initialize_device(force_cpu=args.force_cpu, amp=args.amp) # create model model = create_model( From 3581affb7769ec3554b2ea3d242c83db3f92a960 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Sun, 5 Sep 2021 16:05:31 -0700 Subject: [PATCH 23/61] Update train.py with some flags related to scheduler tweaks, fix best checkpoint bug. 
--- timm/bits/checkpoint_manager.py | 2 +- train.py | 24 ++++++++++++++++++------ 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/timm/bits/checkpoint_manager.py b/timm/bits/checkpoint_manager.py index b2c692cb44..a867e22990 100644 --- a/timm/bits/checkpoint_manager.py +++ b/timm/bits/checkpoint_manager.py @@ -193,7 +193,7 @@ def save_checkpoint(self, train_state: TrainState, metrics: Optional[Dict[str, f best_save_path = os.path.join(self.checkpoint_dir, 'best' + self.extension) self._duplicate(last_save_path, best_save_path) - return None if self.best_checkpoint is None else curr_checkpoint + return curr_checkpoint if self.best_checkpoint is None else self.best_checkpoint def save_recovery(self, train_state: TrainState): tmp_save_path = os.path.join(self.recovery_dir, 'recovery_tmp' + self.extension) diff --git a/train.py b/train.py index cad41bca5f..43a8108ab2 100755 --- a/train.py +++ b/train.py @@ -33,7 +33,7 @@ from timm.data import create_dataset, create_transform_v2, create_loader_v2, resolve_data_config,\ PreprocessCfg, AugCfg, MixupCfg, AugMixDataset from timm.models import create_model, safe_model_name, convert_splitbn_model -from timm.loss import LabelSmoothingCrossEntropy, SoftTargetCrossEntropy, JsdCrossEntropy +from timm.loss import * from timm.optim import optimizer_kwargs from timm.scheduler import create_scheduler from timm.utils import setup_default_logging, random_seed, get_outdir, unwrap_model @@ -121,8 +121,12 @@ help='learning rate noise std-dev (default: 1.0)') parser.add_argument('--lr-cycle-mul', type=float, default=1.0, metavar='MULT', help='learning rate cycle len multiplier (default: 1.0)') +parser.add_argument('--lr-cycle-decay', type=float, default=0.5, metavar='MULT', + help='amount to decay each learning rate cycle (default: 0.5)') parser.add_argument('--lr-cycle-limit', type=int, default=1, metavar='N', - help='learning rate cycle limit') + help='learning rate cycle limit, cycles enabled if > 1') +parser.add_argument('--lr-k-decay', type=float, default=1.0, + help='learning rate k-decay for cosine/poly (default: 1.0)') parser.add_argument('--warmup-lr', type=float, default=0.0001, metavar='LR', help='warmup learning rate (default: 0.0001)') parser.add_argument('--min-lr', type=float, default=1e-5, metavar='LR', @@ -161,8 +165,10 @@ help='Use AutoAugment policy. "v0" or "original". (default: None)'), parser.add_argument('--aug-splits', type=int, default=0, help='Number of augmentation splits (default: 0, valid: 0 or >=2)') -parser.add_argument('--jsd', action='store_true', default=False, +parser.add_argument('--jsd-loss', action='store_true', default=False, help='Enable Jensen-Shannon Divergence + CE loss. 
Use with `--aug-splits`.') +parser.add_argument('--bce-loss', action='store_true', default=False, + help='Enable BCE loss w/ Mixup/CutMix use.') parser.add_argument('--reprob', type=float, default=0., metavar='PCT', help='Random erase prob (default: 0.)') parser.add_argument('--remode', type=str, default='const', @@ -448,14 +454,20 @@ def setup_train_task(args, dev_env: DeviceEnv, mixup_active: bool): lr_scheduler.step(train_state.epoch) # setup loss function - if args.jsd: + if args.jsd_loss: assert args.aug_splits > 1 # JSD only valid with aug splits set train_loss_fn = JsdCrossEntropy(num_splits=args.aug_splits, smoothing=args.smoothing) elif mixup_active: # smoothing is handled with mixup target transform - train_loss_fn = SoftTargetCrossEntropy() + if args.bce_loss: + train_loss_fn = nn.BCEWithLogitsLoss() + else: + train_loss_fn = SoftTargetCrossEntropy() elif args.smoothing: - train_loss_fn = LabelSmoothingCrossEntropy(smoothing=args.smoothing) + if args.bce_loss: + train_loss_fn = DenseBinaryCrossEntropy(smoothing=args.smoothing) + else: + train_loss_fn = LabelSmoothingCrossEntropy(smoothing=args.smoothing) else: train_loss_fn = nn.CrossEntropyLoss() eval_loss_fn = nn.CrossEntropyLoss() From 690f31d02d3732767c5a68194e5fdaa52c790fad Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Sat, 23 Oct 2021 20:30:53 -0700 Subject: [PATCH 24/61] Post merge cleanup, restore previous unwrap fn --- timm/utils/model.py | 56 ++++++++++++++++++++++++++------------------- 1 file changed, 32 insertions(+), 24 deletions(-) diff --git a/timm/utils/model.py b/timm/utils/model.py index b9f3e9d384..3fbc9f2576 100644 --- a/timm/utils/model.py +++ b/timm/utils/model.py @@ -7,33 +7,38 @@ import torch from torchvision.ops.misc import FrozenBatchNorm2d -from .model_ema import ModelEma +_SUB_MODULE_ATTR = ('module', 'model') -def unwrap_model(model): - if isinstance(model, ModelEma): - return unwrap_model(model.ema) - else: - return model.module if hasattr(model, 'module') else model + +def unwrap_model(model, recursive=True): + for attr in _SUB_MODULE_ATTR: + sub_module = getattr(model, attr, None) + if sub_module is not None: + return unwrap_model(sub_module) if recursive else sub_module + return model def get_state_dict(model, unwrap_fn=unwrap_model): return unwrap_fn(model).state_dict() -def avg_sq_ch_mean(model, input, output): - "calculate average channel square mean of output activations" - return torch.mean(output.mean(axis=[0,2,3])**2).item() +def avg_sq_ch_mean(model, input, output): + """ calculate average channel square mean of output activations + """ + return torch.mean(output.mean(axis=[0, 2, 3]) ** 2).item() -def avg_ch_var(model, input, output): - "calculate average channel variance of output activations" - return torch.mean(output.var(axis=[0,2,3])).item()\ +def avg_ch_var(model, input, output): + """calculate average channel variance of output activations + """ + return torch.mean(output.var(axis=[0, 2, 3])).item() -def avg_ch_var_residual(model, input, output): - "calculate average channel variance of output activations" - return torch.mean(output.var(axis=[0,2,3])).item() +def avg_ch_var_residual(model, input, output): + """calculate average channel variance of output activations + """ + return torch.mean(output.var(axis=[0, 2, 3])).item() class ActivationStatsHook: @@ -62,15 +67,16 @@ def __init__(self, model, hook_fn_locs, hook_fns): raise ValueError("Please provide `hook_fns` for each `hook_fn_locs`, \ their lengths are different.") self.stats = dict((hook_fn.__name__, []) for hook_fn 
in hook_fns) - for hook_fn_loc, hook_fn in zip(hook_fn_locs, hook_fns): + for hook_fn_loc, hook_fn in zip(hook_fn_locs, hook_fns): self.register_hook(hook_fn_loc, hook_fn) def _create_hook(self, hook_fn): def append_activation_stats(module, input, output): out = hook_fn(module, input, output) self.stats[hook_fn.__name__].append(out) + return append_activation_stats - + def register_hook(self, hook_fn_loc, hook_fn): for name, module in self.model.named_modules(): if not fnmatch.fnmatch(name, hook_fn_loc): @@ -78,17 +84,18 @@ def register_hook(self, hook_fn_loc, hook_fn): module.register_forward_hook(self._create_hook(hook_fn)) -def extract_spp_stats(model, - hook_fn_locs, - hook_fns, - input_shape=[8, 3, 224, 224]): +def extract_spp_stats( + model, + hook_fn_locs, + hook_fns, + input_shape=[8, 3, 224, 224]): """Extract average square channel mean and variance of activations during forward pass to plot Signal Propogation Plots (SPP). Paper: https://arxiv.org/abs/2101.08692 Example Usage: https://gist.github.com/amaarora/6e56942fcb46e67ba203f3009b30d950 - """ + """ x = torch.normal(0., 1., input_shape) hook = ActivationStatsHook(model, hook_fn_locs=hook_fn_locs, hook_fns=hook_fns) _ = model(x) @@ -186,7 +193,7 @@ def _freeze_unfreeze(root_module, submodules=[], include_bn_running_stats=True, named_modules = submodules submodules = [root_module.get_submodule(m) for m in submodules] - if not(len(submodules)): + if not (len(submodules)): named_modules, submodules = list(zip(*root_module.named_children())) for n, m in zip(named_modules, submodules): @@ -201,13 +208,14 @@ def _add_submodule(module, name, submodule): module.get_submodule(split[0]).add_module(split[1], submodule) else: module.add_module(name, submodule) + # Freeze batch norm if mode == 'freeze': res = freeze_batch_norm_2d(m) # It's possible that `m` is a type of BatchNorm in itself, in which case `unfreeze_batch_norm_2d` won't # convert it in place, but will return the converted result. In this case `res` holds the converted # result and we may try to re-assign the named module - if isinstance(m, (torch.nn.modules.batchnorm.BatchNorm2d, torch.nn.modules.batchnorm.SyncBatchNorm)): + if isinstance(m, (torch.nn.modules.batchnorm.BatchNorm2d, torch.nn.modules.batchnorm.SyncBatchNorm)): _add_submodule(root_module, n, res) # Unfreeze batch norm else: From 59a3409182db7e56134556005d1272f5e2df8d0d Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Wed, 3 Nov 2021 17:10:41 -0700 Subject: [PATCH 25/61] Update README.md --- timm/bits/README.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/timm/bits/README.md b/timm/bits/README.md index feacb52335..800647374d 100644 --- a/timm/bits/README.md +++ b/timm/bits/README.md @@ -118,9 +118,8 @@ Or this for imagenet in a local folder, * When PyTorch XLA crashes, you hit a TPU OOM etc, lots of processes get orphaned. Get in the habit of killing all python processes before starting a new train run. * `alias fml='pkill -f python3'` * For TFDS use, due to the way PyTorch IterableDatasets work at the loader level, each worker process builds batches independently -- they are not dequeued and collated across workers. For validation especially, getting all the samples evenly divided across BOTH the distributed processes AND the dataset workers is a bit annoying. For now keeping the num_workers arg (j) low is advisable, especially for very small validation sets. This can limit your throughput though. -* Random erasing for on-device XLA tensors doesn't work. 
XLA isn't compatible with the array slicing approach to my RE impl, currently it's done by default after moving tensors to device. I need to fix.
+* Random erasing works with PyTorch XLA but it must be done on the images before they are moved into tensors on the XLA device. This changes the dataloader pipeline a bit and increases the size of the data being moved to device (float instead of int8) so it has an impact on dataloading speed.
 * There are a number of models using ops that aren't lowered to XLA, this will REALLY slow things down to the point of being unusable. There are flags you can set to debug this, see PyTorch XLA troubleshooting page (https://github.com/pytorch/xla/blob/master/TROUBLESHOOTING.md)
-  * For NFNet models, force the ScaledStdConv `use_layernorm` arg to True, it is lowered, `std_mean` op is not
 * This code doesn't currently work when float16 is forced via `XLA_USE_BF16=1` env arg, it will mess up metrics tensors that overflow in bfloat16. Better controlling model activation vs weight precision vs other tensors is a TODO.
 * I haven't tested this code with pre TPU-VM (2-VM) setups, but it should work w/ correct config. I intend to make it work with Colab and Kaggle TPU notebooks soon.
 * Your first batch, and generally first epoch will be slow with Pytorch XLA, after that things pick up and move along quickly. Be patient.

From 07693f81b06cfdfac77230e4a0a16a70278842b3 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Wed, 10 Nov 2021 15:54:21 -0800
Subject: [PATCH 26/61] Validation fix since we don't have multi-GPU DataParallel support yet

---
 validate.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/validate.py b/validate.py
index ad746ffc89..24c5dc59d0 100755
--- a/validate.py
+++ b/validate.py
@@ -266,7 +266,7 @@ def main():
             args.checkpoint = c
             result = OrderedDict(model=args.model)
             r = {}
-            while not r and batch_size >= args.num_gpu:
+            while not r and batch_size >= 1:
                 try:
                     args.batch_size = batch_size
                     print('Validating with batch size: %d' % args.batch_size)

From 80ca078aedee177831beeffbf24e0aa48a45909b Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Thu, 11 Nov 2021 15:09:31 -0800
Subject: [PATCH 27/61] Fix a few bugs and formatting/naming issues

* Pass optimizer resume flag through to checkpoint / updater restore. Related to #961 but not clear how it relates to the crash.
* Rename monitor step args, cleanup handling of step_end_idx vs num_steps for consistent log output in either case * Resume from proper epoch (ie next epoch relative to checkpoint) --- timm/bits/checkpoint.py | 2 +- timm/bits/monitor.py | 20 +++++++------------- timm/bits/train_state.py | 6 +++--- train.py | 17 +++++++++-------- validate.py | 5 +++-- 5 files changed, 23 insertions(+), 27 deletions(-) diff --git a/timm/bits/checkpoint.py b/timm/bits/checkpoint.py index df21ab5eee..b7afd7313f 100644 --- a/timm/bits/checkpoint.py +++ b/timm/bits/checkpoint.py @@ -62,7 +62,7 @@ def load_train_state( _logger.info("Loaded legacy checkpoint '{}' (epoch {})".format(checkpoint_path, train_state.epoch)) return - train_state.load_state_dict(checkpoint, unwrap_fn=unwrap_fn) + train_state.load_state_dict(checkpoint, unwrap_fn=unwrap_fn, load_opt=load_opt) if log_info: _logger.info("Loaded checkpoint '{}' (epoch {})".format(checkpoint_path, train_state.epoch)) diff --git a/timm/bits/monitor.py b/timm/bits/monitor.py index af397e1aed..e4dd95f0fe 100644 --- a/timm/bits/monitor.py +++ b/timm/bits/monitor.py @@ -43,13 +43,6 @@ # f' Data: {data_time.smooth_val:.3f} ({data_time.avg:.3f})' # log_str += f' Loss: {loss.smooth_val:>9.6f} ({loss.avg:>6.4f}) ' # log_str += f' LR: {lr:.3e} ' -# -# if args.save_images and output_dir: -# torchvision.utils.save_image( -# input, -# os.path.join(output_dir, 'train-batch-%d.jpg' % batch_idx), -# padding=0, -# normalize=True) def summary_row_dict(results, index=None, index_name='epoch'): @@ -159,8 +152,8 @@ def __init__( def log_step( self, phase: str, - step: int, - step_end: Optional[int] = None, + step_idx: int, + step_end_idx: Optional[int] = None, epoch: Optional[int] = None, loss: Optional[float] = None, rate: Optional[float] = None, @@ -171,14 +164,15 @@ def log_step( """ if not self.output_enabled: return - + if 'num_steps' in kwargs: + step_end_idx = max(0, kwargs.pop('num_steps') - 1) phase_title = f'{phase.capitalize()} ({phase_suffix})' if phase_suffix else f'{phase.capitalize()}:' - progress = 100. * step / step_end if step_end else 0. + progress = 100. * step_idx / step_end_idx if step_end_idx else 0. text_update = [ phase_title, f'{epoch}' if epoch is not None else None, - f'[{step}]' if step_end is None else None, - f'[{step}/{step_end} ({progress:>3.0f}%)]' if step_end is not None else None, + f'[{step_idx}]' if step_end_idx is None else None, + f'[{step_idx}/{step_end_idx} ({progress:>3.0f}%)]' if step_end_idx is not None else None, f'Rate: {rate:.2f}/s' if rate is not None else None, f'Loss: {loss:.5f}' if loss is not None else None, ] diff --git a/timm/bits/train_state.py b/timm/bits/train_state.py index 91fcf76fb2..5d20f500fd 100644 --- a/timm/bits/train_state.py +++ b/timm/bits/train_state.py @@ -45,13 +45,13 @@ def state_dict(self, unwrap_fn=unwrap_model): train_cfg=vars(self.train_cfg) ) # FIXME include lr_scheduler state? - state.update(self.updater.state_dict()) # updater (optimizer, scaler,e tc) state added to state + state.update(self.updater.state_dict()) # updater (optimizer, scaler, etc.) 
state added to state return state def load_state_dict(self, state_dict, unwrap_fn=unwrap_model, load_opt=True): # restore train loop state - self.epoch = state_dict['epoch'] - self.step_count = state_dict['step_count'] + self.epoch = state_dict['epoch'] + 1 + self.step_count = 0 # FIXME need more logic to restore part way through epoch self.step_count_global = state_dict['step_count_global'] # restore model params / state diff --git a/train.py b/train.py index 5c4dab8aef..fb6b43195d 100755 --- a/train.py +++ b/train.py @@ -452,6 +452,7 @@ def setup_train_task(args, dev_env: DeviceEnv, mixup_active: bool): model_ema=args.model_ema, model_ema_decay=args.model_ema_decay, resume_path=args.resume, + resume_opt=not args.no_resume_opt, use_syncbn=args.sync_bn, ) @@ -683,7 +684,7 @@ def after_train_step( Returns: """ - end_step = step_idx == step_end_idx + last_step = step_idx == step_end_idx with torch.no_grad(): output, target, loss = tensors @@ -696,15 +697,15 @@ def after_train_step( state = replace(state, step_count_global=state.step_count_global + 1) cfg = state.train_cfg - if services.monitor is not None and end_step or (step_idx + 1) % cfg.log_interval == 0: + if services.monitor is not None and last_step or (step_idx + 1) % cfg.log_interval == 0: global_batch_size = dev_env.world_size * output.shape[0] loss_avg = loss_meter.compute() if services.monitor is not None: lr_avg = state.updater.get_average_lr() services.monitor.log_step( 'Train', - step=step_idx, - step_end=step_end_idx, + step_idx=step_idx, + step_end_idx=step_end_idx, epoch=state.epoch, loss=loss_avg.item(), rate=tracker.get_avg_iter_rate(global_batch_size), @@ -712,8 +713,8 @@ def after_train_step( ) if services.checkpoint is not None and cfg.recovery_interval and ( - end_step or (step_idx + 1) % cfg.recovery_interval == 0): - services.checkpoint.save_recovery(state.epoch, batch_idx=step_idx) + last_step or (step_idx + 1) % cfg.recovery_interval == 0): + services.checkpoint.save_recovery(state) if state.lr_scheduler is not None: # FIXME perform scheduler update here or via updater after_step call? 
@@ -770,8 +771,8 @@ def evaluate( loss_avg = losses_m.compute() logger.log_step( 'Eval', - step=step_idx, - step_end=end_idx, + step_idx=step_idx, + step_end_idx=end_idx, loss=loss_avg.item(), top1=top1.item(), top5=top5.item(), diff --git a/validate.py b/validate.py index ac1d9eb1a1..03a90dc01d 100755 --- a/validate.py +++ b/validate.py @@ -173,6 +173,7 @@ def validate(args): with torch.no_grad(): tracker.mark_iter() for step_idx, (sample, target) in enumerate(loader): + last_step = step_idx == num_steps - 1 tracker.mark_iter_data_end() # compute output @@ -197,12 +198,12 @@ def validate(args): accuracy.update(output.detach(), target) tracker.mark_iter() - if step_idx % args.log_freq == 0: + if last_step or step_idx % args.log_freq == 0: top1, top5 = accuracy.compute().values() loss_avg = losses.compute() logger.log_step( phase='eval', - step=step_idx, + step_idx=step_idx, num_steps=num_steps, rate=args.batch_size / tracker.iter_time.avg, loss=loss_avg.item(), From d9b0b3d60f5a49403799886583b58ff2bd9d8b43 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Thu, 11 Nov 2021 16:21:13 -0800 Subject: [PATCH 28/61] device arg wasn't removed from PrefetcherCuda instantiation of RE --- timm/data/prefetcher_cuda.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/timm/data/prefetcher_cuda.py b/timm/data/prefetcher_cuda.py index 0b36c027f2..3ae52dc210 100644 --- a/timm/data/prefetcher_cuda.py +++ b/timm/data/prefetcher_cuda.py @@ -34,7 +34,7 @@ def __init__( self.std = None if re_prob > 0.: self.random_erasing = RandomErasing( - probability=re_prob, mode=re_mode, count=re_count, num_splits=num_aug_splits, device=device) + probability=re_prob, mode=re_mode, count=re_count, num_splits=num_aug_splits) else: self.random_erasing = None From 4f338556d8f943c5df8f3b3333690cf14b7af5f1 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 12 Nov 2021 13:40:26 -0800 Subject: [PATCH 29/61] Fixes and improvements for metrics, tfds parser, loader / transform handling * add back ability to create transform with loader * change 'samples' -> 'examples' for tfds wrapper to match tfds naming * add support for specifying feature names for input and target in tfds wrapper * add class_to_idx for image classification datasets in tfds wrapper * add accumulate_type to avg meters and metrics to allow float32 or float64 accumulation control with lower prec data * minor cleanup, log output rate prev and avg --- timm/bits/avg_scalar.py | 6 -- timm/bits/avg_tensor.py | 18 +++-- timm/bits/metric.py | 6 +- timm/bits/metric_accuracy.py | 67 ++++++------------ timm/bits/monitor.py | 9 ++- timm/data/__init__.py | 4 +- timm/data/loader.py | 34 +++++++--- timm/data/mixup.py | 3 +- timm/data/parsers/parser_tfds.py | 112 ++++++++++++++++++++----------- timm/data/transforms_factory.py | 29 +++++--- train.py | 22 ++---- validate.py | 15 ++--- 12 files changed, 179 insertions(+), 146 deletions(-) diff --git a/timm/bits/avg_scalar.py b/timm/bits/avg_scalar.py index 04d41c8e18..6a6ce31be2 100644 --- a/timm/bits/avg_scalar.py +++ b/timm/bits/avg_scalar.py @@ -2,12 +2,6 @@ class AvgMinMaxScalar: """Computes and stores the average and current value""" def __init__(self): - self.val = 0 - self.avg = 0 - self.min = None - self.max = None - self.sum = 0 - self.count = 0 self.reset() def reset(self): diff --git a/timm/bits/avg_tensor.py b/timm/bits/avg_tensor.py index 0aaf92e305..01219b56f4 100644 --- a/timm/bits/avg_tensor.py +++ b/timm/bits/avg_tensor.py @@ -4,7 +4,8 @@ class AvgTensor: """Computes and stores the average and 
current value""" - def __init__(self): + def __init__(self, accumulate_dtype=torch.float32): + self.accumulate_dtype = accumulate_dtype self.sum = None self.count = None self.reset() @@ -16,7 +17,7 @@ def reset(self): def update(self, val: torch.Tensor, n=1): if self.sum is None: - self.sum = torch.zeros_like(val) + self.sum = torch.zeros_like(val, dtype=self.accumulate_dtype) self.count = torch.tensor(0, dtype=torch.long, device=val.device) self.sum += (val * n) self.count += n @@ -28,7 +29,13 @@ def compute(self): class TensorEma: """Computes and stores the average and current value""" - def __init__(self, smoothing_factor=0.9, init_zero=False): + def __init__( + self, + smoothing_factor=0.9, + init_zero=False, + accumulate_dtype=torch.float32 + ): + self.accumulate_dtype = accumulate_dtype self.smoothing_factor = smoothing_factor self.init_zero = init_zero self.val = None @@ -40,5 +47,8 @@ def reset(self): def update(self, val): if self.val is None: - self.val = torch.zeros_like(val) if self.init_zero else val.clone() + if self.init_zero: + self.val = torch.zeros_like(val, dtype=self.accumulate_dtype) + else: + self.val = val.clone().to(dtype=self.accumulate_dtype) self.val = (1. - self.smoothing_factor) * val + self.smoothing_factor * self.val diff --git a/timm/bits/metric.py b/timm/bits/metric.py index b18282b8be..0a0f6d8b22 100644 --- a/timm/bits/metric.py +++ b/timm/bits/metric.py @@ -10,6 +10,7 @@ MetricValueT = Union[float, torch.Tensor, List[float], List[torch.Tensor]] + @dataclass class ValueInfo: initial: Optional[MetricValueT] = 0. @@ -20,7 +21,10 @@ class ValueInfo: class Metric(abc.ABC): - def __init__(self, dev_env: DeviceEnv = None): + def __init__( + self, + dev_env: DeviceEnv = None + ): self._infos: Dict[str, ValueInfo] = {} self._values: Dict[str, Optional[MetricValueT]] = {} self._values_dist: Dict[str, Optional[MetricValueT]] = {} diff --git a/timm/bits/metric_accuracy.py b/timm/bits/metric_accuracy.py index 0db72c6dce..1a3fbefc3f 100644 --- a/timm/bits/metric_accuracy.py +++ b/timm/bits/metric_accuracy.py @@ -7,15 +7,22 @@ class Accuracy(Metric): - def __init__(self, threshold=0.5, multi_label=False, dev_env=None): + def __init__( + self, + threshold=0.5, + multi_label=False, + accumulate_dtype=torch.float32, + dev_env=None, + ): super().__init__(dev_env=dev_env) + self.accumulate_dtype = accumulate_dtype self.threshold = threshold self.eps = 1e-8 self.multi_label = multi_label # statistics / counts - self._register_value('correct') - self._register_value('total') + self._register_value('correct', ValueInfo(dtype=accumulate_dtype)) + self._register_value('total', ValueInfo(dtype=accumulate_dtype)) def _update(self, predictions, target): raise NotImplemented() @@ -24,65 +31,31 @@ def _compute(self): raise NotImplemented() -# class AccuracyTopK(torch.nn.Module): -# -# def __init__(self, topk=(1, 5), device=None): -# super().__init__() -# self.eps = 1e-8 -# self.device = device -# self.topk = topk -# self.maxk = max(topk) -# # FIXME handle distributed operation -# -# # statistics / counts -# self.reset() -# -# def update(self, predictions: torch.Tensor, target: torch.Tensor): -# sorted_indices = predictions.topk(self.maxk, dim=1)[1] -# sorted_indices.t_() -# correct = sorted_indices.eq(target.reshape(1, -1).expand_as(sorted_indices)) -# -# batch_size = target.shape[0] -# correct_k = {k: correct[:k].reshape(-1).float().sum(0) for k in self.topk} -# for k, v in correct_k.items(): -# attr = f'_correct_top{k}' -# old_v = getattr(self, attr) -# setattr(self, attr, old_v + 
v) -# self._total_sum += batch_size -# -# def reset(self): -# for k in self.topk: -# setattr(self, f'_correct_top{k}', torch.tensor(0, dtype=torch.float32)) -# self._total_sum = torch.tensor(0, dtype=torch.float32) -# -# @property -# def counts(self): -# pass -# -# def compute(self) -> Dict[str, torch.Tensor]: -# # FIXME handle distributed reduction -# return {f'top{k}': 100 * getattr(self, f'_correct_top{k}') / self._total_sum for k in self.topk} - - class AccuracyTopK(Metric): - def __init__(self, topk=(1, 5), dev_env: DeviceEnv = None): + def __init__( + self, + topk=(1, 5), + accumulate_dtype=torch.float32, + dev_env: DeviceEnv = None + ): super().__init__(dev_env=dev_env) + self.accumulate_dtype = accumulate_dtype self.eps = 1e-8 self.topk = topk self.maxk = max(topk) # statistics / counts for k in self.topk: - self._register_value(f'top{k}') - self._register_value('total') + self._register_value(f'top{k}', ValueInfo(dtype=accumulate_dtype)) + self._register_value('total', ValueInfo(dtype=accumulate_dtype)) self.reset() def _update(self, predictions: torch.Tensor, target: torch.Tensor): batch_size = predictions.shape[0] sorted_indices = predictions.topk(self.maxk, dim=1)[1] target_reshape = target.reshape(-1, 1).expand_as(sorted_indices) - correct = sorted_indices.eq(target_reshape).float().sum(0) + correct = sorted_indices.eq(target_reshape).to(dtype=self.accumulate_dtype).sum(0) for k in self.topk: attr_name = f'top{k}' correct_at_k = correct[:k].sum() diff --git a/timm/bits/monitor.py b/timm/bits/monitor.py index e4dd95f0fe..ca9c19beaa 100644 --- a/timm/bits/monitor.py +++ b/timm/bits/monitor.py @@ -156,7 +156,7 @@ def log_step( step_end_idx: Optional[int] = None, epoch: Optional[int] = None, loss: Optional[float] = None, - rate: Optional[float] = None, + rate: Optional[Union[float, Tuple[float, float]]] = None, phase_suffix: str = '', **kwargs, ): @@ -168,12 +168,17 @@ def log_step( step_end_idx = max(0, kwargs.pop('num_steps') - 1) phase_title = f'{phase.capitalize()} ({phase_suffix})' if phase_suffix else f'{phase.capitalize()}:' progress = 100. * step_idx / step_end_idx if step_end_idx else 0. 
+ rate_str = '' + if isinstance(rate, (tuple, list)): + rate_str = f'Rate: {rate[0]:.2f}/s ({rate[1]:.2f}/s)' + elif rate is not None: + rate_str = f'Rate: {rate:.2f}/s' text_update = [ phase_title, f'{epoch}' if epoch is not None else None, f'[{step_idx}]' if step_end_idx is None else None, f'[{step_idx}/{step_end_idx} ({progress:>3.0f}%)]' if step_end_idx is not None else None, - f'Rate: {rate:.2f}/s' if rate is not None else None, + rate_str, f'Loss: {loss:.5f}' if loss is not None else None, ] _add_kwargs(text_update, **kwargs) diff --git a/timm/data/__init__.py b/timm/data/__init__.py index 163bcea737..de978419f7 100644 --- a/timm/data/__init__.py +++ b/timm/data/__init__.py @@ -1,10 +1,10 @@ from .auto_augment import RandAugment, AutoAugment, rand_augment_ops, auto_augment_policy,\ rand_augment_transform, auto_augment_transform -from .config import resolve_data_config +from .config import resolve_data_config, PreprocessCfg, AugCfg, MixupCfg from .constants import * from .dataset import ImageDataset, IterableImageDataset, AugMixDataset from .dataset_factory import create_dataset -from .loader import create_loader_v2, PreprocessCfg, AugCfg, MixupCfg +from .loader import create_loader_v2 from .mixup import Mixup, FastCollateMixup from .parsers import create_parser from .real_labels import RealLabelsImagenet diff --git a/timm/data/loader.py b/timm/data/loader.py index 67d30765a5..750067d496 100644 --- a/timm/data/loader.py +++ b/timm/data/loader.py @@ -6,18 +6,19 @@ Hacked together by / Copyright 2020 Ross Wightman """ -from typing import Tuple, Optional, Union, Callable +from typing import Optional, Callable -import torch.utils.data import numpy as np +import torch.utils.data from timm.bits import DeviceEnv from .collate import fast_collate -from .config import PreprocessCfg, AugCfg, MixupCfg +from .config import PreprocessCfg, MixupCfg from .distributed_sampler import OrderedDistributedSampler from .fetcher import Fetcher from .mixup import FastCollateMixup from .prefetcher_cuda import PrefetcherCuda +from .transforms_factory import create_transform_v2 def _worker_init(worker_id): @@ -31,9 +32,11 @@ def create_loader_v2( batch_size: int, is_training: bool = False, dev_env: Optional[DeviceEnv] = None, - normalize=True, pp_cfg: PreprocessCfg = PreprocessCfg(), mix_cfg: MixupCfg = None, + create_transform: bool = True, + normalize_in_transform: bool = True, + separate_transform: bool = False, num_workers: int = 1, collate_fn: Optional[Callable] = None, pin_memory: bool = False, @@ -46,10 +49,12 @@ def create_loader_v2( dataset: batch_size: is_training: - dev_env: - normalize: + dev_env: pp_cfg: - mix_cfg: + mix_cfg: + create_transform: + normalize_in_transform: + separate_transform: num_workers: collate_fn: pin_memory: @@ -62,6 +67,14 @@ def create_loader_v2( if dev_env is None: dev_env = DeviceEnv.instance() + if create_transform: + dataset.transform = create_transform_v2( + cfg=pp_cfg, + is_training=is_training, + normalize=normalize_in_transform, + separate=separate_transform, + ) + sampler = None if dev_env.distributed and not isinstance(dataset, torch.utils.data.IterableDataset): if is_training: @@ -110,18 +123,19 @@ def create_loader_v2( loader = loader_class(dataset, **loader_args) fetcher_kwargs = dict( - normalize=normalize, + normalize=not normalize_in_transform, mean=pp_cfg.mean, std=pp_cfg.std, ) - if normalize and is_training and pp_cfg.aug is not None: + if not normalize_in_transform and is_training and pp_cfg.aug is not None: + # If normalization can be done in the 
prefetcher, random erasing is done there too + # NOTE RandomErasing does not work well in XLA so normalize_in_transform will be True fetcher_kwargs.update(dict( re_prob=pp_cfg.aug.re_prob, re_mode=pp_cfg.aug.re_mode, re_count=pp_cfg.aug.re_count, num_aug_splits=pp_cfg.aug.num_aug_splits, )) - if dev_env.type_cuda: loader = PrefetcherCuda(loader, **fetcher_kwargs) else: diff --git a/timm/data/mixup.py b/timm/data/mixup.py index 074b69413a..bf5d1b0ef9 100644 --- a/timm/data/mixup.py +++ b/timm/data/mixup.py @@ -103,6 +103,7 @@ class Mixup: """ def __init__(self, mixup_alpha=1., cutmix_alpha=0., cutmix_minmax=None, prob=1.0, switch_prob=0.5, mode='batch', correct_lam=True, label_smoothing=0., num_classes=0): + assert num_classes > 0, 'num_classes must be set for target generation' self.mixup_alpha = mixup_alpha self.cutmix_alpha = cutmix_alpha self.cutmix_minmax = cutmix_minmax @@ -113,8 +114,6 @@ def __init__(self, mixup_alpha=1., cutmix_alpha=0., cutmix_minmax=None, prob=1.0 self.mix_prob = prob self.switch_prob = switch_prob self.label_smoothing = label_smoothing - if label_smoothing > 0.: - assert num_classes > 0 self.num_classes = num_classes self.mode = mode self.correct_lam = correct_lam # correct lambda based on clipped area for cutmix diff --git a/timm/data/parsers/parser_tfds.py b/timm/data/parsers/parser_tfds.py index e96e827bc8..dd24c55cd5 100644 --- a/timm/data/parsers/parser_tfds.py +++ b/timm/data/parsers/parser_tfds.py @@ -27,36 +27,44 @@ exit(1) from .parser import Parser -from timm.bits import get_global_device +from timm.bits import get_global_device, is_global_device MAX_TP_SIZE = 8 # maximum TF threadpool size, only doing jpeg decodes and queuing activities -SHUFFLE_SIZE = 16384 # samples to shuffle in DS queue -PREFETCH_SIZE = 2048 # samples to prefetch +SHUFFLE_SIZE = 8192 # examples to shuffle in DS queue +PREFETCH_SIZE = 2048 # examples to prefetch -def even_split_indices(split, n, num_samples): - partitions = [round(i * num_samples / n) for i in range(n + 1)] +def even_split_indices(split, n, num_examples): + partitions = [round(i * num_examples / n) for i in range(n + 1)] return [f"{split}[{partitions[i]}:{partitions[i+1]}]" for i in range(n)] +def get_class_labels(info): + if 'label' not in info.features: + return {} + class_label = info.features['label'] + class_to_idx = {n: class_label.str2int(n) for n in class_label.names} + return class_to_idx + + class ParserTfds(Parser): """ Wrap Tensorflow Datasets for use in PyTorch There several things to be aware of: - * To prevent excessive samples being dropped per epoch w/ distributed training or multiplicity of + * To prevent excessive examples being dropped per epoch w/ distributed training or multiplicity of dataloader workers, the train iterator wraps to avoid returning partial batches that trigger drop_last https://github.com/pytorch/pytorch/issues/33413 * With PyTorch IterableDatasets, each worker in each replica operates in isolation, the final batch from each worker could be a different size. For training this is worked around by option above, for - validation extra samples are inserted iff distributed mode is enabled so that the batches being reduced + validation extra examples are inserted iff distributed mode is enabled so that the batches being reduced across replicas are of same size. This will slightly alter the results, distributed validation will not be 100% correct. 
This is similar to common handling in DistributedSampler for normal Datasets but a bit worse - since there are up to N * J extra samples with IterableDatasets. + since there are up to N * J extra examples with IterableDatasets. * The sharding (splitting of dataset into TFRecord) files imposes limitations on the number of replicas and dataloader workers you can use. For really small datasets that only contain a few shards you may have to train non-distributed w/ 1-2 dataloader workers. This is likely not a huge concern as the benefit of distributed training or fast dataloading should be much less for small datasets. - * This wrapper is currently configured to return individual, decompressed image samples from the TFDS + * This wrapper is currently configured to return individual, decompressed image examples from the TFDS dataset. The augmentation (transforms) and batching is still done in PyTorch. It would be possible to specify TF augmentation fn and return augmented batches w/ some modifications to other downstream components. @@ -72,6 +80,10 @@ def __init__( download=False, repeats=0, seed=42, + input_name='image', + input_image='RGB', + target_name='label', + target_image='', prefetch_size=None, shuffle_size=None, max_threadpool_size=None @@ -83,10 +95,13 @@ def __init__( name: tfds dataset name (eg `imagenet2012`) split: tfds dataset split (can use all TFDS split strings eg `train[:10%]`) is_training: training mode, shuffle enabled, dataset len rounded by batch_size - batch_size: batch_size to use to unsure total samples % batch_size == 0 in training across all dis nodes + batch_size: batch_size to use to unsure total examples % batch_size == 0 in training across all dis nodes download: download and build TFDS dataset if set, otherwise must use tfds CLI repeats: iterate through (repeat) the dataset this many times per iteration (once if 0 or 1) seed: common seed for shard shuffle across all distributed/worker instances + input_image: image mode if input is an image (currently PIL mode string) + target_name: name of Feature to return as target (label) + target_image: image mode if target is an image (currently PIL mode string) prefetch_size: override default tf.data prefetch buffer size shuffle_size: override default tf.data shuffle buffer size max_threadpool_size: override default threadpool size for tf.data @@ -101,25 +116,39 @@ def __init__( self.batch_size = batch_size self.repeats = repeats self.common_seed = seed # a seed that's fixed across all worker / distributed instances + + # Performance settings self.prefetch_size = prefetch_size or PREFETCH_SIZE self.shuffle_size = shuffle_size or SHUFFLE_SIZE self.max_threadpool_size = max_threadpool_size or MAX_TP_SIZE # TFDS builder and split information + self.input_name = input_name # FIXME support tuples / lists of inputs and targets and full range of Feature + self.input_image = input_image + self.target_name = target_name + self.target_image = target_image self.builder = tfds.builder(name, data_dir=root) # NOTE: the tfds command line app can be used download & prepare datasets if you don't enable download flag if download: self.builder.download_and_prepare() + self.class_to_idx = get_class_labels(self.builder.info) if self.target_name == 'label' else {} self.split_info = self.builder.info.splits[split] - self.num_samples = self.split_info.num_examples + self.num_examples = self.split_info.num_examples # Distributed world state self.dist_rank = 0 self.dist_num_replicas = 1 - dev_env = get_global_device() # FIXME allow to work 
without devenv usage? - if dev_env.distributed and dev_env.world_size > 1: - self.dist_rank = dev_env.global_rank - self.dist_num_replicas = dev_env.world_size + if is_global_device(): + dev_env = get_global_device() + if dev_env.distributed and dev_env.world_size > 1: + self.dist_rank = dev_env.global_rank + self.dist_num_replicas = dev_env.world_size + else: + # FIXME warn if we fallback to torch distributed? + import torch.distributed as dist + if dist.is_available() and dist.is_initialized() and dist.get_world_size() > 1: + self.dist_rank = dist.get_rank() + self.dist_num_replicas = dist.get_world_size() # Attributes that are updated in _lazy_init, including the tf.data pipeline itself self.global_num_workers = 1 @@ -159,17 +188,17 @@ def _lazy_init(self): I am currently using a mix of InputContext shard assignment and fine-grained sub-splits for distributing the data across workers. For training InputContext is used to assign shards to nodes unless num_shards in dataset < total number of workers. Otherwise sub-split API is used for datasets without enough shards or - for validation where we can't drop samples and need to avoid minimize uneven splits to avoid padding. + for validation where we can't drop examples and need to avoid minimize uneven splits to avoid padding. """ should_subsplit = self.global_num_workers > 1 and ( self.split_info.num_shards < self.global_num_workers or not self.is_training) if should_subsplit: - # split the dataset w/o using sharding for more even samples / worker, can result in less optimal + # split the dataset w/o using sharding for more even examples / worker, can result in less optimal # read patterns for distributed training (overlap across shards) so better to use InputContext there if has_buggy_even_splits: # my even_split workaround doesn't work on subsplits, upgrade tfds! if not isinstance(self.split_info, tfds.core.splits.SubSplitInfo): - subsplits = even_split_indices(self.split, self.global_num_workers, self.num_samples) + subsplits = even_split_indices(self.split, self.global_num_workers, self.num_examples) self.subsplit = subsplits[global_worker_id] else: subsplits = tfds.even_splits(self.split, self.global_num_workers) @@ -200,8 +229,8 @@ def _lazy_init(self): # see warnings at https://pytorch.org/docs/stable/data.html#multi-process-data-loading ds = ds.repeat() # allow wrap around and break iteration manually if self.is_training: - ds = ds.shuffle(min(self.num_samples, self.shuffle_size) // self.global_num_workers, seed=self.worker_seed) - ds = ds.prefetch(min(self.num_samples // self.global_num_workers, self.prefetch_size)) + ds = ds.shuffle(min(self.num_examples, self.shuffle_size) // self.global_num_workers, seed=self.worker_seed) + ds = ds.prefetch(min(self.num_examples // self.global_num_workers, self.prefetch_size)) self.ds = tfds.as_numpy(ds) def __iter__(self): @@ -210,44 +239,49 @@ def __iter__(self): # Compute a rounded up sample count that is used to: # 1. make batches even cross workers & replicas in distributed validation. - # This adds extra samples and will slightly alter validation results. + # This adds extra examples and will slightly alter validation results. # 2. 
determine loop ending condition in training w/ repeat enabled so that only full batch_size # batches are produced (underlying tfds iter wraps around) - target_sample_count = math.ceil(max(1, self.repeats) * self.num_samples / self.global_num_workers) + target_example_count = math.ceil(max(1, self.repeats) * self.num_examples / self.global_num_workers) if self.is_training: # round up to nearest batch_size per worker-replica - target_sample_count = math.ceil(target_sample_count / self.batch_size) * self.batch_size + target_example_count = math.ceil(target_example_count / self.batch_size) * self.batch_size # Iterate until exhausted or sample count hits target when training (ds.repeat enabled) - sample_count = 0 - for sample in self.ds: - img = Image.fromarray(sample['image'], mode='RGB') - yield img, sample['label'] - sample_count += 1 - if self.is_training and sample_count >= target_sample_count: + example_count = 0 + for example in self.ds: + input_data = example[self.input_name] + if self.input_image: + input_data = Image.fromarray(input_data, mode=self.input_image) + target_data = example[self.target_name] + if self.target_image: + target_data = Image.fromarray(target_data, mode=self.target_image) + yield input_data, target_data + example_count += 1 + if self.is_training and example_count >= target_example_count: # Need to break out of loop when repeat() is enabled for training w/ oversampling - # this results in extra samples per epoch but seems more desirable than dropping + # this results in extra examples per epoch but seems more desirable than dropping # up to N*J batches per epoch (where N = num distributed processes, and J = num worker processes) break - # Pad across distributed nodes (make counts equal by adding samples) + # Pad across distributed nodes (make counts equal by adding examples) if not self.is_training and self.dist_num_replicas > 1 and self.subsplit is not None and \ - 0 < sample_count < target_sample_count: + 0 < example_count < target_example_count: # Validation batch padding only done for distributed training where results are reduced across nodes. # For single process case, it won't matter if workers return different batch sizes. # If using input_context or % based splits, sample count can vary significantly across workers and this # approach should not be used (hence disabled if self.subsplit isn't set). - while sample_count < target_sample_count: - yield img, sample['label'] # yield prev sample again - sample_count += 1 + while example_count < target_example_count: + yield input_data, target_data # yield prev sample again + example_count += 1 def __len__(self): - # this is just an estimate and does not factor in extra samples added to pad batches based on + # this is just an estimate and does not factor in extra examples added to pad batches based on # complete worker & replica info (not available until init in dataloader). 
- return math.ceil(max(1, self.repeats) * self.num_samples / self.dist_num_replicas) + return math.ceil(max(1, self.repeats) * self.num_examples / self.dist_num_replicas) def _filename(self, index, basename=False, absolute=False): - assert False, "Not supported" # no random access to samples + assert False, "Not supported" # no random access to examples def filenames(self, basename=False, absolute=False): """ Return all filenames in dataset, overrides base""" @@ -255,7 +289,7 @@ def filenames(self, basename=False, absolute=False): self._lazy_init() names = [] for sample in self.ds: - if len(names) > self.num_samples: + if len(names) > self.num_examples: break # safety for ds.repeat() case if 'file_name' in sample: name = sample['file_name'] diff --git a/timm/data/transforms_factory.py b/timm/data/transforms_factory.py index 1c8d15e2d4..24c89ce357 100644 --- a/timm/data/transforms_factory.py +++ b/timm/data/transforms_factory.py @@ -22,6 +22,7 @@ def transforms_noaug_train( mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, normalize=False, + compose=True, ): if interpolation == 'random': # random interpolation not supported with no-aug @@ -38,7 +39,7 @@ def transforms_noaug_train( else: # (pre)fetcher and collate will handle tensor conversion and normalize tfl += [ToNumpy()] - return transforms.Compose(tfl) + return transforms.Compose(tfl) if compose else tfl def transforms_imagenet_train( @@ -49,6 +50,7 @@ def transforms_imagenet_train( aug_cfg=AugCfg(), normalize=False, separate=False, + compose=True, ): """ If separate==True, the transforms are returned as a tuple of 3 separate transforms @@ -122,9 +124,13 @@ def transforms_imagenet_train( if separate: # return each transform stage separately - return transforms.Compose(primary_tfl), transforms.Compose(secondary_tfl), transforms.Compose(final_tfl) + if compose: + return transforms.Compose(primary_tfl), transforms.Compose(secondary_tfl), transforms.Compose(final_tfl) + else: + return primary_tfl, secondary_tfl, final_tfl else: - return transforms.Compose(primary_tfl + secondary_tfl + final_tfl) + tfl = primary_tfl + secondary_tfl + final_tfl + return transforms.Compose(tfl) if compose else tfl def transforms_imagenet_eval( @@ -134,6 +140,7 @@ def transforms_imagenet_eval( mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, normalize=False, + compose=True, ): crop_pct = crop_pct or DEFAULT_CROP_PCT @@ -160,7 +167,7 @@ def transforms_imagenet_eval( # (pre)fetcher and collate will handle tensor conversion and normalize tfl += [ToNumpy()] - return transforms.Compose(tfl) + return transforms.Compose(tfl) if compose else tfl def create_transform_v2( @@ -168,6 +175,7 @@ def create_transform_v2( is_training=False, normalize=False, separate=False, + compose=True, tf_preprocessing=False, ): """ @@ -175,10 +183,10 @@ def create_transform_v2( Args: cfg: Pre-processing configuration is_training (bool): Create transform for training pre-processing - tf_preprocessing (bool): Use Tensorflow pre-processing (for validation) normalize (bool): Enable normalization in transforms (otherwise handled by fetcher/pre-fetcher) separate (bool): Return transforms separated into stages (for train) - + compose (bool): Wrap transforms in transform.Compose(), returns list otherwise + tf_preprocessing (bool): Use Tensorflow pre-processing (for validation) Returns: """ @@ -202,7 +210,9 @@ def create_transform_v2( interpolation=cfg.interpolation, normalize=normalize, mean=cfg.mean, - std=cfg.std) + std=cfg.std, + compose=compose, + ) elif is_training: transform = 
transforms_imagenet_train( img_size, @@ -211,7 +221,9 @@ def create_transform_v2( std=cfg.std, aug_cfg=cfg.aug, normalize=normalize, - separate=separate) + separate=separate, + compose=compose, + ) else: assert not separate, "Separate transforms not supported for validation preprocessing" transform = transforms_imagenet_eval( @@ -221,6 +233,7 @@ def create_transform_v2( mean=cfg.mean, std=cfg.std, normalize=normalize, + compose=compose, ) return transform diff --git a/train.py b/train.py index fb6b43195d..0440f551c1 100755 --- a/train.py +++ b/train.py @@ -563,17 +563,13 @@ def setup_data(args, default_cfg, dev_env: DeviceEnv, mixup_active: bool): # if using PyTorch XLA and RandomErasing is enabled, we must normalize and do RE in transforms on CPU normalize_in_transform = dev_env.type_xla and args.reprob > 0 - - dataset_train.transform = create_transform_v2( - cfg=train_pp_cfg, is_training=True, normalize=normalize_in_transform) - loader_train = create_loader_v2( dataset_train, batch_size=args.batch_size, is_training=True, - normalize=not normalize_in_transform, pp_cfg=train_pp_cfg, mix_cfg=mixup_cfg, + normalize_in_transform=normalize_in_transform, num_workers=args.workers, pin_memory=args.pin_mem, use_multi_epochs_loader=args.use_multi_epochs_loader @@ -587,19 +583,17 @@ def setup_data(args, default_cfg, dev_env: DeviceEnv, mixup_active: bool): std=data_config['std'], ) - dataset_eval.transform = create_transform_v2( - cfg=eval_pp_cfg, is_training=False, normalize=normalize_in_transform) - eval_workers = args.workers if 'tfds' in args.dataset: - # FIXME reduce validation issues when using TFDS w/ workers and distributed training + # FIXME reduces validation padding issues when using TFDS w/ workers and distributed training eval_workers = min(2, args.workers) + loader_eval = create_loader_v2( dataset_eval, batch_size=args.validation_batch_size or args.batch_size, is_training=False, - normalize=not normalize_in_transform, pp_cfg=eval_pp_cfg, + normalize_in_transform=normalize_in_transform, num_workers=eval_workers, pin_memory=args.pin_mem, ) @@ -708,7 +702,7 @@ def after_train_step( step_end_idx=step_end_idx, epoch=state.epoch, loss=loss_avg.item(), - rate=tracker.get_avg_iter_rate(global_batch_size), + rate=(tracker.get_last_iter_rate(global_batch_size), tracker.get_avg_iter_rate(global_batch_size)), lr=lr_avg, ) @@ -756,16 +750,14 @@ def evaluate( dev_env.mark_step() elif dev_env.type_cuda: dev_env.synchronize() - - # FIXME uncommenting this fixes race btw model `output`/`loss` and loss_m/accuracy_m meter input + # FIXME uncommenting this fixes race btw model `output` / `loss` and loss_m / accuracy_m meter input # for PyTorch XLA GPU use. # This issue does not exist for normal PyTorch w/ GPU (CUDA) or PyTorch XLA w/ TPU. 
# loss.item() - tracker.mark_iter_step_end() + losses_m.update(loss, output.size(0)) accuracy_m.update(output, target) - if last_step or step_idx % log_interval == 0: top1, top5 = accuracy_m.compute().values() loss_avg = losses_m.compute() diff --git a/validate.py b/validate.py index 03a90dc01d..d2eca03efe 100755 --- a/validate.py +++ b/validate.py @@ -154,11 +154,10 @@ def validate(args): std=data_config['std'], ) - dataset.transform = create_transform_v2(cfg=eval_pp_cfg, is_training=False) - loader = create_loader_v2( dataset, batch_size=args.batch_size, + is_training=False, pp_cfg=eval_pp_cfg, num_workers=args.workers, pin_memory=args.pin_mem) @@ -176,24 +175,20 @@ def validate(args): last_step = step_idx == num_steps - 1 tracker.mark_iter_data_end() - # compute output with dev_env.autocast(): output = model(sample) - if valid_labels is not None: output = output[:, valid_labels] loss = criterion(output, target) - if dev_env.type_cuda: - dev_env.synchronize() - tracker.mark_iter_step_end() - if dev_env.type_xla: dev_env.mark_step() + elif dev_env.type_cuda: + dev_env.synchronize() + tracker.mark_iter_step_end() if real_labels is not None: real_labels.add_result(output) - losses.update(loss.detach(), sample.size(0)) accuracy.update(output.detach(), target) @@ -205,7 +200,7 @@ def validate(args): phase='eval', step_idx=step_idx, num_steps=num_steps, - rate=args.batch_size / tracker.iter_time.avg, + rate=(tracker.get_last_iter_rate(output.shape[0]), tracker.get_avg_iter_rate(args.batch_size)), loss=loss_avg.item(), top1=top1.item(), top5=top5.item(), From 871cef4198a02cf59044d222effa129e2157550a Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 12 Nov 2021 13:53:55 -0800 Subject: [PATCH 30/61] version 0.5.1 --- timm/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/timm/version.py b/timm/version.py index 2b8877c505..93b60a1dcc 100644 --- a/timm/version.py +++ b/timm/version.py @@ -1 +1 @@ -__version__ = '0.5.0' +__version__ = '0.5.1' From 820ae9925e66357bbd5b5d7db4e16ec7809c3e62 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 3 Dec 2021 13:22:25 -0800 Subject: [PATCH 31/61] Fix load_state_dict to handle None ema entries --- timm/models/helpers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/timm/models/helpers.py b/timm/models/helpers.py index 6aa1faa30e..16ce64d0bf 100644 --- a/timm/models/helpers.py +++ b/timm/models/helpers.py @@ -27,9 +27,9 @@ def load_state_dict(checkpoint_path, use_ema=False): checkpoint = torch.load(checkpoint_path, map_location='cpu') state_dict_key = '' if isinstance(checkpoint, dict): - if use_ema and 'state_dict_ema' in checkpoint: + if use_ema and checkpoint.get('state_dict_ema', None) is not None: state_dict_key = 'state_dict_ema' - elif use_ema and 'model_ema' in checkpoint: + elif use_ema and checkpoint.get('model_ema', None) is not None: state_dict_key = 'model_ema' elif 'state_dict' in checkpoint: state_dict_key = 'state_dict' From ff0f709c206d748bd553970dcc9dd7659373e6e1 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Wed, 8 Dec 2021 14:03:27 -0800 Subject: [PATCH 32/61] Testing TFDS shuffle across epochs --- timm/data/parsers/parser_tfds.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/timm/data/parsers/parser_tfds.py b/timm/data/parsers/parser_tfds.py index 8fb1de1491..132065be02 100644 --- a/timm/data/parsers/parser_tfds.py +++ b/timm/data/parsers/parser_tfds.py @@ -162,6 +162,8 @@ def __init__( self.worker_seed = 0 # seed unique to each 
work instance self.subsplit = None # set when data is distributed across workers using sub-splits self.ds = None # initialized lazily on each dataloader worker process + self.init_count = 0 + self.reinit_each_iter = False # self.is_training # FIXME evaluating shuffle across epochs def _lazy_init(self): """ Lazily initialize the dataset. @@ -174,12 +176,10 @@ def _lazy_init(self): instances once it has been initialized. Do not call any dataset methods that can call _lazy_init before it is passed to dataloader. """ - worker_info = torch.utils.data.get_worker_info() - # setup input context to split dataset across distributed processes - num_workers = 1 - global_worker_id = 0 - if worker_info is not None: + if self.worker_info is None: + worker_info = torch.utils.data.get_worker_info() + assert worker_info is not None self.worker_info = worker_info self.worker_seed = worker_info.seed num_workers = worker_info.num_workers @@ -209,6 +209,9 @@ def _lazy_init(self): else: subsplits = tfds.even_splits(self.split, self.global_num_workers) self.subsplit = subsplits[global_worker_id] + else: + num_workers = self.worker_info.num_workers + global_worker_id = self.dist_rank * num_workers + self.worker_info.id input_context = None if self.global_num_workers > 1 and self.subsplit is None: @@ -219,8 +222,8 @@ def _lazy_init(self): num_replicas_in_sync=self.dist_num_replicas # FIXME does this arg have any impact? ) read_config = tfds.ReadConfig( - shuffle_seed=self.common_seed, - shuffle_reshuffle_each_iteration=True, + shuffle_seed=self.common_seed + self.init_count, + shuffle_reshuffle_each_iteration=not self.reinit_each_iter, input_context=input_context) ds = self.builder.as_dataset( split=self.subsplit or self.split, shuffle_files=self.is_training, read_config=read_config) @@ -235,12 +238,15 @@ def _lazy_init(self): # see warnings at https://pytorch.org/docs/stable/data.html#multi-process-data-loading ds = ds.repeat() # allow wrap around and break iteration manually if self.is_training: - ds = ds.shuffle(min(self.num_examples, self.shuffle_size) // self.global_num_workers, seed=self.worker_seed) + ds = ds.shuffle( + min(self.num_examples, self.shuffle_size) // self.global_num_workers, + seed=self.worker_seed + self.init_count) ds = ds.prefetch(min(self.num_examples // self.global_num_workers, self.prefetch_size)) self.ds = tfds.as_numpy(ds) + self.init_count += 1 def __iter__(self): - if self.ds is None: + if self.ds is None or self.reinit_each_iter: self._lazy_init() # Compute a rounded up sample count that is used to: From 7bbbd5ef1b2ad03ace04982152a8ca395fed4f43 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Wed, 8 Dec 2021 14:05:12 -0800 Subject: [PATCH 33/61] EvoNorm and GroupNormAct options for debugging TPU / XLA concerns --- timm/models/layers/evo_norm.py | 48 +++++++++++++++++++++------------- timm/models/layers/norm_act.py | 30 ++++++++++++++++++++- 2 files changed, 59 insertions(+), 19 deletions(-) diff --git a/timm/models/layers/evo_norm.py b/timm/models/layers/evo_norm.py index d42c502c1c..5032a527e9 100644 --- a/timm/models/layers/evo_norm.py +++ b/timm/models/layers/evo_norm.py @@ -23,6 +23,7 @@ Hacked together by / Copyright 2020 Ross Wightman """ +from typing import Sequence, Union import torch import torch.nn as nn @@ -38,36 +39,47 @@ def instance_std(x, eps: float = 1e-5): def instance_rms(x, eps: float = 1e-5): - rms = x.square().float().mean(dim=(2, 3), keepdim=True).add(eps).sqrt().to(dtype=x.dtype) + rms = x.float().square().mean(dim=(2, 3), 
keepdim=True).add(eps).sqrt().to(x.dtype) return rms.expand(x.shape) +def manual_var(x, dim: Union[int, Sequence[int]], diff_sqm: bool = False): + xm = x.mean(dim=dim, keepdim=True) + if diff_sqm: + # difference of squared mean and mean squared, faster on TPU can be less stable + var = (x.square().mean(dim=(2, 3, 4), keepdim=True) - xm.square()).clamp(0) + else: + var = (x - xm).square().mean(dim=(2, 3, 4), keepdim=True) + return var + + def group_std(x, groups: int = 32, eps: float = 1e-5, flatten: bool = False): B, C, H, W = x.shape x_dtype = x.dtype _assert(C % groups == 0, '') - # x = x.reshape(B, groups, -1) # FIXME simpler shape causing TPU / XLA issues - # std = x.float().var(dim=2, unbiased=False, keepdim=True).add(eps).sqrt() - x = x.reshape(B, groups, C // groups, H, W) - std = x.float().var(dim=(2, 3, 4), unbiased=False, keepdim=True).add(eps).sqrt() - return std.expand(x.shape).reshape(B, C, H, W).to(x_dtype) + torch.var() + if flatten: + x = x.reshape(B, groups, -1) # FIXME simpler shape causing TPU / XLA issues + std = x.float().var(dim=2, unbiased=False, keepdim=True).add(eps).sqrt().to(x_dtype) + else: + x = x.reshape(B, groups, C // groups, H, W) + std = x.float().var(dim=(2, 3, 4), unbiased=False, keepdim=True).add(eps).sqrt().to(x_dtype) + return std.expand(x.shape).reshape(B, C, H, W) -def group_std_tpu(x, groups: int = 32, eps: float = 1e-5, diff_sqm: bool = False): +def group_std_tpu(x, groups: int = 32, eps: float = 1e-5, diff_sqm: bool = False, flatten: bool = False): # This is a workaround for some stability / odd behaviour of .var and .std # running on PyTorch XLA w/ TPUs. These manual var impl are producing much better results B, C, H, W = x.shape _assert(C % groups == 0, '') - x_dtype = x.dtype - x = x.float().reshape(B, groups, C // groups, H, W) - xm = x.mean(dim=(2, 3, 4), keepdim=True) - if diff_sqm: - # difference of squared mean and mean squared, faster on TPU - var = (x.square().mean(dim=(2, 3, 4), keepdim=True) - xm.square()).clamp(0) + if flatten: + x = x.reshape(B, groups, -1) # FIXME simpler shape causing TPU / XLA issues + var = manual_var(x, dim=-1, diff_sqm=diff_sqm) else: - var = (x - xm).square().mean(dim=(2, 3, 4), keepdim=True) - return var.add(eps).sqrt().expand(x.shape).reshape(B, C, H, W).to(x_dtype) -# group_std = group_std_tpu # temporary, for TPU / PT XLA + x = x.reshape(B, groups, C // groups, H, W) + var = manual_var(x, dim=(2, 3, 4), diff_sqm=diff_sqm) + return var.add(eps).sqrt().expand(x.shape).reshape(B, C, H, W) +#group_std = group_std_tpu # FIXME TPU temporary def group_rms(x, groups: int = 32, eps: float = 1e-5): @@ -75,8 +87,8 @@ def group_rms(x, groups: int = 32, eps: float = 1e-5): _assert(C % groups == 0, '') x_dtype = x.dtype x = x.reshape(B, groups, C // groups, H, W) - sqm = x.square().mean(dim=(2, 3, 4), keepdim=True).add(eps).sqrt_().to(dtype=x_dtype) - return sqm.expand(x.shape).reshape(B, C, H, W) + rms = x.float().square().mean(dim=(2, 3, 4), keepdim=True).add(eps).sqrt_().to(dtype=x_dtype) + return rms.expand(x.shape).reshape(B, C, H, W) class EvoNorm2dB0(nn.Module): diff --git a/timm/models/layers/norm_act.py b/timm/models/layers/norm_act.py index 2e15181fdc..40bd57effb 100644 --- a/timm/models/layers/norm_act.py +++ b/timm/models/layers/norm_act.py @@ -66,6 +66,31 @@ def forward(self, x): return x +def group_norm_tpu(x, w, b, groups: int = 32, eps: float = 1e-5, diff_sqm: bool = False, flatten: bool = False): + # This is a workaround for some odd behaviour running on PyTorch XLA w/ TPUs. 
+ x_shape = x.shape + x_dtype = x.dtype + if flatten: + norm_shape = (x_shape[0], groups, -1) + reduce_dim = -1 + else: + norm_shape = (x_shape[0], groups, x_shape[1] // groups) + x_shape[2:] + reduce_dim = tuple(range(2, x.ndim + 1)) + affine_shape = (1, -1) + (1,) * (x.ndim - 2) + x = x.reshape(norm_shape) + # x = x.to(torch.float32) # for testing w/ AMP + xm = x.mean(dim=reduce_dim, keepdim=True) + if diff_sqm: + # difference of squared mean and mean squared, faster on TPU + var = (x.square().mean(dim=reduce_dim, keepdim=True) - xm.square()).clamp(0) + else: + var = (x - xm).square().mean(dim=reduce_dim, keepdim=True) + x = (x - xm.expand(norm_shape)) / var.add(eps).sqrt().expand(norm_shape) + x = x.reshape(x_shape) * w.view(affine_shape) + b.view(affine_shape) + # x = x.to(x_dtype) # for testing w/ AMP + return x + + class GroupNormAct(nn.GroupNorm): # NOTE num_channel and num_groups order flipped for easier layer swaps / binding of fixed args def __init__(self, num_channels, num_groups=32, eps=1e-5, affine=True, @@ -80,6 +105,9 @@ def __init__(self, num_channels, num_groups=32, eps=1e-5, affine=True, self.act = nn.Identity() def forward(self, x): - x = F.group_norm(x, self.num_groups, self.weight, self.bias, self.eps) + if False: # FIXME TPU temporary while resolving some performance issues + x = group_norm_tpu(x, self.weight, self.bias, self.num_groups, self.eps) + else: + x = F.group_norm(x, self.num_groups, self.weight, self.bias, self.eps) x = self.act(x) return x From 66daee4f3143cc2b4b0e86fa3f15133115ab4599 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Wed, 8 Dec 2021 14:14:48 -0800 Subject: [PATCH 34/61] Last change wasn't complete, missed adding full evo_norm changeset --- timm/models/layers/evo_norm.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/timm/models/layers/evo_norm.py b/timm/models/layers/evo_norm.py index 5032a527e9..cc0a691ad2 100644 --- a/timm/models/layers/evo_norm.py +++ b/timm/models/layers/evo_norm.py @@ -34,8 +34,14 @@ def instance_std(x, eps: float = 1e-5): - rms = x.float().var(dim=(2, 3), unbiased=False, keepdim=True).add(eps).sqrt().to(x.dtype) - return rms.expand(x.shape) + std = x.float().var(dim=(2, 3), unbiased=False, keepdim=True).add(eps).sqrt().to(x.dtype) + return std.expand(x.shape) + + +def instance_std_tpu(x, eps: float = 1e-5): + std = manual_var(x, dim=(2, 3)).add(eps).sqrt() + return std.expand(x.shape) +# instance_std = instance_std_tpu def instance_rms(x, eps: float = 1e-5): @@ -47,9 +53,9 @@ def manual_var(x, dim: Union[int, Sequence[int]], diff_sqm: bool = False): xm = x.mean(dim=dim, keepdim=True) if diff_sqm: # difference of squared mean and mean squared, faster on TPU can be less stable - var = (x.square().mean(dim=(2, 3, 4), keepdim=True) - xm.square()).clamp(0) + var = (x.square().mean(dim=dim, keepdim=True) - xm.square()).clamp(0) else: - var = (x - xm).square().mean(dim=(2, 3, 4), keepdim=True) + var = (x - xm).square().mean(dim=dim, keepdim=True) return var @@ -57,7 +63,6 @@ def group_std(x, groups: int = 32, eps: float = 1e-5, flatten: bool = False): B, C, H, W = x.shape x_dtype = x.dtype _assert(C % groups == 0, '') - torch.var() if flatten: x = x.reshape(B, groups, -1) # FIXME simpler shape causing TPU / XLA issues std = x.float().var(dim=2, unbiased=False, keepdim=True).add(eps).sqrt().to(x_dtype) @@ -116,6 +121,7 @@ def forward(self, x): if self.v is not None: if self.training: var = x.float().var(dim=(0, 2, 3), unbiased=False) + # var = manual_var(x, dim=(0, 2, 3)) n = x.numel() / 
x.shape[1] self.running_var.copy_( self.running_var * (1 - self.momentum) + From 88a5b54802b1ab88057744b3925490211621facd Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 10 Dec 2021 14:00:00 -0800 Subject: [PATCH 35/61] A few small evonorm tweaks for convergence comparisons --- timm/models/layers/evo_norm.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/timm/models/layers/evo_norm.py b/timm/models/layers/evo_norm.py index cc0a691ad2..d89aa42439 100644 --- a/timm/models/layers/evo_norm.py +++ b/timm/models/layers/evo_norm.py @@ -53,9 +53,9 @@ def manual_var(x, dim: Union[int, Sequence[int]], diff_sqm: bool = False): xm = x.mean(dim=dim, keepdim=True) if diff_sqm: # difference of squared mean and mean squared, faster on TPU can be less stable - var = (x.square().mean(dim=dim, keepdim=True) - xm.square()).clamp(0) + var = ((x * x).mean(dim=dim, keepdim=True) - (xm * xm)).clamp(0) else: - var = (x - xm).square().mean(dim=dim, keepdim=True) + var = ((x - xm) * (x - xm)).mean(dim=dim, keepdim=True) return var @@ -121,7 +121,7 @@ def forward(self, x): if self.v is not None: if self.training: var = x.float().var(dim=(0, 2, 3), unbiased=False) - # var = manual_var(x, dim=(0, 2, 3)) + # var = manual_var(x, dim=(0, 2, 3)).squeeze() n = x.numel() / x.shape[1] self.running_var.copy_( self.running_var * (1 - self.momentum) + From 4d7a5544f7f8596e1cdf4a72b08a5fb26070efbf Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 10 Dec 2021 14:01:09 -0800 Subject: [PATCH 36/61] Remove inplace sigmoid for consistency with other impl --- timm/models/layers/evo_norm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/timm/models/layers/evo_norm.py b/timm/models/layers/evo_norm.py index d89aa42439..f48d9a835c 100644 --- a/timm/models/layers/evo_norm.py +++ b/timm/models/layers/evo_norm.py @@ -248,7 +248,7 @@ def forward(self, x): d = group_std(x, self.groups, self.eps) if self.v is not None: v = self.v.view(v_shape).to(dtype=x_dtype) - x = x * (x * v).sigmoid_() + x = x * (x * v).sigmoid() x = x / d return x * self.weight.view(v_shape).to(dtype=x_dtype) + self.bias.view(v_shape).to(dtype=x_dtype) From 1f54a1fff7300565a3fde23c498065baafaa8c87 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 10 Dec 2021 17:10:13 -0800 Subject: [PATCH 37/61] Add C16 and E8 EvoNormS0 configs for RegNetZ BYOB nets --- timm/models/byobnet.py | 55 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) diff --git a/timm/models/byobnet.py b/timm/models/byobnet.py index 44f26e4e09..a2dc6153b4 100644 --- a/timm/models/byobnet.py +++ b/timm/models/byobnet.py @@ -159,12 +159,18 @@ def _cfgr(url='', **kwargs): mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), input_size=(3, 224, 224), pool_size=(7, 7), test_input_size=(3, 288, 288), first_conv='stem.conv', crop_pct=0.94), + 'regnetz_c16_evos': _cfgr( + url='', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), test_input_size=(3, 320, 320), crop_pct=0.95), 'regnetz_d8_evob': _cfgr( url='', mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), test_input_size=(3, 320, 320), crop_pct=0.95), 'regnetz_d8_evos': _cfgr( url='', mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), test_input_size=(3, 320, 320), crop_pct=0.95), + 'regnetz_e8_evos': _cfgr( + url='', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), test_input_size=(3, 320, 320), crop_pct=0.95), } @@ -621,6 +627,23 @@ def interleave_blocks( attn_kwargs=dict(rd_ratio=0.25), block_kwargs=dict(bottle_in=True, linear_out=True), ), + regnetz_c16_evos=ByoModelCfg( + blocks=( + 
ByoBlockCfg(type='bottle', d=2, c=48, s=2, gs=16, br=4), + ByoBlockCfg(type='bottle', d=6, c=96, s=2, gs=16, br=4), + ByoBlockCfg(type='bottle', d=12, c=192, s=2, gs=16, br=4), + ByoBlockCfg(type='bottle', d=2, c=288, s=2, gs=16, br=4), + ), + stem_chs=32, + stem_pool='', + downsample='', + num_features=1536, + act_layer='silu', + norm_layer=partial(EvoNorm2dS0a, group_size=16), + attn_layer='se', + attn_kwargs=dict(rd_ratio=0.25), + block_kwargs=dict(bottle_in=True, linear_out=True), + ), regnetz_d8_evob=ByoModelCfg( blocks=( ByoBlockCfg(type='bottle', d=3, c=64, s=1, gs=8, br=4), @@ -657,6 +680,24 @@ def interleave_blocks( attn_kwargs=dict(rd_ratio=0.25), block_kwargs=dict(bottle_in=True, linear_out=True), ), + regnetz_e8_evos=ByoModelCfg( + blocks=( + ByoBlockCfg(type='bottle', d=3, c=96, s=1, gs=8, br=4), + ByoBlockCfg(type='bottle', d=8, c=192, s=2, gs=8, br=4), + ByoBlockCfg(type='bottle', d=16, c=384, s=2, gs=8, br=4), + ByoBlockCfg(type='bottle', d=3, c=512, s=2, gs=8, br=4), + ), + stem_chs=64, + stem_type='deep', + stem_pool='', + downsample='', + num_features=2048, + act_layer='silu', + attn_layer='se', + attn_kwargs=dict(rd_ratio=0.25), + norm_layer=partial(EvoNorm2dS0a, group_size=16), + block_kwargs=dict(bottle_in=True, linear_out=True), + ), ) @register_model @@ -887,6 +928,13 @@ def regnetz_b16_evos(pretrained=False, **kwargs): return _create_byobnet('regnetz_b16_evos', pretrained=pretrained, **kwargs) +@register_model +def regnetz_c16_evos(pretrained=False, **kwargs): + """ + """ + return _create_byobnet('regnetz_c16_evos', pretrained=pretrained, **kwargs) + + @register_model def regnetz_d8_evob(pretrained=False, **kwargs): """ @@ -901,6 +949,13 @@ def regnetz_d8_evos(pretrained=False, **kwargs): return _create_byobnet('regnetz_d8_evos', pretrained=pretrained, **kwargs) +@register_model +def regnetz_e8_evos(pretrained=False, **kwargs): + """ + """ + return _create_byobnet('regnetz_e8_evos', pretrained=pretrained, **kwargs) + + def expand_blocks_cfg(stage_blocks_cfg: Union[ByoBlockCfg, Sequence[ByoBlockCfg]]) -> List[ByoBlockCfg]: if not isinstance(stage_blocks_cfg, Sequence): stage_blocks_cfg = (stage_blocks_cfg,) From 57fca2b5b23c0738280dbc7e4045887d7e4fa4a4 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 10 Dec 2021 20:23:17 -0800 Subject: [PATCH 38/61] Fix c16_evos stem / first conv setup --- timm/models/byobnet.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/timm/models/byobnet.py b/timm/models/byobnet.py index a2dc6153b4..d6e50cc865 100644 --- a/timm/models/byobnet.py +++ b/timm/models/byobnet.py @@ -157,11 +157,10 @@ def _cfgr(url='', **kwargs): 'regnetz_b16_evos': _cfgr( url='', mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), - input_size=(3, 224, 224), pool_size=(7, 7), test_input_size=(3, 288, 288), first_conv='stem.conv', - crop_pct=0.94), + input_size=(3, 224, 224), pool_size=(7, 7), test_input_size=(3, 288, 288), first_conv='stem.conv', crop_pct=0.94), 'regnetz_c16_evos': _cfgr( url='', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), test_input_size=(3, 320, 320), crop_pct=0.95), + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), test_input_size=(3, 320, 320), first_conv='stem.conv', crop_pct=0.95), 'regnetz_d8_evob': _cfgr( url='', mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), test_input_size=(3, 320, 320), crop_pct=0.95), From d829858550a89f043be63762511a8321c0ca3dad Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Tue, 14 Dec 2021 13:48:30 -0800 Subject: [PATCH 39/61] Significant norm update * ConvBnAct layer renamed -> ConvNormAct and 
ConvNormActAa for anti-aliased * Significant update to EfficientNet and MobileNetV3 arch to support NormAct layers and grouped conv (as alternative to depthwise) * Update RegNet to add Z variant * Add Pre variant of XceptionAligned that works with NormAct layers * EvoNorm matches bits_and_tpu branch for merge --- timm/models/byobnet.py | 27 +-- timm/models/cspnet.py | 39 +-- timm/models/densenet.py | 4 +- timm/models/dpn.py | 4 +- timm/models/efficientnet.py | 98 ++++++-- timm/models/efficientnet_blocks.py | 180 ++++++-------- timm/models/efficientnet_builder.py | 66 ++++-- timm/models/layers/__init__.py | 6 +- timm/models/layers/cbam.py | 6 +- timm/models/layers/conv_bn_act.py | 55 ++++- timm/models/layers/create_conv2d.py | 7 +- timm/models/layers/create_norm_act.py | 31 ++- timm/models/layers/drop.py | 23 +- timm/models/layers/inplace_abn.py | 4 +- timm/models/layers/non_local_attn.py | 10 +- timm/models/layers/norm_act.py | 111 +++++++-- timm/models/layers/pooled_attn.py | 143 +++++++++++ timm/models/layers/selective_kernel.py | 17 +- timm/models/layers/separable_conv.py | 19 +- timm/models/layers/split_attn.py | 7 +- timm/models/mobilenetv3.py | 9 +- timm/models/nasnet.py | 4 +- timm/models/pnasnet.py | 4 +- timm/models/regnet.py | 313 +++++++++++++++---------- timm/models/resnest.py | 12 +- timm/models/resnet.py | 32 +-- timm/models/rexnet.py | 12 +- timm/models/sknet.py | 27 +-- timm/models/vovnet.py | 20 +- timm/models/xception_aligned.py | 128 +++++++--- 30 files changed, 916 insertions(+), 502 deletions(-) create mode 100644 timm/models/layers/pooled_attn.py diff --git a/timm/models/byobnet.py b/timm/models/byobnet.py index d6e50cc865..233b99f708 100644 --- a/timm/models/byobnet.py +++ b/timm/models/byobnet.py @@ -34,8 +34,8 @@ from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD from .helpers import build_model_with_cfg, named_apply -from .layers import ClassifierHead, ConvBnAct, BatchNormAct2d, DropPath, AvgPool2dSame, \ - create_conv2d, get_act_layer, convert_norm_act, get_attn, make_divisible, to_2tuple, EvoNorm2dS0, EvoNorm2dS0a,\ +from .layers import ClassifierHead, ConvNormAct, BatchNormAct2d, DropPath, AvgPool2dSame, \ + create_conv2d, get_act_layer, get_norm_act_layer, get_attn, make_divisible, to_2tuple, EvoNorm2dS0, EvoNorm2dS0a,\ EvoNorm2dS1, EvoNorm2dS1a, EvoNorm2dS2, EvoNorm2dS2a, FilterResponseNormAct2d, FilterResponseNormTlu2d from .registry import register_model @@ -975,7 +975,7 @@ def num_groups(group_size, channels): @dataclass class LayerFn: - conv_norm_act: Callable = ConvBnAct + conv_norm_act: Callable = ConvNormAct norm_act: Callable = BatchNormAct2d act: Callable = nn.ReLU attn: Optional[Callable] = None @@ -1032,7 +1032,7 @@ def __init__( self.conv1_kxk = layers.conv_norm_act(in_chs, mid_chs, kernel_size, stride=stride, dilation=dilation[0]) self.attn = nn.Identity() if attn_last or layers.attn is None else layers.attn(mid_chs) self.conv2_kxk = layers.conv_norm_act( - mid_chs, out_chs, kernel_size, dilation=dilation[1], groups=groups, drop_block=drop_block, apply_act=False) + mid_chs, out_chs, kernel_size, dilation=dilation[1], groups=groups, drop_layer=drop_block, apply_act=False) self.attn_last = nn.Identity() if not attn_last or layers.attn is None else layers.attn(out_chs) self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0. 
else nn.Identity() self.act = nn.Identity() if linear_out else layers.act(inplace=True) @@ -1073,11 +1073,9 @@ def __init__(self, in_chs, out_chs, kernel_size=3, stride=1, dilation=(1, 1), bo self.conv1_1x1 = layers.conv_norm_act(in_chs, mid_chs, 1) self.conv2_kxk = layers.conv_norm_act( - mid_chs, mid_chs, kernel_size, stride=stride, dilation=dilation[0], - groups=groups, drop_block=drop_block) + mid_chs, mid_chs, kernel_size, stride=stride, dilation=dilation[0], groups=groups, drop_layer=drop_block) if extra_conv: - self.conv2b_kxk = layers.conv_norm_act( - mid_chs, mid_chs, kernel_size, dilation=dilation[1], groups=groups, drop_block=drop_block) + self.conv2b_kxk = layers.conv_norm_act(mid_chs, mid_chs, kernel_size, dilation=dilation[1], groups=groups) else: self.conv2b_kxk = nn.Identity() self.attn = nn.Identity() if attn_last or layers.attn is None else layers.attn(mid_chs) @@ -1134,7 +1132,7 @@ def __init__(self, in_chs, out_chs, kernel_size=3, stride=1, dilation=(1, 1), bo self.attn = nn.Identity() if attn_last or layers.attn is None else layers.attn(mid_chs) self.conv2_kxk = layers.conv_norm_act( mid_chs, out_chs, kernel_size, stride=stride, dilation=dilation[0], - groups=groups, drop_block=drop_block, apply_act=False) + groups=groups, drop_layer=drop_block, apply_act=False) self.attn_last = nn.Identity() if not attn_last or layers.attn is None else layers.attn(out_chs) self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0. else nn.Identity() self.act = nn.Identity() if linear_out else layers.act(inplace=True) @@ -1181,8 +1179,7 @@ def __init__(self, in_chs, out_chs, kernel_size=3, stride=1, dilation=(1, 1), bo apply_act=False, layers=layers) self.conv1_kxk = layers.conv_norm_act( - in_chs, mid_chs, kernel_size, stride=stride, dilation=dilation[0], - groups=groups, drop_block=drop_block) + in_chs, mid_chs, kernel_size, stride=stride, dilation=dilation[0], groups=groups, drop_layer=drop_block) self.attn = nn.Identity() if attn_last or layers.attn is None else layers.attn(mid_chs) self.conv2_1x1 = layers.conv_norm_act(mid_chs, out_chs, 1, apply_act=False) self.attn_last = nn.Identity() if not attn_last or layers.attn is None else layers.attn(out_chs) @@ -1226,7 +1223,7 @@ def __init__(self, in_chs, out_chs, kernel_size=3, stride=1, dilation=(1, 1), bo self.identity = layers.norm_act(out_chs, apply_act=False) if use_ident else None self.conv_kxk = layers.conv_norm_act( in_chs, out_chs, kernel_size, stride=stride, dilation=dilation[0], - groups=groups, drop_block=drop_block, apply_act=False) + groups=groups, drop_layer=drop_block, apply_act=False) self.conv_1x1 = layers.conv_norm_act(in_chs, out_chs, 1, stride=stride, groups=groups, apply_act=False) self.attn = nn.Identity() if layers.attn is None else layers.attn(out_chs) self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0. 
and use_ident else nn.Identity() @@ -1273,7 +1270,7 @@ def __init__(self, in_chs, out_chs, kernel_size=3, stride=1, dilation=(1, 1), bo if extra_conv: self.conv2_kxk = layers.conv_norm_act( mid_chs, mid_chs, kernel_size, stride=stride, dilation=dilation[0], - groups=groups, drop_block=drop_block) + groups=groups, drop_layer=drop_block) stride = 1 # striding done via conv if enabled else: self.conv2_kxk = nn.Identity() @@ -1520,8 +1517,8 @@ def create_byob_stages( def get_layer_fns(cfg: ByoModelCfg): act = get_act_layer(cfg.act_layer) - norm_act = convert_norm_act(norm_layer=cfg.norm_layer, act_layer=act) - conv_norm_act = partial(ConvBnAct, norm_layer=cfg.norm_layer, act_layer=act) + norm_act = get_norm_act_layer(norm_layer=cfg.norm_layer, act_layer=act) + conv_norm_act = partial(ConvNormAct, norm_layer=cfg.norm_layer, act_layer=act) attn = partial(get_attn(cfg.attn_layer), **cfg.attn_kwargs) if cfg.attn_layer else None self_attn = partial(get_attn(cfg.self_attn_layer), **cfg.self_attn_kwargs) if cfg.self_attn_layer else None layer_fn = LayerFn(conv_norm_act=conv_norm_act, norm_act=norm_act, act=act, attn=attn, self_attn=self_attn) diff --git a/timm/models/cspnet.py b/timm/models/cspnet.py index 39d16200f8..aa57bd88ad 100644 --- a/timm/models/cspnet.py +++ b/timm/models/cspnet.py @@ -14,11 +14,10 @@ """ import torch import torch.nn as nn -import torch.nn.functional as F from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD from .helpers import build_model_with_cfg -from .layers import ClassifierHead, ConvBnAct, DropPath, create_attn, get_norm_act_layer +from .layers import ClassifierHead, ConvNormAct, ConvNormActAa, DropPath, create_attn, get_norm_act_layer from .registry import register_model @@ -130,7 +129,7 @@ def _cfg(url='', **kwargs): def create_stem( in_chans=3, out_chs=32, kernel_size=3, stride=2, pool='', - act_layer=None, norm_layer=None, aa_layer=None): + act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, aa_layer=None): stem = nn.Sequential() if not isinstance(out_chs, (tuple, list)): out_chs = [out_chs] @@ -138,7 +137,7 @@ def create_stem( in_c = in_chans for i, out_c in enumerate(out_chs): conv_name = f'conv{i + 1}' - stem.add_module(conv_name, ConvBnAct( + stem.add_module(conv_name, ConvNormAct( in_c, out_c, kernel_size, stride=stride if i == 0 else 1, act_layer=act_layer, norm_layer=norm_layer)) in_c = out_c @@ -161,12 +160,14 @@ def __init__(self, in_chs, out_chs, dilation=1, bottle_ratio=0.25, groups=1, attn_layer=None, aa_layer=None, drop_block=None, drop_path=None): super(ResBottleneck, self).__init__() mid_chs = int(round(out_chs * bottle_ratio)) - ckwargs = dict(act_layer=act_layer, norm_layer=norm_layer, aa_layer=aa_layer, drop_block=drop_block) + ckwargs = dict(act_layer=act_layer, norm_layer=norm_layer) - self.conv1 = ConvBnAct(in_chs, mid_chs, kernel_size=1, **ckwargs) - self.conv2 = ConvBnAct(mid_chs, mid_chs, kernel_size=3, dilation=dilation, groups=groups, **ckwargs) + self.conv1 = ConvNormAct(in_chs, mid_chs, kernel_size=1, **ckwargs) + self.conv2 = ConvNormActAa( + mid_chs, mid_chs, kernel_size=3, dilation=dilation, groups=groups, + aa_layer=aa_layer, drop_layer=drop_block, **ckwargs) self.attn2 = create_attn(attn_layer, channels=mid_chs) if not attn_last else None - self.conv3 = ConvBnAct(mid_chs, out_chs, kernel_size=1, apply_act=False, **ckwargs) + self.conv3 = ConvNormAct(mid_chs, out_chs, kernel_size=1, apply_act=False, **ckwargs) self.attn3 = create_attn(attn_layer, channels=out_chs) if attn_last else None self.drop_path = drop_path self.act3 = 
act_layer(inplace=True) @@ -201,9 +202,11 @@ def __init__(self, in_chs, out_chs, dilation=1, bottle_ratio=0.5, groups=1, drop_block=None, drop_path=None): super(DarkBlock, self).__init__() mid_chs = int(round(out_chs * bottle_ratio)) - ckwargs = dict(act_layer=act_layer, norm_layer=norm_layer, aa_layer=aa_layer, drop_block=drop_block) - self.conv1 = ConvBnAct(in_chs, mid_chs, kernel_size=1, **ckwargs) - self.conv2 = ConvBnAct(mid_chs, out_chs, kernel_size=3, dilation=dilation, groups=groups, **ckwargs) + ckwargs = dict(act_layer=act_layer, norm_layer=norm_layer) + self.conv1 = ConvNormAct(in_chs, mid_chs, kernel_size=1, **ckwargs) + self.conv2 = ConvNormActAa( + mid_chs, out_chs, kernel_size=3, dilation=dilation, groups=groups, + aa_layer=aa_layer, drop_layer=drop_block, **ckwargs) self.attn = create_attn(attn_layer, channels=out_chs) self.drop_path = drop_path @@ -235,7 +238,7 @@ def __init__(self, in_chs, out_chs, stride, dilation, depth, block_ratio=1., bot conv_kwargs = dict(act_layer=block_kwargs.get('act_layer'), norm_layer=block_kwargs.get('norm_layer')) if stride != 1 or first_dilation != dilation: - self.conv_down = ConvBnAct( + self.conv_down = ConvNormActAa( in_chs, down_chs, kernel_size=3, stride=stride, dilation=first_dilation, groups=groups, aa_layer=block_kwargs.get('aa_layer', None), **conv_kwargs) prev_chs = down_chs @@ -246,7 +249,7 @@ def __init__(self, in_chs, out_chs, stride, dilation, depth, block_ratio=1., bot # FIXME this 1x1 expansion is pushed down into the cross and block paths in the darknet cfgs. Also, # there is also special case for the first stage for some of the model that results in uneven split # across the two paths. I did it this way for simplicity for now. - self.conv_exp = ConvBnAct(prev_chs, exp_chs, kernel_size=1, apply_act=not cross_linear, **conv_kwargs) + self.conv_exp = ConvNormAct(prev_chs, exp_chs, kernel_size=1, apply_act=not cross_linear, **conv_kwargs) prev_chs = exp_chs // 2 # output of conv_exp is always split in two self.blocks = nn.Sequential() @@ -257,8 +260,8 @@ def __init__(self, in_chs, out_chs, stride, dilation, depth, block_ratio=1., bot prev_chs = block_out_chs # transition convs - self.conv_transition_b = ConvBnAct(prev_chs, exp_chs // 2, kernel_size=1, **conv_kwargs) - self.conv_transition = ConvBnAct(exp_chs, out_chs, kernel_size=1, **conv_kwargs) + self.conv_transition_b = ConvNormAct(prev_chs, exp_chs // 2, kernel_size=1, **conv_kwargs) + self.conv_transition = ConvNormAct(exp_chs, out_chs, kernel_size=1, **conv_kwargs) def forward(self, x): if self.conv_down is not None: @@ -280,7 +283,7 @@ def __init__(self, in_chs, out_chs, stride, dilation, depth, block_ratio=1., bot super(DarkStage, self).__init__() first_dilation = first_dilation or dilation - self.conv_down = ConvBnAct( + self.conv_down = ConvNormActAa( in_chs, out_chs, kernel_size=3, stride=stride, dilation=first_dilation, groups=groups, act_layer=block_kwargs.get('act_layer'), norm_layer=block_kwargs.get('norm_layer'), aa_layer=block_kwargs.get('aa_layer', None)) @@ -437,7 +440,7 @@ def cspresnext50(pretrained=False, **kwargs): @register_model def cspresnext50_iabn(pretrained=False, **kwargs): - norm_layer = get_norm_act_layer('iabn') + norm_layer = get_norm_act_layer('iabn', act_layer='leaky_relu') return _create_cspnet('cspresnext50_iabn', pretrained=pretrained, norm_layer=norm_layer, **kwargs) @@ -448,7 +451,7 @@ def cspdarknet53(pretrained=False, **kwargs): @register_model def cspdarknet53_iabn(pretrained=False, **kwargs): - norm_layer = 
get_norm_act_layer('iabn') + norm_layer = get_norm_act_layer('iabn', act_layer='leaky_relu') return _create_cspnet('cspdarknet53_iabn', pretrained=pretrained, block_fn=DarkBlock, norm_layer=norm_layer, **kwargs) diff --git a/timm/models/densenet.py b/timm/models/densenet.py index 38a1972787..7be15f4949 100644 --- a/timm/models/densenet.py +++ b/timm/models/densenet.py @@ -14,7 +14,7 @@ from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD from .helpers import build_model_with_cfg -from .layers import BatchNormAct2d, create_norm_act, BlurPool2d, create_classifier +from .layers import BatchNormAct2d, create_norm_act_layer, BlurPool2d, create_classifier from .registry import register_model __all__ = ['DenseNet'] @@ -370,7 +370,7 @@ def densenet264d_iabn(pretrained=False, **kwargs): r"""Densenet-264 model with deep stem and Inplace-ABN """ def norm_act_fn(num_features, **kwargs): - return create_norm_act('iabn', num_features, **kwargs) + return create_norm_act_layer('iabn', num_features, act_layer='leaky_relu', **kwargs) model = _create_densenet( 'densenet264d_iabn', growth_rate=48, block_config=(6, 12, 64, 48), stem_type='deep', norm_layer=norm_act_fn, pretrained=pretrained, **kwargs) diff --git a/timm/models/dpn.py b/timm/models/dpn.py index c4e380b1e3..07e4a128c2 100644 --- a/timm/models/dpn.py +++ b/timm/models/dpn.py @@ -16,7 +16,7 @@ from timm.data import IMAGENET_DPN_MEAN, IMAGENET_DPN_STD, IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD from .helpers import build_model_with_cfg -from .layers import BatchNormAct2d, ConvBnAct, create_conv2d, create_classifier +from .layers import BatchNormAct2d, ConvNormAct, create_conv2d, create_classifier from .registry import register_model __all__ = ['DPN'] @@ -180,7 +180,7 @@ def __init__(self, small=False, num_init_features=64, k_r=96, groups=32, blocks = OrderedDict() # conv1 - blocks['conv1_1'] = ConvBnAct( + blocks['conv1_1'] = ConvNormAct( in_chans, num_init_features, kernel_size=3 if small else 7, stride=2, norm_layer=norm_layer) blocks['conv1_pool'] = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.feature_info = [dict(num_chs=num_init_features, reduction=2, module='features.conv1_1')] diff --git a/timm/models/efficientnet.py b/timm/models/efficientnet.py index 3d50b704cd..b38b3c0e71 100644 --- a/timm/models/efficientnet.py +++ b/timm/models/efficientnet.py @@ -45,7 +45,7 @@ round_channels, resolve_bn_args, resolve_act_layer, BN_EPS_TF_DEFAULT from .features import FeatureInfo, FeatureHooks from .helpers import build_model_with_cfg, default_cfg_for_features -from .layers import create_conv2d, create_classifier +from .layers import create_conv2d, create_classifier, get_norm_act_layer, EvoNorm2dS0, GroupNormAct from .registry import register_model __all__ = ['EfficientNet', 'EfficientNetFeatures'] @@ -117,6 +117,20 @@ def _cfg(url='', **kwargs): 'efficientnet_l2': _cfg( url='', input_size=(3, 800, 800), pool_size=(25, 25), crop_pct=0.961), + # FIXME experimental + 'efficientnet_b0_gn': _cfg( + url=''), + 'efficientnet_b0_g8': _cfg( + url=''), + 'efficientnet_b0_g16_evos': _cfg( + url=''), + 'efficientnet_b3_gn': _cfg( + url='', + input_size=(3, 288, 288), pool_size=(9, 9), test_input_size=(3, 320, 320), crop_pct=1.0), + 'efficientnet_b3_g8_gn': _cfg( + url='', + input_size=(3, 288, 288), pool_size=(9, 9), test_input_size=(3, 320, 320), crop_pct=1.0), + 'efficientnet_es': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_es_ra-f111e99c.pth'), 'efficientnet_em': _cfg( @@ -431,6 
+445,7 @@ def __init__(self, block_args, num_classes=1000, num_features=1280, in_chans=3, super(EfficientNet, self).__init__() act_layer = act_layer or nn.ReLU norm_layer = norm_layer or nn.BatchNorm2d + norm_act_layer = get_norm_act_layer(norm_layer, act_layer) se_layer = se_layer or SqueezeExcite self.num_classes = num_classes self.num_features = num_features @@ -440,8 +455,7 @@ def __init__(self, block_args, num_classes=1000, num_features=1280, in_chans=3, if not fix_stem: stem_size = round_chs_fn(stem_size) self.conv_stem = create_conv2d(in_chans, stem_size, 3, stride=2, padding=pad_type) - self.bn1 = norm_layer(stem_size) - self.act1 = act_layer(inplace=True) + self.bn1 = norm_act_layer(stem_size, inplace=True) # Middle stages (IR/ER/DS Blocks) builder = EfficientNetBuilder( @@ -453,17 +467,16 @@ def __init__(self, block_args, num_classes=1000, num_features=1280, in_chans=3, # Head + Pooling self.conv_head = create_conv2d(head_chs, self.num_features, 1, padding=pad_type) - self.bn2 = norm_layer(self.num_features) - self.act2 = act_layer(inplace=True) + self.bn2 = norm_act_layer(self.num_features, inplace=True) self.global_pool, self.classifier = create_classifier( self.num_features, self.num_classes, pool_type=global_pool) efficientnet_init_weights(self) def as_sequential(self): - layers = [self.conv_stem, self.bn1, self.act1] + layers = [self.conv_stem, self.bn1] layers.extend(self.blocks) - layers.extend([self.conv_head, self.bn2, self.act2, self.global_pool]) + layers.extend([self.conv_head, self.bn2, self.global_pool]) layers.extend([nn.Dropout(self.drop_rate), self.classifier]) return nn.Sequential(*layers) @@ -478,11 +491,9 @@ def reset_classifier(self, num_classes, global_pool='avg'): def forward_features(self, x): x = self.conv_stem(x) x = self.bn1(x) - x = self.act1(x) x = self.blocks(x) x = self.conv_head(x) x = self.bn2(x) - x = self.act2(x) return x def forward(self, x): @@ -506,6 +517,7 @@ def __init__(self, block_args, out_indices=(0, 1, 2, 3, 4), feature_location='bo super(EfficientNetFeatures, self).__init__() act_layer = act_layer or nn.ReLU norm_layer = norm_layer or nn.BatchNorm2d + norm_act_layer = get_norm_act_layer(norm_layer, act_layer) se_layer = se_layer or SqueezeExcite self.drop_rate = drop_rate @@ -513,8 +525,7 @@ def __init__(self, block_args, out_indices=(0, 1, 2, 3, 4), feature_location='bo if not fix_stem: stem_size = round_chs_fn(stem_size) self.conv_stem = create_conv2d(in_chans, stem_size, 3, stride=2, padding=pad_type) - self.bn1 = norm_layer(stem_size) - self.act1 = act_layer(inplace=True) + self.bn1 = norm_act_layer(stem_size, inplace=True) # Middle stages (IR/ER/DS Blocks) builder = EfficientNetBuilder( @@ -536,7 +547,6 @@ def __init__(self, block_args, out_indices=(0, 1, 2, 3, 4), feature_location='bo def forward(self, x) -> List[torch.Tensor]: x = self.conv_stem(x) x = self.bn1(x) - x = self.act1(x) if self.feature_hooks is None: features = [] if 0 in self._stage_out_idx: @@ -767,7 +777,9 @@ def _gen_spnasnet(variant, channel_multiplier=1.0, pretrained=False, **kwargs): return model -def _gen_efficientnet(variant, channel_multiplier=1.0, depth_multiplier=1.0, pretrained=False, **kwargs): +def _gen_efficientnet( + variant, channel_multiplier=1.0, depth_multiplier=1.0, channel_divisor=8, + group_size=None, pretrained=False, **kwargs): """Creates an EfficientNet model. 
Ref impl: https://github.com/tensorflow/tpu/blob/master/models/official/efficientnet/efficientnet_model.py @@ -800,9 +812,9 @@ def _gen_efficientnet(variant, channel_multiplier=1.0, depth_multiplier=1.0, pre ['ir_r4_k5_s2_e6_c192_se0.25'], ['ir_r1_k3_s1_e6_c320_se0.25'], ] - round_chs_fn = partial(round_channels, multiplier=channel_multiplier) + round_chs_fn = partial(round_channels, multiplier=channel_multiplier, divisor=channel_divisor) model_kwargs = dict( - block_args=decode_arch_def(arch_def, depth_multiplier), + block_args=decode_arch_def(arch_def, depth_multiplier, group_size=group_size), num_features=round_chs_fn(1280), stem_size=32, round_chs_fn=round_chs_fn, @@ -814,7 +826,8 @@ def _gen_efficientnet(variant, channel_multiplier=1.0, depth_multiplier=1.0, pre return model -def _gen_efficientnet_edge(variant, channel_multiplier=1.0, depth_multiplier=1.0, pretrained=False, **kwargs): +def _gen_efficientnet_edge( + variant, channel_multiplier=1.0, depth_multiplier=1.0, group_size=None, pretrained=False, **kwargs): """ Creates an EfficientNet-EdgeTPU model Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/efficientnet/edgetpu @@ -832,7 +845,7 @@ def _gen_efficientnet_edge(variant, channel_multiplier=1.0, depth_multiplier=1.0 ] round_chs_fn = partial(round_channels, multiplier=channel_multiplier) model_kwargs = dict( - block_args=decode_arch_def(arch_def, depth_multiplier), + block_args=decode_arch_def(arch_def, depth_multiplier, group_size=group_size), num_features=round_chs_fn(1280), stem_size=32, round_chs_fn=round_chs_fn, @@ -946,7 +959,7 @@ def _gen_efficientnetv2_base( def _gen_efficientnetv2_s( - variant, channel_multiplier=1.0, depth_multiplier=1.0, rw=False, pretrained=False, **kwargs): + variant, channel_multiplier=1.0, depth_multiplier=1.0, group_size=None, rw=False, pretrained=False, **kwargs): """ Creates an EfficientNet-V2 Small model Ref impl: https://github.com/google/automl/tree/master/efficientnetv2 @@ -972,7 +985,7 @@ def _gen_efficientnetv2_s( round_chs_fn = partial(round_channels, multiplier=channel_multiplier) model_kwargs = dict( - block_args=decode_arch_def(arch_def, depth_multiplier), + block_args=decode_arch_def(arch_def, depth_multiplier, group_size=group_size), num_features=round_chs_fn(num_features), stem_size=24, round_chs_fn=round_chs_fn, @@ -1366,6 +1379,52 @@ def efficientnet_l2(pretrained=False, **kwargs): return model +# FIXME experimental group cong / GroupNorm / EvoNorm experiments +@register_model +def efficientnet_b0_gn(pretrained=False, **kwargs): + """ EfficientNet-B0 + GroupNorm""" + model = _gen_efficientnet( + 'efficientnet_b0_gn', norm_layer=partial(GroupNormAct, group_size=8), pretrained=pretrained, **kwargs) + return model + + +@register_model +def efficientnet_b0_g8(pretrained=False, **kwargs): + """ EfficientNet-B0 w/ group conv + BN""" + model = _gen_efficientnet( + 'efficientnet_b0_g8', group_size=8, pretrained=pretrained, **kwargs) + return model + + +@register_model +def efficientnet_b0_g16_evos(pretrained=False, **kwargs): + """ EfficientNet-B0 w/ group 16 conv + EvoNorm""" + model = _gen_efficientnet( + 'efficientnet_b0_g16_evos', group_size=16, channel_divisor=16, + norm_layer=partial(EvoNorm2dS0, group_size=16), pretrained=pretrained, **kwargs) + return model + + +@register_model +def efficientnet_b3_gn(pretrained=False, **kwargs): + """ EfficientNet-B3 w/ GroupNorm """ + # NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2 + model = _gen_efficientnet( + 'efficientnet_b3_gn', 
channel_multiplier=1.2, depth_multiplier=1.4, channel_divisor=16, + norm_layer=partial(GroupNormAct, group_size=16), pretrained=pretrained, **kwargs) + return model + + +@register_model +def efficientnet_b3_g8_gn(pretrained=False, **kwargs): + """ EfficientNet-B3 w/ grouped conv + BN""" + # NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2 + model = _gen_efficientnet( + 'efficientnet_b3_g8_gn', channel_multiplier=1.2, depth_multiplier=1.4, group_size=8, channel_divisor=16, + norm_layer=partial(GroupNormAct, group_size=16), pretrained=pretrained, **kwargs) + return model + + @register_model def efficientnet_es(pretrained=False, **kwargs): """ EfficientNet-Edge Small. """ @@ -1373,6 +1432,7 @@ def efficientnet_es(pretrained=False, **kwargs): 'efficientnet_es', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs) return model + @register_model def efficientnet_es_pruned(pretrained=False, **kwargs): """ EfficientNet-Edge Small Pruned. For more info: https://github.com/DeGirum/pruned-models/releases/tag/efficientnet_v1.0""" diff --git a/timm/models/efficientnet_blocks.py b/timm/models/efficientnet_blocks.py index b1fec449c4..0e91319b9e 100644 --- a/timm/models/efficientnet_blocks.py +++ b/timm/models/efficientnet_blocks.py @@ -2,18 +2,31 @@ Hacked together by / Copyright 2020 Ross Wightman """ +import math import torch import torch.nn as nn from torch.nn import functional as F -from .layers import create_conv2d, drop_path, make_divisible, create_act_layer -from .layers.activations import sigmoid +from .layers import create_conv2d, DropPath, make_divisible, create_act_layer, get_norm_act_layer __all__ = [ 'SqueezeExcite', 'ConvBnAct', 'DepthwiseSeparableConv', 'InvertedResidual', 'CondConvResidual', 'EdgeResidual'] +def num_groups(group_size, channels): + if not group_size: # 0 or None + return 1 # normal conv with 1 group + else: + # NOTE group_size == 1 -> depthwise conv + #assert channels % group_size == 0 + if channels % group_size != 0: + num_groups = math.floor(channels / group_size) + print(channels, group_size, num_groups) + return int(num_groups) + return channels // group_size + + class SqueezeExcite(nn.Module): """ Squeeze-and-Excitation w/ specific features for EfficientNet/MobileNet family @@ -51,31 +64,30 @@ class ConvBnAct(nn.Module): """ Conv + Norm Layer + Activation w/ optional skip connection """ def __init__( - self, in_chs, out_chs, kernel_size, stride=1, dilation=1, pad_type='', + self, in_chs, out_chs, kernel_size, stride=1, dilation=1, group_size=0, pad_type='', skip=False, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, drop_path_rate=0.): super(ConvBnAct, self).__init__() - self.has_residual = skip and stride == 1 and in_chs == out_chs - self.drop_path_rate = drop_path_rate - self.conv = create_conv2d(in_chs, out_chs, kernel_size, stride=stride, dilation=dilation, padding=pad_type) - self.bn1 = norm_layer(out_chs) - self.act1 = act_layer(inplace=True) + norm_act_layer = get_norm_act_layer(norm_layer, act_layer) + groups = num_groups(group_size, in_chs) + self.has_skip = skip and stride == 1 and in_chs == out_chs + + self.conv = create_conv2d( + in_chs, out_chs, kernel_size, stride=stride, dilation=dilation, groups=groups, padding=pad_type) + self.bn1 = norm_act_layer(out_chs, inplace=True) + self.drop_path = DropPath(drop_path_rate) if drop_path_rate else nn.Identity() def feature_info(self, location): if location == 'expansion': # output of conv after act, same as block coutput - info = dict(module='act1', 
hook_type='forward', num_chs=self.conv.out_channels) + return dict(module='bn1', hook_type='forward', num_chs=self.conv.out_channels) else: # location == 'bottleneck', block output - info = dict(module='', hook_type='', num_chs=self.conv.out_channels) - return info + return dict(module='', hook_type='', num_chs=self.conv.out_channels) def forward(self, x): shortcut = x x = self.conv(x) x = self.bn1(x) - x = self.act1(x) - if self.has_residual: - if self.drop_path_rate > 0.: - x = drop_path(x, self.drop_path_rate, self.training) - x += shortcut + if self.has_skip: + x = x + self.drop_path(shortcut) return x @@ -85,50 +97,41 @@ class DepthwiseSeparableConv(nn.Module): (factor of 1.0). This is an alternative to having a IR with an optional first pw conv. """ def __init__( - self, in_chs, out_chs, dw_kernel_size=3, stride=1, dilation=1, pad_type='', + self, in_chs, out_chs, dw_kernel_size=3, stride=1, dilation=1, group_size=1, pad_type='', noskip=False, pw_kernel_size=1, pw_act=False, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, se_layer=None, drop_path_rate=0.): super(DepthwiseSeparableConv, self).__init__() - self.has_residual = (stride == 1 and in_chs == out_chs) and not noskip + norm_act_layer = get_norm_act_layer(norm_layer, act_layer) + groups = num_groups(group_size, in_chs) + self.has_skip = (stride == 1 and in_chs == out_chs) and not noskip self.has_pw_act = pw_act # activation after point-wise conv - self.drop_path_rate = drop_path_rate self.conv_dw = create_conv2d( - in_chs, in_chs, dw_kernel_size, stride=stride, dilation=dilation, padding=pad_type, depthwise=True) - self.bn1 = norm_layer(in_chs) - self.act1 = act_layer(inplace=True) + in_chs, in_chs, dw_kernel_size, stride=stride, dilation=dilation, padding=pad_type, groups=groups) + self.bn1 = norm_act_layer(in_chs, inplace=True) # Squeeze-and-excitation self.se = se_layer(in_chs, act_layer=act_layer) if se_layer else nn.Identity() self.conv_pw = create_conv2d(in_chs, out_chs, pw_kernel_size, padding=pad_type) - self.bn2 = norm_layer(out_chs) - self.act2 = act_layer(inplace=True) if self.has_pw_act else nn.Identity() + self.bn2 = norm_act_layer(out_chs, inplace=True, apply_act=self.has_pw_act) + self.drop_path = DropPath(drop_path_rate) if drop_path_rate else nn.Identity() def feature_info(self, location): if location == 'expansion': # after SE, input to PW - info = dict(module='conv_pw', hook_type='forward_pre', num_chs=self.conv_pw.in_channels) + return dict(module='conv_pw', hook_type='forward_pre', num_chs=self.conv_pw.in_channels) else: # location == 'bottleneck', block output - info = dict(module='', hook_type='', num_chs=self.conv_pw.out_channels) - return info + return dict(module='', hook_type='', num_chs=self.conv_pw.out_channels) def forward(self, x): shortcut = x - x = self.conv_dw(x) x = self.bn1(x) - x = self.act1(x) - x = self.se(x) - x = self.conv_pw(x) x = self.bn2(x) - x = self.act2(x) - - if self.has_residual: - if self.drop_path_rate > 0.: - x = drop_path(x, self.drop_path_rate, self.training) - x += shortcut + if self.has_skip: + x = x + self.drop_path(shortcut) return x @@ -143,66 +146,51 @@ class InvertedResidual(nn.Module): """ def __init__( - self, in_chs, out_chs, dw_kernel_size=3, stride=1, dilation=1, pad_type='', + self, in_chs, out_chs, dw_kernel_size=3, stride=1, dilation=1, group_size=1, pad_type='', noskip=False, exp_ratio=1.0, exp_kernel_size=1, pw_kernel_size=1, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, se_layer=None, conv_kwargs=None, drop_path_rate=0.): super(InvertedResidual, 
self).__init__() + norm_act_layer = get_norm_act_layer(norm_layer, act_layer) conv_kwargs = conv_kwargs or {} mid_chs = make_divisible(in_chs * exp_ratio) - self.has_residual = (in_chs == out_chs and stride == 1) and not noskip - self.drop_path_rate = drop_path_rate + groups = num_groups(group_size, mid_chs) + self.has_skip = (in_chs == out_chs and stride == 1) and not noskip # Point-wise expansion self.conv_pw = create_conv2d(in_chs, mid_chs, exp_kernel_size, padding=pad_type, **conv_kwargs) - self.bn1 = norm_layer(mid_chs) - self.act1 = act_layer(inplace=True) + self.bn1 = norm_act_layer(mid_chs, inplace=True) # Depth-wise convolution self.conv_dw = create_conv2d( mid_chs, mid_chs, dw_kernel_size, stride=stride, dilation=dilation, - padding=pad_type, depthwise=True, **conv_kwargs) - self.bn2 = norm_layer(mid_chs) - self.act2 = act_layer(inplace=True) + groups=groups, padding=pad_type, **conv_kwargs) + self.bn2 = norm_act_layer(mid_chs, inplace=True) # Squeeze-and-excitation self.se = se_layer(mid_chs, act_layer=act_layer) if se_layer else nn.Identity() # Point-wise linear projection self.conv_pwl = create_conv2d(mid_chs, out_chs, pw_kernel_size, padding=pad_type, **conv_kwargs) - self.bn3 = norm_layer(out_chs) + self.bn3 = norm_act_layer(out_chs, apply_act=False) + self.drop_path = DropPath(drop_path_rate) if drop_path_rate else nn.Identity() def feature_info(self, location): if location == 'expansion': # after SE, input to PWL - info = dict(module='conv_pwl', hook_type='forward_pre', num_chs=self.conv_pwl.in_channels) + return dict(module='conv_pwl', hook_type='forward_pre', num_chs=self.conv_pwl.in_channels) else: # location == 'bottleneck', block output - info = dict(module='', hook_type='', num_chs=self.conv_pwl.out_channels) - return info + return dict(module='', hook_type='', num_chs=self.conv_pwl.out_channels) def forward(self, x): shortcut = x - - # Point-wise expansion x = self.conv_pw(x) x = self.bn1(x) - x = self.act1(x) - - # Depth-wise convolution x = self.conv_dw(x) x = self.bn2(x) - x = self.act2(x) - - # Squeeze-and-excitation x = self.se(x) - - # Point-wise linear projection x = self.conv_pwl(x) x = self.bn3(x) - - if self.has_residual: - if self.drop_path_rate > 0.: - x = drop_path(x, self.drop_path_rate, self.training) - x += shortcut - + if self.has_skip: + x = x + self.drop_path(shortcut) return x @@ -210,7 +198,7 @@ class CondConvResidual(InvertedResidual): """ Inverted residual block w/ CondConv routing""" def __init__( - self, in_chs, out_chs, dw_kernel_size=3, stride=1, dilation=1, pad_type='', + self, in_chs, out_chs, dw_kernel_size=3, stride=1, dilation=1, group_size=1, pad_type='', noskip=False, exp_ratio=1.0, exp_kernel_size=1, pw_kernel_size=1, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, se_layer=None, num_experts=0, drop_path_rate=0.): @@ -218,8 +206,8 @@ def __init__( conv_kwargs = dict(num_experts=self.num_experts) super(CondConvResidual, self).__init__( - in_chs, out_chs, dw_kernel_size=dw_kernel_size, stride=stride, dilation=dilation, pad_type=pad_type, - act_layer=act_layer, noskip=noskip, exp_ratio=exp_ratio, exp_kernel_size=exp_kernel_size, + in_chs, out_chs, dw_kernel_size=dw_kernel_size, stride=stride, dilation=dilation, group_size=group_size, + pad_type=pad_type, act_layer=act_layer, noskip=noskip, exp_ratio=exp_ratio, exp_kernel_size=exp_kernel_size, pw_kernel_size=pw_kernel_size, se_layer=se_layer, norm_layer=norm_layer, conv_kwargs=conv_kwargs, drop_path_rate=drop_path_rate) @@ -227,32 +215,17 @@ def __init__( def forward(self, x): shortcut = 
x - - # CondConv routing - pooled_inputs = F.adaptive_avg_pool2d(x, 1).flatten(1) + pooled_inputs = F.adaptive_avg_pool2d(x, 1).flatten(1) # CondConv routing routing_weights = torch.sigmoid(self.routing_fn(pooled_inputs)) - - # Point-wise expansion x = self.conv_pw(x, routing_weights) x = self.bn1(x) - x = self.act1(x) - - # Depth-wise convolution x = self.conv_dw(x, routing_weights) x = self.bn2(x) - x = self.act2(x) - - # Squeeze-and-excitation x = self.se(x) - - # Point-wise linear projection x = self.conv_pwl(x, routing_weights) x = self.bn3(x) - - if self.has_residual: - if self.drop_path_rate > 0.: - x = drop_path(x, self.drop_path_rate, self.training) - x += shortcut + if self.has_skip: + x = x + self.drop_path(shortcut) return x @@ -269,55 +242,44 @@ class EdgeResidual(nn.Module): """ def __init__( - self, in_chs, out_chs, exp_kernel_size=3, stride=1, dilation=1, pad_type='', + self, in_chs, out_chs, exp_kernel_size=3, stride=1, dilation=1, group_size=0, pad_type='', force_in_chs=0, noskip=False, exp_ratio=1.0, pw_kernel_size=1, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, se_layer=None, drop_path_rate=0.): super(EdgeResidual, self).__init__() + norm_act_layer = get_norm_act_layer(norm_layer, act_layer) if force_in_chs > 0: mid_chs = make_divisible(force_in_chs * exp_ratio) else: mid_chs = make_divisible(in_chs * exp_ratio) - self.has_residual = (in_chs == out_chs and stride == 1) and not noskip - self.drop_path_rate = drop_path_rate + groups = num_groups(group_size, in_chs) + self.has_skip = (in_chs == out_chs and stride == 1) and not noskip # Expansion convolution self.conv_exp = create_conv2d( - in_chs, mid_chs, exp_kernel_size, stride=stride, dilation=dilation, padding=pad_type) - self.bn1 = norm_layer(mid_chs) - self.act1 = act_layer(inplace=True) + in_chs, mid_chs, exp_kernel_size, stride=stride, dilation=dilation, groups=groups, padding=pad_type) + self.bn1 = norm_act_layer(mid_chs, inplace=True) # Squeeze-and-excitation self.se = se_layer(mid_chs, act_layer=act_layer) if se_layer else nn.Identity() # Point-wise linear projection self.conv_pwl = create_conv2d(mid_chs, out_chs, pw_kernel_size, padding=pad_type) - self.bn2 = norm_layer(out_chs) + self.bn2 = norm_act_layer(out_chs, apply_act=False) + self.drop_path = DropPath(drop_path_rate) if drop_path_rate else nn.Identity() def feature_info(self, location): if location == 'expansion': # after SE, before PWL - info = dict(module='conv_pwl', hook_type='forward_pre', num_chs=self.conv_pwl.in_channels) + return dict(module='conv_pwl', hook_type='forward_pre', num_chs=self.conv_pwl.in_channels) else: # location == 'bottleneck', block output - info = dict(module='', hook_type='', num_chs=self.conv_pwl.out_channels) - return info + return dict(module='', hook_type='', num_chs=self.conv_pwl.out_channels) def forward(self, x): shortcut = x - - # Expansion convolution x = self.conv_exp(x) x = self.bn1(x) - x = self.act1(x) - - # Squeeze-and-excitation x = self.se(x) - - # Point-wise linear projection x = self.conv_pwl(x) x = self.bn2(x) - - if self.has_residual: - if self.drop_path_rate > 0.: - x = drop_path(x, self.drop_path_rate, self.training) - x += shortcut - + if self.has_skip: + x = x + self.drop_path(shortcut) return x diff --git a/timm/models/efficientnet_builder.py b/timm/models/efficientnet_builder.py index a23e8273d9..a102a872f2 100644 --- a/timm/models/efficientnet_builder.py +++ b/timm/models/efficientnet_builder.py @@ -139,60 +139,52 @@ def _decode_block_str(block_str): exp_kernel_size = _parse_ksize(options['a']) if 
'a' in options else 1 pw_kernel_size = _parse_ksize(options['p']) if 'p' in options else 1 force_in_chs = int(options['fc']) if 'fc' in options else 0 # FIXME hack to deal with in_chs issue in TPU def - num_repeat = int(options['r']) + # each type of block has different valid arguments, fill accordingly + block_args = dict( + block_type=block_type, + out_chs=int(options['c']), + stride=int(options['s']), + act_layer=act_layer, + ) if block_type == 'ir': - block_args = dict( - block_type=block_type, + block_args.update(dict( dw_kernel_size=_parse_ksize(options['k']), exp_kernel_size=exp_kernel_size, pw_kernel_size=pw_kernel_size, - out_chs=int(options['c']), exp_ratio=float(options['e']), se_ratio=float(options['se']) if 'se' in options else 0., - stride=int(options['s']), - act_layer=act_layer, noskip=skip is False, - ) + )) if 'cc' in options: block_args['num_experts'] = int(options['cc']) elif block_type == 'ds' or block_type == 'dsa': - block_args = dict( - block_type=block_type, + block_args.update(dict( dw_kernel_size=_parse_ksize(options['k']), pw_kernel_size=pw_kernel_size, - out_chs=int(options['c']), se_ratio=float(options['se']) if 'se' in options else 0., - stride=int(options['s']), - act_layer=act_layer, pw_act=block_type == 'dsa', noskip=block_type == 'dsa' or skip is False, - ) + )) elif block_type == 'er': - block_args = dict( - block_type=block_type, + block_args.update(dict( exp_kernel_size=_parse_ksize(options['k']), pw_kernel_size=pw_kernel_size, - out_chs=int(options['c']), exp_ratio=float(options['e']), force_in_chs=force_in_chs, se_ratio=float(options['se']) if 'se' in options else 0., - stride=int(options['s']), - act_layer=act_layer, noskip=skip is False, - ) + )) elif block_type == 'cn': - block_args = dict( - block_type=block_type, + block_args.update(dict( kernel_size=int(options['k']), - out_chs=int(options['c']), - stride=int(options['s']), - act_layer=act_layer, skip=skip is True, - ) + )) else: assert False, 'Unknown block type (%s)' % block_type + if 'gs' in options: + block_args['group_size'] = options['gs'] return block_args, num_repeat @@ -235,7 +227,27 @@ def _scale_stage_depth(stack_args, repeats, depth_multiplier=1.0, depth_trunc='c return sa_scaled -def decode_arch_def(arch_def, depth_multiplier=1.0, depth_trunc='ceil', experts_multiplier=1, fix_first_last=False): +def decode_arch_def( + arch_def, + depth_multiplier=1.0, + depth_trunc='ceil', + experts_multiplier=1, + fix_first_last=False, + group_size=None, +): + """ Decode block architecture definition strings -> block kwargs + + Args: + arch_def: architecture definition strings, list of list of strings + depth_multiplier: network depth multiplier + depth_trunc: network depth truncation mode when applying multiplier + experts_multiplier: CondConv experts multiplier + fix_first_last: fix first and last block depths when multiplier is applied + group_size: group size override for all blocks that weren't explicitly set in arch string + + Returns: + list of list of block kwargs + """ arch_args = [] if isinstance(depth_multiplier, tuple): assert len(depth_multiplier) == len(arch_def) @@ -250,6 +262,8 @@ def decode_arch_def(arch_def, depth_multiplier=1.0, depth_trunc='ceil', experts_ ba, rep = _decode_block_str(block_str) if ba.get('num_experts', 0) > 0 and experts_multiplier > 1: ba['num_experts'] *= experts_multiplier + if group_size is not None: + ba.setdefault('group_size', group_size) stack_args.append(ba) repeats.append(rep) if fix_first_last and (stack_idx == 0 or stack_idx == len(arch_def) - 1):
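The `gs` option decoded above and the new `group_size` argument to `decode_arch_def` both specify channels per group rather than a group count; `decode_arch_def` only fills in `group_size` for blocks that did not set `gs` explicitly (via `setdefault`), and the `num_groups()` helper added in `efficientnet_blocks.py` converts the value into the `groups` argument for `create_conv2d`. A simplified sketch of the convention (the function name here is illustrative, and it asserts on non-divisible channels where the real helper falls back to flooring):

```python
def groups_from_group_size(group_size, channels):
    # group_size is channels-per-group: None/0 -> ordinary conv, 1 -> depthwise, k -> channels // k groups
    if not group_size:
        return 1
    assert channels % group_size == 0, 'simplified: the real helper floors instead of asserting'
    return channels // group_size


# For a 96-channel convolution:
#   groups_from_group_size(None, 96) -> 1   (dense conv)
#   groups_from_group_size(1, 96)    -> 96  (depthwise)
#   groups_from_group_size(8, 96)    -> 12  (grouped conv, 8 channels per group)
```

Expressing grouping as channels-per-group means the number of groups scales with the width of each block, so the same arch-def string stays meaningful when channel multipliers rescale the network.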
diff --git a/timm/models/layers/__init__.py b/timm/models/layers/__init__.py index 0ed0c3af6b..1319cc7443 100644 --- a/timm/models/layers/__init__.py +++ b/timm/models/layers/__init__.py @@ -7,11 +7,11 @@ from .config import is_exportable, is_scriptable, is_no_jit, set_exportable, set_scriptable, set_no_jit,\ set_layer_config from .conv2d_same import Conv2dSame, conv2d_same -from .conv_bn_act import ConvBnAct +from .conv_bn_act import ConvNormAct, ConvNormActAa, ConvBnAct from .create_act import create_act_layer, get_act_layer, get_act_fn from .create_attn import get_attn, create_attn from .create_conv2d import create_conv2d -from .create_norm_act import get_norm_act_layer, create_norm_act, convert_norm_act +from .create_norm_act import get_norm_act_layer, create_norm_act_layer, get_norm_act_layer from .drop import DropBlock2d, DropPath, drop_block_2d, drop_path from .eca import EcaModule, CecaModule, EfficientChannelAttn, CircularEfficientChannelAttn from .evo_norm import EvoNorm2dB0, EvoNorm2dB1, EvoNorm2dB2,\ @@ -32,7 +32,7 @@ from .pool2d_same import AvgPool2dSame, create_pool2d from .squeeze_excite import SEModule, SqueezeExcite, EffectiveSEModule, EffectiveSqueezeExcite from .selective_kernel import SelectiveKernel -from .separable_conv import SeparableConv2d, SeparableConvBnAct +from .separable_conv import SeparableConv2d, SeparableConvNormAct from .space_to_depth import SpaceToDepthModule from .split_attn import SplitAttn from .split_batchnorm import SplitBatchNorm2d, convert_splitbn_model diff --git a/timm/models/layers/cbam.py b/timm/models/layers/cbam.py index bacf5cf07b..576a8306d9 100644 --- a/timm/models/layers/cbam.py +++ b/timm/models/layers/cbam.py @@ -11,7 +11,7 @@ from torch import nn as nn import torch.nn.functional as F -from .conv_bn_act import ConvBnAct +from .conv_bn_act import ConvNormAct from .create_act import create_act_layer, get_act_layer from .helpers import make_divisible @@ -56,7 +56,7 @@ class SpatialAttn(nn.Module): """ def __init__(self, kernel_size=7, gate_layer='sigmoid'): super(SpatialAttn, self).__init__() - self.conv = ConvBnAct(2, 1, kernel_size, act_layer=None) + self.conv = ConvNormAct(2, 1, kernel_size, apply_act=False) self.gate = create_act_layer(gate_layer) def forward(self, x): @@ -70,7 +70,7 @@ class LightSpatialAttn(nn.Module): """ def __init__(self, kernel_size=7, gate_layer='sigmoid'): super(LightSpatialAttn, self).__init__() - self.conv = ConvBnAct(1, 1, kernel_size, act_layer=None) + self.conv = ConvNormAct(1, 1, kernel_size, apply_act=False) self.gate = create_act_layer(gate_layer) def forward(self, x): diff --git a/timm/models/layers/conv_bn_act.py b/timm/models/layers/conv_bn_act.py index 33005c37b7..af010573e5 100644 --- a/timm/models/layers/conv_bn_act.py +++ b/timm/models/layers/conv_bn_act.py @@ -5,14 +5,46 @@ from torch import nn as nn from .create_conv2d import create_conv2d -from .create_norm_act import convert_norm_act +from .create_norm_act import get_norm_act_layer -class ConvBnAct(nn.Module): - def __init__(self, in_channels, out_channels, kernel_size=1, stride=1, padding='', dilation=1, groups=1, - bias=False, apply_act=True, norm_layer=nn.BatchNorm2d, act_layer=nn.ReLU, aa_layer=None, - drop_block=None): - super(ConvBnAct, self).__init__() +class ConvNormAct(nn.Module): + def __init__( + self, in_channels, out_channels, kernel_size=1, stride=1, padding='', dilation=1, groups=1, + bias=False, apply_act=True, norm_layer=nn.BatchNorm2d, act_layer=nn.ReLU, drop_layer=None): + super(ConvNormAct, self).__init__() + self.conv 
= create_conv2d( + in_channels, out_channels, kernel_size, stride=stride, + padding=padding, dilation=dilation, groups=groups, bias=bias) + + # NOTE for backwards compatibility with models that use separate norm and act layer definitions + norm_act_layer = get_norm_act_layer(norm_layer, act_layer) + # NOTE for backwards (weight) compatibility, norm layer name remains `.bn` + norm_kwargs = dict(drop_layer=drop_layer) if drop_layer is not None else {} + self.bn = norm_act_layer(out_channels, apply_act=apply_act, **norm_kwargs) + + @property + def in_channels(self): + return self.conv.in_channels + + @property + def out_channels(self): + return self.conv.out_channels + + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + return x + + +ConvBnAct = ConvNormAct + + +class ConvNormActAa(nn.Module): + def __init__( + self, in_channels, out_channels, kernel_size=1, stride=1, padding='', dilation=1, groups=1, + bias=False, apply_act=True, norm_layer=nn.BatchNorm2d, act_layer=nn.ReLU, aa_layer=None, drop_layer=None): + super(ConvNormActAa, self).__init__() use_aa = aa_layer is not None self.conv = create_conv2d( @@ -20,9 +52,11 @@ def __init__(self, in_channels, out_channels, kernel_size=1, stride=1, padding=' padding=padding, dilation=dilation, groups=groups, bias=bias) # NOTE for backwards compatibility with models that use separate norm and act layer definitions - norm_act_layer = convert_norm_act(norm_layer, act_layer) - self.bn = norm_act_layer(out_channels, apply_act=apply_act, drop_block=drop_block) - self.aa = aa_layer(channels=out_channels) if stride == 2 and use_aa else None + norm_act_layer = get_norm_act_layer(norm_layer, act_layer) + # NOTE for backwards (weight) compatibility, norm layer name remains `.bn` + norm_kwargs = dict(drop_layer=drop_layer) if drop_layer is not None else {} + self.bn = norm_act_layer(out_channels, apply_act=apply_act, **norm_kwargs) + self.aa = aa_layer(channels=out_channels) if stride == 2 and use_aa else nn.Identity() @property def in_channels(self): @@ -35,6 +69,5 @@ def out_channels(self): def forward(self, x): x = self.conv(x) x = self.bn(x) - if self.aa is not None: - x = self.aa(x) + x = self.aa(x) return x diff --git a/timm/models/layers/create_conv2d.py b/timm/models/layers/create_conv2d.py index 3a0cc03a5c..ac9489ce49 100644 --- a/timm/models/layers/create_conv2d.py +++ b/timm/models/layers/create_conv2d.py @@ -16,7 +16,12 @@ def create_conv2d(in_channels, out_channels, kernel_size, **kwargs): """ if isinstance(kernel_size, list): assert 'num_experts' not in kwargs # MixNet + CondConv combo not supported currently - assert 'groups' not in kwargs # MixedConv groups are defined by kernel list + if 'groups' in kwargs: + groups = kwargs.pop('groups') + if groups == in_channels: + kwargs['depthwise'] = True + else: + assert groups == 1 # We're going to use only lists for defining the MixedConv2d kernel groups, # ints, tuples, other iterables will continue to pass to normal conv and specify h, w. 
m = MixedConv2d(in_channels, out_channels, kernel_size, **kwargs) diff --git a/timm/models/layers/create_norm_act.py b/timm/models/layers/create_norm_act.py index 5d4894a03a..cd15c2f830 100644 --- a/timm/models/layers/create_norm_act.py +++ b/timm/models/layers/create_norm_act.py @@ -11,12 +11,15 @@ from .evo_norm import * from .filter_response_norm import FilterResponseNormAct2d, FilterResponseNormTlu2d -from .norm_act import BatchNormAct2d, GroupNormAct +from .norm_act import BatchNormAct2d, GroupNormAct, LayerNormAct, LayerNormAct2d from .inplace_abn import InplaceAbn _NORM_ACT_MAP = dict( batchnorm=BatchNormAct2d, + batchnorm2d=BatchNormAct2d, groupnorm=GroupNormAct, + layernorm=LayerNormAct, + layernorm2d=LayerNormAct2d, evonormb0=EvoNorm2dB0, evonormb1=EvoNorm2dB1, evonormb2=EvoNorm2dB2, @@ -33,28 +36,19 @@ ) _NORM_ACT_TYPES = {m for n, m in _NORM_ACT_MAP.items()} # has act_layer arg to define act type -_NORM_ACT_REQUIRES_ARG = {BatchNormAct2d, GroupNormAct, FilterResponseNormAct2d, InplaceAbn} +_NORM_ACT_REQUIRES_ARG = { + BatchNormAct2d, GroupNormAct, LayerNormAct, LayerNormAct2d, FilterResponseNormAct2d, InplaceAbn} -def get_norm_act_layer(layer_name): - layer_name = layer_name.replace('_', '').lower().split('-')[0] - layer = _NORM_ACT_MAP.get(layer_name, None) - assert layer is not None, "Invalid norm_act layer (%s)" % layer_name - return layer - - -def create_norm_act(layer_name, num_features, apply_act=True, jit=False, **kwargs): - layer_parts = layer_name.split('-') # e.g. batchnorm-leaky_relu - assert len(layer_parts) in (1, 2) - layer = get_norm_act_layer(layer_parts[0]) - #activation_class = layer_parts[1].lower() if len(layer_parts) > 1 else '' # FIXME support string act selection? +def create_norm_act_layer(layer_name, num_features, act_layer=None, apply_act=True, jit=False, **kwargs): + layer = get_norm_act_layer(layer_name, act_layer=act_layer) layer_instance = layer(num_features, apply_act=apply_act, **kwargs) if jit: layer_instance = torch.jit.script(layer_instance) return layer_instance -def convert_norm_act(norm_layer, act_layer): +def get_norm_act_layer(norm_layer, act_layer=None): assert isinstance(norm_layer, (type, str, types.FunctionType, functools.partial)) assert act_layer is None or isinstance(act_layer, (type, str, types.FunctionType, functools.partial)) norm_act_kwargs = {} @@ -65,7 +59,8 @@ def convert_norm_act(norm_layer, act_layer): norm_layer = norm_layer.func if isinstance(norm_layer, str): - norm_act_layer = get_norm_act_layer(norm_layer) + layer_name = norm_layer.replace('_', '').lower().split('-')[0] + norm_act_layer = _NORM_ACT_MAP.get(layer_name, None) elif norm_layer in _NORM_ACT_TYPES: norm_act_layer = norm_layer elif isinstance(norm_layer, types.FunctionType): @@ -77,6 +72,10 @@ def convert_norm_act(norm_layer, act_layer): norm_act_layer = BatchNormAct2d elif type_name.startswith('groupnorm'): norm_act_layer = GroupNormAct + elif type_name.startswith('layernorm2d'): + norm_act_layer = LayerNormAct2d + elif type_name.startswith('layernorm'): + norm_act_layer = LayerNormAct else: assert False, f"No equivalent norm_act layer for {type_name}" diff --git a/timm/models/layers/drop.py b/timm/models/layers/drop.py index 90c1933a1b..fb20dfcef1 100644 --- a/timm/models/layers/drop.py +++ b/timm/models/layers/drop.py @@ -20,7 +20,7 @@ def drop_block_2d( - x, drop_prob: float = 0.1, block_size: int = 7, gamma_scale: float = 1.0, + x, drop_prob: float = 0.1, block_size: int = 7, gamma_scale: float = 1.0, with_noise: bool = False, inplace: bool = False, 
batchwise: bool = False): """ DropBlock. See https://arxiv.org/pdf/1810.12890.pdf @@ -32,7 +32,7 @@ def drop_block_2d( clipped_block_size = min(block_size, min(W, H)) # seed_drop_rate, the gamma parameter gamma = gamma_scale * drop_prob * total_size / clipped_block_size ** 2 / ( - (W - block_size + 1) * (H - block_size + 1)) + (W - block_size + 1) * (H - block_size + 1)) # Forces the block to be inside the feature map. w_i, h_i = torch.meshgrid(torch.arange(W).to(x.device), torch.arange(H).to(x.device)) @@ -104,14 +104,16 @@ def drop_block_fast_2d( class DropBlock2d(nn.Module): """ DropBlock. See https://arxiv.org/pdf/1810.12890.pdf """ - def __init__(self, - drop_prob=0.1, - block_size=7, - gamma_scale=1.0, - with_noise=False, - inplace=False, - batchwise=False, - fast=True): + + def __init__( + self, + drop_prob=0.1, + block_size=7, + gamma_scale=1.0, + with_noise=False, + inplace=False, + batchwise=False, + fast=True): super(DropBlock2d, self).__init__() self.drop_prob = drop_prob self.gamma_scale = gamma_scale @@ -155,6 +157,7 @@ def drop_path(x, drop_prob: float = 0., training: bool = False, scale_by_keep: b class DropPath(nn.Module): """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks). """ + def __init__(self, drop_prob=None, scale_by_keep=True): super(DropPath, self).__init__() self.drop_prob = drop_prob diff --git a/timm/models/layers/inplace_abn.py b/timm/models/layers/inplace_abn.py index 3aae7cf563..a80889339e 100644 --- a/timm/models/layers/inplace_abn.py +++ b/timm/models/layers/inplace_abn.py @@ -38,7 +38,7 @@ class InplaceAbn(nn.Module): """ def __init__(self, num_features, eps=1e-5, momentum=0.1, affine=True, apply_act=True, - act_layer="leaky_relu", act_param=0.01, drop_block=None): + act_layer="leaky_relu", act_param=0.01, drop_layer=None): super(InplaceAbn, self).__init__() self.num_features = num_features self.affine = affine @@ -54,7 +54,7 @@ def __init__(self, num_features, eps=1e-5, momentum=0.1, affine=True, apply_act= self.act_name = 'elu' elif act_layer == nn.LeakyReLU: self.act_name = 'leaky_relu' - elif act_layer == nn.Identity: + elif act_layer is None or act_layer == nn.Identity: self.act_name = 'identity' else: assert False, f'Invalid act layer {act_layer.__name__} for IABN' diff --git a/timm/models/layers/non_local_attn.py b/timm/models/layers/non_local_attn.py index 881fa36d11..670e8f2475 100644 --- a/timm/models/layers/non_local_attn.py +++ b/timm/models/layers/non_local_attn.py @@ -8,7 +8,7 @@ from torch import nn from torch.nn import functional as F -from .conv_bn_act import ConvBnAct +from .conv_bn_act import ConvNormAct from .helpers import make_divisible from .trace_utils import _assert @@ -74,10 +74,10 @@ class BilinearAttnTransform(nn.Module): def __init__(self, in_channels, block_size, groups, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d): super(BilinearAttnTransform, self).__init__() - self.conv1 = ConvBnAct(in_channels, groups, 1, act_layer=act_layer, norm_layer=norm_layer) + self.conv1 = ConvNormAct(in_channels, groups, 1, act_layer=act_layer, norm_layer=norm_layer) self.conv_p = nn.Conv2d(groups, block_size * block_size * groups, kernel_size=(block_size, 1)) self.conv_q = nn.Conv2d(groups, block_size * block_size * groups, kernel_size=(1, block_size)) - self.conv2 = ConvBnAct(in_channels, in_channels, 1, act_layer=act_layer, norm_layer=norm_layer) + self.conv2 = ConvNormAct(in_channels, in_channels, 1, act_layer=act_layer, norm_layer=norm_layer) self.block_size = block_size self.groups = groups 
self.in_channels = in_channels @@ -132,9 +132,9 @@ def __init__( super().__init__() if rd_channels is None: rd_channels = make_divisible(in_channels * rd_ratio, divisor=rd_divisor) - self.conv1 = ConvBnAct(in_channels, rd_channels, 1, act_layer=act_layer, norm_layer=norm_layer) + self.conv1 = ConvNormAct(in_channels, rd_channels, 1, act_layer=act_layer, norm_layer=norm_layer) self.ba = BilinearAttnTransform(rd_channels, block_size, groups, act_layer=act_layer, norm_layer=norm_layer) - self.conv2 = ConvBnAct(rd_channels, in_channels, 1, act_layer=act_layer, norm_layer=norm_layer) + self.conv2 = ConvNormAct(rd_channels, in_channels, 1, act_layer=act_layer, norm_layer=norm_layer) self.dropout = nn.Dropout2d(p=drop_rate) def forward(self, x): diff --git a/timm/models/layers/norm_act.py b/timm/models/layers/norm_act.py index 40bd57effb..5ddb07af68 100644 --- a/timm/models/layers/norm_act.py +++ b/timm/models/layers/norm_act.py @@ -1,5 +1,7 @@ """ Normalization + Activation Layers """ +from typing import Union, List + import torch from torch import nn as nn from torch.nn import functional as F @@ -14,12 +16,13 @@ class BatchNormAct2d(nn.BatchNorm2d): compatible with weights trained with separate bn, act. This is why we inherit from BN instead of composing it as a .bn member. """ - def __init__(self, num_features, eps=1e-5, momentum=0.1, affine=True, track_running_stats=True, - apply_act=True, act_layer=nn.ReLU, inplace=True, drop_block=None): + def __init__( + self, num_features, eps=1e-5, momentum=0.1, affine=True, track_running_stats=True, + apply_act=True, act_layer=nn.ReLU, inplace=True, drop_layer=None): super(BatchNormAct2d, self).__init__( num_features, eps=eps, momentum=momentum, affine=affine, track_running_stats=track_running_stats) - if isinstance(act_layer, str): - act_layer = get_act_layer(act_layer) + self.drop = drop_layer() if drop_layer is not None else nn.Identity() + act_layer = get_act_layer(act_layer) # string -> nn.Module if act_layer is not None and apply_act: act_args = dict(inplace=True) if inplace else {} self.act = act_layer(**act_args) @@ -29,8 +32,8 @@ def __init__(self, num_features, eps=1e-5, momentum=0.1, affine=True, track_runn def _forward_jit(self, x): """ A cut & paste of the contents of the PyTorch BatchNorm2d forward function """ - # exponential_average_factor is self.momentum set to - # (when it is available) only so that if gets updated + # exponential_average_factor is set to self.momentum + # (when it is available) only so that it gets updated # in ONNX graph when this node is exported to ONNX. if self.momentum is None: exponential_average_factor = 0.0 @@ -39,18 +42,38 @@ def _forward_jit(self, x): if self.training and self.track_running_stats: # TODO: if statement only here to tell the jit to skip emitting this when it is None - if self.num_batches_tracked is not None: - self.num_batches_tracked += 1 + if self.num_batches_tracked is not None: # type: ignore[has-type] + self.num_batches_tracked = self.num_batches_tracked + 1 # type: ignore[has-type] if self.momentum is None: # use cumulative moving average exponential_average_factor = 1.0 / float(self.num_batches_tracked) else: # use exponential moving average exponential_average_factor = self.momentum - x = F.batch_norm( - x, self.running_mean, self.running_var, self.weight, self.bias, - self.training or not self.track_running_stats, - exponential_average_factor, self.eps) - return x + r""" + Decide whether the mini-batch stats should be used for normalization rather than the buffers. 
+ Mini-batch stats are used in training mode, and in eval mode when buffers are None. + """ + if self.training: + bn_training = True + else: + bn_training = (self.running_mean is None) and (self.running_var is None) + + r""" + Buffers are only updated if they are to be tracked and we are in training mode. Thus they only need to be + passed when the update should occur (i.e. in training mode when they are tracked), or when buffer stats are + used for normalization (i.e. in eval mode when buffers are not None). + """ + return F.batch_norm( + x, + # If buffers are not to be tracked, ensure that they won't be updated + self.running_mean if not self.training or self.track_running_stats else None, + self.running_var if not self.training or self.track_running_stats else None, + self.weight, + self.bias, + bn_training, + exponential_average_factor, + self.eps, + ) @torch.jit.ignore def _forward_python(self, x): @@ -62,6 +85,7 @@ def forward(self, x): x = self._forward_jit(x) else: x = self._forward_python(x) + x = self.drop(x) x = self.act(x) return x @@ -91,13 +115,22 @@ def group_norm_tpu(x, w, b, groups: int = 32, eps: float = 1e-5, diff_sqm: bool return x +def _num_groups(num_channels, num_groups, group_size): + if group_size: + assert num_channels % group_size == 0 + return num_channels // group_size + return num_groups + + class GroupNormAct(nn.GroupNorm): # NOTE num_channel and num_groups order flipped for easier layer swaps / binding of fixed args - def __init__(self, num_channels, num_groups=32, eps=1e-5, affine=True, - apply_act=True, act_layer=nn.ReLU, inplace=True, drop_block=None): - super(GroupNormAct, self).__init__(num_groups, num_channels, eps=eps, affine=affine) - if isinstance(act_layer, str): - act_layer = get_act_layer(act_layer) + def __init__( + self, num_channels, num_groups=32, eps=1e-5, affine=True, group_size=None, + apply_act=True, act_layer=nn.ReLU, inplace=True, drop_layer=None): + super(GroupNormAct, self).__init__( + _num_groups(num_channels, num_groups, group_size), num_channels, eps=eps, affine=affine) + self.drop = drop_layer() if drop_layer is not None else nn.Identity() + act_layer = get_act_layer(act_layer) # string -> nn.Module if act_layer is not None and apply_act: act_args = dict(inplace=True) if inplace else {} self.act = act_layer(**act_args) @@ -109,5 +142,47 @@ def forward(self, x): x = group_norm_tpu(x, self.weight, self.bias, self.num_groups, self.eps) else: x = F.group_norm(x, self.num_groups, self.weight, self.bias, self.eps) + x = self.drop(x) + x = self.act(x) + return x + + +class LayerNormAct(nn.LayerNorm): + def __init__( + self, normalization_shape: Union[int, List[int], torch.Size], eps=1e-5, affine=True, + apply_act=True, act_layer=nn.ReLU, inplace=True, drop_layer=None): + super(LayerNormAct, self).__init__(normalization_shape, eps=eps, elementwise_affine=affine) + self.drop = drop_layer() if drop_layer is not None else nn.Identity() + act_layer = get_act_layer(act_layer) # string -> nn.Module + if act_layer is not None and apply_act: + act_args = dict(inplace=True) if inplace else {} + self.act = act_layer(**act_args) + else: + self.act = nn.Identity() + + def forward(self, x): + x = F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps) + x = self.drop(x) + x = self.act(x) + return x + + +class LayerNormAct2d(nn.LayerNorm): + def __init__( + self, num_channels, eps=1e-5, affine=True, + apply_act=True, act_layer=nn.ReLU, inplace=True, drop_layer=None): + super(LayerNormAct2d, self).__init__(num_channels, eps=eps, 
elementwise_affine=affine) + self.drop = drop_layer() if drop_layer is not None else nn.Identity() + act_layer = get_act_layer(act_layer) # string -> nn.Module + if act_layer is not None and apply_act: + act_args = dict(inplace=True) if inplace else {} + self.act = act_layer(**act_args) + else: + self.act = nn.Identity() + + def forward(self, x): + x = F.layer_norm( + x.permute(0, 2, 3, 1), self.normalized_shape, self.weight, self.bias, self.eps).permute(0, 3, 1, 2) + x = self.drop(x) x = self.act(x) return x diff --git a/timm/models/layers/pooled_attn.py b/timm/models/layers/pooled_attn.py new file mode 100644 index 0000000000..40cf2b345e --- /dev/null +++ b/timm/models/layers/pooled_attn.py @@ -0,0 +1,143 @@ +from typing import List + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from .helpers import to_2tuple +from .weight_init import trunc_normal_ + + +def rel_logits_1d(q, rel_k, permute_mask: List[int]): + """ Compute relative logits along one dimension + + As per: https://gist.github.com/aravindsrinivas/56359b79f0ce4449bcb04ab4b56a57a2 + Originally from: `Attention Augmented Convolutional Networks` - https://arxiv.org/abs/1904.09925 + + Args: + q: (batch, heads, height, width, dim) + rel_k: (2 * width - 1, dim) + permute_mask: permute output dim according to this + """ + B, H, W, dim = q.shape + x = (q @ rel_k.transpose(-1, -2)) + x = x.reshape(-1, W, 2 * W -1) + + # pad to shift from relative to absolute indexing + x_pad = F.pad(x, [0, 1]).flatten(1) + x_pad = F.pad(x_pad, [0, W - 1]) + + # reshape and slice out the padded elements + x_pad = x_pad.reshape(-1, W + 1, 2 * W - 1) + x = x_pad[:, :W, W - 1:] + + # reshape and tile + x = x.reshape(B, H, 1, W, W).expand(-1, -1, H, -1, -1) + return x.permute(permute_mask) + + +class PosEmbedRel(nn.Module): + """ Relative Position Embedding + As per: https://gist.github.com/aravindsrinivas/56359b79f0ce4449bcb04ab4b56a57a2 + Originally from: `Attention Augmented Convolutional Networks` - https://arxiv.org/abs/1904.09925 + """ + def __init__(self, feat_size, dim_head, scale): + super().__init__() + self.height, self.width = to_2tuple(feat_size) + self.dim_head = dim_head + self.scale = scale + self.height_rel = nn.Parameter(torch.randn(self.height * 2 - 1, dim_head) * self.scale) + self.width_rel = nn.Parameter(torch.randn(self.width * 2 - 1, dim_head) * self.scale) + + def forward(self, q): + B, num_heads, HW, _ = q.shape + + # relative logits in width dimension. + q = q.reshape(B * num_heads, self.height, self.width, -1) + rel_logits_w = rel_logits_1d(q, self.width_rel, permute_mask=(0, 1, 3, 2, 4)) + + # relative logits in height dimension. 
+        q = q.transpose(1, 2)
+        rel_logits_h = rel_logits_1d(q, self.height_rel, permute_mask=(0, 3, 1, 4, 2))
+
+        rel_logits = rel_logits_h + rel_logits_w
+        rel_logits = rel_logits.reshape(B, num_heads, HW, HW)
+        return rel_logits
+
+
+class BottleneckAttn(nn.Module):
+    """ Bottleneck Attention
+    Paper: `Bottleneck Transformers for Visual Recognition` - https://arxiv.org/abs/2101.11605
+    """
+    def __init__(self, dim, dim_out=None, feat_size=None, stride=1, num_heads=4, qkv_bias=False):
+        super().__init__()
+        assert feat_size is not None, 'A concrete feature size matching expected input (H, W) is required'
+        dim_out = dim_out or dim
+        assert dim_out % num_heads == 0
+        self.num_heads = num_heads
+        self.dim_out = dim_out
+        self.dim_head = dim_out // num_heads
+        self.scale = self.dim_head ** -0.5
+
+        self.qkv = nn.Conv2d(dim, self.dim_out * 3, 1, bias=qkv_bias)
+
+        # NOTE I'm only supporting relative pos embedding for now
+        self.pos_embed = PosEmbedRel(feat_size, dim_head=self.dim_head, scale=self.scale)
+
+        self.pool = nn.AvgPool2d(2, 2) if stride == 2 else nn.Identity()
+
+        self.reset_parameters()
+
+    def reset_parameters(self):
+        trunc_normal_(self.qkv.weight, std=self.qkv.weight.shape[1] ** -0.5)
+        trunc_normal_(self.pos_embed.height_rel, std=self.scale)
+        trunc_normal_(self.pos_embed.width_rel, std=self.scale)
+
+    def forward(self, x):
+        B, C, H, W = x.shape
+        assert H == self.pos_embed.height
+        assert W == self.pos_embed.width
+
+        x = self.qkv(x)  # B, 3 * num_heads * dim_head, H, W
+        x = x.reshape(B, -1, self.dim_head, H * W).transpose(-1, -2)
+        q, k, v = torch.split(x, self.num_heads, dim=1)
+
+        attn_logits = (q @ k.transpose(-1, -2)) * self.scale
+        attn_logits = attn_logits + self.pos_embed(q)  # B, num_heads, H * W, H * W
+
+        attn_out = attn_logits.softmax(dim=-1)
+        attn_out = (attn_out @ v).transpose(-1, -2).reshape(B, self.dim_out, H, W)  # B, dim_out, H, W
+        attn_out = self.pool(attn_out)
+        return attn_out
+
+
+class PoolingAttention(nn.Module):
+    """ Pooling Attention
+    Sums a softmax-weighted global summary with segment-wise and local max-pooled features.
+    """
+    def __init__(self, in_features: int, attention_features: int, segments: int, max_pool_kernel: int):
+        super(PoolingAttention, self).__init__()
+        self.attn = nn.Linear(in_features, attention_features * 5)
+        self.segments = segments
+        self.max_pool_kernel = max_pool_kernel
+
+    def forward(self, inp: torch.Tensor):  # Shape: [Batch, Sequence, Features]
+        batch, sequence, features = inp.size()
+        assert sequence % self.segments == 0
+
+        qry, key, val, seg, loc = self.attn(inp).chunk(5, 2)  # 5x Shape: [Batch, Sequence, AttentionFeatures]
+
+        aggregated = qry.mean(1)  # Shape: [Batch, AttentionFeatures]
+        aggregated = torch.einsum("ba,bsa->bs", aggregated, key)  # Shape: [Batch, Sequence]
+        aggregated = F.softmax(aggregated, 1)
+        aggregated = torch.einsum("bs,bsa,bza->bza", aggregated, val,
+                                  qry)  # Shape: [Batch, Sequence, AttentionFeatures]
+
+        pooled_sequence = sequence // self.segments
+        segment_max_pooled = seg.view(batch, pooled_sequence, self.segments, -1)
+        segment_max_pooled = segment_max_pooled.max(2, keepdim=True).values  # Shape: [Batch, PooledSequence, 1, AttentionFeatures]
+        segment_max_pooled = segment_max_pooled * qry.view(batch, pooled_sequence, self.segments, -1)  # Shape: [Batch, PooledSequence, PoolSize, AttentionFeatures]
+        segment_max_pooled = segment_max_pooled.view(batch, sequence, -1)  # Shape: [Batch, Sequence, AttentionFeatures]
+
+        loc = loc.transpose(1, 2)  # Shape: [Batch, AttentionFeatures, Sequence]
+        local_max_pooled = F.max_pool1d(loc, self.max_pool_kernel, 1, self.max_pool_kernel // 2)
+        local_max_pooled =
local_max_pooled.transpose(1, 2) # Shape: [Batch, Sequence, AttentionFeatures] + + return aggregated + segment_max_pooled + local_max_pooled \ No newline at end of file diff --git a/timm/models/layers/selective_kernel.py b/timm/models/layers/selective_kernel.py index 1aeb92945e..3d71e3aa69 100644 --- a/timm/models/layers/selective_kernel.py +++ b/timm/models/layers/selective_kernel.py @@ -7,7 +7,7 @@ import torch from torch import nn as nn -from .conv_bn_act import ConvBnAct +from .conv_bn_act import ConvNormActAa from .helpers import make_divisible from .trace_utils import _assert @@ -20,8 +20,7 @@ def _kernel_valid(k): class SelectiveKernelAttn(nn.Module): - def __init__(self, channels, num_paths=2, attn_channels=32, - act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d): + def __init__(self, channels, num_paths=2, attn_channels=32, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d): """ Selective Kernel Attention Module Selective Kernel attention mechanism factored out into its own module. @@ -51,7 +50,7 @@ class SelectiveKernel(nn.Module): def __init__(self, in_channels, out_channels=None, kernel_size=None, stride=1, dilation=1, groups=1, rd_ratio=1./16, rd_channels=None, rd_divisor=8, keep_3x3=True, split_input=True, - drop_block=None, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, aa_layer=None): + act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, aa_layer=None, drop_layer=None): """ Selective Kernel Convolution Module As described in Selective Kernel Networks (https://arxiv.org/abs/1903.06586) with some modifications. @@ -72,9 +71,10 @@ def __init__(self, in_channels, out_channels=None, kernel_size=None, stride=1, d keep_3x3 (bool): keep all branch convolution kernels as 3x3, changing larger kernels for dilations split_input (bool): split input channels evenly across each convolution branch, keeps param count lower, can be viewed as grouping by path, output expands to module out_channels count - drop_block (nn.Module): drop block module act_layer (nn.Module): activation layer to use norm_layer (nn.Module): batchnorm/norm layer to use + aa_layer (nn.Module): anti-aliasing module + drop_layer (nn.Module): spatial drop module in convs (drop block, etc) """ super(SelectiveKernel, self).__init__() out_channels = out_channels or in_channels @@ -97,15 +97,14 @@ def __init__(self, in_channels, out_channels=None, kernel_size=None, stride=1, d groups = min(out_channels, groups) conv_kwargs = dict( - stride=stride, groups=groups, drop_block=drop_block, act_layer=act_layer, norm_layer=norm_layer, - aa_layer=aa_layer) + stride=stride, groups=groups, act_layer=act_layer, norm_layer=norm_layer, + aa_layer=aa_layer, drop_layer=drop_layer) self.paths = nn.ModuleList([ - ConvBnAct(in_channels, out_channels, kernel_size=k, dilation=d, **conv_kwargs) + ConvNormActAa(in_channels, out_channels, kernel_size=k, dilation=d, **conv_kwargs) for k, d in zip(kernel_size, dilation)]) attn_channels = rd_channels or make_divisible(out_channels * rd_ratio, divisor=rd_divisor) self.attn = SelectiveKernelAttn(out_channels, self.num_paths, attn_channels) - self.drop_block = drop_block def forward(self, x): if self.split_input: diff --git a/timm/models/layers/separable_conv.py b/timm/models/layers/separable_conv.py index 1ddcb4e624..c081e02bc4 100644 --- a/timm/models/layers/separable_conv.py +++ b/timm/models/layers/separable_conv.py @@ -8,16 +8,16 @@ from torch import nn as nn from .create_conv2d import create_conv2d -from .create_norm_act import convert_norm_act +from .create_norm_act import get_norm_act_layer -class 
SeparableConvBnAct(nn.Module): +class SeparableConvNormAct(nn.Module): """ Separable Conv w/ trailing Norm and Activation """ def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, dilation=1, padding='', bias=False, channel_multiplier=1.0, pw_kernel_size=1, norm_layer=nn.BatchNorm2d, act_layer=nn.ReLU, - apply_act=True, drop_block=None): - super(SeparableConvBnAct, self).__init__() + apply_act=True, drop_layer=None): + super(SeparableConvNormAct, self).__init__() self.conv_dw = create_conv2d( in_channels, int(in_channels * channel_multiplier), kernel_size, @@ -26,8 +26,9 @@ def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, dilation= self.conv_pw = create_conv2d( int(in_channels * channel_multiplier), out_channels, pw_kernel_size, padding=padding, bias=bias) - norm_act_layer = convert_norm_act(norm_layer, act_layer) - self.bn = norm_act_layer(out_channels, apply_act=apply_act, drop_block=drop_block) + norm_act_layer = get_norm_act_layer(norm_layer, act_layer) + norm_kwargs = dict(drop_layer=drop_layer) if drop_layer is not None else {} + self.bn = norm_act_layer(out_channels, apply_act=apply_act, **norm_kwargs) @property def in_channels(self): @@ -40,11 +41,13 @@ def out_channels(self): def forward(self, x): x = self.conv_dw(x) x = self.conv_pw(x) - if self.bn is not None: - x = self.bn(x) + x = self.bn(x) return x +SeparableConvBnAct = SeparableConvNormAct + + class SeparableConv2d(nn.Module): """ Separable Conv """ diff --git a/timm/models/layers/split_attn.py b/timm/models/layers/split_attn.py index dde601befa..ac54f8988a 100644 --- a/timm/models/layers/split_attn.py +++ b/timm/models/layers/split_attn.py @@ -35,11 +35,10 @@ class SplitAttn(nn.Module): """ def __init__(self, in_channels, out_channels=None, kernel_size=3, stride=1, padding=None, dilation=1, groups=1, bias=False, radix=2, rd_ratio=0.25, rd_channels=None, rd_divisor=8, - act_layer=nn.ReLU, norm_layer=None, drop_block=None, **kwargs): + act_layer=nn.ReLU, norm_layer=None, drop_layer=None, **kwargs): super(SplitAttn, self).__init__() out_channels = out_channels or in_channels self.radix = radix - self.drop_block = drop_block mid_chs = out_channels * radix if rd_channels is None: attn_chs = make_divisible(in_channels * radix * rd_ratio, min_value=32, divisor=rd_divisor) @@ -51,6 +50,7 @@ def __init__(self, in_channels, out_channels=None, kernel_size=3, stride=1, padd in_channels, mid_chs, kernel_size, stride, padding, dilation, groups=groups * radix, bias=bias, **kwargs) self.bn0 = norm_layer(mid_chs) if norm_layer else nn.Identity() + self.drop = drop_layer() if drop_layer is not None else nn.Identity() self.act0 = act_layer(inplace=True) self.fc1 = nn.Conv2d(out_channels, attn_chs, 1, groups=groups) self.bn1 = norm_layer(attn_chs) if norm_layer else nn.Identity() @@ -61,8 +61,7 @@ def __init__(self, in_channels, out_channels=None, kernel_size=3, stride=1, padd def forward(self, x): x = self.conv(x) x = self.bn0(x) - if self.drop_block is not None: - x = self.drop_block(x) + x = self.drop(x) x = self.act0(x) B, RC, H, W = x.shape diff --git a/timm/models/mobilenetv3.py b/timm/models/mobilenetv3.py index f810eb8281..f49a35de7f 100644 --- a/timm/models/mobilenetv3.py +++ b/timm/models/mobilenetv3.py @@ -20,7 +20,7 @@ round_channels, resolve_bn_args, resolve_act_layer, BN_EPS_TF_DEFAULT from .features import FeatureInfo, FeatureHooks from .helpers import build_model_with_cfg, default_cfg_for_features -from .layers import SelectAdaptivePool2d, Linear, create_conv2d, get_act_fn, hard_sigmoid 
+from .layers import SelectAdaptivePool2d, Linear, create_conv2d, get_act_fn, get_norm_act_layer from .registry import register_model __all__ = ['MobileNetV3', 'MobileNetV3Features'] @@ -95,6 +95,7 @@ def __init__(self, block_args, num_classes=1000, in_chans=3, stem_size=16, num_f super(MobileNetV3, self).__init__() act_layer = act_layer or nn.ReLU norm_layer = norm_layer or nn.BatchNorm2d + norm_act_layer = get_norm_act_layer(norm_layer, act_layer) se_layer = se_layer or SqueezeExcite self.num_classes = num_classes self.num_features = num_features @@ -103,8 +104,7 @@ def __init__(self, block_args, num_classes=1000, in_chans=3, stem_size=16, num_f # Stem stem_size = round_chs_fn(stem_size) self.conv_stem = create_conv2d(in_chans, stem_size, 3, stride=2, padding=pad_type) - self.bn1 = norm_layer(stem_size) - self.act1 = act_layer(inplace=True) + self.bn1 = norm_act_layer(stem_size, inplace=True) # Middle stages (IR/ER/DS Blocks) builder = EfficientNetBuilder( @@ -125,7 +125,7 @@ def __init__(self, block_args, num_classes=1000, in_chans=3, stem_size=16, num_f efficientnet_init_weights(self) def as_sequential(self): - layers = [self.conv_stem, self.bn1, self.act1] + layers = [self.conv_stem, self.bn1] layers.extend(self.blocks) layers.extend([self.global_pool, self.conv_head, self.act2]) layers.extend([nn.Flatten(), nn.Dropout(self.drop_rate), self.classifier]) @@ -144,7 +144,6 @@ def reset_classifier(self, num_classes, global_pool='avg'): def forward_features(self, x): x = self.conv_stem(x) x = self.bn1(x) - x = self.act1(x) x = self.blocks(x) x = self.global_pool(x) x = self.conv_head(x) diff --git a/timm/models/nasnet.py b/timm/models/nasnet.py index 2afe82c3f3..9c257d9d00 100644 --- a/timm/models/nasnet.py +++ b/timm/models/nasnet.py @@ -9,7 +9,7 @@ import torch.nn.functional as F from .helpers import build_model_with_cfg -from .layers import ConvBnAct, create_conv2d, create_pool2d, create_classifier +from .layers import ConvNormAct, create_conv2d, create_pool2d, create_classifier from .registry import register_model __all__ = ['NASNetALarge'] @@ -420,7 +420,7 @@ def __init__(self, num_classes=1000, in_chans=3, stem_size=96, channel_multiplie channels = self.num_features // 24 # 24 is default value for the architecture - self.conv0 = ConvBnAct( + self.conv0 = ConvNormAct( in_channels=in_chans, out_channels=self.stem_size, kernel_size=3, padding=0, stride=2, norm_layer=partial(nn.BatchNorm2d, eps=0.001, momentum=0.1), apply_act=False) diff --git a/timm/models/pnasnet.py b/timm/models/pnasnet.py index 999181563a..208bccf341 100644 --- a/timm/models/pnasnet.py +++ b/timm/models/pnasnet.py @@ -13,7 +13,7 @@ import torch.nn.functional as F from .helpers import build_model_with_cfg -from .layers import ConvBnAct, create_conv2d, create_pool2d, create_classifier +from .layers import ConvNormAct, create_conv2d, create_pool2d, create_classifier from .registry import register_model __all__ = ['PNASNet5Large'] @@ -243,7 +243,7 @@ def __init__(self, num_classes=1000, in_chans=3, output_stride=32, drop_rate=0., self.num_features = 4320 assert output_stride == 32 - self.conv_0 = ConvBnAct( + self.conv_0 = ConvNormAct( in_chans, 96, kernel_size=3, stride=2, padding=0, norm_layer=partial(nn.BatchNorm2d, eps=0.001, momentum=0.1), apply_act=False) diff --git a/timm/models/regnet.py b/timm/models/regnet.py index 6a38107467..8a0689f71d 100644 --- a/timm/models/regnet.py +++ b/timm/models/regnet.py @@ -15,45 +15,76 @@ """ import numpy as np import torch.nn as nn +from dataclasses import dataclass +from 
functools import partial +from typing import Optional, Union, Callable from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD -from .helpers import build_model_with_cfg -from .layers import ClassifierHead, AvgPool2dSame, ConvBnAct, SEModule, DropPath +from .helpers import build_model_with_cfg, named_apply +from .layers import ClassifierHead, AvgPool2dSame, ConvNormAct, SEModule, DropPath, get_act_layer, GroupNormAct from .registry import register_model -def _mcfg(**kwargs): - cfg = dict(se_ratio=0., bottle_ratio=1., stem_width=32) - cfg.update(**kwargs) - return cfg +@dataclass +class RegNetCfg: + depth: int = 21 + w0: int = 80 + wa: float = 42.63 + wm: float = 2.66 + group_size: int = 24 + bottle_ratio: float = 1. + se_ratio: float = 0. + stem_width: int = 32 + downsample: Optional[str] = 'conv1x1' + linear_out: bool = False + act_layer: Union[str, Callable] = 'relu' + norm_layer: Union[str, Callable] = 'batchnorm' # Model FLOPS = three trailing digits * 10^8 model_cfgs = dict( - regnetx_002=_mcfg(w0=24, wa=36.44, wm=2.49, group_w=8, depth=13), - regnetx_004=_mcfg(w0=24, wa=24.48, wm=2.54, group_w=16, depth=22), - regnetx_006=_mcfg(w0=48, wa=36.97, wm=2.24, group_w=24, depth=16), - regnetx_008=_mcfg(w0=56, wa=35.73, wm=2.28, group_w=16, depth=16), - regnetx_016=_mcfg(w0=80, wa=34.01, wm=2.25, group_w=24, depth=18), - regnetx_032=_mcfg(w0=88, wa=26.31, wm=2.25, group_w=48, depth=25), - regnetx_040=_mcfg(w0=96, wa=38.65, wm=2.43, group_w=40, depth=23), - regnetx_064=_mcfg(w0=184, wa=60.83, wm=2.07, group_w=56, depth=17), - regnetx_080=_mcfg(w0=80, wa=49.56, wm=2.88, group_w=120, depth=23), - regnetx_120=_mcfg(w0=168, wa=73.36, wm=2.37, group_w=112, depth=19), - regnetx_160=_mcfg(w0=216, wa=55.59, wm=2.1, group_w=128, depth=22), - regnetx_320=_mcfg(w0=320, wa=69.86, wm=2.0, group_w=168, depth=23), - regnety_002=_mcfg(w0=24, wa=36.44, wm=2.49, group_w=8, depth=13, se_ratio=0.25), - regnety_004=_mcfg(w0=48, wa=27.89, wm=2.09, group_w=8, depth=16, se_ratio=0.25), - regnety_006=_mcfg(w0=48, wa=32.54, wm=2.32, group_w=16, depth=15, se_ratio=0.25), - regnety_008=_mcfg(w0=56, wa=38.84, wm=2.4, group_w=16, depth=14, se_ratio=0.25), - regnety_016=_mcfg(w0=48, wa=20.71, wm=2.65, group_w=24, depth=27, se_ratio=0.25), - regnety_032=_mcfg(w0=80, wa=42.63, wm=2.66, group_w=24, depth=21, se_ratio=0.25), - regnety_040=_mcfg(w0=96, wa=31.41, wm=2.24, group_w=64, depth=22, se_ratio=0.25), - regnety_064=_mcfg(w0=112, wa=33.22, wm=2.27, group_w=72, depth=25, se_ratio=0.25), - regnety_080=_mcfg(w0=192, wa=76.82, wm=2.19, group_w=56, depth=17, se_ratio=0.25), - regnety_120=_mcfg(w0=168, wa=73.36, wm=2.37, group_w=112, depth=19, se_ratio=0.25), - regnety_160=_mcfg(w0=200, wa=106.23, wm=2.48, group_w=112, depth=18, se_ratio=0.25), - regnety_320=_mcfg(w0=232, wa=115.89, wm=2.53, group_w=232, depth=20, se_ratio=0.25), + # RegNet-X + regnetx_002=RegNetCfg(w0=24, wa=36.44, wm=2.49, group_size=8, depth=13), + regnetx_004=RegNetCfg(w0=24, wa=24.48, wm=2.54, group_size=16, depth=22), + regnetx_006=RegNetCfg(w0=48, wa=36.97, wm=2.24, group_size=24, depth=16), + regnetx_008=RegNetCfg(w0=56, wa=35.73, wm=2.28, group_size=16, depth=16), + regnetx_016=RegNetCfg(w0=80, wa=34.01, wm=2.25, group_size=24, depth=18), + regnetx_032=RegNetCfg(w0=88, wa=26.31, wm=2.25, group_size=48, depth=25), + regnetx_040=RegNetCfg(w0=96, wa=38.65, wm=2.43, group_size=40, depth=23), + regnetx_064=RegNetCfg(w0=184, wa=60.83, wm=2.07, group_size=56, depth=17), + regnetx_080=RegNetCfg(w0=80, wa=49.56, wm=2.88, group_size=120, depth=23), + 
regnetx_120=RegNetCfg(w0=168, wa=73.36, wm=2.37, group_size=112, depth=19), + regnetx_160=RegNetCfg(w0=216, wa=55.59, wm=2.1, group_size=128, depth=22), + regnetx_320=RegNetCfg(w0=320, wa=69.86, wm=2.0, group_size=168, depth=23), + + # RegNet-Y + regnety_002=RegNetCfg(w0=24, wa=36.44, wm=2.49, group_size=8, depth=13, se_ratio=0.25), + regnety_004=RegNetCfg(w0=48, wa=27.89, wm=2.09, group_size=8, depth=16, se_ratio=0.25), + regnety_006=RegNetCfg(w0=48, wa=32.54, wm=2.32, group_size=16, depth=15, se_ratio=0.25), + regnety_008=RegNetCfg(w0=56, wa=38.84, wm=2.4, group_size=16, depth=14, se_ratio=0.25), + regnety_016=RegNetCfg(w0=48, wa=20.71, wm=2.65, group_size=24, depth=27, se_ratio=0.25), + regnety_032=RegNetCfg(w0=80, wa=42.63, wm=2.66, group_size=24, depth=21, se_ratio=0.25), + regnety_040=RegNetCfg(w0=96, wa=31.41, wm=2.24, group_size=64, depth=22, se_ratio=0.25), + regnety_064=RegNetCfg(w0=112, wa=33.22, wm=2.27, group_size=72, depth=25, se_ratio=0.25), + regnety_080=RegNetCfg(w0=192, wa=76.82, wm=2.19, group_size=56, depth=17, se_ratio=0.25), + regnety_120=RegNetCfg(w0=168, wa=73.36, wm=2.37, group_size=112, depth=19, se_ratio=0.25), + regnety_160=RegNetCfg(w0=200, wa=106.23, wm=2.48, group_size=112, depth=18, se_ratio=0.25), + regnety_320=RegNetCfg(w0=232, wa=115.89, wm=2.53, group_size=232, depth=20, se_ratio=0.25), + + # Experimental + regnety_040s_gn=RegNetCfg( + w0=96, wa=31.41, wm=2.24, group_size=64, depth=22, se_ratio=0.25, + act_layer='silu', norm_layer=partial(GroupNormAct, group_size=16)), + + # RegNet-Z (unverified) + regnetz_005=RegNetCfg( + depth=21, w0=16, wa=10.7, wm=2.51, group_size=4, bottle_ratio=4.0, se_ratio=0.25, + downsample=None, linear_out=True, act_layer='silu', + ), + regnetz_040=RegNetCfg( + depth=28, w0=48, wa=14.5, wm=2.226, group_size=8, bottle_ratio=4.0, se_ratio=0.25, + downsample=None, linear_out=True, act_layer='silu', + ), ) @@ -80,6 +111,7 @@ def _cfg(url='', **kwargs): regnetx_120=_cfg(url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_120-65d5521e.pth'), regnetx_160=_cfg(url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_160-c98c4112.pth'), regnetx_320=_cfg(url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnetx_320-8ea38b93.pth'), + regnety_002=_cfg(url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_002-e68ca334.pth'), regnety_004=_cfg(url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_004-0db870e6.pth'), regnety_006=_cfg(url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_006-c67e57ec.pth'), @@ -96,6 +128,11 @@ def _cfg(url='', **kwargs): url='https://dl.fbaipublicfiles.com/deit/regnety_160-a5fe301d.pth', # from Facebook DeiT GitHub repository crop_pct=1.0, test_input_size=(3, 288, 288)), regnety_320=_cfg(url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-regnet/regnety_320-ba464b29.pth'), + + regnety_040s_gn=_cfg(url=''), + + regnetz_005=_cfg(url=''), + regnetz_040=_cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)), ) @@ -125,6 +162,40 @@ def generate_regnet(width_slope, width_initial, width_mult, depth, q=8): return widths, num_stages, max_stage, widths_cont +def downsample_conv(in_chs, out_chs, kernel_size=1, stride=1, dilation=1, norm_layer=None): + norm_layer = norm_layer or nn.BatchNorm2d + kernel_size = 1 if stride == 1 and dilation == 1 else kernel_size + dilation 
= dilation if kernel_size > 1 else 1 + return ConvNormAct( + in_chs, out_chs, kernel_size, stride=stride, dilation=dilation, norm_layer=norm_layer, apply_act=False) + + +def downsample_avg(in_chs, out_chs, kernel_size=1, stride=1, dilation=1, norm_layer=None): + """ AvgPool Downsampling as in 'D' ResNet variants. This is not in RegNet space but I might experiment.""" + norm_layer = norm_layer or nn.BatchNorm2d + avg_stride = stride if dilation == 1 else 1 + pool = nn.Identity() + if stride > 1 or dilation > 1: + avg_pool_fn = AvgPool2dSame if avg_stride == 1 and dilation > 1 else nn.AvgPool2d + pool = avg_pool_fn(2, avg_stride, ceil_mode=True, count_include_pad=False) + return nn.Sequential(*[ + pool, ConvNormAct(in_chs, out_chs, 1, stride=1, norm_layer=norm_layer, apply_act=False)]) + + +def create_shortcut(downsample_type, in_chs, out_chs, kernel_size, stride, dilation=(1, 1), norm_layer=None): + assert downsample_type in ('avg', 'conv1x1', '', None) + if in_chs != out_chs or stride != 1 or dilation[0] != dilation[1]: + if not downsample_type: + return None # no shortcut, no downsample + elif downsample_type == 'avg': + return downsample_avg(in_chs, out_chs, stride=stride, dilation=dilation[0], norm_layer=norm_layer) + else: + return downsample_conv( + in_chs, out_chs, kernel_size=kernel_size, stride=stride, dilation=dilation[0], norm_layer=norm_layer) + else: + return nn.Identity() # identity shortcut (no downsample) + + class Bottleneck(nn.Module): """ RegNet Bottleneck @@ -132,97 +203,70 @@ class Bottleneck(nn.Module): after conv3 to after conv2. Otherwise, it's just redefining the arguments for groups/bottleneck channels. """ - def __init__(self, in_chs, out_chs, stride=1, dilation=1, bottleneck_ratio=1, group_width=1, se_ratio=0.25, - downsample=None, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, aa_layer=None, - drop_block=None, drop_path=None): + def __init__(self, in_chs, out_chs, stride=1, dilation=(1, 1), bottle_ratio=1, group_size=1, se_ratio=0.25, + downsample='conv1x1', linear_out=False, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, + drop_block=None, drop_path_rate=0.): super(Bottleneck, self).__init__() - bottleneck_chs = int(round(out_chs * bottleneck_ratio)) - groups = bottleneck_chs // group_width - - cargs = dict(act_layer=act_layer, norm_layer=norm_layer, aa_layer=aa_layer, drop_block=drop_block) - self.conv1 = ConvBnAct(in_chs, bottleneck_chs, kernel_size=1, **cargs) - self.conv2 = ConvBnAct( - bottleneck_chs, bottleneck_chs, kernel_size=3, stride=stride, dilation=dilation, - groups=groups, **cargs) + act_layer = get_act_layer(act_layer) + bottleneck_chs = int(round(out_chs * bottle_ratio)) + groups = bottleneck_chs // group_size + + cargs = dict(act_layer=act_layer, norm_layer=norm_layer) + self.conv1 = ConvNormAct(in_chs, bottleneck_chs, kernel_size=1, **cargs) + self.conv2 = ConvNormAct( + bottleneck_chs, bottleneck_chs, kernel_size=3, stride=stride, dilation=dilation[0], + groups=groups, drop_layer=drop_block, **cargs) if se_ratio: se_channels = int(round(in_chs * se_ratio)) - self.se = SEModule(bottleneck_chs, rd_channels=se_channels) + self.se = SEModule(bottleneck_chs, rd_channels=se_channels, act_layer=act_layer) else: - self.se = None - cargs['act_layer'] = None - self.conv3 = ConvBnAct(bottleneck_chs, out_chs, kernel_size=1, **cargs) - self.act3 = act_layer(inplace=True) - self.downsample = downsample - self.drop_path = drop_path - - def zero_init_last_bn(self): + self.se = nn.Identity() + self.conv3 = ConvNormAct(bottleneck_chs, out_chs, kernel_size=1, 
apply_act=False, **cargs) + self.act3 = nn.Identity() if linear_out else act_layer() + self.downsample = create_shortcut(downsample, in_chs, out_chs, 1, stride, dilation, norm_layer=norm_layer) + self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0 else nn.Identity() + + def zero_init_last(self): nn.init.zeros_(self.conv3.bn.weight) def forward(self, x): shortcut = x x = self.conv1(x) x = self.conv2(x) - if self.se is not None: - x = self.se(x) + x = self.se(x) x = self.conv3(x) - if self.drop_path is not None: - x = self.drop_path(x) if self.downsample is not None: - shortcut = self.downsample(shortcut) - x += shortcut + # NOTE stuck with downsample as the attr name due to weight compatibility + # now represents the shortcut, no shortcut if None, and non-downsample shortcut == nn.Identity() + x = x + self.drop_path(self.downsample(shortcut)) x = self.act3(x) return x -def downsample_conv( - in_chs, out_chs, kernel_size, stride=1, dilation=1, norm_layer=None): - norm_layer = norm_layer or nn.BatchNorm2d - kernel_size = 1 if stride == 1 and dilation == 1 else kernel_size - dilation = dilation if kernel_size > 1 else 1 - return ConvBnAct( - in_chs, out_chs, kernel_size, stride=stride, dilation=dilation, norm_layer=norm_layer, act_layer=None) - - -def downsample_avg( - in_chs, out_chs, kernel_size, stride=1, dilation=1, norm_layer=None): - """ AvgPool Downsampling as in 'D' ResNet variants. This is not in RegNet space but I might experiment.""" - norm_layer = norm_layer or nn.BatchNorm2d - avg_stride = stride if dilation == 1 else 1 - pool = nn.Identity() - if stride > 1 or dilation > 1: - avg_pool_fn = AvgPool2dSame if avg_stride == 1 and dilation > 1 else nn.AvgPool2d - pool = avg_pool_fn(2, avg_stride, ceil_mode=True, count_include_pad=False) - return nn.Sequential(*[ - pool, ConvBnAct(in_chs, out_chs, 1, stride=1, norm_layer=norm_layer, act_layer=None)]) - - class RegStage(nn.Module): """Stage (sequence of blocks w/ the same output shape).""" - def __init__(self, in_chs, out_chs, stride, dilation, depth, bottle_ratio, group_width, - block_fn=Bottleneck, se_ratio=0., drop_path_rates=None, drop_block=None): + def __init__( + self, depth, in_chs, out_chs, stride, dilation, bottle_ratio=1.0, group_size=8, block_fn=Bottleneck, + se_ratio=0., downsample='conv1x1', linear_out=False, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, + drop_path_rates=None, drop_block=None): super(RegStage, self).__init__() - block_kwargs = {} # FIXME setup to pass various aa, norm, act layer common args + block_kwargs = dict( + bottle_ratio=bottle_ratio, group_size=group_size, se_ratio=se_ratio, downsample=downsample, + linear_out=linear_out, act_layer=act_layer, norm_layer=norm_layer, drop_block=drop_block) first_dilation = 1 if dilation in (1, 2) else 2 for i in range(depth): block_stride = stride if i == 0 else 1 block_in_chs = in_chs if i == 0 else out_chs - block_dilation = first_dilation if i == 0 else dilation - if drop_path_rates is not None and drop_path_rates[i] > 0.: - drop_path = DropPath(drop_path_rates[i]) - else: - drop_path = None - if (block_in_chs != out_chs) or (block_stride != 1): - proj_block = downsample_conv(block_in_chs, out_chs, 1, block_stride, block_dilation) - else: - proj_block = None - + block_dilation = (first_dilation, dilation) + dpr = drop_path_rates[i] if drop_path_rates is not None else 0. 
name = "b{}".format(i + 1) self.add_module( name, block_fn( - block_in_chs, out_chs, block_stride, block_dilation, bottle_ratio, group_width, se_ratio, - downsample=proj_block, drop_block=drop_block, drop_path=drop_path, **block_kwargs) + block_in_chs, out_chs, stride=block_stride, dilation=block_dilation, + drop_path_rate=dpr, **block_kwargs) ) + first_dilation = dilation def forward(self, x): for block in self.children(): @@ -231,33 +275,34 @@ def forward(self, x): class RegNet(nn.Module): - """RegNet model. + """RegNet-X, Y, and Z Models Paper: https://arxiv.org/abs/2003.13678 Original Impl: https://github.com/facebookresearch/pycls/blob/master/pycls/models/regnet.py """ - def __init__(self, cfg, in_chans=3, num_classes=1000, output_stride=32, global_pool='avg', drop_rate=0., - drop_path_rate=0., zero_init_last_bn=True): + def __init__( + self, cfg: RegNetCfg, in_chans=3, num_classes=1000, output_stride=32, global_pool='avg', + drop_rate=0., drop_path_rate=0., zero_init_last=True): super().__init__() - # TODO add drop block, drop path, anti-aliasing, custom bn/act args self.num_classes = num_classes self.drop_rate = drop_rate assert output_stride in (8, 16, 32) # Construct the stem - stem_width = cfg['stem_width'] - self.stem = ConvBnAct(in_chans, stem_width, 3, stride=2) + stem_width = cfg.stem_width + self.stem = ConvNormAct(in_chans, stem_width, 3, stride=2, act_layer=cfg.act_layer, norm_layer=cfg.norm_layer) self.feature_info = [dict(num_chs=stem_width, reduction=2, module='stem')] # Construct the stages prev_width = stem_width curr_stride = 2 stage_params = self._get_stage_params(cfg, output_stride=output_stride, drop_path_rate=drop_path_rate) - se_ratio = cfg['se_ratio'] for i, stage_args in enumerate(stage_params): stage_name = "s{}".format(i + 1) - self.add_module(stage_name, RegStage(prev_width, **stage_args, se_ratio=se_ratio)) + self.add_module(stage_name, RegStage( + in_chs=prev_width, se_ratio=cfg.se_ratio, downsample=cfg.downsample, linear_out=cfg.linear_out, + act_layer=cfg.act_layer, norm_layer=cfg.norm_layer, **stage_args)) prev_width = stage_args['out_chs'] curr_stride *= stage_args['stride'] self.feature_info += [dict(num_chs=prev_width, reduction=curr_stride, module=stage_name)] @@ -267,31 +312,18 @@ def __init__(self, cfg, in_chans=3, num_classes=1000, output_stride=32, global_p self.head = ClassifierHead( in_chs=prev_width, num_classes=num_classes, pool_type=global_pool, drop_rate=drop_rate) - for m in self.modules(): - if isinstance(m, nn.Conv2d): - nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') - elif isinstance(m, nn.BatchNorm2d): - nn.init.ones_(m.weight) - nn.init.zeros_(m.bias) - elif isinstance(m, nn.Linear): - nn.init.normal_(m.weight, mean=0.0, std=0.01) - nn.init.zeros_(m.bias) - if zero_init_last_bn: - for m in self.modules(): - if hasattr(m, 'zero_init_last_bn'): - m.zero_init_last_bn() - - def _get_stage_params(self, cfg, default_stride=2, output_stride=32, drop_path_rate=0.): + named_apply(partial(_init_weights, zero_init_last=zero_init_last), self) + + def _get_stage_params(self, cfg: RegNetCfg, default_stride=2, output_stride=32, drop_path_rate=0.): # Generate RegNet ws per block - w_a, w_0, w_m, d = cfg['wa'], cfg['w0'], cfg['wm'], cfg['depth'] - widths, num_stages, _, _ = generate_regnet(w_a, w_0, w_m, d) + widths, num_stages, _, _ = generate_regnet(cfg.wa, cfg.w0, cfg.wm, cfg.depth) # Convert to per stage format stage_widths, stage_depths = np.unique(widths, return_counts=True) # Use the same group width, bottleneck mult 
and stride for each stage - stage_groups = [cfg['group_w'] for _ in range(num_stages)] - stage_bottle_ratios = [cfg['bottle_ratio'] for _ in range(num_stages)] + stage_groups = [cfg.group_size for _ in range(num_stages)] + stage_bottle_ratios = [cfg.bottle_ratio for _ in range(num_stages)] stage_strides = [] stage_dilations = [] net_stride = 2 @@ -305,11 +337,11 @@ def _get_stage_params(self, cfg, default_stride=2, output_stride=32, drop_path_r net_stride *= stride stage_strides.append(stride) stage_dilations.append(dilation) - stage_dpr = np.split(np.linspace(0, drop_path_rate, d), np.cumsum(stage_depths[:-1])) + stage_dpr = np.split(np.linspace(0, drop_path_rate, cfg.depth), np.cumsum(stage_depths[:-1])) # Adjust the compatibility of ws and gws stage_widths, stage_groups = adjust_widths_groups_comp(stage_widths, stage_bottle_ratios, stage_groups) - param_names = ['out_chs', 'stride', 'dilation', 'depth', 'bottle_ratio', 'group_width', 'drop_path_rates'] + param_names = ['out_chs', 'stride', 'dilation', 'depth', 'bottle_ratio', 'group_size', 'drop_path_rates'] stage_params = [ dict(zip(param_names, params)) for params in zip(stage_widths, stage_strides, stage_dilations, stage_depths, stage_bottle_ratios, stage_groups, @@ -333,6 +365,19 @@ def forward(self, x): return x +def _init_weights(module, name='', zero_init_last=False): + if isinstance(module, nn.Conv2d): + nn.init.kaiming_normal_(module.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(module, nn.BatchNorm2d): + nn.init.ones_(module.weight) + nn.init.zeros_(module.bias) + elif isinstance(module, nn.Linear): + nn.init.normal_(module.weight, mean=0.0, std=0.01) + nn.init.zeros_(module.bias) + elif hasattr(module, 'zero_init_last'): + module.zero_init_last() + + def _filter_fn(state_dict): """ convert patch embedding weight from manual patchify + linear proj to conv""" if 'model' in state_dict: @@ -492,3 +537,27 @@ def regnety_160(pretrained=False, **kwargs): def regnety_320(pretrained=False, **kwargs): """RegNetY-32GF""" return _create_regnet('regnety_320', pretrained, **kwargs) + + +@register_model +def regnety_040s_gn(pretrained=False, **kwargs): + """RegNetY-4.0GF w/ GroupNorm """ + return _create_regnet('regnety_040s_gn', pretrained, **kwargs) + + +@register_model +def regnetz_005(pretrained=False, **kwargs): + """RegNetZ-500MF + NOTE: config found in https://github.com/facebookresearch/ClassyVision/blob/main/classy_vision/models/regnet.py + but it's not clear it is equivalent to paper model as not detailed in the paper. + """ + return _create_regnet('regnetz_005', pretrained, **kwargs) + + +@register_model +def regnetz_040(pretrained=False, **kwargs): + """RegNetZ-4.0GF + NOTE: config found in https://github.com/facebookresearch/ClassyVision/blob/main/classy_vision/models/regnet.py + but it's not clear it is equivalent to paper model as not detailed in the paper. 
+ """ + return _create_regnet('regnetz_040', pretrained, **kwargs) diff --git a/timm/models/resnest.py b/timm/models/resnest.py index 31eebd8092..f311980718 100644 --- a/timm/models/resnest.py +++ b/timm/models/resnest.py @@ -75,7 +75,6 @@ def __init__(self, inplanes, planes, stride=1, downsample=None, else: avd_stride = 0 self.radix = radix - self.drop_block = drop_block self.conv1 = nn.Conv2d(inplanes, group_width, kernel_size=1, bias=False) self.bn1 = norm_layer(group_width) @@ -85,14 +84,16 @@ def __init__(self, inplanes, planes, stride=1, downsample=None, if self.radix >= 1: self.conv2 = SplitAttn( group_width, group_width, kernel_size=3, stride=stride, padding=first_dilation, - dilation=first_dilation, groups=cardinality, radix=radix, norm_layer=norm_layer, drop_block=drop_block) + dilation=first_dilation, groups=cardinality, radix=radix, norm_layer=norm_layer, drop_layer=drop_block) self.bn2 = nn.Identity() + self.drop_block = nn.Identity() self.act2 = nn.Identity() else: self.conv2 = nn.Conv2d( group_width, group_width, kernel_size=3, stride=stride, padding=first_dilation, dilation=first_dilation, groups=cardinality, bias=False) self.bn2 = norm_layer(group_width) + self.drop_block = drop_block() if drop_block is not None else nn.Identity() self.act2 = act_layer(inplace=True) self.avd_last = nn.AvgPool2d(3, avd_stride, padding=1) if avd_stride > 0 and not avd_first else None @@ -109,8 +110,6 @@ def forward(self, x): out = self.conv1(x) out = self.bn1(out) - if self.drop_block is not None: - out = self.drop_block(out) out = self.act1(out) if self.avd_first is not None: @@ -118,8 +117,7 @@ def forward(self, x): out = self.conv2(out) out = self.bn2(out) - if self.drop_block is not None: - out = self.drop_block(out) + out = self.drop_block(out) out = self.act2(out) if self.avd_last is not None: @@ -127,8 +125,6 @@ def forward(self, x): out = self.conv3(out) out = self.bn3(out) - if self.drop_block is not None: - out = self.drop_block(out) if self.downsample is not None: shortcut = self.downsample(x) diff --git a/timm/models/resnet.py b/timm/models/resnet.py index bbcae9a345..cb71c464ca 100644 --- a/timm/models/resnet.py +++ b/timm/models/resnet.py @@ -307,8 +307,9 @@ def __init__(self, inplanes, planes, stride=1, downsample=None, cardinality=1, b inplanes, first_planes, kernel_size=3, stride=1 if use_aa else stride, padding=first_dilation, dilation=first_dilation, bias=False) self.bn1 = norm_layer(first_planes) + self.drop_block = drop_block() if drop_block is not None else nn.Identity() self.act1 = act_layer(inplace=True) - self.aa = aa_layer(channels=first_planes, stride=stride) if use_aa else None + self.aa = aa_layer(channels=first_planes, stride=stride) if use_aa else nn.Identity() self.conv2 = nn.Conv2d( first_planes, outplanes, kernel_size=3, padding=dilation, dilation=dilation, bias=False) @@ -320,7 +321,6 @@ def __init__(self, inplanes, planes, stride=1, downsample=None, cardinality=1, b self.downsample = downsample self.stride = stride self.dilation = dilation - self.drop_block = drop_block self.drop_path = drop_path def zero_init_last_bn(self): @@ -331,16 +331,12 @@ def forward(self, x): x = self.conv1(x) x = self.bn1(x) - if self.drop_block is not None: - x = self.drop_block(x) + x = self.drop_block(x) x = self.act1(x) - if self.aa is not None: - x = self.aa(x) + x = self.aa(x) x = self.conv2(x) x = self.bn2(x) - if self.drop_block is not None: - x = self.drop_block(x) if self.se is not None: x = self.se(x) @@ -378,8 +374,9 @@ def __init__(self, inplanes, planes, stride=1, 
downsample=None, cardinality=1, b first_planes, width, kernel_size=3, stride=1 if use_aa else stride, padding=first_dilation, dilation=first_dilation, groups=cardinality, bias=False) self.bn2 = norm_layer(width) + self.drop_block = drop_block() if drop_block is not None else nn.Identity() self.act2 = act_layer(inplace=True) - self.aa = aa_layer(channels=width, stride=stride) if use_aa else None + self.aa = aa_layer(channels=width, stride=stride) if use_aa else nn.Identity() self.conv3 = nn.Conv2d(width, outplanes, kernel_size=1, bias=False) self.bn3 = norm_layer(outplanes) @@ -390,7 +387,6 @@ def __init__(self, inplanes, planes, stride=1, downsample=None, cardinality=1, b self.downsample = downsample self.stride = stride self.dilation = dilation - self.drop_block = drop_block self.drop_path = drop_path def zero_init_last_bn(self): @@ -401,22 +397,16 @@ def forward(self, x): x = self.conv1(x) x = self.bn1(x) - if self.drop_block is not None: - x = self.drop_block(x) x = self.act1(x) x = self.conv2(x) x = self.bn2(x) - if self.drop_block is not None: - x = self.drop_block(x) + x = self.drop_block(x) x = self.act2(x) - if self.aa is not None: - x = self.aa(x) + x = self.aa(x) x = self.conv3(x) x = self.bn3(x) - if self.drop_block is not None: - x = self.drop_block(x) if self.se is not None: x = self.se(x) @@ -463,11 +453,11 @@ def downsample_avg( ]) -def drop_blocks(drop_block_rate=0.): +def drop_blocks(drop_prob=0.): return [ None, None, - DropBlock2d(drop_block_rate, 5, 0.25) if drop_block_rate else None, - DropBlock2d(drop_block_rate, 3, 1.00) if drop_block_rate else None] + partial(DropBlock2d, drop_prob=drop_prob, block_size=5, gamma_scale=0.25) if drop_prob else None, + partial(DropBlock2d, drop_prob=drop_prob, block_size=3, gamma_scale=1.00) if drop_prob else None] def make_blocks( diff --git a/timm/models/rexnet.py b/timm/models/rexnet.py index f27ce5d899..1cb8e2f545 100644 --- a/timm/models/rexnet.py +++ b/timm/models/rexnet.py @@ -17,7 +17,7 @@ from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD from .helpers import build_model_with_cfg -from .layers import ClassifierHead, create_act_layer, ConvBnAct, DropPath, make_divisible, SEModule +from .layers import ClassifierHead, create_act_layer, ConvNormAct, DropPath, make_divisible, SEModule from .registry import register_model from .efficientnet_builder import efficientnet_init_weights @@ -63,19 +63,19 @@ def __init__(self, in_chs, out_chs, stride, exp_ratio=1.0, se_ratio=0., ch_div=1 if exp_ratio != 1.: dw_chs = make_divisible(round(in_chs * exp_ratio), divisor=ch_div) - self.conv_exp = ConvBnAct(in_chs, dw_chs, act_layer=act_layer) + self.conv_exp = ConvNormAct(in_chs, dw_chs, act_layer=act_layer) else: dw_chs = in_chs self.conv_exp = None - self.conv_dw = ConvBnAct(dw_chs, dw_chs, 3, stride=stride, groups=dw_chs, apply_act=False) + self.conv_dw = ConvNormAct(dw_chs, dw_chs, 3, stride=stride, groups=dw_chs, apply_act=False) if se_ratio > 0: self.se = SEWithNorm(dw_chs, rd_channels=make_divisible(int(dw_chs * se_ratio), ch_div)) else: self.se = None self.act_dw = create_act_layer(dw_act_layer) - self.conv_pwl = ConvBnAct(dw_chs, out_chs, 1, apply_act=False) + self.conv_pwl = ConvNormAct(dw_chs, out_chs, 1, apply_act=False) self.drop_path = drop_path def feat_channels(self, exp=False): @@ -138,7 +138,7 @@ def _build_blocks( feat_chs += [features[-1].feat_channels()] pen_chs = make_divisible(1280 * width_mult, divisor=ch_div) feature_info += [dict(num_chs=feat_chs[-1], reduction=curr_stride, module=f'features.{len(features) 
- 1}')] - features.append(ConvBnAct(prev_chs, pen_chs, act_layer=act_layer)) + features.append(ConvNormAct(prev_chs, pen_chs, act_layer=act_layer)) return features, feature_info @@ -153,7 +153,7 @@ def __init__(self, in_chans=3, num_classes=1000, global_pool='avg', output_strid assert output_stride == 32 # FIXME support dilation stem_base_chs = 32 / width_mult if width_mult < 1.0 else 32 stem_chs = make_divisible(round(stem_base_chs * width_mult), divisor=ch_div) - self.stem = ConvBnAct(in_chans, stem_chs, 3, stride=2, act_layer=act_layer) + self.stem = ConvNormAct(in_chans, stem_chs, 3, stride=2, act_layer=act_layer) block_cfg = _block_cfg(width_mult, depth_mult, initial_chs, final_chs, se_ratio, ch_div) features, self.feature_info = _build_blocks( diff --git a/timm/models/sknet.py b/timm/models/sknet.py index 4dc2aa534c..87520fbe09 100644 --- a/timm/models/sknet.py +++ b/timm/models/sknet.py @@ -14,7 +14,7 @@ from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD from .helpers import build_model_with_cfg -from .layers import SelectiveKernel, ConvBnAct, create_attn +from .layers import SelectiveKernel, ConvNormAct, ConvNormActAa, create_attn from .registry import register_model from .resnet import ResNet @@ -52,7 +52,7 @@ def __init__(self, inplanes, planes, stride=1, downsample=None, cardinality=1, b super(SelectiveKernelBasic, self).__init__() sk_kwargs = sk_kwargs or {} - conv_kwargs = dict(drop_block=drop_block, act_layer=act_layer, norm_layer=norm_layer, aa_layer=aa_layer) + conv_kwargs = dict(act_layer=act_layer, norm_layer=norm_layer) assert cardinality == 1, 'BasicBlock only supports cardinality of 1' assert base_width == 64, 'BasicBlock doest not support changing base width' first_planes = planes // reduce_first @@ -60,16 +60,13 @@ def __init__(self, inplanes, planes, stride=1, downsample=None, cardinality=1, b first_dilation = first_dilation or dilation self.conv1 = SelectiveKernel( - inplanes, first_planes, stride=stride, dilation=first_dilation, **conv_kwargs, **sk_kwargs) - conv_kwargs['act_layer'] = None - self.conv2 = ConvBnAct( - first_planes, outplanes, kernel_size=3, dilation=dilation, **conv_kwargs) + inplanes, first_planes, stride=stride, dilation=first_dilation, + aa_layer=aa_layer, drop_layer=drop_block, **conv_kwargs, **sk_kwargs) + self.conv2 = ConvNormAct( + first_planes, outplanes, kernel_size=3, dilation=dilation, apply_act=False, **conv_kwargs) self.se = create_attn(attn_layer, outplanes) self.act = act_layer(inplace=True) self.downsample = downsample - self.stride = stride - self.dilation = dilation - self.drop_block = drop_block self.drop_path = drop_path def zero_init_last_bn(self): @@ -100,24 +97,20 @@ def __init__(self, inplanes, planes, stride=1, downsample=None, super(SelectiveKernelBottleneck, self).__init__() sk_kwargs = sk_kwargs or {} - conv_kwargs = dict(drop_block=drop_block, act_layer=act_layer, norm_layer=norm_layer, aa_layer=aa_layer) + conv_kwargs = dict(act_layer=act_layer, norm_layer=norm_layer) width = int(math.floor(planes * (base_width / 64)) * cardinality) first_planes = width // reduce_first outplanes = planes * self.expansion first_dilation = first_dilation or dilation - self.conv1 = ConvBnAct(inplanes, first_planes, kernel_size=1, **conv_kwargs) + self.conv1 = ConvNormAct(inplanes, first_planes, kernel_size=1, **conv_kwargs) self.conv2 = SelectiveKernel( first_planes, width, stride=stride, dilation=first_dilation, groups=cardinality, - **conv_kwargs, **sk_kwargs) - conv_kwargs['act_layer'] = None - self.conv3 = 
ConvBnAct(width, outplanes, kernel_size=1, **conv_kwargs) + aa_layer=aa_layer, drop_layer=drop_block, **conv_kwargs, **sk_kwargs) + self.conv3 = ConvNormAct(width, outplanes, kernel_size=1, apply_act=False, **conv_kwargs) self.se = create_attn(attn_layer, outplanes) self.act = act_layer(inplace=True) self.downsample = downsample - self.stride = stride - self.dilation = dilation - self.drop_block = drop_block self.drop_path = drop_path def zero_init_last_bn(self): diff --git a/timm/models/vovnet.py b/timm/models/vovnet.py index 608cd45b4e..c9d8c6ffb1 100644 --- a/timm/models/vovnet.py +++ b/timm/models/vovnet.py @@ -20,8 +20,8 @@ from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD from .registry import register_model from .helpers import build_model_with_cfg -from .layers import ConvBnAct, SeparableConvBnAct, BatchNormAct2d, ClassifierHead, DropPath,\ - create_attn, create_norm_act, get_norm_act_layer +from .layers import ConvNormAct, SeparableConvNormAct, BatchNormAct2d, ClassifierHead, DropPath,\ + create_attn, create_norm_act_layer, get_norm_act_layer # model cfgs adapted from https://github.com/youngwanLEE/vovnet-detectron2 & @@ -189,23 +189,23 @@ def __init__(self, in_chs, mid_chs, out_chs, layer_per_block, residual=False, next_in_chs = in_chs if self.depthwise and next_in_chs != mid_chs: assert not residual - self.conv_reduction = ConvBnAct(next_in_chs, mid_chs, 1, **conv_kwargs) + self.conv_reduction = ConvNormAct(next_in_chs, mid_chs, 1, **conv_kwargs) else: self.conv_reduction = None mid_convs = [] for i in range(layer_per_block): if self.depthwise: - conv = SeparableConvBnAct(mid_chs, mid_chs, **conv_kwargs) + conv = SeparableConvNormAct(mid_chs, mid_chs, **conv_kwargs) else: - conv = ConvBnAct(next_in_chs, mid_chs, 3, **conv_kwargs) + conv = ConvNormAct(next_in_chs, mid_chs, 3, **conv_kwargs) next_in_chs = mid_chs mid_convs.append(conv) self.conv_mid = SequentialAppendList(*mid_convs) # feature aggregation next_in_chs = in_chs + layer_per_block * mid_chs - self.conv_concat = ConvBnAct(next_in_chs, out_chs, **conv_kwargs) + self.conv_concat = ConvNormAct(next_in_chs, out_chs, **conv_kwargs) if attn: self.attn = create_attn(attn, out_chs) @@ -283,9 +283,9 @@ def __init__(self, cfg, in_chans=3, num_classes=1000, global_pool='avg', drop_ra # Stem module last_stem_stride = stem_stride // 2 - conv_type = SeparableConvBnAct if cfg["depthwise"] else ConvBnAct + conv_type = SeparableConvNormAct if cfg["depthwise"] else ConvNormAct self.stem = nn.Sequential(*[ - ConvBnAct(in_chans, stem_chs[0], 3, stride=2, **conv_kwargs), + ConvNormAct(in_chans, stem_chs[0], 3, stride=2, **conv_kwargs), conv_type(stem_chs[0], stem_chs[1], 3, stride=1, **conv_kwargs), conv_type(stem_chs[1], stem_chs[2], 3, stride=last_stem_stride, **conv_kwargs), ]) @@ -395,12 +395,12 @@ def eca_vovnet39b(pretrained=False, **kwargs): @register_model def ese_vovnet39b_evos(pretrained=False, **kwargs): def norm_act_fn(num_features, **nkwargs): - return create_norm_act('evonorms0', num_features, jit=False, **nkwargs) + return create_norm_act_layer('evonorms0', num_features, jit=False, **nkwargs) return _create_vovnet('ese_vovnet39b_evos', pretrained=pretrained, norm_layer=norm_act_fn, **kwargs) @register_model def ese_vovnet99b_iabn(pretrained=False, **kwargs): - norm_layer = get_norm_act_layer('iabn') + norm_layer = get_norm_act_layer('iabn', act_layer='leaky_relu') return _create_vovnet( 'ese_vovnet99b_iabn', pretrained=pretrained, norm_layer=norm_layer, act_layer=nn.LeakyReLU, **kwargs) diff --git 
a/timm/models/xception_aligned.py b/timm/models/xception_aligned.py index ea7f5c05e0..457dc11a86 100644 --- a/timm/models/xception_aligned.py +++ b/timm/models/xception_aligned.py @@ -12,7 +12,7 @@ from timm.data import IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD from .helpers import build_model_with_cfg -from .layers import ClassifierHead, ConvBnAct, create_conv2d +from .layers import ClassifierHead, ConvNormAct, create_conv2d, get_norm_act_layer from .layers.helpers import to_3tuple from .registry import register_model @@ -37,12 +37,14 @@ def _cfg(url='', **kwargs): url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_xception_65-c9ae96e8.pth'), xception71=_cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_xception_71-8eec7df1.pth'), + + xception41p=_cfg(url=''), ) class SeparableConv2d(nn.Module): def __init__( - self, inplanes, planes, kernel_size=3, stride=1, dilation=1, padding='', + self, in_chs, out_chs, kernel_size=3, stride=1, dilation=1, padding='', act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d): super(SeparableConv2d, self).__init__() self.kernel_size = kernel_size @@ -50,31 +52,48 @@ def __init__( # depthwise convolution self.conv_dw = create_conv2d( - inplanes, inplanes, kernel_size, stride=stride, + in_chs, in_chs, kernel_size, stride=stride, padding=padding, dilation=dilation, depthwise=True) - self.bn_dw = norm_layer(inplanes) - if act_layer is not None: - self.act_dw = act_layer(inplace=True) - else: - self.act_dw = None + self.bn_dw = norm_layer(in_chs) + self.act_dw = act_layer(inplace=True) if act_layer is not None else nn.Identity() # pointwise convolution - self.conv_pw = create_conv2d(inplanes, planes, kernel_size=1) - self.bn_pw = norm_layer(planes) - if act_layer is not None: - self.act_pw = act_layer(inplace=True) - else: - self.act_pw = None + self.conv_pw = create_conv2d(in_chs, out_chs, kernel_size=1) + self.bn_pw = norm_layer(out_chs) + self.act_pw = act_layer(inplace=True) if act_layer is not None else nn.Identity() def forward(self, x): x = self.conv_dw(x) x = self.bn_dw(x) - if self.act_dw is not None: - x = self.act_dw(x) + x = self.act_dw(x) x = self.conv_pw(x) x = self.bn_pw(x) - if self.act_pw is not None: - x = self.act_pw(x) + x = self.act_pw(x) + return x + + +class PreSeparableConv2d(nn.Module): + def __init__( + self, in_chs, out_chs, kernel_size=3, stride=1, dilation=1, padding='', + act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, first_act=True): + super(PreSeparableConv2d, self).__init__() + norm_act_layer = get_norm_act_layer(norm_layer, act_layer=act_layer) + self.kernel_size = kernel_size + self.dilation = dilation + + self.norm = norm_act_layer(in_chs, inplace=True) if first_act else nn.Identity() + # depthwise convolution + self.conv_dw = create_conv2d( + in_chs, in_chs, kernel_size, stride=stride, + padding=padding, dilation=dilation, depthwise=True) + + # pointwise convolution + self.conv_pw = create_conv2d(in_chs, out_chs, kernel_size=1) + + def forward(self, x): + x = self.norm(x) + x = self.conv_dw(x) + x = self.conv_pw(x) return x @@ -88,8 +107,8 @@ def __init__( self.out_channels = out_chs[-1] self.no_skip = no_skip if not no_skip and (self.out_channels != self.in_channels or stride != 1): - self.shortcut = ConvBnAct( - in_chs, self.out_channels, 1, stride=stride, norm_layer=norm_layer, act_layer=None) + self.shortcut = ConvNormAct( + in_chs, self.out_channels, 1, stride=stride, norm_layer=norm_layer, apply_act=False) else: self.shortcut = None @@ -97,7 
+116,7 @@ def __init__( self.stack = nn.Sequential() for i in range(3): if start_with_relu: - self.stack.add_module(f'act{i + 1}', nn.ReLU(inplace=i > 0)) + self.stack.add_module(f'act{i + 1}', act_layer(inplace=i > 0)) self.stack.add_module(f'conv{i + 1}', SeparableConv2d( in_chs, out_chs[i], 3, stride=stride if i == 2 else 1, dilation=dilation, padding=pad_type, act_layer=separable_act_layer, norm_layer=norm_layer)) @@ -113,11 +132,42 @@ def forward(self, x): return x +class PreXceptionModule(nn.Module): + def __init__( + self, in_chs, out_chs, stride=1, dilation=1, pad_type='', + no_skip=False, act_layer=nn.ReLU, norm_layer=None): + super(PreXceptionModule, self).__init__() + out_chs = to_3tuple(out_chs) + self.in_channels = in_chs + self.out_channels = out_chs[-1] + self.no_skip = no_skip + if not no_skip and (self.out_channels != self.in_channels or stride != 1): + self.shortcut = create_conv2d(in_chs, self.out_channels, 1, stride=stride) + else: + self.shortcut = nn.Identity() + + self.norm = get_norm_act_layer(norm_layer, act_layer=act_layer)(in_chs, inplace=True) + self.stack = nn.Sequential() + for i in range(3): + self.stack.add_module(f'conv{i + 1}', PreSeparableConv2d( + in_chs, out_chs[i], 3, stride=stride if i == 2 else 1, dilation=dilation, padding=pad_type, + act_layer=act_layer, norm_layer=norm_layer, first_act=i > 0)) + in_chs = out_chs[i] + + def forward(self, x): + x = self.norm(x) + skip = x + x = self.stack(x) + if not self.no_skip: + x = x + self.shortcut(skip) + return x + + class XceptionAligned(nn.Module): """Modified Aligned Xception """ - def __init__(self, block_cfg, num_classes=1000, in_chans=3, output_stride=32, + def __init__(self, block_cfg, num_classes=1000, in_chans=3, output_stride=32, preact=False, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, drop_rate=0., global_pool='avg'): super(XceptionAligned, self).__init__() self.num_classes = num_classes @@ -126,31 +176,33 @@ def __init__(self, block_cfg, num_classes=1000, in_chans=3, output_stride=32, layer_args = dict(act_layer=act_layer, norm_layer=norm_layer) self.stem = nn.Sequential(*[ - ConvBnAct(in_chans, 32, kernel_size=3, stride=2, **layer_args), - ConvBnAct(32, 64, kernel_size=3, stride=1, **layer_args) + ConvNormAct(in_chans, 32, kernel_size=3, stride=2, **layer_args), + create_conv2d(32, 64, kernel_size=3, stride=1) if preact else + ConvNormAct(32, 64, kernel_size=3, stride=1, **layer_args) ]) curr_dilation = 1 curr_stride = 2 self.feature_info = [] self.blocks = nn.Sequential() + module_fn = PreXceptionModule if preact else XceptionModule for i, b in enumerate(block_cfg): b['dilation'] = curr_dilation if b['stride'] > 1: - self.feature_info += [dict( - num_chs=to_3tuple(b['out_chs'])[-2], reduction=curr_stride, module=f'blocks.{i}.stack.act3')] + name = f'blocks.{i}.stack.conv2' if preact else f'blocks.{i}.stack.act3' + self.feature_info += [dict(num_chs=to_3tuple(b['out_chs'])[-2], reduction=curr_stride, module=name)] next_stride = curr_stride * b['stride'] if next_stride > output_stride: curr_dilation *= b['stride'] b['stride'] = 1 else: curr_stride = next_stride - self.blocks.add_module(str(i), XceptionModule(**b, **layer_args)) + self.blocks.add_module(str(i), module_fn(**b, **layer_args)) self.num_features = self.blocks[-1].out_channels self.feature_info += [dict( num_chs=self.num_features, reduction=curr_stride, module='blocks.' 
+ str(len(self.blocks) - 1))] - + self.act = act_layer(inplace=True) if preact else nn.Identity() self.head = ClassifierHead( in_chs=self.num_features, num_classes=num_classes, pool_type=global_pool, drop_rate=drop_rate) @@ -163,6 +215,7 @@ def reset_classifier(self, num_classes, global_pool='avg'): def forward_features(self, x): x = self.stem(x) x = self.blocks(x) + x = self.act(x) return x def forward(self, x): @@ -236,3 +289,22 @@ def xception71(pretrained=False, **kwargs): ] model_args = dict(block_cfg=block_cfg, norm_layer=partial(nn.BatchNorm2d, eps=.001, momentum=.1), **kwargs) return _xception('xception71', pretrained=pretrained, **model_args) + + +@register_model +def xception41p(pretrained=False, **kwargs): + """ Modified Aligned Xception-41 w/ Pre-Act + """ + block_cfg = [ + # entry flow + dict(in_chs=64, out_chs=128, stride=2), + dict(in_chs=128, out_chs=256, stride=2), + dict(in_chs=256, out_chs=728, stride=2), + # middle flow + *([dict(in_chs=728, out_chs=728, stride=1)] * 8), + # exit flow + dict(in_chs=728, out_chs=(728, 1024, 1024), stride=2), + dict(in_chs=1024, out_chs=(1536, 1536, 2048), no_skip=True, stride=1), + ] + model_args = dict(block_cfg=block_cfg, preact=True, norm_layer=nn.BatchNorm2d, **kwargs) + return _xception('xception41p', pretrained=pretrained, **model_args) From 1c21cac8f9dfea6d443c76a5786b029dac4948df Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Tue, 14 Dec 2021 13:51:00 -0800 Subject: [PATCH 40/61] Add drop args to benchmark.py --- benchmark.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/benchmark.py b/benchmark.py index ccd9b4fac1..f1604a0474 100755 --- a/benchmark.py +++ b/benchmark.py @@ -199,7 +199,11 @@ def __init__( num_classes=kwargs.pop('num_classes', None), in_chans=3, global_pool=kwargs.pop('gp', 'fast'), - scriptable=torchscript) + scriptable=torchscript, + drop_rate=kwargs.pop('drop', 0.), + drop_path_rate=kwargs.pop('drop_path', None), + drop_block_rate=kwargs.pop('drop_block', None), + ) self.model.to( device=self.device, dtype=self.model_dtype, From 4c8bb295abce63d5513d7933f3d65376d9120f7e Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Thu, 16 Dec 2021 17:11:51 -0800 Subject: [PATCH 41/61] Remove bn-tf arg --- train.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/train.py b/train.py index 0440f551c1..6e74f67bb3 100755 --- a/train.py +++ b/train.py @@ -213,8 +213,6 @@ help='Drop block rate (default: None)') # Batch norm parameters (only works with gen_efficientnet based models currently) -parser.add_argument('--bn-tf', action='store_true', default=False, - help='Use Tensorflow BatchNorm defaults for models that support it (default: False)') parser.add_argument('--bn-momentum', type=float, default=None, help='BatchNorm momentum override (if not None)') parser.add_argument('--bn-eps', type=float, default=None, @@ -424,7 +422,6 @@ def setup_train_task(args, dev_env: DeviceEnv, mixup_active: bool): drop_path_rate=args.drop_path, drop_block_rate=args.drop_block, global_pool=args.gp, - bn_tf=args.bn_tf, bn_momentum=args.bn_momentum, bn_eps=args.bn_eps, scriptable=args.torchscript, From 7eb7e73216c69b87f32973965ae61dd43f8ac7ec Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 17 Dec 2021 09:46:46 -0800 Subject: [PATCH 42/61] File will not stay deleted --- timm/models/layers/pooled_attn.py | 143 ------------------------------ 1 file changed, 143 deletions(-) delete mode 100644 timm/models/layers/pooled_attn.py diff --git a/timm/models/layers/pooled_attn.py b/timm/models/layers/pooled_attn.py 
deleted file mode 100644 index 40cf2b345e..0000000000 --- a/timm/models/layers/pooled_attn.py +++ /dev/null @@ -1,143 +0,0 @@ -from typing import List - -import torch -import torch.nn as nn -import torch.nn.functional as F - -from .helpers import to_2tuple -from .weight_init import trunc_normal_ - - -def rel_logits_1d(q, rel_k, permute_mask: List[int]): - """ Compute relative logits along one dimension - - As per: https://gist.github.com/aravindsrinivas/56359b79f0ce4449bcb04ab4b56a57a2 - Originally from: `Attention Augmented Convolutional Networks` - https://arxiv.org/abs/1904.09925 - - Args: - q: (batch, heads, height, width, dim) - rel_k: (2 * width - 1, dim) - permute_mask: permute output dim according to this - """ - B, H, W, dim = q.shape - x = (q @ rel_k.transpose(-1, -2)) - x = x.reshape(-1, W, 2 * W -1) - - # pad to shift from relative to absolute indexing - x_pad = F.pad(x, [0, 1]).flatten(1) - x_pad = F.pad(x_pad, [0, W - 1]) - - # reshape and slice out the padded elements - x_pad = x_pad.reshape(-1, W + 1, 2 * W - 1) - x = x_pad[:, :W, W - 1:] - - # reshape and tile - x = x.reshape(B, H, 1, W, W).expand(-1, -1, H, -1, -1) - return x.permute(permute_mask) - - -class PosEmbedRel(nn.Module): - """ Relative Position Embedding - As per: https://gist.github.com/aravindsrinivas/56359b79f0ce4449bcb04ab4b56a57a2 - Originally from: `Attention Augmented Convolutional Networks` - https://arxiv.org/abs/1904.09925 - """ - def __init__(self, feat_size, dim_head, scale): - super().__init__() - self.height, self.width = to_2tuple(feat_size) - self.dim_head = dim_head - self.scale = scale - self.height_rel = nn.Parameter(torch.randn(self.height * 2 - 1, dim_head) * self.scale) - self.width_rel = nn.Parameter(torch.randn(self.width * 2 - 1, dim_head) * self.scale) - - def forward(self, q): - B, num_heads, HW, _ = q.shape - - # relative logits in width dimension. - q = q.reshape(B * num_heads, self.height, self.width, -1) - rel_logits_w = rel_logits_1d(q, self.width_rel, permute_mask=(0, 1, 3, 2, 4)) - - # relative logits in height dimension. 
- q = q.transpose(1, 2) - rel_logits_h = rel_logits_1d(q, self.height_rel, permute_mask=(0, 3, 1, 4, 2)) - - rel_logits = rel_logits_h + rel_logits_w - rel_logits = rel_logits.reshape(B, num_heads, HW, HW) - return rel_logits - - -class BottleneckAttn(nn.Module): - """ Bottleneck Attention - Paper: `Bottleneck Transformers for Visual Recognition` - https://arxiv.org/abs/2101.11605 - """ - def __init__(self, dim, dim_out=None, feat_size=None, stride=1, num_heads=4, qkv_bias=False): - super().__init__() - assert feat_size is not None, 'A concrete feature size matching expected input (H, W) is required' - dim_out = dim_out or dim - assert dim_out % num_heads == 0 - self.num_heads = num_heads - self.dim_out = dim_out - self.dim_head = dim_out // num_heads - self.scale = self.dim_head ** -0.5 - - self.qkv = nn.Conv2d(dim, self.dim_out * 3, 1, bias=qkv_bias) - - # NOTE I'm only supporting relative pos embedding for now - self.pos_embed = PosEmbedRel(feat_size, dim_head=self.dim_head, scale=self.scale) - - self.pool = nn.AvgPool2d(2, 2) if stride == 2 else nn.Identity() - - self.reset_parameters() - - def reset_parameters(self): - trunc_normal_(self.qkv.weight, std=self.qkv.weight.shape[1] ** -0.5) - trunc_normal_(self.pos_embed.height_rel, std=self.scale) - trunc_normal_(self.pos_embed.width_rel, std=self.scale) - - def forward(self, x): - B, C, H, W = x.shape - assert H == self.pos_embed.height - assert W == self.pos_embed.width - - x = self.qkv(x) # B, 3 * num_heads * dim_head, H, W - x = x.reshape(B, -1, self.dim_head, H * W).transpose(-1, -2) - q, k, v = torch.split(x, self.num_heads, dim=1) - - attn_logits = (q @ k.transpose(-1, -2)) * self.scale - attn_logits = attn_logits + self.pos_embed(q) # B, num_heads, H * W, H * W - - attn_out = attn_logits.softmax(dim=-1) - attn_out = (attn_out @ v).transpose(-1, -2).reshape(B, self.dim_out, H, W) # B, dim_out, H, W - attn_out = self.pool(attn_out) - return attn_out - - -class PoolingAttention(nn.Module): - def __init__(self, in_features: int, attention_features: int, segments: int, max_pool_kernel: int): - super(PoolingAttention, self).__init__() - self.attn = nn.Linear(in_features, attention_features * 5) - self.segments = segments - self.max_pool_kernel = max_pool_kernel - - def forward(self, inp: torch.Tensor): # Shape: [Batch, Sequence, Features] - batch, sequence, features = inp.size() - assert sequence % self.segments == 0 - - qry, key, val, seg, loc = self.attn(inp).chunk(5, 2) # 5x Shape: [Batch, Sequence, AttentionFeatures] - - aggregated = qry.mean(1, keepdim=True) # Shape: [Batch, AttentionFeatures] - aggregated = torch.einsum("ba,bsa->bs", aggregated, key) # Shape: [Batch, Sequence] - aggregated = F.softmax(aggregated, 1) - aggregated = torch.einsum("bs,bsa,bza->bza", aggregated, val, - qry) # Shape: [Batch, Sequence, AttentionFeatures] - - pooled_sequence = sequence // self.segments - segment_max_pooled = seg.view(batch, pooled_sequence, self.segments, -1) - segment_max_pooled = segment_max_pooled.max(2, keepdim=True) # Shape: [Batch, PooledSequence, 1, AttentionFeatures] - segment_max_pooled = segment_max_pooled * qry.view(batch, pooled_sequence, self.segments, -1) # Shape: [Batch, PooledSequence, PoolSize, AttentionFeatures] - segment_max_pooled = segment_max_pooled.view(batch, sequence, -1) # Shape: [Batch, Sequence, AttentionFeatures] - - loc = loc.transpose(1, 2) # Shape: [Batch, AttentionFeatures, Sequence] - local_max_pooled = F.max_pool1d(loc, self.max_pool_kernel, 1, self.max_pool_kernel // 2) - local_max_pooled = 
local_max_pooled.transpose(1, 2) # Shape: [Batch, Sequence, AttentionFeatures] - - return aggregated + segment_max_pooled + local_max_pooled \ No newline at end of file From f82fb6b608027a5893d936008a7f76db7e431986 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Thu, 27 Jan 2022 17:24:47 -0800 Subject: [PATCH 43/61] Add base lr w/ linear and sqrt scaling to train script --- train.py | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/train.py b/train.py index 6e74f67bb3..0405f941b2 100755 --- a/train.py +++ b/train.py @@ -117,8 +117,12 @@ # Learning rate schedule parameters parser.add_argument('--sched', default='cosine', type=str, metavar='SCHEDULER', help='LR scheduler (default: "cosine"') -parser.add_argument('--lr', type=float, default=0.1, metavar='LR', - help='learning rate (default: 0.05)') +parser.add_argument('--lr', type=float, default=None, metavar='LR', + help='learning rate (default: None => --lr-base') +parser.add_argument('--lr-base', type=float, default=0.1, metavar='LR', + help='base learning rate: lr = lr_base * global_batch_size / 256') +parser.add_argument('--lr-base-scale', type=str, default='', metavar='SCALE', + help='base learning rate vs batch_size scaling ("linear", "sqrt", based on opt if empty)') parser.add_argument('--lr-noise', type=float, nargs='+', default=None, metavar='pct, pct', help='learning rate noise on/off epoch percentages') parser.add_argument('--lr-noise-pct', type=float, default=0.67, metavar='PERCENT', @@ -165,7 +169,7 @@ help='Horizontal flip training aug probability') parser.add_argument('--vflip', type=float, default=0., help='Vertical flip training aug probability') -parser.add_argument('--color-jitter', type=float, default=0.4, metavar='PCT', +parser.add_argument('--color-jitter', type=float, default=None, metavar='PCT', help='Color jitter factor (default: 0.4)') parser.add_argument('--aa', type=str, default=None, metavar='NAME', help='Use AutoAugment policy. "v0" or "original". 
(default: None)'), @@ -439,6 +443,18 @@ def setup_train_task(args, dev_env: DeviceEnv, mixup_active: bool): assert args.aug_splits > 1 model = convert_splitbn_model(model, max(args.aug_splits, 2)) + if args.lr is None: + global_batch_size = args.batch_size * dev_env.world_size + batch_ratio = global_batch_size / 256 + if not args.lr_base_scale: + on = args.opt.lower() + args.lr_base_scale = 'sqrt' if any([o in on for o in ('adam', 'lamb', 'adabelief')]) else 'linear' + if args.lr_base_scale == 'sqrt': + batch_ratio = batch_ratio ** 0.5 + args.lr = args.lr_base * batch_ratio + _logger.info(f'Calculated learning rate ({args.lr}) from base learning rate ({args.lr_base}) ' + f'and global batch size ({global_batch_size}) with {args.lr_base_scale} scaling.') + train_state = setup_model_and_optimizer( dev_env=dev_env, model=model, From 7148039f9fce6271881ad36c5235b251d69adaeb Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Thu, 27 Jan 2022 17:29:49 -0800 Subject: [PATCH 44/61] Tweak base lr log --- train.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/train.py b/train.py index 0405f941b2..3acb990991 100755 --- a/train.py +++ b/train.py @@ -452,8 +452,9 @@ def setup_train_task(args, dev_env: DeviceEnv, mixup_active: bool): if args.lr_base_scale == 'sqrt': batch_ratio = batch_ratio ** 0.5 args.lr = args.lr_base * batch_ratio - _logger.info(f'Calculated learning rate ({args.lr}) from base learning rate ({args.lr_base}) ' - f'and global batch size ({global_batch_size}) with {args.lr_base_scale} scaling.') + if dev_env.primary: + _logger.info(f'Learning rate ({args.lr}) calculated from base learning rate ({args.lr_base}) ' + f'and global batch size ({global_batch_size}) with {args.lr_base_scale} scaling.') train_state = setup_model_and_optimizer( dev_env=dev_env, From fafece230b8c8325fd6144efbab25cbc6cf5ca5c Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Thu, 27 Jan 2022 17:35:50 -0800 Subject: [PATCH 45/61] Allow changing base lr batch size from 256 via arg --- train.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/train.py b/train.py index 3acb990991..7ac84a8164 100755 --- a/train.py +++ b/train.py @@ -120,7 +120,9 @@ parser.add_argument('--lr', type=float, default=None, metavar='LR', help='learning rate (default: None => --lr-base') parser.add_argument('--lr-base', type=float, default=0.1, metavar='LR', - help='base learning rate: lr = lr_base * global_batch_size / 256') + help='base learning rate: lr = lr_base * global_batch_size / base_size') +parser.add_argument('--lr-base-size', type=int, default=256, metavar='DIV', + help='base learning rate batch size (divisor, default: 256).') parser.add_argument('--lr-base-scale', type=str, default='', metavar='SCALE', help='base learning rate vs batch_size scaling ("linear", "sqrt", based on opt if empty)') parser.add_argument('--lr-noise', type=float, nargs='+', default=None, metavar='pct, pct', @@ -445,7 +447,7 @@ def setup_train_task(args, dev_env: DeviceEnv, mixup_active: bool): if args.lr is None: global_batch_size = args.batch_size * dev_env.world_size - batch_ratio = global_batch_size / 256 + batch_ratio = global_batch_size / args.lr_base_size if not args.lr_base_scale: on = args.opt.lower() args.lr_base_scale = 'sqrt' if any([o in on for o in ('adam', 'lamb', 'adabelief')]) else 'linear' From c639a86c67d449b39a1c08542f5eb06ced84ca84 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Mon, 28 Feb 2022 16:28:42 -0800 Subject: [PATCH 46/61] Change TFDS default to full re-shuffle (init) each 
epoch (for now) --- timm/data/parsers/parser_tfds.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/timm/data/parsers/parser_tfds.py b/timm/data/parsers/parser_tfds.py index 132065be02..08fa6dbdb5 100644 --- a/timm/data/parsers/parser_tfds.py +++ b/timm/data/parsers/parser_tfds.py @@ -163,7 +163,7 @@ def __init__( self.subsplit = None # set when data is distributed across workers using sub-splits self.ds = None # initialized lazily on each dataloader worker process self.init_count = 0 - self.reinit_each_iter = False # self.is_training # FIXME evaluating shuffle across epochs + self.reinit_each_iter = self.is_training # FIXME need to determine if this is necessary def _lazy_init(self): """ Lazily initialize the dataset. From bb85b09d2a32b1e5b92790dc9e160081744ba65c Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Mon, 28 Feb 2022 16:39:16 -0800 Subject: [PATCH 47/61] swin v2 fixup for latest changes on norm_norm_norm / bits_and_tpu branch --- timm/models/swin_transformer_v2_cr.py | 25 +++---------------------- 1 file changed, 3 insertions(+), 22 deletions(-) diff --git a/timm/models/swin_transformer_v2_cr.py b/timm/models/swin_transformer_v2_cr.py index b2915bf8b7..bb77466fcd 100644 --- a/timm/models/swin_transformer_v2_cr.py +++ b/timm/models/swin_transformer_v2_cr.py @@ -37,7 +37,7 @@ from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD from .fx_features import register_notrace_function -from .helpers import build_model_with_cfg, overlay_external_default_cfg, named_apply +from .helpers import build_model_with_cfg, named_apply from .layers import DropPath, Mlp, to_2tuple, _assert from .registry import register_model from .vision_transformer import checkpoint_filter_fn @@ -754,29 +754,10 @@ def init_weights(module: nn.Module, name: str = ''): nn.init.zeros_(module.bias) -def _create_swin_transformer_v2_cr(variant, pretrained=False, default_cfg=None, **kwargs): - if default_cfg is None: - default_cfg = deepcopy(default_cfgs[variant]) - overlay_external_default_cfg(default_cfg, kwargs) - default_num_classes = default_cfg['num_classes'] - default_img_size = default_cfg['input_size'][-2:] - - num_classes = kwargs.pop('num_classes', default_num_classes) - img_size = kwargs.pop('img_size', default_img_size) +def _create_swin_transformer_v2_cr(variant, pretrained=False, **kwargs): if kwargs.get('features_only', None): raise RuntimeError('features_only not implemented for Vision Transformer models.') - - model = build_model_with_cfg( - SwinTransformerV2Cr, - variant, - pretrained, - default_cfg=default_cfg, - img_size=img_size, - num_classes=num_classes, - pretrained_filter_fn=checkpoint_filter_fn, - **kwargs - ) - + model = build_model_with_cfg(SwinTransformerV2Cr, variant, pretrained, **kwargs) return model From 15cc9eae3efb54019668b1761312c273cd709764 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Mon, 28 Feb 2022 16:44:24 -0800 Subject: [PATCH 48/61] Fix Swin v2 tuple type hint --- timm/models/swin_transformer_v2_cr.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/timm/models/swin_transformer_v2_cr.py b/timm/models/swin_transformer_v2_cr.py index bb77466fcd..39ea993e89 100644 --- a/timm/models/swin_transformer_v2_cr.py +++ b/timm/models/swin_transformer_v2_cr.py @@ -117,7 +117,7 @@ def window_partition(x, window_size: Tuple[int, int]): @register_notrace_function # reason: int argument is a Proxy -def window_reverse(windows, window_size: tuple[int, int], img_size: tuple[int, int]): +def window_reverse(windows, window_size: Tuple[int, 
int], img_size: Tuple[int, int]): """ Args: windows: (num_windows * B, window_size[0], window_size[1], C) From da2796ae822ccbbf51fcbeed6c53bfee95918386 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Mon, 7 Mar 2022 21:13:20 -0800 Subject: [PATCH 49/61] Add webdataset (WDS) support, update TFDS to make some naming in parsers more similar. Fix workers=0 compatibility. Add ImageNet22k/12k synset defs. --- results/imagenet12k_rw_synsets.txt | 11821 ++++++++++++++ results/imagenet22k_synsets.txt | 21841 ++++++++++++++++++++++++++ timm/data/dataset_factory.py | 5 + timm/data/parsers/parser_factory.py | 4 + timm/data/parsers/parser_tfds.py | 99 +- timm/data/parsers/parser_wds.py | 261 + train.py | 2 +- 7 files changed, 33988 insertions(+), 45 deletions(-) create mode 100644 results/imagenet12k_rw_synsets.txt create mode 100644 results/imagenet22k_synsets.txt create mode 100644 timm/data/parsers/parser_wds.py diff --git a/results/imagenet12k_rw_synsets.txt b/results/imagenet12k_rw_synsets.txt new file mode 100644 index 0000000000..c1517a458f --- /dev/null +++ b/results/imagenet12k_rw_synsets.txt @@ -0,0 +1,11821 @@ +n00005787 +n00006484 +n00007846 +n00015388 +n00017222 +n00021265 +n00021939 +n00120010 +n00141669 +n00288000 +n00288384 +n00324978 +n00326094 +n00433458 +n00433661 +n00433802 +n00434075 +n00439826 +n00440039 +n00440382 +n00440509 +n00440747 +n00440941 +n00441073 +n00441824 +n00442115 +n00442437 +n00442847 +n00442981 +n00443231 +n00443692 +n00443803 +n00444340 +n00444651 +n00444846 +n00444937 +n00445055 +n00445226 +n00445351 +n00445685 +n00445802 +n00446311 +n00446493 +n00446804 +n00446980 +n00447073 +n00447221 +n00447463 +n00447540 +n00447957 +n00448126 +n00448232 +n00448466 +n00448640 +n00448748 +n00448872 +n00448958 +n00449054 +n00449168 +n00449295 +n00449517 +n00449695 +n00449796 +n00449892 +n00449977 +n00450070 +n00450335 +n00450700 +n00450866 +n00450998 +n00451186 +n00451370 +n00451563 +n00451635 +n00452034 +n00452152 +n00452293 +n00452864 +n00453126 +n00453313 +n00453396 +n00453478 +n00453935 +n00454237 +n00454395 +n00454493 +n00454624 +n00454983 +n00455173 +n00456465 +n00463246 +n00463543 +n00464277 +n00464478 +n00464651 +n00464894 +n00466273 +n00466377 +n00466524 +n00466630 +n00466712 +n00466880 +n00467320 +n00467536 +n00467719 +n00467995 +n00468299 +n00468480 +n00469651 +n00470554 +n00470682 +n00470830 +n00470966 +n00471437 +n00471613 +n00474568 +n00474657 +n00474881 +n00475014 +n00475273 +n00475403 +n00475535 +n00475661 +n00475787 +n00476235 +n00476389 +n00477392 +n00477639 +n00478262 +n00479076 +n00479440 +n00479616 +n00479887 +n00480211 +n00480366 +n00480508 +n00480993 +n00481803 +n00482122 +n00482298 +n00483205 +n00483313 +n00483409 +n00483508 +n00483605 +n00483705 +n00483848 +n00523513 +n00825773 +n00887544 +n01055165 +n01314388 +n01314663 +n01314781 +n01315213 +n01316422 +n01317089 +n01317294 +n01317541 +n01317813 +n01317916 +n01318279 +n01318381 +n01318894 +n01319467 +n01321123 +n01321230 +n01321456 +n01321579 +n01321770 +n01321854 +n01322221 +n01322343 +n01322508 +n01322604 +n01322685 +n01322898 +n01322983 +n01323068 +n01323155 +n01323261 +n01323355 +n01323493 +n01323599 +n01324431 +n01324610 +n01326291 +n01338685 +n01339083 +n01339336 +n01339471 +n01339801 +n01340014 +n01379389 +n01381044 +n01384164 +n01386354 +n01392275 +n01392380 +n01395254 +n01396048 +n01397114 +n01397871 +n01402600 +n01405007 +n01405616 +n01407798 +n01410457 +n01415626 +n01421807 +n01422335 +n01424420 +n01438581 +n01439121 +n01439514 +n01440160 +n01440764 +n01441117 +n01442972 +n01443243 
+n01443537 +n01443831 +n01444339 +n01446760 +n01447331 +n01447658 +n01448291 +n01448594 +n01448951 +n01449374 +n01449712 +n01451426 +n01453087 +n01454545 +n01455778 +n01456756 +n01457852 +n01458842 +n01459791 +n01461315 +n01462042 +n01462544 +n01464844 +n01468238 +n01468712 +n01469103 +n01471682 +n01472303 +n01477525 +n01477875 +n01482071 +n01482330 +n01483830 +n01484097 +n01484285 +n01484850 +n01485479 +n01486838 +n01487506 +n01488038 +n01489501 +n01489709 +n01489920 +n01490112 +n01490360 +n01490670 +n01491006 +n01491361 +n01491874 +n01492569 +n01493146 +n01494475 +n01495006 +n01495493 +n01495701 +n01496331 +n01497118 +n01498041 +n01498989 +n01499396 +n01500091 +n01500476 +n01501160 +n01503061 +n01503976 +n01504179 +n01504344 +n01514668 +n01514752 +n01514859 +n01515303 +n01517565 +n01517966 +n01518878 +n01519563 +n01519873 +n01520576 +n01521399 +n01521756 +n01524359 +n01526521 +n01527194 +n01527347 +n01527617 +n01527917 +n01528396 +n01528654 +n01528845 +n01529672 +n01530439 +n01530575 +n01531178 +n01531344 +n01531512 +n01531811 +n01531971 +n01532325 +n01532511 +n01532829 +n01533000 +n01533339 +n01533481 +n01533651 +n01533893 +n01534155 +n01534433 +n01534582 +n01535140 +n01535469 +n01535690 +n01536035 +n01536186 +n01536334 +n01536644 +n01536780 +n01537134 +n01537544 +n01537895 +n01538059 +n01538200 +n01538630 +n01538955 +n01539573 +n01539925 +n01540090 +n01540233 +n01540566 +n01540832 +n01541102 +n01541386 +n01541760 +n01541922 +n01542786 +n01543175 +n01543632 +n01544389 +n01544704 +n01545574 +n01546039 +n01546506 +n01547832 +n01548301 +n01548492 +n01548694 +n01548865 +n01549053 +n01549430 +n01549641 +n01549886 +n01550172 +n01551080 +n01551300 +n01551711 +n01552034 +n01552813 +n01553142 +n01554448 +n01555004 +n01555305 +n01555809 +n01556182 +n01557185 +n01557962 +n01558149 +n01558307 +n01558461 +n01558594 +n01558765 +n01558993 +n01559477 +n01559639 +n01559804 +n01560105 +n01560280 +n01560419 +n01560636 +n01560793 +n01560935 +n01561452 +n01561732 +n01562014 +n01562265 +n01562451 +n01563128 +n01563449 +n01563746 +n01563945 +n01564217 +n01564394 +n01564773 +n01564914 +n01565078 +n01565345 +n01565599 +n01565930 +n01566207 +n01566645 +n01567133 +n01567678 +n01567879 +n01568294 +n01568720 +n01568892 +n01569060 +n01569262 +n01569423 +n01569566 +n01569836 +n01569971 +n01570267 +n01570421 +n01570676 +n01570839 +n01571904 +n01572328 +n01572489 +n01572654 +n01572782 +n01573074 +n01573240 +n01573360 +n01573898 +n01574045 +n01574390 +n01574560 +n01574801 +n01575117 +n01575401 +n01575745 +n01576076 +n01576695 +n01577035 +n01577659 +n01577941 +n01578180 +n01578575 +n01579028 +n01579149 +n01579260 +n01579410 +n01579578 +n01579729 +n01580077 +n01580870 +n01581166 +n01581730 +n01581984 +n01582220 +n01582398 +n01582856 +n01583209 +n01583495 +n01583828 +n01584225 +n01584695 +n01584853 +n01585121 +n01585287 +n01585422 +n01585715 +n01586020 +n01586374 +n01586941 +n01587526 +n01587834 +n01588002 +n01588725 +n01589286 +n01589718 +n01589893 +n01591005 +n01591123 +n01591301 +n01591697 +n01592084 +n01592257 +n01592387 +n01592540 +n01592694 +n01593028 +n01594004 +n01594372 +n01594787 +n01594968 +n01595168 +n01595450 +n01595974 +n01596273 +n01596608 +n01597022 +n01597336 +n01597737 +n01597906 +n01598074 +n01598588 +n01598988 +n01599159 +n01599269 +n01599556 +n01600085 +n01600657 +n01601068 +n01601694 +n01602630 +n01602832 +n01603152 +n01603600 +n01603812 +n01603953 +n01604330 +n01604968 +n01605630 +n01606522 +n01606672 +n01606809 +n01607600 +n01607812 +n01607962 +n01608265 +n01608432 +n01608814 +n01609062 +n01609391 
+n01609751 +n01609956 +n01610100 +n01610226 +n01610552 +n01610955 +n01611472 +n01611800 +n01611969 +n01612122 +n01612275 +n01612476 +n01612628 +n01613177 +n01613294 +n01613615 +n01613807 +n01614038 +n01614343 +n01614556 +n01614925 +n01615121 +n01615303 +n01615458 +n01615703 +n01616086 +n01616318 +n01617095 +n01617443 +n01617766 +n01618082 +n01618503 +n01618922 +n01619310 +n01619536 +n01619835 +n01620135 +n01620414 +n01620735 +n01621127 +n01621635 +n01622120 +n01622352 +n01622483 +n01622779 +n01622959 +n01623110 +n01623425 +n01623615 +n01623706 +n01623880 +n01624115 +n01624212 +n01624305 +n01624537 +n01624833 +n01625562 +n01627424 +n01628770 +n01629276 +n01629819 +n01629962 +n01630284 +n01630670 +n01630901 +n01631354 +n01631512 +n01631663 +n01632458 +n01632601 +n01632777 +n01633406 +n01633781 +n01635027 +n01636127 +n01636352 +n01636829 +n01637615 +n01639765 +n01640846 +n01641206 +n01641391 +n01641577 +n01641739 +n01642257 +n01642539 +n01643507 +n01643896 +n01644373 +n01644900 +n01645776 +n01646292 +n01646388 +n01646555 +n01646648 +n01646802 +n01646902 +n01647303 +n01647640 +n01648139 +n01648620 +n01649170 +n01650167 +n01650690 +n01650901 +n01651059 +n01652026 +n01653223 +n01654637 +n01661091 +n01662622 +n01662784 +n01663401 +n01663782 +n01664065 +n01664369 +n01664492 +n01664674 +n01664990 +n01665541 +n01665932 +n01666228 +n01666585 +n01667114 +n01667432 +n01667778 +n01668091 +n01668436 +n01668665 +n01668892 +n01669191 +n01669372 +n01669654 +n01670092 +n01670535 +n01670802 +n01671125 +n01671479 +n01672032 +n01673282 +n01674464 +n01674990 +n01675722 +n01677366 +n01677747 +n01678043 +n01678343 +n01679307 +n01679626 +n01679962 +n01680264 +n01680478 +n01680655 +n01680813 +n01681328 +n01681653 +n01681940 +n01682172 +n01682435 +n01682714 +n01683558 +n01684133 +n01684578 +n01685808 +n01686044 +n01687665 +n01687978 +n01688243 +n01689081 +n01689811 +n01690149 +n01690466 +n01691217 +n01692333 +n01692523 +n01693175 +n01693334 +n01693783 +n01694178 +n01694709 +n01694955 +n01695060 +n01696633 +n01697178 +n01697457 +n01697611 +n01698434 +n01698640 +n01698782 +n01699040 +n01699675 +n01701859 +n01704323 +n01713764 +n01726692 +n01727646 +n01728572 +n01728920 +n01729322 +n01729977 +n01730185 +n01730307 +n01730563 +n01730812 +n01730960 +n01731545 +n01731941 +n01732244 +n01732614 +n01732789 +n01733466 +n01733757 +n01733957 +n01734104 +n01734418 +n01734637 +n01734808 +n01735189 +n01735439 +n01735577 +n01737021 +n01737472 +n01737728 +n01737875 +n01738065 +n01738601 +n01739381 +n01740131 +n01740551 +n01741232 +n01741562 +n01741943 +n01742172 +n01742821 +n01743086 +n01743605 +n01743936 +n01744100 +n01744270 +n01744401 +n01745125 +n01745484 +n01745902 +n01746359 +n01747589 +n01747885 +n01748264 +n01748389 +n01748686 +n01748906 +n01749244 +n01749582 +n01749742 +n01749939 +n01750167 +n01750437 +n01751036 +n01751472 +n01751748 +n01752165 +n01752585 +n01752736 +n01753032 +n01753180 +n01753488 +n01753959 +n01754370 +n01754533 +n01754876 +n01755581 +n01755740 +n01756089 +n01756291 +n01756508 +n01756733 +n01757115 +n01757343 +n01757677 +n01757901 +n01758141 +n01758757 +n01767661 +n01768244 +n01769347 +n01770081 +n01770393 +n01770795 +n01771417 +n01772222 +n01772664 +n01773157 +n01773549 +n01773797 +n01774384 +n01774750 +n01775062 +n01775370 +n01776313 +n01777304 +n01778217 +n01779148 +n01779629 +n01782209 +n01782516 +n01784675 +n01785667 +n01786646 +n01787835 +n01789740 +n01790711 +n01791107 +n01791463 +n01791625 +n01791954 +n01792042 +n01792158 +n01792429 +n01792640 +n01792955 +n01793085 +n01793249 +n01793435 +n01793715 
+n01794158 +n01794344 +n01794651 +n01795088 +n01795545 +n01795735 +n01796340 +n01796519 +n01796729 +n01797020 +n01797307 +n01797601 +n01797886 +n01798168 +n01798484 +n01798706 +n01798839 +n01799679 +n01800424 +n01801876 +n01803078 +n01803362 +n01804163 +n01804478 +n01804653 +n01805070 +n01805801 +n01806143 +n01806297 +n01806364 +n01806467 +n01806567 +n01806847 +n01807105 +n01807496 +n01807828 +n01808140 +n01809106 +n01809371 +n01809752 +n01810268 +n01811909 +n01812337 +n01812662 +n01812866 +n01813088 +n01813385 +n01813532 +n01813948 +n01814217 +n01814370 +n01814755 +n01814921 +n01815601 +n01816887 +n01817263 +n01817346 +n01817953 +n01818299 +n01818515 +n01818832 +n01819115 +n01819313 +n01819465 +n01819734 +n01820052 +n01820348 +n01820546 +n01821076 +n01821203 +n01821869 +n01822300 +n01823013 +n01823414 +n01824035 +n01824575 +n01825278 +n01826364 +n01826680 +n01827403 +n01827793 +n01828096 +n01828556 +n01828970 +n01829413 +n01829869 +n01830042 +n01830915 +n01832167 +n01832493 +n01833805 +n01834177 +n01834540 +n01835276 +n01837072 +n01838598 +n01839086 +n01839330 +n01839598 +n01839750 +n01840120 +n01840775 +n01841102 +n01841288 +n01841441 +n01841679 +n01842235 +n01842504 +n01843065 +n01843383 +n01843719 +n01844231 +n01844551 +n01844917 +n01845132 +n01846331 +n01847000 +n01847089 +n01847170 +n01847253 +n01847407 +n01847806 +n01847978 +n01848123 +n01848323 +n01848453 +n01848555 +n01848648 +n01848840 +n01848976 +n01849157 +n01849466 +n01849676 +n01849863 +n01850192 +n01850373 +n01850553 +n01850873 +n01851038 +n01851207 +n01851375 +n01851573 +n01851731 +n01851895 +n01852142 +n01852329 +n01852400 +n01852671 +n01852861 +n01853195 +n01853498 +n01853666 +n01853870 +n01854415 +n01854700 +n01854838 +n01855032 +n01855188 +n01855476 +n01855672 +n01856072 +n01856155 +n01856380 +n01856553 +n01856890 +n01857079 +n01857325 +n01857512 +n01857632 +n01857851 +n01858281 +n01858441 +n01858780 +n01858845 +n01858906 +n01859190 +n01859325 +n01859496 +n01859689 +n01859852 +n01860002 +n01860187 +n01860497 +n01861778 +n01862399 +n01871265 +n01871875 +n01872401 +n01872772 +n01873310 +n01874434 +n01874928 +n01875313 +n01876034 +n01876326 +n01877134 +n01877606 +n01877812 +n01878061 +n01878929 +n01879217 +n01879509 +n01881171 +n01882125 +n01882714 +n01883070 +n01884834 +n01885498 +n01886756 +n01887474 +n01887623 +n01887787 +n01887896 +n01888045 +n01888181 +n01888264 +n01888411 +n01889520 +n01891633 +n01892030 +n01893825 +n01896844 +n01897536 +n01899894 +n01900150 +n01903346 +n01904029 +n01904806 +n01904886 +n01905661 +n01906749 +n01909906 +n01910747 +n01913166 +n01914609 +n01914830 +n01915700 +n01915811 +n01916187 +n01916388 +n01916481 +n01916925 +n01917289 +n01917611 +n01917882 +n01918744 +n01922303 +n01923025 +n01924916 +n01930112 +n01934440 +n01935395 +n01937909 +n01938454 +n01940736 +n01942869 +n01943087 +n01943899 +n01944118 +n01944390 +n01944812 +n01944955 +n01945143 +n01945685 +n01946630 +n01947396 +n01947997 +n01948573 +n01949085 +n01950731 +n01951274 +n01951613 +n01953361 +n01953594 +n01953762 +n01955084 +n01955933 +n01956344 +n01956481 +n01956764 +n01957335 +n01958038 +n01958346 +n01958531 +n01959492 +n01959985 +n01960177 +n01960459 +n01961985 +n01963317 +n01963571 +n01964049 +n01964271 +n01964441 +n01965529 +n01965889 +n01968897 +n01970164 +n01970667 +n01971280 +n01972541 +n01974773 +n01976146 +n01976868 +n01976957 +n01978287 +n01978455 +n01979874 +n01980166 +n01981276 +n01982068 +n01982347 +n01982650 +n01983481 +n01984245 +n01984695 +n01985128 +n01985493 +n01986214 +n01986806 +n01987545 +n01990007 +n01990800 
+n01991028 +n01991520 +n01992773 +n01994910 +n01998183 +n01998741 +n01999186 +n02000954 +n02002075 +n02002556 +n02002724 +n02003037 +n02003204 +n02003577 +n02003839 +n02004131 +n02004492 +n02004855 +n02005399 +n02005790 +n02006063 +n02006364 +n02006656 +n02006985 +n02007284 +n02007558 +n02008041 +n02008497 +n02008643 +n02008796 +n02009229 +n02009380 +n02009508 +n02009750 +n02009912 +n02010272 +n02010453 +n02010728 +n02011016 +n02011281 +n02011460 +n02011805 +n02011943 +n02012185 +n02012849 +n02013177 +n02013567 +n02013706 +n02014237 +n02014524 +n02014941 +n02015357 +n02015554 +n02016066 +n02016358 +n02016659 +n02016816 +n02016956 +n02017213 +n02017475 +n02017725 +n02018027 +n02018207 +n02018368 +n02018795 +n02019190 +n02019929 +n02021050 +n02021795 +n02022684 +n02023341 +n02023855 +n02023992 +n02024185 +n02024479 +n02024763 +n02025043 +n02025239 +n02025389 +n02026059 +n02026629 +n02026948 +n02027075 +n02027357 +n02027492 +n02027897 +n02028035 +n02028175 +n02028342 +n02028451 +n02028727 +n02028900 +n02029087 +n02029378 +n02029706 +n02030035 +n02030287 +n02030837 +n02030996 +n02031585 +n02031934 +n02032222 +n02032355 +n02032480 +n02033041 +n02033208 +n02033561 +n02033779 +n02034129 +n02034295 +n02034661 +n02034971 +n02035210 +n02036053 +n02036711 +n02037110 +n02037464 +n02037869 +n02038466 +n02038993 +n02039171 +n02040266 +n02041085 +n02041246 +n02041678 +n02041875 +n02042046 +n02042180 +n02042472 +n02042759 +n02043063 +n02043333 +n02043808 +n02044178 +n02044517 +n02044778 +n02044908 +n02045369 +n02045596 +n02045864 +n02046171 +n02046759 +n02046939 +n02047045 +n02047260 +n02047411 +n02047517 +n02047614 +n02047975 +n02048115 +n02048353 +n02049088 +n02050004 +n02050313 +n02050442 +n02050586 +n02050809 +n02051059 +n02051845 +n02052204 +n02052365 +n02052775 +n02053083 +n02053425 +n02053584 +n02054036 +n02054502 +n02054711 +n02055107 +n02055658 +n02055803 +n02056228 +n02056570 +n02056728 +n02057035 +n02057330 +n02057731 +n02058221 +n02058594 +n02059162 +n02060133 +n02060411 +n02060569 +n02060889 +n02062017 +n02062430 +n02062744 +n02063224 +n02063662 +n02064338 +n02064816 +n02065026 +n02065263 +n02065407 +n02066245 +n02066707 +n02067240 +n02068541 +n02068974 +n02069412 +n02069701 +n02069974 +n02070174 +n02070430 +n02071294 +n02071636 +n02072040 +n02072798 +n02073831 +n02074367 +n02075296 +n02075612 +n02075927 +n02076196 +n02076402 +n02076779 +n02077152 +n02077384 +n02077658 +n02077787 +n02077923 +n02078292 +n02078574 +n02078738 +n02079005 +n02079389 +n02079851 +n02080146 +n02080415 +n02080713 +n02081571 +n02081798 +n02082791 +n02083346 +n02083672 +n02084071 +n02084732 +n02084861 +n02085272 +n02085374 +n02085620 +n02085936 +n02086079 +n02086240 +n02086478 +n02086646 +n02086753 +n02086910 +n02087046 +n02087122 +n02087394 +n02087551 +n02088094 +n02088238 +n02088364 +n02088466 +n02088632 +n02088839 +n02089232 +n02089468 +n02089555 +n02089973 +n02090379 +n02090475 +n02090622 +n02090721 +n02090827 +n02091032 +n02091134 +n02091244 +n02091467 +n02091831 +n02092002 +n02092339 +n02092468 +n02093056 +n02093256 +n02093428 +n02093647 +n02093754 +n02093859 +n02093991 +n02094114 +n02094258 +n02094433 +n02094562 +n02094721 +n02094931 +n02095050 +n02095314 +n02095412 +n02095570 +n02095727 +n02095889 +n02096051 +n02096177 +n02096294 +n02096437 +n02096585 +n02096756 +n02097047 +n02097130 +n02097209 +n02097298 +n02097474 +n02097658 +n02097786 +n02098105 +n02098286 +n02098413 +n02098550 +n02098806 +n02098906 +n02099029 +n02099267 +n02099429 +n02099601 +n02099712 +n02099849 +n02099997 +n02100236 +n02100399 +n02100583 
+n02100735 +n02100877 +n02101006 +n02101108 +n02101388 +n02101556 +n02101861 +n02102040 +n02102177 +n02102318 +n02102480 +n02102605 +n02102973 +n02103406 +n02103841 +n02104029 +n02104280 +n02104365 +n02104523 +n02104882 +n02105056 +n02105162 +n02105251 +n02105412 +n02105505 +n02105641 +n02105855 +n02106030 +n02106166 +n02106382 +n02106550 +n02106662 +n02106854 +n02106966 +n02107142 +n02107312 +n02107420 +n02107574 +n02107683 +n02107908 +n02108000 +n02108089 +n02108254 +n02108422 +n02108551 +n02108672 +n02108915 +n02109047 +n02109525 +n02109811 +n02109961 +n02110063 +n02110185 +n02110341 +n02110627 +n02110806 +n02110958 +n02111129 +n02111277 +n02111500 +n02111626 +n02111889 +n02112018 +n02112137 +n02112350 +n02112497 +n02112826 +n02113023 +n02113186 +n02113335 +n02113624 +n02113712 +n02113799 +n02114100 +n02114367 +n02114548 +n02114712 +n02114855 +n02115096 +n02115335 +n02115641 +n02115913 +n02116738 +n02117135 +n02117512 +n02117900 +n02118333 +n02119022 +n02119477 +n02119634 +n02119789 +n02120079 +n02120505 +n02120997 +n02121620 +n02121808 +n02122298 +n02122430 +n02122510 +n02122580 +n02122725 +n02122878 +n02122948 +n02123045 +n02123159 +n02123242 +n02123394 +n02123478 +n02123597 +n02123785 +n02123917 +n02124075 +n02124313 +n02124484 +n02124623 +n02125010 +n02125081 +n02125311 +n02125494 +n02126028 +n02126139 +n02126640 +n02126787 +n02127052 +n02127292 +n02127381 +n02127482 +n02127586 +n02127678 +n02127808 +n02128385 +n02128669 +n02128757 +n02128925 +n02129165 +n02129463 +n02129604 +n02129837 +n02129923 +n02129991 +n02130308 +n02131653 +n02132136 +n02132466 +n02132580 +n02132788 +n02133161 +n02133704 +n02134084 +n02134418 +n02135220 +n02136103 +n02136452 +n02137015 +n02137549 +n02138441 +n02138647 +n02138777 +n02139199 +n02139671 +n02140049 +n02146371 +n02146700 +n02147173 +n02147328 +n02147591 +n02147947 +n02149420 +n02150482 +n02152740 +n02152881 +n02153109 +n02156871 +n02157206 +n02159955 +n02160947 +n02161338 +n02161457 +n02162561 +n02163297 +n02164464 +n02165105 +n02165456 +n02165877 +n02166567 +n02166826 +n02167151 +n02167820 +n02168245 +n02168699 +n02169023 +n02169497 +n02169705 +n02169974 +n02172182 +n02172518 +n02172870 +n02173113 +n02173373 +n02174001 +n02174659 +n02175014 +n02175569 +n02175916 +n02176261 +n02176439 +n02176747 +n02177972 +n02180875 +n02181235 +n02181477 +n02181724 +n02183096 +n02183857 +n02184473 +n02188699 +n02190166 +n02190790 +n02191773 +n02191979 +n02192252 +n02192513 +n02195526 +n02195819 +n02196119 +n02196344 +n02197185 +n02197689 +n02198859 +n02200198 +n02200509 +n02200850 +n02201000 +n02201626 +n02202006 +n02203152 +n02204907 +n02205219 +n02205673 +n02206856 +n02207179 +n02207345 +n02207805 +n02208280 +n02208498 +n02208848 +n02209111 +n02209354 +n02209624 +n02210427 +n02211444 +n02211627 +n02212062 +n02212958 +n02213107 +n02213239 +n02213543 +n02213663 +n02213788 +n02214341 +n02214773 +n02215621 +n02215770 +n02216211 +n02216365 +n02217563 +n02218371 +n02219486 +n02220518 +n02220804 +n02221083 +n02221414 +n02222035 +n02223266 +n02226429 +n02226821 +n02226970 +n02227247 +n02227966 +n02228341 +n02229156 +n02229544 +n02229765 +n02230187 +n02231052 +n02231487 +n02231803 +n02233338 +n02233943 +n02234355 +n02234848 +n02236044 +n02236241 +n02236355 +n02236896 +n02237581 +n02239774 +n02240068 +n02240517 +n02241426 +n02242137 +n02243562 +n02244797 +n02246628 +n02247216 +n02250822 +n02251775 +n02252226 +n02254697 +n02256656 +n02257284 +n02257985 +n02258198 +n02259212 +n02259708 +n02262449 +n02262803 +n02264232 +n02264363 +n02264885 +n02266050 +n02266864 +n02268148 
+n02268443 +n02268853 +n02270623 +n02272871 +n02273392 +n02274024 +n02274259 +n02274822 +n02275560 +n02275773 +n02276078 +n02276258 +n02276355 +n02276749 +n02276902 +n02277094 +n02277268 +n02277742 +n02278024 +n02278210 +n02278839 +n02278980 +n02279257 +n02279637 +n02279972 +n02280649 +n02281015 +n02281136 +n02281406 +n02281787 +n02282257 +n02282385 +n02282553 +n02282903 +n02283077 +n02283201 +n02283951 +n02284611 +n02284884 +n02285801 +n02286089 +n02287004 +n02287799 +n02288268 +n02288789 +n02289610 +n02291748 +n02292692 +n02295064 +n02295390 +n02297442 +n02298218 +n02298541 +n02299157 +n02299505 +n02299846 +n02300797 +n02301935 +n02302244 +n02302459 +n02302620 +n02302969 +n02303284 +n02304036 +n02304432 +n02305085 +n02305929 +n02307325 +n02307681 +n02308139 +n02308471 +n02308735 +n02309242 +n02309337 +n02310334 +n02310585 +n02310717 +n02310941 +n02311060 +n02311617 +n02312006 +n02312427 +n02312640 +n02313008 +n02315487 +n02316707 +n02317335 +n02317781 +n02318167 +n02319095 +n02319308 +n02319555 +n02321170 +n02321529 +n02322047 +n02323449 +n02324045 +n02324431 +n02324514 +n02324587 +n02324850 +n02325366 +n02325722 +n02326432 +n02326862 +n02327028 +n02327656 +n02327842 +n02328150 +n02328429 +n02329401 +n02330245 +n02331046 +n02332156 +n02332755 +n02333190 +n02333546 +n02333909 +n02334201 +n02336641 +n02337001 +n02338145 +n02339376 +n02341475 +n02341974 +n02342885 +n02343320 +n02343772 +n02346627 +n02348173 +n02350105 +n02352591 +n02353861 +n02355227 +n02355477 +n02356381 +n02356612 +n02356798 +n02356977 +n02357111 +n02357401 +n02357585 +n02357911 +n02358091 +n02358390 +n02358584 +n02358890 +n02359047 +n02359324 +n02359556 +n02359915 +n02360282 +n02361337 +n02361587 +n02361706 +n02363005 +n02363245 +n02363351 +n02364520 +n02364673 +n02364840 +n02365108 +n02365480 +n02366002 +n02366959 +n02367492 +n02369293 +n02370806 +n02372584 +n02372952 +n02373336 +n02374149 +n02374451 +n02375302 +n02376542 +n02376679 +n02376791 +n02376918 +n02377063 +n02377181 +n02377291 +n02377388 +n02377480 +n02377603 +n02377703 +n02378541 +n02378969 +n02379081 +n02379183 +n02379329 +n02379430 +n02379630 +n02379908 +n02380052 +n02380335 +n02380464 +n02380583 +n02380745 +n02380875 +n02381004 +n02381261 +n02381364 +n02381460 +n02381609 +n02381831 +n02382039 +n02382132 +n02382204 +n02382338 +n02382437 +n02382635 +n02382750 +n02382850 +n02382948 +n02383231 +n02385214 +n02386014 +n02386141 +n02386224 +n02386310 +n02386496 +n02386853 +n02386968 +n02387093 +n02387254 +n02387346 +n02387722 +n02387887 +n02388143 +n02388276 +n02388735 +n02388832 +n02388917 +n02389026 +n02389128 +n02389261 +n02389346 +n02389559 +n02389779 +n02390015 +n02390101 +n02390454 +n02390640 +n02391049 +n02391234 +n02391373 +n02391508 +n02391994 +n02392434 +n02392824 +n02393161 +n02393580 +n02393807 +n02393940 +n02394477 +n02395003 +n02395406 +n02395694 +n02396014 +n02396088 +n02396427 +n02397096 +n02397529 +n02397744 +n02398521 +n02399000 +n02402010 +n02402175 +n02402425 +n02403003 +n02403231 +n02403325 +n02403454 +n02403740 +n02403920 +n02404186 +n02404432 +n02404573 +n02404906 +n02405101 +n02405302 +n02405440 +n02405799 +n02405929 +n02406174 +n02406432 +n02406533 +n02406647 +n02406749 +n02406859 +n02407071 +n02407276 +n02407390 +n02407521 +n02407625 +n02407959 +n02408429 +n02408817 +n02409508 +n02410011 +n02410509 +n02410702 +n02410900 +n02411206 +n02411705 +n02411999 +n02412080 +n02412210 +n02412440 +n02412629 +n02413050 +n02413131 +n02413593 +n02414209 +n02414290 +n02414578 +n02414763 +n02415253 +n02415435 +n02415577 +n02415829 +n02416104 +n02416519 
+n02416820 +n02416880 +n02416964 +n02417070 +n02417387 +n02417534 +n02417663 +n02417914 +n02418465 +n02419336 +n02419634 +n02419796 +n02420509 +n02420828 +n02421136 +n02421449 +n02421792 +n02422106 +n02422391 +n02422699 +n02423022 +n02423218 +n02423589 +n02424085 +n02424305 +n02424486 +n02424909 +n02425228 +n02425887 +n02426481 +n02426813 +n02427032 +n02427470 +n02427576 +n02427724 +n02428349 +n02428508 +n02429456 +n02430045 +n02430559 +n02430830 +n02431122 +n02431337 +n02431441 +n02431628 +n02431785 +n02431976 +n02432291 +n02432511 +n02432704 +n02432983 +n02433318 +n02433546 +n02433925 +n02434190 +n02434954 +n02437136 +n02437312 +n02437482 +n02437616 +n02438173 +n02438272 +n02438580 +n02439033 +n02439398 +n02441326 +n02441942 +n02442336 +n02442845 +n02443015 +n02443114 +n02443346 +n02443484 +n02444819 +n02445004 +n02445171 +n02445394 +n02445715 +n02446206 +n02447366 +n02447762 +n02448060 +n02449350 +n02450295 +n02453108 +n02454379 +n02454794 +n02456962 +n02457408 +n02457945 +n02458135 +n02460009 +n02460451 +n02461128 +n02461830 +n02469248 +n02469472 +n02469914 +n02470238 +n02470325 +n02472293 +n02472987 +n02473307 +n02474777 +n02475078 +n02475669 +n02480153 +n02480495 +n02480855 +n02481103 +n02481235 +n02481366 +n02481500 +n02481823 +n02482286 +n02482474 +n02482650 +n02483362 +n02483708 +n02484322 +n02484473 +n02484975 +n02485536 +n02486261 +n02486410 +n02486657 +n02486908 +n02487347 +n02487547 +n02487675 +n02487847 +n02488291 +n02488415 +n02488702 +n02488894 +n02489166 +n02490219 +n02490811 +n02491107 +n02492035 +n02492660 +n02493509 +n02493793 +n02494079 +n02496913 +n02497673 +n02499022 +n02499316 +n02499808 +n02500267 +n02501583 +n02503517 +n02504013 +n02504458 +n02508021 +n02508213 +n02508742 +n02509197 +n02509515 +n02509815 +n02510455 +n02512053 +n02512830 +n02512938 +n02514041 +n02516188 +n02517442 +n02518324 +n02519148 +n02519686 +n02519862 +n02520147 +n02522399 +n02523427 +n02524202 +n02525382 +n02526121 +n02527057 +n02527271 +n02527622 +n02530421 +n02530831 +n02530999 +n02532028 +n02532602 +n02533209 +n02533834 +n02534734 +n02535163 +n02535258 +n02535537 +n02535759 +n02536165 +n02536456 +n02536864 +n02537085 +n02537319 +n02537525 +n02537716 +n02538010 +n02538216 +n02540412 +n02541687 +n02542432 +n02543565 +n02548247 +n02549248 +n02549989 +n02555863 +n02556846 +n02557182 +n02557318 +n02557591 +n02557749 +n02557909 +n02560110 +n02561108 +n02561381 +n02561514 +n02561661 +n02562315 +n02562796 +n02563182 +n02563648 +n02563792 +n02564270 +n02564720 +n02565072 +n02565324 +n02565573 +n02568087 +n02568447 +n02568959 +n02569484 +n02570164 +n02570838 +n02572196 +n02572484 +n02573704 +n02574271 +n02576575 +n02576906 +n02577403 +n02578771 +n02578928 +n02579303 +n02579928 +n02580336 +n02580679 +n02580830 +n02581957 +n02583890 +n02584145 +n02584449 +n02585872 +n02586543 +n02587618 +n02588286 +n02589623 +n02590094 +n02590702 +n02592055 +n02593019 +n02595702 +n02596067 +n02596381 +n02597608 +n02598573 +n02598878 +n02599052 +n02599347 +n02599557 +n02601344 +n02603317 +n02603540 +n02605316 +n02605703 +n02605936 +n02606052 +n02606384 +n02607072 +n02607201 +n02607470 +n02607862 +n02610066 +n02610664 +n02611561 +n02613181 +n02616851 +n02618827 +n02619165 +n02619550 +n02620167 +n02624167 +n02624551 +n02624807 +n02624987 +n02625258 +n02625612 +n02625851 +n02626265 +n02626762 +n02627292 +n02627532 +n02627835 +n02628062 +n02629230 +n02630281 +n02630615 +n02630739 +n02631041 +n02631330 +n02631475 +n02639087 +n02639605 +n02640242 +n02640626 +n02640857 +n02641379 +n02643112 +n02643566 +n02643836 +n02644113 
+n02649546 +n02650050 +n02652132 +n02653145 +n02653497 +n02654112 +n02654425 +n02654745 +n02655020 +n02655848 +n02656032 +n02656670 +n02657368 +n02657694 +n02658531 +n02660208 +n02660640 +n02663211 +n02666196 +n02666501 +n02666624 +n02666943 +n02667093 +n02667244 +n02667379 +n02667478 +n02667576 +n02669295 +n02669534 +n02669723 +n02670186 +n02670382 +n02670683 +n02670935 +n02672371 +n02672831 +n02675219 +n02676566 +n02676938 +n02677718 +n02678897 +n02679257 +n02680110 +n02680512 +n02680754 +n02681392 +n02682311 +n02682569 +n02682922 +n02683323 +n02683454 +n02683558 +n02683791 +n02685082 +n02685995 +n02686121 +n02686227 +n02686379 +n02686568 +n02687172 +n02687423 +n02687821 +n02687992 +n02688273 +n02688443 +n02689144 +n02689274 +n02689434 +n02689748 +n02690373 +n02691156 +n02692086 +n02692232 +n02692877 +n02693246 +n02694045 +n02694426 +n02694662 +n02695627 +n02696165 +n02697221 +n02697675 +n02698634 +n02699494 +n02699629 +n02699770 +n02699915 +n02700064 +n02700258 +n02700895 +n02701002 +n02702989 +n02703275 +n02704645 +n02704792 +n02704949 +n02705201 +n02705429 +n02705944 +n02706806 +n02708093 +n02708433 +n02708555 +n02708711 +n02709101 +n02709367 +n02709637 +n02709908 +n02710044 +n02710201 +n02710324 +n02710429 +n02710600 +n02713003 +n02713364 +n02714751 +n02715229 +n02715513 +n02715712 +n02720048 +n02723165 +n02725872 +n02726017 +n02726210 +n02726305 +n02726681 +n02727016 +n02727141 +n02727426 +n02728440 +n02729837 +n02729965 +n02730930 +n02731398 +n02731629 +n02731900 +n02732072 +n02732572 +n02732827 +n02733213 +n02733524 +n02734725 +n02734835 +n02735361 +n02735538 +n02735688 +n02736798 +n02737660 +n02738031 +n02738535 +n02738741 +n02738859 +n02739427 +n02739550 +n02739668 +n02739889 +n02740300 +n02740533 +n02740764 +n02741475 +n02742322 +n02742468 +n02742753 +n02744323 +n02744844 +n02745611 +n02746365 +n02746595 +n02747177 +n02747672 +n02747802 +n02749479 +n02749953 +n02750070 +n02750169 +n02751215 +n02751295 +n02752496 +n02752615 +n02752810 +n02753044 +n02753394 +n02754103 +n02754656 +n02755140 +n02755529 +n02755823 +n02756098 +n02756977 +n02757061 +n02757337 +n02757462 +n02757714 +n02757810 +n02758134 +n02758863 +n02758960 +n02759257 +n02759387 +n02759963 +n02760099 +n02760199 +n02760429 +n02760658 +n02760855 +n02761206 +n02761392 +n02761557 +n02761696 +n02761834 +n02762371 +n02762508 +n02763306 +n02763604 +n02763901 +n02764044 +n02764398 +n02764505 +n02764779 +n02764935 +n02766320 +n02766534 +n02766792 +n02767038 +n02767147 +n02767433 +n02767665 +n02767956 +n02768114 +n02768226 +n02768655 +n02768973 +n02769075 +n02769290 +n02769669 +n02769748 +n02769963 +n02770211 +n02770721 +n02770830 +n02771004 +n02771166 +n02771286 +n02771750 +n02772101 +n02772435 +n02772700 +n02773037 +n02773838 +n02774152 +n02774630 +n02774921 +n02775039 +n02775178 +n02775483 +n02775897 +n02776007 +n02776205 +n02776631 +n02776825 +n02776978 +n02777100 +n02777292 +n02777734 +n02778294 +n02778456 +n02778669 +n02779435 +n02780704 +n02780815 +n02781121 +n02781338 +n02782093 +n02782602 +n02782681 +n02782778 +n02783161 +n02783324 +n02783459 +n02783900 +n02783994 +n02784124 +n02785648 +n02786058 +n02786198 +n02786331 +n02786736 +n02786837 +n02787435 +n02787622 +n02788021 +n02788148 +n02788572 +n02789487 +n02790669 +n02790823 +n02790996 +n02791124 +n02791270 +n02791665 +n02792409 +n02792552 +n02793089 +n02793199 +n02793495 +n02793842 +n02794008 +n02794156 +n02794664 +n02795169 +n02795528 +n02795670 +n02796207 +n02796318 +n02796995 +n02797295 +n02797535 +n02797692 +n02799071 +n02799175 +n02799323 +n02799897 +n02800213 
+n02800497 +n02800675 +n02801184 +n02801450 +n02801525 +n02801823 +n02801938 +n02802215 +n02802426 +n02802544 +n02802721 +n02802990 +n02803349 +n02803539 +n02803666 +n02803934 +n02804123 +n02804252 +n02804414 +n02804515 +n02804610 +n02805983 +n02806088 +n02806379 +n02806530 +n02807133 +n02807523 +n02807616 +n02807731 +n02808185 +n02808304 +n02808440 +n02809105 +n02809241 +n02810270 +n02810471 +n02810782 +n02811059 +n02811204 +n02811350 +n02811468 +n02811618 +n02811719 +n02811936 +n02812201 +n02812949 +n02813252 +n02813399 +n02813544 +n02813645 +n02813752 +n02814116 +n02814428 +n02814533 +n02814774 +n02814860 +n02815749 +n02815834 +n02815950 +n02816656 +n02816768 +n02817031 +n02817516 +n02818135 +n02818832 +n02820210 +n02820556 +n02820675 +n02821202 +n02821415 +n02821627 +n02821943 +n02822064 +n02822220 +n02822579 +n02823124 +n02823335 +n02823428 +n02823510 +n02823586 +n02823750 +n02823848 +n02823964 +n02824058 +n02824319 +n02824448 +n02825153 +n02825442 +n02825657 +n02825961 +n02826068 +n02826589 +n02826886 +n02827606 +n02828299 +n02828427 +n02828884 +n02829596 +n02831237 +n02831335 +n02831595 +n02831724 +n02831894 +n02833793 +n02834397 +n02834506 +n02834778 +n02835271 +n02835412 +n02835724 +n02835829 +n02835915 +n02836035 +n02836174 +n02836392 +n02837789 +n02837887 +n02838345 +n02838728 +n02839110 +n02839351 +n02839592 +n02839910 +n02840134 +n02840245 +n02840619 +n02841187 +n02841315 +n02841506 +n02842573 +n02842809 +n02843029 +n02843158 +n02843276 +n02843553 +n02843684 +n02844307 +n02846141 +n02846511 +n02846733 +n02847631 +n02847852 +n02848216 +n02848523 +n02848921 +n02849154 +n02849885 +n02850732 +n02850950 +n02851099 +n02851939 +n02852043 +n02852173 +n02852360 +n02853016 +n02854378 +n02854532 +n02854739 +n02854926 +n02855089 +n02855390 +n02855701 +n02855925 +n02856237 +n02857477 +n02857644 +n02858304 +n02859184 +n02859343 +n02859443 +n02859955 +n02860415 +n02860640 +n02860847 +n02861022 +n02861147 +n02861387 +n02861886 +n02862048 +n02862916 +n02863014 +n02863426 +n02863536 +n02863750 +n02864504 +n02864593 +n02865351 +n02865665 +n02865931 +n02866386 +n02866578 +n02867715 +n02867966 +n02868638 +n02868975 +n02869155 +n02869249 +n02869563 +n02869737 +n02869837 +n02870526 +n02870676 +n02870880 +n02871005 +n02871147 +n02871314 +n02871439 +n02871525 +n02871824 +n02871963 +n02872333 +n02872529 +n02872752 +n02873520 +n02873733 +n02873839 +n02874086 +n02874442 +n02874537 +n02874750 +n02876084 +n02876326 +n02876657 +n02877266 +n02877765 +n02877962 +n02878222 +n02878425 +n02878628 +n02879087 +n02879309 +n02879718 +n02880189 +n02880393 +n02880546 +n02880842 +n02880940 +n02881193 +n02881757 +n02881906 +n02882190 +n02882301 +n02882647 +n02882894 +n02883004 +n02883205 +n02883344 +n02884994 +n02885108 +n02885338 +n02885462 +n02885882 +n02886321 +n02886434 +n02887079 +n02887209 +n02887489 +n02887970 +n02888270 +n02889425 +n02889646 +n02890188 +n02890351 +n02890513 +n02890662 +n02890940 +n02891188 +n02891788 +n02892201 +n02892304 +n02892499 +n02892767 +n02892948 +n02893608 +n02893692 +n02893941 +n02894158 +n02894337 +n02894605 +n02895154 +n02895328 +n02895438 +n02896442 +n02897097 +n02897820 +n02898269 +n02898369 +n02898585 +n02898711 +n02899439 +n02900160 +n02900705 +n02901114 +n02901259 +n02901377 +n02901793 +n02902079 +n02902687 +n02902916 +n02903126 +n02903204 +n02903852 +n02904233 +n02904640 +n02904803 +n02904927 +n02905036 +n02905152 +n02906734 +n02907082 +n02907391 +n02907656 +n02907873 +n02908217 +n02908773 +n02909285 +n02909870 +n02910145 +n02910353 +n02910542 +n02910864 +n02911332 +n02911485 
+n02912065 +n02912319 +n02912557 +n02912894 +n02913152 +n02914991 +n02915904 +n02916179 +n02916350 +n02916936 +n02917067 +n02917377 +n02917521 +n02917607 +n02917964 +n02918112 +n02918330 +n02918595 +n02918831 +n02918964 +n02919148 +n02919414 +n02919792 +n02919890 +n02920083 +n02920259 +n02920369 +n02920658 +n02921029 +n02921195 +n02921756 +n02921884 +n02922292 +n02922578 +n02922798 +n02923682 +n02924116 +n02925009 +n02925107 +n02925519 +n02925666 +n02926426 +n02926591 +n02927161 +n02927764 +n02927887 +n02928049 +n02928299 +n02928608 +n02929289 +n02929582 +n02930080 +n02930214 +n02930645 +n02930766 +n02931148 +n02931294 +n02931417 +n02931836 +n02932019 +n02932400 +n02932523 +n02932693 +n02932891 +n02933112 +n02933340 +n02933462 +n02933649 +n02933990 +n02934168 +n02934451 +n02935017 +n02935387 +n02935658 +n02935891 +n02936176 +n02936281 +n02936402 +n02936570 +n02936714 +n02937958 +n02938886 +n02939185 +n02939866 +n02940385 +n02940570 +n02942349 +n02942460 +n02942699 +n02943241 +n02943871 +n02943964 +n02944075 +n02944146 +n02944459 +n02944579 +n02946127 +n02946270 +n02946348 +n02946509 +n02946824 +n02946921 +n02947660 +n02947818 +n02948072 +n02948403 +n02948557 +n02949202 +n02949542 +n02950256 +n02950632 +n02950826 +n02950943 +n02951358 +n02951585 +n02951703 +n02951843 +n02952109 +n02952237 +n02952374 +n02952485 +n02952585 +n02952674 +n02953197 +n02953455 +n02954163 +n02954340 +n02954938 +n02955065 +n02955247 +n02955540 +n02955767 +n02956699 +n02956795 +n02956883 +n02957008 +n02957135 +n02957755 +n02958343 +n02959942 +n02960352 +n02960690 +n02960903 +n02961035 +n02961225 +n02961451 +n02961544 +n02962061 +n02962200 +n02962414 +n02962843 +n02963159 +n02963302 +n02963503 +n02963692 +n02963821 +n02963987 +n02964843 +n02965216 +n02965300 +n02965783 +n02966193 +n02966545 +n02966687 +n02967294 +n02967626 +n02967782 +n02968074 +n02968333 +n02968473 +n02969010 +n02969323 +n02970408 +n02970534 +n02970685 +n02970849 +n02971167 +n02971356 +n02971473 +n02971579 +n02971691 +n02972397 +n02973017 +n02973236 +n02973805 +n02973904 +n02974003 +n02974348 +n02974697 +n02975212 +n02976123 +n02976249 +n02976350 +n02976455 +n02976939 +n02977058 +n02977330 +n02977438 +n02977619 +n02977936 +n02978055 +n02978367 +n02978478 +n02978753 +n02978881 +n02979074 +n02979186 +n02979290 +n02979399 +n02979836 +n02980036 +n02980441 +n02981024 +n02981321 +n02981792 +n02981911 +n02982232 +n02982416 +n02982515 +n02982599 +n02983189 +n02983357 +n02984061 +n02984203 +n02984469 +n02985963 +n02986160 +n02987379 +n02987492 +n02988066 +n02988156 +n02988304 +n02988486 +n02988679 +n02988963 +n02989099 +n02990373 +n02991302 +n02991847 +n02992032 +n02992211 +n02992368 +n02992529 +n02992795 +n02993194 +n02993368 +n02994573 +n02995345 +n02995871 +n02995998 +n02997391 +n02997607 +n02997910 +n02998003 +n02998563 +n02998841 +n02999138 +n02999410 +n02999936 +n03000134 +n03000247 +n03000684 +n03001115 +n03001627 +n03002096 +n03002341 +n03002711 +n03002816 +n03002948 +n03003091 +n03004275 +n03004824 +n03005033 +n03005147 +n03005285 +n03006626 +n03007130 +n03007444 +n03007591 +n03008177 +n03008976 +n03009794 +n03010473 +n03010656 +n03010795 +n03010915 +n03011018 +n03011355 +n03011741 +n03012013 +n03012373 +n03012897 +n03013438 +n03013580 +n03013850 +n03014440 +n03014705 +n03015149 +n03015254 +n03015478 +n03015851 +n03016389 +n03016609 +n03016737 +n03016868 +n03016953 +n03017070 +n03017168 +n03018209 +n03018349 +n03018712 +n03019434 +n03019685 +n03019806 +n03019938 +n03020034 +n03020416 +n03020692 +n03021228 +n03024064 +n03025165 +n03025250 +n03026506 
+n03026907 +n03027001 +n03027108 +n03027250 +n03027625 +n03028079 +n03028596 +n03028785 +n03029197 +n03029445 +n03029925 +n03030262 +n03030353 +n03030557 +n03030880 +n03031012 +n03031152 +n03031422 +n03032252 +n03032453 +n03032811 +n03033362 +n03033986 +n03034244 +n03034405 +n03034663 +n03035252 +n03035832 +n03036022 +n03036469 +n03037404 +n03037709 +n03038281 +n03038480 +n03038685 +n03038870 +n03039015 +n03039259 +n03039493 +n03039827 +n03039947 +n03040376 +n03041114 +n03041449 +n03041632 +n03041810 +n03042139 +n03042490 +n03042697 +n03043423 +n03043693 +n03043958 +n03044934 +n03045228 +n03045337 +n03045698 +n03046029 +n03046133 +n03046257 +n03046802 +n03046921 +n03047052 +n03047690 +n03047799 +n03047941 +n03048883 +n03049066 +n03049782 +n03049924 +n03050453 +n03050546 +n03050655 +n03050864 +n03051041 +n03051249 +n03051396 +n03051540 +n03054901 +n03055418 +n03055857 +n03057021 +n03057541 +n03057636 +n03057920 +n03058107 +n03058603 +n03059685 +n03061211 +n03061345 +n03061505 +n03061674 +n03061893 +n03062015 +n03062122 +n03062245 +n03062336 +n03062985 +n03063073 +n03063199 +n03063338 +n03063485 +n03063599 +n03063689 +n03063968 +n03064250 +n03064350 +n03064758 +n03064935 +n03065243 +n03065424 +n03066359 +n03066849 +n03067093 +n03067212 +n03067339 +n03067518 +n03068181 +n03068998 +n03069752 +n03070059 +n03070193 +n03071021 +n03071160 +n03072201 +n03072440 +n03072682 +n03073296 +n03073545 +n03073694 +n03073977 +n03074380 +n03074855 +n03075097 +n03075370 +n03075634 +n03075768 +n03075946 +n03077616 +n03077741 +n03078287 +n03078802 +n03078995 +n03079136 +n03079230 +n03079494 +n03080497 +n03080633 +n03081986 +n03082280 +n03082656 +n03082807 +n03082979 +n03084420 +n03084834 +n03085013 +n03085219 +n03085602 +n03085915 +n03086457 +n03086580 +n03086670 +n03086868 +n03087069 +n03087245 +n03087366 +n03087816 +n03088389 +n03088580 +n03089624 +n03089753 +n03089879 +n03090000 +n03090172 +n03091044 +n03091374 +n03092166 +n03092314 +n03092656 +n03092883 +n03094159 +n03094503 +n03095699 +n03096960 +n03097362 +n03097535 +n03097673 +n03098140 +n03098688 +n03098959 +n03099147 +n03099274 +n03099454 +n03099945 +n03100240 +n03100346 +n03100490 +n03100897 +n03101156 +n03101517 +n03101664 +n03101796 +n03101986 +n03102371 +n03102654 +n03103396 +n03103563 +n03104512 +n03105088 +n03105306 +n03105467 +n03106898 +n03107046 +n03107488 +n03108455 +n03108853 +n03109150 +n03109253 +n03109693 +n03109881 +n03110669 +n03111041 +n03111177 +n03111296 +n03112719 +n03112869 +n03113152 +n03113657 +n03113835 +n03114236 +n03114379 +n03114504 +n03115180 +n03115400 +n03115762 +n03115897 +n03116530 +n03116767 +n03118969 +n03119203 +n03119396 +n03119510 +n03120491 +n03120778 +n03121298 +n03121431 +n03121897 +n03122073 +n03122202 +n03122295 +n03123553 +n03123809 +n03123917 +n03124043 +n03124170 +n03124474 +n03124590 +n03125057 +n03125729 +n03125870 +n03126385 +n03126580 +n03126707 +n03127203 +n03127408 +n03127747 +n03127925 +n03128085 +n03128248 +n03128427 +n03128519 +n03129001 +n03129471 +n03129753 +n03129848 +n03130761 +n03131574 +n03131669 +n03131967 +n03132076 +n03132261 +n03132666 +n03132776 +n03133050 +n03133415 +n03133878 +n03134739 +n03134853 +n03135030 +n03135532 +n03136369 +n03137473 +n03138344 +n03138669 +n03139464 +n03140126 +n03140292 +n03140431 +n03140652 +n03141065 +n03141327 +n03141455 +n03141702 +n03141823 +n03142679 +n03145147 +n03145522 +n03145719 +n03146219 +n03146687 +n03146777 +n03146846 +n03147280 +n03147509 +n03148324 +n03148727 +n03149135 +n03149686 +n03150232 +n03150511 +n03151077 +n03152303 +n03154073 +n03154895 
+n03155178 +n03156279 +n03156767 +n03157348 +n03158186 +n03158885 +n03159535 +n03159640 +n03160309 +n03160740 +n03161450 +n03163222 +n03163381 +n03164344 +n03164605 +n03164722 +n03165096 +n03165466 +n03165616 +n03166514 +n03167978 +n03168107 +n03168217 +n03169176 +n03170635 +n03170872 +n03171228 +n03171356 +n03171635 +n03172038 +n03173270 +n03173387 +n03173929 +n03174450 +n03174731 +n03175081 +n03175189 +n03175457 +n03176386 +n03176594 +n03176763 +n03177059 +n03177165 +n03178000 +n03178430 +n03178674 +n03179701 +n03179910 +n03180011 +n03180384 +n03180504 +n03180865 +n03180969 +n03181293 +n03183080 +n03186285 +n03186818 +n03187037 +n03187268 +n03187595 +n03188531 +n03188725 +n03189083 +n03191286 +n03192543 +n03193107 +n03193260 +n03193423 +n03193597 +n03195332 +n03195959 +n03196062 +n03196217 +n03196598 +n03196990 +n03197201 +n03197337 +n03198500 +n03199647 +n03199775 +n03199901 +n03200231 +n03200357 +n03200539 +n03200701 +n03200906 +n03201035 +n03201208 +n03201529 +n03201638 +n03201776 +n03202354 +n03202940 +n03204306 +n03204558 +n03205458 +n03205574 +n03205669 +n03206158 +n03206282 +n03206718 +n03206908 +n03207305 +n03207630 +n03207743 +n03207835 +n03207941 +n03208556 +n03208938 +n03209359 +n03209477 +n03209910 +n03210245 +n03210372 +n03210552 +n03211117 +n03211789 +n03212114 +n03212811 +n03213538 +n03213715 +n03213826 +n03214253 +n03214582 +n03215508 +n03216402 +n03216710 +n03216828 +n03218198 +n03219010 +n03219135 +n03219483 +n03219612 +n03219859 +n03219966 +n03220237 +n03220513 +n03220692 +n03221059 +n03221351 +n03221540 +n03221720 +n03222176 +n03222318 +n03222516 +n03223162 +n03223299 +n03223553 +n03223686 +n03224603 +n03224753 +n03225108 +n03225777 +n03225988 +n03226254 +n03226375 +n03226538 +n03226880 +n03227184 +n03227317 +n03228254 +n03228365 +n03228692 +n03228967 +n03229244 +n03231160 +n03231368 +n03231819 +n03232309 +n03232543 +n03233123 +n03233624 +n03233744 +n03233905 +n03234164 +n03234952 +n03235042 +n03235180 +n03235327 +n03235796 +n03236093 +n03236217 +n03236423 +n03236735 +n03237340 +n03237416 +n03237839 +n03237992 +n03238131 +n03238286 +n03238586 +n03239054 +n03239259 +n03239726 +n03240140 +n03240683 +n03240892 +n03241093 +n03241335 +n03241496 +n03242506 +n03243218 +n03244047 +n03244231 +n03244775 +n03244919 +n03245724 +n03245889 +n03246454 +n03246933 +n03247083 +n03249342 +n03249569 +n03250089 +n03250279 +n03250405 +n03250847 +n03250952 +n03251533 +n03251766 +n03251932 +n03252637 +n03253279 +n03253796 +n03253886 +n03254046 +n03254189 +n03254374 +n03254862 +n03255030 +n03255899 +n03256032 +n03256166 +n03256788 +n03256928 +n03257210 +n03257586 +n03258330 +n03258577 +n03258905 +n03259009 +n03259280 +n03259401 +n03259505 +n03260849 +n03261019 +n03261603 +n03261776 +n03262072 +n03262248 +n03262519 +n03262717 +n03262809 +n03262932 +n03263076 +n03264906 +n03266371 +n03266749 +n03267113 +n03267468 +n03267821 +n03268142 +n03268311 +n03268645 +n03268790 +n03268918 +n03269203 +n03269401 +n03270165 +n03270854 +n03271030 +n03271574 +n03272010 +n03272125 +n03272239 +n03272383 +n03272562 +n03272810 +n03272940 +n03273061 +n03273551 +n03273740 +n03273913 +n03274265 +n03274435 +n03275681 +n03276696 +n03277459 +n03277771 +n03278248 +n03278914 +n03279508 +n03280644 +n03281145 +n03281673 +n03282295 +n03282401 +n03283221 +n03284308 +n03284743 +n03284886 +n03284981 +n03285578 +n03285912 +n03287351 +n03287733 +n03288003 +n03288500 +n03288886 +n03289660 +n03289985 +n03290096 +n03290195 +n03290653 +n03291413 +n03291741 +n03291819 +n03291963 +n03292475 +n03292603 +n03293741 +n03293863 +n03294048 
+n03294604 +n03294833 +n03295012 +n03295246 +n03296081 +n03296328 +n03296478 +n03297103 +n03297226 +n03297495 +n03297644 +n03297735 +n03298089 +n03298716 +n03298858 +n03300216 +n03300443 +n03301291 +n03301568 +n03301833 +n03301940 +n03302671 +n03302938 +n03303217 +n03303831 +n03306385 +n03307037 +n03307792 +n03308152 +n03308481 +n03309110 +n03309356 +n03309465 +n03309687 +n03309808 +n03313333 +n03314227 +n03314608 +n03314780 +n03314884 +n03315644 +n03316105 +n03316406 +n03317788 +n03317889 +n03318136 +n03318294 +n03318865 +n03318983 +n03319457 +n03319745 +n03320046 +n03320262 +n03320421 +n03320519 +n03320959 +n03321103 +n03321563 +n03321954 +n03322570 +n03322704 +n03322836 +n03322940 +n03323096 +n03323703 +n03324928 +n03325088 +n03325403 +n03325584 +n03325691 +n03325941 +n03326660 +n03326795 +n03326948 +n03327133 +n03327234 +n03327553 +n03327691 +n03329302 +n03329536 +n03329663 +n03331077 +n03331599 +n03332005 +n03332271 +n03332393 +n03332989 +n03333129 +n03333252 +n03333610 +n03333711 +n03334291 +n03334382 +n03334912 +n03335030 +n03336282 +n03336575 +n03337140 +n03337383 +n03338821 +n03339529 +n03339643 +n03340723 +n03341153 +n03341297 +n03341606 +n03342015 +n03342127 +n03342262 +n03342657 +n03343354 +n03343560 +n03343737 +n03343853 +n03344305 +n03344393 +n03344642 +n03345487 +n03345837 +n03346135 +n03346455 +n03347037 +n03347617 +n03348868 +n03349469 +n03349771 +n03349892 +n03350204 +n03350602 +n03351434 +n03351979 +n03352628 +n03353951 +n03354207 +n03354903 +n03355768 +n03355925 +n03356858 +n03356982 +n03357267 +n03357716 +n03358172 +n03358380 +n03358726 +n03359137 +n03359285 +n03359436 +n03359566 +n03360300 +n03360431 +n03360622 +n03361297 +n03361380 +n03361550 +n03362890 +n03363363 +n03363549 +n03363749 +n03364008 +n03364599 +n03365231 +n03365374 +n03365592 +n03365991 +n03366823 +n03366974 +n03367059 +n03367410 +n03367545 +n03368352 +n03369276 +n03370387 +n03371875 +n03372029 +n03372549 +n03373237 +n03373611 +n03373943 +n03374372 +n03374473 +n03374649 +n03374838 +n03375171 +n03375329 +n03375575 +n03376159 +n03376279 +n03376595 +n03376938 +n03378005 +n03378174 +n03379051 +n03379204 +n03379343 +n03379828 +n03380724 +n03380867 +n03381126 +n03382292 +n03382413 +n03382856 +n03383099 +n03384352 +n03384891 +n03385557 +n03386011 +n03386544 +n03386726 +n03386870 +n03387653 +n03388043 +n03388183 +n03388323 +n03388549 +n03389611 +n03389761 +n03389889 +n03390075 +n03390786 +n03390983 +n03391301 +n03391770 +n03392741 +n03393017 +n03393761 +n03393912 +n03394272 +n03394480 +n03394649 +n03394916 +n03395256 +n03395514 +n03395859 +n03396074 +n03396580 +n03396654 +n03397087 +n03397266 +n03397532 +n03397947 +n03398153 +n03398228 +n03399677 +n03399761 +n03399971 +n03400231 +n03400972 +n03401129 +n03401279 +n03402188 +n03402369 +n03402941 +n03403643 +n03404149 +n03404251 +n03404360 +n03404449 +n03405111 +n03405265 +n03405595 +n03405725 +n03406966 +n03407369 +n03407865 +n03408054 +n03408444 +n03409297 +n03409393 +n03409591 +n03410423 +n03410571 +n03410740 +n03410938 +n03411079 +n03412058 +n03413684 +n03414029 +n03414162 +n03414676 +n03415252 +n03415486 +n03415749 +n03416094 +n03416489 +n03416640 +n03416775 +n03416900 +n03417042 +n03417202 +n03417345 +n03417749 +n03417970 +n03418158 +n03418242 +n03418402 +n03418618 +n03418915 +n03419014 +n03420345 +n03420801 +n03421117 +n03421324 +n03421485 +n03421669 +n03422072 +n03422771 +n03423306 +n03423479 +n03423568 +n03423719 +n03423877 +n03424325 +n03424489 +n03424630 +n03424862 +n03425241 +n03425325 +n03425413 +n03425595 +n03425769 +n03426134 +n03426285 +n03427202 
+n03427296 +n03428090 +n03428226 +n03428349 +n03429003 +n03429137 +n03429288 +n03429682 +n03429914 +n03430091 +n03430313 +n03430418 +n03430551 +n03431243 +n03431745 +n03432061 +n03432129 +n03433877 +n03434188 +n03434285 +n03435593 +n03435743 +n03435991 +n03436075 +n03436182 +n03436417 +n03436549 +n03436891 +n03437430 +n03437741 +n03437829 +n03437941 +n03438071 +n03438257 +n03438661 +n03438863 +n03439348 +n03439814 +n03440216 +n03440682 +n03441112 +n03441345 +n03441582 +n03442597 +n03442756 +n03443005 +n03443149 +n03443371 +n03443912 +n03444034 +n03445326 +n03445617 +n03445777 +n03445924 +n03446070 +n03446268 +n03446832 +n03447075 +n03447358 +n03447447 +n03447721 +n03448590 +n03448956 +n03449309 +n03449451 +n03450230 +n03450516 +n03450734 +n03450974 +n03451120 +n03451711 +n03451798 +n03452267 +n03452449 +n03452594 +n03452741 +n03453231 +n03453443 +n03454110 +n03454211 +n03454442 +n03454536 +n03454707 +n03454885 +n03455488 +n03456024 +n03456186 +n03456299 +n03456447 +n03456548 +n03456665 +n03457008 +n03457686 +n03457902 +n03458271 +n03459328 +n03459775 +n03460040 +n03460147 +n03460297 +n03461288 +n03461385 +n03461988 +n03462110 +n03463381 +n03463666 +n03464053 +n03465151 +n03465426 +n03465500 +n03465718 +n03466493 +n03466600 +n03466839 +n03467068 +n03467517 +n03467796 +n03467984 +n03468696 +n03468821 +n03469175 +n03469493 +n03469903 +n03470629 +n03471190 +n03472232 +n03473227 +n03474779 +n03474896 +n03475581 +n03475823 +n03476083 +n03476313 +n03476684 +n03476991 +n03477512 +n03478589 +n03478756 +n03478907 +n03479121 +n03479397 +n03479502 +n03480579 +n03480719 +n03481172 +n03482252 +n03482405 +n03482523 +n03482877 +n03483230 +n03483316 +n03483823 +n03483971 +n03484083 +n03484487 +n03484576 +n03484809 +n03484931 +n03485198 +n03485309 +n03485407 +n03485794 +n03487090 +n03487331 +n03487444 +n03487533 +n03487642 +n03487774 +n03487886 +n03488188 +n03488438 +n03488887 +n03489162 +n03490006 +n03490119 +n03490884 +n03491032 +n03491988 +n03492250 +n03492542 +n03492922 +n03494278 +n03494537 +n03494706 +n03495039 +n03495258 +n03495570 +n03496296 +n03496612 +n03496892 +n03497352 +n03497657 +n03498441 +n03498662 +n03498781 +n03498962 +n03499354 +n03499468 +n03499907 +n03500209 +n03500389 +n03500699 +n03501152 +n03501614 +n03502200 +n03502331 +n03502509 +n03503233 +n03503477 +n03503997 +n03504205 +n03504723 +n03505133 +n03505383 +n03505504 +n03505667 +n03506028 +n03506184 +n03506370 +n03506560 +n03506727 +n03506880 +n03507241 +n03507458 +n03507963 +n03508101 +n03508881 +n03509394 +n03509608 +n03510244 +n03511175 +n03511333 +n03512147 +n03512911 +n03513137 +n03513376 +n03514451 +n03514693 +n03514894 +n03516367 +n03516844 +n03516996 +n03517647 +n03517760 +n03517899 +n03518135 +n03518305 +n03518445 +n03518943 +n03519081 +n03519387 +n03520493 +n03521076 +n03521544 +n03521675 +n03521899 +n03522003 +n03522100 +n03522634 +n03523134 +n03523987 +n03524150 +n03524574 +n03524745 +n03525074 +n03525454 +n03527149 +n03527444 +n03527565 +n03528263 +n03528523 +n03528901 +n03529175 +n03529444 +n03529629 +n03529860 +n03530511 +n03530642 +n03530910 +n03531281 +n03532342 +n03532672 +n03532919 +n03533014 +n03534580 +n03534776 +n03535024 +n03535780 +n03536122 +n03536761 +n03537241 +n03537412 +n03538037 +n03538179 +n03538406 +n03538634 +n03539433 +n03539546 +n03539678 +n03540090 +n03540267 +n03540595 +n03540914 +n03541091 +n03541269 +n03541537 +n03541696 +n03541923 +n03542333 +n03542605 +n03542860 +n03543012 +n03543112 +n03543254 +n03543394 +n03543603 +n03543735 +n03543945 +n03544143 +n03544238 +n03544360 +n03545150 +n03545470 
+n03545756 +n03546112 +n03546235 +n03546340 +n03547054 +n03547229 +n03548086 +n03548402 +n03548626 +n03549199 +n03549473 +n03549589 +n03549732 +n03549897 +n03550153 +n03550289 +n03551395 +n03551582 +n03552749 +n03553019 +n03553248 +n03554460 +n03555006 +n03555426 +n03555564 +n03555662 +n03556679 +n03556992 +n03557270 +n03557360 +n03557590 +n03557692 +n03558176 +n03558404 +n03558633 +n03558739 +n03559999 +n03560430 +n03561047 +n03563200 +n03563460 +n03565288 +n03565830 +n03566193 +n03566730 +n03567066 +n03568117 +n03569293 +n03571280 +n03571625 +n03571942 +n03572107 +n03572321 +n03574243 +n03574555 +n03574816 +n03577090 +n03577672 +n03578055 +n03578251 +n03578656 +n03579538 +n03580518 +n03580845 +n03581125 +n03582508 +n03582959 +n03584254 +n03584400 +n03584829 +n03585073 +n03585438 +n03585682 +n03586219 +n03586631 +n03587205 +n03588841 +n03588951 +n03589513 +n03589791 +n03590306 +n03590588 +n03590841 +n03590932 +n03592245 +n03592669 +n03592773 +n03592931 +n03593122 +n03593526 +n03594148 +n03594523 +n03594734 +n03594945 +n03595264 +n03595409 +n03595523 +n03595614 +n03595860 +n03596285 +n03596543 +n03597916 +n03598151 +n03598299 +n03598515 +n03598930 +n03599486 +n03600285 +n03600475 +n03600722 +n03600977 +n03601442 +n03601638 +n03601840 +n03602081 +n03602883 +n03603442 +n03603594 +n03603722 +n03604156 +n03604311 +n03604400 +n03604843 +n03605598 +n03605722 +n03605915 +n03606251 +n03607029 +n03607659 +n03607923 +n03609235 +n03609397 +n03610098 +n03610418 +n03610524 +n03610682 +n03612010 +n03612814 +n03612965 +n03613294 +n03613592 +n03614007 +n03614532 +n03614782 +n03615300 +n03615406 +n03615563 +n03615655 +n03615790 +n03616428 +n03616763 +n03616979 +n03617095 +n03617312 +n03617480 +n03618101 +n03618982 +n03619196 +n03619275 +n03619396 +n03619650 +n03619793 +n03619890 +n03620052 +n03620967 +n03621049 +n03621377 +n03622058 +n03622526 +n03622839 +n03622931 +n03623198 +n03623338 +n03623556 +n03624134 +n03624400 +n03625355 +n03625539 +n03625646 +n03625943 +n03626115 +n03626760 +n03627232 +n03627954 +n03628215 +n03628511 +n03629100 +n03629231 +n03629520 +n03630262 +n03630383 +n03631177 +n03631922 +n03632577 +n03632729 +n03632852 +n03633091 +n03633886 +n03634034 +n03635032 +n03635108 +n03635330 +n03635668 +n03636248 +n03636649 +n03637181 +n03637318 +n03637898 +n03638883 +n03639077 +n03639497 +n03640850 +n03640988 +n03641569 +n03642444 +n03642806 +n03643149 +n03643253 +n03643491 +n03643737 +n03644378 +n03644858 +n03645011 +n03645577 +n03646020 +n03646148 +n03646296 +n03646916 +n03647520 +n03648431 +n03649161 +n03649674 +n03649797 +n03649909 +n03650551 +n03651388 +n03651843 +n03652100 +n03652729 +n03652932 +n03653110 +n03653220 +n03653583 +n03653740 +n03653833 +n03653975 +n03654576 +n03655072 +n03655720 +n03656484 +n03656957 +n03657121 +n03657511 +n03658185 +n03658858 +n03659292 +n03659686 +n03659809 +n03659950 +n03660124 +n03660909 +n03661043 +n03661340 +n03662601 +n03662719 +n03662887 +n03663531 +n03664943 +n03665366 +n03665924 +n03666362 +n03666591 +n03666917 +n03667235 +n03667552 +n03667664 +n03667829 +n03668067 +n03668279 +n03668488 +n03668803 +n03669534 +n03669886 +n03670208 +n03671914 +n03672827 +n03673027 +n03673450 +n03674270 +n03674440 +n03674731 +n03675235 +n03676087 +n03676483 +n03676623 +n03676759 +n03677115 +n03678558 +n03678729 +n03679384 +n03679712 +n03680355 +n03680512 +n03680734 +n03680858 +n03680942 +n03682487 +n03682877 +n03683079 +n03683457 +n03683606 +n03683708 +n03683995 +n03684143 +n03684224 +n03684611 +n03684823 +n03685307 +n03685820 +n03686130 +n03686924 +n03687137 +n03687928 
+n03688192 +n03688405 +n03688605 +n03688943 +n03689157 +n03690279 +n03690473 +n03690938 +n03691459 +n03691817 +n03692379 +n03692522 +n03693293 +n03693474 +n03693707 +n03693860 +n03694639 +n03695452 +n03695753 +n03695857 +n03696065 +n03696301 +n03696568 +n03697007 +n03697552 +n03697913 +n03698360 +n03698604 +n03698723 +n03698815 +n03699280 +n03699591 +n03699975 +n03700963 +n03701391 +n03701790 +n03703730 +n03703862 +n03703945 +n03704549 +n03706229 +n03706653 +n03708036 +n03708843 +n03709206 +n03709363 +n03709823 +n03710193 +n03710637 +n03710721 +n03711044 +n03711999 +n03712111 +n03712337 +n03713069 +n03713436 +n03714235 +n03715114 +n03715386 +n03715669 +n03715892 +n03716887 +n03716966 +n03717131 +n03717285 +n03717447 +n03717622 +n03718212 +n03718335 +n03718458 +n03718581 +n03718789 +n03718935 +n03719053 +n03719343 +n03719743 +n03720163 +n03720891 +n03721047 +n03721252 +n03721384 +n03721590 +n03722007 +n03722288 +n03723267 +n03723781 +n03724066 +n03724417 +n03724538 +n03724623 +n03724756 +n03724870 +n03725035 +n03725600 +n03725717 +n03726760 +n03726993 +n03727067 +n03727465 +n03727605 +n03727837 +n03727946 +n03728437 +n03728982 +n03729308 +n03729826 +n03730153 +n03730334 +n03730494 +n03730893 +n03731019 +n03731483 +n03731695 +n03732020 +n03732114 +n03732458 +n03733131 +n03733281 +n03733644 +n03733805 +n03733925 +n03735637 +n03735963 +n03736064 +n03736470 +n03736970 +n03738066 +n03738241 +n03738472 +n03739518 +n03739693 +n03742019 +n03742115 +n03743016 +n03743279 +n03743902 +n03744276 +n03744840 +n03745146 +n03745571 +n03746005 +n03746155 +n03746330 +n03746486 +n03748162 +n03749807 +n03751269 +n03751458 +n03751757 +n03752185 +n03753077 +n03757604 +n03758089 +n03759243 +n03759661 +n03759954 +n03760310 +n03760671 +n03760944 +n03761084 +n03762332 +n03762434 +n03762602 +n03763968 +n03764276 +n03764736 +n03764822 +n03764995 +n03765561 +n03765934 +n03766044 +n03766322 +n03766508 +n03766935 +n03767112 +n03767203 +n03767459 +n03767745 +n03767966 +n03768916 +n03769610 +n03769881 +n03770085 +n03770316 +n03770439 +n03770679 +n03770954 +n03772077 +n03772269 +n03772584 +n03773035 +n03773504 +n03774327 +n03774461 +n03775071 +n03775199 +n03775388 +n03775546 +n03775636 +n03775747 +n03775847 +n03776460 +n03776877 +n03777568 +n03777754 +n03778817 +n03779128 +n03781244 +n03781683 +n03781787 +n03782006 +n03782190 +n03782794 +n03783430 +n03784270 +n03784896 +n03785016 +n03785237 +n03785721 +n03786194 +n03786313 +n03786621 +n03786715 +n03786901 +n03787032 +n03787523 +n03788047 +n03788195 +n03788365 +n03788498 +n03788601 +n03788914 +n03789171 +n03789946 +n03790230 +n03790512 +n03790755 +n03790953 +n03791053 +n03791235 +n03792048 +n03792334 +n03792526 +n03792782 +n03792972 +n03793489 +n03793850 +n03794056 +n03794136 +n03794798 +n03795123 +n03795269 +n03795758 +n03795976 +n03796401 +n03796522 +n03796605 +n03797182 +n03797264 +n03797390 +n03797896 +n03798061 +n03798442 +n03799876 +n03800933 +n03801353 +n03801533 +n03801671 +n03801760 +n03801880 +n03802007 +n03802393 +n03802643 +n03803284 +n03804744 +n03805180 +n03805280 +n03805725 +n03809312 +n03809603 +n03810952 +n03811295 +n03811444 +n03811847 +n03811965 +n03812924 +n03813078 +n03814639 +n03814817 +n03814906 +n03815149 +n03815482 +n03815615 +n03816005 +n03816136 +n03816530 +n03816849 +n03817191 +n03817647 +n03818343 +n03819336 +n03819448 +n03819595 +n03819994 +n03820318 +n03820728 +n03821518 +n03822171 +n03822504 +n03822656 +n03822767 +n03823111 +n03823216 +n03823312 +n03824381 +n03824713 +n03825080 +n03825271 +n03825788 +n03826039 +n03826186 +n03827536 +n03828020 
+n03829954 +n03831382 +n03832144 +n03832673 +n03834040 +n03835197 +n03836062 +n03836451 +n03836906 +n03836976 +n03837422 +n03837606 +n03837698 +n03837869 +n03838298 +n03838899 +n03839424 +n03839671 +n03840681 +n03840823 +n03841143 +n03841290 +n03841666 +n03842012 +n03842156 +n03842377 +n03842986 +n03843316 +n03843438 +n03843555 +n03844045 +n03844233 +n03844673 +n03844815 +n03845190 +n03846100 +n03846234 +n03846431 +n03846677 +n03847471 +n03847823 +n03848168 +n03848348 +n03849679 +n03849814 +n03850053 +n03850245 +n03850492 +n03851341 +n03851787 +n03852280 +n03852688 +n03853924 +n03854065 +n03854421 +n03854506 +n03854722 +n03854815 +n03855214 +n03855333 +n03855604 +n03855756 +n03856012 +n03856465 +n03857291 +n03857687 +n03857828 +n03858085 +n03858183 +n03858418 +n03859000 +n03859170 +n03859280 +n03859495 +n03859608 +n03859958 +n03860404 +n03861271 +n03861430 +n03861842 +n03862676 +n03862862 +n03863108 +n03863262 +n03863923 +n03864139 +n03864356 +n03864692 +n03865371 +n03865557 +n03865949 +n03866082 +n03868242 +n03868406 +n03868643 +n03868863 +n03870105 +n03870672 +n03870980 +n03871083 +n03871371 +n03871524 +n03871628 +n03871724 +n03873416 +n03873699 +n03874138 +n03874293 +n03874487 +n03874599 +n03874823 +n03875218 +n03875806 +n03875955 +n03876231 +n03877351 +n03877472 +n03877674 +n03877845 +n03878066 +n03878211 +n03878828 +n03878963 +n03879705 +n03880323 +n03880531 +n03882611 +n03882960 +n03883054 +n03883385 +n03883524 +n03884397 +n03884778 +n03884926 +n03885028 +n03885194 +n03885293 +n03885535 +n03885669 +n03885788 +n03885904 +n03886053 +n03886641 +n03886762 +n03887185 +n03887330 +n03887697 +n03888257 +n03888605 +n03889503 +n03889726 +n03889871 +n03890093 +n03890233 +n03890514 +n03891051 +n03891251 +n03891332 +n03891538 +n03892178 +n03892425 +n03892557 +n03892728 +n03894051 +n03894379 +n03894677 +n03895866 +n03896103 +n03896233 +n03896419 +n03896526 +n03897943 +n03898129 +n03898271 +n03898395 +n03898633 +n03899768 +n03899933 +n03900393 +n03900979 +n03901229 +n03901750 +n03901974 +n03902125 +n03902482 +n03902756 +n03903424 +n03903733 +n03903868 +n03904060 +n03904183 +n03904433 +n03904657 +n03904782 +n03904909 +n03905947 +n03906224 +n03906463 +n03906997 +n03908204 +n03908618 +n03908714 +n03909020 +n03909160 +n03909406 +n03911513 +n03911658 +n03911767 +n03911866 +n03912218 +n03913343 +n03914106 +n03914337 +n03914438 +n03914583 +n03914831 +n03915118 +n03915437 +n03915900 +n03916031 +n03916470 +n03916720 +n03917198 +n03917814 +n03918480 +n03918737 +n03919096 +n03919289 +n03919430 +n03920288 +n03920641 +n03920737 +n03920867 +n03923379 +n03923918 +n03924069 +n03924679 +n03926148 +n03927091 +n03927299 +n03927539 +n03928116 +n03928814 +n03929660 +n03929855 +n03930313 +n03930630 +n03931765 +n03931885 +n03933933 +n03934042 +n03934229 +n03934311 +n03934565 +n03934656 +n03935116 +n03935234 +n03935335 +n03936466 +n03937543 +n03937835 +n03937931 +n03938037 +n03938244 +n03938401 +n03938522 +n03938725 +n03939178 +n03939677 +n03939844 +n03940256 +n03941013 +n03941231 +n03941417 +n03941684 +n03942813 +n03942920 +n03943115 +n03943266 +n03943714 +n03943920 +n03944024 +n03944138 +n03944341 +n03946076 +n03946162 +n03947466 +n03947798 +n03947888 +n03948242 +n03948459 +n03948830 +n03948950 +n03949145 +n03949317 +n03950228 +n03950537 +n03950899 +n03952576 +n03953901 +n03954393 +n03954731 +n03955296 +n03955489 +n03956157 +n03956623 +n03956785 +n03956922 +n03957315 +n03957420 +n03957762 +n03957991 +n03958227 +n03958752 +n03959014 +n03959701 +n03960374 +n03960490 +n03961711 +n03961939 +n03962852 +n03963198 +n03963294 
+n03963645 +n03964495 +n03964611 +n03965456 +n03965907 +n03966206 +n03966976 +n03967270 +n03967396 +n03967562 +n03967942 +n03968293 +n03968581 +n03968728 +n03970156 +n03970546 +n03971218 +n03973285 +n03973402 +n03973628 +n03973839 +n03973945 +n03974070 +n03974915 +n03975035 +n03975657 +n03975788 +n03976467 +n03976657 +n03977592 +n03977966 +n03978421 +n03978686 +n03978815 +n03978966 +n03980026 +n03980478 +n03980874 +n03981340 +n03981566 +n03981760 +n03981924 +n03982232 +n03982331 +n03982430 +n03982642 +n03982895 +n03983396 +n03983612 +n03984234 +n03984381 +n03984643 +n03984759 +n03985069 +n03985232 +n03985441 +n03985881 +n03986224 +n03986355 +n03986562 +n03986704 +n03986949 +n03987266 +n03987376 +n03987990 +n03988170 +n03989665 +n03990474 +n03991062 +n03991202 +n03991646 +n03991837 +n03992325 +n03992436 +n03992509 +n03992703 +n03993053 +n03993180 +n03993403 +n03993703 +n03994008 +n03994614 +n03995265 +n03995372 +n03995535 +n03995856 +n03996145 +n03996416 +n03996849 +n03998194 +n03998333 +n03999160 +n03999992 +n04000311 +n04000592 +n04000998 +n04001265 +n04001499 +n04001845 +n04002262 +n04003241 +n04003856 +n04004210 +n04004475 +n04004767 +n04004990 +n04005197 +n04005630 +n04008385 +n04008634 +n04009552 +n04009801 +n04011827 +n04012084 +n04012482 +n04013729 +n04015908 +n04016240 +n04016576 +n04016684 +n04016846 +n04018155 +n04018667 +n04019101 +n04019541 +n04019696 +n04020087 +n04020298 +n04020912 +n04021028 +n04021798 +n04022332 +n04023695 +n04023962 +n04024274 +n04024862 +n04024983 +n04025508 +n04026053 +n04026180 +n04026417 +n04026813 +n04026918 +n04027023 +n04027706 +n04028074 +n04028221 +n04028315 +n04028581 +n04028764 +n04029647 +n04029734 +n04030274 +n04030518 +n04032603 +n04033425 +n04033901 +n04033995 +n04034262 +n04035836 +n04035912 +n04036303 +n04037220 +n04037443 +n04037964 +n04038231 +n04038338 +n04038440 +n04038727 +n04039381 +n04039742 +n04039848 +n04040247 +n04040373 +n04040759 +n04041069 +n04041243 +n04041408 +n04041544 +n04041747 +n04042358 +n04043411 +n04043733 +n04044307 +n04044498 +n04044716 +n04045255 +n04045397 +n04045644 +n04046091 +n04046277 +n04046400 +n04046590 +n04046974 +n04047401 +n04048441 +n04049303 +n04049405 +n04049585 +n04049753 +n04050066 +n04050313 +n04050933 +n04051549 +n04051825 +n04052442 +n04052658 +n04052757 +n04053508 +n04053677 +n04054361 +n04054670 +n04055180 +n04056180 +n04056413 +n04056932 +n04057047 +n04057215 +n04057981 +n04058096 +n04058239 +n04058594 +n04059157 +n04059516 +n04059947 +n04060647 +n04061681 +n04061793 +n04061969 +n04062428 +n04063154 +n04063373 +n04063868 +n04064401 +n04064747 +n04064862 +n04065272 +n04065464 +n04065789 +n04066270 +n04067472 +n04067658 +n04067818 +n04067921 +n04068441 +n04068601 +n04069276 +n04069434 +n04070003 +n04070207 +n04070415 +n04070727 +n04071263 +n04071393 +n04072193 +n04072551 +n04072960 +n04073948 +n04074185 +n04074963 +n04075291 +n04075468 +n04075715 +n04075916 +n04076284 +n04076713 +n04077430 +n04078574 +n04079244 +n04079933 +n04080138 +n04080454 +n04080705 +n04080833 +n04081281 +n04081699 +n04082562 +n04082710 +n04082886 +n04083309 +n04083800 +n04084889 +n04086273 +n04086446 +n04087432 +n04087709 +n04087826 +n04089376 +n04089666 +n04089836 +n04089976 +n04090263 +n04091097 +n04091693 +n04093625 +n04093775 +n04094720 +n04095109 +n04095210 +n04095342 +n04095577 +n04096066 +n04097373 +n04097760 +n04097866 +n04098513 +n04099003 +n04099175 +n04099429 +n04099969 +n04100519 +n04101701 +n04102037 +n04102162 +n04102285 +n04102406 +n04102618 +n04102872 +n04103094 +n04103206 +n04103364 +n04103665 +n04103769 
+n04103918 +n04104147 +n04104384 +n04104500 +n04104770 +n04105068 +n04105704 +n04105893 +n04107743 +n04108268 +n04108822 +n04110178 +n04110955 +n04111190 +n04111414 +n04111531 +n04111668 +n04112147 +n04112252 +n04112430 +n04112579 +n04112654 +n04112752 +n04113194 +n04113316 +n04113406 +n04113641 +n04113765 +n04114069 +n04114844 +n04115144 +n04115256 +n04115456 +n04115802 +n04115996 +n04116098 +n04116294 +n04116512 +n04117464 +n04118021 +n04118538 +n04118635 +n04118776 +n04119091 +n04119230 +n04119360 +n04119478 +n04119751 +n04120489 +n04120842 +n04121426 +n04121511 +n04121728 +n04122349 +n04122492 +n04122578 +n04122685 +n04122825 +n04123026 +n04123448 +n04123567 +n04123740 +n04124098 +n04124202 +n04124370 +n04124488 +n04125021 +n04125257 +n04125853 +n04126066 +n04127249 +n04127395 +n04127521 +n04127633 +n04127904 +n04128413 +n04128499 +n04128710 +n04128837 +n04130143 +n04130257 +n04130907 +n04131208 +n04131368 +n04131690 +n04131929 +n04132158 +n04132603 +n04132985 +n04133789 +n04134008 +n04134170 +n04134523 +n04134632 +n04135024 +n04135118 +n04135315 +n04135710 +n04136045 +n04136161 +n04136333 +n04136510 +n04136800 +n04137089 +n04137217 +n04137355 +n04137444 +n04137773 +n04137897 +n04138261 +n04138977 +n04139140 +n04139395 +n04139859 +n04140064 +n04140631 +n04141076 +n04141198 +n04141327 +n04141712 +n04141838 +n04141975 +n04142434 +n04142731 +n04142999 +n04143140 +n04143897 +n04144241 +n04144539 +n04145863 +n04146050 +n04146343 +n04146504 +n04146614 +n04146862 +n04147183 +n04147793 +n04148054 +n04148579 +n04148703 +n04149083 +n04149374 +n04149813 +n04150153 +n04150980 +n04152387 +n04152593 +n04153025 +n04153751 +n04154152 +n04154340 +n04154565 +n04154938 +n04155068 +n04156040 +n04156140 +n04156946 +n04157099 +n04157320 +n04158807 +n04158956 +n04160372 +n04160586 +n04160847 +n04161358 +n04161981 +n04162433 +n04162706 +n04163530 +n04164002 +n04164406 +n04164757 +n04164868 +n04165409 +n04166281 +n04167346 +n04168199 +n04169437 +n04170037 +n04170933 +n04171208 +n04171459 +n04171629 +n04171831 +n04172107 +n04172230 +n04172342 +n04172776 +n04172904 +n04173046 +n04173511 +n04173907 +n04174101 +n04175039 +n04175147 +n04176068 +n04176190 +n04176295 +n04177041 +n04177755 +n04177820 +n04177931 +n04178190 +n04178329 +n04179126 +n04179712 +n04179824 +n04179913 +n04180063 +n04180229 +n04180888 +n04181228 +n04181561 +n04182152 +n04182322 +n04183217 +n04183329 +n04184316 +n04184435 +n04184880 +n04185071 +n04185529 +n04185804 +n04185946 +n04186051 +n04186268 +n04186455 +n04186848 +n04187061 +n04187233 +n04187547 +n04187970 +n04188179 +n04189282 +n04189651 +n04189816 +n04190052 +n04190376 +n04190997 +n04191595 +n04191943 +n04192238 +n04192698 +n04192858 +n04193377 +n04194127 +n04194289 +n04196502 +n04197110 +n04197391 +n04197781 +n04198355 +n04198453 +n04198562 +n04198722 +n04198797 +n04199027 +n04200000 +n04200258 +n04200537 +n04200800 +n04200908 +n04201064 +n04201297 +n04201733 +n04202417 +n04204081 +n04204238 +n04204347 +n04205318 +n04205505 +n04206225 +n04206356 +n04206570 +n04206790 +n04207151 +n04207343 +n04207596 +n04207763 +n04207903 +n04208065 +n04208210 +n04208427 +n04208760 +n04208936 +n04209133 +n04209239 +n04209509 +n04209613 +n04210120 +n04210288 +n04210390 +n04210591 +n04211219 +n04211356 +n04211528 +n04211857 +n04211970 +n04212165 +n04212282 +n04212467 +n04213353 +n04214046 +n04214282 +n04215153 +n04215402 +n04216634 +n04216860 +n04216963 +n04217546 +n04217882 +n04218564 +n04219185 +n04219424 +n04220250 +n04221823 +n04222210 +n04222307 +n04222470 +n04222723 +n04223170 +n04223299 +n04224543 
+n04224842 +n04225031 +n04225729 +n04225987 +n04226464 +n04226826 +n04227144 +n04227900 +n04228054 +n04228215 +n04228581 +n04228693 +n04229007 +n04229107 +n04229480 +n04229737 +n04229816 +n04229959 +n04230387 +n04230603 +n04230808 +n04231272 +n04231693 +n04231905 +n04232153 +n04232800 +n04233124 +n04233715 +n04234455 +n04234887 +n04235291 +n04235860 +n04236377 +n04236809 +n04236935 +n04237423 +n04238128 +n04238321 +n04238617 +n04238763 +n04239074 +n04239436 +n04239786 +n04240752 +n04241249 +n04241573 +n04242084 +n04242408 +n04242704 +n04243546 +n04243941 +n04244379 +n04244997 +n04245508 +n04246060 +n04246271 +n04246731 +n04246855 +n04247011 +n04247630 +n04247736 +n04247876 +n04248396 +n04248507 +n04248851 +n04249415 +n04249582 +n04249882 +n04250224 +n04250473 +n04250692 +n04250850 +n04251144 +n04251701 +n04251791 +n04252077 +n04252225 +n04252331 +n04252560 +n04252653 +n04253057 +n04253168 +n04253931 +n04254009 +n04254120 +n04254680 +n04254777 +n04255163 +n04255586 +n04255899 +n04256520 +n04256891 +n04257223 +n04257684 +n04257790 +n04257986 +n04258138 +n04258333 +n04258438 +n04258618 +n04258732 +n04258859 +n04259630 +n04260364 +n04261116 +n04261281 +n04261638 +n04262161 +n04263257 +n04263336 +n04263502 +n04264628 +n04264765 +n04264914 +n04265275 +n04265904 +n04266014 +n04266162 +n04266375 +n04266486 +n04266968 +n04267435 +n04269270 +n04269822 +n04269944 +n04270147 +n04270371 +n04270891 +n04271531 +n04272054 +n04272389 +n04272928 +n04273285 +n04273569 +n04273659 +n04273796 +n04273972 +n04274985 +n04275175 +n04275548 +n04275661 +n04277352 +n04277493 +n04277826 +n04278247 +n04278353 +n04278447 +n04279172 +n04279353 +n04279462 +n04281260 +n04281375 +n04282231 +n04282494 +n04282872 +n04282992 +n04283096 +n04283255 +n04283378 +n04283585 +n04283905 +n04284002 +n04284341 +n04284438 +n04284572 +n04284869 +n04285008 +n04285146 +n04285803 +n04285965 +n04286575 +n04287451 +n04287747 +n04287898 +n04288272 +n04288533 +n04289027 +n04289195 +n04289576 +n04289690 +n04289827 +n04290079 +n04290259 +n04290507 +n04290615 +n04291992 +n04292080 +n04292414 +n04292572 +n04292921 +n04293119 +n04294426 +n04294614 +n04294879 +n04295081 +n04295571 +n04295881 +n04296562 +n04297098 +n04297750 +n04297847 +n04298053 +n04298661 +n04299215 +n04299370 +n04299963 +n04300643 +n04301000 +n04301760 +n04303357 +n04303497 +n04304215 +n04304375 +n04304680 +n04305210 +n04305323 +n04305572 +n04306080 +n04306592 +n04306847 +n04307767 +n04307986 +n04308084 +n04308273 +n04308397 +n04309049 +n04309348 +n04309548 +n04309833 +n04310018 +n04310157 +n04310721 +n04310904 +n04311004 +n04311174 +n04311595 +n04312154 +n04312432 +n04313220 +n04313503 +n04313628 +n04314522 +n04314914 +n04315342 +n04315713 +n04315948 +n04316498 +n04317063 +n04317175 +n04317325 +n04317420 +n04317833 +n04317976 +n04318787 +n04318892 +n04319937 +n04320973 +n04321453 +n04322026 +n04322801 +n04323819 +n04324297 +n04324387 +n04325041 +n04325704 +n04326547 +n04326676 +n04326799 +n04326896 +n04327204 +n04327682 +n04328186 +n04328329 +n04328946 +n04329834 +n04329958 +n04330267 +n04330340 +n04330746 +n04330998 +n04331277 +n04331639 +n04332074 +n04332243 +n04332580 +n04333129 +n04333869 +n04334105 +n04334365 +n04334599 +n04335209 +n04335435 +n04335693 +n04335886 +n04336792 +n04337287 +n04338517 +n04338963 +n04339879 +n04340521 +n04340750 +n04340935 +n04341133 +n04341686 +n04344003 +n04344734 +n04344873 +n04345028 +n04345201 +n04346003 +n04346157 +n04346328 +n04346428 +n04347119 +n04347519 +n04347754 +n04348359 +n04349306 +n04349401 +n04350458 +n04350581 +n04350769 +n04350905 
+n04351699 +n04353573 +n04354026 +n04354182 +n04354487 +n04354589 +n04355115 +n04355267 +n04355338 +n04355511 +n04355933 +n04356056 +n04356595 +n04356925 +n04357121 +n04357314 +n04357531 +n04358117 +n04358491 +n04358707 +n04358874 +n04359335 +n04359500 +n04360798 +n04360914 +n04361095 +n04361260 +n04363082 +n04363777 +n04363991 +n04364160 +n04364545 +n04365328 +n04366033 +n04366116 +n04366367 +n04367011 +n04367371 +n04367480 +n04367746 +n04367950 +n04368496 +n04369025 +n04369282 +n04370048 +n04370288 +n04370456 +n04370774 +n04371050 +n04371430 +n04371563 +n04371774 +n04372370 +n04373089 +n04373428 +n04373704 +n04373795 +n04373894 +n04374315 +n04374735 +n04375241 +n04375405 +n04375615 +n04376400 +n04376876 +n04377057 +n04378956 +n04379243 +n04379964 +n04380255 +n04380346 +n04380533 +n04380916 +n04381073 +n04381587 +n04381724 +n04381860 +n04381994 +n04382438 +n04382695 +n04382880 +n04383015 +n04383130 +n04383839 +n04384593 +n04384910 +n04385536 +n04385799 +n04386051 +n04386664 +n04386792 +n04387095 +n04387201 +n04387261 +n04387400 +n04387706 +n04387932 +n04388743 +n04389033 +n04389430 +n04389521 +n04389718 +n04389854 +n04390577 +n04390873 +n04390977 +n04391445 +n04391838 +n04392113 +n04392526 +n04392764 +n04392985 +n04393095 +n04393549 +n04393808 +n04393913 +n04394630 +n04395024 +n04395106 +n04395651 +n04396335 +n04396808 +n04396902 +n04397027 +n04397452 +n04397645 +n04397768 +n04398044 +n04398497 +n04398688 +n04398834 +n04398951 +n04399158 +n04399537 +n04399846 +n04400109 +n04400289 +n04400737 +n04401088 +n04401578 +n04401680 +n04401828 +n04401949 +n04402057 +n04402449 +n04402580 +n04402746 +n04402984 +n04403413 +n04403524 +n04403638 +n04403925 +n04404412 +n04404817 +n04404997 +n04405540 +n04405762 +n04405907 +n04406239 +n04406817 +n04407435 +n04407686 +n04408871 +n04409011 +n04409128 +n04409384 +n04409515 +n04409625 +n04409806 +n04410086 +n04411264 +n04412097 +n04412416 +n04413969 +n04414199 +n04414319 +n04414476 +n04414675 +n04414909 +n04415663 +n04416005 +n04417086 +n04417180 +n04417361 +n04417672 +n04417809 +n04418357 +n04419073 +n04419642 +n04419868 +n04421872 +n04422409 +n04422727 +n04422875 +n04423845 +n04424692 +n04425804 +n04426316 +n04426427 +n04427715 +n04428191 +n04428634 +n04429376 +n04430475 +n04430896 +n04431025 +n04431745 +n04432203 +n04432662 +n04433585 +n04434207 +n04434531 +n04434932 +n04435180 +n04435653 +n04436012 +n04436185 +n04436329 +n04437953 +n04438304 +n04438507 +n04438897 +n04439585 +n04439712 +n04440963 +n04441662 +n04441790 +n04442312 +n04442441 +n04442741 +n04443164 +n04443257 +n04443766 +n04444749 +n04445040 +n04445154 +n04445327 +n04445952 +n04446276 +n04446844 +n04447028 +n04447276 +n04447443 +n04447861 +n04448070 +n04448361 +n04449290 +n04449966 +n04450133 +n04450243 +n04450640 +n04450749 +n04450994 +n04451318 +n04451818 +n04452528 +n04452615 +n04452757 +n04453037 +n04453156 +n04453390 +n04453666 +n04454908 +n04455250 +n04455652 +n04456115 +n04457157 +n04457474 +n04457767 +n04457910 +n04458633 +n04458843 +n04459018 +n04459362 +n04459610 +n04459773 +n04459909 +n04460130 +n04461437 +n04461570 +n04461696 +n04461879 +n04462011 +n04462240 +n04463679 +n04464615 +n04464852 +n04465050 +n04465358 +n04465501 +n04465666 +n04466871 +n04467099 +n04467307 +n04467665 +n04468005 +n04469003 +n04469514 +n04469813 +n04471148 +n04471632 +n04472563 +n04473108 +n04474035 +n04474187 +n04474466 +n04475411 +n04475631 +n04476116 +n04476259 +n04476831 +n04476972 +n04477219 +n04477387 +n04477548 +n04478512 +n04479046 +n04479823 +n04479939 +n04480033 +n04480853 +n04482177 +n04482297 
+n04482393 +n04483073 +n04483307 +n04483925 +n04484432 +n04485082 +n04485423 +n04485884 +n04486054 +n04486213 +n04486934 +n04487081 +n04487394 +n04487724 +n04488202 +n04488427 +n04488530 +n04488742 +n04488857 +n04489008 +n04489695 +n04489817 +n04490091 +n04491388 +n04491638 +n04491769 +n04492060 +n04492375 +n04492749 +n04493381 +n04494204 +n04495698 +n04495843 +n04496614 +n04496726 +n04496872 +n04497442 +n04497570 +n04497801 +n04498389 +n04499062 +n04499446 +n04500060 +n04501370 +n04501550 +n04501837 +n04501947 +n04502059 +n04502197 +n04502502 +n04502670 +n04502851 +n04503413 +n04503593 +n04504141 +n04505036 +n04505345 +n04505470 +n04506289 +n04506506 +n04506688 +n04507155 +n04508163 +n04508489 +n04508949 +n04509171 +n04509260 +n04509417 +n04510706 +n04511002 +n04513827 +n04513998 +n04514241 +n04515003 +n04516116 +n04516214 +n04516354 +n04516672 +n04517211 +n04517408 +n04517823 +n04518132 +n04518343 +n04518643 +n04518764 +n04519153 +n04520170 +n04520382 +n04520784 +n04521863 +n04522168 +n04523525 +n04523831 +n04524142 +n04524313 +n04524941 +n04525038 +n04525191 +n04525305 +n04525417 +n04525584 +n04525821 +n04526964 +n04527648 +n04528079 +n04528968 +n04529108 +n04529681 +n04529962 +n04530283 +n04530566 +n04531098 +n04531873 +n04532106 +n04532398 +n04532670 +n04532831 +n04533199 +n04533499 +n04533594 +n04533700 +n04533802 +n04533946 +n04534127 +n04534359 +n04534520 +n04534895 +n04535252 +n04535370 +n04535524 +n04536153 +n04536335 +n04536595 +n04536866 +n04538552 +n04539053 +n04539203 +n04539794 +n04540053 +n04540255 +n04541320 +n04541987 +n04542095 +n04542715 +n04542858 +n04542943 +n04543158 +n04543636 +n04543772 +n04543996 +n04544325 +n04544450 +n04545305 +n04545748 +n04545858 +n04546194 +n04546340 +n04547592 +n04548280 +n04548362 +n04549028 +n04549122 +n04549629 +n04549919 +n04550184 +n04551055 +n04552348 +n04552696 +n04553561 +n04553703 +n04554211 +n04554406 +n04554684 +n04554871 +n04555291 +n04555400 +n04555600 +n04555700 +n04555897 +n04556408 +n04556533 +n04556948 +n04557648 +n04557751 +n04558478 +n04559166 +n04559451 +n04559730 +n04559910 +n04560113 +n04560292 +n04560804 +n04560882 +n04561287 +n04561422 +n04561734 +n04562262 +n04562496 +n04562935 +n04563204 +n04563413 +n04564278 +n04564581 +n04565375 +n04566257 +n04566561 +n04566756 +n04568069 +n04568557 +n04568841 +n04569063 +n04569822 +n04570214 +n04570815 +n04570958 +n04571292 +n04571566 +n04571686 +n04571958 +n04573281 +n04573379 +n04573513 +n04573937 +n04574067 +n04574999 +n04575723 +n04575824 +n04576002 +n04576211 +n04577426 +n04577769 +n04578934 +n04579056 +n04579145 +n04579230 +n04579432 +n04579667 +n04579986 +n04580493 +n04581102 +n04581829 +n04582205 +n04582349 +n04582771 +n04582869 +n04583212 +n04583620 +n04584207 +n04584373 +n04585128 +n04585745 +n04585980 +n04586072 +n04586581 +n04586932 +n04587327 +n04587404 +n04587559 +n04587648 +n04588739 +n04589190 +n04589325 +n04589593 +n04589890 +n04590021 +n04590129 +n04590263 +n04590553 +n04590746 +n04590933 +n04591056 +n04591157 +n04591517 +n04591713 +n04591887 +n04592005 +n04592099 +n04592465 +n04592741 +n04593077 +n04593185 +n04593376 +n04593524 +n04593866 +n04594114 +n04594218 +n04594489 +n04594828 +n04595028 +n04595285 +n04595855 +n04596742 +n04596852 +n04597309 +n04597400 +n04597804 +n04597913 +n04598318 +n04598582 +n04598965 +n04599124 +n04599235 +n04600312 +n04600912 +n04602762 +n04602956 +n04603399 +n04603729 +n04603872 +n04604644 +n04605163 +n04605321 +n04605572 +n04605726 +n04606251 +n04606574 +n04607035 +n04607242 +n04607869 +n04608329 +n04608435 +n04608567 +n04608923 
+n04609531 +n04609651 +n04610013 +n04610274 +n04610503 +n04610676 +n04611916 +n04612026 +n04612373 +n04612504 +n04613015 +n04613696 +n04613939 +n04614655 +n04615226 +n04615644 +n04950952 +n04951071 +n04951186 +n04953296 +n04955160 +n04959672 +n04960277 +n04960582 +n04961062 +n04961331 +n04961691 +n04962062 +n04962240 +n04963307 +n04963588 +n04963740 +n04964001 +n04964799 +n04964878 +n04965179 +n04965451 +n04965661 +n04966543 +n04966941 +n04967191 +n04967674 +n04967801 +n04967882 +n04968056 +n04968139 +n04968749 +n04968895 +n04969242 +n04969540 +n04969798 +n04969952 +n04970059 +n04970398 +n04970470 +n04970916 +n04971211 +n04971313 +n04972350 +n04972451 +n04972801 +n04973291 +n04973386 +n04973585 +n04973816 +n04974859 +n04976319 +n04976952 +n04977412 +n04979002 +n04981658 +n05218119 +n05238282 +n05239437 +n05242928 +n05244934 +n05245192 +n05258051 +n05259914 +n05260127 +n05260240 +n05261310 +n05262422 +n05262534 +n05262698 +n05263183 +n05263448 +n05282652 +n05302499 +n05399034 +n05399243 +n05418717 +n05450617 +n05451384 +n05453657 +n05458576 +n05486510 +n05526957 +n05538625 +n05578095 +n05581932 +n05586759 +n05716342 +n06255081 +n06263609 +n06266633 +n06266973 +n06267145 +n06267564 +n06267655 +n06267758 +n06267893 +n06267991 +n06271778 +n06272290 +n06272612 +n06272803 +n06273414 +n06273555 +n06273743 +n06273986 +n06274760 +n06275095 +n06275353 +n06275471 +n06276501 +n06276697 +n06277135 +n06277280 +n06278338 +n06278475 +n06281040 +n06359193 +n06359467 +n06415688 +n06417096 +n06470073 +n06592281 +n06595351 +n06596364 +n06596474 +n06596607 +n06596727 +n06785654 +n06793231 +n06794110 +n06874185 +n06883725 +n06892775 +n06998748 +n07005523 +n07248320 +n07273802 +n07461050 +n07556406 +n07556637 +n07556970 +n07557434 +n07560193 +n07560331 +n07560542 +n07560652 +n07560903 +n07561112 +n07561590 +n07561848 +n07562495 +n07563207 +n07564008 +n07564796 +n07564971 +n07565083 +n07565161 +n07565259 +n07566340 +n07567707 +n07567980 +n07568502 +n07568818 +n07569106 +n07569644 +n07570720 +n07572616 +n07572957 +n07573347 +n07573696 +n07574176 +n07574426 +n07574504 +n07574602 +n07574780 +n07574923 +n07575076 +n07575392 +n07575510 +n07575726 +n07575984 +n07576182 +n07576438 +n07576781 +n07577144 +n07577374 +n07577538 +n07578093 +n07579575 +n07579688 +n07579787 +n07579917 +n07580053 +n07580253 +n07580359 +n07580470 +n07580592 +n07581249 +n07581346 +n07581775 +n07581931 +n07582152 +n07582277 +n07582609 +n07582892 +n07583066 +n07584110 +n07584332 +n07584423 +n07584593 +n07585107 +n07585208 +n07585557 +n07585758 +n07585906 +n07586099 +n07586318 +n07586604 +n07586718 +n07586894 +n07587023 +n07587111 +n07587331 +n07587441 +n07587618 +n07587700 +n07587962 +n07588111 +n07588193 +n07588299 +n07588419 +n07588574 +n07588817 +n07588947 +n07589458 +n07589543 +n07590320 +n07590502 +n07590611 +n07590752 +n07591049 +n07591473 +n07591586 +n07591961 +n07592094 +n07592400 +n07592481 +n07592656 +n07592768 +n07593004 +n07593199 +n07593471 +n07594066 +n07595180 +n07595649 +n07595914 +n07596684 +n07596967 +n07597145 +n07597365 +n07598256 +n07598734 +n07599161 +n07599911 +n07599998 +n07600177 +n07600285 +n07600696 +n07601290 +n07601572 +n07601686 +n07601809 +n07604956 +n07605040 +n07605380 +n07605474 +n07605597 +n07605804 +n07605944 +n07606538 +n07606669 +n07606764 +n07607138 +n07607605 +n07607967 +n07608098 +n07608339 +n07608429 +n07608866 +n07609215 +n07609407 +n07609632 +n07609840 +n07610620 +n07611046 +n07611148 +n07611267 +n07611358 +n07611839 +n07611991 +n07612137 +n07612367 +n07612632 +n07612996 +n07613266 +n07613480 +n07613815 
+n07614198 +n07614500 +n07614730 +n07614825 +n07615190 +n07615289 +n07615460 +n07615569 +n07615671 +n07615774 +n07616046 +n07616386 +n07616487 +n07616590 +n07616748 +n07617051 +n07617611 +n07617708 +n07617932 +n07618029 +n07618119 +n07618432 +n07619004 +n07619208 +n07619409 +n07620689 +n07621618 +n07623136 +n07624466 +n07624666 +n07624924 +n07625061 +n07627931 +n07628068 +n07631926 +n07639069 +n07641928 +n07642361 +n07642471 +n07642742 +n07642933 +n07643026 +n07643200 +n07643306 +n07643474 +n07643891 +n07643981 +n07648913 +n07648997 +n07650903 +n07651025 +n07654148 +n07654298 +n07655263 +n07665438 +n07666176 +n07678729 +n07679034 +n07679356 +n07680313 +n07680517 +n07680761 +n07680932 +n07681450 +n07681691 +n07681926 +n07682197 +n07682316 +n07682477 +n07682624 +n07682808 +n07682952 +n07683039 +n07683360 +n07683490 +n07683617 +n07683786 +n07684084 +n07684164 +n07684289 +n07684517 +n07684600 +n07684938 +n07685031 +n07685218 +n07685399 +n07685546 +n07685730 +n07685918 +n07686021 +n07686202 +n07686720 +n07686873 +n07687053 +n07687211 +n07687381 +n07687469 +n07687626 +n07687789 +n07688130 +n07688624 +n07688898 +n07689003 +n07689842 +n07690019 +n07690152 +n07690273 +n07690431 +n07690511 +n07690585 +n07690739 +n07690892 +n07691091 +n07691237 +n07691539 +n07691650 +n07691758 +n07691863 +n07691954 +n07692614 +n07693048 +n07693223 +n07693590 +n07693725 +n07693972 +n07694403 +n07694516 +n07694659 +n07694839 +n07695652 +n07695742 +n07695878 +n07695965 +n07696403 +n07696527 +n07696625 +n07696728 +n07696839 +n07696977 +n07697100 +n07697313 +n07697537 +n07697699 +n07697825 +n07698250 +n07698401 +n07698543 +n07698672 +n07698782 +n07700003 +n07704054 +n07704205 +n07705931 +n07707451 +n07708124 +n07708398 +n07708685 +n07709046 +n07709172 +n07709333 +n07709881 +n07710283 +n07710616 +n07710952 +n07711080 +n07711232 +n07711371 +n07711569 +n07712063 +n07712267 +n07712382 +n07712559 +n07712748 +n07712856 +n07712959 +n07713074 +n07713267 +n07713395 +n07713763 +n07713895 +n07714078 +n07714188 +n07714287 +n07714448 +n07714571 +n07714802 +n07714895 +n07714990 +n07715103 +n07715221 +n07715407 +n07715561 +n07715721 +n07716034 +n07716203 +n07716358 +n07716504 +n07716906 +n07717070 +n07717410 +n07717556 +n07718472 +n07718747 +n07719213 +n07719616 +n07719839 +n07720277 +n07720442 +n07720615 +n07720875 +n07721018 +n07721195 +n07721325 +n07721456 +n07721678 +n07721942 +n07722052 +n07722217 +n07722485 +n07722763 +n07722888 +n07723039 +n07723177 +n07723330 +n07723559 +n07723968 +n07724269 +n07724492 +n07724654 +n07724943 +n07725255 +n07725376 +n07725531 +n07725789 +n07725888 +n07726095 +n07726525 +n07726672 +n07726796 +n07727048 +n07727458 +n07727578 +n07727868 +n07728053 +n07728181 +n07728391 +n07728585 +n07728708 +n07729384 +n07729485 +n07729828 +n07729926 +n07730033 +n07730207 +n07730320 +n07730406 +n07730708 +n07730855 +n07731006 +n07731284 +n07731587 +n07731767 +n07731952 +n07732168 +n07732636 +n07732747 +n07732904 +n07733394 +n07733567 +n07733712 +n07734017 +n07734183 +n07734292 +n07734417 +n07734555 +n07734744 +n07734879 +n07735404 +n07735510 +n07735687 +n07735803 +n07736087 +n07736256 +n07736371 +n07736692 +n07736813 +n07737745 +n07739125 +n07739344 +n07739506 +n07740033 +n07740220 +n07740342 +n07740461 +n07740597 +n07740954 +n07741138 +n07741461 +n07742012 +n07742313 +n07742704 +n07743224 +n07743544 +n07743902 +n07744057 +n07744246 +n07744430 +n07744682 +n07744811 +n07745046 +n07745466 +n07745940 +n07746186 +n07746334 +n07746551 +n07747055 +n07747607 +n07747951 +n07748157 +n07748276 +n07748416 +n07748574 +n07748753 
+n07748912 +n07749192 +n07749312 +n07749446 +n07749582 +n07749731 +n07749870 +n07749969 +n07750146 +n07750449 +n07750736 +n07750872 +n07751004 +n07751148 +n07751280 +n07751451 +n07751858 +n07752109 +n07752377 +n07752514 +n07752664 +n07752966 +n07753113 +n07753275 +n07753592 +n07753743 +n07753980 +n07754451 +n07754684 +n07754894 +n07755089 +n07755411 +n07755707 +n07755929 +n07756325 +n07756641 +n07756951 +n07757132 +n07757312 +n07757511 +n07757990 +n07758680 +n07759194 +n07759816 +n07760153 +n07760859 +n07761141 +n07761309 +n07761611 +n07762114 +n07762244 +n07762740 +n07762913 +n07763107 +n07763629 +n07763792 +n07763987 +n07764155 +n07764315 +n07764630 +n07764847 +n07765073 +n07765208 +n07765361 +n07765862 +n07765999 +n07766173 +n07766891 +n07767002 +n07767171 +n07767344 +n07767549 +n07767709 +n07767847 +n07768068 +n07768230 +n07768423 +n07768694 +n07768858 +n07769584 +n07769731 +n07770034 +n07770763 +n07771212 +n07771731 +n07772147 +n07772274 +n07772788 +n07772935 +n07774596 +n07774719 +n07774842 +n07775050 +n07775197 +n07800740 +n07801091 +n07801342 +n07801508 +n07801779 +n07801892 +n07802026 +n07802417 +n07802863 +n07802963 +n07803093 +n07803545 +n07804323 +n07804543 +n07804657 +n07804771 +n07804900 +n07805594 +n07805731 +n07805966 +n07806120 +n07806221 +n07806633 +n07806774 +n07807002 +n07807171 +n07807317 +n07807472 +n07807594 +n07807710 +n07807834 +n07807922 +n07808022 +n07808587 +n07808904 +n07809096 +n07810907 +n07812046 +n07812184 +n07814203 +n07814390 +n07814487 +n07814634 +n07814790 +n07815424 +n07815588 +n07815839 +n07816052 +n07816164 +n07816296 +n07816398 +n07816575 +n07816839 +n07817024 +n07817160 +n07817315 +n07817871 +n07818133 +n07818277 +n07818572 +n07818689 +n07818825 +n07818995 +n07819166 +n07819480 +n07819769 +n07819896 +n07820145 +n07820297 +n07820497 +n07820683 +n07820960 +n07821260 +n07821610 +n07821758 +n07821919 +n07822197 +n07822323 +n07822518 +n07822845 +n07823105 +n07823280 +n07823460 +n07823698 +n07823951 +n07824191 +n07824702 +n07825194 +n07825717 +n07825972 +n07826091 +n07826340 +n07826453 +n07826930 +n07827130 +n07827284 +n07827410 +n07827750 +n07827896 +n07828642 +n07829248 +n07829331 +n07829412 +n07830593 +n07831146 +n07831267 +n07832416 +n07832902 +n07834065 +n07834507 +n07834618 +n07834872 +n07835331 +n07835457 +n07835921 +n07836838 +n07837002 +n07837362 +n07837912 +n07838073 +n07838233 +n07838441 +n07838551 +n07840027 +n07840520 +n07840804 +n07841345 +n07841495 +n07841639 +n07841800 +n07841907 +n07842044 +n07842130 +n07842202 +n07842308 +n07842433 +n07842605 +n07842753 +n07843117 +n07843464 +n07843636 +n07843775 +n07844042 +n07844604 +n07844867 +n07845087 +n07845702 +n07845863 +n07846143 +n07847198 +n07847453 +n07847827 +n07847917 +n07848093 +n07848196 +n07848338 +n07849336 +n07849619 +n07849733 +n07849912 +n07850083 +n07850329 +n07851298 +n07851443 +n07851554 +n07851641 +n07851767 +n07852045 +n07852229 +n07852302 +n07852614 +n07852833 +n07852919 +n07853560 +n07854184 +n07854982 +n07855510 +n07855907 +n07857170 +n07857731 +n07858114 +n07858978 +n07859284 +n07859583 +n07859796 +n07860103 +n07860331 +n07860447 +n07860805 +n07860988 +n07861158 +n07861557 +n07861813 +n07861983 +n07862095 +n07862244 +n07862348 +n07862461 +n07862611 +n07863374 +n07863547 +n07863802 +n07864065 +n07864756 +n07864934 +n07865105 +n07865196 +n07865484 +n07866015 +n07866151 +n07866277 +n07866409 +n07866723 +n07866868 +n07867021 +n07867164 +n07867324 +n07867421 +n07867616 +n07867751 +n07868200 +n07868340 +n07868508 +n07868830 +n07868955 +n07869291 +n07869391 +n07869522 +n07869611 
+n07869775 +n07870069 +n07870167 +n07870313 +n07870894 +n07871234 +n07871436 +n07871720 +n07871810 +n07872593 +n07873057 +n07873348 +n07873464 +n07873807 +n07874063 +n07874159 +n07874259 +n07874343 +n07874441 +n07874780 +n07875152 +n07875436 +n07875560 +n07875693 +n07876189 +n07876651 +n07877187 +n07877299 +n07877675 +n07877849 +n07877961 +n07878647 +n07878785 +n07878926 +n07879072 +n07879174 +n07879350 +n07879450 +n07879659 +n07879953 +n07880080 +n07880213 +n07880325 +n07880458 +n07880751 +n07880880 +n07880968 +n07881205 +n07881404 +n07881800 +n07882420 +n07882497 +n07883031 +n07883251 +n07884567 +n07885705 +n07886057 +n07886176 +n07886463 +n07886572 +n07886849 +n07887099 +n07887192 +n07887304 +n07887461 +n07887634 +n07887967 +n07888229 +n07888465 +n07888816 +n07889274 +n07889510 +n07889814 +n07890068 +n07890226 +n07890352 +n07890540 +n07890750 +n07891189 +n07891309 +n07891433 +n07891726 +n07892418 +n07892512 +n07892813 +n07893253 +n07893528 +n07893642 +n07893891 +n07894102 +n07894298 +n07894451 +n07894551 +n07894703 +n07894799 +n07894965 +n07895100 +n07895237 +n07895435 +n07895595 +n07895710 +n07895839 +n07895962 +n07896060 +n07896165 +n07896287 +n07896661 +n07896765 +n07896893 +n07896994 +n07897116 +n07897438 +n07897600 +n07897750 +n07897865 +n07897975 +n07898117 +n07898247 +n07898333 +n07898443 +n07898617 +n07898745 +n07899003 +n07899108 +n07899292 +n07899434 +n07899533 +n07899660 +n07899769 +n07899899 +n07900225 +n07900406 +n07900616 +n07900734 +n07900825 +n07900958 +n07901355 +n07901457 +n07901587 +n07902121 +n07902336 +n07902443 +n07902799 +n07902937 +n07903101 +n07903208 +n07903543 +n07903643 +n07903731 +n07903841 +n07903962 +n07904293 +n07904395 +n07904637 +n07904760 +n07904865 +n07904934 +n07905038 +n07905296 +n07905386 +n07905474 +n07905979 +n07906111 +n07906284 +n07906572 +n07906718 +n07906877 +n07907037 +n07907161 +n07907342 +n07907429 +n07907548 +n07907831 +n07907943 +n07908411 +n07908567 +n07908647 +n07908812 +n07909129 +n07909593 +n07910048 +n07910152 +n07910379 +n07910538 +n07910656 +n07911249 +n07911371 +n07911677 +n07912211 +n07913393 +n07913882 +n07914006 +n07914128 +n07914271 +n07914413 +n07914586 +n07914777 +n07914995 +n07915094 +n07915491 +n07915618 +n07915918 +n07916041 +n07916183 +n07916319 +n07916437 +n07917133 +n07917272 +n07917392 +n07917507 +n07917618 +n07918028 +n07918193 +n07918309 +n07918879 +n07919310 +n07919441 +n07919572 +n07920052 +n07920222 +n07920349 +n07920540 +n07920663 +n07920872 +n07920989 +n07921239 +n07921455 +n07921615 +n07921834 +n07922041 +n07922147 +n07922512 +n07922764 +n07923748 +n07924033 +n07924276 +n07924443 +n07924560 +n07924747 +n07924834 +n07924955 +n07925116 +n07925229 +n07925500 +n07925608 +n07925966 +n07926250 +n07926346 +n07926785 +n07926920 +n07927197 +n07927512 +n07927836 +n07927931 +n07928163 +n07928367 +n07928488 +n07928696 +n07928790 +n07928887 +n07929172 +n07929351 +n07929519 +n07930062 +n07930315 +n07930433 +n07930554 +n07930864 +n07931001 +n07931452 +n07931612 +n07931870 +n07932039 +n07932841 +n07933154 +n07933274 +n07933799 +n07934282 +n07935043 +n07935379 +n07935504 +n07935737 +n07935878 +n07936263 +n07936548 +n07936745 +n07937461 +n07938007 +n07938149 +n07938313 +n07938594 +n07942152 +n07951464 +n07954211 +n07977870 +n08182379 +n08242223 +n08249459 +n08256735 +n08376250 +n08492461 +n08494231 +n08495908 +n08505018 +n08517676 +n08518171 +n08521623 +n08524735 +n08539072 +n08547468 +n08547544 +n08551296 +n08555710 +n08560295 +n08571898 +n08573842 +n08578517 +n08579352 +n08580944 +n08583292 +n08583455 +n08584914 +n08596076 
+n08598301 +n08598568 +n08611339 +n08614632 +n08616050 +n08628141 +n08633683 +n08640531 +n08640739 +n08640962 +n08645104 +n08645212 +n08649711 +n08658309 +n08659446 +n08659861 +n08663703 +n08673039 +n08677424 +n08896327 +n09189157 +n09191635 +n09193705 +n09194227 +n09199101 +n09205509 +n09206896 +n09206985 +n09208496 +n09210862 +n09217230 +n09218315 +n09218494 +n09218641 +n09219233 +n09224725 +n09228055 +n09229709 +n09230041 +n09230202 +n09231117 +n09233446 +n09238926 +n09239302 +n09242389 +n09245515 +n09246464 +n09247410 +n09248399 +n09249034 +n09251407 +n09256479 +n09257843 +n09259025 +n09259219 +n09260907 +n09263912 +n09265620 +n09267854 +n09269341 +n09269472 +n09270735 +n09274152 +n09279986 +n09282208 +n09283193 +n09283405 +n09283767 +n09283866 +n09287968 +n09288635 +n09289331 +n09290444 +n09294877 +n09295946 +n09300905 +n09302616 +n09303008 +n09303528 +n09304750 +n09305898 +n09308572 +n09308743 +n09309168 +n09309292 +n09326662 +n09331251 +n09332890 +n09335809 +n09336555 +n09337253 +n09344324 +n09348460 +n09349648 +n09359803 +n09361517 +n09362945 +n09366317 +n09376198 +n09376526 +n09376786 +n09381242 +n09382099 +n09384106 +n09392402 +n09393605 +n09396465 +n09396608 +n09398076 +n09398677 +n09399592 +n09400987 +n09403211 +n09403427 +n09403734 +n09405078 +n09406793 +n09409512 +n09409752 +n09410224 +n09411189 +n09415584 +n09415671 +n09416076 +n09416890 +n09421799 +n09421951 +n09428293 +n09428628 +n09432283 +n09433442 +n09433839 +n09435739 +n09436444 +n09436708 +n09437454 +n09438844 +n09438940 +n09439213 +n09442595 +n09443281 +n09443641 +n09444783 +n09445008 +n09445289 +n09447666 +n09448690 +n09450163 +n09451237 +n09452395 +n09452760 +n09453008 +n09454153 +n09454412 +n09457979 +n09460046 +n09461069 +n09466678 +n09468604 +n09472413 +n09472597 +n09475044 +n09475179 +n09475925 +n09481120 +n09505153 +n09606527 +n09607630 +n09607903 +n09608709 +n09610405 +n09616922 +n09618760 +n09618880 +n09618957 +n09619168 +n09620078 +n09620794 +n09621232 +n09622049 +n09622302 +n09624168 +n09624559 +n09626238 +n09627906 +n09629752 +n09632518 +n09635534 +n09636339 +n09637339 +n09638454 +n09638875 +n09639919 +n09640715 +n09641002 +n09643799 +n09644152 +n09648743 +n09650729 +n09651123 +n09652149 +n09654518 +n09656077 +n09659039 +n09659188 +n09661873 +n09666883 +n09670521 +n09675922 +n09676021 +n09676247 +n09676884 +n09679170 +n09681234 +n09683757 +n09683924 +n09684901 +n09686401 +n09688804 +n09689435 +n09689958 +n09690621 +n09691729 +n09691858 +n09692915 +n09693982 +n09694664 +n09694771 +n09695514 +n09695620 +n09695979 +n09696456 +n09696585 +n09696763 +n09697401 +n09698644 +n09700964 +n09701148 +n09701833 +n09703485 +n09703708 +n09705124 +n09705784 +n09706255 +n09707289 +n09708750 +n09708889 +n09710041 +n09711435 +n09712324 +n09712448 +n09712696 +n09713108 +n09714694 +n09715427 +n09717233 +n09718217 +n09718811 +n09718936 +n09719309 +n09719794 +n09720033 +n09720256 +n09720595 +n09720842 +n09722658 +n09723067 +n09724533 +n09724656 +n09724785 +n09725000 +n09725653 +n09725772 +n09726621 +n09727440 +n09727826 +n09728137 +n09728285 +n09730077 +n09730204 +n09730824 +n09731343 +n09731436 +n09732170 +n09733793 +n09734185 +n09734450 +n09734535 +n09734639 +n09736798 +n09736945 +n09738121 +n09740724 +n09741816 +n09742101 +n09742315 +n09743487 +n09743792 +n09744161 +n09744834 +n09747191 +n09747495 +n09749386 +n09750282 +n09750641 +n09750770 +n09750891 +n09751496 +n09751895 +n09752023 +n09752519 +n09753792 +n09754217 +n09755241 +n09756049 +n09757449 +n09758885 +n09759501 +n09760609 +n09761068 +n09763784 +n09764598 +n09764900 
+n09765118 +n09767197 +n09770179 +n09770359 +n09772930 +n09774783 +n09776346 +n09779790 +n09782167 +n09782397 +n09785659 +n09785891 +n09787534 +n09787765 +n09789566 +n09791014 +n09791419 +n09791816 +n09792555 +n09792969 +n09793141 +n09796809 +n09797873 +n09800964 +n09801533 +n09805151 +n09805324 +n09809538 +n09809749 +n09809925 +n09811852 +n09813219 +n09814660 +n09816771 +n09818022 +n09820263 +n09822830 +n09823502 +n09823832 +n09824135 +n09824609 +n09827246 +n09827363 +n09828216 +n09830194 +n09830400 +n09830629 +n09832456 +n09833441 +n09833536 +n09834378 +n09834699 +n09835230 +n09835348 +n09835506 +n09836160 +n09836343 +n09836519 +n09836786 +n09838621 +n09839702 +n09840217 +n09840520 +n09841188 +n09841696 +n09842047 +n09842395 +n09842528 +n09843443 +n09843824 +n09844457 +n09845401 +n09846469 +n09846755 +n09846894 +n09847543 +n09850760 +n09851165 +n09851575 +n09854218 +n09854421 +n09855433 +n09856671 +n09858165 +n09859152 +n09861599 +n09861863 +n09861946 +n09862621 +n09863031 +n09866817 +n09871229 +n09871681 +n09871867 +n09872066 +n09873348 +n09873473 +n09873899 +n09874428 +n09874725 +n09874862 +n09877288 +n09877750 +n09877951 +n09881265 +n09881895 +n09886403 +n09889065 +n09889170 +n09889941 +n09890749 +n09893191 +n09893344 +n09893502 +n09894143 +n09894445 +n09894654 +n09895222 +n09895561 +n09896170 +n09896401 +n09896685 +n09899671 +n09899782 +n09899929 +n09901337 +n09901921 +n09902731 +n09902954 +n09903153 +n09903501 +n09904208 +n09904837 +n09905185 +n09906449 +n09911226 +n09913455 +n09913593 +n09915434 +n09915651 +n09916348 +n09917214 +n09917345 +n09917593 +n09918248 +n09918554 +n09919451 +n09920283 +n09923186 +n09923418 +n09923561 +n09923673 +n09924106 +n09924195 +n09924996 +n09927451 +n09928136 +n09929298 +n09929577 +n09930257 +n09930876 +n09931165 +n09931640 +n09932098 +n09932336 +n09932508 +n09933098 +n09934337 +n09934774 +n09936825 +n09938449 +n09941089 +n09941787 +n09941964 +n09942970 +n09943239 +n09943811 +n09944022 +n09944430 +n09945745 +n09946814 +n09951274 +n09951616 +n09953350 +n09954639 +n09959142 +n09964202 +n09967967 +n09970822 +n09971273 +n09972010 +n09972458 +n09974648 +n09975425 +n09976283 +n09976429 +n09980985 +n09981278 +n09981540 +n09981939 +n09988063 +n09988493 +n09988703 +n09989502 +n09990415 +n09990690 +n09990777 +n09991867 +n09993252 +n09994673 +n10001217 +n10001481 +n10002760 +n10004718 +n10005934 +n10007684 +n10009276 +n10013811 +n10015485 +n10017272 +n10019072 +n10019406 +n10020670 +n10020890 +n10024362 +n10025635 +n10026976 +n10027246 +n10033412 +n10033663 +n10034201 +n10034614 +n10036692 +n10036929 +n10037385 +n10037922 +n10038409 +n10039271 +n10039946 +n10040945 +n10042845 +n10043491 +n10043643 +n10048612 +n10049363 +n10053439 +n10053808 +n10054657 +n10055410 +n10058962 +n10060352 +n10063635 +n10069296 +n10069981 +n10070108 +n10070711 +n10075693 +n10076224 +n10076604 +n10076957 +n10077593 +n10078131 +n10078719 +n10078806 +n10079399 +n10080869 +n10081204 +n10082043 +n10082687 +n10082997 +n10084295 +n10085869 +n10086383 +n10087434 +n10091450 +n10091564 +n10091651 +n10092488 +n10092643 +n10092794 +n10092978 +n10093475 +n10093818 +n10095769 +n10095869 +n10098245 +n10098517 +n10098624 +n10098710 +n10098862 +n10102800 +n10104064 +n10105733 +n10107303 +n10108018 +n10112129 +n10115430 +n10116702 +n10117739 +n10117851 +n10120330 +n10120671 +n10123122 +n10123844 +n10127689 +n10129825 +n10131151 +n10131815 +n10132035 +n10134178 +n10134982 +n10135129 +n10137825 +n10140597 +n10140929 +n10141364 +n10141732 +n10142391 +n10142747 +n10143172 +n10144338 +n10145239 +n10145340 
+n10145480 +n10145590 +n10145774 +n10145902 +n10146002 +n10146104 +n10146416 +n10146816 +n10147121 +n10147262 +n10147935 +n10148035 +n10150071 +n10150940 +n10151760 +n10152763 +n10153414 +n10153594 +n10155849 +n10157128 +n10159045 +n10159533 +n10160280 +n10164233 +n10164492 +n10165448 +n10167152 +n10167838 +n10168837 +n10169147 +n10173410 +n10173771 +n10174330 +n10174445 +n10175248 +n10178216 +n10182190 +n10183931 +n10185483 +n10185793 +n10186068 +n10186216 +n10187491 +n10187990 +n10188957 +n10189278 +n10191001 +n10192839 +n10194231 +n10195593 +n10198437 +n10200781 +n10202624 +n10203949 +n10205231 +n10205457 +n10207169 +n10208189 +n10208950 +n10209082 +n10209731 +n10210911 +n10212231 +n10212501 +n10215623 +n10216106 +n10221312 +n10222170 +n10223177 +n10225219 +n10225931 +n10226413 +n10227985 +n10229883 +n10233248 +n10235024 +n10235385 +n10236304 +n10237069 +n10237196 +n10237464 +n10237676 +n10241300 +n10242328 +n10243137 +n10243273 +n10243664 +n10247358 +n10247880 +n10249270 +n10249459 +n10252222 +n10253122 +n10253296 +n10253479 +n10253703 +n10258786 +n10259348 +n10259780 +n10259997 +n10260706 +n10260800 +n10261624 +n10262445 +n10262561 +n10262655 +n10263411 +n10263790 +n10267311 +n10267865 +n10274815 +n10275395 +n10276477 +n10277027 +n10279018 +n10280674 +n10282482 +n10282672 +n10283170 +n10288964 +n10289039 +n10289462 +n10290919 +n10291822 +n10293332 +n10296176 +n10296444 +n10297234 +n10297531 +n10297841 +n10298647 +n10298912 +n10299250 +n10300154 +n10300303 +n10300500 +n10303814 +n10304086 +n10304914 +n10305802 +n10308168 +n10308732 +n10313000 +n10313239 +n10313724 +n10314054 +n10314517 +n10314836 +n10315456 +n10315561 +n10316360 +n10317007 +n10317500 +n10318293 +n10318607 +n10320863 +n10321340 +n10323634 +n10324560 +n10325774 +n10327987 +n10328123 +n10328328 +n10331167 +n10332385 +n10332861 +n10333439 +n10333601 +n10333838 +n10334009 +n10339717 +n10340312 +n10341343 +n10341573 +n10342992 +n10343355 +n10345015 +n10346015 +n10347446 +n10348526 +n10353016 +n10355142 +n10355449 +n10355688 +n10356877 +n10357613 +n10359546 +n10360747 +n10362319 +n10362557 +n10364198 +n10366276 +n10366966 +n10368291 +n10368528 +n10368624 +n10369317 +n10370955 +n10373390 +n10375052 +n10375314 +n10375402 +n10376523 +n10377021 +n10377185 +n10377291 +n10378026 +n10380672 +n10382710 +n10382825 +n10384392 +n10384496 +n10385566 +n10386984 +n10387196 +n10387324 +n10393909 +n10395073 +n10395828 +n10396106 +n10400108 +n10400437 +n10400618 +n10401331 +n10401639 +n10403876 +n10405694 +n10406266 +n10406391 +n10406765 +n10407310 +n10407954 +n10410246 +n10411551 +n10415037 +n10418735 +n10419472 +n10419785 +n10420507 +n10421016 +n10421470 +n10421956 +n10422405 +n10427764 +n10431625 +n10432189 +n10432441 +n10435169 +n10435988 +n10438842 +n10439373 +n10439851 +n10441037 +n10441962 +n10449664 +n10450161 +n10450303 +n10451450 +n10453184 +n10461060 +n10464052 +n10465451 +n10465831 +n10467179 +n10467395 +n10469874 +n10470779 +n10472129 +n10473917 +n10474645 +n10476467 +n10477713 +n10481268 +n10482220 +n10483138 +n10483799 +n10485883 +n10486166 +n10487182 +n10488656 +n10493685 +n10495756 +n10498816 +n10498986 +n10499232 +n10499355 +n10500217 +n10500419 +n10500603 +n10502329 +n10504206 +n10505613 +n10506915 +n10508141 +n10508710 +n10509063 +n10510245 +n10512372 +n10513823 +n10514429 +n10521100 +n10521662 +n10522035 +n10522759 +n10523341 +n10524076 +n10525436 +n10525617 +n10528023 +n10529231 +n10530150 +n10530383 +n10530959 +n10536416 +n10540114 +n10542608 +n10542761 +n10542888 +n10548537 +n10548681 +n10550369 +n10553235 +n10559288 
+n10559508 +n10559996 +n10560106 +n10562135 +n10562283 +n10563314 +n10563403 +n10565667 +n10566072 +n10568358 +n10568608 +n10569179 +n10572706 +n10572889 +n10574538 +n10574840 +n10575463 +n10577284 +n10578021 +n10578471 +n10580030 +n10581890 +n10582746 +n10583387 +n10583790 +n10585077 +n10588074 +n10588357 +n10588965 +n10590146 +n10592811 +n10593521 +n10595164 +n10595647 +n10598181 +n10599806 +n10602470 +n10602985 +n10603851 +n10604380 +n10604979 +n10607291 +n10607478 +n10610465 +n10610850 +n10611267 +n10611613 +n10613996 +n10618342 +n10620586 +n10620758 +n10622053 +n10624074 +n10624310 +n10624437 +n10624540 +n10627252 +n10628644 +n10629939 +n10630188 +n10631309 +n10633450 +n10634849 +n10635788 +n10638922 +n10639359 +n10639637 +n10642596 +n10644598 +n10645017 +n10646140 +n10649197 +n10652605 +n10655594 +n10657835 +n10661563 +n10665587 +n10665698 +n10667477 +n10667863 +n10669991 +n10671613 +n10671736 +n10672371 +n10672662 +n10674713 +n10675010 +n10678937 +n10679174 +n10680609 +n10680796 +n10682953 +n10685398 +n10686073 +n10686885 +n10688356 +n10689306 +n10690648 +n10692482 +n10693824 +n10694258 +n10696508 +n10698368 +n10699981 +n10701180 +n10701644 +n10701962 +n10702167 +n10707134 +n10707233 +n10709529 +n10711766 +n10718131 +n10719132 +n10721321 +n10726031 +n10727171 +n10727458 +n10728624 +n10730728 +n10732010 +n10734394 +n10734891 +n10737103 +n10738111 +n10739391 +n10740868 +n10741367 +n10744164 +n10745006 +n10746931 +n10747119 +n10748620 +n10750031 +n10750640 +n10751152 +n10753442 +n10754189 +n10755080 +n10755648 +n10756148 +n10757050 +n10757492 +n10761190 +n10763075 +n10763383 +n10763620 +n10765679 +n10772092 +n10773665 +n10780284 +n10780632 +n10782471 +n10782791 +n10782940 +n10787470 +n10791115 +n10791221 +n10792335 +n10792856 +n10793570 +n10802507 +n10804287 +n10806113 +n11448153 +n11487732 +n11508382 +n11524451 +n11532682 +n11533212 +n11536673 +n11537327 +n11542137 +n11542640 +n11544015 +n11545524 +n11545714 +n11547855 +n11552133 +n11552806 +n11552976 +n11599324 +n11600372 +n11601177 +n11601333 +n11601918 +n11602873 +n11603246 +n11603835 +n11608250 +n11609475 +n11609862 +n11610047 +n11610215 +n11610437 +n11611087 +n11611233 +n11611356 +n11611561 +n11611758 +n11612018 +n11612349 +n11612575 +n11613219 +n11613459 +n11614039 +n11614250 +n11614420 +n11614713 +n11615026 +n11615387 +n11615607 +n11615967 +n11616486 +n11616662 +n11617090 +n11617272 +n11617631 +n11618290 +n11618525 +n11618861 +n11619227 +n11619455 +n11620389 +n11620673 +n11621029 +n11621281 +n11621547 +n11621727 +n11621950 +n11622184 +n11622368 +n11622591 +n11622771 +n11623105 +n11623815 +n11623967 +n11624192 +n11624531 +n11625003 +n11625223 +n11625632 +n11625804 +n11626152 +n11626409 +n11626585 +n11626826 +n11627168 +n11627512 +n11627908 +n11628087 +n11628456 +n11628793 +n11630017 +n11631854 +n11632167 +n11632619 +n11634736 +n11635152 +n11635433 +n11635830 +n11636204 +n11636835 +n11639445 +n11640132 +n11643835 +n11644046 +n11644226 +n11644462 +n11645590 +n11645914 +n11646167 +n11646344 +n11646694 +n11647306 +n11647703 +n11650558 +n11652376 +n11653904 +n11654293 +n11655974 +n11656123 +n11658331 +n11658544 +n11660300 +n11661372 +n11661909 +n11662371 +n11664418 +n11665372 +n11666854 +n11669786 +n11669921 +n11672269 +n11672400 +n11675025 +n11676500 +n11678010 +n11680596 +n11682659 +n11686912 +n11689483 +n11690254 +n11690455 +n11691046 +n11691857 +n11692265 +n11692792 +n11693981 +n11694664 +n11695599 +n11695974 +n11698042 +n11699442 +n11700058 +n11701066 +n11703669 +n11704093 +n11704620 +n11705171 +n11705387 +n11705776 +n11706761 
+n11707229 +n11707827 +n11709205 +n11709674 +n11710136 +n11710393 +n11710827 +n11711537 +n11711764 +n11712282 +n11714382 +n11715430 +n11715678 +n11717577 +n11719286 +n11720353 +n11720643 +n11720891 +n11721337 +n11722466 +n11722982 +n11723227 +n11723770 +n11724109 +n11725015 +n11725311 +n11725480 +n11725821 +n11725973 +n11726269 +n11726707 +n11727091 +n11727358 +n11727540 +n11727738 +n11728099 +n11728945 +n11730602 +n11731659 +n11732567 +n11733054 +n11733312 +n11733548 +n11735053 +n11736694 +n11736851 +n11737534 +n11748811 +n11752937 +n11753143 +n11753355 +n11753700 +n11754893 +n11756092 +n11756669 +n11756870 +n11757653 +n11757851 +n11758122 +n11758276 +n11758483 +n11758799 +n11759224 +n11759404 +n11759853 +n11760785 +n11761202 +n11761650 +n11762433 +n11769176 +n11769621 +n11769803 +n11770256 +n11772408 +n11772879 +n11773987 +n11774513 +n11777080 +n11778257 +n11779300 +n11780148 +n11781176 +n11782036 +n11782761 +n11783920 +n11784126 +n11784497 +n11785668 +n11786131 +n11786539 +n11788727 +n11789066 +n11789589 +n11791341 +n11791569 +n11792029 +n11792341 +n11792742 +n11793779 +n11794024 +n11794519 +n11795049 +n11797321 +n11800236 +n11801891 +n11802586 +n11802800 +n11805544 +n11805956 +n11806219 +n11806679 +n11807108 +n11807525 +n11807979 +n11808299 +n11808468 +n11808721 +n11808932 +n11809094 +n11809271 +n11809437 +n11809594 +n11809754 +n11810358 +n11811473 +n11811706 +n11811921 +n11812094 +n11812910 +n11813077 +n11814584 +n11815491 +n11815721 +n11815918 +n11816121 +n11816336 +n11816649 +n11816829 +n11817914 +n11818069 +n11819509 +n11819912 +n11820965 +n11821184 +n11823436 +n11824146 +n11825351 +n11826198 +n11828577 +n11830906 +n11832214 +n11832480 +n11834654 +n11836722 +n11837970 +n11838916 +n11839568 +n11839823 +n11840067 +n11844371 +n11844892 +n11845557 +n11845793 +n11845913 +n11846765 +n11847169 +n11848479 +n11849467 +n11849871 +n11849983 +n11850521 +n11851258 +n11851578 +n11851839 +n11852028 +n11853356 +n11853813 +n11854479 +n11855274 +n11855553 +n11855842 +n11857875 +n11858077 +n11859275 +n11859472 +n11859737 +n11860555 +n11861641 +n11861853 +n11862835 +n11865874 +n11866248 +n11869689 +n11870418 +n11870747 +n11872146 +n11874081 +n11875523 +n11875691 +n11875938 +n11876204 +n11876432 +n11876634 +n11876803 +n11877193 +n11877283 +n11877646 +n11878101 +n11879054 +n11879722 +n11879895 +n11881189 +n11882074 +n11882426 +n11883328 +n11885856 +n11887119 +n11888800 +n11889619 +n11890150 +n11891175 +n11892029 +n11892637 +n11892817 +n11893640 +n11894327 +n11894558 +n11894770 +n11895092 +n11896722 +n11897116 +n11898775 +n11900569 +n11901294 +n11901597 +n11901759 +n11901977 +n11902200 +n11902389 +n11902709 +n11902982 +n11903671 +n11904109 +n11905392 +n11905749 +n11906917 +n11907100 +n11907689 +n11908549 +n11908846 +n11910271 +n11910460 +n11915214 +n11915658 +n11915899 +n11916467 +n11916696 +n11918286 +n11918473 +n11919447 +n11919975 +n11920133 +n11921395 +n11923174 +n11923397 +n11923637 +n11924445 +n11924849 +n11925303 +n11925898 +n11926365 +n11926833 +n11927215 +n11928352 +n11928858 +n11929743 +n11931540 +n11931918 +n11933546 +n11933728 +n11934616 +n11934807 +n11935330 +n11935469 +n11939180 +n11939491 +n11939699 +n11940006 +n11940599 +n11941924 +n11943407 +n11943660 +n11943992 +n11944196 +n11944954 +n11945367 +n11945514 +n11945783 +n11946727 +n11946918 +n11947629 +n11947802 +n11948264 +n11948864 +n11949015 +n11949402 +n11950345 +n11950686 +n11950877 +n11951511 +n11952541 +n11953038 +n11953610 +n11953884 +n11954161 +n11954345 +n11954642 +n11955153 +n11955896 +n11956348 +n11956850 +n11957678 +n11958080 
+n11959632 +n11959862 +n11960245 +n11961100 +n11961446 +n11961871 +n11962272 +n11962667 +n11963932 +n11965218 +n11965627 +n11966083 +n11966215 +n11966617 +n11966896 +n11968704 +n11968931 +n11969166 +n11969607 +n11970586 +n11971248 +n11971406 +n11971783 +n11971927 +n11972291 +n11972759 +n11973341 +n11977303 +n11978233 +n11978551 +n11978713 +n11978961 +n11979527 +n11979715 +n11979964 +n11980318 +n11980682 +n11981192 +n11982115 +n11984144 +n11984542 +n11986511 +n11987126 +n11988596 +n11989087 +n11989393 +n11989869 +n11990167 +n11990313 +n11991263 +n11992806 +n11995092 +n11998888 +n12001707 +n12002428 +n12003167 +n12003696 +n12004547 +n12005656 +n12006766 +n12006930 +n12007196 +n12007406 +n12008252 +n12008487 +n12008749 +n12009420 +n12011620 +n12012111 +n12014085 +n12015221 +n12015525 +n12015959 +n12016567 +n12018760 +n12019035 +n12019827 +n12020184 +n12020507 +n12020736 +n12020941 +n12022054 +n12023108 +n12023407 +n12023726 +n12024445 +n12024690 +n12026018 +n12026476 +n12026981 +n12027222 +n12027658 +n12029635 +n12030908 +n12031139 +n12031927 +n12033709 +n12034141 +n12034384 +n12035631 +n12036939 +n12037499 +n12037691 +n12038038 +n12038406 +n12038585 +n12038898 +n12039317 +n12041446 +n12043444 +n12043673 +n12043836 +n12044467 +n12046028 +n12046428 +n12046815 +n12047345 +n12047884 +n12048056 +n12048399 +n12049282 +n12049562 +n12050533 +n12050959 +n12051103 +n12052447 +n12052787 +n12053405 +n12053690 +n12055516 +n12056217 +n12056601 +n12056758 +n12057211 +n12057447 +n12057660 +n12058192 +n12058630 +n12058822 +n12059314 +n12059625 +n12061380 +n12061614 +n12062468 +n12062626 +n12062781 +n12063639 +n12064389 +n12064591 +n12065316 +n12065777 +n12066018 +n12066261 +n12066630 +n12067193 +n12068432 +n12069217 +n12069679 +n12070016 +n12070381 +n12070583 +n12070712 +n12071744 +n12072722 +n12073554 +n12073991 +n12074408 +n12074867 +n12075010 +n12075151 +n12075299 +n12075830 +n12076223 +n12076577 +n12076852 +n12077944 +n12078172 +n12079120 +n12079963 +n12080395 +n12080820 +n12081215 +n12083113 +n12083591 +n12083847 +n12084158 +n12084555 +n12084890 +n12085267 +n12085664 +n12086012 +n12086192 +n12086539 +n12086778 +n12087961 +n12088223 +n12090890 +n12091213 +n12091377 +n12091550 +n12091953 +n12092262 +n12092417 +n12093329 +n12093600 +n12094612 +n12095020 +n12095647 +n12097396 +n12098403 +n12098524 +n12099342 +n12101870 +n12102133 +n12104238 +n12104501 +n12104734 +n12105125 +n12107710 +n12107970 +n12108871 +n12109365 +n12110085 +n12110778 +n12112008 +n12112609 +n12112918 +n12113195 +n12115180 +n12116429 +n12119238 +n12121610 +n12122725 +n12123741 +n12124627 +n12124818 +n12126084 +n12127460 +n12127768 +n12128071 +n12129134 +n12133462 +n12133682 +n12134025 +n12135049 +n12136392 +n12137120 +n12137569 +n12139575 +n12140903 +n12141167 +n12141385 +n12142085 +n12144313 +n12144580 +n12145477 +n12146311 +n12146654 +n12148757 +n12150722 +n12151615 +n12152532 +n12152722 +n12154773 +n12155009 +n12157056 +n12158031 +n12158443 +n12159055 +n12159388 +n12160303 +n12160490 +n12160857 +n12161056 +n12161969 +n12162181 +n12162425 +n12164363 +n12164656 +n12164881 +n12165170 +n12165758 +n12166128 +n12166424 +n12166793 +n12167075 +n12167436 +n12167602 +n12168565 +n12171098 +n12171316 +n12171966 +n12172364 +n12172481 +n12172906 +n12173069 +n12173664 +n12173912 +n12174311 +n12174521 +n12174926 +n12178896 +n12179122 +n12180168 +n12180885 +n12184912 +n12185859 +n12187247 +n12187891 +n12189429 +n12189987 +n12190410 +n12190869 +n12194147 +n12195533 +n12196336 +n12196527 +n12196694 +n12198286 +n12199790 +n12200143 +n12201331 +n12201580 
+n12202936 +n12203529 +n12203896 +n12204032 +n12204175 +n12205694 +n12214789 +n12215022 +n12215579 +n12217453 +n12223569 +n12223764 +n12224978 +n12225563 +n12227658 +n12228229 +n12228387 +n12230794 +n12237486 +n12237641 +n12238913 +n12240477 +n12242409 +n12243109 +n12244153 +n12244650 +n12244819 +n12245319 +n12246232 +n12249542 +n12252168 +n12256920 +n12257570 +n12258885 +n12260799 +n12261571 +n12261808 +n12262018 +n12262185 +n12263038 +n12263204 +n12263738 +n12263987 +n12264512 +n12265600 +n12266217 +n12266796 +n12267411 +n12267677 +n12268246 +n12269241 +n12269406 +n12270027 +n12270741 +n12270946 +n12271933 +n12272239 +n12272883 +n12273114 +n12273344 +n12273768 +n12273939 +n12274358 +n12274863 +n12275131 +n12275675 +n12275888 +n12276110 +n12276477 +n12276628 +n12276872 +n12277150 +n12277578 +n12277800 +n12278107 +n12278371 +n12278650 +n12278865 +n12279458 +n12279772 +n12280060 +n12281241 +n12281788 +n12281974 +n12282235 +n12282527 +n12282737 +n12282933 +n12283147 +n12283542 +n12284262 +n12284821 +n12285369 +n12285900 +n12286826 +n12286988 +n12287836 +n12288005 +n12288823 +n12289433 +n12290748 +n12291143 +n12291959 +n12293723 +n12294124 +n12294331 +n12294723 +n12294871 +n12295033 +n12295429 +n12295796 +n12296432 +n12300840 +n12301180 +n12301445 +n12302071 +n12302248 +n12302565 +n12303083 +n12303462 +n12304115 +n12304703 +n12304899 +n12305089 +n12305293 +n12305475 +n12305819 +n12305986 +n12306089 +n12306717 +n12307076 +n12307240 +n12307756 +n12309277 +n12311579 +n12312728 +n12315598 +n12315999 +n12316444 +n12316572 +n12317296 +n12318378 +n12318965 +n12319204 +n12319414 +n12320010 +n12320806 +n12321077 +n12322099 +n12322501 +n12322699 +n12325234 +n12328398 +n12328567 +n12329260 +n12329473 +n12330469 +n12330587 +n12330891 +n12331655 +n12332030 +n12332555 +n12333053 +n12333530 +n12333771 +n12334293 +n12334891 +n12336092 +n12336224 +n12336333 +n12336727 +n12336973 +n12337391 +n12337617 +n12338258 +n12338454 +n12338655 +n12338796 +n12339831 +n12340383 +n12340755 +n12342299 +n12342498 +n12342852 +n12343480 +n12344283 +n12344483 +n12344700 +n12344837 +n12345280 +n12345899 +n12346813 +n12347158 +n12350758 +n12352287 +n12352639 +n12352844 +n12352990 +n12353203 +n12353754 +n12356023 +n12356960 +n12357485 +n12360108 +n12360684 +n12360958 +n12361135 +n12361946 +n12362274 +n12362668 +n12367611 +n12368028 +n12368257 +n12368451 +n12369309 +n12371439 +n12373100 +n12374418 +n12374862 +n12377198 +n12383894 +n12384037 +n12384227 +n12384375 +n12384839 +n12385429 +n12385566 +n12387633 +n12387839 +n12388143 +n12388858 +n12388989 +n12389130 +n12389501 +n12390099 +n12390314 +n12392549 +n12393269 +n12397431 +n12399132 +n12399384 +n12400489 +n12400720 +n12401684 +n12402051 +n12402348 +n12402596 +n12402840 +n12403994 +n12405714 +n12406488 +n12406715 +n12406902 +n12407079 +n12407222 +n12407715 +n12407890 +n12408077 +n12408717 +n12409231 +n12409470 +n12409840 +n12412355 +n12412606 +n12413165 +n12413301 +n12413419 +n12413642 +n12413880 +n12414035 +n12414159 +n12414449 +n12414818 +n12414932 +n12415595 +n12416073 +n12416703 +n12418221 +n12420722 +n12421137 +n12421683 +n12421917 +n12422129 +n12426623 +n12426749 +n12427184 +n12427391 +n12427566 +n12427757 +n12428076 +n12428412 +n12428747 +n12429352 +n12432356 +n12433081 +n12433178 +n12433769 +n12435152 +n12435649 +n12435777 +n12437513 +n12437769 +n12437930 +n12441183 +n12441390 +n12441958 +n12443323 +n12446519 +n12448700 +n12449296 +n12449526 +n12450344 +n12450840 +n12451070 +n12451240 +n12451399 +n12451915 +n12452836 +n12453186 +n12454159 +n12454436 +n12454556 +n12454705 
+n12454949 +n12455950 +n12457091 +n12458550 +n12459629 +n12460697 +n12460957 +n12461109 +n12461466 +n12461673 +n12462805 +n12463134 +n12465557 +n12466727 +n12469517 +n12472024 +n12473608 +n12473840 +n12474167 +n12475035 +n12475242 +n12476510 +n12477163 +n12477401 +n12477583 +n12477747 +n12478768 +n12479537 +n12480456 +n12480895 +n12481458 +n12482437 +n12482668 +n12482893 +n12483427 +n12483625 +n12483841 +n12484784 +n12485653 +n12485981 +n12486574 +n12489815 +n12491017 +n12491826 +n12492106 +n12493208 +n12494794 +n12495146 +n12495895 +n12496427 +n12496949 +n12498055 +n12499979 +n12501202 +n12504570 +n12504783 +n12506341 +n12506991 +n12508309 +n12509476 +n12509665 +n12513172 +n12513613 +n12513933 +n12514138 +n12515711 +n12515925 +n12516828 +n12517445 +n12517642 +n12519089 +n12519563 +n12521394 +n12523475 +n12527738 +n12528549 +n12528974 +n12529220 +n12530629 +n12530818 +n12532564 +n12537253 +n12539306 +n12540250 +n12544539 +n12545635 +n12546183 +n12546617 +n12546962 +n12547215 +n12547503 +n12548280 +n12549192 +n12552309 +n12554911 +n12556656 +n12557064 +n12557438 +n12557556 +n12557681 +n12558230 +n12558425 +n12560282 +n12560621 +n12560775 +n12561169 +n12562785 +n12564083 +n12566954 +n12568186 +n12570394 +n12570703 +n12570972 +n12571781 +n12573474 +n12574320 +n12574866 +n12575322 +n12575812 +n12576323 +n12577895 +n12578626 +n12578916 +n12579038 +n12580654 +n12580896 +n12582231 +n12582665 +n12582846 +n12583126 +n12583401 +n12584191 +n12584715 +n12585629 +n12587132 +n12587803 +n12588320 +n12588780 +n12590232 +n12590499 +n12591017 +n12591351 +n12593994 +n12595699 +n12595964 +n12596148 +n12596345 +n12596709 +n12596849 +n12597134 +n12597466 +n12597798 +n12598027 +n12599435 +n12602262 +n12602980 +n12603449 +n12604228 +n12606438 +n12606545 +n12607456 +n12610328 +n12614477 +n12615232 +n12620196 +n12620546 +n12620969 +n12621410 +n12622297 +n12622875 +n12623077 +n12624381 +n12624568 +n12625383 +n12627119 +n12628986 +n12629305 +n12629666 +n12630763 +n12631331 +n12631932 +n12632335 +n12633638 +n12633994 +n12634211 +n12634429 +n12634734 +n12634986 +n12635532 +n12635744 +n12635955 +n12636224 +n12637123 +n12638218 +n12638753 +n12638964 +n12639584 +n12640839 +n12641007 +n12641413 +n12642090 +n12642200 +n12643313 +n12643473 +n12644902 +n12645174 +n12646072 +n12646397 +n12646605 +n12646740 +n12647560 +n12647893 +n12648045 +n12648888 +n12649065 +n12649317 +n12649539 +n12650379 +n12650556 +n12651229 +n12651611 +n12651821 +n12653218 +n12655869 +n12656369 +n12656685 +n12657082 +n12658118 +n12658308 +n12658481 +n12659064 +n12659356 +n12659539 +n12662772 +n12663023 +n12663359 +n12665048 +n12665271 +n12665857 +n12666965 +n12670758 +n12671651 +n12674895 +n12675299 +n12675876 +n12676534 +n12676703 +n12679593 +n12680402 +n12680864 +n12681893 +n12682411 +n12682668 +n12683096 +n12683407 +n12683571 +n12683791 +n12684379 +n12685431 +n12685831 +n12686077 +n12686274 +n12686676 +n12687044 +n12687462 +n12687698 +n12687957 +n12688716 +n12691428 +n12691661 +n12694486 +n12695975 +n12696492 +n12698598 +n12700088 +n12703190 +n12703383 +n12703557 +n12703856 +n12704343 +n12706410 +n12707781 +n12708293 +n12708654 +n12708941 +n12709103 +n12709688 +n12709901 +n12710295 +n12710415 +n12710577 +n12710693 +n12711596 +n12711817 +n12711984 +n12713063 +n12713866 +n12714755 +n12717072 +n12717224 +n12719684 +n12719944 +n12720200 +n12723610 +n12724942 +n12725521 +n12725738 +n12726159 +n12726670 +n12727101 +n12727518 +n12729315 +n12729521 +n12729729 +n12731029 +n12731401 +n12731835 +n12732009 +n12732491 +n12732756 +n12732966 +n12733218 +n12733428 
+n12733647 +n12733870 +n12734070 +n12737383 +n12737898 +n12739332 +n12741222 +n12741792 +n12743009 +n12743352 +n12744387 +n12745386 +n12746884 +n12749049 +n12749456 +n12749679 +n12749852 +n12752205 +n12753007 +n12753245 +n12753573 +n12753762 +n12754003 +n12754468 +n12754648 +n12754781 +n12754981 +n12755225 +n12755387 +n12755727 +n12756457 +n12757303 +n12757458 +n12757816 +n12759273 +n12761284 +n12762049 +n12762896 +n12764202 +n12765115 +n12766595 +n12766869 +n12767648 +n12768682 +n12771192 +n12771390 +n12771597 +n12772753 +n12772908 +n12773651 +n12774299 +n12774641 +n12775919 +n12777680 +n12778398 +n12778605 +n12779603 +n12779851 +n12781940 +n12782530 +n12782915 +n12784889 +n12785724 +n12785889 +n12788854 +n12789054 +n12790430 +n12791064 +n12791329 +n12793015 +n12793284 +n12793494 +n12794135 +n12794367 +n12794985 +n12795352 +n12795555 +n12796022 +n12797860 +n12799776 +n12801520 +n12801781 +n12803754 +n12805146 +n12805561 +n12806015 +n12806732 +n12807251 +n12807409 +n12807773 +n12808007 +n12810595 +n12811027 +n12812478 +n12813189 +n12814643 +n12815198 +n12816508 +n12817464 +n12817694 +n12818346 +n12818966 +n12819728 +n12820853 +n12821505 +n12821895 +n12822115 +n12822769 +n12822955 +n12823717 +n12823859 +n12824053 +n12825497 +n12827270 +n12827537 +n12828220 +n12828379 +n12828791 +n12830222 +n12830568 +n12831932 +n12832315 +n12832538 +n12833149 +n12833985 +n12834798 +n12835331 +n12836212 +n12836337 +n12836508 +n12836862 +n12837803 +n12840362 +n12840749 +n12841007 +n12841193 +n12841354 +n12843557 +n12843970 +n12844939 +n12845413 +n12847008 +n12847374 +n12847927 +n12848499 +n12849061 +n12849279 +n12849416 +n12849952 +n12850168 +n12850336 +n12850906 +n12851469 +n12853482 +n12854048 +n12854600 +n12855494 +n12856091 +n12856287 +n12856479 +n12856680 +n12857779 +n12858150 +n12858397 +n12858618 +n12858871 +n12859986 +n12860365 +n12861345 +n12861541 +n12861892 +n12862512 +n12863624 +n12864160 +n12865037 +n12865562 +n12865708 +n12865824 +n12866002 +n12866162 +n12866459 +n12866635 +n12867826 +n12868019 +n12869061 +n12869478 +n12870535 +n12870682 +n12870891 +n12872458 +n12875269 +n12877838 +n12879527 +n12879963 +n12880244 +n12880462 +n12882779 +n12882945 +n12884100 +n12884260 +n12887293 +n12889219 +n12889713 +n12890265 +n12890490 +n12890685 +n12890928 +n12891093 +n12891305 +n12891469 +n12891643 +n12893463 +n12893993 +n12895811 +n12898774 +n12899537 +n12899752 +n12901724 +n12902662 +n12904314 +n12905412 +n12906214 +n12906498 +n12908093 +n12908645 +n12909421 +n12909917 +n12911079 +n12911440 +n12911673 +n12912670 +n12913791 +n12914923 +n12915568 +n12915811 +n12916179 +n12916511 +n12917901 +n12918609 +n12919403 +n12919646 +n12919847 +n12920204 +n12920955 +n12921868 +n12922763 +n12924623 +n12925179 +n12926480 +n12926689 +n12927013 +n12927494 +n12928071 +n12929403 +n12931542 +n12932173 +n12932365 +n12932966 +n12934036 +n12934174 +n12934479 +n12934985 +n12935609 +n12937130 +n12938193 +n12939282 +n12939874 +n12940226 +n12940609 +n12942395 +n12942572 +n12946849 +n12947313 +n12947544 +n12947895 +n12948053 +n12948251 +n12948495 +n12950126 +n12950314 +n12951146 +n12951835 +n12953206 +n12953484 +n12954799 +n12956367 +n12957924 +n12961879 +n12963628 +n12964920 +n12965626 +n12966945 +n12969131 +n12969425 +n12973443 +n12974987 +n12975804 +n12979829 +n12980840 +n12981443 +n12982468 +n12983048 +n12985420 +n12985773 +n12985857 +n12986227 +n12987056 +n12988158 +n12989938 +n12991184 +n12991837 +n12992177 +n12992868 +n12995601 +n12997654 +n12997919 +n12998815 +n13000891 +n13001041 +n13001206 +n13001366 +n13001529 +n13001930 
+n13002750 +n13002925 +n13003061 +n13003254 +n13003522 +n13003712 +n13004423 +n13005329 +n13005984 +n13006171 +n13006631 +n13006894 +n13007034 +n13007417 +n13008315 +n13009085 +n13009429 +n13011595 +n13012253 +n13012973 +n13013534 +n13013764 +n13014409 +n13014741 +n13017102 +n13017240 +n13019835 +n13020191 +n13020964 +n13021689 +n13022210 +n13022709 +n13023134 +n13024012 +n13025647 +n13028611 +n13029326 +n13029760 +n13032115 +n13032381 +n13032618 +n13032923 +n13033134 +n13033577 +n13034062 +n13035241 +n13035707 +n13035925 +n13037406 +n13038068 +n13038744 +n13039349 +n13040303 +n13040629 +n13041312 +n13043926 +n13044375 +n13044778 +n13046669 +n13049953 +n13050397 +n13052670 +n13052931 +n13053608 +n13054073 +n13054560 +n13055423 +n13055577 +n13055949 +n13060190 +n13061348 +n13062421 +n13065089 +n13066448 +n13068255 +n13072528 +n13074619 +n13077033 +n13077295 +n13079073 +n13083023 +n13084184 +n13084834 +n13085747 +n13090871 +n13091620 +n13094273 +n13099999 +n13100677 +n13102775 +n13103877 +n13104059 +n13107694 +n13107891 +n13108131 +n13108323 +n13108481 +n13108545 +n13108841 +n13111881 +n13121349 +n13122364 +n13123431 +n13125117 +n13126856 +n13127843 +n13128976 +n13130726 +n13131028 +n13131618 +n13132338 +n13132656 +n13133613 +n13133932 +n13134947 +n13135832 +n13136316 +n13136556 +n13137409 +n13138308 +n13138842 +n13139055 +n13141415 +n13141564 +n13142504 +n13145040 +n13145250 +n13146583 +n13147270 +n13147386 +n13148208 +n13150894 +n13154388 +n13154494 +n13155095 +n13155305 +n13155611 +n13157684 +n13158512 +n13160604 +n13163991 +n13172923 +n13173488 +n13173882 +n13177048 +n13177884 +n13180534 +n13180875 +n13181055 +n13181811 +n13183056 +n13183489 +n13185269 +n13187367 +n13188096 +n13190747 +n13192625 +n13193642 +n13193856 +n13194036 +n13194572 +n13195341 +n13196003 +n13197274 +n13197507 +n13198914 +n13199717 +n13199970 +n13200651 +n13201969 +n13205058 +n13206178 +n13206817 +n13207094 +n13207335 +n13208302 +n13209808 +n13211020 +n13213066 +n13214340 +n13215586 +n13219422 +n13219833 +n13219976 +n13220122 +n13221529 +n13223588 +n13223710 +n13223843 +n13226871 +n13229543 +n13230662 +n13231078 +n13232779 +n13234678 +n13235159 +n13235503 +n13237188 +n13238375 +n13238988 +n13579829 +n13653902 +n13862407 +n13863020 +n13863771 +n13864035 +n13865298 +n13865483 +n13865904 +n13868944 +n13869547 +n13869788 +n13869896 +n13872592 +n13872822 +n13873502 +n13873917 +n13875392 +n13875571 +n13876561 +n13878306 +n13879049 +n13879320 +n13880994 +n13881644 +n13882201 +n13882276 +n13882563 +n13886260 +n13895262 +n13896100 +n13896217 +n13897996 +n13898207 +n13900287 +n13900422 +n13901211 +n13901321 +n13901858 +n13902048 +n13902336 +n13905792 +n13907272 +n13908201 +n13908580 +n13912260 +n13912540 +n13914608 +n13915023 +n13915113 +n13916721 +n13918274 +n13918387 +n13919547 +n13919919 +n13926786 +n14131950 +n14564779 +n14685296 +n14696793 +n14698884 +n14765422 +n14785065 +n14810561 +n14820180 +n14844693 +n14858292 +n14900342 +n14908027 +n14915184 +n14919819 +n14973585 +n14974264 +n14976759 +n14976871 +n14977504 +n15019030 +n15062057 +n15067877 +n15075141 +n15086247 +n15089258 +n15090065 +n15091129 +n15091304 +n15091473 +n15091669 +n15091846 +n15092059 +n15092227 +n15092650 +n15092942 +n15093137 +n15093298 +n15102455 +n15102894
diff --git a/results/imagenet22k_synsets.txt b/results/imagenet22k_synsets.txt
new file mode 100644
index 0000000000..0561374dd4
--- /dev/null
+++ b/results/imagenet22k_synsets.txt
@@ -0,0 +1,21841 @@
+n00004475 +n00005787 +n00006024 +n00006484 +n00007846 +n00015388 +n00017222 +n00021265
+n00021939 +n00120010 +n00141669 +n00288000 +n00288190 +n00288384 +n00324978 +n00326094 +n00433458 +n00433661 +n00433802 +n00434075 +n00439826 +n00440039 +n00440218 +n00440382 +n00440509 +n00440643 +n00440747 +n00440941 +n00441073 +n00441824 +n00442115 +n00442437 +n00442847 +n00442981 +n00443231 +n00443375 +n00443517 +n00443692 +n00443803 +n00443917 +n00444142 +n00444340 +n00444490 +n00444651 +n00444846 +n00444937 +n00445055 +n00445226 +n00445351 +n00445685 +n00445802 +n00446311 +n00446411 +n00446493 +n00446632 +n00446804 +n00446980 +n00447073 +n00447221 +n00447361 +n00447463 +n00447540 +n00447957 +n00448126 +n00448232 +n00448466 +n00448640 +n00448748 +n00448872 +n00448958 +n00449054 +n00449168 +n00449295 +n00449517 +n00449695 +n00449796 +n00449892 +n00449977 +n00450070 +n00450335 +n00450700 +n00450866 +n00450998 +n00451186 +n00451370 +n00451563 +n00451635 +n00451768 +n00451866 +n00452034 +n00452152 +n00452293 +n00452734 +n00452864 +n00453126 +n00453313 +n00453396 +n00453478 +n00453631 +n00453935 +n00454237 +n00454395 +n00454493 +n00454624 +n00454855 +n00454983 +n00455076 +n00455173 +n00456465 +n00463246 +n00463543 +n00464277 +n00464478 +n00464651 +n00464894 +n00466273 +n00466377 +n00466524 +n00466630 +n00466712 +n00466880 +n00467320 +n00467536 +n00467719 +n00467995 +n00468299 +n00468480 +n00469651 +n00470554 +n00470682 +n00470830 +n00470966 +n00471437 +n00471613 +n00474568 +n00474657 +n00474769 +n00474881 +n00475014 +n00475142 +n00475273 +n00475403 +n00475535 +n00475661 +n00475787 +n00476140 +n00476235 +n00476389 +n00477392 +n00477639 +n00477827 +n00478262 +n00479076 +n00479440 +n00479616 +n00479734 +n00479887 +n00480211 +n00480366 +n00480508 +n00480885 +n00480993 +n00481803 +n00481938 +n00482122 +n00482298 +n00483205 +n00483313 +n00483409 +n00483508 +n00483605 +n00483705 +n00483848 +n00523513 +n00812526 +n00825773 +n00887544 +n01035504 +n01035667 +n01055165 +n01314388 +n01314663 +n01314781 +n01314910 +n01315213 +n01315330 +n01315581 +n01315805 +n01316422 +n01316579 +n01316734 +n01316949 +n01317089 +n01317294 +n01317391 +n01317541 +n01317813 +n01317916 +n01318053 +n01318279 +n01318381 +n01318478 +n01318660 +n01318894 +n01319001 +n01319187 +n01319467 +n01319685 +n01320872 +n01321123 +n01321230 +n01321456 +n01321579 +n01321770 +n01321854 +n01322221 +n01322343 +n01322508 +n01322604 +n01322685 +n01322898 +n01322983 +n01323068 +n01323155 +n01323261 +n01323355 +n01323493 +n01323599 +n01323781 +n01324305 +n01324431 +n01324610 +n01324799 +n01324916 +n01325060 +n01326291 +n01327909 +n01329186 +n01330126 +n01330497 +n01332181 +n01333082 +n01333483 +n01333610 +n01334217 +n01334690 +n01335218 +n01337191 +n01337734 +n01338685 +n01339083 +n01339336 +n01339471 +n01339801 +n01340014 +n01340522 +n01340785 +n01340935 +n01341090 +n01342269 +n01347583 +n01349735 +n01350226 +n01350701 +n01351170 +n01351315 +n01357328 +n01357507 +n01358572 +n01359762 +n01362336 +n01363719 +n01365474 +n01365885 +n01366700 +n01367772 +n01368672 +n01369358 +n01369484 +n01374703 +n01374846 +n01375204 +n01376237 +n01376437 +n01376543 +n01377278 +n01377510 +n01377694 +n01378545 +n01379389 +n01380610 +n01380754 +n01381044 +n01382033 +n01384084 +n01384164 +n01384687 +n01385017 +n01385330 +n01386007 +n01386182 +n01386354 +n01387065 +n01389507 +n01390123 +n01390763 +n01392275 +n01392380 +n01393486 +n01394040 +n01394492 +n01394771 +n01395254 +n01396048 +n01396617 +n01397114 +n01397690 +n01397871 +n01400247 +n01400391 +n01402600 +n01403457 +n01404365 +n01404495 +n01405007 +n01405616 +n01407798 +n01410457 +n01411450 +n01412694 +n01413457 
+n01414216 +n01415626 +n01415920 +n01416213 +n01418498 +n01418620 +n01419332 +n01419573 +n01419888 +n01421333 +n01421807 +n01422185 +n01422335 +n01422450 +n01423302 +n01423617 +n01424420 +n01425223 +n01427399 +n01429172 +n01438208 +n01438581 +n01439121 +n01439514 +n01439808 +n01440160 +n01440242 +n01440467 +n01440764 +n01441117 +n01441272 +n01441425 +n01441910 +n01442450 +n01442710 +n01442972 +n01443243 +n01443537 +n01443831 +n01444339 +n01444783 +n01445429 +n01445593 +n01445857 +n01446152 +n01446589 +n01446760 +n01447139 +n01447331 +n01447658 +n01447946 +n01448291 +n01448594 +n01448951 +n01449374 +n01449712 +n01449980 +n01450661 +n01450950 +n01451115 +n01451295 +n01451426 +n01451863 +n01452345 +n01453087 +n01453475 +n01453742 +n01454545 +n01454856 +n01455317 +n01455461 +n01455778 +n01456137 +n01456454 +n01456756 +n01457082 +n01457407 +n01457852 +n01458746 +n01458842 +n01459791 +n01460303 +n01461315 +n01461646 +n01462042 +n01462544 +n01462803 +n01464844 +n01466257 +n01467336 +n01467804 +n01468238 +n01468712 +n01469103 +n01469723 +n01470145 +n01470479 +n01470733 +n01470895 +n01471682 +n01472303 +n01472502 +n01473806 +n01474283 +n01474864 +n01475232 +n01475940 +n01476418 +n01477080 +n01477525 +n01477875 +n01478511 +n01478969 +n01479213 +n01479820 +n01480106 +n01480516 +n01480880 +n01481331 +n01481498 +n01482071 +n01482330 +n01483021 +n01483522 +n01483830 +n01484097 +n01484285 +n01484447 +n01484562 +n01484850 +n01485479 +n01486010 +n01486540 +n01486838 +n01487506 +n01488038 +n01488918 +n01489501 +n01489709 +n01489920 +n01490112 +n01490360 +n01490670 +n01491006 +n01491361 +n01491661 +n01491874 +n01492357 +n01492569 +n01492708 +n01492860 +n01493146 +n01493541 +n01493829 +n01494041 +n01494475 +n01494757 +n01494882 +n01495006 +n01495493 +n01495701 +n01496331 +n01497118 +n01497413 +n01497738 +n01498041 +n01498406 +n01498699 +n01498989 +n01499396 +n01499732 +n01500091 +n01500476 +n01500854 +n01501160 +n01501641 +n01501777 +n01501948 +n01502101 +n01503061 +n01503976 +n01504179 +n01504344 +n01514668 +n01514752 +n01514859 +n01514926 +n01515078 +n01515217 +n01515303 +n01516212 +n01517389 +n01517565 +n01517966 +n01518878 +n01519563 +n01519873 +n01520576 +n01521399 +n01521756 +n01522450 +n01523105 +n01524359 +n01524761 +n01525720 +n01526521 +n01526766 +n01527194 +n01527347 +n01527617 +n01527917 +n01528396 +n01528654 +n01528845 +n01529672 +n01530439 +n01530575 +n01531178 +n01531344 +n01531512 +n01531639 +n01531811 +n01531971 +n01532325 +n01532511 +n01532829 +n01533000 +n01533339 +n01533481 +n01533651 +n01533893 +n01534155 +n01534433 +n01534582 +n01534762 +n01535140 +n01535469 +n01535690 +n01536035 +n01536186 +n01536334 +n01536644 +n01536780 +n01537134 +n01537544 +n01537895 +n01538059 +n01538200 +n01538362 +n01538630 +n01538955 +n01539272 +n01539573 +n01539925 +n01540090 +n01540233 +n01540566 +n01540832 +n01541102 +n01541386 +n01541760 +n01541922 +n01542168 +n01542433 +n01542786 +n01543175 +n01543383 +n01543632 +n01543936 +n01544208 +n01544389 +n01544704 +n01545574 +n01546039 +n01546506 +n01546921 +n01547832 +n01548301 +n01548492 +n01548694 +n01548865 +n01549053 +n01549430 +n01549641 +n01549886 +n01550172 +n01550761 +n01551080 +n01551300 +n01551711 +n01552034 +n01552333 +n01552813 +n01553142 +n01553527 +n01553762 +n01554017 +n01554448 +n01555004 +n01555305 +n01555809 +n01556182 +n01556514 +n01557185 +n01557962 +n01558149 +n01558307 +n01558461 +n01558594 +n01558765 +n01558993 +n01559160 +n01559477 +n01559639 +n01559804 +n01560105 +n01560280 +n01560419 +n01560636 +n01560793 +n01560935 +n01561181 +n01561452 
+n01561732 +n01562014 +n01562265 +n01562451 +n01563128 +n01563449 +n01563746 +n01563945 +n01564101 +n01564217 +n01564394 +n01564773 +n01564914 +n01565078 +n01565345 +n01565599 +n01565930 +n01566207 +n01566645 +n01567133 +n01567678 +n01567879 +n01568132 +n01568294 +n01568720 +n01568892 +n01569060 +n01569262 +n01569423 +n01569566 +n01569836 +n01569971 +n01570267 +n01570421 +n01570676 +n01570839 +n01571410 +n01571904 +n01572328 +n01572489 +n01572654 +n01572782 +n01573074 +n01573240 +n01573360 +n01573627 +n01573898 +n01574045 +n01574390 +n01574560 +n01574801 +n01575117 +n01575401 +n01575745 +n01576076 +n01576358 +n01576695 +n01577035 +n01577458 +n01577659 +n01577941 +n01578180 +n01578575 +n01579028 +n01579149 +n01579260 +n01579410 +n01579578 +n01579729 +n01580077 +n01580379 +n01580490 +n01580772 +n01580870 +n01581166 +n01581434 +n01581730 +n01581874 +n01581984 +n01582220 +n01582398 +n01582498 +n01582856 +n01583209 +n01583495 +n01583828 +n01584225 +n01584695 +n01584853 +n01585121 +n01585287 +n01585422 +n01585715 +n01586020 +n01586374 +n01586941 +n01587278 +n01587526 +n01587834 +n01588002 +n01588431 +n01588725 +n01588996 +n01589286 +n01589718 +n01589893 +n01590220 +n01591005 +n01591123 +n01591301 +n01591697 +n01592084 +n01592257 +n01592387 +n01592540 +n01592694 +n01593028 +n01593282 +n01593553 +n01594004 +n01594372 +n01594787 +n01594968 +n01595168 +n01595450 +n01595624 +n01595974 +n01596273 +n01596608 +n01597022 +n01597336 +n01597737 +n01597906 +n01598074 +n01598271 +n01598588 +n01598988 +n01599159 +n01599269 +n01599388 +n01599556 +n01599741 +n01600085 +n01600341 +n01600657 +n01601068 +n01601410 +n01601694 +n01602080 +n01602209 +n01602630 +n01602832 +n01603000 +n01603152 +n01603600 +n01603812 +n01603953 +n01604330 +n01604968 +n01605630 +n01606097 +n01606177 +n01606522 +n01606672 +n01606809 +n01606978 +n01607309 +n01607429 +n01607600 +n01607812 +n01607962 +n01608265 +n01608432 +n01608814 +n01609062 +n01609391 +n01609751 +n01609956 +n01610100 +n01610226 +n01610552 +n01610955 +n01611472 +n01611674 +n01611800 +n01611969 +n01612122 +n01612275 +n01612476 +n01612628 +n01612955 +n01613177 +n01613294 +n01613615 +n01613807 +n01614038 +n01614343 +n01614556 +n01614925 +n01615121 +n01615303 +n01615458 +n01615703 +n01616086 +n01616318 +n01616551 +n01616764 +n01617095 +n01617443 +n01617766 +n01618082 +n01618503 +n01618922 +n01619310 +n01619536 +n01619835 +n01620135 +n01620414 +n01620735 +n01621127 +n01621635 +n01622120 +n01622352 +n01622483 +n01622779 +n01622959 +n01623110 +n01623425 +n01623615 +n01623706 +n01623880 +n01624115 +n01624212 +n01624305 +n01624537 +n01624833 +n01625121 +n01625562 +n01627424 +n01628331 +n01628770 +n01629276 +n01629819 +n01629962 +n01630148 +n01630284 +n01630670 +n01630901 +n01631175 +n01631354 +n01631512 +n01631663 +n01632047 +n01632308 +n01632458 +n01632601 +n01632777 +n01632952 +n01633406 +n01633781 +n01634227 +n01634522 +n01635027 +n01635176 +n01635480 +n01636127 +n01636352 +n01636510 +n01636829 +n01637112 +n01637338 +n01637615 +n01637932 +n01638194 +n01638329 +n01638722 +n01639187 +n01639765 +n01640846 +n01641206 +n01641391 +n01641577 +n01641739 +n01641930 +n01642097 +n01642257 +n01642391 +n01642539 +n01642943 +n01643255 +n01643507 +n01643896 +n01644373 +n01644900 +n01645466 +n01645776 +n01646292 +n01646388 +n01646555 +n01646648 +n01646802 +n01646902 +n01647033 +n01647180 +n01647303 +n01647466 +n01647640 +n01648139 +n01648356 +n01648620 +n01649170 +n01649412 +n01649556 +n01649726 +n01650167 +n01650690 +n01650901 +n01651059 +n01651285 +n01651487 +n01651641 +n01651778 +n01652026 
+n01652297 +n01653026 +n01653223 +n01653509 +n01653773 +n01654083 +n01654637 +n01654863 +n01655344 +n01661091 +n01661592 +n01661818 +n01662060 +n01662622 +n01662784 +n01663401 +n01663782 +n01664065 +n01664369 +n01664492 +n01664674 +n01664990 +n01665541 +n01665932 +n01666228 +n01666585 +n01667114 +n01667432 +n01667778 +n01668091 +n01668436 +n01668665 +n01668892 +n01669191 +n01669372 +n01669654 +n01670092 +n01670535 +n01670802 +n01671125 +n01671479 +n01671705 +n01672032 +n01672432 +n01672611 +n01673282 +n01674216 +n01674464 +n01674990 +n01675352 +n01675722 +n01676755 +n01677366 +n01677747 +n01678043 +n01678343 +n01678657 +n01679005 +n01679307 +n01679626 +n01679962 +n01680264 +n01680478 +n01680655 +n01680813 +n01680983 +n01681328 +n01681653 +n01681940 +n01682172 +n01682435 +n01682714 +n01683201 +n01683558 +n01684133 +n01684578 +n01684741 +n01685439 +n01685808 +n01686044 +n01686220 +n01686403 +n01686609 +n01686808 +n01687128 +n01687290 +n01687665 +n01687978 +n01688243 +n01688961 +n01689081 +n01689411 +n01689811 +n01690149 +n01690466 +n01691217 +n01691652 +n01691951 +n01692333 +n01692523 +n01692864 +n01693175 +n01693334 +n01693783 +n01694178 +n01694311 +n01694709 +n01694955 +n01695060 +n01696633 +n01697178 +n01697457 +n01697611 +n01697749 +n01697978 +n01698434 +n01698640 +n01698782 +n01699040 +n01699254 +n01699675 +n01701551 +n01701859 +n01702256 +n01702479 +n01703011 +n01703161 +n01703569 +n01704103 +n01704323 +n01704626 +n01705010 +n01705591 +n01705934 +n01707294 +n01708106 +n01708998 +n01709484 +n01709876 +n01710177 +n01711160 +n01712008 +n01712752 +n01713170 +n01713764 +n01714231 +n01715888 +n01717016 +n01717229 +n01717467 +n01718096 +n01718414 +n01719403 +n01721174 +n01721898 +n01722670 +n01722998 +n01723579 +n01724231 +n01724840 +n01725086 +n01725713 +n01726203 +n01726692 +n01727646 +n01728266 +n01728572 +n01728920 +n01729322 +n01729672 +n01729977 +n01730185 +n01730307 +n01730563 +n01730812 +n01730960 +n01731137 +n01731277 +n01731545 +n01731764 +n01731941 +n01732093 +n01732244 +n01732614 +n01732789 +n01732989 +n01733214 +n01733466 +n01733757 +n01733957 +n01734104 +n01734418 +n01734637 +n01734808 +n01735189 +n01735439 +n01735577 +n01735728 +n01736032 +n01736375 +n01736796 +n01737021 +n01737472 +n01737728 +n01737875 +n01738065 +n01738306 +n01738601 +n01738731 +n01739094 +n01739381 +n01739647 +n01739871 +n01740131 +n01740551 +n01740885 +n01741232 +n01741442 +n01741562 +n01741943 +n01742172 +n01742447 +n01742821 +n01743086 +n01743605 +n01743936 +n01744100 +n01744270 +n01744401 +n01744555 +n01745125 +n01745484 +n01745902 +n01746191 +n01746359 +n01746952 +n01747285 +n01747589 +n01747885 +n01748264 +n01748389 +n01748686 +n01748906 +n01749244 +n01749582 +n01749742 +n01749939 +n01750167 +n01750437 +n01750743 +n01751036 +n01751215 +n01751472 +n01751748 +n01752165 +n01752585 +n01752736 +n01753032 +n01753180 +n01753488 +n01753959 +n01754370 +n01754533 +n01754876 +n01755581 +n01755740 +n01755952 +n01756089 +n01756291 +n01756508 +n01756733 +n01756916 +n01757115 +n01757343 +n01757677 +n01757901 +n01758141 +n01758757 +n01758895 +n01767661 +n01768244 +n01769347 +n01770081 +n01770393 +n01770795 +n01771100 +n01771417 +n01771766 +n01772222 +n01772664 +n01773157 +n01773549 +n01773797 +n01774097 +n01774384 +n01774750 +n01775062 +n01775370 +n01775730 +n01776192 +n01776313 +n01776705 +n01777304 +n01777467 +n01777649 +n01777909 +n01778217 +n01778487 +n01778621 +n01778801 +n01779148 +n01779463 +n01779629 +n01779939 +n01780142 +n01780426 +n01780696 +n01781071 +n01781570 +n01781698 +n01781875 +n01782209 +n01782516 
+n01783017 +n01783706 +n01784293 +n01784675 +n01785667 +n01786646 +n01787006 +n01787191 +n01787835 +n01788291 +n01788579 +n01788864 +n01789386 +n01789740 +n01790171 +n01790304 +n01790398 +n01790557 +n01790711 +n01790812 +n01791107 +n01791314 +n01791388 +n01791463 +n01791625 +n01791954 +n01792042 +n01792158 +n01792429 +n01792530 +n01792640 +n01792808 +n01792955 +n01793085 +n01793159 +n01793249 +n01793340 +n01793435 +n01793565 +n01793715 +n01794158 +n01794344 +n01794651 +n01795088 +n01795545 +n01795735 +n01795900 +n01796019 +n01796105 +n01796340 +n01796519 +n01796729 +n01797020 +n01797307 +n01797601 +n01797886 +n01798168 +n01798484 +n01798706 +n01798839 +n01798979 +n01799302 +n01799679 +n01800195 +n01800424 +n01800633 +n01801088 +n01801479 +n01801672 +n01801876 +n01802159 +n01802721 +n01803078 +n01803362 +n01803641 +n01803893 +n01804163 +n01804478 +n01804653 +n01804921 +n01805070 +n01805321 +n01805801 +n01806061 +n01806143 +n01806297 +n01806364 +n01806467 +n01806567 +n01806847 +n01807105 +n01807496 +n01807828 +n01808140 +n01808291 +n01808596 +n01809106 +n01809371 +n01809752 +n01810268 +n01810700 +n01811243 +n01811909 +n01812187 +n01812337 +n01812662 +n01812866 +n01813088 +n01813385 +n01813532 +n01813658 +n01813948 +n01814217 +n01814370 +n01814549 +n01814620 +n01814755 +n01814921 +n01815036 +n01815270 +n01815601 +n01816017 +n01816140 +n01816474 +n01816887 +n01817263 +n01817346 +n01817953 +n01818299 +n01818515 +n01818832 +n01819115 +n01819313 +n01819465 +n01819734 +n01820052 +n01820348 +n01820546 +n01820801 +n01821076 +n01821203 +n01821554 +n01821869 +n01822300 +n01822602 +n01823013 +n01823414 +n01823740 +n01824035 +n01824344 +n01824575 +n01824749 +n01825278 +n01825930 +n01826364 +n01826680 +n01826844 +n01827403 +n01827793 +n01828096 +n01828556 +n01828970 +n01829413 +n01829869 +n01830042 +n01830479 +n01830915 +n01831360 +n01831712 +n01832167 +n01832493 +n01832813 +n01833112 +n01833415 +n01833805 +n01834177 +n01834540 +n01835276 +n01835769 +n01835918 +n01836087 +n01836673 +n01837072 +n01837526 +n01838038 +n01838598 +n01839086 +n01839330 +n01839598 +n01839750 +n01839949 +n01840120 +n01840412 +n01840775 +n01841102 +n01841288 +n01841441 +n01841679 +n01841943 +n01842235 +n01842504 +n01842788 +n01843065 +n01843383 +n01843719 +n01844231 +n01844551 +n01844746 +n01844917 +n01845132 +n01845477 +n01846331 +n01847000 +n01847089 +n01847170 +n01847253 +n01847407 +n01847806 +n01847978 +n01848123 +n01848323 +n01848453 +n01848555 +n01848648 +n01848840 +n01848976 +n01849157 +n01849466 +n01849676 +n01849863 +n01850192 +n01850373 +n01850553 +n01850873 +n01851038 +n01851207 +n01851375 +n01851573 +n01851731 +n01851895 +n01852142 +n01852329 +n01852400 +n01852671 +n01852861 +n01853195 +n01853498 +n01853666 +n01853870 +n01854415 +n01854700 +n01854838 +n01855032 +n01855188 +n01855476 +n01855672 +n01856072 +n01856155 +n01856380 +n01856553 +n01856890 +n01857079 +n01857325 +n01857512 +n01857632 +n01857851 +n01858281 +n01858441 +n01858780 +n01858845 +n01858906 +n01859190 +n01859325 +n01859496 +n01859689 +n01859852 +n01860002 +n01860187 +n01860497 +n01860864 +n01861148 +n01861330 +n01861778 +n01862399 +n01871265 +n01871543 +n01871875 +n01872401 +n01872772 +n01873310 +n01874434 +n01874928 +n01875313 +n01875610 +n01876034 +n01876326 +n01876667 +n01877134 +n01877606 +n01877812 +n01878061 +n01878335 +n01878639 +n01878929 +n01879217 +n01879509 +n01879837 +n01880152 +n01880473 +n01880716 +n01880813 +n01881171 +n01881564 +n01881857 +n01882125 +n01882714 +n01883070 +n01883513 +n01883920 +n01884104 +n01884203 +n01884476 +n01884834 
+n01885158 +n01885498 +n01886045 +n01886756 +n01887474 +n01887623 +n01887787 +n01887896 +n01888045 +n01888181 +n01888264 +n01888411 +n01889074 +n01889520 +n01889849 +n01890144 +n01890564 +n01890860 +n01891013 +n01891274 +n01891633 +n01892030 +n01892145 +n01892385 +n01892551 +n01892744 +n01893021 +n01893164 +n01893399 +n01893825 +n01894207 +n01894522 +n01894956 +n01896844 +n01897257 +n01897426 +n01897536 +n01897667 +n01898593 +n01899894 +n01900150 +n01903234 +n01903346 +n01903498 +n01904029 +n01904806 +n01904886 +n01905321 +n01905661 +n01906749 +n01907287 +n01907738 +n01908042 +n01908958 +n01909422 +n01909788 +n01909906 +n01910252 +n01910747 +n01911063 +n01911403 +n01911839 +n01912152 +n01912454 +n01912809 +n01913166 +n01913346 +n01913440 +n01914163 +n01914609 +n01914830 +n01915700 +n01915811 +n01916187 +n01916388 +n01916481 +n01916588 +n01916925 +n01917289 +n01917611 +n01917882 +n01918744 +n01919385 +n01920051 +n01920438 +n01921059 +n01922303 +n01922717 +n01922948 +n01923025 +n01923404 +n01923890 +n01924800 +n01924916 +n01925270 +n01925695 +n01925916 +n01926379 +n01926689 +n01927159 +n01927456 +n01927928 +n01928215 +n01928517 +n01928865 +n01929186 +n01930112 +n01930852 +n01931140 +n01931520 +n01931714 +n01932151 +n01932936 +n01933151 +n01933478 +n01933988 +n01934440 +n01934844 +n01935176 +n01935395 +n01936391 +n01936671 +n01936858 +n01937579 +n01937909 +n01938454 +n01938735 +n01940736 +n01941223 +n01941340 +n01942177 +n01942869 +n01943087 +n01943541 +n01943899 +n01944118 +n01944390 +n01944812 +n01944955 +n01945143 +n01945340 +n01945685 +n01945845 +n01946277 +n01946630 +n01946827 +n01947139 +n01947396 +n01947997 +n01948446 +n01948573 +n01949085 +n01949499 +n01949973 +n01950731 +n01951274 +n01951613 +n01952029 +n01952712 +n01953361 +n01953594 +n01953762 +n01954516 +n01955084 +n01955933 +n01956344 +n01956481 +n01956764 +n01957335 +n01958038 +n01958346 +n01958435 +n01958531 +n01959029 +n01959492 +n01959985 +n01960177 +n01960459 +n01961234 +n01961600 +n01961985 +n01962506 +n01962788 +n01963317 +n01963479 +n01963571 +n01964049 +n01964271 +n01964441 +n01964957 +n01965252 +n01965529 +n01965889 +n01966377 +n01966586 +n01967094 +n01967308 +n01967963 +n01968315 +n01968897 +n01969726 +n01970164 +n01970667 +n01971094 +n01971280 +n01971620 +n01971850 +n01972131 +n01972541 +n01973148 +n01974773 +n01975687 +n01976146 +n01976868 +n01976957 +n01977485 +n01978010 +n01978136 +n01978287 +n01978455 +n01978587 +n01978930 +n01979269 +n01979526 +n01979874 +n01980166 +n01980655 +n01981276 +n01981702 +n01982068 +n01982347 +n01982650 +n01983048 +n01983481 +n01983674 +n01983829 +n01984245 +n01984695 +n01985128 +n01985493 +n01985797 +n01986214 +n01986806 +n01987076 +n01987545 +n01987727 +n01988203 +n01988701 +n01988869 +n01989516 +n01989869 +n01990007 +n01990516 +n01990800 +n01991028 +n01991520 +n01992262 +n01992423 +n01992773 +n01993525 +n01993830 +n01994910 +n01995514 +n01995686 +n01996280 +n01996585 +n01997119 +n01997825 +n01998183 +n01998741 +n01999186 +n01999767 +n02000954 +n02002075 +n02002556 +n02002724 +n02003037 +n02003204 +n02003577 +n02003839 +n02004131 +n02004492 +n02004855 +n02005399 +n02005790 +n02006063 +n02006364 +n02006656 +n02006985 +n02007284 +n02007558 +n02008041 +n02008497 +n02008643 +n02008796 +n02009229 +n02009380 +n02009508 +n02009750 +n02009912 +n02010272 +n02010453 +n02010728 +n02011016 +n02011281 +n02011460 +n02011805 +n02011943 +n02012185 +n02012849 +n02013177 +n02013567 +n02013706 +n02014237 +n02014524 +n02014941 +n02015357 +n02015554 +n02015797 +n02016066 +n02016358 +n02016659 +n02016816 
+n02016956 +n02017213 +n02017475 +n02017725 +n02018027 +n02018207 +n02018368 +n02018795 +n02019190 +n02019438 +n02019929 +n02020219 +n02020578 +n02021050 +n02021281 +n02021795 +n02022684 +n02023341 +n02023855 +n02023992 +n02024185 +n02024479 +n02024763 +n02025043 +n02025239 +n02025389 +n02026059 +n02026629 +n02026948 +n02027075 +n02027357 +n02027492 +n02027897 +n02028035 +n02028175 +n02028342 +n02028451 +n02028727 +n02028900 +n02029087 +n02029378 +n02029706 +n02030035 +n02030224 +n02030287 +n02030568 +n02030837 +n02030996 +n02031298 +n02031585 +n02031934 +n02032222 +n02032355 +n02032480 +n02032769 +n02033041 +n02033208 +n02033324 +n02033561 +n02033779 +n02033882 +n02034129 +n02034295 +n02034661 +n02034971 +n02035210 +n02035402 +n02035656 +n02036053 +n02036228 +n02036711 +n02037110 +n02037464 +n02037869 +n02038141 +n02038466 +n02038993 +n02039171 +n02039497 +n02039780 +n02040266 +n02040505 +n02041085 +n02041246 +n02041678 +n02041875 +n02042046 +n02042180 +n02042472 +n02042759 +n02043063 +n02043333 +n02043808 +n02044178 +n02044517 +n02044778 +n02044908 +n02045369 +n02045596 +n02045864 +n02046171 +n02046759 +n02046939 +n02047045 +n02047260 +n02047411 +n02047517 +n02047614 +n02047975 +n02048115 +n02048353 +n02048698 +n02049088 +n02049532 +n02050004 +n02050313 +n02050442 +n02050586 +n02050809 +n02051059 +n02051474 +n02051845 +n02052204 +n02052365 +n02052775 +n02053083 +n02053425 +n02053584 +n02054036 +n02054502 +n02054711 +n02055107 +n02055658 +n02055803 +n02056228 +n02056570 +n02056728 +n02057035 +n02057330 +n02057731 +n02057898 +n02058221 +n02058594 +n02058747 +n02059162 +n02059541 +n02059852 +n02060133 +n02060411 +n02060569 +n02060889 +n02061217 +n02061560 +n02061853 +n02062017 +n02062430 +n02062744 +n02063224 +n02063662 +n02064000 +n02064338 +n02064816 +n02065026 +n02065263 +n02065407 +n02065726 +n02066245 +n02066707 +n02067240 +n02067603 +n02067768 +n02068206 +n02068541 +n02068974 +n02069412 +n02069701 +n02069974 +n02070174 +n02070430 +n02070624 +n02070776 +n02071028 +n02071294 +n02071636 +n02072040 +n02072493 +n02072798 +n02073250 +n02073831 +n02074367 +n02074726 +n02075296 +n02075612 +n02075927 +n02076196 +n02076402 +n02076779 +n02077152 +n02077384 +n02077658 +n02077787 +n02077923 +n02078292 +n02078574 +n02078738 +n02079005 +n02079389 +n02079851 +n02080146 +n02080415 +n02080713 +n02081060 +n02081571 +n02081798 +n02081927 +n02082056 +n02082190 +n02082791 +n02083346 +n02083672 +n02083780 +n02084071 +n02084732 +n02084861 +n02085019 +n02085118 +n02085272 +n02085374 +n02085620 +n02085782 +n02085936 +n02086079 +n02086240 +n02086346 +n02086478 +n02086646 +n02086753 +n02086910 +n02087046 +n02087122 +n02087314 +n02087394 +n02087551 +n02088094 +n02088238 +n02088364 +n02088466 +n02088632 +n02088745 +n02088839 +n02088992 +n02089078 +n02089232 +n02089468 +n02089555 +n02089725 +n02089867 +n02089973 +n02090129 +n02090253 +n02090379 +n02090475 +n02090622 +n02090721 +n02090827 +n02091032 +n02091134 +n02091244 +n02091467 +n02091635 +n02091831 +n02092002 +n02092173 +n02092339 +n02092468 +n02093056 +n02093256 +n02093428 +n02093647 +n02093754 +n02093859 +n02093991 +n02094114 +n02094258 +n02094433 +n02094562 +n02094721 +n02094931 +n02095050 +n02095212 +n02095314 +n02095412 +n02095570 +n02095727 +n02095889 +n02096051 +n02096177 +n02096294 +n02096437 +n02096585 +n02096756 +n02097047 +n02097130 +n02097209 +n02097298 +n02097474 +n02097658 +n02097786 +n02097967 +n02098105 +n02098286 +n02098413 +n02098550 +n02098806 +n02098906 +n02099029 +n02099267 +n02099429 +n02099601 +n02099712 +n02099849 +n02099997 +n02100236 
+n02100399 +n02100583 +n02100735 +n02100877 +n02101006 +n02101108 +n02101388 +n02101556 +n02101670 +n02101861 +n02102040 +n02102177 +n02102318 +n02102480 +n02102605 +n02102806 +n02102973 +n02103181 +n02103406 +n02103841 +n02104029 +n02104184 +n02104280 +n02104365 +n02104523 +n02104882 +n02105056 +n02105162 +n02105251 +n02105412 +n02105505 +n02105641 +n02105855 +n02106030 +n02106166 +n02106382 +n02106550 +n02106662 +n02106854 +n02106966 +n02107142 +n02107312 +n02107420 +n02107574 +n02107683 +n02107908 +n02108000 +n02108089 +n02108254 +n02108422 +n02108551 +n02108672 +n02108915 +n02109047 +n02109150 +n02109256 +n02109391 +n02109525 +n02109687 +n02109811 +n02109961 +n02110063 +n02110185 +n02110341 +n02110532 +n02110627 +n02110806 +n02110958 +n02111129 +n02111277 +n02111500 +n02111626 +n02111889 +n02112018 +n02112137 +n02112350 +n02112497 +n02112706 +n02112826 +n02113023 +n02113186 +n02113335 +n02113624 +n02113712 +n02113799 +n02113892 +n02113978 +n02114100 +n02114367 +n02114548 +n02114712 +n02114855 +n02115012 +n02115096 +n02115335 +n02115641 +n02115913 +n02116185 +n02116450 +n02116738 +n02117135 +n02117512 +n02117646 +n02117900 +n02118176 +n02118333 +n02118643 +n02118707 +n02119022 +n02119247 +n02119359 +n02119477 +n02119634 +n02119789 +n02120079 +n02120278 +n02120505 +n02120997 +n02121620 +n02121808 +n02122298 +n02122430 +n02122510 +n02122580 +n02122725 +n02122810 +n02122878 +n02122948 +n02123045 +n02123159 +n02123242 +n02123394 +n02123478 +n02123597 +n02123785 +n02123917 +n02124075 +n02124157 +n02124313 +n02124484 +n02124623 +n02125010 +n02125081 +n02125311 +n02125494 +n02125689 +n02125872 +n02126028 +n02126139 +n02126317 +n02126640 +n02126787 +n02127052 +n02127292 +n02127381 +n02127482 +n02127586 +n02127678 +n02127808 +n02128385 +n02128598 +n02128669 +n02128757 +n02128925 +n02129165 +n02129463 +n02129530 +n02129604 +n02129837 +n02129923 +n02129991 +n02130086 +n02130308 +n02130545 +n02130925 +n02131653 +n02132136 +n02132320 +n02132466 +n02132580 +n02132788 +n02133161 +n02133400 +n02133704 +n02134084 +n02134418 +n02134971 +n02135220 +n02135610 +n02135844 +n02136103 +n02136285 +n02136452 +n02136794 +n02137015 +n02137302 +n02137549 +n02137722 +n02137888 +n02138169 +n02138441 +n02138647 +n02138777 +n02139199 +n02139671 +n02140049 +n02140179 +n02140268 +n02140491 +n02140858 +n02141306 +n02141611 +n02141713 +n02142407 +n02142734 +n02142898 +n02143142 +n02143439 +n02143891 +n02144251 +n02144593 +n02144936 +n02145424 +n02145910 +n02146201 +n02146371 +n02146700 +n02146879 +n02147173 +n02147328 +n02147591 +n02147947 +n02148088 +n02148512 +n02148835 +n02148991 +n02149420 +n02149653 +n02149861 +n02150134 +n02150482 +n02150885 +n02151230 +n02152740 +n02152881 +n02152991 +n02153109 +n02153203 +n02153809 +n02156732 +n02156871 +n02157206 +n02157285 +n02159955 +n02160947 +n02161225 +n02161338 +n02161457 +n02161588 +n02162561 +n02163008 +n02163297 +n02164464 +n02165105 +n02165456 +n02165877 +n02166229 +n02166567 +n02166826 +n02167151 +n02167505 +n02167820 +n02167944 +n02168245 +n02168427 +n02168699 +n02169023 +n02169218 +n02169497 +n02169705 +n02169974 +n02170400 +n02170599 +n02170738 +n02170993 +n02171164 +n02171453 +n02171869 +n02172182 +n02172518 +n02172678 +n02172761 +n02172870 +n02173113 +n02173373 +n02173784 +n02174001 +n02174355 +n02174659 +n02175014 +n02175569 +n02175916 +n02176261 +n02176439 +n02176747 +n02176916 +n02177196 +n02177506 +n02177775 +n02177972 +n02178411 +n02178717 +n02179012 +n02179192 +n02179340 +n02179891 +n02180233 +n02180427 +n02180875 +n02181235 +n02181477 +n02181724 +n02182045 
+n02182355 +n02182642 +n02182930 +n02183096 +n02183507 +n02183857 +n02184473 +n02184589 +n02184720 +n02185167 +n02185481 +n02186153 +n02186717 +n02187150 +n02187279 +n02187554 +n02187900 +n02188699 +n02189363 +n02189670 +n02190166 +n02190790 +n02191273 +n02191773 +n02191979 +n02192252 +n02192513 +n02192814 +n02193009 +n02193163 +n02194249 +n02194750 +n02195091 +n02195526 +n02195819 +n02196119 +n02196344 +n02196896 +n02197185 +n02197689 +n02197877 +n02198129 +n02198532 +n02198859 +n02199170 +n02199502 +n02200198 +n02200509 +n02200630 +n02200850 +n02201000 +n02201497 +n02201626 +n02202006 +n02202124 +n02202287 +n02202678 +n02203152 +n02203592 +n02203978 +n02204249 +n02204722 +n02204907 +n02205219 +n02205673 +n02206270 +n02206856 +n02207179 +n02207345 +n02207449 +n02207647 +n02207805 +n02208280 +n02208498 +n02208848 +n02208979 +n02209111 +n02209354 +n02209624 +n02209964 +n02210427 +n02210921 +n02211444 +n02211627 +n02211896 +n02212062 +n02212602 +n02212958 +n02213107 +n02213239 +n02213543 +n02213663 +n02213788 +n02214096 +n02214341 +n02214499 +n02214660 +n02214773 +n02215161 +n02215621 +n02215770 +n02216211 +n02216365 +n02216740 +n02217563 +n02217839 +n02218134 +n02218371 +n02218713 +n02219015 +n02219486 +n02220055 +n02220225 +n02220518 +n02220804 +n02221083 +n02221414 +n02221571 +n02221715 +n02221820 +n02222035 +n02222321 +n02222582 +n02223266 +n02223520 +n02224023 +n02224713 +n02225081 +n02225798 +n02226183 +n02226429 +n02226821 +n02226970 +n02227247 +n02227604 +n02227966 +n02228341 +n02228697 +n02229156 +n02229544 +n02229765 +n02230023 +n02230187 +n02230480 +n02230634 +n02231052 +n02231487 +n02231803 +n02232223 +n02233338 +n02233943 +n02234355 +n02234570 +n02234848 +n02235205 +n02236044 +n02236241 +n02236355 +n02236896 +n02237424 +n02237581 +n02237868 +n02238235 +n02238358 +n02238594 +n02238887 +n02239192 +n02239528 +n02239774 +n02240068 +n02240517 +n02241008 +n02241426 +n02241569 +n02241799 +n02242137 +n02242455 +n02243209 +n02243562 +n02243878 +n02244173 +n02244515 +n02244797 +n02245111 +n02245443 +n02246011 +n02246628 +n02246941 +n02247216 +n02247511 +n02247655 +n02248062 +n02248368 +n02248510 +n02248887 +n02249134 +n02249515 +n02249809 +n02250280 +n02250822 +n02251067 +n02251233 +n02251593 +n02251775 +n02252226 +n02252799 +n02252972 +n02253127 +n02253264 +n02253494 +n02253715 +n02253913 +n02254246 +n02254697 +n02254901 +n02255023 +n02255391 +n02256172 +n02256656 +n02257003 +n02257284 +n02257715 +n02257985 +n02258198 +n02258508 +n02258629 +n02259212 +n02259377 +n02259708 +n02259987 +n02260421 +n02260863 +n02261063 +n02261419 +n02261757 +n02262178 +n02262449 +n02262803 +n02263378 +n02264021 +n02264232 +n02264363 +n02264591 +n02264885 +n02265330 +n02266050 +n02266269 +n02266421 +n02266864 +n02267208 +n02267483 +n02268148 +n02268443 +n02268853 +n02269196 +n02269340 +n02269522 +n02269657 +n02270011 +n02270200 +n02270623 +n02270945 +n02271222 +n02271570 +n02271897 +n02272286 +n02272552 +n02272871 +n02273392 +n02274024 +n02274259 +n02274822 +n02275560 +n02275773 +n02276078 +n02276258 +n02276355 +n02276749 +n02276902 +n02277094 +n02277268 +n02277422 +n02277742 +n02278024 +n02278210 +n02278463 +n02278839 +n02278980 +n02279257 +n02279637 +n02279972 +n02280458 +n02280649 +n02281015 +n02281136 +n02281267 +n02281406 +n02281787 +n02282257 +n02282385 +n02282553 +n02282903 +n02283077 +n02283201 +n02283617 +n02283951 +n02284224 +n02284611 +n02284884 +n02285179 +n02285548 +n02285801 +n02286089 +n02286425 +n02286654 +n02287004 +n02287352 +n02287622 +n02287799 +n02287987 +n02288122 +n02288268 +n02288789 
+n02289307 +n02289610 +n02289988 +n02290340 +n02290664 +n02290870 +n02291220 +n02291572 +n02291748 +n02292085 +n02292401 +n02292692 +n02293352 +n02293868 +n02294097 +n02294407 +n02294577 +n02295064 +n02295390 +n02295870 +n02296021 +n02296276 +n02296612 +n02296912 +n02297294 +n02297442 +n02297819 +n02297938 +n02298095 +n02298218 +n02298541 +n02299039 +n02299157 +n02299378 +n02299505 +n02299846 +n02300173 +n02300554 +n02300797 +n02301452 +n02301935 +n02302244 +n02302459 +n02302620 +n02302969 +n02303284 +n02303585 +n02303777 +n02304036 +n02304432 +n02304657 +n02304797 +n02305085 +n02305407 +n02305636 +n02305929 +n02306433 +n02306825 +n02307176 +n02307325 +n02307515 +n02307681 +n02307910 +n02308033 +n02308139 +n02308471 +n02308618 +n02308735 +n02309120 +n02309242 +n02309337 +n02309841 +n02310000 +n02310149 +n02310334 +n02310585 +n02310717 +n02310941 +n02311060 +n02311617 +n02311748 +n02312006 +n02312175 +n02312325 +n02312427 +n02312640 +n02312912 +n02313008 +n02313360 +n02313709 +n02315487 +n02315821 +n02316707 +n02317335 +n02317781 +n02318167 +n02318687 +n02319095 +n02319308 +n02319555 +n02319829 +n02320127 +n02320465 +n02321170 +n02321529 +n02322047 +n02322992 +n02323449 +n02323902 +n02324045 +n02324431 +n02324514 +n02324587 +n02324850 +n02325366 +n02325722 +n02325884 +n02326074 +n02326432 +n02326763 +n02326862 +n02327028 +n02327175 +n02327435 +n02327656 +n02327842 +n02328009 +n02328150 +n02328429 +n02328820 +n02328942 +n02329401 +n02330245 +n02331046 +n02331309 +n02331842 +n02332156 +n02332447 +n02332755 +n02332954 +n02333190 +n02333546 +n02333733 +n02333819 +n02333909 +n02334201 +n02334460 +n02334728 +n02335127 +n02335231 +n02336011 +n02336275 +n02336641 +n02336826 +n02337001 +n02337171 +n02337332 +n02337598 +n02337902 +n02338145 +n02338449 +n02338722 +n02338901 +n02339282 +n02339376 +n02339922 +n02340186 +n02340358 +n02340640 +n02340930 +n02341288 +n02341475 +n02341616 +n02341974 +n02342250 +n02342534 +n02342885 +n02343058 +n02343320 +n02343772 +n02344175 +n02344270 +n02344408 +n02344528 +n02344918 +n02345078 +n02345340 +n02345600 +n02345774 +n02345997 +n02346170 +n02346627 +n02346998 +n02347274 +n02347573 +n02347744 +n02348173 +n02348788 +n02349205 +n02349390 +n02349557 +n02349847 +n02350105 +n02350357 +n02350670 +n02350989 +n02351343 +n02351870 +n02352002 +n02352290 +n02352591 +n02352932 +n02353172 +n02353411 +n02353861 +n02354162 +n02354320 +n02354621 +n02354781 +n02355227 +n02355477 +n02356381 +n02356612 +n02356798 +n02356977 +n02357111 +n02357401 +n02357585 +n02357911 +n02358091 +n02358390 +n02358584 +n02358712 +n02358890 +n02359047 +n02359324 +n02359556 +n02359667 +n02359915 +n02360282 +n02360480 +n02360781 +n02360933 +n02361090 +n02361337 +n02361587 +n02361706 +n02361850 +n02362194 +n02363005 +n02363245 +n02363351 +n02363996 +n02364520 +n02364673 +n02364840 +n02365108 +n02365480 +n02366002 +n02366301 +n02366579 +n02366959 +n02367492 +n02367812 +n02368116 +n02368399 +n02368821 +n02369293 +n02369555 +n02369680 +n02369935 +n02370137 +n02370525 +n02370806 +n02371344 +n02372140 +n02372584 +n02372952 +n02373336 +n02374149 +n02374451 +n02375302 +n02375438 +n02375757 +n02375862 +n02376542 +n02376679 +n02376791 +n02376918 +n02377063 +n02377181 +n02377291 +n02377388 +n02377480 +n02377603 +n02377703 +n02378149 +n02378299 +n02378415 +n02378541 +n02378625 +n02378755 +n02378870 +n02378969 +n02379081 +n02379183 +n02379329 +n02379430 +n02379630 +n02379743 +n02379908 +n02380052 +n02380335 +n02380464 +n02380583 +n02380745 +n02380875 +n02381004 +n02381119 +n02381261 +n02381364 +n02381460 +n02381609 
+n02381831 +n02382039 +n02382132 +n02382204 +n02382338 +n02382437 +n02382635 +n02382750 +n02382850 +n02382948 +n02383231 +n02384741 +n02384858 +n02385002 +n02385098 +n02385214 +n02385580 +n02385676 +n02385776 +n02385898 +n02386014 +n02386141 +n02386224 +n02386310 +n02386496 +n02386746 +n02386853 +n02386968 +n02387093 +n02387254 +n02387346 +n02387452 +n02387722 +n02387887 +n02387983 +n02388143 +n02388276 +n02388453 +n02388588 +n02388735 +n02388832 +n02388917 +n02389026 +n02389128 +n02389261 +n02389346 +n02389559 +n02389779 +n02389865 +n02389943 +n02390015 +n02390101 +n02390258 +n02390454 +n02390640 +n02390738 +n02390834 +n02390938 +n02391049 +n02391234 +n02391373 +n02391508 +n02391617 +n02391994 +n02392434 +n02392555 +n02392824 +n02393161 +n02393580 +n02393807 +n02393940 +n02394477 +n02395003 +n02395406 +n02395694 +n02395855 +n02395931 +n02396014 +n02396088 +n02396157 +n02396427 +n02396796 +n02397096 +n02397529 +n02397744 +n02397987 +n02398521 +n02399000 +n02401031 +n02402010 +n02402175 +n02402425 +n02403003 +n02403153 +n02403231 +n02403325 +n02403454 +n02403740 +n02403820 +n02403920 +n02404028 +n02404186 +n02404432 +n02404573 +n02404906 +n02405101 +n02405302 +n02405440 +n02405577 +n02405692 +n02405799 +n02405929 +n02406046 +n02406174 +n02406432 +n02406533 +n02406647 +n02406749 +n02406859 +n02406952 +n02407071 +n02407172 +n02407276 +n02407390 +n02407521 +n02407625 +n02407763 +n02407959 +n02408429 +n02408660 +n02408817 +n02409038 +n02409202 +n02409508 +n02409870 +n02410011 +n02410141 +n02410509 +n02410702 +n02410900 +n02411206 +n02411705 +n02411999 +n02412080 +n02412210 +n02412440 +n02412629 +n02412700 +n02412787 +n02412909 +n02412977 +n02413050 +n02413131 +n02413484 +n02413593 +n02413717 +n02413824 +n02413917 +n02414043 +n02414209 +n02414290 +n02414442 +n02414578 +n02414763 +n02414904 +n02415130 +n02415253 +n02415435 +n02415577 +n02415829 +n02416104 +n02416519 +n02416820 +n02416880 +n02416964 +n02417070 +n02417242 +n02417387 +n02417534 +n02417663 +n02417785 +n02417914 +n02418064 +n02418465 +n02418770 +n02419056 +n02419336 +n02419634 +n02419796 +n02420509 +n02420828 +n02421136 +n02421449 +n02421792 +n02422106 +n02422391 +n02422699 +n02423022 +n02423218 +n02423362 +n02423589 +n02424085 +n02424305 +n02424486 +n02424589 +n02424695 +n02424909 +n02425086 +n02425228 +n02425532 +n02425887 +n02426176 +n02426481 +n02426813 +n02427032 +n02427183 +n02427470 +n02427576 +n02427724 +n02428089 +n02428349 +n02428508 +n02428842 +n02429456 +n02430045 +n02430559 +n02430643 +n02430748 +n02430830 +n02431122 +n02431337 +n02431441 +n02431542 +n02431628 +n02431785 +n02431976 +n02432291 +n02432511 +n02432704 +n02432983 +n02433318 +n02433546 +n02433729 +n02433925 +n02434190 +n02434415 +n02434712 +n02434954 +n02435216 +n02435517 +n02435853 +n02436224 +n02436353 +n02436645 +n02437136 +n02437312 +n02437482 +n02437616 +n02437971 +n02438173 +n02438272 +n02438580 +n02439033 +n02439398 +n02441326 +n02441942 +n02442172 +n02442336 +n02442446 +n02442572 +n02442668 +n02442845 +n02443015 +n02443114 +n02443346 +n02443484 +n02443808 +n02443959 +n02444251 +n02444819 +n02445004 +n02445171 +n02445394 +n02445715 +n02446206 +n02446352 +n02446645 +n02447021 +n02447366 +n02447762 +n02448060 +n02448318 +n02448633 +n02448885 +n02449183 +n02449350 +n02449699 +n02450034 +n02450295 +n02450426 +n02450561 +n02450677 +n02450829 +n02451125 +n02451415 +n02451575 +n02453108 +n02453611 +n02454379 +n02454794 +n02455135 +n02455428 +n02455720 +n02456008 +n02456275 +n02456962 +n02457408 +n02457945 +n02458135 +n02458517 +n02459190 +n02460009 +n02460451 
+n02460817 +n02461128 +n02461830 +n02462213 +n02469248 +n02469472 +n02469914 +n02470238 +n02470325 +n02470709 +n02470899 +n02471300 +n02471762 +n02472293 +n02472987 +n02473307 +n02473554 +n02473720 +n02473857 +n02473983 +n02474110 +n02474282 +n02474605 +n02474777 +n02475078 +n02475358 +n02475669 +n02476219 +n02476567 +n02476870 +n02477028 +n02477187 +n02477329 +n02477516 +n02477782 +n02478239 +n02478875 +n02479332 +n02480153 +n02480495 +n02480855 +n02481103 +n02481235 +n02481366 +n02481500 +n02481823 +n02482060 +n02482286 +n02482474 +n02482650 +n02483092 +n02483362 +n02483708 +n02484322 +n02484473 +n02484975 +n02485225 +n02485371 +n02485536 +n02485688 +n02485988 +n02486261 +n02486410 +n02486657 +n02486908 +n02487079 +n02487347 +n02487547 +n02487675 +n02487847 +n02488003 +n02488291 +n02488415 +n02488702 +n02488894 +n02489166 +n02489589 +n02490219 +n02490597 +n02490811 +n02491107 +n02491329 +n02491474 +n02492035 +n02492356 +n02492660 +n02492948 +n02493224 +n02493509 +n02493793 +n02494079 +n02494383 +n02495242 +n02496052 +n02496913 +n02497673 +n02498153 +n02498743 +n02499022 +n02499316 +n02499568 +n02499808 +n02500267 +n02500596 +n02501583 +n02501923 +n02502006 +n02502514 +n02502807 +n02503127 +n02503517 +n02503756 +n02504013 +n02504458 +n02504770 +n02505063 +n02505238 +n02505485 +n02505998 +n02506947 +n02507148 +n02507649 +n02508021 +n02508213 +n02508346 +n02508742 +n02509197 +n02509515 +n02509815 +n02510455 +n02511730 +n02512053 +n02512752 +n02512830 +n02512938 +n02513248 +n02513355 +n02513560 +n02513727 +n02513805 +n02513939 +n02514041 +n02515214 +n02515713 +n02516188 +n02516776 +n02517442 +n02517938 +n02518324 +n02518622 +n02519148 +n02519340 +n02519472 +n02519686 +n02519862 +n02520147 +n02520525 +n02520810 +n02521646 +n02522399 +n02522637 +n02522722 +n02522866 +n02523110 +n02523427 +n02523877 +n02524202 +n02524524 +n02524659 +n02524928 +n02525382 +n02525703 +n02526121 +n02526425 +n02526818 +n02527057 +n02527271 +n02527622 +n02528163 +n02529293 +n02529772 +n02530052 +n02530188 +n02530421 +n02530637 +n02530831 +n02530999 +n02531114 +n02531625 +n02532028 +n02532272 +n02532451 +n02532602 +n02532786 +n02532918 +n02533209 +n02533545 +n02533834 +n02534165 +n02534559 +n02534734 +n02535080 +n02535163 +n02535258 +n02535537 +n02535759 +n02536165 +n02536456 +n02536864 +n02537085 +n02537319 +n02537525 +n02537716 +n02538010 +n02538216 +n02538406 +n02538562 +n02538985 +n02539424 +n02539573 +n02539894 +n02540412 +n02540983 +n02541257 +n02541687 +n02542017 +n02542432 +n02542958 +n02543255 +n02543565 +n02544274 +n02545841 +n02546028 +n02546331 +n02546627 +n02547014 +n02547733 +n02548247 +n02548689 +n02548884 +n02549248 +n02549376 +n02549989 +n02550203 +n02550460 +n02550655 +n02551134 +n02551668 +n02552171 +n02553028 +n02554730 +n02555863 +n02556373 +n02556846 +n02557182 +n02557318 +n02557591 +n02557749 +n02557909 +n02558206 +n02558860 +n02559144 +n02559383 +n02559862 +n02560110 +n02561108 +n02561381 +n02561514 +n02561661 +n02561803 +n02561937 +n02562315 +n02562796 +n02562971 +n02563079 +n02563182 +n02563648 +n02563792 +n02563949 +n02564270 +n02564403 +n02564720 +n02564935 +n02565072 +n02565324 +n02565573 +n02566109 +n02566489 +n02566665 +n02567334 +n02567633 +n02568087 +n02568447 +n02568959 +n02569484 +n02569631 +n02569905 +n02570164 +n02570484 +n02570838 +n02571167 +n02571652 +n02571810 +n02572196 +n02572484 +n02573249 +n02573704 +n02574271 +n02574910 +n02575325 +n02575590 +n02576223 +n02576575 +n02576906 +n02577041 +n02577164 +n02577403 +n02577662 +n02577952 +n02578233 +n02578454 +n02578771 +n02578928 
+n02579303 +n02579557 +n02579762 +n02579928 +n02580336 +n02580679 +n02580830 +n02581108 +n02581482 +n02581642 +n02581957 +n02582220 +n02582349 +n02582721 +n02583567 +n02583890 +n02584145 +n02584449 +n02585872 +n02586238 +n02586543 +n02587051 +n02587300 +n02587479 +n02587618 +n02587877 +n02588286 +n02588794 +n02588945 +n02589062 +n02589196 +n02589316 +n02589623 +n02589796 +n02590094 +n02590495 +n02590702 +n02590987 +n02591330 +n02591613 +n02591911 +n02592055 +n02592371 +n02592734 +n02593019 +n02593191 +n02593453 +n02593679 +n02594250 +n02594942 +n02595056 +n02595339 +n02595702 +n02596067 +n02596252 +n02596381 +n02596720 +n02597004 +n02597367 +n02597608 +n02597818 +n02597972 +n02598134 +n02598573 +n02598878 +n02599052 +n02599347 +n02599557 +n02599958 +n02600298 +n02600503 +n02600798 +n02601344 +n02601767 +n02601921 +n02602059 +n02602405 +n02602760 +n02603317 +n02603540 +n02603862 +n02604157 +n02604480 +n02604954 +n02605316 +n02605703 +n02605936 +n02606052 +n02606384 +n02606751 +n02607072 +n02607201 +n02607470 +n02607862 +n02608284 +n02608547 +n02608860 +n02608996 +n02609302 +n02609823 +n02610066 +n02610373 +n02610664 +n02610980 +n02611561 +n02611898 +n02612167 +n02613181 +n02613572 +n02613820 +n02614140 +n02614482 +n02614653 +n02614978 +n02615298 +n02616128 +n02616397 +n02616851 +n02617537 +n02618094 +n02618513 +n02618827 +n02619165 +n02619550 +n02619861 +n02620167 +n02620578 +n02621258 +n02621908 +n02622249 +n02622547 +n02622712 +n02622955 +n02623445 +n02624167 +n02624551 +n02624807 +n02624987 +n02625258 +n02625612 +n02625851 +n02626089 +n02626265 +n02626471 +n02626762 +n02627037 +n02627292 +n02627532 +n02627835 +n02628062 +n02628259 +n02628600 +n02629230 +n02629716 +n02630281 +n02630615 +n02630739 +n02631041 +n02631330 +n02631475 +n02631628 +n02631775 +n02632039 +n02632494 +n02633422 +n02633677 +n02633977 +n02634545 +n02635154 +n02635580 +n02636170 +n02636405 +n02636550 +n02636854 +n02637179 +n02637475 +n02637977 +n02638596 +n02639087 +n02639605 +n02639922 +n02640242 +n02640626 +n02640857 +n02641379 +n02642107 +n02642644 +n02643112 +n02643316 +n02643566 +n02643836 +n02644113 +n02644360 +n02644501 +n02644665 +n02644817 +n02645538 +n02645691 +n02645953 +n02646667 +n02646892 +n02648035 +n02648625 +n02648916 +n02649218 +n02649546 +n02650050 +n02650413 +n02650541 +n02651060 +n02652132 +n02652668 +n02653145 +n02653497 +n02653786 +n02654112 +n02654425 +n02654745 +n02655020 +n02655523 +n02655848 +n02656032 +n02656301 +n02656670 +n02656969 +n02657368 +n02657694 +n02658079 +n02658531 +n02658811 +n02659176 +n02659478 +n02659808 +n02660091 +n02660208 +n02660519 +n02660640 +n02661017 +n02661473 +n02661618 +n02662239 +n02662397 +n02662559 +n02662825 +n02662993 +n02663211 +n02663485 +n02663849 +n02664285 +n02664642 +n02665250 +n02665985 +n02666196 +n02666501 +n02666624 +n02666943 +n02667093 +n02667244 +n02667379 +n02667478 +n02667576 +n02667693 +n02668393 +n02668613 +n02669295 +n02669442 +n02669534 +n02669723 +n02670186 +n02670382 +n02670683 +n02670935 +n02671780 +n02672152 +n02672371 +n02672831 +n02675077 +n02675219 +n02675522 +n02676097 +n02676261 +n02676566 +n02676670 +n02676938 +n02677028 +n02677136 +n02677436 +n02677718 +n02678010 +n02678384 +n02678897 +n02679142 +n02679257 +n02679961 +n02680110 +n02680512 +n02680638 +n02680754 +n02681392 +n02682311 +n02682407 +n02682569 +n02682811 +n02682922 +n02683183 +n02683323 +n02683454 +n02683558 +n02683791 +n02684248 +n02684356 +n02684515 +n02684649 +n02684962 +n02685082 +n02685253 +n02685365 +n02685701 +n02685995 +n02686121 +n02686227 +n02686379 +n02686568 
+n02687172 +n02687423 +n02687682 +n02687821 +n02687992 +n02688273 +n02688443 +n02689144 +n02689274 +n02689434 +n02689748 +n02689819 +n02690373 +n02690715 +n02691156 +n02692086 +n02692232 +n02692513 +n02692680 +n02692877 +n02693246 +n02693413 +n02693540 +n02694045 +n02694279 +n02694426 +n02694662 +n02694966 +n02695627 +n02695762 +n02696165 +n02696246 +n02696569 +n02696843 +n02697022 +n02697221 +n02697576 +n02697675 +n02697876 +n02698244 +n02698473 +n02698634 +n02699494 +n02699629 +n02699770 +n02699915 +n02700064 +n02700258 +n02700895 +n02701002 +n02701260 +n02701730 +n02702989 +n02703124 +n02703275 +n02704645 +n02704792 +n02704949 +n02705201 +n02705429 +n02705944 +n02706221 +n02706806 +n02708093 +n02708224 +n02708433 +n02708555 +n02708711 +n02708885 +n02709101 +n02709367 +n02709637 +n02709763 +n02709908 +n02710044 +n02710201 +n02710324 +n02710429 +n02710600 +n02711237 +n02711780 +n02712545 +n02712643 +n02713003 +n02713218 +n02713364 +n02713496 +n02714315 +n02714535 +n02714751 +n02715229 +n02715513 +n02715712 +n02716626 +n02720048 +n02720576 +n02721813 +n02723165 +n02724722 +n02725872 +n02726017 +n02726210 +n02726305 +n02726681 +n02727016 +n02727141 +n02727426 +n02727825 +n02728440 +n02729222 +n02729837 +n02729965 +n02730265 +n02730568 +n02730930 +n02731251 +n02731398 +n02731629 +n02731900 +n02732072 +n02732572 +n02732827 +n02733213 +n02733524 +n02734725 +n02734835 +n02735268 +n02735361 +n02735538 +n02735688 +n02736396 +n02736798 +n02737351 +n02737660 +n02738031 +n02738271 +n02738449 +n02738535 +n02738741 +n02738859 +n02738978 +n02739123 +n02739427 +n02739550 +n02739668 +n02739889 +n02740061 +n02740300 +n02740533 +n02740764 +n02741367 +n02741475 +n02742070 +n02742194 +n02742322 +n02742468 +n02742753 +n02743426 +n02744323 +n02744844 +n02744961 +n02745492 +n02745611 +n02745816 +n02746008 +n02746225 +n02746365 +n02746595 +n02746683 +n02746978 +n02747063 +n02747177 +n02747672 +n02747802 +n02748183 +n02748359 +n02748491 +n02749169 +n02749292 +n02749479 +n02749670 +n02749790 +n02749953 +n02750070 +n02750169 +n02750320 +n02750652 +n02751067 +n02751215 +n02751295 +n02751490 +n02752199 +n02752496 +n02752615 +n02752810 +n02752917 +n02753044 +n02753394 +n02753710 +n02754103 +n02754656 +n02755140 +n02755352 +n02755529 +n02755675 +n02755823 +n02755984 +n02756098 +n02756854 +n02756977 +n02757061 +n02757337 +n02757462 +n02757714 +n02757810 +n02757927 +n02758134 +n02758490 +n02758863 +n02758960 +n02759257 +n02759387 +n02759700 +n02759963 +n02760099 +n02760199 +n02760298 +n02760429 +n02760658 +n02760855 +n02761034 +n02761206 +n02761392 +n02761557 +n02761696 +n02761834 +n02762169 +n02762371 +n02762508 +n02762725 +n02762909 +n02763083 +n02763198 +n02763306 +n02763604 +n02763714 +n02763901 +n02764044 +n02764398 +n02764505 +n02764614 +n02764779 +n02764935 +n02765028 +n02766168 +n02766320 +n02766534 +n02766792 +n02767038 +n02767147 +n02767433 +n02767665 +n02767956 +n02768114 +n02768226 +n02768433 +n02768655 +n02768973 +n02769075 +n02769290 +n02769669 +n02769748 +n02769963 +n02770078 +n02770211 +n02770585 +n02770721 +n02770830 +n02771004 +n02771166 +n02771286 +n02771547 +n02771750 +n02772101 +n02772435 +n02772554 +n02772700 +n02773037 +n02773838 +n02774152 +n02774630 +n02774921 +n02775039 +n02775178 +n02775483 +n02775689 +n02775813 +n02775897 +n02776007 +n02776205 +n02776505 +n02776631 +n02776825 +n02776978 +n02777100 +n02777292 +n02777402 +n02777638 +n02777734 +n02777927 +n02778131 +n02778294 +n02778456 +n02778588 +n02778669 +n02779435 +n02779609 +n02779719 +n02779971 +n02780315 +n02780445 +n02780588 +n02780704 
+n02780815 +n02781121 +n02781213 +n02781338 +n02781517 +n02781764 +n02782093 +n02782432 +n02782602 +n02782681 +n02782778 +n02783035 +n02783161 +n02783324 +n02783459 +n02783900 +n02783994 +n02784124 +n02784998 +n02785648 +n02786058 +n02786198 +n02786331 +n02786463 +n02786611 +n02786736 +n02786837 +n02787120 +n02787269 +n02787435 +n02787622 +n02788021 +n02788148 +n02788386 +n02788462 +n02788572 +n02788689 +n02789487 +n02790669 +n02790823 +n02790996 +n02791124 +n02791270 +n02791532 +n02791665 +n02791795 +n02792409 +n02792552 +n02792948 +n02793089 +n02793199 +n02793296 +n02793414 +n02793495 +n02793684 +n02793842 +n02793930 +n02794008 +n02794156 +n02794368 +n02794474 +n02794664 +n02794779 +n02794972 +n02795169 +n02795528 +n02795670 +n02795783 +n02795978 +n02796207 +n02796318 +n02796412 +n02796623 +n02796995 +n02797295 +n02797535 +n02797692 +n02797881 +n02799071 +n02799175 +n02799323 +n02799897 +n02800213 +n02800497 +n02800675 +n02800940 +n02801047 +n02801184 +n02801450 +n02801525 +n02801823 +n02801938 +n02802215 +n02802426 +n02802544 +n02802721 +n02802990 +n02803349 +n02803539 +n02803666 +n02803809 +n02803934 +n02804123 +n02804252 +n02804414 +n02804515 +n02804610 +n02805283 +n02805845 +n02805983 +n02806088 +n02806379 +n02806530 +n02806762 +n02806875 +n02806992 +n02807133 +n02807523 +n02807616 +n02807731 +n02808185 +n02808304 +n02808440 +n02808829 +n02808968 +n02809105 +n02809241 +n02809364 +n02809491 +n02809605 +n02809736 +n02810139 +n02810270 +n02810471 +n02810782 +n02811059 +n02811204 +n02811350 +n02811468 +n02811618 +n02811719 +n02811936 +n02812201 +n02812342 +n02812631 +n02812785 +n02812949 +n02813252 +n02813399 +n02813544 +n02813645 +n02813752 +n02813981 +n02814116 +n02814338 +n02814428 +n02814533 +n02814774 +n02814860 +n02815478 +n02815749 +n02815834 +n02815950 +n02816494 +n02816656 +n02816768 +n02817031 +n02817251 +n02817386 +n02817516 +n02817650 +n02817799 +n02818135 +n02818254 +n02818687 +n02818832 +n02819697 +n02820085 +n02820210 +n02820556 +n02820675 +n02821202 +n02821415 +n02821543 +n02821627 +n02821943 +n02822064 +n02822220 +n02822399 +n02822579 +n02822762 +n02822865 +n02823124 +n02823335 +n02823428 +n02823510 +n02823586 +n02823750 +n02823848 +n02823964 +n02824058 +n02824152 +n02824319 +n02824448 +n02825153 +n02825240 +n02825442 +n02825657 +n02825872 +n02825961 +n02826068 +n02826259 +n02826459 +n02826589 +n02826683 +n02826812 +n02826886 +n02827148 +n02827606 +n02828115 +n02828299 +n02828427 +n02828884 +n02829246 +n02829353 +n02829510 +n02829596 +n02830157 +n02831237 +n02831335 +n02831595 +n02831724 +n02831894 +n02831998 +n02833040 +n02833140 +n02833275 +n02833403 +n02833793 +n02834027 +n02834397 +n02834506 +n02834642 +n02834778 +n02835271 +n02835412 +n02835551 +n02835724 +n02835829 +n02835915 +n02836035 +n02836174 +n02836268 +n02836392 +n02836513 +n02836607 +n02836900 +n02837134 +n02837567 +n02837789 +n02837887 +n02838014 +n02838178 +n02838345 +n02838577 +n02838728 +n02838958 +n02839110 +n02839351 +n02839592 +n02839910 +n02840134 +n02840245 +n02840515 +n02840619 +n02841063 +n02841187 +n02841315 +n02841506 +n02841641 +n02841847 +n02842133 +n02842573 +n02842809 +n02843029 +n02843158 +n02843276 +n02843465 +n02843553 +n02843684 +n02843777 +n02843909 +n02844056 +n02844214 +n02844307 +n02844714 +n02845130 +n02845293 +n02845985 +n02846141 +n02846260 +n02846511 +n02846619 +n02846733 +n02846874 +n02847461 +n02847631 +n02847852 +n02848118 +n02848216 +n02848523 +n02848806 +n02848921 +n02849154 +n02849885 +n02850060 +n02850358 +n02850732 +n02850950 +n02851099 +n02851795 +n02851939 +n02852043 
+n02852173 +n02852360 +n02853016 +n02853218 +n02853336 +n02853745 +n02853870 +n02854378 +n02854532 +n02854630 +n02854739 +n02854926 +n02855089 +n02855390 +n02855701 +n02855793 +n02855925 +n02856013 +n02856237 +n02856362 +n02857365 +n02857477 +n02857644 +n02857907 +n02858304 +n02859184 +n02859343 +n02859443 +n02859557 +n02859729 +n02859955 +n02860415 +n02860640 +n02860847 +n02861022 +n02861147 +n02861286 +n02861387 +n02861509 +n02861658 +n02861777 +n02861886 +n02862048 +n02862916 +n02863014 +n02863176 +n02863340 +n02863426 +n02863536 +n02863638 +n02863750 +n02864122 +n02864504 +n02864593 +n02864987 +n02865351 +n02865665 +n02865931 +n02866106 +n02866386 +n02866578 +n02867401 +n02867592 +n02867715 +n02867966 +n02868240 +n02868429 +n02868546 +n02868638 +n02868975 +n02869155 +n02869249 +n02869563 +n02869737 +n02869837 +n02870526 +n02870676 +n02870772 +n02870880 +n02871005 +n02871147 +n02871314 +n02871439 +n02871525 +n02871631 +n02871824 +n02871963 +n02872333 +n02872529 +n02872752 +n02873520 +n02873623 +n02873733 +n02873839 +n02874086 +n02874214 +n02874336 +n02874442 +n02874537 +n02874642 +n02874750 +n02875436 +n02875626 +n02875948 +n02876084 +n02876326 +n02876457 +n02876657 +n02877266 +n02877513 +n02877642 +n02877765 +n02877962 +n02878107 +n02878222 +n02878425 +n02878534 +n02878628 +n02878796 +n02879087 +n02879309 +n02879422 +n02879517 +n02879718 +n02880189 +n02880393 +n02880546 +n02880842 +n02880940 +n02881193 +n02881546 +n02881757 +n02881906 +n02882190 +n02882301 +n02882483 +n02882647 +n02882894 +n02883004 +n02883101 +n02883205 +n02883344 +n02884225 +n02884450 +n02884859 +n02884994 +n02885108 +n02885233 +n02885338 +n02885462 +n02885882 +n02886321 +n02886434 +n02886599 +n02887079 +n02887209 +n02887489 +n02887832 +n02887970 +n02888270 +n02888429 +n02888569 +n02888898 +n02889425 +n02889646 +n02889856 +n02889996 +n02890188 +n02890351 +n02890513 +n02890662 +n02890804 +n02890940 +n02891188 +n02891788 +n02892201 +n02892304 +n02892392 +n02892499 +n02892626 +n02892767 +n02892948 +n02893269 +n02893418 +n02893608 +n02893692 +n02893941 +n02894024 +n02894158 +n02894337 +n02894605 +n02894847 +n02895008 +n02895154 +n02895328 +n02895438 +n02896074 +n02896294 +n02896442 +n02896694 +n02896856 +n02896949 +n02897097 +n02897389 +n02897820 +n02898093 +n02898173 +n02898269 +n02898369 +n02898585 +n02898711 +n02899439 +n02900160 +n02900459 +n02900594 +n02900705 +n02900857 +n02900987 +n02901114 +n02901259 +n02901377 +n02901481 +n02901620 +n02901793 +n02901901 +n02902079 +n02902687 +n02902816 +n02902916 +n02903006 +n02903126 +n02903204 +n02903727 +n02903852 +n02904109 +n02904233 +n02904505 +n02904640 +n02904803 +n02904927 +n02905036 +n02905152 +n02905886 +n02906734 +n02906963 +n02907082 +n02907296 +n02907391 +n02907656 +n02907873 +n02908123 +n02908217 +n02908773 +n02908951 +n02909053 +n02909165 +n02909285 +n02909706 +n02909870 +n02910145 +n02910241 +n02910353 +n02910542 +n02910701 +n02910864 +n02910964 +n02911332 +n02911485 +n02912065 +n02912319 +n02912557 +n02912894 +n02913152 +n02914991 +n02915904 +n02916065 +n02916179 +n02916350 +n02916936 +n02917067 +n02917377 +n02917521 +n02917607 +n02917742 +n02917964 +n02918112 +n02918330 +n02918455 +n02918595 +n02918831 +n02918964 +n02919148 +n02919308 +n02919414 +n02919648 +n02919792 +n02919890 +n02919976 +n02920083 +n02920164 +n02920259 +n02920369 +n02920503 +n02920658 +n02921029 +n02921195 +n02921292 +n02921406 +n02921592 +n02921756 +n02921884 +n02922159 +n02922292 +n02922461 +n02922578 +n02922798 +n02922877 +n02923129 +n02923535 +n02923682 +n02923915 +n02924116 +n02925009 
+n02925107 +n02925385 +n02925519 +n02925666 +n02926426 +n02926591 +n02927053 +n02927161 +n02927764 +n02927887 +n02928049 +n02928299 +n02928413 +n02928608 +n02929184 +n02929289 +n02929462 +n02929582 +n02929923 +n02930080 +n02930214 +n02930339 +n02930645 +n02930766 +n02931013 +n02931148 +n02931294 +n02931417 +n02931836 +n02932019 +n02932400 +n02932523 +n02932693 +n02932891 +n02933112 +n02933340 +n02933462 +n02933649 +n02933750 +n02933990 +n02934168 +n02934451 +n02935017 +n02935387 +n02935490 +n02935658 +n02935891 +n02936176 +n02936281 +n02936402 +n02936570 +n02936714 +n02936921 +n02937010 +n02937336 +n02937958 +n02938218 +n02938321 +n02938886 +n02939185 +n02939763 +n02939866 +n02940289 +n02940385 +n02940570 +n02940706 +n02941095 +n02941228 +n02941845 +n02942015 +n02942147 +n02942349 +n02942460 +n02942699 +n02943241 +n02943465 +n02943686 +n02943871 +n02943964 +n02944075 +n02944146 +n02944256 +n02944459 +n02944579 +n02944826 +n02945161 +n02945813 +n02945964 +n02946127 +n02946270 +n02946348 +n02946509 +n02946753 +n02946824 +n02946921 +n02947212 +n02947660 +n02947818 +n02947977 +n02948072 +n02948293 +n02948403 +n02948557 +n02948834 +n02948942 +n02949084 +n02949202 +n02949356 +n02949542 +n02950018 +n02950120 +n02950186 +n02950256 +n02950482 +n02950632 +n02950826 +n02950943 +n02951358 +n02951585 +n02951703 +n02951843 +n02952109 +n02952237 +n02952374 +n02952485 +n02952585 +n02952674 +n02952798 +n02952935 +n02953056 +n02953197 +n02953455 +n02953552 +n02953673 +n02953850 +n02954163 +n02954340 +n02954938 +n02955065 +n02955247 +n02955540 +n02955767 +n02956393 +n02956699 +n02956795 +n02956883 +n02957008 +n02957135 +n02957252 +n02957427 +n02957755 +n02957862 +n02958343 +n02959942 +n02960352 +n02960690 +n02960903 +n02961035 +n02961225 +n02961451 +n02961544 +n02961947 +n02962061 +n02962200 +n02962414 +n02962843 +n02962938 +n02963159 +n02963302 +n02963503 +n02963692 +n02963821 +n02963987 +n02964075 +n02964196 +n02964295 +n02964634 +n02964843 +n02964934 +n02965024 +n02965122 +n02965216 +n02965300 +n02965529 +n02965783 +n02966068 +n02966193 +n02966545 +n02966687 +n02966786 +n02966942 +n02967081 +n02967170 +n02967294 +n02967407 +n02967540 +n02967626 +n02967782 +n02967991 +n02968074 +n02968210 +n02968333 +n02968473 +n02969010 +n02969163 +n02969323 +n02969527 +n02969634 +n02969886 +n02970408 +n02970534 +n02970685 +n02970849 +n02971167 +n02971356 +n02971473 +n02971579 +n02971691 +n02971940 +n02972397 +n02972714 +n02972934 +n02973017 +n02973236 +n02973805 +n02973904 +n02974003 +n02974348 +n02974454 +n02974565 +n02974697 +n02975212 +n02975589 +n02975994 +n02976123 +n02976249 +n02976350 +n02976455 +n02976552 +n02976641 +n02976815 +n02976939 +n02977058 +n02977330 +n02977438 +n02977619 +n02977936 +n02978055 +n02978205 +n02978367 +n02978478 +n02978753 +n02978881 +n02979074 +n02979186 +n02979290 +n02979399 +n02979516 +n02979836 +n02980036 +n02980203 +n02980441 +n02980625 +n02981024 +n02981198 +n02981321 +n02981565 +n02981792 +n02981911 +n02982232 +n02982416 +n02982515 +n02982599 +n02983072 +n02983189 +n02983357 +n02983507 +n02983904 +n02984061 +n02984203 +n02984469 +n02984699 +n02985137 +n02985606 +n02985828 +n02985963 +n02986066 +n02986160 +n02986348 +n02987047 +n02987379 +n02987492 +n02987706 +n02987823 +n02987950 +n02988066 +n02988156 +n02988304 +n02988486 +n02988679 +n02988963 +n02989099 +n02990373 +n02990758 +n02991048 +n02991302 +n02991847 +n02992032 +n02992211 +n02992368 +n02992529 +n02992795 +n02993194 +n02993368 +n02993546 +n02994573 +n02994743 +n02995345 +n02995871 +n02995998 +n02997391 +n02997607 +n02997910 
+n02998003 +n02998107 +n02998563 +n02998696 +n02998841 +n02999138 +n02999410 +n02999936 +n03000134 +n03000247 +n03000530 +n03000684 +n03001115 +n03001282 +n03001540 +n03001627 +n03002096 +n03002210 +n03002341 +n03002555 +n03002711 +n03002816 +n03002948 +n03003091 +n03003633 +n03004275 +n03004409 +n03004531 +n03004620 +n03004713 +n03004824 +n03005033 +n03005147 +n03005285 +n03005515 +n03005619 +n03006626 +n03006788 +n03006903 +n03007130 +n03007297 +n03007444 +n03007591 +n03008177 +n03008817 +n03008976 +n03009111 +n03009269 +n03009794 +n03010473 +n03010656 +n03010795 +n03010915 +n03011018 +n03011355 +n03011741 +n03012013 +n03012159 +n03012373 +n03012499 +n03012644 +n03012734 +n03012897 +n03013006 +n03013438 +n03013580 +n03013850 +n03014440 +n03014705 +n03015149 +n03015254 +n03015478 +n03015631 +n03015851 +n03016209 +n03016389 +n03016609 +n03016737 +n03016868 +n03016953 +n03017070 +n03017168 +n03017698 +n03017835 +n03018209 +n03018349 +n03018614 +n03018712 +n03018848 +n03019198 +n03019304 +n03019434 +n03019685 +n03019806 +n03019938 +n03020034 +n03020416 +n03020692 +n03021228 +n03024064 +n03024233 +n03024333 +n03024518 +n03025070 +n03025165 +n03025250 +n03025886 +n03026506 +n03026907 +n03027001 +n03027108 +n03027250 +n03027505 +n03027625 +n03028079 +n03028596 +n03028785 +n03029066 +n03029197 +n03029296 +n03029445 +n03029925 +n03030262 +n03030353 +n03030557 +n03030880 +n03031012 +n03031152 +n03031422 +n03031756 +n03032252 +n03032453 +n03032811 +n03033267 +n03033362 +n03033986 +n03034244 +n03034405 +n03034516 +n03034663 +n03035252 +n03035510 +n03035715 +n03035832 +n03036022 +n03036149 +n03036244 +n03036341 +n03036469 +n03036701 +n03036866 +n03037108 +n03037228 +n03037404 +n03037590 +n03037709 +n03038041 +n03038281 +n03038480 +n03038685 +n03038870 +n03039015 +n03039259 +n03039353 +n03039493 +n03039827 +n03039947 +n03040229 +n03040376 +n03040836 +n03041114 +n03041265 +n03041449 +n03041632 +n03041810 +n03042139 +n03042384 +n03042490 +n03042697 +n03042829 +n03042984 +n03043173 +n03043274 +n03043423 +n03043693 +n03043798 +n03043958 +n03044671 +n03044801 +n03044934 +n03045074 +n03045228 +n03045337 +n03045698 +n03045800 +n03046029 +n03046133 +n03046257 +n03046802 +n03046921 +n03047052 +n03047171 +n03047690 +n03047799 +n03047941 +n03048883 +n03049066 +n03049326 +n03049457 +n03049782 +n03049924 +n03050026 +n03050453 +n03050546 +n03050655 +n03050864 +n03051041 +n03051249 +n03051396 +n03051540 +n03052464 +n03052917 +n03053047 +n03053976 +n03054491 +n03054605 +n03054901 +n03055159 +n03055418 +n03055670 +n03055857 +n03056097 +n03056215 +n03056288 +n03056493 +n03056583 +n03056873 +n03057021 +n03057541 +n03057636 +n03057724 +n03057841 +n03057920 +n03058107 +n03058603 +n03058949 +n03059103 +n03059236 +n03059366 +n03059685 +n03059934 +n03060728 +n03061050 +n03061211 +n03061345 +n03061505 +n03061674 +n03061819 +n03061893 +n03062015 +n03062122 +n03062245 +n03062336 +n03062651 +n03062798 +n03062985 +n03063073 +n03063199 +n03063338 +n03063485 +n03063599 +n03063689 +n03063834 +n03063968 +n03064250 +n03064350 +n03064562 +n03064758 +n03064935 +n03065243 +n03065424 +n03065708 +n03066232 +n03066359 +n03066464 +n03066849 +n03067093 +n03067212 +n03067339 +n03067518 +n03068181 +n03068998 +n03069752 +n03070059 +n03070193 +n03070396 +n03070587 +n03070854 +n03071021 +n03071160 +n03071288 +n03071552 +n03072056 +n03072201 +n03072440 +n03072682 +n03073296 +n03073384 +n03073545 +n03073694 +n03073977 +n03074380 +n03074855 +n03075097 +n03075248 +n03075370 +n03075500 +n03075634 +n03075768 +n03075946 +n03076411 +n03076623 +n03076708 
+n03077442 +n03077616 +n03077741 +n03078287 +n03078506 +n03078670 +n03078802 +n03078995 +n03079136 +n03079230 +n03079494 +n03079616 +n03079741 +n03080309 +n03080497 +n03080633 +n03080731 +n03080904 +n03081859 +n03081986 +n03082127 +n03082280 +n03082450 +n03082656 +n03082807 +n03082979 +n03084420 +n03084834 +n03085013 +n03085219 +n03085333 +n03085602 +n03085781 +n03085915 +n03086183 +n03086457 +n03086580 +n03086670 +n03086868 +n03087069 +n03087245 +n03087366 +n03087521 +n03087643 +n03087816 +n03088389 +n03088580 +n03088707 +n03089477 +n03089624 +n03089753 +n03089879 +n03090000 +n03090172 +n03090437 +n03090710 +n03090856 +n03091044 +n03091223 +n03091374 +n03091907 +n03092053 +n03092166 +n03092314 +n03092476 +n03092656 +n03092883 +n03093427 +n03093792 +n03094159 +n03094503 +n03095699 +n03095965 +n03096439 +n03096960 +n03097362 +n03097535 +n03097673 +n03098140 +n03098515 +n03098688 +n03098806 +n03098959 +n03099147 +n03099274 +n03099454 +n03099622 +n03099771 +n03099945 +n03100240 +n03100346 +n03100490 +n03100897 +n03101156 +n03101302 +n03101375 +n03101517 +n03101664 +n03101796 +n03101986 +n03102371 +n03102516 +n03102654 +n03102859 +n03103128 +n03103396 +n03103563 +n03103904 +n03104019 +n03104512 +n03105088 +n03105214 +n03105306 +n03105467 +n03105645 +n03105810 +n03105974 +n03106722 +n03106898 +n03107046 +n03107488 +n03107716 +n03108455 +n03108624 +n03108759 +n03108853 +n03109033 +n03109150 +n03109253 +n03109693 +n03109881 +n03110202 +n03110669 +n03111041 +n03111177 +n03111296 +n03111690 +n03112240 +n03112719 +n03112869 +n03113152 +n03113505 +n03113657 +n03113835 +n03114041 +n03114236 +n03114379 +n03114504 +n03114743 +n03114839 +n03115014 +n03115180 +n03115400 +n03115663 +n03115762 +n03115897 +n03116008 +n03116163 +n03116530 +n03116767 +n03117199 +n03117642 +n03118346 +n03118969 +n03119203 +n03119396 +n03119510 +n03120198 +n03120491 +n03120778 +n03121040 +n03121190 +n03121298 +n03121431 +n03121897 +n03122073 +n03122202 +n03122295 +n03122748 +n03123553 +n03123666 +n03123809 +n03123917 +n03124043 +n03124170 +n03124313 +n03124474 +n03124590 +n03125057 +n03125588 +n03125729 +n03125870 +n03126090 +n03126385 +n03126580 +n03126707 +n03126927 +n03127024 +n03127203 +n03127408 +n03127531 +n03127747 +n03127925 +n03128085 +n03128248 +n03128427 +n03128519 +n03129001 +n03129471 +n03129636 +n03129753 +n03129848 +n03130066 +n03130233 +n03130563 +n03130761 +n03130866 +n03131193 +n03131574 +n03131669 +n03131967 +n03132076 +n03132261 +n03132438 +n03132666 +n03132776 +n03133050 +n03133415 +n03133878 +n03134118 +n03134232 +n03134394 +n03134739 +n03134853 +n03135030 +n03135532 +n03135656 +n03135788 +n03135917 +n03136051 +n03136254 +n03136369 +n03136504 +n03137473 +n03137579 +n03138128 +n03138217 +n03138344 +n03138669 +n03139089 +n03139464 +n03139640 +n03139998 +n03140126 +n03140292 +n03140431 +n03140546 +n03140652 +n03140771 +n03140900 +n03141065 +n03141327 +n03141455 +n03141612 +n03141702 +n03141823 +n03142099 +n03142205 +n03142325 +n03142431 +n03142679 +n03143400 +n03143572 +n03143754 +n03144156 +n03144873 +n03144982 +n03145147 +n03145277 +n03145384 +n03145522 +n03145719 +n03145843 +n03146219 +n03146342 +n03146449 +n03146560 +n03146687 +n03146777 +n03146846 +n03147084 +n03147156 +n03147280 +n03147509 +n03148324 +n03148518 +n03148727 +n03148808 +n03149135 +n03149401 +n03149686 +n03149810 +n03150232 +n03150511 +n03150661 +n03150795 +n03151077 +n03152303 +n03152951 +n03153246 +n03153585 +n03153948 +n03154073 +n03154316 +n03154446 +n03154616 +n03154745 +n03154895 +n03155178 +n03155502 +n03155915 +n03156071 +n03156279 
+n03156405 +n03156767 +n03157348 +n03158186 +n03158414 +n03158668 +n03158796 +n03158885 +n03159535 +n03159640 +n03160001 +n03160186 +n03160309 +n03160740 +n03161016 +n03161450 +n03161893 +n03162297 +n03162460 +n03162556 +n03162714 +n03162818 +n03163222 +n03163381 +n03163488 +n03163798 +n03163973 +n03164192 +n03164344 +n03164605 +n03164722 +n03164929 +n03165096 +n03165211 +n03165466 +n03165616 +n03165823 +n03165955 +n03166120 +n03166514 +n03166600 +n03166685 +n03166809 +n03166951 +n03167153 +n03167978 +n03168107 +n03168217 +n03168543 +n03168663 +n03168774 +n03168933 +n03169063 +n03169176 +n03170292 +n03170459 +n03170635 +n03170872 +n03171228 +n03171356 +n03171635 +n03171910 +n03172038 +n03172738 +n03172965 +n03173270 +n03173387 +n03173929 +n03174079 +n03174450 +n03174731 +n03175081 +n03175189 +n03175301 +n03175457 +n03175604 +n03175843 +n03175983 +n03176238 +n03176386 +n03176594 +n03176763 +n03177059 +n03177165 +n03177708 +n03178000 +n03178173 +n03178430 +n03178538 +n03178674 +n03179701 +n03179910 +n03180011 +n03180384 +n03180504 +n03180732 +n03180865 +n03180969 +n03181293 +n03181667 +n03182140 +n03182232 +n03182912 +n03183080 +n03185868 +n03186199 +n03186285 +n03186818 +n03187037 +n03187153 +n03187268 +n03187595 +n03187751 +n03188290 +n03188531 +n03188725 +n03188871 +n03189083 +n03189311 +n03189818 +n03190458 +n03191286 +n03191451 +n03191561 +n03191776 +n03192543 +n03192907 +n03193107 +n03193260 +n03193423 +n03193597 +n03193754 +n03194170 +n03194297 +n03194812 +n03194992 +n03195332 +n03195485 +n03195799 +n03195959 +n03196062 +n03196217 +n03196324 +n03196598 +n03196990 +n03197201 +n03197337 +n03197446 +n03198223 +n03198500 +n03199358 +n03199488 +n03199647 +n03199775 +n03199901 +n03200231 +n03200357 +n03200539 +n03200701 +n03200906 +n03201035 +n03201208 +n03201529 +n03201638 +n03201776 +n03201895 +n03201996 +n03202354 +n03202481 +n03202760 +n03202940 +n03203089 +n03203806 +n03204134 +n03204306 +n03204436 +n03204558 +n03204955 +n03205143 +n03205304 +n03205458 +n03205574 +n03205669 +n03205903 +n03206023 +n03206158 +n03206282 +n03206405 +n03206602 +n03206718 +n03206908 +n03207305 +n03207548 +n03207630 +n03207743 +n03207835 +n03207941 +n03208556 +n03208938 +n03209359 +n03209477 +n03209666 +n03209910 +n03210245 +n03210372 +n03210552 +n03210683 +n03211117 +n03211413 +n03211616 +n03211789 +n03212114 +n03212247 +n03212406 +n03212811 +n03213014 +n03213361 +n03213538 +n03213715 +n03213826 +n03214253 +n03214450 +n03214582 +n03214966 +n03215076 +n03215191 +n03215337 +n03215508 +n03215749 +n03215930 +n03216199 +n03216402 +n03216562 +n03216710 +n03216828 +n03217653 +n03217739 +n03217889 +n03218198 +n03218446 +n03219010 +n03219135 +n03219483 +n03219612 +n03219859 +n03219966 +n03220095 +n03220237 +n03220513 +n03220692 +n03221059 +n03221351 +n03221540 +n03221720 +n03222176 +n03222318 +n03222516 +n03222722 +n03222857 +n03223162 +n03223299 +n03223441 +n03223553 +n03223686 +n03223923 +n03224490 +n03224603 +n03224753 +n03224893 +n03225108 +n03225458 +n03225616 +n03225777 +n03225988 +n03226090 +n03226254 +n03226375 +n03226538 +n03226880 +n03227010 +n03227184 +n03227317 +n03227721 +n03227856 +n03228016 +n03228254 +n03228365 +n03228533 +n03228692 +n03228796 +n03228967 +n03229115 +n03229244 +n03229526 +n03231160 +n03231368 +n03231819 +n03232309 +n03232417 +n03232543 +n03232815 +n03232923 +n03233123 +n03233624 +n03233744 +n03233905 +n03234164 +n03234952 +n03235042 +n03235180 +n03235327 +n03235796 +n03235979 +n03236093 +n03236217 +n03236423 +n03236580 +n03236735 +n03237212 +n03237340 +n03237416 +n03237639 +n03237839 
+n03237992 +n03238131 +n03238286 +n03238586 +n03238762 +n03238879 +n03239054 +n03239259 +n03239607 +n03239726 +n03240140 +n03240683 +n03240892 +n03241093 +n03241335 +n03241496 +n03241903 +n03242120 +n03242264 +n03242390 +n03242506 +n03242995 +n03243218 +n03243625 +n03244047 +n03244231 +n03244388 +n03244775 +n03244919 +n03245271 +n03245421 +n03245724 +n03245889 +n03246197 +n03246312 +n03246454 +n03246653 +n03246933 +n03247083 +n03247351 +n03247495 +n03248835 +n03249342 +n03249569 +n03249956 +n03250089 +n03250279 +n03250405 +n03250588 +n03250847 +n03250952 +n03251100 +n03251280 +n03251533 +n03251766 +n03251932 +n03252231 +n03252324 +n03252422 +n03252637 +n03252787 +n03253071 +n03253187 +n03253279 +n03253714 +n03253796 +n03253886 +n03254046 +n03254189 +n03254374 +n03254625 +n03254737 +n03254862 +n03255030 +n03255167 +n03255322 +n03255488 +n03255899 +n03256032 +n03256166 +n03256472 +n03256631 +n03256788 +n03256928 +n03257065 +n03257210 +n03257586 +n03258192 +n03258330 +n03258456 +n03258577 +n03258905 +n03259009 +n03259280 +n03259401 +n03259505 +n03260206 +n03260504 +n03260733 +n03260849 +n03261019 +n03261263 +n03261395 +n03261603 +n03261776 +n03262072 +n03262248 +n03262519 +n03262717 +n03262809 +n03262932 +n03263076 +n03263338 +n03263640 +n03263758 +n03264906 +n03265032 +n03265754 +n03266195 +n03266371 +n03266620 +n03266749 +n03267113 +n03267468 +n03267696 +n03267821 +n03268142 +n03268311 +n03268645 +n03268790 +n03268918 +n03269073 +n03269203 +n03269401 +n03270165 +n03270695 +n03270854 +n03271030 +n03271260 +n03271376 +n03271574 +n03271765 +n03271865 +n03272010 +n03272125 +n03272239 +n03272383 +n03272562 +n03272810 +n03272940 +n03273061 +n03273551 +n03273740 +n03273913 +n03274265 +n03274435 +n03274561 +n03274796 +n03275125 +n03275311 +n03275566 +n03275681 +n03275864 +n03276179 +n03276696 +n03276839 +n03277004 +n03277149 +n03277459 +n03277602 +n03277771 +n03278248 +n03278914 +n03279153 +n03279364 +n03279508 +n03279804 +n03279918 +n03280216 +n03280394 +n03280644 +n03281145 +n03281524 +n03281673 +n03282060 +n03282295 +n03282401 +n03283221 +n03283413 +n03283827 +n03284308 +n03284482 +n03284743 +n03284886 +n03284981 +n03285578 +n03285730 +n03285912 +n03286572 +n03287351 +n03287733 +n03288003 +n03288500 +n03288643 +n03288742 +n03288886 +n03289660 +n03289985 +n03290096 +n03290195 +n03290653 +n03291413 +n03291551 +n03291741 +n03291819 +n03291963 +n03292085 +n03292362 +n03292475 +n03292603 +n03292736 +n03292960 +n03293095 +n03293741 +n03293863 +n03294048 +n03294604 +n03294833 +n03295012 +n03295140 +n03295246 +n03295928 +n03296081 +n03296217 +n03296328 +n03296478 +n03296963 +n03297103 +n03297226 +n03297495 +n03297644 +n03297735 +n03298089 +n03298352 +n03298716 +n03298858 +n03299406 +n03300216 +n03300443 +n03301175 +n03301291 +n03301389 +n03301568 +n03301833 +n03301940 +n03302671 +n03302790 +n03302938 +n03303217 +n03303669 +n03303831 +n03304197 +n03304323 +n03304465 +n03305300 +n03305522 +n03305953 +n03306385 +n03306869 +n03307037 +n03307573 +n03307792 +n03308152 +n03308481 +n03308614 +n03309110 +n03309356 +n03309465 +n03309687 +n03309808 +n03313333 +n03314227 +n03314378 +n03314608 +n03314780 +n03314884 +n03315644 +n03315805 +n03315990 +n03316105 +n03316406 +n03316873 +n03317233 +n03317510 +n03317673 +n03317788 +n03317889 +n03318136 +n03318294 +n03318865 +n03318983 +n03319167 +n03319457 +n03319576 +n03319745 +n03320046 +n03320262 +n03320421 +n03320519 +n03320845 +n03320959 +n03321103 +n03321419 +n03321563 +n03321843 +n03321954 +n03322570 +n03322704 +n03322836 +n03322940 +n03323096 +n03323211 +n03323319 
+n03323703 +n03324629 +n03324814 +n03324928 +n03325088 +n03325288 +n03325403 +n03325584 +n03325691 +n03325941 +n03326073 +n03326371 +n03326475 +n03326660 +n03326795 +n03326948 +n03327133 +n03327234 +n03327553 +n03327691 +n03327841 +n03328201 +n03329302 +n03329536 +n03329663 +n03330002 +n03330665 +n03330792 +n03330947 +n03331077 +n03331244 +n03331599 +n03332005 +n03332173 +n03332271 +n03332393 +n03332591 +n03332784 +n03332989 +n03333129 +n03333252 +n03333349 +n03333610 +n03333711 +n03333851 +n03334017 +n03334291 +n03334382 +n03334492 +n03334912 +n03335030 +n03335333 +n03335461 +n03335846 +n03336168 +n03336282 +n03336575 +n03336742 +n03336839 +n03337140 +n03337383 +n03337494 +n03337822 +n03338287 +n03338821 +n03339296 +n03339529 +n03339643 +n03340009 +n03340723 +n03340923 +n03341035 +n03341153 +n03341297 +n03341606 +n03342015 +n03342127 +n03342262 +n03342432 +n03342657 +n03342863 +n03342961 +n03343047 +n03343234 +n03343354 +n03343560 +n03343737 +n03343853 +n03344305 +n03344393 +n03344509 +n03344642 +n03344784 +n03344935 +n03345487 +n03345837 +n03346135 +n03346289 +n03346455 +n03347037 +n03347472 +n03347617 +n03348142 +n03348868 +n03349020 +n03349296 +n03349367 +n03349469 +n03349599 +n03349771 +n03349892 +n03350204 +n03350352 +n03350456 +n03350602 +n03351151 +n03351262 +n03351434 +n03351979 +n03352232 +n03352366 +n03352628 +n03352961 +n03353281 +n03353951 +n03354207 +n03354903 +n03355468 +n03355768 +n03355925 +n03356038 +n03356279 +n03356446 +n03356559 +n03356858 +n03356982 +n03357081 +n03357267 +n03357716 +n03358172 +n03358380 +n03358726 +n03358841 +n03359137 +n03359285 +n03359436 +n03359566 +n03360133 +n03360300 +n03360431 +n03360622 +n03360731 +n03361109 +n03361297 +n03361380 +n03361550 +n03361683 +n03362639 +n03362771 +n03362890 +n03363363 +n03363549 +n03363749 +n03364008 +n03364156 +n03364599 +n03364937 +n03365231 +n03365374 +n03365592 +n03365991 +n03366464 +n03366721 +n03366823 +n03366974 +n03367059 +n03367321 +n03367410 +n03367545 +n03367875 +n03367969 +n03368048 +n03368352 +n03369276 +n03369407 +n03369512 +n03369866 +n03370387 +n03370646 +n03371875 +n03372029 +n03372549 +n03372822 +n03372933 +n03373237 +n03373611 +n03373943 +n03374102 +n03374282 +n03374372 +n03374473 +n03374570 +n03374649 +n03374838 +n03375171 +n03375329 +n03375575 +n03376159 +n03376279 +n03376595 +n03376771 +n03376938 +n03378005 +n03378174 +n03378342 +n03378442 +n03378593 +n03378765 +n03379051 +n03379204 +n03379343 +n03379719 +n03379828 +n03379989 +n03380301 +n03380647 +n03380724 +n03380867 +n03381126 +n03381231 +n03381450 +n03381565 +n03381776 +n03382104 +n03382292 +n03382413 +n03382533 +n03382708 +n03382856 +n03382969 +n03383099 +n03383211 +n03383378 +n03383468 +n03383562 +n03383821 +n03384167 +n03384352 +n03384891 +n03385295 +n03385557 +n03386011 +n03386343 +n03386544 +n03386726 +n03386870 +n03387323 +n03387653 +n03388043 +n03388183 +n03388323 +n03388549 +n03388711 +n03388990 +n03389611 +n03389761 +n03389889 +n03389983 +n03390075 +n03390327 +n03390673 +n03390786 +n03390983 +n03391301 +n03391613 +n03391770 +n03392648 +n03392741 +n03393017 +n03393199 +n03393324 +n03393761 +n03393912 +n03394149 +n03394272 +n03394480 +n03394649 +n03394916 +n03395256 +n03395401 +n03395514 +n03395859 +n03396074 +n03396580 +n03396654 +n03396997 +n03397087 +n03397266 +n03397412 +n03397532 +n03397947 +n03398153 +n03398228 +n03399579 +n03399677 +n03399761 +n03399971 +n03400231 +n03400972 +n03401129 +n03401279 +n03401721 +n03402188 +n03402369 +n03402511 +n03402785 +n03402941 +n03403643 +n03404012 +n03404149 +n03404251 +n03404360 +n03404449 
+n03404900 +n03405111 +n03405265 +n03405595 +n03405725 +n03406759 +n03406966 +n03407369 +n03407865 +n03408054 +n03408264 +n03408340 +n03408444 +n03409297 +n03409393 +n03409591 +n03409920 +n03410022 +n03410147 +n03410303 +n03410423 +n03410571 +n03410740 +n03410938 +n03411079 +n03411208 +n03411339 +n03411927 +n03412058 +n03412220 +n03412387 +n03412511 +n03412906 +n03413124 +n03413264 +n03413428 +n03413684 +n03413828 +n03414029 +n03414162 +n03414676 +n03415252 +n03415486 +n03415626 +n03415749 +n03415868 +n03416094 +n03416489 +n03416640 +n03416775 +n03416900 +n03417042 +n03417202 +n03417345 +n03417749 +n03417970 +n03418158 +n03418242 +n03418402 +n03418618 +n03418749 +n03418915 +n03419014 +n03420345 +n03420801 +n03420935 +n03421117 +n03421324 +n03421485 +n03421669 +n03421768 +n03421960 +n03422072 +n03422484 +n03422589 +n03422771 +n03423099 +n03423224 +n03423306 +n03423479 +n03423568 +n03423719 +n03423877 +n03424204 +n03424325 +n03424489 +n03424630 +n03424862 +n03425241 +n03425325 +n03425413 +n03425595 +n03425769 +n03426134 +n03426285 +n03426462 +n03426574 +n03426871 +n03427202 +n03427296 +n03428090 +n03428226 +n03428349 +n03429003 +n03429137 +n03429288 +n03429682 +n03429771 +n03429914 +n03430091 +n03430313 +n03430418 +n03430551 +n03430959 +n03431243 +n03431570 +n03431745 +n03432061 +n03432129 +n03432360 +n03432509 +n03433247 +n03433637 +n03433877 +n03434188 +n03434285 +n03434830 +n03435593 +n03435743 +n03435991 +n03436075 +n03436182 +n03436417 +n03436549 +n03436656 +n03436772 +n03436891 +n03436990 +n03437184 +n03437295 +n03437430 +n03437581 +n03437741 +n03437829 +n03437941 +n03438071 +n03438257 +n03438661 +n03438780 +n03438863 +n03439348 +n03439631 +n03439814 +n03440216 +n03440682 +n03440876 +n03441112 +n03441345 +n03441465 +n03441582 +n03442288 +n03442487 +n03442597 +n03442756 +n03443005 +n03443149 +n03443371 +n03443543 +n03443912 +n03444034 +n03445326 +n03445617 +n03445777 +n03445924 +n03446070 +n03446268 +n03446832 +n03447075 +n03447358 +n03447447 +n03447721 +n03447894 +n03448031 +n03448590 +n03448696 +n03448956 +n03449217 +n03449309 +n03449451 +n03449564 +n03449858 +n03450230 +n03450516 +n03450734 +n03450881 +n03450974 +n03451120 +n03451253 +n03451365 +n03451711 +n03451798 +n03452267 +n03452449 +n03452594 +n03452741 +n03453231 +n03453320 +n03453443 +n03454110 +n03454211 +n03454442 +n03454536 +n03454707 +n03454885 +n03455355 +n03455488 +n03455642 +n03455802 +n03456024 +n03456186 +n03456299 +n03456447 +n03456548 +n03456665 +n03457008 +n03457451 +n03457686 +n03457902 +n03458271 +n03458422 +n03459328 +n03459591 +n03459775 +n03459914 +n03460040 +n03460147 +n03460297 +n03460455 +n03460899 +n03461288 +n03461385 +n03461651 +n03461882 +n03461988 +n03462110 +n03462315 +n03462747 +n03462972 +n03463185 +n03463381 +n03463666 +n03464053 +n03464467 +n03464628 +n03464952 +n03465040 +n03465151 +n03465320 +n03465426 +n03465500 +n03465605 +n03465718 +n03465818 +n03466162 +n03466493 +n03466600 +n03466839 +n03466947 +n03467068 +n03467254 +n03467380 +n03467517 +n03467796 +n03467887 +n03467984 +n03468570 +n03468696 +n03468821 +n03469031 +n03469175 +n03469493 +n03469832 +n03469903 +n03470005 +n03470222 +n03470387 +n03470629 +n03470948 +n03471030 +n03471190 +n03471347 +n03471779 +n03472232 +n03472535 +n03472672 +n03472796 +n03472937 +n03473078 +n03473227 +n03473465 +n03473817 +n03473966 +n03474167 +n03474352 +n03474779 +n03474896 +n03475581 +n03475674 +n03475823 +n03475961 +n03476083 +n03476313 +n03476542 +n03476684 +n03476991 +n03477143 +n03477303 +n03477410 +n03477512 +n03477773 +n03477902 +n03478589 +n03478756 
+n03478907 +n03479121 +n03479266 +n03479397 +n03479502 +n03480579 +n03480719 +n03480973 +n03481172 +n03481521 +n03482001 +n03482128 +n03482252 +n03482405 +n03482523 +n03482877 +n03483086 +n03483230 +n03483316 +n03483531 +n03483637 +n03483823 +n03483971 +n03484083 +n03484487 +n03484576 +n03484809 +n03484931 +n03485198 +n03485309 +n03485407 +n03485575 +n03485794 +n03487090 +n03487331 +n03487444 +n03487533 +n03487642 +n03487774 +n03487886 +n03488111 +n03488188 +n03488438 +n03488603 +n03488784 +n03488887 +n03489048 +n03489162 +n03490006 +n03490119 +n03490324 +n03490449 +n03490649 +n03490784 +n03490884 +n03491032 +n03491724 +n03491988 +n03492087 +n03492250 +n03492542 +n03492922 +n03493219 +n03493792 +n03493911 +n03494278 +n03494537 +n03494706 +n03495039 +n03495258 +n03495570 +n03495671 +n03495941 +n03496183 +n03496296 +n03496486 +n03496612 +n03496892 +n03497100 +n03497352 +n03497657 +n03498441 +n03498536 +n03498662 +n03498781 +n03498866 +n03498962 +n03499354 +n03499468 +n03499907 +n03500090 +n03500209 +n03500295 +n03500389 +n03500457 +n03500557 +n03500699 +n03500838 +n03500971 +n03501152 +n03501288 +n03501520 +n03501614 +n03502200 +n03502331 +n03502509 +n03502777 +n03502897 +n03503097 +n03503233 +n03503358 +n03503477 +n03503567 +n03503718 +n03503997 +n03504205 +n03504293 +n03504723 +n03505015 +n03505133 +n03505383 +n03505504 +n03505667 +n03505764 +n03506028 +n03506184 +n03506370 +n03506560 +n03506727 +n03506880 +n03507241 +n03507458 +n03507658 +n03507963 +n03508101 +n03508485 +n03508881 +n03509394 +n03509608 +n03509843 +n03510072 +n03510244 +n03510384 +n03510487 +n03510583 +n03510866 +n03510987 +n03511175 +n03511333 +n03512030 +n03512147 +n03512452 +n03512624 +n03512911 +n03513137 +n03513376 +n03514129 +n03514340 +n03514451 +n03514693 +n03514894 +n03515338 +n03515934 +n03516266 +n03516367 +n03516647 +n03516844 +n03516996 +n03517509 +n03517647 +n03517760 +n03517899 +n03517982 +n03518135 +n03518230 +n03518305 +n03518445 +n03518631 +n03518829 +n03518943 +n03519081 +n03519226 +n03519387 +n03519674 +n03519848 +n03520493 +n03521076 +n03521431 +n03521544 +n03521675 +n03521771 +n03521899 +n03522003 +n03522100 +n03522634 +n03522863 +n03522990 +n03523134 +n03523398 +n03523506 +n03523987 +n03524150 +n03524287 +n03524425 +n03524574 +n03524745 +n03524976 +n03525074 +n03525252 +n03525454 +n03525693 +n03525827 +n03526062 +n03527149 +n03527444 +n03527565 +n03527675 +n03528100 +n03528263 +n03528523 +n03528901 +n03529175 +n03529444 +n03529629 +n03529860 +n03530189 +n03530511 +n03530642 +n03530910 +n03531281 +n03531447 +n03531546 +n03531691 +n03531982 +n03532342 +n03532672 +n03532919 +n03533014 +n03533392 +n03533486 +n03533654 +n03533845 +n03534580 +n03534695 +n03534776 +n03535024 +n03535284 +n03535647 +n03535780 +n03536122 +n03536568 +n03536761 +n03537085 +n03537241 +n03537412 +n03537550 +n03538037 +n03538179 +n03538300 +n03538406 +n03538542 +n03538634 +n03538817 +n03538957 +n03539103 +n03539293 +n03539433 +n03539546 +n03539678 +n03539754 +n03540090 +n03540267 +n03540476 +n03540595 +n03540914 +n03541091 +n03541269 +n03541393 +n03541537 +n03541696 +n03541923 +n03542333 +n03542605 +n03542727 +n03542860 +n03543012 +n03543112 +n03543254 +n03543394 +n03543511 +n03543603 +n03543735 +n03543945 +n03544143 +n03544238 +n03544360 +n03545150 +n03545470 +n03545585 +n03545756 +n03545961 +n03546112 +n03546235 +n03546340 +n03547054 +n03547229 +n03547397 +n03547530 +n03547861 +n03548086 +n03548195 +n03548320 +n03548402 +n03548533 +n03548626 +n03548930 +n03549199 +n03549350 +n03549473 +n03549589 +n03549732 +n03549897 +n03550153 
+n03550289 +n03550420 +n03551084 +n03551395 +n03551582 +n03551790 +n03552001 +n03552449 +n03552749 +n03553019 +n03553248 +n03553486 +n03554375 +n03554460 +n03554645 +n03555006 +n03555217 +n03555426 +n03555564 +n03555662 +n03555862 +n03555996 +n03556173 +n03556679 +n03556811 +n03556992 +n03557270 +n03557360 +n03557590 +n03557692 +n03557840 +n03558007 +n03558176 +n03558404 +n03558633 +n03558739 +n03559373 +n03559531 +n03559999 +n03560430 +n03560860 +n03561047 +n03561169 +n03561573 +n03562565 +n03563200 +n03563460 +n03563710 +n03563967 +n03564849 +n03565288 +n03565565 +n03565710 +n03565830 +n03565991 +n03566193 +n03566329 +n03566555 +n03566730 +n03566860 +n03567066 +n03567635 +n03567788 +n03567912 +n03568117 +n03568818 +n03569014 +n03569174 +n03569293 +n03569494 +n03571280 +n03571439 +n03571625 +n03571853 +n03571942 +n03572107 +n03572205 +n03572321 +n03572631 +n03573574 +n03573848 +n03574243 +n03574416 +n03574555 +n03574816 +n03575958 +n03576215 +n03576443 +n03576955 +n03577090 +n03577312 +n03577474 +n03577672 +n03577818 +n03578055 +n03578251 +n03578656 +n03578981 +n03579538 +n03579982 +n03580518 +n03580615 +n03580845 +n03580990 +n03581125 +n03581531 +n03581897 +n03582508 +n03582959 +n03583419 +n03583621 +n03584254 +n03584400 +n03584829 +n03585073 +n03585337 +n03585438 +n03585551 +n03585682 +n03585778 +n03585875 +n03586219 +n03586631 +n03586911 +n03587205 +n03588216 +n03588841 +n03588951 +n03589313 +n03589513 +n03589672 +n03589791 +n03590306 +n03590475 +n03590588 +n03590841 +n03590932 +n03591116 +n03591313 +n03591592 +n03591798 +n03591901 +n03592245 +n03592669 +n03592773 +n03592931 +n03593122 +n03593222 +n03593526 +n03593862 +n03594010 +n03594148 +n03594277 +n03594523 +n03594734 +n03594945 +n03595055 +n03595264 +n03595409 +n03595523 +n03595614 +n03595860 +n03596099 +n03596285 +n03596543 +n03597147 +n03597317 +n03597916 +n03598151 +n03598299 +n03598385 +n03598515 +n03598646 +n03598783 +n03598930 +n03599486 +n03599964 +n03600285 +n03600475 +n03600722 +n03600977 +n03601442 +n03601638 +n03601840 +n03602081 +n03602194 +n03602365 +n03602686 +n03602790 +n03602883 +n03603442 +n03603594 +n03603722 +n03604156 +n03604311 +n03604400 +n03604536 +n03604629 +n03604763 +n03604843 +n03605417 +n03605504 +n03605598 +n03605722 +n03605915 +n03606106 +n03606251 +n03606347 +n03606465 +n03607029 +n03607186 +n03607527 +n03607659 +n03607923 +n03608504 +n03609147 +n03609235 +n03609397 +n03609542 +n03609786 +n03609959 +n03610098 +n03610418 +n03610524 +n03610682 +n03610836 +n03610992 +n03612010 +n03612814 +n03612965 +n03613294 +n03613592 +n03614007 +n03614383 +n03614532 +n03614782 +n03614887 +n03615300 +n03615406 +n03615563 +n03615655 +n03615790 +n03616091 +n03616225 +n03616428 +n03616763 +n03616979 +n03617095 +n03617312 +n03617480 +n03617594 +n03617834 +n03618101 +n03618339 +n03618546 +n03618678 +n03618797 +n03618982 +n03619050 +n03619196 +n03619275 +n03619396 +n03619650 +n03619793 +n03619890 +n03620052 +n03620353 +n03620967 +n03621049 +n03621377 +n03621694 +n03622058 +n03622401 +n03622526 +n03622839 +n03622931 +n03623198 +n03623338 +n03623556 +n03624134 +n03624400 +n03624767 +n03625355 +n03625539 +n03625646 +n03625943 +n03626115 +n03626272 +n03626418 +n03626502 +n03626760 +n03627232 +n03627954 +n03628071 +n03628215 +n03628421 +n03628511 +n03628728 +n03628831 +n03628984 +n03629100 +n03629231 +n03629520 +n03629643 +n03630262 +n03630383 +n03631177 +n03631811 +n03631922 +n03632100 +n03632577 +n03632729 +n03632852 +n03632963 +n03633091 +n03633341 +n03633632 +n03633886 +n03634034 +n03634899 +n03635032 +n03635108 +n03635330 
+n03635516 +n03635668 +n03635932 +n03636248 +n03636649 +n03637027 +n03637181 +n03637318 +n03637480 +n03637787 +n03637898 +n03638014 +n03638180 +n03638623 +n03638743 +n03638883 +n03639077 +n03639230 +n03639497 +n03639675 +n03639880 +n03640850 +n03640988 +n03641569 +n03641947 +n03642144 +n03642341 +n03642444 +n03642573 +n03642806 +n03643149 +n03643253 +n03643491 +n03643737 +n03643907 +n03644073 +n03644378 +n03644858 +n03645011 +n03645168 +n03645290 +n03645577 +n03646020 +n03646148 +n03646296 +n03646809 +n03646916 +n03647423 +n03647520 +n03648219 +n03648431 +n03648667 +n03649003 +n03649161 +n03649288 +n03649674 +n03649797 +n03649909 +n03650551 +n03651388 +n03651605 +n03651843 +n03652100 +n03652389 +n03652729 +n03652826 +n03652932 +n03653110 +n03653220 +n03653454 +n03653583 +n03653740 +n03653833 +n03653975 +n03654576 +n03654826 +n03655072 +n03655470 +n03655720 +n03656484 +n03656957 +n03657121 +n03657239 +n03657511 +n03658102 +n03658185 +n03658635 +n03658858 +n03659292 +n03659686 +n03659809 +n03659950 +n03660124 +n03660562 +n03660909 +n03661043 +n03661340 +n03662301 +n03662452 +n03662601 +n03662719 +n03662887 +n03663433 +n03663531 +n03663910 +n03664159 +n03664675 +n03664840 +n03664943 +n03665232 +n03665366 +n03665851 +n03665924 +n03666238 +n03666362 +n03666591 +n03666917 +n03667060 +n03667235 +n03667552 +n03667664 +n03667829 +n03668067 +n03668279 +n03668488 +n03668803 +n03669245 +n03669534 +n03669886 +n03670208 +n03671914 +n03672521 +n03672827 +n03673027 +n03673270 +n03673450 +n03673767 +n03674270 +n03674440 +n03674731 +n03674842 +n03675076 +n03675235 +n03675445 +n03675558 +n03675907 +n03676087 +n03676483 +n03676623 +n03676759 +n03677115 +n03677682 +n03677766 +n03678558 +n03678729 +n03678879 +n03679384 +n03679712 +n03680248 +n03680355 +n03680512 +n03680734 +n03680858 +n03680942 +n03681477 +n03681813 +n03682380 +n03682487 +n03682877 +n03683079 +n03683341 +n03683457 +n03683606 +n03683708 +n03683995 +n03684143 +n03684224 +n03684489 +n03684611 +n03684740 +n03684823 +n03685307 +n03685486 +n03685640 +n03685820 +n03686130 +n03686363 +n03686470 +n03686924 +n03687137 +n03687928 +n03688066 +n03688192 +n03688405 +n03688504 +n03688605 +n03688707 +n03688832 +n03688943 +n03689157 +n03689570 +n03690168 +n03690279 +n03690473 +n03690851 +n03690938 +n03691459 +n03691817 +n03692004 +n03692136 +n03692272 +n03692379 +n03692522 +n03692842 +n03693293 +n03693474 +n03693707 +n03693860 +n03694196 +n03694356 +n03694639 +n03694761 +n03694949 +n03695122 +n03695452 +n03695616 +n03695753 +n03695857 +n03695957 +n03696065 +n03696301 +n03696445 +n03696568 +n03696746 +n03696909 +n03697007 +n03697366 +n03697552 +n03697812 +n03697913 +n03698123 +n03698226 +n03698360 +n03698604 +n03698723 +n03698815 +n03699280 +n03699591 +n03699754 +n03699975 +n03700963 +n03701191 +n03701391 +n03701640 +n03701790 +n03702248 +n03702440 +n03702582 +n03703075 +n03703203 +n03703463 +n03703590 +n03703730 +n03703862 +n03703945 +n03704549 +n03704834 +n03705379 +n03705808 +n03706229 +n03706415 +n03706653 +n03706939 +n03707171 +n03707372 +n03707597 +n03707766 +n03708036 +n03708425 +n03708843 +n03708962 +n03709206 +n03709363 +n03709545 +n03709644 +n03709823 +n03709960 +n03710079 +n03710193 +n03710294 +n03710421 +n03710528 +n03710637 +n03710721 +n03710937 +n03711044 +n03711711 +n03711999 +n03712111 +n03712337 +n03712444 +n03712887 +n03712981 +n03713069 +n03713151 +n03713436 +n03714235 +n03715114 +n03715275 +n03715386 +n03715669 +n03715892 +n03716228 +n03716887 +n03716966 +n03717131 +n03717285 +n03717447 +n03717622 +n03718212 +n03718335 +n03718458 +n03718581 
+n03718699 +n03718789 +n03718935 +n03719053 +n03719343 +n03719560 +n03719743 +n03720005 +n03720163 +n03720665 +n03720891 +n03721047 +n03721252 +n03721384 +n03721590 +n03722007 +n03722288 +n03722646 +n03722944 +n03723153 +n03723267 +n03723439 +n03723781 +n03723885 +n03724066 +n03724176 +n03724417 +n03724538 +n03724623 +n03724756 +n03724870 +n03725035 +n03725506 +n03725600 +n03725717 +n03725869 +n03726116 +n03726233 +n03726371 +n03726516 +n03726760 +n03726993 +n03727067 +n03727465 +n03727605 +n03727837 +n03727946 +n03728437 +n03728982 +n03729131 +n03729308 +n03729402 +n03729482 +n03729647 +n03729826 +n03729951 +n03730153 +n03730334 +n03730494 +n03730655 +n03730788 +n03730893 +n03731019 +n03731483 +n03731695 +n03731882 +n03732020 +n03732114 +n03732458 +n03732543 +n03732658 +n03733131 +n03733281 +n03733465 +n03733547 +n03733644 +n03733805 +n03733925 +n03735637 +n03735963 +n03736064 +n03736147 +n03736269 +n03736372 +n03736470 +n03736970 +n03738066 +n03738241 +n03738472 +n03739518 +n03739693 +n03742019 +n03742115 +n03742238 +n03743016 +n03743279 +n03743902 +n03744276 +n03744684 +n03744840 +n03745146 +n03745487 +n03745571 +n03746005 +n03746155 +n03746330 +n03746486 +n03748162 +n03749504 +n03749634 +n03749807 +n03750206 +n03750437 +n03750614 +n03751065 +n03751269 +n03751458 +n03751590 +n03751757 +n03752071 +n03752185 +n03752398 +n03752922 +n03753077 +n03753514 +n03757604 +n03758089 +n03758220 +n03758894 +n03758992 +n03759243 +n03759432 +n03759661 +n03759954 +n03760310 +n03760671 +n03760944 +n03761084 +n03761588 +n03761731 +n03762238 +n03762332 +n03762434 +n03762602 +n03762982 +n03763727 +n03763968 +n03764276 +n03764606 +n03764736 +n03764822 +n03764995 +n03765128 +n03765467 +n03765561 +n03765934 +n03766044 +n03766218 +n03766322 +n03766508 +n03766600 +n03766697 +n03766935 +n03767112 +n03767203 +n03767459 +n03767745 +n03767966 +n03768132 +n03768683 +n03768823 +n03768916 +n03769610 +n03769722 +n03769881 +n03770085 +n03770224 +n03770316 +n03770439 +n03770520 +n03770679 +n03770834 +n03770954 +n03772077 +n03772269 +n03772584 +n03772674 +n03773035 +n03773504 +n03773835 +n03774327 +n03774461 +n03775071 +n03775199 +n03775388 +n03775546 +n03775636 +n03775747 +n03775847 +n03776167 +n03776460 +n03776877 +n03776997 +n03777126 +n03777568 +n03777754 +n03778459 +n03778817 +n03779000 +n03779128 +n03779246 +n03779370 +n03779884 +n03780047 +n03780799 +n03781055 +n03781244 +n03781467 +n03781594 +n03781683 +n03781787 +n03782006 +n03782190 +n03782794 +n03782929 +n03783304 +n03783430 +n03783575 +n03783873 +n03784139 +n03784270 +n03784793 +n03784896 +n03785016 +n03785142 +n03785237 +n03785499 +n03785721 +n03786096 +n03786194 +n03786313 +n03786621 +n03786715 +n03786901 +n03787032 +n03787523 +n03788047 +n03788195 +n03788365 +n03788498 +n03788601 +n03788914 +n03789171 +n03789400 +n03789603 +n03789794 +n03789946 +n03790230 +n03790512 +n03790755 +n03790953 +n03791053 +n03791235 +n03792048 +n03792334 +n03792526 +n03792782 +n03792972 +n03793489 +n03793850 +n03794056 +n03794136 +n03794798 +n03795123 +n03795269 +n03795758 +n03795976 +n03796181 +n03796401 +n03796522 +n03796605 +n03796848 +n03796974 +n03797062 +n03797182 +n03797264 +n03797390 +n03797896 +n03798061 +n03798442 +n03798610 +n03798982 +n03799113 +n03799240 +n03799375 +n03799610 +n03799876 +n03800371 +n03800485 +n03800563 +n03800772 +n03800933 +n03801353 +n03801533 +n03801671 +n03801760 +n03801880 +n03802007 +n03802228 +n03802393 +n03802643 +n03802800 +n03802973 +n03803116 +n03803284 +n03803780 +n03804211 +n03804744 +n03805180 +n03805280 +n03805374 +n03805503 +n03805725 
+n03805933 +n03807334 +n03809211 +n03809312 +n03809603 +n03809686 +n03809802 +n03810412 +n03810952 +n03811295 +n03811444 +n03811847 +n03811965 +n03812263 +n03812382 +n03812789 +n03812924 +n03813078 +n03813176 +n03813946 +n03814528 +n03814639 +n03814727 +n03814817 +n03814906 +n03815149 +n03815278 +n03815482 +n03815615 +n03816005 +n03816136 +n03816394 +n03816530 +n03816849 +n03817191 +n03817331 +n03817522 +n03817647 +n03818001 +n03818343 +n03819047 +n03819336 +n03819448 +n03819595 +n03819994 +n03820154 +n03820318 +n03820728 +n03820950 +n03821145 +n03821424 +n03821518 +n03822171 +n03822361 +n03822504 +n03822656 +n03822767 +n03823111 +n03823216 +n03823312 +n03823673 +n03823906 +n03824197 +n03824284 +n03824381 +n03824589 +n03824713 +n03824999 +n03825080 +n03825271 +n03825442 +n03825673 +n03825788 +n03825913 +n03826039 +n03826186 +n03827420 +n03827536 +n03828020 +n03829340 +n03829857 +n03829954 +n03831203 +n03831382 +n03831757 +n03832144 +n03832673 +n03833907 +n03834040 +n03834472 +n03834604 +n03835197 +n03835729 +n03835941 +n03836062 +n03836451 +n03836602 +n03836906 +n03836976 +n03837422 +n03837606 +n03837698 +n03837869 +n03838024 +n03838298 +n03838748 +n03838899 +n03839172 +n03839276 +n03839424 +n03839671 +n03839795 +n03840327 +n03840681 +n03840823 +n03841011 +n03841143 +n03841290 +n03841666 +n03842012 +n03842156 +n03842276 +n03842377 +n03842585 +n03842754 +n03842986 +n03843092 +n03843316 +n03843438 +n03843555 +n03843883 +n03844045 +n03844233 +n03844550 +n03844673 +n03844815 +n03844965 +n03845107 +n03845190 +n03845990 +n03846100 +n03846234 +n03846431 +n03846677 +n03846772 +n03846970 +n03847471 +n03847823 +n03848033 +n03848168 +n03848348 +n03848537 +n03849275 +n03849412 +n03849679 +n03849814 +n03849943 +n03850053 +n03850245 +n03850492 +n03850613 +n03851341 +n03851787 +n03852280 +n03852544 +n03852688 +n03853291 +n03853924 +n03854065 +n03854421 +n03854506 +n03854722 +n03854815 +n03855214 +n03855333 +n03855464 +n03855604 +n03855756 +n03855908 +n03856012 +n03856335 +n03856465 +n03856728 +n03857026 +n03857156 +n03857291 +n03857687 +n03857828 +n03858085 +n03858183 +n03858418 +n03858533 +n03858837 +n03859000 +n03859170 +n03859280 +n03859495 +n03859608 +n03859958 +n03860234 +n03860404 +n03861048 +n03861271 +n03861430 +n03861596 +n03861842 +n03862379 +n03862676 +n03862862 +n03863108 +n03863262 +n03863657 +n03863783 +n03863923 +n03864139 +n03864356 +n03864692 +n03865288 +n03865371 +n03865557 +n03865820 +n03865949 +n03866082 +n03867854 +n03868044 +n03868242 +n03868324 +n03868406 +n03868643 +n03868763 +n03868863 +n03869838 +n03869976 +n03870105 +n03870290 +n03870546 +n03870672 +n03870980 +n03871083 +n03871371 +n03871524 +n03871628 +n03871724 +n03871860 +n03872016 +n03872167 +n03872273 +n03873416 +n03873699 +n03873848 +n03873996 +n03874138 +n03874293 +n03874487 +n03874599 +n03874823 +n03875218 +n03875806 +n03875955 +n03876111 +n03876231 +n03877351 +n03877472 +n03877674 +n03877845 +n03878066 +n03878211 +n03878294 +n03878418 +n03878511 +n03878674 +n03878828 +n03878963 +n03879456 +n03879705 +n03880032 +n03880129 +n03880323 +n03880531 +n03881305 +n03881404 +n03881534 +n03882611 +n03882960 +n03883054 +n03883385 +n03883524 +n03883664 +n03883773 +n03883944 +n03884397 +n03884554 +n03884639 +n03884778 +n03884926 +n03885028 +n03885194 +n03885293 +n03885410 +n03885535 +n03885669 +n03885788 +n03885904 +n03886053 +n03886641 +n03886762 +n03886940 +n03887185 +n03887330 +n03887512 +n03887697 +n03887899 +n03888022 +n03888257 +n03888605 +n03888808 +n03888998 +n03889397 +n03889503 +n03889626 +n03889726 +n03889871 +n03890093 
+n03890233 +n03890358 +n03890514 +n03891051 +n03891251 +n03891332 +n03891538 +n03892178 +n03892425 +n03892557 +n03892728 +n03893935 +n03894051 +n03894379 +n03894677 +n03894933 +n03895038 +n03895170 +n03895866 +n03896103 +n03896233 +n03896419 +n03896526 +n03896628 +n03896984 +n03897130 +n03897634 +n03897943 +n03898129 +n03898271 +n03898395 +n03898633 +n03898787 +n03899100 +n03899612 +n03899768 +n03899933 +n03900028 +n03900194 +n03900301 +n03900393 +n03900979 +n03901229 +n03901338 +n03901750 +n03901974 +n03902125 +n03902220 +n03902482 +n03902756 +n03903133 +n03903290 +n03903424 +n03903733 +n03903868 +n03904060 +n03904183 +n03904433 +n03904657 +n03904782 +n03904909 +n03905361 +n03905540 +n03905730 +n03905947 +n03906106 +n03906224 +n03906463 +n03906590 +n03906789 +n03906894 +n03906997 +n03907475 +n03907654 +n03907908 +n03908111 +n03908204 +n03908456 +n03908618 +n03908714 +n03909020 +n03909160 +n03909406 +n03909516 +n03909658 +n03911406 +n03911513 +n03911658 +n03911767 +n03911866 +n03912218 +n03912821 +n03913343 +n03913930 +n03914106 +n03914337 +n03914438 +n03914583 +n03914831 +n03915118 +n03915320 +n03915437 +n03915900 +n03916031 +n03916289 +n03916385 +n03916470 +n03916720 +n03917048 +n03917198 +n03917327 +n03917814 +n03918074 +n03918480 +n03918737 +n03919096 +n03919289 +n03919430 +n03919808 +n03920288 +n03920384 +n03920641 +n03920737 +n03920867 +n03923379 +n03923564 +n03923692 +n03923918 +n03924069 +n03924407 +n03924532 +n03924679 +n03926148 +n03926412 +n03926876 +n03927091 +n03927299 +n03927539 +n03927792 +n03928116 +n03928589 +n03928814 +n03928994 +n03929091 +n03929202 +n03929443 +n03929660 +n03929855 +n03930229 +n03930313 +n03930431 +n03930515 +n03930630 +n03931765 +n03931885 +n03931980 +n03932080 +n03932670 +n03933391 +n03933933 +n03934042 +n03934229 +n03934311 +n03934565 +n03934656 +n03934890 +n03935116 +n03935234 +n03935335 +n03935883 +n03936269 +n03936466 +n03937543 +n03937835 +n03937931 +n03938037 +n03938244 +n03938401 +n03938522 +n03938725 +n03939062 +n03939178 +n03939281 +n03939440 +n03939565 +n03939677 +n03939844 +n03940256 +n03940894 +n03941013 +n03941231 +n03941417 +n03941586 +n03941684 +n03941887 +n03942028 +n03942600 +n03942813 +n03942920 +n03943115 +n03943266 +n03943623 +n03943714 +n03943833 +n03943920 +n03944024 +n03944138 +n03944341 +n03945459 +n03945615 +n03945817 +n03945928 +n03946076 +n03946162 +n03947111 +n03947343 +n03947466 +n03947798 +n03947888 +n03948242 +n03948459 +n03948830 +n03948950 +n03949145 +n03949317 +n03949761 +n03950228 +n03950359 +n03950537 +n03950647 +n03950899 +n03951068 +n03951213 +n03951453 +n03951800 +n03951971 +n03952150 +n03952576 +n03953020 +n03953416 +n03953901 +n03954393 +n03954731 +n03955296 +n03955489 +n03955809 +n03955941 +n03956157 +n03956331 +n03956531 +n03956623 +n03956785 +n03956922 +n03957315 +n03957420 +n03957762 +n03957991 +n03958227 +n03958338 +n03958630 +n03958752 +n03959014 +n03959123 +n03959227 +n03959701 +n03960374 +n03960490 +n03961394 +n03961630 +n03961711 +n03961828 +n03961939 +n03962525 +n03962685 +n03962852 +n03962932 +n03963028 +n03963198 +n03963294 +n03963483 +n03963645 +n03964495 +n03964611 +n03965456 +n03965907 +n03966206 +n03966325 +n03966582 +n03966751 +n03966976 +n03967270 +n03967396 +n03967562 +n03967942 +n03968293 +n03968479 +n03968581 +n03968728 +n03969510 +n03970156 +n03970363 +n03970546 +n03971218 +n03971321 +n03971960 +n03972146 +n03972372 +n03972524 +n03973003 +n03973285 +n03973402 +n03973520 +n03973628 +n03973839 +n03973945 +n03974070 +n03974915 +n03975035 +n03975657 +n03975788 +n03975926 +n03976105 +n03976268 
+n03976467 +n03976657 +n03977158 +n03977266 +n03977430 +n03977592 +n03977966 +n03978421 +n03978575 +n03978686 +n03978815 +n03978966 +n03979377 +n03979492 +n03980026 +n03980478 +n03980874 +n03980986 +n03981094 +n03981340 +n03981566 +n03981760 +n03981924 +n03982232 +n03982331 +n03982430 +n03982642 +n03982767 +n03982895 +n03983396 +n03983499 +n03983612 +n03983712 +n03983928 +n03984125 +n03984234 +n03984381 +n03984643 +n03984759 +n03985069 +n03985232 +n03985441 +n03985881 +n03986071 +n03986224 +n03986355 +n03986562 +n03986704 +n03986857 +n03986949 +n03987266 +n03987376 +n03987674 +n03987865 +n03987990 +n03988170 +n03988758 +n03988926 +n03989199 +n03989349 +n03989447 +n03989665 +n03989777 +n03989898 +n03990474 +n03991062 +n03991202 +n03991321 +n03991443 +n03991646 +n03991837 +n03992325 +n03992436 +n03992509 +n03992703 +n03992975 +n03993053 +n03993180 +n03993403 +n03993703 +n03993878 +n03994008 +n03994297 +n03994417 +n03994614 +n03994757 +n03995018 +n03995265 +n03995372 +n03995535 +n03995661 +n03995856 +n03996004 +n03996145 +n03996416 +n03996849 +n03997274 +n03997484 +n03997875 +n03998194 +n03998333 +n03998673 +n03999064 +n03999160 +n03999621 +n03999992 +n04000311 +n04000480 +n04000592 +n04000716 +n04000998 +n04001132 +n04001265 +n04001397 +n04001499 +n04001661 +n04001845 +n04002262 +n04002371 +n04002629 +n04003241 +n04003359 +n04003856 +n04004099 +n04004210 +n04004475 +n04004767 +n04004990 +n04005197 +n04005630 +n04005912 +n04006067 +n04006227 +n04006330 +n04006411 +n04007415 +n04007664 +n04008385 +n04008634 +n04009552 +n04009801 +n04009923 +n04010057 +n04010779 +n04010927 +n04011827 +n04012084 +n04012482 +n04012665 +n04013060 +n04013176 +n04013600 +n04013729 +n04014297 +n04015204 +n04015786 +n04015908 +n04016240 +n04016479 +n04016576 +n04016684 +n04016846 +n04017571 +n04017807 +n04018155 +n04018399 +n04018667 +n04019101 +n04019335 +n04019541 +n04019696 +n04019881 +n04020087 +n04020298 +n04020744 +n04020912 +n04021028 +n04021164 +n04021362 +n04021503 +n04021704 +n04021798 +n04022332 +n04022434 +n04022708 +n04022866 +n04023021 +n04023119 +n04023249 +n04023422 +n04023695 +n04023962 +n04024137 +n04024274 +n04024862 +n04024983 +n04025508 +n04025633 +n04026053 +n04026180 +n04026417 +n04026813 +n04026918 +n04027023 +n04027367 +n04027706 +n04027820 +n04027935 +n04028074 +n04028221 +n04028315 +n04028581 +n04028764 +n04029416 +n04029647 +n04029734 +n04029913 +n04030054 +n04030161 +n04030274 +n04030414 +n04030518 +n04030846 +n04030965 +n04031884 +n04032509 +n04032603 +n04032936 +n04033287 +n04033425 +n04033557 +n04033801 +n04033901 +n04033995 +n04034262 +n04034367 +n04035231 +n04035634 +n04035748 +n04035836 +n04035912 +n04036155 +n04036303 +n04036776 +n04036963 +n04037076 +n04037220 +n04037298 +n04037443 +n04037873 +n04037964 +n04038231 +n04038338 +n04038440 +n04038727 +n04039041 +n04039209 +n04039381 +n04039742 +n04039848 +n04040247 +n04040373 +n04040540 +n04040759 +n04041069 +n04041243 +n04041408 +n04041544 +n04041747 +n04042076 +n04042204 +n04042358 +n04042632 +n04042795 +n04042985 +n04043168 +n04043411 +n04043733 +n04044307 +n04044498 +n04044716 +n04044955 +n04045085 +n04045255 +n04045397 +n04045644 +n04045787 +n04045941 +n04046091 +n04046277 +n04046400 +n04046590 +n04046974 +n04047139 +n04047401 +n04047733 +n04047834 +n04048441 +n04049303 +n04049405 +n04049585 +n04049753 +n04050066 +n04050313 +n04050600 +n04050933 +n04051269 +n04051439 +n04051549 +n04051705 +n04051825 +n04052235 +n04052346 +n04052442 +n04052658 +n04052757 +n04053508 +n04053677 +n04053767 +n04054361 +n04054566 +n04054670 +n04055180 
+n04055447 +n04055700 +n04055861 +n04056073 +n04056180 +n04056413 +n04056932 +n04057047 +n04057215 +n04057435 +n04057673 +n04057846 +n04057981 +n04058096 +n04058239 +n04058486 +n04058594 +n04058721 +n04059157 +n04059298 +n04059399 +n04059516 +n04059947 +n04060198 +n04060448 +n04060647 +n04060904 +n04061681 +n04061793 +n04061969 +n04062179 +n04062428 +n04062644 +n04062807 +n04063154 +n04063373 +n04063868 +n04064213 +n04064401 +n04064747 +n04064862 +n04065272 +n04065464 +n04065789 +n04065909 +n04066023 +n04066270 +n04066388 +n04066476 +n04066767 +n04067143 +n04067231 +n04067353 +n04067472 +n04067658 +n04067818 +n04067921 +n04068441 +n04068601 +n04069166 +n04069276 +n04069434 +n04069582 +n04069777 +n04070003 +n04070207 +n04070415 +n04070545 +n04070727 +n04070964 +n04071102 +n04071263 +n04071393 +n04072193 +n04072551 +n04072960 +n04073425 +n04073948 +n04074185 +n04074963 +n04075291 +n04075468 +n04075715 +n04075813 +n04075916 +n04076052 +n04076284 +n04076713 +n04077430 +n04077594 +n04077734 +n04077889 +n04078002 +n04078574 +n04078955 +n04079106 +n04079244 +n04079603 +n04079933 +n04080138 +n04080454 +n04080705 +n04080833 +n04081281 +n04081699 +n04081844 +n04082344 +n04082562 +n04082710 +n04082886 +n04083113 +n04083309 +n04083649 +n04083800 +n04084517 +n04084682 +n04084889 +n04085017 +n04085574 +n04085873 +n04086066 +n04086273 +n04086446 +n04086663 +n04086794 +n04086937 +n04087126 +n04087432 +n04087709 +n04087826 +n04088229 +n04088343 +n04088441 +n04088696 +n04088797 +n04089152 +n04089376 +n04089666 +n04089836 +n04089976 +n04090263 +n04090548 +n04090781 +n04091097 +n04091466 +n04091584 +n04091693 +n04092168 +n04093157 +n04093223 +n04093625 +n04093775 +n04093915 +n04094060 +n04094250 +n04094438 +n04094608 +n04094720 +n04094859 +n04095109 +n04095210 +n04095342 +n04095577 +n04095938 +n04096066 +n04096733 +n04096848 +n04097085 +n04097373 +n04097622 +n04097760 +n04097866 +n04098169 +n04098260 +n04098399 +n04098513 +n04098795 +n04099003 +n04099175 +n04099429 +n04099969 +n04100174 +n04100519 +n04101375 +n04101497 +n04101701 +n04101860 +n04102037 +n04102162 +n04102285 +n04102406 +n04102618 +n04102760 +n04102872 +n04102962 +n04103094 +n04103206 +n04103364 +n04103665 +n04103769 +n04103918 +n04104147 +n04104384 +n04104500 +n04104770 +n04104925 +n04105068 +n04105438 +n04105704 +n04105893 +n04107598 +n04107743 +n04107984 +n04108268 +n04108822 +n04108999 +n04110068 +n04110178 +n04110281 +n04110439 +n04110654 +n04110841 +n04110955 +n04111190 +n04111414 +n04111531 +n04111668 +n04111962 +n04112147 +n04112252 +n04112430 +n04112579 +n04112654 +n04112752 +n04112921 +n04113038 +n04113194 +n04113316 +n04113406 +n04113641 +n04113765 +n04113968 +n04114069 +n04114301 +n04114428 +n04114719 +n04114844 +n04114996 +n04115144 +n04115256 +n04115456 +n04115542 +n04115802 +n04115996 +n04116098 +n04116294 +n04116389 +n04116512 +n04117216 +n04117464 +n04117639 +n04118021 +n04118538 +n04118635 +n04118776 +n04119091 +n04119230 +n04119360 +n04119478 +n04119630 +n04119751 +n04120489 +n04120695 +n04120842 +n04121228 +n04121342 +n04121426 +n04121511 +n04121728 +n04122262 +n04122349 +n04122492 +n04122578 +n04122685 +n04122825 +n04123026 +n04123123 +n04123228 +n04123317 +n04123448 +n04123567 +n04123740 +n04124098 +n04124202 +n04124370 +n04124488 +n04124573 +n04124887 +n04125021 +n04125116 +n04125257 +n04125541 +n04125692 +n04125853 +n04126066 +n04126244 +n04126541 +n04126659 +n04126852 +n04126980 +n04127117 +n04127249 +n04127395 +n04127521 +n04127633 +n04127904 +n04128413 +n04128499 +n04128710 +n04128837 +n04129490 +n04129688 +n04129766 
+n04130143 +n04130257 +n04130566 +n04130907 +n04131015 +n04131113 +n04131208 +n04131368 +n04131499 +n04131690 +n04131811 +n04131929 +n04132158 +n04132465 +n04132603 +n04132829 +n04132985 +n04133114 +n04133789 +n04134008 +n04134170 +n04134523 +n04134632 +n04135024 +n04135118 +n04135315 +n04135710 +n04135933 +n04136045 +n04136161 +n04136333 +n04136510 +n04136800 +n04137089 +n04137217 +n04137355 +n04137444 +n04137773 +n04137897 +n04138131 +n04138261 +n04138869 +n04138977 +n04139140 +n04139395 +n04139859 +n04140064 +n04140539 +n04140631 +n04140777 +n04140853 +n04141076 +n04141198 +n04141327 +n04141712 +n04141838 +n04141975 +n04142175 +n04142327 +n04142434 +n04142731 +n04142999 +n04143140 +n04143365 +n04143897 +n04144241 +n04144539 +n04144651 +n04145863 +n04146050 +n04146343 +n04146504 +n04146614 +n04146862 +n04146976 +n04147183 +n04147291 +n04147495 +n04147793 +n04147916 +n04148054 +n04148285 +n04148464 +n04148579 +n04148703 +n04149083 +n04149374 +n04149813 +n04150153 +n04150273 +n04150371 +n04150980 +n04151108 +n04151581 +n04151940 +n04152387 +n04152593 +n04153025 +n04153330 +n04153751 +n04154152 +n04154340 +n04154565 +n04154753 +n04154854 +n04154938 +n04155068 +n04155177 +n04155457 +n04155625 +n04155735 +n04155889 +n04156040 +n04156140 +n04156297 +n04156411 +n04156591 +n04156814 +n04156946 +n04157099 +n04157320 +n04158002 +n04158138 +n04158250 +n04158672 +n04158807 +n04158956 +n04160036 +n04160261 +n04160372 +n04160586 +n04160847 +n04161010 +n04161358 +n04161981 +n04162433 +n04162706 +n04163530 +n04164002 +n04164199 +n04164406 +n04164757 +n04164868 +n04165409 +n04165675 +n04165945 +n04166111 +n04166281 +n04166436 +n04167346 +n04167489 +n04167661 +n04168084 +n04168199 +n04168472 +n04168541 +n04168840 +n04169437 +n04169597 +n04170037 +n04170384 +n04170515 +n04170694 +n04170933 +n04171208 +n04171459 +n04171629 +n04171831 +n04172107 +n04172230 +n04172342 +n04172512 +n04172607 +n04172776 +n04172904 +n04173046 +n04173172 +n04173511 +n04173907 +n04174026 +n04174101 +n04174234 +n04174500 +n04174705 +n04175039 +n04175147 +n04175574 +n04176068 +n04176190 +n04176295 +n04176528 +n04177041 +n04177329 +n04177545 +n04177654 +n04177755 +n04177820 +n04177931 +n04178190 +n04178329 +n04178668 +n04179126 +n04179712 +n04179824 +n04179913 +n04180063 +n04180229 +n04180888 +n04181083 +n04181228 +n04181561 +n04181718 +n04182152 +n04182322 +n04183217 +n04183329 +n04183957 +n04184095 +n04184316 +n04184435 +n04184600 +n04184880 +n04185071 +n04185529 +n04185804 +n04185946 +n04186051 +n04186268 +n04186455 +n04186624 +n04186848 +n04187061 +n04187233 +n04187547 +n04187751 +n04187885 +n04187970 +n04188064 +n04188179 +n04189092 +n04189282 +n04189651 +n04189816 +n04190052 +n04190376 +n04190464 +n04190747 +n04190997 +n04191150 +n04191595 +n04191943 +n04192238 +n04192361 +n04192521 +n04192698 +n04192858 +n04193179 +n04193377 +n04193742 +n04193883 +n04194009 +n04194127 +n04194289 +n04196080 +n04196502 +n04196803 +n04196925 +n04197110 +n04197391 +n04197781 +n04197878 +n04198015 +n04198233 +n04198355 +n04198453 +n04198562 +n04198722 +n04198797 +n04199027 +n04200000 +n04200258 +n04200537 +n04200800 +n04200908 +n04201064 +n04201297 +n04201733 +n04202142 +n04202282 +n04202417 +n04203356 +n04204081 +n04204238 +n04204347 +n04204755 +n04205062 +n04205318 +n04205505 +n04205613 +n04206070 +n04206225 +n04206356 +n04206570 +n04206790 +n04207151 +n04207343 +n04207596 +n04207763 +n04207903 +n04208065 +n04208210 +n04208427 +n04208582 +n04208760 +n04208936 +n04209133 +n04209239 +n04209509 +n04209613 +n04209811 +n04210012 +n04210120 +n04210288 
+n04210390 +n04210591 +n04210858 +n04211001 +n04211219 +n04211356 +n04211528 +n04211857 +n04211970 +n04212165 +n04212282 +n04212467 +n04212810 +n04213105 +n04213264 +n04213353 +n04213530 +n04214046 +n04214282 +n04214413 +n04214649 +n04215153 +n04215402 +n04215588 +n04215800 +n04215910 +n04216634 +n04216860 +n04216963 +n04217387 +n04217546 +n04217718 +n04217882 +n04218564 +n04218921 +n04219185 +n04219424 +n04219580 +n04220250 +n04220805 +n04221076 +n04221673 +n04221823 +n04222210 +n04222307 +n04222470 +n04222723 +n04222847 +n04223066 +n04223170 +n04223299 +n04224395 +n04224543 +n04224842 +n04225031 +n04225222 +n04225729 +n04225987 +n04226322 +n04226464 +n04226537 +n04226826 +n04226962 +n04227050 +n04227144 +n04227519 +n04227787 +n04227900 +n04228054 +n04228215 +n04228422 +n04228581 +n04228693 +n04229007 +n04229107 +n04229480 +n04229620 +n04229737 +n04229816 +n04229959 +n04230387 +n04230487 +n04230603 +n04230707 +n04230808 +n04231272 +n04231693 +n04231905 +n04232153 +n04232312 +n04232437 +n04232800 +n04233027 +n04233124 +n04233295 +n04233715 +n04233832 +n04234160 +n04234260 +n04234455 +n04234670 +n04234763 +n04234887 +n04235291 +n04235646 +n04235771 +n04235860 +n04236001 +n04236377 +n04236702 +n04236809 +n04236935 +n04237174 +n04237287 +n04237423 +n04238128 +n04238321 +n04238617 +n04238763 +n04238953 +n04239074 +n04239218 +n04239333 +n04239436 +n04239639 +n04239786 +n04239900 +n04240434 +n04240752 +n04240867 +n04241042 +n04241249 +n04241394 +n04241573 +n04242084 +n04242315 +n04242408 +n04242587 +n04242704 +n04243003 +n04243142 +n04243251 +n04243546 +n04243941 +n04244379 +n04244847 +n04244997 +n04245218 +n04245412 +n04245508 +n04245847 +n04246060 +n04246271 +n04246459 +n04246731 +n04246855 +n04247011 +n04247440 +n04247544 +n04247630 +n04247736 +n04247876 +n04248209 +n04248396 +n04248507 +n04248851 +n04249415 +n04249582 +n04249882 +n04250224 +n04250473 +n04250599 +n04250692 +n04250850 +n04251144 +n04251701 +n04251791 +n04252077 +n04252225 +n04252331 +n04252560 +n04252653 +n04253057 +n04253168 +n04253304 +n04253931 +n04254009 +n04254120 +n04254450 +n04254680 +n04254777 +n04255163 +n04255346 +n04255499 +n04255586 +n04255670 +n04255768 +n04255899 +n04256318 +n04256520 +n04256758 +n04256891 +n04257223 +n04257684 +n04257790 +n04257986 +n04258138 +n04258333 +n04258438 +n04258618 +n04258732 +n04258859 +n04259202 +n04259468 +n04259630 +n04260192 +n04260364 +n04260589 +n04261116 +n04261281 +n04261369 +n04261506 +n04261638 +n04261767 +n04261868 +n04262161 +n04262530 +n04262678 +n04262869 +n04263257 +n04263336 +n04263502 +n04263760 +n04263950 +n04264134 +n04264233 +n04264361 +n04264485 +n04264628 +n04264765 +n04264914 +n04265275 +n04265428 +n04265904 +n04266014 +n04266162 +n04266375 +n04266486 +n04266849 +n04266968 +n04267091 +n04267165 +n04267246 +n04267435 +n04267577 +n04267985 +n04268142 +n04268275 +n04268418 +n04268565 +n04268799 +n04269086 +n04269270 +n04269502 +n04269668 +n04269822 +n04269944 +n04270147 +n04270371 +n04270576 +n04270891 +n04271148 +n04271531 +n04271793 +n04271891 +n04272054 +n04272389 +n04272782 +n04272928 +n04273064 +n04273285 +n04273569 +n04273659 +n04273796 +n04273972 +n04274686 +n04274985 +n04275093 +n04275175 +n04275283 +n04275548 +n04275661 +n04275904 +n04277352 +n04277493 +n04277669 +n04277826 +n04278247 +n04278353 +n04278447 +n04278605 +n04278932 +n04279063 +n04279172 +n04279353 +n04279462 +n04279858 +n04279987 +n04280259 +n04280373 +n04280487 +n04280845 +n04280970 +n04281260 +n04281375 +n04281571 +n04281998 +n04282231 +n04282494 +n04282872 +n04282992 +n04283096 +n04283255 
+n04283378 +n04283585 +n04283784 +n04283905 +n04284002 +n04284341 +n04284438 +n04284572 +n04284869 +n04285008 +n04285146 +n04285622 +n04285803 +n04285965 +n04286128 +n04286575 +n04286960 +n04287351 +n04287451 +n04287747 +n04287898 +n04287986 +n04288165 +n04288272 +n04288533 +n04288673 +n04289027 +n04289195 +n04289449 +n04289576 +n04289690 +n04289827 +n04290079 +n04290259 +n04290507 +n04290615 +n04290762 +n04291069 +n04291242 +n04291759 +n04291992 +n04292080 +n04292221 +n04292414 +n04292572 +n04292921 +n04293119 +n04293258 +n04293744 +n04294212 +n04294426 +n04294614 +n04294879 +n04295081 +n04295353 +n04295571 +n04295777 +n04295881 +n04296562 +n04297098 +n04297750 +n04297847 +n04298053 +n04298661 +n04298765 +n04299215 +n04299370 +n04299963 +n04300358 +n04300509 +n04300643 +n04301000 +n04301242 +n04301474 +n04301760 +n04302200 +n04302863 +n04302988 +n04303095 +n04303258 +n04303357 +n04303497 +n04304215 +n04304375 +n04304680 +n04305016 +n04305210 +n04305323 +n04305471 +n04305572 +n04305947 +n04306080 +n04306592 +n04306847 +n04307419 +n04307767 +n04307878 +n04307986 +n04308084 +n04308273 +n04308397 +n04308583 +n04308807 +n04308915 +n04309049 +n04309348 +n04309548 +n04309833 +n04310018 +n04310157 +n04310507 +n04310604 +n04310721 +n04310904 +n04311004 +n04311174 +n04311595 +n04312020 +n04312154 +n04312432 +n04312654 +n04312756 +n04312916 +n04313220 +n04313503 +n04313628 +n04314107 +n04314216 +n04314522 +n04314632 +n04314914 +n04315342 +n04315713 +n04315828 +n04315948 +n04316498 +n04316815 +n04316924 +n04317063 +n04317175 +n04317325 +n04317420 +n04317833 +n04317976 +n04318131 +n04318787 +n04318892 +n04318982 +n04319545 +n04319774 +n04319937 +n04320405 +n04320598 +n04320871 +n04320973 +n04321121 +n04321453 +n04322026 +n04322531 +n04322692 +n04322801 +n04323519 +n04323819 +n04324120 +n04324297 +n04324387 +n04324515 +n04325041 +n04325208 +n04325704 +n04325804 +n04325968 +n04326547 +n04326676 +n04326799 +n04326896 +n04327204 +n04327544 +n04327682 +n04328054 +n04328186 +n04328329 +n04328580 +n04328703 +n04328946 +n04329477 +n04329681 +n04329834 +n04329958 +n04330109 +n04330189 +n04330267 +n04330340 +n04330669 +n04330746 +n04330896 +n04330998 +n04331277 +n04331443 +n04331639 +n04331765 +n04331892 +n04332074 +n04332243 +n04332580 +n04332987 +n04333129 +n04333869 +n04334105 +n04334365 +n04334504 +n04334599 +n04335209 +n04335435 +n04335693 +n04335886 +n04336792 +n04337157 +n04337287 +n04337503 +n04337650 +n04338517 +n04338963 +n04339062 +n04339191 +n04339638 +n04339879 +n04340019 +n04340521 +n04340750 +n04340935 +n04341133 +n04341288 +n04341414 +n04341686 +n04343511 +n04343630 +n04343740 +n04344003 +n04344734 +n04344873 +n04345028 +n04345201 +n04345787 +n04346003 +n04346157 +n04346328 +n04346428 +n04346511 +n04346679 +n04346855 +n04347119 +n04347519 +n04347754 +n04348070 +n04348184 +n04348359 +n04348988 +n04349189 +n04349306 +n04349401 +n04349913 +n04350104 +n04350235 +n04350458 +n04350581 +n04350688 +n04350769 +n04350905 +n04351550 +n04351699 +n04353573 +n04354026 +n04354182 +n04354387 +n04354487 +n04354589 +n04355115 +n04355267 +n04355338 +n04355511 +n04355684 +n04355821 +n04355933 +n04356056 +n04356595 +n04356772 +n04356925 +n04357121 +n04357314 +n04357531 +n04357930 +n04358117 +n04358256 +n04358491 +n04358707 +n04358874 +n04359034 +n04359124 +n04359217 +n04359335 +n04359500 +n04359589 +n04360501 +n04360798 +n04360914 +n04361095 +n04361260 +n04361937 +n04362624 +n04362821 +n04362972 +n04363082 +n04363210 +n04363412 +n04363671 +n04363777 +n04363874 +n04363991 +n04364160 +n04364397 +n04364545 +n04364827 
+n04364994 +n04365112 +n04365229 +n04365328 +n04365484 +n04365751 +n04366033 +n04366116 +n04366367 +n04366832 +n04367011 +n04367371 +n04367480 +n04367746 +n04367950 +n04368109 +n04368235 +n04368365 +n04368496 +n04368695 +n04368840 +n04369025 +n04369282 +n04369485 +n04369618 +n04370048 +n04370288 +n04370456 +n04370600 +n04370774 +n04370955 +n04371050 +n04371430 +n04371563 +n04371774 +n04371979 +n04372370 +n04373089 +n04373428 +n04373563 +n04373704 +n04373795 +n04373894 +n04374315 +n04374521 +n04374735 +n04374907 +n04375080 +n04375241 +n04375405 +n04375615 +n04375775 +n04375926 +n04376400 +n04376876 +n04377057 +n04378489 +n04378651 +n04378956 +n04379096 +n04379243 +n04379964 +n04380255 +n04380346 +n04380533 +n04380916 +n04381073 +n04381450 +n04381587 +n04381724 +n04381860 +n04381994 +n04382334 +n04382438 +n04382537 +n04382695 +n04382880 +n04383015 +n04383130 +n04383301 +n04383839 +n04383923 +n04384593 +n04384910 +n04385079 +n04385157 +n04385536 +n04385799 +n04386051 +n04386456 +n04386664 +n04386792 +n04387095 +n04387201 +n04387261 +n04387400 +n04387531 +n04387706 +n04387932 +n04388040 +n04388162 +n04388473 +n04388574 +n04388743 +n04389033 +n04389430 +n04389521 +n04389718 +n04389854 +n04389999 +n04390483 +n04390577 +n04390873 +n04390977 +n04391445 +n04391838 +n04392113 +n04392526 +n04392764 +n04392985 +n04393095 +n04393301 +n04393549 +n04393808 +n04393913 +n04394031 +n04394261 +n04394421 +n04394630 +n04395024 +n04395106 +n04395332 +n04395651 +n04395875 +n04396226 +n04396335 +n04396650 +n04396808 +n04396902 +n04397027 +n04397168 +n04397261 +n04397452 +n04397645 +n04397768 +n04397860 +n04398044 +n04398497 +n04398688 +n04398834 +n04398951 +n04399046 +n04399158 +n04399537 +n04399846 +n04400109 +n04400289 +n04400499 +n04400737 +n04400899 +n04401088 +n04401578 +n04401680 +n04401828 +n04401949 +n04402057 +n04402342 +n04402449 +n04402580 +n04402746 +n04402984 +n04403413 +n04403524 +n04403638 +n04403925 +n04404072 +n04404200 +n04404412 +n04404817 +n04404997 +n04405540 +n04405762 +n04405907 +n04406239 +n04406552 +n04406687 +n04406817 +n04407257 +n04407435 +n04407686 +n04408871 +n04409011 +n04409128 +n04409279 +n04409384 +n04409515 +n04409625 +n04409806 +n04409911 +n04410086 +n04410365 +n04410485 +n04410565 +n04410663 +n04410760 +n04410886 +n04411019 +n04411264 +n04411835 +n04411966 +n04412097 +n04412300 +n04412416 +n04413151 +n04413419 +n04413969 +n04414101 +n04414199 +n04414319 +n04414476 +n04414675 +n04414909 +n04415257 +n04415663 +n04415815 +n04416005 +n04416901 +n04417086 +n04417180 +n04417361 +n04417672 +n04417809 +n04418357 +n04418644 +n04419073 +n04419642 +n04419868 +n04420024 +n04420720 +n04421083 +n04421258 +n04421417 +n04421582 +n04421740 +n04421872 +n04422409 +n04422566 +n04422727 +n04422875 +n04423552 +n04423687 +n04423845 +n04424692 +n04425804 +n04425977 +n04426184 +n04426316 +n04426427 +n04427216 +n04427473 +n04427559 +n04427715 +n04427857 +n04428008 +n04428191 +n04428382 +n04428634 +n04429038 +n04429376 +n04430475 +n04430605 +n04430896 +n04431025 +n04431436 +n04431648 +n04431745 +n04431925 +n04432043 +n04432203 +n04432662 +n04432785 +n04433377 +n04433585 +n04434207 +n04434531 +n04434932 +n04435180 +n04435552 +n04435653 +n04435759 +n04435870 +n04436012 +n04436185 +n04436329 +n04436401 +n04436542 +n04436832 +n04436992 +n04437276 +n04437380 +n04437670 +n04437953 +n04438304 +n04438507 +n04438643 +n04438897 +n04439505 +n04439585 +n04439712 +n04440597 +n04440963 +n04441093 +n04441528 +n04441662 +n04441790 +n04442312 +n04442441 +n04442582 +n04442741 +n04443164 +n04443257 +n04443433 +n04443766 
+n04444121 +n04444218 +n04444749 +n04444953 +n04445040 +n04445154 +n04445327 +n04445610 +n04445782 +n04445952 +n04446162 +n04446276 +n04446844 +n04447028 +n04447156 +n04447276 +n04447443 +n04447861 +n04448070 +n04448185 +n04448361 +n04449290 +n04449449 +n04449550 +n04449700 +n04449966 +n04450133 +n04450243 +n04450465 +n04450640 +n04450749 +n04450994 +n04451139 +n04451318 +n04451636 +n04451818 +n04452528 +n04452615 +n04452757 +n04452848 +n04453037 +n04453156 +n04453390 +n04453666 +n04453910 +n04454654 +n04454792 +n04454908 +n04455048 +n04455250 +n04455579 +n04455652 +n04456011 +n04456115 +n04456472 +n04456734 +n04457157 +n04457326 +n04457474 +n04457638 +n04457767 +n04457910 +n04458201 +n04458633 +n04458843 +n04459018 +n04459122 +n04459243 +n04459362 +n04459610 +n04459773 +n04459909 +n04460130 +n04461437 +n04461570 +n04461696 +n04461879 +n04462011 +n04462240 +n04462576 +n04463679 +n04464125 +n04464615 +n04464852 +n04465050 +n04465203 +n04465358 +n04465501 +n04465666 +n04466871 +n04467099 +n04467307 +n04467506 +n04467665 +n04467899 +n04468005 +n04469003 +n04469251 +n04469514 +n04469684 +n04469813 +n04470741 +n04471148 +n04471315 +n04471632 +n04471912 +n04472243 +n04472563 +n04472726 +n04472961 +n04473108 +n04473275 +n04473884 +n04474035 +n04474187 +n04474466 +n04475309 +n04475411 +n04475496 +n04475631 +n04475749 +n04475900 +n04476116 +n04476259 +n04476526 +n04476831 +n04476972 +n04477219 +n04477387 +n04477548 +n04477725 +n04478066 +n04478383 +n04478512 +n04478657 +n04479046 +n04479287 +n04479405 +n04479526 +n04479694 +n04479823 +n04479939 +n04480033 +n04480141 +n04480303 +n04480527 +n04480853 +n04480995 +n04481524 +n04481642 +n04482177 +n04482297 +n04482393 +n04482975 +n04483073 +n04483307 +n04483925 +n04484024 +n04484432 +n04485082 +n04485423 +n04485586 +n04485750 +n04485884 +n04486054 +n04486213 +n04486322 +n04486616 +n04486934 +n04487081 +n04487394 +n04487724 +n04487894 +n04488202 +n04488427 +n04488530 +n04488742 +n04488857 +n04489008 +n04489695 +n04489817 +n04490091 +n04491312 +n04491388 +n04491638 +n04491769 +n04491934 +n04492060 +n04492157 +n04492375 +n04492749 +n04493109 +n04493259 +n04493381 +n04494204 +n04495051 +n04495183 +n04495310 +n04495450 +n04495555 +n04495698 +n04495843 +n04496614 +n04496726 +n04496872 +n04497249 +n04497442 +n04497570 +n04497801 +n04498275 +n04498389 +n04498523 +n04498873 +n04499062 +n04499300 +n04499446 +n04499554 +n04499810 +n04500060 +n04500390 +n04501127 +n04501281 +n04501370 +n04501550 +n04501837 +n04501947 +n04502059 +n04502197 +n04502502 +n04502670 +n04502851 +n04502989 +n04503073 +n04503155 +n04503269 +n04503413 +n04503499 +n04503593 +n04503705 +n04504038 +n04504141 +n04504770 +n04505036 +n04505345 +n04505470 +n04505888 +n04506289 +n04506402 +n04506506 +n04506688 +n04506895 +n04506994 +n04507155 +n04507326 +n04507453 +n04507689 +n04508163 +n04508489 +n04508949 +n04509171 +n04509260 +n04509417 +n04509592 +n04510706 +n04511002 +n04513827 +n04513998 +n04514095 +n04514241 +n04514648 +n04515003 +n04515444 +n04515729 +n04515890 +n04516116 +n04516214 +n04516354 +n04516672 +n04517211 +n04517408 +n04517823 +n04517999 +n04518132 +n04518343 +n04518643 +n04518764 +n04519153 +n04519536 +n04519728 +n04519887 +n04520170 +n04520382 +n04520784 +n04520962 +n04521571 +n04521863 +n04521987 +n04522168 +n04523525 +n04523831 +n04524142 +n04524313 +n04524594 +n04524716 +n04524941 +n04525038 +n04525191 +n04525305 +n04525417 +n04525584 +n04525821 +n04526520 +n04526800 +n04526964 +n04527648 +n04528079 +n04528968 +n04529108 +n04529681 +n04529962 +n04530283 +n04530456 +n04530566 
+n04531098 +n04531873 +n04532022 +n04532106 +n04532398 +n04532504 +n04532670 +n04532831 +n04533042 +n04533199 +n04533499 +n04533594 +n04533700 +n04533802 +n04533946 +n04534127 +n04534359 +n04534520 +n04534895 +n04535252 +n04535370 +n04535524 +n04536153 +n04536335 +n04536465 +n04536595 +n04536765 +n04536866 +n04537436 +n04538249 +n04538403 +n04538552 +n04538878 +n04539053 +n04539203 +n04539407 +n04539794 +n04540053 +n04540255 +n04540397 +n04540761 +n04541136 +n04541320 +n04541662 +n04541777 +n04541987 +n04542095 +n04542329 +n04542474 +n04542595 +n04542715 +n04542858 +n04542943 +n04543158 +n04543509 +n04543636 +n04543772 +n04543924 +n04543996 +n04544325 +n04544450 +n04545305 +n04545471 +n04545748 +n04545858 +n04545984 +n04546081 +n04546194 +n04546340 +n04546595 +n04546855 +n04547592 +n04548280 +n04548362 +n04549028 +n04549122 +n04549629 +n04549721 +n04549919 +n04550184 +n04550676 +n04551055 +n04551833 +n04552097 +n04552348 +n04552551 +n04552696 +n04553389 +n04553561 +n04553703 +n04554211 +n04554406 +n04554684 +n04554871 +n04554998 +n04555291 +n04555400 +n04555600 +n04555700 +n04555897 +n04556408 +n04556533 +n04556664 +n04556948 +n04557308 +n04557522 +n04557648 +n04557751 +n04558059 +n04558199 +n04558478 +n04558804 +n04559023 +n04559166 +n04559451 +n04559620 +n04559730 +n04559910 +n04559994 +n04560113 +n04560292 +n04560502 +n04560619 +n04560804 +n04560882 +n04561010 +n04561287 +n04561422 +n04561734 +n04561857 +n04561965 +n04562122 +n04562262 +n04562496 +n04562935 +n04563020 +n04563204 +n04563413 +n04563560 +n04563790 +n04564278 +n04564581 +n04565039 +n04565375 +n04566257 +n04566561 +n04566756 +n04567098 +n04567593 +n04567746 +n04568069 +n04568557 +n04568713 +n04568841 +n04569063 +n04569520 +n04569822 +n04570118 +n04570214 +n04570416 +n04570532 +n04570815 +n04570958 +n04571292 +n04571566 +n04571686 +n04571800 +n04571958 +n04572121 +n04572235 +n04572935 +n04573045 +n04573281 +n04573379 +n04573513 +n04573625 +n04573832 +n04573937 +n04574067 +n04574348 +n04574471 +n04574606 +n04574999 +n04575723 +n04575824 +n04576002 +n04576211 +n04576971 +n04577139 +n04577293 +n04577426 +n04577567 +n04577769 +n04578112 +n04578329 +n04578559 +n04578708 +n04578801 +n04578934 +n04579056 +n04579145 +n04579230 +n04579432 +n04579667 +n04579986 +n04580493 +n04581102 +n04581595 +n04581829 +n04582205 +n04582349 +n04582771 +n04582869 +n04583022 +n04583212 +n04583620 +n04583888 +n04583967 +n04584056 +n04584207 +n04584373 +n04585128 +n04585318 +n04585456 +n04585626 +n04585745 +n04585980 +n04586072 +n04586581 +n04586932 +n04587327 +n04587404 +n04587559 +n04587648 +n04588739 +n04589190 +n04589325 +n04589434 +n04589593 +n04589890 +n04590021 +n04590129 +n04590263 +n04590553 +n04590746 +n04590933 +n04591056 +n04591157 +n04591249 +n04591359 +n04591517 +n04591631 +n04591713 +n04591887 +n04592005 +n04592099 +n04592356 +n04592465 +n04592596 +n04592741 +n04593077 +n04593185 +n04593376 +n04593524 +n04593629 +n04593866 +n04594114 +n04594218 +n04594489 +n04594742 +n04594828 +n04594919 +n04595028 +n04595285 +n04595501 +n04595611 +n04595762 +n04595855 +n04596116 +n04596492 +n04596742 +n04596852 +n04597066 +n04597309 +n04597400 +n04597804 +n04597913 +n04598136 +n04598318 +n04598416 +n04598582 +n04598965 +n04599124 +n04599235 +n04600312 +n04600486 +n04600912 +n04601041 +n04601159 +n04601938 +n04602762 +n04602840 +n04602956 +n04603399 +n04603729 +n04603872 +n04604276 +n04604644 +n04604806 +n04605057 +n04605163 +n04605321 +n04605446 +n04605572 +n04605726 +n04606251 +n04606574 +n04607035 +n04607242 +n04607640 +n04607759 +n04607869 +n04607982 
+n04608329 +n04608435 +n04608567 +n04608809 +n04608923 +n04609531 +n04609651 +n04609811 +n04610013 +n04610176 +n04610274 +n04610503 +n04610676 +n04611351 +n04611795 +n04611916 +n04612026 +n04612159 +n04612257 +n04612373 +n04612504 +n04612840 +n04613015 +n04613158 +n04613696 +n04613939 +n04614505 +n04614655 +n04614844 +n04615149 +n04615226 +n04615644 +n04682018 +n04950713 +n04950952 +n04951071 +n04951186 +n04951373 +n04951716 +n04951875 +n04953296 +n04953678 +n04955160 +n04957356 +n04957589 +n04958634 +n04958865 +n04959061 +n04959230 +n04959672 +n04960277 +n04960582 +n04961062 +n04961331 +n04961691 +n04962062 +n04962240 +n04963111 +n04963307 +n04963588 +n04963740 +n04964001 +n04964799 +n04964878 +n04965179 +n04965451 +n04965661 +n04966543 +n04966941 +n04967191 +n04967561 +n04967674 +n04967801 +n04967882 +n04968056 +n04968139 +n04968749 +n04968895 +n04969242 +n04969540 +n04969798 +n04969952 +n04970059 +n04970312 +n04970398 +n04970470 +n04970631 +n04970916 +n04971211 +n04971313 +n04972350 +n04972451 +n04972801 +n04973020 +n04973291 +n04973386 +n04973585 +n04973669 +n04973816 +n04974145 +n04974340 +n04974859 +n04975739 +n04976319 +n04976952 +n04977412 +n04978561 +n04979002 +n04979307 +n04981658 +n05102764 +n05218119 +n05233741 +n05235879 +n05238282 +n05239437 +n05241218 +n05241485 +n05241662 +n05242070 +n05242239 +n05242928 +n05244421 +n05244755 +n05244934 +n05245192 +n05257476 +n05257967 +n05258051 +n05258627 +n05259914 +n05260127 +n05260240 +n05261310 +n05262422 +n05262534 +n05262698 +n05263183 +n05263316 +n05263448 +n05265736 +n05266096 +n05266879 +n05278922 +n05279953 +n05282652 +n05285623 +n05302499 +n05314075 +n05399034 +n05399243 +n05399356 +n05418717 +n05427346 +n05442594 +n05447757 +n05448704 +n05448827 +n05449196 +n05449661 +n05449959 +n05450617 +n05451099 +n05451384 +n05453412 +n05453657 +n05453815 +n05454833 +n05454978 +n05455113 +n05458173 +n05458576 +n05459101 +n05459457 +n05459769 +n05460759 +n05464534 +n05467054 +n05467758 +n05468098 +n05468739 +n05469664 +n05469861 +n05475397 +n05482922 +n05486510 +n05491154 +n05526957 +n05538625 +n05539947 +n05541509 +n05542893 +n05545879 +n05571341 +n05578095 +n05581932 +n05584746 +n05586759 +n05604434 +n05716342 +n06008896 +n06209940 +n06254669 +n06255081 +n06255613 +n06259898 +n06262567 +n06262943 +n06263202 +n06263369 +n06263609 +n06263762 +n06263895 +n06266417 +n06266633 +n06266710 +n06266878 +n06266973 +n06267145 +n06267564 +n06267655 +n06267758 +n06267893 +n06267991 +n06271778 +n06272290 +n06272612 +n06272803 +n06273207 +n06273294 +n06273414 +n06273555 +n06273743 +n06273890 +n06273986 +n06274092 +n06274292 +n06274546 +n06274760 +n06274921 +n06275095 +n06275353 +n06275471 +n06276501 +n06276697 +n06276902 +n06277025 +n06277135 +n06277280 +n06278338 +n06278475 +n06281040 +n06281175 +n06340977 +n06359193 +n06359467 +n06359657 +n06415688 +n06417096 +n06418693 +n06419354 +n06423496 +n06470073 +n06591815 +n06592078 +n06592281 +n06592421 +n06595351 +n06596179 +n06596364 +n06596474 +n06596607 +n06596727 +n06596845 +n06613686 +n06614901 +n06616216 +n06618653 +n06625062 +n06785654 +n06793231 +n06794110 +n06874185 +n06883725 +n06892775 +n06998748 +n07005523 +n07248320 +n07273802 +n07461050 +n07556406 +n07556637 +n07556872 +n07556970 +n07557165 +n07557434 +n07560193 +n07560331 +n07560422 +n07560542 +n07560652 +n07560903 +n07561112 +n07561590 +n07561848 +n07562017 +n07562172 +n07562379 +n07562495 +n07562651 +n07562881 +n07562984 +n07563207 +n07563366 +n07563642 +n07563800 +n07564008 +n07564101 +n07564292 +n07564515 +n07564629 +n07564796 +n07564971 
+n07565083 +n07565161 +n07565259 +n07565608 +n07565725 +n07565945 +n07566092 +n07566231 +n07566340 +n07566863 +n07567039 +n07567139 +n07567390 +n07567611 +n07567707 +n07567980 +n07568095 +n07568241 +n07568389 +n07568502 +n07568625 +n07568818 +n07568991 +n07569106 +n07569423 +n07569543 +n07569644 +n07569873 +n07570021 +n07570530 +n07570720 +n07572353 +n07572616 +n07572858 +n07572957 +n07573103 +n07573347 +n07573453 +n07573563 +n07573696 +n07574176 +n07574426 +n07574504 +n07574602 +n07574780 +n07574923 +n07575076 +n07575226 +n07575392 +n07575510 +n07575726 +n07575984 +n07576182 +n07576438 +n07576577 +n07576781 +n07576969 +n07577144 +n07577374 +n07577538 +n07577657 +n07577772 +n07577918 +n07578093 +n07579575 +n07579688 +n07579787 +n07579917 +n07580053 +n07580253 +n07580359 +n07580470 +n07580592 +n07581249 +n07581346 +n07581607 +n07581775 +n07581931 +n07582027 +n07582152 +n07582277 +n07582441 +n07582609 +n07582811 +n07582892 +n07582970 +n07583066 +n07583197 +n07583865 +n07583978 +n07584110 +n07584228 +n07584332 +n07584423 +n07584593 +n07584859 +n07584938 +n07585015 +n07585107 +n07585208 +n07585474 +n07585557 +n07585644 +n07585758 +n07585906 +n07585997 +n07586099 +n07586179 +n07586318 +n07586485 +n07586604 +n07586718 +n07586894 +n07587023 +n07587111 +n07587206 +n07587331 +n07587441 +n07587618 +n07587700 +n07587819 +n07587962 +n07588111 +n07588193 +n07588299 +n07588419 +n07588574 +n07588688 +n07588817 +n07588947 +n07589458 +n07589543 +n07589724 +n07589872 +n07589967 +n07590068 +n07590177 +n07590320 +n07590502 +n07590611 +n07590752 +n07590841 +n07590974 +n07591049 +n07591162 +n07591236 +n07591330 +n07591473 +n07591586 +n07591813 +n07591961 +n07592094 +n07592317 +n07592400 +n07592481 +n07592656 +n07592768 +n07592922 +n07593004 +n07593107 +n07593199 +n07593471 +n07593774 +n07593972 +n07594066 +n07594155 +n07594250 +n07594737 +n07594840 +n07595051 +n07595180 +n07595368 +n07595649 +n07595751 +n07595914 +n07596046 +n07596160 +n07596362 +n07596452 +n07596566 +n07596684 +n07596967 +n07597145 +n07597263 +n07597365 +n07598256 +n07598529 +n07598622 +n07598734 +n07598928 +n07599068 +n07599161 +n07599242 +n07599383 +n07599468 +n07599554 +n07599649 +n07599783 +n07599911 +n07599998 +n07600177 +n07600285 +n07600394 +n07600506 +n07600696 +n07600895 +n07601025 +n07601175 +n07601290 +n07601407 +n07601572 +n07601686 +n07601809 +n07602650 +n07604956 +n07605040 +n07605198 +n07605282 +n07605380 +n07605474 +n07605597 +n07605693 +n07605804 +n07605944 +n07606058 +n07606191 +n07606278 +n07606419 +n07606538 +n07606669 +n07606764 +n07606933 +n07607027 +n07607138 +n07607361 +n07607492 +n07607605 +n07607707 +n07607832 +n07607967 +n07608098 +n07608245 +n07608339 +n07608429 +n07608533 +n07608641 +n07608721 +n07608866 +n07608980 +n07609083 +n07609215 +n07609316 +n07609407 +n07609549 +n07609632 +n07609728 +n07609840 +n07610295 +n07610502 +n07610620 +n07610746 +n07610890 +n07611046 +n07611148 +n07611267 +n07611358 +n07611733 +n07611839 +n07611991 +n07612137 +n07612273 +n07612367 +n07612530 +n07612632 +n07612996 +n07613158 +n07613266 +n07613480 +n07613671 +n07613815 +n07614103 +n07614198 +n07614348 +n07614500 +n07614730 +n07614825 +n07615052 +n07615190 +n07615289 +n07615460 +n07615569 +n07615671 +n07615774 +n07615954 +n07616046 +n07616174 +n07616265 +n07616386 +n07616487 +n07616590 +n07616748 +n07616906 +n07617051 +n07617188 +n07617344 +n07617447 +n07617526 +n07617611 +n07617708 +n07617839 +n07617932 +n07618029 +n07618119 +n07618281 +n07618432 +n07618587 +n07618684 +n07618871 +n07619004 +n07619208 +n07619301 +n07619409 +n07619508 
+n07619881 +n07620047 +n07620145 +n07620327 +n07620597 +n07620689 +n07621264 +n07621497 +n07621618 +n07623136 +n07624466 +n07624666 +n07624757 +n07624924 +n07625061 +n07625324 +n07627931 +n07628068 +n07628181 +n07631926 +n07639069 +n07641928 +n07642361 +n07642471 +n07642742 +n07642833 +n07642933 +n07643026 +n07643200 +n07643306 +n07643474 +n07643577 +n07643679 +n07643764 +n07643891 +n07643981 +n07644244 +n07648913 +n07648997 +n07650792 +n07650903 +n07651025 +n07654148 +n07654298 +n07655067 +n07655263 +n07663899 +n07665438 +n07666176 +n07672914 +n07678586 +n07678729 +n07678953 +n07679034 +n07679140 +n07679356 +n07680168 +n07680313 +n07680416 +n07680517 +n07680655 +n07680761 +n07680932 +n07681264 +n07681355 +n07681450 +n07681691 +n07681805 +n07681926 +n07682197 +n07682316 +n07682477 +n07682624 +n07682808 +n07682952 +n07683039 +n07683138 +n07683265 +n07683360 +n07683490 +n07683617 +n07683786 +n07684084 +n07684164 +n07684289 +n07684422 +n07684517 +n07684600 +n07684938 +n07685031 +n07685118 +n07685218 +n07685303 +n07685399 +n07685546 +n07685730 +n07685918 +n07686021 +n07686202 +n07686299 +n07686461 +n07686634 +n07686720 +n07686873 +n07687053 +n07687211 +n07687381 +n07687469 +n07687626 +n07687789 +n07688021 +n07688130 +n07688265 +n07688412 +n07688624 +n07688757 +n07688898 +n07689003 +n07689217 +n07689313 +n07689490 +n07689624 +n07689757 +n07689842 +n07690019 +n07690152 +n07690273 +n07690431 +n07690511 +n07690585 +n07690739 +n07690892 +n07691091 +n07691237 +n07691539 +n07691650 +n07691758 +n07691863 +n07691954 +n07692114 +n07692248 +n07692405 +n07692517 +n07692614 +n07692887 +n07693048 +n07693223 +n07693439 +n07693590 +n07693725 +n07693889 +n07693972 +n07694169 +n07694403 +n07694516 +n07694659 +n07694839 +n07695187 +n07695284 +n07695410 +n07695504 +n07695652 +n07695742 +n07695878 +n07695965 +n07696403 +n07696527 +n07696625 +n07696728 +n07696839 +n07696977 +n07697100 +n07697313 +n07697408 +n07697537 +n07697699 +n07697825 +n07698250 +n07698401 +n07698543 +n07698672 +n07698782 +n07700003 +n07703889 +n07704054 +n07704205 +n07704305 +n07705931 +n07707451 +n07708124 +n07708398 +n07708512 +n07708685 +n07708798 +n07709046 +n07709172 +n07709333 +n07709701 +n07709881 +n07710007 +n07710283 +n07710616 +n07710952 +n07711080 +n07711232 +n07711371 +n07711569 +n07711683 +n07711799 +n07711907 +n07712063 +n07712267 +n07712382 +n07712559 +n07712748 +n07712856 +n07712959 +n07713074 +n07713267 +n07713395 +n07713763 +n07713895 +n07714078 +n07714188 +n07714287 +n07714448 +n07714571 +n07714802 +n07714895 +n07714990 +n07715103 +n07715221 +n07715407 +n07715561 +n07715721 +n07716034 +n07716203 +n07716358 +n07716504 +n07716649 +n07716750 +n07716906 +n07717070 +n07717410 +n07717556 +n07717714 +n07717858 +n07718068 +n07718195 +n07718329 +n07718472 +n07718671 +n07718747 +n07718920 +n07719058 +n07719213 +n07719330 +n07719437 +n07719616 +n07719756 +n07719839 +n07719980 +n07720084 +n07720185 +n07720277 +n07720442 +n07720615 +n07720875 +n07721018 +n07721118 +n07721195 +n07721325 +n07721456 +n07721678 +n07721833 +n07721942 +n07722052 +n07722217 +n07722390 +n07722485 +n07722666 +n07722763 +n07722888 +n07723039 +n07723177 +n07723330 +n07723559 +n07723753 +n07723968 +n07724078 +n07724173 +n07724269 +n07724492 +n07724654 +n07724819 +n07724943 +n07725158 +n07725255 +n07725376 +n07725531 +n07725663 +n07725789 +n07725888 +n07726009 +n07726095 +n07726230 +n07726386 +n07726525 +n07726672 +n07726796 +n07727048 +n07727140 +n07727252 +n07727377 +n07727458 +n07727578 +n07727741 +n07727868 +n07728053 +n07728181 +n07728284 +n07728391 +n07728585 
+n07728708 +n07728804 +n07729000 +n07729142 +n07729225 +n07729384 +n07729485 +n07729828 +n07729926 +n07730033 +n07730207 +n07730320 +n07730406 +n07730562 +n07730708 +n07730855 +n07731006 +n07731122 +n07731284 +n07731436 +n07731587 +n07731767 +n07731952 +n07732168 +n07732302 +n07732433 +n07732525 +n07732636 +n07732747 +n07732904 +n07733005 +n07733124 +n07733217 +n07733394 +n07733567 +n07733712 +n07733847 +n07734017 +n07734183 +n07734292 +n07734417 +n07734555 +n07734744 +n07734879 +n07735052 +n07735179 +n07735294 +n07735404 +n07735510 +n07735687 +n07735803 +n07735981 +n07736087 +n07736256 +n07736371 +n07736527 +n07736692 +n07736813 +n07736971 +n07737081 +n07737594 +n07737745 +n07738105 +n07738224 +n07739035 +n07739125 +n07739344 +n07739506 +n07739923 +n07740033 +n07740115 +n07740220 +n07740342 +n07740461 +n07740597 +n07740744 +n07740855 +n07740954 +n07741138 +n07741235 +n07741357 +n07741461 +n07741623 +n07741706 +n07741804 +n07741888 +n07742012 +n07742224 +n07742313 +n07742415 +n07742513 +n07742605 +n07742704 +n07743224 +n07743384 +n07743544 +n07743723 +n07743902 +n07744057 +n07744246 +n07744430 +n07744559 +n07744682 +n07744811 +n07745046 +n07745197 +n07745357 +n07745466 +n07745661 +n07745940 +n07746038 +n07746186 +n07746334 +n07746551 +n07746749 +n07746910 +n07747055 +n07747607 +n07747811 +n07747951 +n07748157 +n07748276 +n07748416 +n07748574 +n07748753 +n07748912 +n07749095 +n07749192 +n07749312 +n07749446 +n07749582 +n07749731 +n07749870 +n07749969 +n07750146 +n07750299 +n07750449 +n07750586 +n07750736 +n07750872 +n07751004 +n07751148 +n07751280 +n07751451 +n07751737 +n07751858 +n07751977 +n07752109 +n07752264 +n07752377 +n07752514 +n07752602 +n07752664 +n07752782 +n07752874 +n07752966 +n07753113 +n07753275 +n07753448 +n07753592 +n07753743 +n07753980 +n07754155 +n07754279 +n07754451 +n07754684 +n07754894 +n07755089 +n07755262 +n07755411 +n07755619 +n07755707 +n07755929 +n07756096 +n07756325 +n07756499 +n07756641 +n07756838 +n07756951 +n07757132 +n07757312 +n07757511 +n07757602 +n07757753 +n07757874 +n07757990 +n07758125 +n07758260 +n07758407 +n07758582 +n07758680 +n07758950 +n07759194 +n07759324 +n07759424 +n07759576 +n07759691 +n07759816 +n07760070 +n07760153 +n07760297 +n07760395 +n07760501 +n07760673 +n07760755 +n07760859 +n07761141 +n07761309 +n07761611 +n07761777 +n07761954 +n07762114 +n07762244 +n07762373 +n07762534 +n07762740 +n07762913 +n07763107 +n07763290 +n07763483 +n07763629 +n07763792 +n07763987 +n07764155 +n07764315 +n07764486 +n07764630 +n07764847 +n07765073 +n07765208 +n07765361 +n07765517 +n07765612 +n07765728 +n07765862 +n07765999 +n07766173 +n07766409 +n07766530 +n07766723 +n07766891 +n07767002 +n07767171 +n07767344 +n07767549 +n07767709 +n07767847 +n07768068 +n07768139 +n07768230 +n07768318 +n07768423 +n07768590 +n07768694 +n07768858 +n07769102 +n07769306 +n07769465 +n07769584 +n07769731 +n07769886 +n07770034 +n07770180 +n07770439 +n07770571 +n07770763 +n07770869 +n07771082 +n07771212 +n07771405 +n07771539 +n07771731 +n07771891 +n07772026 +n07772147 +n07772274 +n07772413 +n07772788 +n07772935 +n07773428 +n07774182 +n07774295 +n07774479 +n07774596 +n07774719 +n07774842 +n07775050 +n07775197 +n07783827 +n07785487 +n07800091 +n07800487 +n07800636 +n07800740 +n07801007 +n07801091 +n07801342 +n07801508 +n07801709 +n07801779 +n07801892 +n07802026 +n07802152 +n07802246 +n07802417 +n07802767 +n07802863 +n07802963 +n07803093 +n07803213 +n07803310 +n07803408 +n07803545 +n07803779 +n07803895 +n07803992 +n07804152 +n07804323 +n07804543 +n07804657 +n07804771 +n07804900 +n07805006 
+n07805254 +n07805389 +n07805478 +n07805594 +n07805731 +n07805966 +n07806043 +n07806120 +n07806221 +n07806633 +n07806774 +n07806879 +n07807002 +n07807171 +n07807317 +n07807472 +n07807594 +n07807710 +n07807834 +n07807922 +n07808022 +n07808166 +n07808268 +n07808352 +n07808479 +n07808587 +n07808675 +n07808806 +n07808904 +n07809096 +n07809368 +n07810531 +n07810907 +n07811416 +n07812046 +n07812184 +n07812662 +n07812790 +n07812913 +n07813107 +n07813324 +n07813495 +n07813579 +n07813717 +n07813833 +n07814007 +n07814203 +n07814390 +n07814487 +n07814634 +n07814790 +n07814925 +n07815163 +n07815294 +n07815424 +n07815588 +n07815839 +n07815956 +n07816052 +n07816164 +n07816296 +n07816398 +n07816575 +n07816726 +n07816839 +n07817024 +n07817160 +n07817315 +n07817465 +n07817599 +n07817758 +n07817871 +n07818029 +n07818133 +n07818277 +n07818422 +n07818572 +n07818689 +n07818825 +n07818995 +n07819166 +n07819303 +n07819480 +n07819682 +n07819769 +n07819896 +n07820036 +n07820145 +n07820297 +n07820497 +n07820683 +n07820814 +n07820960 +n07821107 +n07821260 +n07821404 +n07821610 +n07821758 +n07821919 +n07822053 +n07822197 +n07822323 +n07822518 +n07822687 +n07822845 +n07823105 +n07823280 +n07823369 +n07823460 +n07823591 +n07823698 +n07823814 +n07823951 +n07824191 +n07824268 +n07824383 +n07824502 +n07824702 +n07824863 +n07824988 +n07825194 +n07825399 +n07825496 +n07825597 +n07825717 +n07825850 +n07825972 +n07826091 +n07826250 +n07826340 +n07826453 +n07826544 +n07826653 +n07826930 +n07827130 +n07827284 +n07827410 +n07827554 +n07827750 +n07827896 +n07828041 +n07828156 +n07828275 +n07828378 +n07828642 +n07828987 +n07829248 +n07829331 +n07829412 +n07830493 +n07830593 +n07830690 +n07830841 +n07830986 +n07831146 +n07831267 +n07831450 +n07831663 +n07831821 +n07831955 +n07832099 +n07832202 +n07832307 +n07832416 +n07832592 +n07832741 +n07832902 +n07833333 +n07833535 +n07833672 +n07833816 +n07833951 +n07834065 +n07834160 +n07834286 +n07834507 +n07834618 +n07834774 +n07834872 +n07835051 +n07835173 +n07835331 +n07835457 +n07835547 +n07835701 +n07835823 +n07835921 +n07836077 +n07836269 +n07836456 +n07836600 +n07836731 +n07836838 +n07837002 +n07837110 +n07837234 +n07837362 +n07837545 +n07837630 +n07837755 +n07837912 +n07838073 +n07838233 +n07838441 +n07838551 +n07838659 +n07838811 +n07838905 +n07839055 +n07839172 +n07839312 +n07839478 +n07839593 +n07839730 +n07839864 +n07840027 +n07840124 +n07840219 +n07840304 +n07840395 +n07840520 +n07840672 +n07840804 +n07841037 +n07841345 +n07841495 +n07841639 +n07841800 +n07841907 +n07842044 +n07842130 +n07842202 +n07842308 +n07842433 +n07842605 +n07842753 +n07842972 +n07843117 +n07843220 +n07843348 +n07843464 +n07843636 +n07843775 +n07844042 +n07844604 +n07844786 +n07844867 +n07845087 +n07845166 +n07845335 +n07845421 +n07845495 +n07845571 +n07845702 +n07845775 +n07845863 +n07846014 +n07846143 +n07846274 +n07846359 +n07846471 +n07846557 +n07846688 +n07846802 +n07846938 +n07847047 +n07847198 +n07847453 +n07847585 +n07847706 +n07847827 +n07847917 +n07848093 +n07848196 +n07848338 +n07848771 +n07848936 +n07849026 +n07849186 +n07849336 +n07849506 +n07849619 +n07849733 +n07849912 +n07850083 +n07850219 +n07850329 +n07851054 +n07851298 +n07851443 +n07851554 +n07851641 +n07851767 +n07851926 +n07852045 +n07852229 +n07852302 +n07852376 +n07852452 +n07852532 +n07852614 +n07852712 +n07852833 +n07852919 +n07853125 +n07853232 +n07853345 +n07853445 +n07853560 +n07853648 +n07853762 +n07853852 +n07853946 +n07854066 +n07854184 +n07854266 +n07854348 +n07854455 +n07854614 +n07854707 +n07854813 +n07854982 +n07855105 
+n07855188 +n07855317 +n07855413 +n07855510 +n07855603 +n07855721 +n07855812 +n07855907 +n07856045 +n07856186 +n07856270 +n07856756 +n07856895 +n07856992 +n07857076 +n07857170 +n07857356 +n07857598 +n07857731 +n07857959 +n07858114 +n07858197 +n07858336 +n07858484 +n07858595 +n07858841 +n07858978 +n07859142 +n07859284 +n07859583 +n07859796 +n07859951 +n07860103 +n07860208 +n07860331 +n07860447 +n07860548 +n07860629 +n07860805 +n07860988 +n07861158 +n07861247 +n07861334 +n07861557 +n07861681 +n07861813 +n07861983 +n07862095 +n07862244 +n07862348 +n07862461 +n07862611 +n07862770 +n07862946 +n07863107 +n07863229 +n07863374 +n07863547 +n07863644 +n07863802 +n07863935 +n07864065 +n07864198 +n07864317 +n07864475 +n07864638 +n07864756 +n07864934 +n07865105 +n07865196 +n07865484 +n07865575 +n07865700 +n07865788 +n07866015 +n07866151 +n07866277 +n07866409 +n07866571 +n07866723 +n07866868 +n07867021 +n07867164 +n07867324 +n07867421 +n07867616 +n07867751 +n07867883 +n07868045 +n07868200 +n07868340 +n07868508 +n07868684 +n07868830 +n07868955 +n07869111 +n07869291 +n07869391 +n07869522 +n07869611 +n07869775 +n07869937 +n07870069 +n07870167 +n07870313 +n07870478 +n07870620 +n07870734 +n07870894 +n07871065 +n07871234 +n07871335 +n07871436 +n07871588 +n07871720 +n07871810 +n07872593 +n07872748 +n07873057 +n07873198 +n07873348 +n07873464 +n07873679 +n07873807 +n07874063 +n07874159 +n07874259 +n07874343 +n07874441 +n07874531 +n07874674 +n07874780 +n07874995 +n07875086 +n07875152 +n07875267 +n07875436 +n07875560 +n07875693 +n07875835 +n07875926 +n07876026 +n07876189 +n07876281 +n07876460 +n07876550 +n07876651 +n07876775 +n07876893 +n07877187 +n07877299 +n07877675 +n07877849 +n07877961 +n07878145 +n07878283 +n07878479 +n07878647 +n07878785 +n07878926 +n07879072 +n07879174 +n07879350 +n07879450 +n07879560 +n07879659 +n07879821 +n07879953 +n07880080 +n07880213 +n07880325 +n07880458 +n07880751 +n07880880 +n07880968 +n07881117 +n07881205 +n07881404 +n07881525 +n07881625 +n07881800 +n07882420 +n07882497 +n07882886 +n07883031 +n07883156 +n07883251 +n07883384 +n07883510 +n07883661 +n07884567 +n07885705 +n07886057 +n07886176 +n07886317 +n07886463 +n07886572 +n07886849 +n07887099 +n07887192 +n07887304 +n07887461 +n07887634 +n07887967 +n07888058 +n07888229 +n07888378 +n07888465 +n07888816 +n07888909 +n07889193 +n07889274 +n07889510 +n07889814 +n07889990 +n07890068 +n07890226 +n07890352 +n07890540 +n07890617 +n07890750 +n07890890 +n07890970 +n07891095 +n07891189 +n07891309 +n07891433 +n07891726 +n07892418 +n07892512 +n07892813 +n07893253 +n07893425 +n07893528 +n07893642 +n07893792 +n07893891 +n07894102 +n07894298 +n07894451 +n07894551 +n07894703 +n07894799 +n07894965 +n07895100 +n07895237 +n07895435 +n07895595 +n07895710 +n07895839 +n07895962 +n07896060 +n07896165 +n07896287 +n07896422 +n07896560 +n07896661 +n07896765 +n07896893 +n07896994 +n07897116 +n07897200 +n07897438 +n07897600 +n07897750 +n07897865 +n07897975 +n07898117 +n07898247 +n07898333 +n07898443 +n07898617 +n07898745 +n07898895 +n07899003 +n07899108 +n07899292 +n07899434 +n07899533 +n07899660 +n07899769 +n07899899 +n07899976 +n07900225 +n07900406 +n07900616 +n07900734 +n07900825 +n07900958 +n07901355 +n07901457 +n07901587 +n07902121 +n07902336 +n07902443 +n07902520 +n07902698 +n07902799 +n07902937 +n07903101 +n07903208 +n07903543 +n07903643 +n07903731 +n07903841 +n07903962 +n07904072 +n07904293 +n07904395 +n07904637 +n07904760 +n07904865 +n07904934 +n07905038 +n07905296 +n07905386 +n07905474 +n07905618 +n07905770 +n07905979 +n07906111 +n07906284 +n07906572 
+n07906718 +n07906877 +n07907037 +n07907161 +n07907342 +n07907429 +n07907548 +n07907831 +n07907943 +n07908411 +n07908567 +n07908647 +n07908812 +n07908923 +n07909129 +n07909231 +n07909362 +n07909504 +n07909593 +n07909714 +n07909811 +n07909954 +n07910048 +n07910152 +n07910245 +n07910379 +n07910538 +n07910656 +n07910799 +n07910970 +n07911061 +n07911249 +n07911371 +n07911677 +n07912093 +n07912211 +n07913180 +n07913300 +n07913393 +n07913537 +n07913644 +n07913774 +n07913882 +n07914006 +n07914128 +n07914271 +n07914413 +n07914586 +n07914686 +n07914777 +n07914887 +n07914995 +n07915094 +n07915213 +n07915366 +n07915491 +n07915618 +n07915800 +n07915918 +n07916041 +n07916183 +n07916319 +n07916437 +n07916582 +n07917133 +n07917272 +n07917392 +n07917507 +n07917618 +n07917791 +n07917874 +n07917951 +n07918028 +n07918193 +n07918309 +n07918706 +n07918879 +n07919165 +n07919310 +n07919441 +n07919572 +n07919665 +n07919787 +n07919894 +n07920052 +n07920222 +n07920349 +n07920540 +n07920663 +n07920872 +n07920989 +n07921090 +n07921239 +n07921360 +n07921455 +n07921615 +n07921834 +n07921948 +n07922041 +n07922147 +n07922512 +n07922607 +n07922764 +n07922955 +n07923748 +n07924033 +n07924276 +n07924366 +n07924443 +n07924560 +n07924655 +n07924747 +n07924834 +n07924955 +n07925116 +n07925229 +n07925327 +n07925423 +n07925500 +n07925608 +n07925708 +n07925808 +n07925966 +n07926250 +n07926346 +n07926442 +n07926540 +n07926785 +n07926920 +n07927070 +n07927197 +n07927512 +n07927716 +n07927836 +n07927931 +n07928163 +n07928264 +n07928367 +n07928488 +n07928578 +n07928696 +n07928790 +n07928887 +n07928998 +n07929172 +n07929351 +n07929519 +n07929940 +n07930062 +n07930205 +n07930315 +n07930433 +n07930554 +n07930864 +n07931001 +n07931096 +n07931280 +n07931452 +n07931612 +n07931733 +n07931870 +n07932039 +n07932323 +n07932454 +n07932614 +n07932762 +n07932841 +n07933154 +n07933274 +n07933530 +n07933652 +n07933799 +n07933891 +n07934032 +n07934152 +n07934282 +n07934373 +n07934530 +n07934678 +n07934800 +n07934908 +n07935043 +n07935152 +n07935288 +n07935379 +n07935504 +n07935737 +n07935878 +n07936015 +n07936093 +n07936263 +n07936459 +n07936548 +n07936745 +n07936979 +n07937069 +n07937344 +n07937461 +n07937621 +n07938007 +n07938149 +n07938313 +n07938594 +n07942152 +n07951464 +n07954211 +n07977870 +n08079613 +n08182379 +n08238463 +n08242223 +n08249459 +n08253141 +n08256735 +n08376250 +n08385989 +n08492354 +n08492461 +n08494231 +n08495908 +n08496334 +n08500819 +n08500989 +n08501887 +n08505018 +n08506347 +n08511017 +n08517010 +n08517676 +n08518171 +n08519299 +n08521623 +n08523340 +n08524735 +n08539072 +n08539276 +n08540532 +n08547468 +n08547544 +n08551296 +n08554440 +n08555333 +n08555710 +n08558770 +n08558963 +n08559155 +n08560295 +n08569482 +n08571275 +n08571642 +n08571898 +n08573674 +n08573842 +n08578517 +n08579266 +n08579352 +n08580944 +n08583292 +n08583455 +n08583554 +n08583682 +n08584914 +n08586978 +n08589670 +n08596076 +n08597579 +n08598301 +n08598568 +n08599174 +n08599292 +n08611339 +n08611421 +n08613733 +n08614632 +n08616050 +n08618831 +n08619112 +n08623676 +n08628141 +n08633683 +n08640531 +n08640739 +n08640962 +n08643267 +n08644045 +n08645104 +n08645212 +n08645318 +n08647264 +n08648917 +n08649711 +n08651104 +n08652376 +n08658309 +n08658918 +n08659242 +n08659331 +n08659446 +n08659861 +n08661878 +n08662427 +n08663051 +n08663703 +n08663860 +n08673039 +n08674344 +n08676253 +n08677424 +n08677801 +n08678783 +n08679167 +n08679269 +n08679562 +n08685188 +n08782627 +n08896327 +n09032191 +n09186592 +n09189157 +n09191635 +n09193551 +n09193705 +n09194227 
+n09199101 +n09201998 +n09203827 +n09205509 +n09206896 +n09206985 +n09208496 +n09209025 +n09210862 +n09213434 +n09213565 +n09214060 +n09214269 +n09214916 +n09215023 +n09215437 +n09217230 +n09218315 +n09218494 +n09218641 +n09219233 +n09223487 +n09224725 +n09226869 +n09228055 +n09229709 +n09230041 +n09230202 +n09231117 +n09233446 +n09233603 +n09238926 +n09239302 +n09242389 +n09245515 +n09246464 +n09247410 +n09248153 +n09248399 +n09249034 +n09249155 +n09251407 +n09255070 +n09256479 +n09257843 +n09259025 +n09259219 +n09260907 +n09262690 +n09263912 +n09264803 +n09265620 +n09266604 +n09267854 +n09268007 +n09269341 +n09269472 +n09269882 +n09270160 +n09270657 +n09270735 +n09274152 +n09274305 +n09279986 +n09281252 +n09282208 +n09283193 +n09283405 +n09283514 +n09283767 +n09283866 +n09287415 +n09287968 +n09288635 +n09289331 +n09289596 +n09290350 +n09290444 +n09294877 +n09295210 +n09295946 +n09300306 +n09300905 +n09302616 +n09303008 +n09303528 +n09304750 +n09305031 +n09305898 +n09308572 +n09308743 +n09309046 +n09309168 +n09309292 +n09310616 +n09315159 +n09319604 +n09325824 +n09326662 +n09327077 +n09327538 +n09330378 +n09331251 +n09332890 +n09335693 +n09335809 +n09336555 +n09337048 +n09337253 +n09338013 +n09339810 +n09344198 +n09344324 +n09344724 +n09348460 +n09349648 +n09351905 +n09352849 +n09353815 +n09354511 +n09357346 +n09357447 +n09359803 +n09361517 +n09362316 +n09362945 +n09366017 +n09366317 +n09375606 +n09376198 +n09376526 +n09376786 +n09381242 +n09382099 +n09384106 +n09389867 +n09391386 +n09391644 +n09391774 +n09392402 +n09393524 +n09393605 +n09396465 +n09396608 +n09398076 +n09398677 +n09399592 +n09400584 +n09400987 +n09402944 +n09403086 +n09403211 +n09403427 +n09403734 +n09405078 +n09405787 +n09406793 +n09409512 +n09409752 +n09410224 +n09411189 +n09411295 +n09415584 +n09415671 +n09416076 +n09416890 +n09421031 +n09421799 +n09421951 +n09422190 +n09422631 +n09425019 +n09425344 +n09428293 +n09428628 +n09429630 +n09432283 +n09432990 +n09433312 +n09433442 +n09433839 +n09435739 +n09436444 +n09436708 +n09437454 +n09438844 +n09438940 +n09439032 +n09439213 +n09442595 +n09443281 +n09443641 +n09444783 +n09445008 +n09445289 +n09447666 +n09448690 +n09450163 +n09451237 +n09452291 +n09452395 +n09452760 +n09453008 +n09454153 +n09454412 +n09454744 +n09456207 +n09457979 +n09458269 +n09459979 +n09460046 +n09461069 +n09462600 +n09463226 +n09464486 +n09466678 +n09467696 +n09468604 +n09470027 +n09470222 +n09472413 +n09472597 +n09474010 +n09474412 +n09474765 +n09475044 +n09475179 +n09475925 +n09476123 +n09478210 +n09480959 +n09481120 +n09493983 +n09495962 +n09505153 +n09537660 +n09556121 +n09605110 +n09606009 +n09606527 +n09607630 +n09607782 +n09607903 +n09608709 +n09610255 +n09610405 +n09611722 +n09612700 +n09613118 +n09613191 +n09613690 +n09615336 +n09616573 +n09616922 +n09617161 +n09617435 +n09617577 +n09617696 +n09618760 +n09618880 +n09618957 +n09619168 +n09619452 +n09620078 +n09620794 +n09621232 +n09622049 +n09622302 +n09624168 +n09624559 +n09624899 +n09625401 +n09626238 +n09627807 +n09627906 +n09629065 +n09629246 +n09629752 +n09631129 +n09632274 +n09632518 +n09633969 +n09635534 +n09635635 +n09635973 +n09636339 +n09637339 +n09638454 +n09638875 +n09639382 +n09639919 +n09640327 +n09640715 +n09641002 +n09641578 +n09643799 +n09644152 +n09644657 +n09648743 +n09648911 +n09649067 +n09650729 +n09650839 +n09650989 +n09651123 +n09651968 +n09652149 +n09653144 +n09653438 +n09654079 +n09654518 +n09654898 +n09655213 +n09655466 +n09656077 +n09657206 +n09657748 +n09658254 +n09658398 +n09658815 +n09658921 +n09659039 +n09659188 
+n09660010 +n09660240 +n09661873 +n09662038 +n09662661 +n09662951 +n09663248 +n09663786 +n09663999 +n09664556 +n09664908 +n09665367 +n09665545 +n09666349 +n09666476 +n09666883 +n09667358 +n09668199 +n09668437 +n09668562 +n09668988 +n09669631 +n09670280 +n09670521 +n09670909 +n09671089 +n09672590 +n09672725 +n09672840 +n09673091 +n09674412 +n09674786 +n09675045 +n09675673 +n09675799 +n09675922 +n09676021 +n09676247 +n09676884 +n09677427 +n09678747 +n09679028 +n09679170 +n09679925 +n09680908 +n09681107 +n09681234 +n09681973 +n09683180 +n09683757 +n09683924 +n09684082 +n09684901 +n09685233 +n09685806 +n09686262 +n09686401 +n09688233 +n09688804 +n09689435 +n09689958 +n09690083 +n09690208 +n09690496 +n09690621 +n09690864 +n09691604 +n09691729 +n09691858 +n09692125 +n09692915 +n09693244 +n09693982 +n09694664 +n09694771 +n09695019 +n09695132 +n09695514 +n09695620 +n09695979 +n09696456 +n09696585 +n09696763 +n09697401 +n09697986 +n09698644 +n09699020 +n09699642 +n09700125 +n09700964 +n09701148 +n09701833 +n09702134 +n09702673 +n09703101 +n09703344 +n09703485 +n09703708 +n09703809 +n09703932 +n09704057 +n09704157 +n09704283 +n09705003 +n09705124 +n09705671 +n09705784 +n09706029 +n09706255 +n09707061 +n09707289 +n09707735 +n09708750 +n09708889 +n09709531 +n09709673 +n09710041 +n09710164 +n09710886 +n09711132 +n09711435 +n09712324 +n09712448 +n09712696 +n09712967 +n09713108 +n09714120 +n09714694 +n09715165 +n09715303 +n09715427 +n09716047 +n09716933 +n09717233 +n09718217 +n09718811 +n09718936 +n09719309 +n09719794 +n09720033 +n09720256 +n09720595 +n09720702 +n09720842 +n09721244 +n09721444 +n09722064 +n09722658 +n09722817 +n09723067 +n09723819 +n09723944 +n09724234 +n09724533 +n09724656 +n09724785 +n09725000 +n09725229 +n09725546 +n09725653 +n09725772 +n09725935 +n09726621 +n09726811 +n09727440 +n09727826 +n09728137 +n09728285 +n09729062 +n09729156 +n09730077 +n09730204 +n09730824 +n09731343 +n09731436 +n09731571 +n09732170 +n09733459 +n09733793 +n09734185 +n09734450 +n09734535 +n09734639 +n09735258 +n09735654 +n09736485 +n09736798 +n09736945 +n09737050 +n09737161 +n09737453 +n09738121 +n09738400 +n09740724 +n09741074 +n09741331 +n09741722 +n09741816 +n09741904 +n09741999 +n09742101 +n09742315 +n09742927 +n09743487 +n09743601 +n09743792 +n09744161 +n09744346 +n09744462 +n09744679 +n09744834 +n09745229 +n09745324 +n09745834 +n09745933 +n09746936 +n09747191 +n09747495 +n09748101 +n09748408 +n09748648 +n09748889 +n09749386 +n09750282 +n09750641 +n09750770 +n09750891 +n09751076 +n09751496 +n09751622 +n09751895 +n09752023 +n09752519 +n09753348 +n09753792 +n09754152 +n09754217 +n09754633 +n09754907 +n09755086 +n09755241 +n09755555 +n09755788 +n09755893 +n09756049 +n09756195 +n09756961 +n09757449 +n09758173 +n09758885 +n09759501 +n09760290 +n09760609 +n09760913 +n09761068 +n09761753 +n09762011 +n09762385 +n09763272 +n09763784 +n09764201 +n09764598 +n09764732 +n09764900 +n09765118 +n09765278 +n09767197 +n09769076 +n09769525 +n09769929 +n09770179 +n09770359 +n09771435 +n09772330 +n09772746 +n09772930 +n09773962 +n09774167 +n09774783 +n09775907 +n09776346 +n09776642 +n09776807 +n09777870 +n09778266 +n09778537 +n09778783 +n09778927 +n09779124 +n09779280 +n09779461 +n09779790 +n09780395 +n09780828 +n09780984 +n09781398 +n09781504 +n09781650 +n09782167 +n09782397 +n09782855 +n09783537 +n09783776 +n09783884 +n09784043 +n09784160 +n09784564 +n09785236 +n09785659 +n09785891 +n09786115 +n09787534 +n09787765 +n09788073 +n09788237 +n09789150 +n09789566 +n09789898 +n09790047 +n09790482 +n09791014 +n09791419 +n09791816 
+n09792125 +n09792555 +n09792969 +n09793141 +n09793352 +n09793946 +n09794550 +n09794668 +n09795010 +n09795124 +n09795334 +n09796809 +n09796974 +n09797742 +n09797873 +n09797998 +n09798096 +n09800469 +n09800964 +n09801102 +n09801275 +n09801533 +n09802445 +n09802641 +n09802951 +n09804230 +n09805151 +n09805324 +n09805475 +n09806944 +n09807075 +n09808080 +n09808591 +n09809279 +n09809538 +n09809749 +n09809925 +n09810166 +n09811568 +n09811712 +n09811852 +n09813219 +n09814252 +n09814381 +n09814488 +n09814567 +n09814660 +n09815455 +n09815790 +n09816654 +n09816771 +n09817174 +n09817386 +n09818022 +n09819477 +n09820044 +n09820263 +n09821831 +n09822830 +n09823153 +n09823287 +n09823502 +n09823832 +n09824135 +n09824609 +n09825096 +n09825750 +n09826204 +n09826605 +n09826821 +n09827246 +n09827363 +n09828216 +n09828403 +n09828988 +n09830194 +n09830400 +n09830629 +n09830759 +n09830926 +n09831962 +n09832456 +n09832633 +n09832978 +n09833111 +n09833275 +n09833441 +n09833536 +n09833751 +n09833997 +n09834258 +n09834378 +n09834699 +n09834885 +n09835017 +n09835153 +n09835230 +n09835348 +n09835506 +n09836160 +n09836343 +n09836519 +n09836786 +n09837459 +n09837720 +n09838295 +n09838370 +n09838621 +n09839702 +n09840217 +n09840435 +n09840520 +n09841188 +n09841515 +n09841696 +n09842047 +n09842288 +n09842395 +n09842528 +n09842823 +n09843443 +n09843602 +n09843716 +n09843824 +n09844457 +n09844898 +n09845401 +n09845849 +n09846142 +n09846469 +n09846586 +n09846755 +n09846894 +n09847267 +n09847344 +n09847543 +n09848110 +n09848489 +n09849167 +n09849990 +n09850760 +n09850974 +n09851165 +n09851575 +n09853541 +n09853645 +n09853881 +n09854218 +n09854421 +n09854915 +n09855433 +n09856401 +n09856671 +n09856827 +n09857007 +n09858165 +n09858299 +n09858733 +n09859152 +n09859285 +n09859975 +n09861287 +n09861599 +n09861863 +n09861946 +n09862183 +n09862621 +n09863031 +n09863339 +n09863749 +n09863936 +n09864632 +n09864968 +n09865068 +n09865162 +n09865398 +n09865672 +n09865744 +n09866115 +n09866354 +n09866559 +n09866661 +n09866817 +n09866922 +n09867069 +n09867154 +n09867311 +n09868270 +n09868782 +n09868899 +n09869317 +n09869447 +n09869578 +n09870096 +n09871095 +n09871229 +n09871681 +n09871867 +n09871952 +n09872066 +n09872557 +n09873348 +n09873473 +n09873769 +n09873899 +n09874428 +n09874725 +n09874862 +n09875025 +n09875979 +n09876701 +n09877288 +n09877587 +n09877750 +n09877951 +n09878921 +n09879552 +n09880189 +n09880741 +n09881265 +n09881358 +n09881895 +n09883047 +n09883452 +n09883807 +n09885059 +n09885866 +n09886403 +n09886540 +n09888635 +n09889065 +n09889170 +n09889691 +n09889941 +n09890192 +n09890749 +n09891730 +n09892262 +n09892513 +n09892693 +n09893191 +n09893344 +n09893502 +n09893600 +n09894143 +n09894445 +n09894654 +n09894909 +n09895222 +n09895480 +n09895561 +n09895701 +n09895902 +n09896170 +n09896311 +n09896401 +n09896685 +n09896826 +n09898020 +n09899289 +n09899671 +n09899782 +n09899929 +n09901337 +n09901502 +n09901642 +n09901786 +n09901921 +n09902128 +n09902353 +n09902731 +n09902851 +n09902954 +n09903153 +n09903501 +n09903639 +n09903936 +n09904208 +n09904837 +n09905050 +n09905185 +n09905530 +n09906293 +n09906449 +n09906704 +n09907804 +n09908769 +n09909660 +n09909929 +n09910222 +n09910374 +n09910556 +n09910840 +n09911226 +n09912431 +n09912681 +n09912907 +n09912995 +n09913329 +n09913455 +n09913593 +n09915434 +n09915651 +n09916348 +n09917214 +n09917345 +n09917481 +n09917593 +n09918248 +n09918554 +n09918867 +n09919061 +n09919200 +n09919451 +n09919899 +n09920106 +n09920283 +n09920901 +n09921034 +n09923003 +n09923186 +n09923418 +n09923561 
+n09923673 +n09923996 +n09924106 +n09924195 +n09924313 +n09924437 +n09924996 +n09927089 +n09927451 +n09928136 +n09928451 +n09928845 +n09929202 +n09929298 +n09929577 +n09930257 +n09930628 +n09930876 +n09931165 +n09931418 +n09931640 +n09932098 +n09932336 +n09932508 +n09932788 +n09933020 +n09933098 +n09933842 +n09933972 +n09934337 +n09934488 +n09934774 +n09935107 +n09935434 +n09936825 +n09936892 +n09937056 +n09937688 +n09937802 +n09937903 +n09938080 +n09938449 +n09938991 +n09940725 +n09940818 +n09941089 +n09941571 +n09941787 +n09941964 +n09942697 +n09942970 +n09943239 +n09943811 +n09944022 +n09944160 +n09944430 +n09945021 +n09945223 +n09945319 +n09945603 +n09945745 +n09946814 +n09947127 +n09950457 +n09950728 +n09951070 +n09951274 +n09951524 +n09951616 +n09952163 +n09953052 +n09953350 +n09953615 +n09954355 +n09954639 +n09955406 +n09955944 +n09956578 +n09957523 +n09958133 +n09958292 +n09958447 +n09958569 +n09959142 +n09959658 +n09960688 +n09961198 +n09961331 +n09961469 +n09961605 +n09961739 +n09962966 +n09964202 +n09964411 +n09965515 +n09965787 +n09966470 +n09966554 +n09967063 +n09967406 +n09967555 +n09967816 +n09967967 +n09968259 +n09968652 +n09968741 +n09968845 +n09970088 +n09970192 +n09970402 +n09970822 +n09971273 +n09971385 +n09971839 +n09972010 +n09972458 +n09972587 +n09974648 +n09975425 +n09976024 +n09976283 +n09976429 +n09976728 +n09976917 +n09978442 +n09979321 +n09979913 +n09980458 +n09980805 +n09980985 +n09981092 +n09981278 +n09981540 +n09981939 +n09982152 +n09982525 +n09983314 +n09983572 +n09983889 +n09984960 +n09985470 +n09985809 +n09985978 +n09986450 +n09986700 +n09986904 +n09987045 +n09987161 +n09987239 +n09988063 +n09988311 +n09988493 +n09988703 +n09989502 +n09990415 +n09990690 +n09990777 +n09991740 +n09991867 +n09992538 +n09992837 +n09993252 +n09993651 +n09994400 +n09994673 +n09994808 +n09994878 +n09995829 +n09996039 +n09996304 +n09996481 +n09997622 +n09998788 +n09999135 +n10000294 +n10000459 +n10000787 +n10001217 +n10001481 +n10001764 +n10002257 +n10002760 +n10003476 +n10004718 +n10005006 +n10005934 +n10006177 +n10006748 +n10007684 +n10007809 +n10007995 +n10008123 +n10008254 +n10009162 +n10009276 +n10009484 +n10009671 +n10010062 +n10010243 +n10010632 +n10010767 +n10010864 +n10011360 +n10011486 +n10012484 +n10013811 +n10015215 +n10015485 +n10015792 +n10015897 +n10017272 +n10017422 +n10018747 +n10018861 +n10019072 +n10019187 +n10019406 +n10020366 +n10020533 +n10020670 +n10020807 +n10020890 +n10022908 +n10023264 +n10023506 +n10023656 +n10024025 +n10024362 +n10024937 +n10025060 +n10025295 +n10025391 +n10025635 +n10026976 +n10027246 +n10027590 +n10028402 +n10028541 +n10029068 +n10030277 +n10032987 +n10033412 +n10033572 +n10033663 +n10033888 +n10034201 +n10034614 +n10035952 +n10036266 +n10036444 +n10036692 +n10036929 +n10037080 +n10037385 +n10037588 +n10037922 +n10038119 +n10038409 +n10038620 +n10039271 +n10039946 +n10040240 +n10040698 +n10040945 +n10041373 +n10041887 +n10042690 +n10042845 +n10043024 +n10043491 +n10043643 +n10044682 +n10044879 +n10047199 +n10047459 +n10048117 +n10048367 +n10048612 +n10048836 +n10049363 +n10050043 +n10050880 +n10051026 +n10051761 +n10051861 +n10051975 +n10052694 +n10053439 +n10053808 +n10054657 +n10055297 +n10055410 +n10055566 +n10055730 +n10055847 +n10056103 +n10056611 +n10056719 +n10057271 +n10058411 +n10058962 +n10059067 +n10060075 +n10060175 +n10060352 +n10061043 +n10061195 +n10061431 +n10061882 +n10062042 +n10062176 +n10062275 +n10062492 +n10062594 +n10062716 +n10062905 +n10062996 +n10063635 +n10063919 +n10064831 +n10064977 +n10065758 +n10066206 
+n10066314 +n10067011 +n10067305 +n10067600 +n10067968 +n10068234 +n10068425 +n10069296 +n10069981 +n10070108 +n10070377 +n10070449 +n10070563 +n10070711 +n10071332 +n10071557 +n10072054 +n10074249 +n10074578 +n10074735 +n10074841 +n10075299 +n10075693 +n10076224 +n10076483 +n10076604 +n10076957 +n10077106 +n10077593 +n10077879 +n10078131 +n10078719 +n10078806 +n10079399 +n10079893 +n10080117 +n10080508 +n10080869 +n10081204 +n10081842 +n10082043 +n10082299 +n10082423 +n10082562 +n10082687 +n10082997 +n10083677 +n10083823 +n10084043 +n10084295 +n10085101 +n10085869 +n10086383 +n10086744 +n10087434 +n10087736 +n10088200 +n10090745 +n10091349 +n10091450 +n10091564 +n10091651 +n10091861 +n10091997 +n10092488 +n10092643 +n10092794 +n10092978 +n10093167 +n10093475 +n10093818 +n10094320 +n10094584 +n10094782 +n10095265 +n10095420 +n10095769 +n10095869 +n10096126 +n10096508 +n10097262 +n10097477 +n10097590 +n10097842 +n10097995 +n10098245 +n10098388 +n10098517 +n10098624 +n10098710 +n10098862 +n10099002 +n10099375 +n10101308 +n10101634 +n10101981 +n10102800 +n10103155 +n10103228 +n10103921 +n10104064 +n10104487 +n10104756 +n10104888 +n10105085 +n10105733 +n10105906 +n10106387 +n10106509 +n10106995 +n10107173 +n10107303 +n10108018 +n10108089 +n10108464 +n10108832 +n10109443 +n10109662 +n10109826 +n10110093 +n10110731 +n10110893 +n10111358 +n10111779 +n10111903 +n10112129 +n10113249 +n10113583 +n10113869 +n10114476 +n10114550 +n10114662 +n10115430 +n10115946 +n10116370 +n10116478 +n10116702 +n10117017 +n10117267 +n10117415 +n10117739 +n10117851 +n10118301 +n10118743 +n10118844 +n10119609 +n10120330 +n10120671 +n10121026 +n10121246 +n10121714 +n10121800 +n10122300 +n10122531 +n10123122 +n10123844 +n10126177 +n10126424 +n10126708 +n10127186 +n10127689 +n10128519 +n10128748 +n10129338 +n10129825 +n10130686 +n10130877 +n10131151 +n10131268 +n10131590 +n10131815 +n10132035 +n10132502 +n10134178 +n10134396 +n10134760 +n10134982 +n10135129 +n10135197 +n10135297 +n10136615 +n10136959 +n10137825 +n10138369 +n10138472 +n10139077 +n10139651 +n10140051 +n10140597 +n10140683 +n10140783 +n10140929 +n10141364 +n10141732 +n10142166 +n10142391 +n10142537 +n10142747 +n10142946 +n10143172 +n10143595 +n10143725 +n10144338 +n10145239 +n10145340 +n10145480 +n10145590 +n10145774 +n10145902 +n10146002 +n10146104 +n10146416 +n10146816 +n10146927 +n10147121 +n10147262 +n10147710 +n10147935 +n10148035 +n10148305 +n10148825 +n10149436 +n10149867 +n10150071 +n10150794 +n10150940 +n10151133 +n10151261 +n10151367 +n10151570 +n10151760 +n10152306 +n10152616 +n10152763 +n10153155 +n10153414 +n10153594 +n10153865 +n10154013 +n10154186 +n10154601 +n10155222 +n10155600 +n10155849 +n10156629 +n10156831 +n10157016 +n10157128 +n10157271 +n10158506 +n10159045 +n10159289 +n10159533 +n10160188 +n10160280 +n10160412 +n10161622 +n10162016 +n10162194 +n10162354 +n10164025 +n10164233 +n10164492 +n10165448 +n10166189 +n10166394 +n10167152 +n10167361 +n10167565 +n10167838 +n10168012 +n10168183 +n10168584 +n10168837 +n10169147 +n10169241 +n10169419 +n10169796 +n10170060 +n10170681 +n10170866 +n10171219 +n10171456 +n10171567 +n10172080 +n10173410 +n10173579 +n10173665 +n10173771 +n10174253 +n10174330 +n10174445 +n10174589 +n10174695 +n10174971 +n10175248 +n10175725 +n10176913 +n10177150 +n10178077 +n10178216 +n10179069 +n10180580 +n10180791 +n10180923 +n10181445 +n10181547 +n10181799 +n10181878 +n10182190 +n10182402 +n10183347 +n10183931 +n10184505 +n10185148 +n10185483 +n10185793 +n10186068 +n10186143 +n10186216 +n10186350 +n10186686 +n10186774 
+n10187130 +n10187491 +n10187990 +n10188715 +n10188856 +n10188957 +n10189278 +n10189597 +n10190122 +n10190516 +n10191001 +n10191388 +n10191613 +n10192839 +n10193650 +n10194231 +n10194775 +n10195056 +n10195155 +n10195261 +n10195593 +n10196404 +n10196725 +n10197392 +n10198437 +n10198832 +n10199251 +n10200246 +n10200781 +n10202225 +n10202624 +n10202763 +n10203949 +n10204177 +n10204833 +n10205231 +n10205344 +n10205457 +n10205714 +n10206173 +n10206506 +n10206629 +n10207077 +n10207169 +n10208189 +n10208847 +n10208950 +n10209082 +n10209731 +n10210137 +n10210512 +n10210648 +n10210911 +n10211036 +n10211666 +n10211830 +n10212231 +n10212501 +n10212780 +n10213034 +n10213429 +n10214062 +n10214390 +n10215623 +n10216106 +n10216403 +n10217208 +n10218043 +n10218164 +n10218292 +n10219240 +n10219453 +n10219879 +n10220080 +n10220924 +n10221312 +n10221520 +n10222170 +n10222259 +n10222497 +n10222716 +n10223069 +n10223177 +n10223606 +n10224578 +n10225219 +n10225931 +n10226413 +n10227166 +n10227266 +n10227393 +n10227490 +n10227698 +n10227793 +n10227985 +n10228278 +n10228468 +n10228592 +n10228712 +n10229883 +n10230216 +n10233248 +n10235024 +n10235269 +n10235385 +n10236304 +n10236521 +n10236842 +n10237069 +n10237196 +n10237464 +n10237556 +n10237676 +n10237799 +n10238272 +n10238375 +n10239928 +n10240082 +n10240235 +n10240417 +n10240821 +n10241024 +n10241300 +n10242328 +n10243137 +n10243273 +n10243483 +n10243664 +n10243872 +n10244108 +n10244359 +n10244913 +n10245029 +n10245341 +n10245507 +n10245639 +n10245863 +n10246317 +n10246395 +n10246703 +n10247358 +n10247880 +n10248008 +n10248198 +n10248377 +n10249191 +n10249270 +n10249459 +n10249869 +n10249950 +n10250712 +n10251329 +n10251612 +n10252075 +n10252222 +n10252354 +n10252547 +n10253122 +n10253296 +n10253479 +n10253611 +n10253703 +n10255459 +n10257221 +n10258602 +n10258786 +n10259348 +n10259780 +n10259997 +n10260473 +n10260706 +n10260800 +n10261211 +n10261511 +n10261624 +n10261862 +n10262343 +n10262445 +n10262561 +n10262655 +n10262880 +n10263146 +n10263411 +n10263790 +n10265281 +n10265801 +n10265891 +n10266016 +n10266328 +n10266848 +n10267166 +n10267311 +n10267865 +n10268629 +n10269199 +n10269289 +n10271677 +n10272782 +n10272913 +n10273064 +n10274173 +n10274318 +n10274815 +n10275249 +n10275395 +n10275848 +n10276045 +n10276477 +n10276942 +n10277027 +n10277638 +n10277815 +n10277912 +n10278456 +n10279018 +n10279778 +n10280034 +n10280130 +n10280598 +n10280674 +n10281546 +n10281770 +n10281896 +n10282482 +n10282672 +n10283170 +n10283366 +n10283546 +n10284064 +n10284871 +n10284965 +n10286282 +n10286539 +n10286749 +n10288964 +n10289039 +n10289176 +n10289462 +n10289766 +n10290422 +n10290541 +n10290813 +n10290919 +n10291110 +n10291469 +n10291822 +n10291942 +n10292316 +n10293332 +n10293590 +n10293861 +n10294020 +n10294139 +n10295371 +n10295479 +n10296176 +n10296444 +n10297234 +n10297367 +n10297531 +n10297841 +n10298202 +n10298271 +n10298647 +n10298912 +n10299125 +n10299250 +n10299700 +n10299875 +n10300041 +n10300154 +n10300303 +n10300500 +n10300654 +n10300829 +n10302576 +n10302700 +n10302905 +n10303037 +n10303814 +n10304086 +n10304650 +n10304914 +n10305635 +n10305802 +n10306004 +n10306279 +n10306496 +n10306595 +n10306890 +n10307114 +n10308066 +n10308168 +n10308275 +n10308504 +n10308653 +n10308732 +n10310783 +n10311506 +n10311661 +n10312287 +n10312491 +n10312600 +n10313000 +n10313239 +n10313441 +n10313724 +n10314054 +n10314182 +n10314517 +n10314836 +n10315217 +n10315456 +n10315561 +n10315730 +n10316360 +n10316527 +n10316862 +n10317007 +n10317500 +n10317963 +n10318293 +n10318607 
+n10318686 +n10319313 +n10320484 +n10320863 +n10321126 +n10321340 +n10321632 +n10321882 +n10322238 +n10323634 +n10323752 +n10323999 +n10324560 +n10325549 +n10325774 +n10326776 +n10327143 +n10327987 +n10328123 +n10328328 +n10328437 +n10328696 +n10328941 +n10329035 +n10330593 +n10330931 +n10331098 +n10331167 +n10331258 +n10331347 +n10331841 +n10332110 +n10332385 +n10332861 +n10332953 +n10333044 +n10333165 +n10333317 +n10333439 +n10333601 +n10333838 +n10334009 +n10334461 +n10334782 +n10335246 +n10335801 +n10335931 +n10336411 +n10336904 +n10337488 +n10338231 +n10338391 +n10339179 +n10339251 +n10339717 +n10340312 +n10341243 +n10341343 +n10341446 +n10341573 +n10341955 +n10342180 +n10342367 +n10342543 +n10342893 +n10342992 +n10343088 +n10343355 +n10343449 +n10343554 +n10343869 +n10344121 +n10344203 +n10344319 +n10344656 +n10344774 +n10345015 +n10345100 +n10345302 +n10345422 +n10345659 +n10346015 +n10347204 +n10347446 +n10348526 +n10349243 +n10349750 +n10349836 +n10350220 +n10350774 +n10351064 +n10353016 +n10353355 +n10353928 +n10354265 +n10354754 +n10355142 +n10355306 +n10355449 +n10355688 +n10355806 +n10356450 +n10356877 +n10357012 +n10357613 +n10357737 +n10358032 +n10358124 +n10358575 +n10359117 +n10359422 +n10359546 +n10359659 +n10360366 +n10360747 +n10361060 +n10361194 +n10361296 +n10361525 +n10362003 +n10362319 +n10362557 +n10363445 +n10363573 +n10364198 +n10364502 +n10365514 +n10366145 +n10366276 +n10366966 +n10368291 +n10368528 +n10368624 +n10368711 +n10368798 +n10369095 +n10369317 +n10369417 +n10369528 +n10369699 +n10369955 +n10370381 +n10370955 +n10371052 +n10371221 +n10371330 +n10371450 +n10373390 +n10373525 +n10374541 +n10374849 +n10374943 +n10375052 +n10375314 +n10375402 +n10376523 +n10376890 +n10377021 +n10377185 +n10377291 +n10377542 +n10377633 +n10378026 +n10378113 +n10378780 +n10379376 +n10380126 +n10380499 +n10380672 +n10381804 +n10381981 +n10382157 +n10382302 +n10382480 +n10382710 +n10382825 +n10383094 +n10383237 +n10383505 +n10383816 +n10384214 +n10384392 +n10384496 +n10385566 +n10386196 +n10386754 +n10386874 +n10386984 +n10387196 +n10387324 +n10387836 +n10389865 +n10389976 +n10390600 +n10390698 +n10390807 +n10391416 +n10393909 +n10394434 +n10394786 +n10395073 +n10395209 +n10395390 +n10395828 +n10396106 +n10396337 +n10396727 +n10396908 +n10397001 +n10397142 +n10397392 +n10399130 +n10400003 +n10400108 +n10400205 +n10400437 +n10400618 +n10400998 +n10401204 +n10401331 +n10401639 +n10402709 +n10402824 +n10403633 +n10403876 +n10404426 +n10404998 +n10405540 +n10405694 +n10406266 +n10406391 +n10406765 +n10407310 +n10407954 +n10408809 +n10409459 +n10409752 +n10410246 +n10410996 +n10411356 +n10411551 +n10411867 +n10414239 +n10414768 +n10414865 +n10415037 +n10416567 +n10417288 +n10417424 +n10417551 +n10417682 +n10417843 +n10417969 +n10418101 +n10418735 +n10419047 +n10419472 +n10419630 +n10419785 +n10420031 +n10420277 +n10420507 +n10420649 +n10421016 +n10421470 +n10421956 +n10422405 +n10425946 +n10426454 +n10426630 +n10427223 +n10427359 +n10427764 +n10428004 +n10431122 +n10431625 +n10432189 +n10432441 +n10432875 +n10432957 +n10433077 +n10433452 +n10433610 +n10433737 +n10435169 +n10435251 +n10435716 +n10435988 +n10436334 +n10437014 +n10437137 +n10437262 +n10437698 +n10438172 +n10438619 +n10438842 +n10439373 +n10439523 +n10439727 +n10439851 +n10441037 +n10441124 +n10441694 +n10441962 +n10442093 +n10442232 +n10442417 +n10442573 +n10443032 +n10443659 +n10443830 +n10444194 +n10448322 +n10448455 +n10449664 +n10450038 +n10450161 +n10450303 +n10451450 +n10451590 +n10451858 +n10453184 +n10455619 
+n10456070 +n10456138 +n10456696 +n10457214 +n10457444 +n10457903 +n10458111 +n10458356 +n10458596 +n10459882 +n10460033 +n10461060 +n10462588 +n10462751 +n10462860 +n10464052 +n10464542 +n10464711 +n10464870 +n10465002 +n10465451 +n10465831 +n10466198 +n10466564 +n10466918 +n10467179 +n10467395 +n10468750 +n10469611 +n10469874 +n10470779 +n10471640 +n10471732 +n10471859 +n10472129 +n10472447 +n10473453 +n10473562 +n10473789 +n10473917 +n10474064 +n10474343 +n10474446 +n10474645 +n10475835 +n10475940 +n10476467 +n10477713 +n10477955 +n10478118 +n10478293 +n10478462 +n10478827 +n10478960 +n10479135 +n10479328 +n10481167 +n10481268 +n10482054 +n10482220 +n10482587 +n10482921 +n10483138 +n10483395 +n10483799 +n10483890 +n10484858 +n10485298 +n10485883 +n10486166 +n10486236 +n10486561 +n10487182 +n10487363 +n10487592 +n10488016 +n10488309 +n10488656 +n10489426 +n10490421 +n10491998 +n10492086 +n10492727 +n10493199 +n10493419 +n10493685 +n10493835 +n10493922 +n10494195 +n10494373 +n10495167 +n10495421 +n10495555 +n10495756 +n10496393 +n10496489 +n10497135 +n10497534 +n10497645 +n10498046 +n10498699 +n10498816 +n10498986 +n10499110 +n10499232 +n10499355 +n10499631 +n10499857 +n10500217 +n10500419 +n10500603 +n10500824 +n10500942 +n10501453 +n10501635 +n10502046 +n10502329 +n10502950 +n10503818 +n10504090 +n10504206 +n10505347 +n10505613 +n10505732 +n10505942 +n10506336 +n10506544 +n10506915 +n10507070 +n10507380 +n10507482 +n10507565 +n10507692 +n10508141 +n10508379 +n10508710 +n10509063 +n10509161 +n10509810 +n10510245 +n10510974 +n10511771 +n10512201 +n10512372 +n10512708 +n10512859 +n10513509 +n10513823 +n10513938 +n10514051 +n10514121 +n10514255 +n10514429 +n10514784 +n10515863 +n10516527 +n10517137 +n10517283 +n10518349 +n10519126 +n10519494 +n10519984 +n10520286 +n10520544 +n10520964 +n10521100 +n10521662 +n10521853 +n10522035 +n10522324 +n10522759 +n10523341 +n10524076 +n10524223 +n10524869 +n10525134 +n10525436 +n10525617 +n10525878 +n10526534 +n10527147 +n10527334 +n10528023 +n10528148 +n10528493 +n10529231 +n10530150 +n10530383 +n10530571 +n10530959 +n10531109 +n10531445 +n10531838 +n10533874 +n10533983 +n10536134 +n10536274 +n10536416 +n10537708 +n10537906 +n10538629 +n10538733 +n10538853 +n10539015 +n10539160 +n10539278 +n10540114 +n10540252 +n10540656 +n10541833 +n10542608 +n10542761 +n10542888 +n10543161 +n10543937 +n10544232 +n10544748 +n10545792 +n10546428 +n10546633 +n10548419 +n10548537 +n10548681 +n10549510 +n10550252 +n10550369 +n10550468 +n10551576 +n10552393 +n10553140 +n10553235 +n10554024 +n10554141 +n10554846 +n10555059 +n10555430 +n10556033 +n10556518 +n10556704 +n10556825 +n10557246 +n10557854 +n10559009 +n10559288 +n10559508 +n10559683 +n10559996 +n10560106 +n10560637 +n10561222 +n10561320 +n10561736 +n10562135 +n10562283 +n10562509 +n10562968 +n10563314 +n10563403 +n10563711 +n10564098 +n10565502 +n10565667 +n10566072 +n10567613 +n10567722 +n10567848 +n10568200 +n10568358 +n10568443 +n10568608 +n10568915 +n10569011 +n10569179 +n10570019 +n10570704 +n10571907 +n10572706 +n10572889 +n10573957 +n10574311 +n10574538 +n10574840 +n10575463 +n10575594 +n10575787 +n10576223 +n10576316 +n10576676 +n10576818 +n10576962 +n10577182 +n10577284 +n10577710 +n10577820 +n10578021 +n10578162 +n10578471 +n10578656 +n10579062 +n10579549 +n10580030 +n10580437 +n10580535 +n10581648 +n10581890 +n10582604 +n10582746 +n10583387 +n10583790 +n10585077 +n10585217 +n10585628 +n10586166 +n10586265 +n10586444 +n10586903 +n10586998 +n10588074 +n10588357 +n10588724 +n10588965 +n10589666 +n10590146 
+n10590239 +n10590452 +n10590903 +n10591072 +n10591811 +n10592049 +n10592811 +n10593521 +n10594147 +n10594523 +n10594857 +n10595164 +n10595647 +n10596517 +n10596899 +n10597505 +n10597745 +n10597889 +n10598013 +n10598181 +n10598459 +n10598904 +n10599215 +n10599806 +n10601234 +n10601362 +n10602119 +n10602470 +n10602985 +n10603528 +n10603851 +n10604275 +n10604380 +n10604634 +n10604880 +n10604979 +n10605253 +n10605737 +n10607291 +n10607478 +n10609092 +n10609198 +n10610465 +n10610850 +n10611267 +n10611613 +n10612210 +n10612373 +n10612518 +n10613996 +n10614507 +n10614629 +n10615179 +n10615334 +n10616578 +n10617024 +n10617193 +n10617397 +n10618234 +n10618342 +n10618465 +n10618685 +n10618848 +n10619492 +n10619642 +n10619888 +n10620212 +n10620586 +n10620758 +n10621294 +n10621400 +n10621514 +n10622053 +n10624074 +n10624310 +n10624437 +n10624540 +n10625860 +n10626630 +n10627252 +n10628097 +n10628644 +n10629329 +n10629647 +n10629939 +n10630093 +n10630188 +n10631131 +n10631309 +n10631654 +n10632576 +n10633298 +n10633450 +n10634464 +n10634849 +n10634990 +n10635788 +n10636488 +n10637483 +n10638922 +n10639238 +n10639359 +n10639637 +n10639817 +n10641223 +n10642596 +n10642705 +n10643095 +n10643837 +n10643937 +n10644598 +n10645017 +n10645223 +n10646032 +n10646140 +n10646433 +n10646641 +n10646780 +n10646942 +n10647745 +n10648237 +n10648696 +n10649197 +n10649308 +n10650162 +n10652605 +n10652703 +n10654015 +n10654211 +n10654321 +n10654827 +n10654932 +n10655169 +n10655442 +n10655594 +n10655730 +n10655986 +n10656120 +n10656223 +n10656969 +n10657306 +n10657556 +n10657835 +n10658304 +n10659042 +n10659762 +n10660128 +n10660621 +n10660883 +n10661002 +n10661216 +n10661563 +n10661732 +n10663315 +n10663549 +n10665302 +n10665587 +n10665698 +n10666752 +n10667477 +n10667709 +n10667863 +n10668450 +n10668666 +n10669991 +n10671042 +n10671613 +n10671736 +n10671898 +n10672371 +n10672540 +n10672662 +n10673296 +n10673776 +n10674130 +n10674713 +n10675010 +n10675142 +n10675609 +n10676018 +n10676434 +n10676569 +n10678937 +n10679174 +n10679503 +n10679610 +n10679723 +n10680609 +n10680796 +n10681194 +n10681557 +n10682713 +n10682953 +n10683675 +n10684146 +n10684630 +n10684827 +n10685398 +n10686073 +n10686517 +n10686694 +n10686885 +n10688356 +n10688811 +n10689306 +n10690268 +n10690421 +n10690648 +n10691318 +n10691937 +n10692090 +n10692482 +n10692883 +n10693235 +n10693334 +n10693824 +n10694258 +n10694939 +n10695450 +n10696101 +n10696508 +n10697135 +n10697282 +n10698368 +n10699558 +n10699752 +n10699981 +n10700105 +n10700201 +n10700640 +n10700963 +n10701180 +n10701644 +n10701962 +n10702167 +n10702615 +n10703221 +n10703336 +n10703480 +n10703692 +n10704238 +n10704712 +n10704886 +n10705448 +n10705615 +n10706812 +n10707134 +n10707233 +n10707707 +n10708292 +n10708454 +n10709529 +n10710171 +n10710259 +n10710778 +n10710913 +n10711483 +n10711766 +n10712229 +n10712374 +n10712474 +n10712690 +n10712835 +n10713254 +n10713686 +n10713843 +n10714195 +n10715030 +n10715347 +n10715789 +n10716576 +n10716864 +n10717055 +n10717196 +n10717337 +n10718131 +n10718349 +n10718509 +n10718665 +n10718952 +n10719036 +n10719132 +n10719267 +n10719807 +n10720197 +n10720453 +n10720964 +n10721124 +n10721321 +n10721612 +n10721708 +n10721819 +n10722029 +n10722575 +n10722965 +n10723230 +n10723597 +n10724132 +n10724372 +n10724570 +n10725280 +n10726031 +n10726786 +n10727016 +n10727171 +n10727458 +n10728117 +n10728233 +n10728624 +n10728998 +n10729330 +n10730542 +n10730728 +n10731013 +n10731732 +n10732010 +n10732521 +n10732854 +n10732967 +n10733820 +n10734394 +n10734741 +n10734891 
+n10734963 +n10735173 +n10735298 +n10735984 +n10737103 +n10737264 +n10738111 +n10738215 +n10738670 +n10738871 +n10739135 +n10739297 +n10739391 +n10740594 +n10740732 +n10740868 +n10741152 +n10741367 +n10741493 +n10742005 +n10742111 +n10742546 +n10742997 +n10743124 +n10743356 +n10744078 +n10744164 +n10745006 +n10745770 +n10746931 +n10747119 +n10747424 +n10747548 +n10747965 +n10748142 +n10748506 +n10748620 +n10749928 +n10750031 +n10750188 +n10750640 +n10751026 +n10751152 +n10751265 +n10751710 +n10752480 +n10753061 +n10753182 +n10753339 +n10753442 +n10753989 +n10754189 +n10754281 +n10754449 +n10755080 +n10755164 +n10755394 +n10755648 +n10756061 +n10756148 +n10756261 +n10756641 +n10756837 +n10757050 +n10757492 +n10758337 +n10758445 +n10758949 +n10759151 +n10759331 +n10759982 +n10760199 +n10760622 +n10760951 +n10761190 +n10761326 +n10761519 +n10762212 +n10762480 +n10763075 +n10763245 +n10763383 +n10763620 +n10764465 +n10764622 +n10764719 +n10765305 +n10765587 +n10765679 +n10765885 +n10766260 +n10768148 +n10768272 +n10768903 +n10769084 +n10769188 +n10769321 +n10769459 +n10771066 +n10772092 +n10772580 +n10772937 +n10773665 +n10773800 +n10774329 +n10774756 +n10775003 +n10775128 +n10776052 +n10776339 +n10776887 +n10777299 +n10778044 +n10778148 +n10778711 +n10778999 +n10779610 +n10779897 +n10779995 +n10780284 +n10780632 +n10781236 +n10781817 +n10782362 +n10782471 +n10782791 +n10782940 +n10783240 +n10783539 +n10783646 +n10783734 +n10784113 +n10784544 +n10784922 +n10785480 +n10787470 +n10788852 +n10789415 +n10789709 +n10791115 +n10791221 +n10791820 +n10791890 +n10792335 +n10792506 +n10792856 +n10793570 +n10793799 +n10794014 +n10801561 +n10801802 +n10802507 +n10802621 +n10802953 +n10803031 +n10803282 +n10803978 +n10804287 +n10804636 +n10804732 +n10805501 +n10806113 +n10994097 +n11100798 +n11196627 +n11242849 +n11318824 +n11346873 +n11448153 +n11487732 +n11508382 +n11511327 +n11524451 +n11530008 +n11531193 +n11531334 +n11532682 +n11533212 +n11533999 +n11536567 +n11536673 +n11537327 +n11539289 +n11542137 +n11542640 +n11544015 +n11545350 +n11545524 +n11545714 +n11547562 +n11547855 +n11548728 +n11548870 +n11549009 +n11549245 +n11549779 +n11549895 +n11552133 +n11552386 +n11552594 +n11552806 +n11552976 +n11553240 +n11553522 +n11596108 +n11597657 +n11598287 +n11598686 +n11598886 +n11599324 +n11600372 +n11601177 +n11601333 +n11601918 +n11602091 +n11602478 +n11602873 +n11603246 +n11603462 +n11603835 +n11604046 +n11608250 +n11609475 +n11609684 +n11609862 +n11610047 +n11610215 +n11610437 +n11610602 +n11610823 +n11611087 +n11611233 +n11611356 +n11611561 +n11611758 +n11612018 +n11612235 +n11612349 +n11612575 +n11612923 +n11613219 +n11613459 +n11613692 +n11613867 +n11614039 +n11614250 +n11614420 +n11614713 +n11615026 +n11615259 +n11615387 +n11615607 +n11615812 +n11615967 +n11616260 +n11616486 +n11616662 +n11616852 +n11617090 +n11617272 +n11617631 +n11617878 +n11618079 +n11618290 +n11618525 +n11618861 +n11619227 +n11619455 +n11619687 +n11619845 +n11620016 +n11620389 +n11620673 +n11621029 +n11621281 +n11621547 +n11621727 +n11621950 +n11622184 +n11622368 +n11622591 +n11622771 +n11623105 +n11623815 +n11623967 +n11624192 +n11624531 +n11625003 +n11625223 +n11625391 +n11625632 +n11625804 +n11626010 +n11626152 +n11626409 +n11626585 +n11626826 +n11627168 +n11627512 +n11627714 +n11627908 +n11628087 +n11628456 +n11628793 +n11629047 +n11629354 +n11630017 +n11630489 +n11631159 +n11631405 +n11631619 +n11631854 +n11631985 +n11632167 +n11632376 +n11632619 +n11632929 +n11633284 +n11634736 +n11635152 +n11635433 +n11635830 +n11636204 
+n11636835 +n11639084 +n11639306 +n11639445 +n11640132 +n11643835 +n11644046 +n11644226 +n11644462 +n11644872 +n11645163 +n11645590 +n11645914 +n11646167 +n11646344 +n11646517 +n11646694 +n11646955 +n11647306 +n11647703 +n11647868 +n11648039 +n11648268 +n11648776 +n11649150 +n11649359 +n11649878 +n11650160 +n11650307 +n11650430 +n11650558 +n11650759 +n11652039 +n11652217 +n11652376 +n11652578 +n11652753 +n11652966 +n11653126 +n11653570 +n11653904 +n11654293 +n11654438 +n11654984 +n11655152 +n11655592 +n11655974 +n11656123 +n11656549 +n11656771 +n11657585 +n11658331 +n11658544 +n11658709 +n11659248 +n11659627 +n11660300 +n11661372 +n11661909 +n11662128 +n11662371 +n11662585 +n11662937 +n11663263 +n11664418 +n11665372 +n11666854 +n11668117 +n11669786 +n11669921 +n11672269 +n11672400 +n11674019 +n11674332 +n11675025 +n11675404 +n11675738 +n11676500 +n11676743 +n11676850 +n11677485 +n11677902 +n11678010 +n11678299 +n11678377 +n11679378 +n11680457 +n11680596 +n11682659 +n11683216 +n11683838 +n11684264 +n11684499 +n11684654 +n11685091 +n11685621 +n11686195 +n11686652 +n11686780 +n11686912 +n11687071 +n11687432 +n11687789 +n11687964 +n11688069 +n11688378 +n11689197 +n11689367 +n11689483 +n11689678 +n11689815 +n11689957 +n11690088 +n11690254 +n11690455 +n11691046 +n11691857 +n11692265 +n11692792 +n11693981 +n11694300 +n11694469 +n11694664 +n11694866 +n11695085 +n11695285 +n11695599 +n11695974 +n11696450 +n11696935 +n11697560 +n11697802 +n11698042 +n11698245 +n11699442 +n11699751 +n11700058 +n11700279 +n11700864 +n11701066 +n11701302 +n11702713 +n11703669 +n11704093 +n11704620 +n11704791 +n11705171 +n11705387 +n11705573 +n11705776 +n11706325 +n11706761 +n11706942 +n11707229 +n11707827 +n11708658 +n11708857 +n11709045 +n11709205 +n11709674 +n11710136 +n11710393 +n11710658 +n11710827 +n11710987 +n11711289 +n11711537 +n11711764 +n11711971 +n11712282 +n11713164 +n11713370 +n11713763 +n11714382 +n11715430 +n11715678 +n11716698 +n11717399 +n11717577 +n11718296 +n11718681 +n11719286 +n11720353 +n11720643 +n11720891 +n11721337 +n11721642 +n11722036 +n11722342 +n11722466 +n11722621 +n11722982 +n11723227 +n11723452 +n11723770 +n11723986 +n11724109 +n11724660 +n11725015 +n11725311 +n11725480 +n11725623 +n11725821 +n11725973 +n11726145 +n11726269 +n11726433 +n11726707 +n11727091 +n11727358 +n11727540 +n11727738 +n11728099 +n11728769 +n11728945 +n11729142 +n11729478 +n11729860 +n11730015 +n11730458 +n11730602 +n11730750 +n11730933 +n11731157 +n11731659 +n11732052 +n11732567 +n11733054 +n11733312 +n11733548 +n11734493 +n11734698 +n11735053 +n11735570 +n11735977 +n11736362 +n11736694 +n11736851 +n11737009 +n11737125 +n11737534 +n11738547 +n11738997 +n11739365 +n11739978 +n11740414 +n11741175 +n11741350 +n11741575 +n11741797 +n11742310 +n11742878 +n11744011 +n11744108 +n11744471 +n11745817 +n11746600 +n11747468 +n11748002 +n11748811 +n11749112 +n11749603 +n11750173 +n11750508 +n11750989 +n11751765 +n11751974 +n11752578 +n11752798 +n11752937 +n11753143 +n11753355 +n11753562 +n11753700 +n11754893 +n11756092 +n11756329 +n11756669 +n11756870 +n11757017 +n11757190 +n11757653 +n11757851 +n11758122 +n11758276 +n11758483 +n11758799 +n11759224 +n11759404 +n11759609 +n11759853 +n11760785 +n11761202 +n11761650 +n11761836 +n11762018 +n11762433 +n11762927 +n11763142 +n11763625 +n11763874 +n11764478 +n11764814 +n11765568 +n11766046 +n11766189 +n11766432 +n11767354 +n11767877 +n11768816 +n11769176 +n11769621 +n11769803 +n11770256 +n11771147 +n11771539 +n11771746 +n11771924 +n11772408 +n11772879 +n11773408 +n11773628 +n11773987 
+n11774513 +n11774972 +n11775340 +n11775626 +n11776234 +n11777080 +n11778092 +n11778257 +n11779300 +n11780148 +n11780424 +n11781176 +n11782036 +n11782266 +n11782761 +n11782878 +n11783162 +n11783920 +n11784126 +n11784497 +n11785276 +n11785668 +n11785875 +n11786131 +n11786539 +n11786843 +n11787190 +n11788039 +n11788727 +n11789066 +n11789438 +n11789589 +n11789962 +n11790089 +n11790788 +n11790936 +n11791341 +n11791569 +n11792029 +n11792341 +n11792742 +n11793403 +n11793779 +n11794024 +n11794139 +n11794519 +n11795049 +n11795216 +n11795580 +n11796005 +n11796188 +n11797321 +n11797508 +n11797981 +n11798270 +n11798496 +n11798688 +n11798978 +n11799331 +n11799732 +n11800236 +n11800565 +n11801392 +n11801665 +n11801891 +n11802410 +n11802586 +n11802800 +n11802995 +n11805255 +n11805544 +n11805956 +n11806219 +n11806369 +n11806521 +n11806679 +n11806814 +n11807108 +n11807525 +n11807696 +n11807979 +n11808299 +n11808468 +n11808721 +n11808932 +n11809094 +n11809271 +n11809437 +n11809594 +n11809754 +n11810030 +n11810358 +n11811059 +n11811473 +n11811706 +n11811921 +n11812094 +n11812910 +n11813077 +n11814584 +n11814996 +n11815491 +n11815721 +n11815918 +n11816121 +n11816336 +n11816649 +n11816829 +n11817160 +n11817501 +n11817914 +n11818069 +n11818636 +n11819509 +n11819912 +n11820965 +n11821184 +n11822300 +n11823043 +n11823305 +n11823436 +n11823756 +n11824146 +n11824344 +n11824747 +n11825351 +n11825749 +n11826198 +n11826569 +n11827541 +n11828577 +n11828973 +n11829205 +n11829672 +n11829922 +n11830045 +n11830252 +n11830400 +n11830714 +n11830906 +n11831100 +n11831297 +n11831521 +n11832214 +n11832480 +n11832671 +n11832899 +n11833373 +n11833749 +n11834272 +n11834654 +n11834890 +n11835251 +n11836327 +n11836722 +n11837204 +n11837351 +n11837562 +n11837743 +n11837970 +n11838413 +n11838916 +n11839460 +n11839568 +n11839823 +n11840067 +n11840246 +n11840476 +n11840764 +n11841247 +n11843441 +n11844371 +n11844892 +n11845557 +n11845793 +n11845913 +n11846312 +n11846425 +n11846765 +n11847169 +n11848479 +n11848867 +n11849271 +n11849467 +n11849871 +n11849983 +n11850521 +n11850918 +n11851258 +n11851578 +n11851839 +n11852028 +n11852148 +n11852531 +n11853079 +n11853356 +n11853813 +n11854479 +n11855274 +n11855435 +n11855553 +n11855842 +n11856573 +n11857696 +n11857875 +n11858077 +n11858703 +n11858814 +n11859275 +n11859472 +n11859737 +n11860208 +n11860555 +n11861238 +n11861487 +n11861641 +n11861853 +n11862835 +n11863467 +n11863877 +n11865071 +n11865276 +n11865429 +n11865574 +n11865874 +n11866248 +n11866706 +n11867311 +n11868814 +n11869351 +n11869689 +n11870044 +n11870418 +n11870747 +n11871059 +n11871496 +n11871748 +n11872146 +n11872324 +n11872658 +n11873182 +n11873612 +n11874081 +n11874423 +n11874878 +n11875523 +n11875691 +n11875938 +n11876204 +n11876432 +n11876634 +n11876803 +n11877193 +n11877283 +n11877473 +n11877646 +n11877860 +n11878101 +n11878283 +n11878633 +n11879054 +n11879722 +n11879895 +n11881189 +n11882074 +n11882237 +n11882426 +n11882636 +n11882821 +n11882972 +n11883328 +n11883628 +n11883945 +n11884384 +n11884967 +n11885856 +n11887119 +n11887310 +n11887476 +n11887750 +n11888061 +n11888424 +n11888800 +n11889205 +n11889619 +n11890022 +n11890150 +n11890884 +n11891175 +n11892029 +n11892181 +n11892637 +n11892817 +n11893640 +n11893916 +n11894327 +n11894558 +n11894770 +n11895092 +n11895472 +n11895714 +n11896141 +n11896722 +n11897116 +n11897466 +n11898639 +n11898775 +n11899223 +n11899762 +n11899921 +n11900569 +n11901294 +n11901452 +n11901597 +n11901759 +n11901977 +n11902200 +n11902389 +n11902709 +n11902982 +n11903333 +n11903671 +n11904109 
+n11904274 +n11905392 +n11905749 +n11906127 +n11906514 +n11906917 +n11907100 +n11907405 +n11907689 +n11908549 +n11908846 +n11909864 +n11910271 +n11910460 +n11910666 +n11915214 +n11915658 +n11915899 +n11916467 +n11916696 +n11917407 +n11917835 +n11918286 +n11918473 +n11918808 +n11919447 +n11919761 +n11919975 +n11920133 +n11920498 +n11920663 +n11920998 +n11921395 +n11921792 +n11922661 +n11922755 +n11922839 +n11922926 +n11923174 +n11923397 +n11923637 +n11924014 +n11924445 +n11924849 +n11925303 +n11925450 +n11925898 +n11926365 +n11926833 +n11926976 +n11927215 +n11927740 +n11928352 +n11928858 +n11929743 +n11930038 +n11930203 +n11930353 +n11930571 +n11930788 +n11930994 +n11931135 +n11931540 +n11931918 +n11932745 +n11932927 +n11933099 +n11933257 +n11933387 +n11933546 +n11933728 +n11933903 +n11934041 +n11934239 +n11934463 +n11934616 +n11934807 +n11935027 +n11935187 +n11935330 +n11935469 +n11935627 +n11935715 +n11935794 +n11935877 +n11935953 +n11936027 +n11936113 +n11936199 +n11936287 +n11936369 +n11936448 +n11936539 +n11936624 +n11936707 +n11936782 +n11936864 +n11936946 +n11937023 +n11937102 +n11937195 +n11937278 +n11937360 +n11937446 +n11937692 +n11938556 +n11939180 +n11939491 +n11939699 +n11940006 +n11940349 +n11940599 +n11940750 +n11941094 +n11941478 +n11941924 +n11942659 +n11943133 +n11943407 +n11943660 +n11943992 +n11944196 +n11944751 +n11944954 +n11945367 +n11945514 +n11945783 +n11946051 +n11946313 +n11946727 +n11946918 +n11947251 +n11947629 +n11947802 +n11948044 +n11948264 +n11948469 +n11948864 +n11949015 +n11949402 +n11949857 +n11950345 +n11950686 +n11950877 +n11951052 +n11951511 +n11951820 +n11952346 +n11952541 +n11953038 +n11953339 +n11953610 +n11953884 +n11954161 +n11954345 +n11954484 +n11954642 +n11954798 +n11955040 +n11955153 +n11955532 +n11955896 +n11956348 +n11956850 +n11957317 +n11957514 +n11957678 +n11958080 +n11958499 +n11958888 +n11959259 +n11959632 +n11959862 +n11960245 +n11960673 +n11961100 +n11961446 +n11961871 +n11962272 +n11962667 +n11962994 +n11963572 +n11963932 +n11964446 +n11964848 +n11965218 +n11965627 +n11965962 +n11966083 +n11966215 +n11966385 +n11966617 +n11966896 +n11967142 +n11967315 +n11967744 +n11967878 +n11968519 +n11968704 +n11968931 +n11969166 +n11969607 +n11969806 +n11970101 +n11970298 +n11970586 +n11971248 +n11971406 +n11971783 +n11971927 +n11972291 +n11972759 +n11972959 +n11973341 +n11973634 +n11973749 +n11974373 +n11974557 +n11974888 +n11975254 +n11976170 +n11976314 +n11976511 +n11976933 +n11977303 +n11977660 +n11977887 +n11978233 +n11978551 +n11978713 +n11978961 +n11979187 +n11979354 +n11979527 +n11979715 +n11979964 +n11980318 +n11980682 +n11981192 +n11981475 +n11982115 +n11982545 +n11982939 +n11983375 +n11983606 +n11984144 +n11984542 +n11985053 +n11985321 +n11985739 +n11985903 +n11986511 +n11986729 +n11987126 +n11987349 +n11987511 +n11988132 +n11988596 +n11988893 +n11989087 +n11989393 +n11989869 +n11990167 +n11990313 +n11990627 +n11990920 +n11991263 +n11991549 +n11991777 +n11992479 +n11992806 +n11993203 +n11993444 +n11993675 +n11994150 +n11995092 +n11995396 +n11996251 +n11996677 +n11997032 +n11997160 +n11997969 +n11998492 +n11998888 +n11999278 +n11999656 +n12000191 +n12001294 +n12001707 +n12001924 +n12002428 +n12002651 +n12002826 +n12003167 +n12003696 +n12004120 +n12004547 +n12004987 +n12005656 +n12006306 +n12006766 +n12006930 +n12007196 +n12007406 +n12007766 +n12008252 +n12008487 +n12008749 +n12009047 +n12009420 +n12009792 +n12010628 +n12010815 +n12011370 +n12011620 +n12012111 +n12012253 +n12012510 +n12013035 +n12013511 +n12013701 +n12014085 +n12014355 
+n12014923 +n12015221 +n12015525 +n12015959 +n12016434 +n12016567 +n12016777 +n12016914 +n12017127 +n12017326 +n12017511 +n12017664 +n12017853 +n12018014 +n12018100 +n12018188 +n12018271 +n12018363 +n12018447 +n12018530 +n12018760 +n12019035 +n12019827 +n12020184 +n12020507 +n12020736 +n12020941 +n12022054 +n12022382 +n12022821 +n12023108 +n12023407 +n12023726 +n12024176 +n12024445 +n12024690 +n12024805 +n12025220 +n12026018 +n12026476 +n12026981 +n12027222 +n12027658 +n12028424 +n12029039 +n12029635 +n12030092 +n12030654 +n12030908 +n12031139 +n12031388 +n12031547 +n12031927 +n12032429 +n12032686 +n12033139 +n12033504 +n12033709 +n12034141 +n12034384 +n12034594 +n12035631 +n12035907 +n12036067 +n12036226 +n12036939 +n12037499 +n12037691 +n12038038 +n12038208 +n12038406 +n12038585 +n12038760 +n12038898 +n12039317 +n12041446 +n12043444 +n12043673 +n12043836 +n12044041 +n12044467 +n12044784 +n12045157 +n12045514 +n12045860 +n12046028 +n12046428 +n12046815 +n12047345 +n12047884 +n12048056 +n12048399 +n12048928 +n12049282 +n12049562 +n12050533 +n12050959 +n12051103 +n12051514 +n12051792 +n12052267 +n12052447 +n12052787 +n12053405 +n12053690 +n12053962 +n12054195 +n12055073 +n12055516 +n12056099 +n12056217 +n12056601 +n12056758 +n12056990 +n12057211 +n12057447 +n12057660 +n12057895 +n12058192 +n12058630 +n12058822 +n12059314 +n12059625 +n12060546 +n12061104 +n12061380 +n12061614 +n12062105 +n12062468 +n12062626 +n12062781 +n12063211 +n12063639 +n12064389 +n12064591 +n12065316 +n12065649 +n12065777 +n12066018 +n12066261 +n12066451 +n12066630 +n12066821 +n12067029 +n12067193 +n12067433 +n12067672 +n12067817 +n12068138 +n12068432 +n12068615 +n12069009 +n12069217 +n12069679 +n12070016 +n12070381 +n12070583 +n12070712 +n12071259 +n12071477 +n12071744 +n12072210 +n12072722 +n12073217 +n12073554 +n12073991 +n12074408 +n12074867 +n12075010 +n12075151 +n12075299 +n12075830 +n12076223 +n12076577 +n12076852 +n12077244 +n12077944 +n12078172 +n12078451 +n12078747 +n12079120 +n12079523 +n12079963 +n12080395 +n12080588 +n12080820 +n12081215 +n12081649 +n12082131 +n12083113 +n12083591 +n12083847 +n12084158 +n12084400 +n12084555 +n12084890 +n12085267 +n12085664 +n12086012 +n12086192 +n12086539 +n12086778 +n12087961 +n12088223 +n12088327 +n12088495 +n12088909 +n12089320 +n12089496 +n12089846 +n12090890 +n12091213 +n12091377 +n12091550 +n12091697 +n12091953 +n12092262 +n12092417 +n12092629 +n12092930 +n12093329 +n12093600 +n12093885 +n12094244 +n12094401 +n12094612 +n12095020 +n12095281 +n12095412 +n12095543 +n12095647 +n12095934 +n12096089 +n12096395 +n12096563 +n12096674 +n12097396 +n12097556 +n12098403 +n12098524 +n12098827 +n12099342 +n12100187 +n12101870 +n12102133 +n12103680 +n12103894 +n12104104 +n12104238 +n12104501 +n12104734 +n12105125 +n12105353 +n12105828 +n12105981 +n12106134 +n12106323 +n12107002 +n12107191 +n12107710 +n12107970 +n12108432 +n12108613 +n12108871 +n12109365 +n12109827 +n12110085 +n12110236 +n12110352 +n12110475 +n12110778 +n12111238 +n12111627 +n12112008 +n12112337 +n12112609 +n12112918 +n12113195 +n12113323 +n12113657 +n12114010 +n12114590 +n12115180 +n12116058 +n12116429 +n12116734 +n12117017 +n12117235 +n12117326 +n12117695 +n12117912 +n12118414 +n12118661 +n12119099 +n12119238 +n12119390 +n12119539 +n12119717 +n12120347 +n12120578 +n12121033 +n12121187 +n12121610 +n12122442 +n12122725 +n12122918 +n12123648 +n12123741 +n12124172 +n12124627 +n12124818 +n12125001 +n12125183 +n12125584 +n12126084 +n12126360 +n12126736 +n12127460 +n12127575 +n12127768 +n12128071 +n12128306 +n12128490 
+n12129134 +n12129738 +n12129986 +n12130549 +n12131405 +n12131550 +n12132092 +n12132956 +n12133151 +n12133462 +n12133682 +n12134025 +n12134486 +n12134695 +n12134836 +n12135049 +n12135576 +n12135729 +n12135898 +n12136392 +n12136581 +n12136720 +n12137120 +n12137569 +n12137791 +n12137954 +n12138110 +n12138248 +n12138444 +n12138578 +n12139196 +n12139575 +n12139793 +n12139921 +n12140511 +n12140759 +n12140903 +n12141167 +n12141385 +n12141495 +n12142085 +n12142357 +n12142450 +n12143065 +n12143215 +n12143405 +n12143676 +n12144313 +n12144580 +n12144987 +n12145148 +n12145477 +n12146311 +n12146488 +n12146654 +n12147226 +n12147835 +n12148757 +n12150722 +n12150969 +n12151170 +n12151615 +n12152031 +n12152251 +n12152532 +n12152722 +n12153033 +n12153224 +n12153580 +n12153741 +n12153914 +n12154114 +n12154773 +n12155009 +n12155583 +n12155773 +n12156679 +n12156819 +n12157056 +n12157179 +n12157769 +n12158031 +n12158443 +n12158798 +n12159055 +n12159388 +n12159555 +n12159804 +n12159942 +n12160125 +n12160303 +n12160490 +n12160857 +n12161056 +n12161285 +n12161577 +n12161744 +n12161969 +n12162181 +n12162425 +n12162758 +n12163035 +n12163279 +n12164363 +n12164656 +n12164881 +n12165170 +n12165384 +n12165758 +n12166128 +n12166424 +n12166793 +n12166929 +n12167075 +n12167436 +n12167602 +n12168565 +n12169099 +n12170585 +n12171098 +n12171316 +n12171966 +n12172364 +n12172481 +n12172906 +n12173069 +n12173664 +n12173912 +n12174311 +n12174521 +n12174926 +n12175181 +n12175370 +n12175598 +n12176453 +n12176709 +n12176953 +n12177129 +n12177455 +n12178129 +n12178780 +n12178896 +n12179122 +n12179632 +n12180168 +n12180456 +n12180885 +n12181352 +n12181612 +n12182049 +n12182276 +n12183026 +n12183452 +n12183816 +n12184095 +n12184468 +n12184912 +n12185254 +n12185859 +n12186352 +n12186554 +n12186839 +n12187247 +n12187663 +n12187891 +n12188289 +n12188635 +n12189429 +n12189779 +n12189987 +n12190410 +n12190869 +n12191240 +n12192132 +n12192877 +n12193334 +n12193665 +n12194147 +n12194613 +n12195391 +n12195533 +n12195734 +n12196129 +n12196336 +n12196527 +n12196694 +n12196954 +n12197359 +n12197601 +n12198286 +n12198793 +n12199266 +n12199399 +n12199790 +n12199982 +n12200143 +n12200504 +n12200905 +n12201331 +n12201580 +n12201938 +n12202936 +n12203529 +n12203699 +n12203896 +n12204032 +n12204175 +n12204730 +n12205460 +n12205694 +n12214789 +n12215022 +n12215210 +n12215579 +n12215824 +n12216215 +n12216628 +n12216968 +n12217453 +n12217851 +n12218274 +n12218490 +n12218868 +n12219668 +n12220019 +n12220496 +n12220829 +n12221191 +n12221368 +n12221522 +n12221801 +n12222090 +n12222493 +n12222900 +n12223160 +n12223569 +n12223764 +n12224978 +n12225222 +n12225349 +n12225563 +n12226932 +n12227658 +n12227909 +n12228229 +n12228387 +n12228689 +n12228886 +n12229111 +n12229651 +n12229887 +n12230540 +n12230794 +n12231192 +n12231709 +n12232114 +n12232280 +n12232851 +n12233249 +n12234318 +n12234669 +n12235051 +n12235479 +n12236160 +n12236546 +n12236768 +n12236977 +n12237152 +n12237486 +n12237641 +n12237855 +n12238756 +n12238913 +n12239240 +n12239647 +n12239880 +n12240150 +n12240477 +n12240965 +n12241192 +n12241426 +n12241880 +n12242123 +n12242409 +n12242850 +n12243109 +n12243693 +n12244153 +n12244458 +n12244650 +n12244819 +n12245319 +n12245695 +n12245885 +n12246037 +n12246232 +n12246773 +n12246941 +n12247202 +n12247407 +n12247963 +n12248141 +n12248359 +n12248574 +n12248780 +n12248941 +n12249122 +n12249294 +n12249542 +n12251001 +n12251278 +n12251740 +n12252168 +n12252383 +n12252866 +n12253229 +n12253487 +n12253664 +n12253835 +n12254168 +n12255225 +n12256112 +n12256325 
+n12256522 +n12256708 +n12256920 +n12257570 +n12257725 +n12258101 +n12258885 +n12259316 +n12260799 +n12261359 +n12261571 +n12261808 +n12262018 +n12262185 +n12262553 +n12263038 +n12263204 +n12263410 +n12263588 +n12263738 +n12263987 +n12264512 +n12264786 +n12265083 +n12265394 +n12265600 +n12266217 +n12266528 +n12266644 +n12266796 +n12266984 +n12267133 +n12267265 +n12267411 +n12267534 +n12267677 +n12267931 +n12268246 +n12269241 +n12269406 +n12269652 +n12270027 +n12270278 +n12270460 +n12270741 +n12270946 +n12271187 +n12271451 +n12271643 +n12271933 +n12272239 +n12272432 +n12272735 +n12272883 +n12273114 +n12273344 +n12273515 +n12273768 +n12273939 +n12274151 +n12274358 +n12274630 +n12274863 +n12275131 +n12275317 +n12275489 +n12275675 +n12275888 +n12276110 +n12276314 +n12276477 +n12276628 +n12276872 +n12277150 +n12277334 +n12277578 +n12277800 +n12278107 +n12278371 +n12278650 +n12278865 +n12279060 +n12279293 +n12279458 +n12279772 +n12280060 +n12280364 +n12281241 +n12281788 +n12281974 +n12282235 +n12282527 +n12282737 +n12282933 +n12283147 +n12283395 +n12283542 +n12283790 +n12284262 +n12284821 +n12285049 +n12285195 +n12285369 +n12285512 +n12285705 +n12285900 +n12286068 +n12286197 +n12286826 +n12286988 +n12287195 +n12287642 +n12287836 +n12288005 +n12288823 +n12289310 +n12289433 +n12289585 +n12290748 +n12290975 +n12291143 +n12291459 +n12291671 +n12291959 +n12292463 +n12292877 +n12293723 +n12294124 +n12294331 +n12294542 +n12294723 +n12294871 +n12295033 +n12295237 +n12295429 +n12295796 +n12296045 +n12296432 +n12296735 +n12296929 +n12297110 +n12297280 +n12297507 +n12297846 +n12298165 +n12299640 +n12300840 +n12301180 +n12301445 +n12301613 +n12301766 +n12302071 +n12302248 +n12302565 +n12303083 +n12303462 +n12304115 +n12304286 +n12304420 +n12304703 +n12304899 +n12305089 +n12305293 +n12305475 +n12305654 +n12305819 +n12305986 +n12306089 +n12306270 +n12306717 +n12306938 +n12307076 +n12307240 +n12307756 +n12308112 +n12308447 +n12308907 +n12309277 +n12309630 +n12310021 +n12310349 +n12310638 +n12311045 +n12311224 +n12311413 +n12311579 +n12312110 +n12312728 +n12315060 +n12315245 +n12315598 +n12315999 +n12316444 +n12316572 +n12317296 +n12318378 +n12318782 +n12318965 +n12319204 +n12319414 +n12320010 +n12320414 +n12320627 +n12320806 +n12321077 +n12321395 +n12321669 +n12321873 +n12322099 +n12322501 +n12322699 +n12323665 +n12324056 +n12324222 +n12324388 +n12324558 +n12324906 +n12325234 +n12325787 +n12327022 +n12327528 +n12327846 +n12328398 +n12328567 +n12328801 +n12329260 +n12329473 +n12330239 +n12330469 +n12330587 +n12330891 +n12331066 +n12331263 +n12331655 +n12331788 +n12332030 +n12332218 +n12332555 +n12333053 +n12333530 +n12333771 +n12333961 +n12334153 +n12334293 +n12334891 +n12335483 +n12335664 +n12335800 +n12335937 +n12336092 +n12336224 +n12336333 +n12336586 +n12336727 +n12336973 +n12337131 +n12337246 +n12337391 +n12337617 +n12337800 +n12337922 +n12338034 +n12338146 +n12338258 +n12338454 +n12338655 +n12338796 +n12338979 +n12339526 +n12339831 +n12340383 +n12340581 +n12340755 +n12341542 +n12341931 +n12342299 +n12342498 +n12342852 +n12343480 +n12343753 +n12344283 +n12344483 +n12344700 +n12344837 +n12345280 +n12345899 +n12346578 +n12346813 +n12346986 +n12347158 +n12349315 +n12349711 +n12350032 +n12350758 +n12351091 +n12351790 +n12352287 +n12352639 +n12352844 +n12352990 +n12353203 +n12353431 +n12353754 +n12355760 +n12356023 +n12356395 +n12356960 +n12357485 +n12357968 +n12358293 +n12360108 +n12360534 +n12360684 +n12360817 +n12360958 +n12361135 +n12361560 +n12361754 +n12361946 +n12362274 +n12362514 +n12362668 +n12363301 
+n12363768 +n12364604 +n12364940 +n12365158 +n12365285 +n12365462 +n12365900 +n12366053 +n12366186 +n12366313 +n12366675 +n12366870 +n12367611 +n12368028 +n12368257 +n12368451 +n12369066 +n12369309 +n12369476 +n12369665 +n12369845 +n12370174 +n12370549 +n12371202 +n12371439 +n12371704 +n12372233 +n12373100 +n12373739 +n12374418 +n12374705 +n12374862 +n12375769 +n12377198 +n12377494 +n12378249 +n12378753 +n12378963 +n12379531 +n12380761 +n12381511 +n12382233 +n12382875 +n12383737 +n12383894 +n12384037 +n12384227 +n12384375 +n12384569 +n12384680 +n12384839 +n12385429 +n12385566 +n12385830 +n12386945 +n12387103 +n12387633 +n12387839 +n12388143 +n12388293 +n12388858 +n12388989 +n12389130 +n12389501 +n12389727 +n12389932 +n12390099 +n12390314 +n12392070 +n12392549 +n12392765 +n12393269 +n12394118 +n12394328 +n12394638 +n12395068 +n12395289 +n12395463 +n12395906 +n12396091 +n12396924 +n12397431 +n12399132 +n12399384 +n12399534 +n12399656 +n12399899 +n12400489 +n12400720 +n12400924 +n12401335 +n12401684 +n12401893 +n12402051 +n12402348 +n12402596 +n12402840 +n12403075 +n12403276 +n12403513 +n12403994 +n12404729 +n12405714 +n12406304 +n12406488 +n12406715 +n12406902 +n12407079 +n12407222 +n12407396 +n12407545 +n12407715 +n12407890 +n12408077 +n12408280 +n12408466 +n12408717 +n12408873 +n12409231 +n12409470 +n12409651 +n12409840 +n12411461 +n12412355 +n12412606 +n12412987 +n12413165 +n12413301 +n12413419 +n12413642 +n12413880 +n12414035 +n12414159 +n12414329 +n12414449 +n12414818 +n12414932 +n12415595 +n12416073 +n12416423 +n12416703 +n12417836 +n12418221 +n12418507 +n12419037 +n12419878 +n12420124 +n12420535 +n12420722 +n12421137 +n12421467 +n12421683 +n12421917 +n12422129 +n12422559 +n12425281 +n12426623 +n12426749 +n12427184 +n12427391 +n12427566 +n12427757 +n12427946 +n12428076 +n12428242 +n12428412 +n12428747 +n12429352 +n12430198 +n12430471 +n12430675 +n12431434 +n12432069 +n12432356 +n12432574 +n12432707 +n12433081 +n12433178 +n12433769 +n12433952 +n12434106 +n12434483 +n12434634 +n12434775 +n12434985 +n12435152 +n12435486 +n12435649 +n12435777 +n12435965 +n12436090 +n12436907 +n12437513 +n12437769 +n12437930 +n12439154 +n12439830 +n12441183 +n12441390 +n12441552 +n12441958 +n12442548 +n12443323 +n12443736 +n12444095 +n12444898 +n12446200 +n12446519 +n12446737 +n12446908 +n12447121 +n12447346 +n12447581 +n12447891 +n12448136 +n12448361 +n12448700 +n12449296 +n12449526 +n12449784 +n12449934 +n12450344 +n12450607 +n12450840 +n12451070 +n12451240 +n12451399 +n12451566 +n12451915 +n12452256 +n12452480 +n12452673 +n12452836 +n12453018 +n12453186 +n12453714 +n12453857 +n12454159 +n12454436 +n12454556 +n12454705 +n12454793 +n12454949 +n12455950 +n12457091 +n12458550 +n12458713 +n12458874 +n12459629 +n12460146 +n12460697 +n12460957 +n12461109 +n12461466 +n12461673 +n12462032 +n12462221 +n12462582 +n12462805 +n12463134 +n12463743 +n12463975 +n12464128 +n12464476 +n12464649 +n12465557 +n12466727 +n12467018 +n12467197 +n12467433 +n12467592 +n12468545 +n12468719 +n12469517 +n12470092 +n12470512 +n12470907 +n12472024 +n12473608 +n12473840 +n12474167 +n12474418 +n12475035 +n12475242 +n12475774 +n12476510 +n12477163 +n12477401 +n12477583 +n12477747 +n12477983 +n12478768 +n12479537 +n12480456 +n12480895 +n12481150 +n12481289 +n12481458 +n12482437 +n12482668 +n12482893 +n12483282 +n12483427 +n12483625 +n12483841 +n12484244 +n12484784 +n12485653 +n12485981 +n12486574 +n12487058 +n12488454 +n12488709 +n12489046 +n12489676 +n12489815 +n12490490 +n12491017 +n12491435 +n12491826 +n12492106 +n12492460 +n12492682 
+n12492900 +n12493208 +n12493426 +n12493868 +n12494794 +n12495146 +n12495670 +n12495895 +n12496427 +n12496949 +n12497669 +n12498055 +n12498457 +n12499163 +n12499757 +n12499979 +n12500309 +n12500518 +n12500751 +n12501202 +n12504570 +n12504783 +n12505253 +n12506181 +n12506341 +n12506991 +n12507379 +n12507823 +n12508309 +n12508618 +n12508762 +n12509109 +n12509476 +n12509665 +n12509821 +n12509993 +n12510343 +n12510774 +n12511488 +n12511856 +n12512095 +n12512294 +n12512674 +n12513172 +n12513613 +n12513933 +n12514138 +n12514592 +n12514992 +n12515393 +n12515711 +n12515925 +n12516165 +n12516584 +n12516828 +n12517077 +n12517445 +n12517642 +n12518013 +n12518481 +n12519089 +n12519563 +n12520406 +n12521186 +n12521394 +n12522188 +n12522678 +n12522894 +n12523141 +n12523475 +n12523850 +n12524188 +n12525168 +n12525513 +n12525753 +n12526178 +n12526516 +n12526754 +n12527081 +n12527738 +n12528109 +n12528382 +n12528549 +n12528768 +n12528974 +n12529220 +n12529500 +n12529905 +n12530629 +n12530818 +n12531328 +n12531727 +n12532564 +n12532886 +n12533190 +n12533437 +n12534208 +n12534625 +n12534862 +n12536291 +n12537253 +n12537569 +n12538209 +n12539074 +n12539306 +n12539832 +n12540250 +n12540647 +n12540966 +n12541157 +n12541403 +n12542043 +n12542240 +n12543186 +n12543455 +n12543639 +n12543826 +n12544240 +n12544539 +n12545232 +n12545635 +n12545865 +n12546183 +n12546420 +n12546617 +n12546962 +n12547215 +n12547503 +n12548280 +n12548564 +n12548804 +n12549005 +n12549192 +n12549420 +n12549799 +n12550210 +n12550408 +n12551173 +n12551457 +n12552309 +n12552893 +n12553742 +n12554029 +n12554526 +n12554729 +n12554911 +n12555255 +n12555859 +n12556656 +n12557064 +n12557438 +n12557556 +n12557681 +n12558230 +n12558425 +n12558680 +n12559044 +n12559518 +n12560282 +n12560621 +n12560775 +n12561169 +n12561309 +n12561594 +n12562141 +n12562577 +n12562785 +n12563045 +n12563702 +n12564083 +n12564613 +n12565102 +n12565912 +n12566331 +n12566954 +n12567950 +n12568186 +n12568649 +n12569037 +n12569616 +n12569851 +n12570394 +n12570703 +n12570972 +n12571781 +n12572546 +n12572759 +n12572858 +n12573256 +n12573474 +n12573647 +n12573911 +n12574320 +n12574470 +n12574866 +n12575322 +n12575812 +n12576323 +n12576451 +n12576695 +n12577362 +n12577895 +n12578255 +n12578626 +n12578916 +n12579038 +n12579404 +n12579822 +n12580012 +n12580654 +n12580786 +n12580896 +n12581110 +n12582231 +n12582665 +n12582846 +n12583126 +n12583401 +n12583681 +n12583855 +n12584191 +n12584365 +n12584715 +n12585137 +n12585373 +n12585629 +n12586298 +n12586499 +n12586725 +n12586989 +n12587132 +n12587487 +n12587803 +n12588320 +n12588780 +n12589142 +n12589458 +n12589687 +n12589841 +n12590232 +n12590499 +n12590600 +n12590715 +n12591017 +n12591351 +n12591702 +n12592058 +n12592544 +n12592839 +n12593122 +n12593341 +n12593994 +n12594324 +n12594989 +n12595699 +n12595964 +n12596148 +n12596345 +n12596709 +n12596849 +n12597134 +n12597466 +n12597798 +n12598027 +n12599185 +n12599435 +n12599661 +n12599874 +n12600095 +n12600267 +n12601494 +n12601805 +n12602262 +n12602434 +n12602612 +n12602980 +n12603273 +n12603449 +n12603672 +n12604228 +n12604460 +n12604639 +n12604845 +n12605683 +n12606438 +n12606545 +n12607456 +n12609379 +n12610328 +n12610740 +n12611640 +n12612170 +n12612811 +n12613706 +n12614096 +n12614477 +n12614625 +n12615232 +n12615710 +n12616248 +n12616630 +n12616996 +n12617559 +n12618146 +n12618727 +n12620196 +n12620546 +n12620969 +n12621410 +n12621619 +n12621945 +n12622297 +n12622875 +n12623077 +n12623211 +n12623818 +n12624381 +n12624568 +n12625003 +n12625383 +n12625670 +n12625823 +n12626674 
+n12626878 +n12627119 +n12627347 +n12627526 +n12628356 +n12628705 +n12628986 +n12629305 +n12629666 +n12630763 +n12630999 +n12631331 +n12631637 +n12631932 +n12632335 +n12632733 +n12633061 +n12633638 +n12633994 +n12634211 +n12634429 +n12634734 +n12634986 +n12635151 +n12635359 +n12635532 +n12635744 +n12635955 +n12636224 +n12636885 +n12637123 +n12637485 +n12638218 +n12638556 +n12638753 +n12638964 +n12639168 +n12639376 +n12639584 +n12639736 +n12639910 +n12640081 +n12640284 +n12640435 +n12640607 +n12640839 +n12641007 +n12641180 +n12641413 +n12641931 +n12642090 +n12642200 +n12642435 +n12642600 +n12642964 +n12643113 +n12643313 +n12643473 +n12643688 +n12643877 +n12644283 +n12644902 +n12645174 +n12645530 +n12646072 +n12646197 +n12646397 +n12646605 +n12646740 +n12646950 +n12647231 +n12647376 +n12647560 +n12647787 +n12647893 +n12648045 +n12648196 +n12648424 +n12648693 +n12648888 +n12649065 +n12649317 +n12649539 +n12649866 +n12650038 +n12650229 +n12650379 +n12650556 +n12650805 +n12650915 +n12651229 +n12651611 +n12651821 +n12653218 +n12653436 +n12653633 +n12654227 +n12654857 +n12655062 +n12655245 +n12655351 +n12655498 +n12655605 +n12655726 +n12655869 +n12656369 +n12656528 +n12656685 +n12656909 +n12657082 +n12657755 +n12658118 +n12658308 +n12658481 +n12658603 +n12658715 +n12658846 +n12659064 +n12659356 +n12659539 +n12660601 +n12661045 +n12661227 +n12661538 +n12662074 +n12662379 +n12662772 +n12663023 +n12663254 +n12663359 +n12663804 +n12664005 +n12664187 +n12664469 +n12664710 +n12665048 +n12665271 +n12665659 +n12665857 +n12666050 +n12666159 +n12666369 +n12666965 +n12667406 +n12667582 +n12667964 +n12668131 +n12669803 +n12670334 +n12670758 +n12670962 +n12671651 +n12672289 +n12673588 +n12674120 +n12674685 +n12674895 +n12675299 +n12675515 +n12675876 +n12676134 +n12676370 +n12676534 +n12676703 +n12677120 +n12677331 +n12677612 +n12677841 +n12678794 +n12679023 +n12679432 +n12679593 +n12679876 +n12680402 +n12680652 +n12680864 +n12681376 +n12681579 +n12681893 +n12682411 +n12682668 +n12682882 +n12683096 +n12683407 +n12683571 +n12683791 +n12684379 +n12685431 +n12685831 +n12686077 +n12686274 +n12686496 +n12686676 +n12686877 +n12687044 +n12687462 +n12687698 +n12687957 +n12688187 +n12688372 +n12688716 +n12689305 +n12690653 +n12691428 +n12691661 +n12692024 +n12692160 +n12692521 +n12692714 +n12693244 +n12693352 +n12693865 +n12694486 +n12695144 +n12695975 +n12696492 +n12696830 +n12697152 +n12697514 +n12698027 +n12698435 +n12698598 +n12698774 +n12699031 +n12699301 +n12699922 +n12700088 +n12700357 +n12702124 +n12703190 +n12703383 +n12703557 +n12703716 +n12703856 +n12704041 +n12704343 +n12704513 +n12705013 +n12705220 +n12705458 +n12705698 +n12705978 +n12706410 +n12707199 +n12707781 +n12708293 +n12708654 +n12708941 +n12709103 +n12709349 +n12709688 +n12709901 +n12710295 +n12710415 +n12710577 +n12710693 +n12710917 +n12711182 +n12711398 +n12711596 +n12711817 +n12711984 +n12712320 +n12712626 +n12713063 +n12713358 +n12713521 +n12713866 +n12714254 +n12714755 +n12714949 +n12715195 +n12715914 +n12716400 +n12716594 +n12717072 +n12717224 +n12717644 +n12718074 +n12718483 +n12718995 +n12719684 +n12719944 +n12720200 +n12720354 +n12721122 +n12721477 +n12722071 +n12723062 +n12723610 +n12724942 +n12725521 +n12725738 +n12725940 +n12726159 +n12726357 +n12726528 +n12726670 +n12726902 +n12727101 +n12727301 +n12727518 +n12727729 +n12727960 +n12728164 +n12728322 +n12728508 +n12728656 +n12728864 +n12729023 +n12729164 +n12729315 +n12729521 +n12729729 +n12729950 +n12730143 +n12730370 +n12730544 +n12730776 +n12731029 +n12731401 +n12731835 +n12732009 
+n12732252 +n12732491 +n12732605 +n12732756 +n12732966 +n12733218 +n12733428 +n12733647 +n12733870 +n12734070 +n12734215 +n12735160 +n12736603 +n12736999 +n12737383 +n12737898 +n12738259 +n12739332 +n12739966 +n12740967 +n12741222 +n12741586 +n12741792 +n12742290 +n12742741 +n12742878 +n12743009 +n12743352 +n12743823 +n12743976 +n12744142 +n12744387 +n12744850 +n12745386 +n12745564 +n12746884 +n12747120 +n12748248 +n12749049 +n12749456 +n12749679 +n12749852 +n12750076 +n12750767 +n12751172 +n12751675 +n12752205 +n12753007 +n12753245 +n12753573 +n12753762 +n12754003 +n12754174 +n12754311 +n12754468 +n12754648 +n12754781 +n12754981 +n12755225 +n12755387 +n12755559 +n12755727 +n12755876 +n12756457 +n12757115 +n12757303 +n12757458 +n12757668 +n12757816 +n12757930 +n12758014 +n12758099 +n12758176 +n12758250 +n12758325 +n12758399 +n12758471 +n12758555 +n12759273 +n12759668 +n12760539 +n12760875 +n12761284 +n12761702 +n12761905 +n12762049 +n12762405 +n12762896 +n12763529 +n12764008 +n12764202 +n12764507 +n12764978 +n12765115 +n12765402 +n12765846 +n12766043 +n12766595 +n12766869 +n12767208 +n12767423 +n12767648 +n12768369 +n12768682 +n12768809 +n12768933 +n12769065 +n12769219 +n12769318 +n12770529 +n12770892 +n12771085 +n12771192 +n12771390 +n12771597 +n12771890 +n12772753 +n12772908 +n12773142 +n12773651 +n12773917 +n12774299 +n12774641 +n12775070 +n12775393 +n12775717 +n12775919 +n12776558 +n12776774 +n12777436 +n12777680 +n12777778 +n12777892 +n12778398 +n12778605 +n12779603 +n12779851 +n12780325 +n12780563 +n12781940 +n12782530 +n12782915 +n12783316 +n12783730 +n12784371 +n12784889 +n12785724 +n12785889 +n12786273 +n12786464 +n12786836 +n12787364 +n12788854 +n12789054 +n12789554 +n12789977 +n12790430 +n12791064 +n12791329 +n12793015 +n12793284 +n12793494 +n12793695 +n12793886 +n12794135 +n12794367 +n12794568 +n12794985 +n12795209 +n12795352 +n12795555 +n12796022 +n12796385 +n12796849 +n12797368 +n12797860 +n12798284 +n12798910 +n12799269 +n12799776 +n12800049 +n12800586 +n12801072 +n12801520 +n12801781 +n12801966 +n12803226 +n12803754 +n12803958 +n12804352 +n12805146 +n12805561 +n12805762 +n12806015 +n12806732 +n12807251 +n12807409 +n12807624 +n12807773 +n12808007 +n12809868 +n12810007 +n12810151 +n12810595 +n12811027 +n12811713 +n12812235 +n12812478 +n12812801 +n12813189 +n12814643 +n12814857 +n12814960 +n12815198 +n12815668 +n12815838 +n12816508 +n12816942 +n12817464 +n12817694 +n12817855 +n12818004 +n12818346 +n12818601 +n12818966 +n12819141 +n12819354 +n12819728 +n12820113 +n12820669 +n12820853 +n12821505 +n12821895 +n12822115 +n12822466 +n12822769 +n12822955 +n12823717 +n12823859 +n12824053 +n12824289 +n12824735 +n12825497 +n12826143 +n12827270 +n12827537 +n12827907 +n12828220 +n12828379 +n12828520 +n12828791 +n12828977 +n12829582 +n12829975 +n12830222 +n12830568 +n12831141 +n12831535 +n12831932 +n12832315 +n12832538 +n12832822 +n12833149 +n12833985 +n12834190 +n12834798 +n12834938 +n12835331 +n12835766 +n12836212 +n12836337 +n12836508 +n12836862 +n12837052 +n12837259 +n12837466 +n12837803 +n12839574 +n12839979 +n12840168 +n12840362 +n12840502 +n12840749 +n12841007 +n12841193 +n12841354 +n12842302 +n12842519 +n12842642 +n12842887 +n12843144 +n12843316 +n12843557 +n12843970 +n12844409 +n12844939 +n12845187 +n12845413 +n12845908 +n12846335 +n12846690 +n12847008 +n12847374 +n12847927 +n12848499 +n12849061 +n12849279 +n12849416 +n12849952 +n12850168 +n12850336 +n12850906 +n12851094 +n12851469 +n12851860 +n12852234 +n12852428 +n12852570 +n12853080 +n12853287 +n12853482 +n12854048 +n12854193 
+n12854600 +n12855365 +n12855494 +n12855710 +n12855886 +n12856091 +n12856287 +n12856479 +n12856680 +n12857204 +n12857779 +n12858150 +n12858397 +n12858618 +n12858871 +n12858987 +n12859153 +n12859272 +n12859679 +n12859986 +n12860365 +n12860978 +n12861345 +n12861541 +n12861892 +n12862512 +n12862828 +n12863234 +n12863624 +n12864160 +n12865037 +n12865562 +n12865708 +n12865824 +n12866002 +n12866162 +n12866333 +n12866459 +n12866635 +n12866968 +n12867184 +n12867449 +n12867826 +n12868019 +n12868880 +n12869061 +n12869478 +n12869668 +n12870048 +n12870225 +n12870535 +n12870682 +n12870891 +n12871272 +n12871696 +n12871859 +n12872458 +n12872914 +n12873341 +n12873984 +n12875269 +n12875697 +n12875861 +n12876899 +n12877244 +n12877493 +n12877637 +n12877838 +n12878169 +n12878325 +n12878784 +n12879068 +n12879527 +n12879963 +n12880244 +n12880462 +n12880638 +n12880799 +n12881105 +n12881913 +n12882158 +n12882779 +n12882945 +n12883265 +n12883628 +n12884100 +n12884260 +n12885045 +n12885265 +n12885510 +n12885754 +n12886185 +n12886402 +n12886600 +n12886831 +n12887293 +n12887532 +n12887713 +n12888016 +n12888234 +n12888457 +n12889219 +n12889412 +n12889579 +n12889713 +n12890265 +n12890490 +n12890685 +n12890928 +n12891093 +n12891305 +n12891469 +n12891643 +n12891824 +n12892013 +n12893463 +n12893993 +n12895298 +n12895811 +n12896615 +n12897118 +n12897788 +n12897999 +n12898342 +n12898774 +n12899166 +n12899537 +n12899752 +n12899971 +n12900783 +n12901724 +n12902466 +n12902662 +n12903014 +n12903367 +n12903503 +n12903964 +n12904314 +n12904562 +n12904938 +n12905135 +n12905412 +n12906214 +n12906498 +n12906771 +n12907057 +n12907671 +n12907857 +n12908093 +n12908645 +n12908854 +n12909421 +n12909614 +n12909759 +n12909917 +n12911079 +n12911264 +n12911440 +n12911673 +n12911914 +n12912274 +n12912670 +n12912801 +n12913144 +n12913524 +n12913791 +n12914923 +n12915140 +n12915568 +n12915811 +n12916179 +n12916511 +n12917901 +n12918609 +n12918810 +n12918991 +n12919195 +n12919403 +n12919646 +n12919847 +n12920043 +n12920204 +n12920521 +n12920719 +n12920955 +n12921315 +n12921499 +n12921660 +n12921868 +n12922119 +n12922458 +n12922763 +n12923108 +n12923257 +n12924623 +n12925179 +n12925583 +n12926039 +n12926480 +n12926689 +n12927013 +n12927194 +n12927494 +n12927758 +n12928071 +n12928307 +n12928491 +n12928819 +n12929403 +n12929600 +n12930778 +n12930951 +n12931231 +n12931542 +n12931906 +n12932173 +n12932365 +n12932706 +n12932966 +n12933274 +n12934036 +n12934174 +n12934479 +n12934685 +n12934985 +n12935166 +n12935609 +n12936155 +n12936826 +n12937130 +n12938081 +n12938193 +n12938445 +n12938667 +n12939104 +n12939282 +n12939479 +n12939874 +n12940226 +n12940609 +n12941220 +n12941536 +n12941717 +n12942025 +n12942395 +n12942572 +n12942729 +n12943049 +n12943443 +n12943912 +n12944095 +n12945177 +n12945366 +n12945549 +n12946849 +n12947313 +n12947544 +n12947756 +n12947895 +n12948053 +n12948251 +n12948495 +n12949160 +n12949361 +n12950126 +n12950314 +n12950796 +n12951146 +n12951835 +n12952165 +n12952469 +n12952590 +n12952717 +n12953206 +n12953484 +n12953712 +n12954353 +n12954799 +n12955414 +n12955840 +n12956170 +n12956367 +n12956588 +n12956922 +n12957608 +n12957803 +n12957924 +n12958261 +n12958615 +n12959074 +n12959538 +n12960378 +n12960552 +n12960863 +n12961242 +n12961393 +n12961536 +n12961879 +n12963628 +n12964920 +n12965626 +n12965951 +n12966804 +n12966945 +n12968136 +n12968309 +n12969131 +n12969425 +n12969670 +n12969927 +n12970193 +n12970293 +n12970733 +n12971400 +n12971804 +n12972136 +n12973443 +n12973791 +n12973937 +n12974987 +n12975804 +n12976198 +n12976554 
+n12978076 +n12979316 +n12979829 +n12980080 +n12980840 +n12981086 +n12981301 +n12981443 +n12981954 +n12982468 +n12982590 +n12982915 +n12983048 +n12983654 +n12983873 +n12983961 +n12984267 +n12984489 +n12984595 +n12985420 +n12985773 +n12985857 +n12986227 +n12987056 +n12987423 +n12987535 +n12988158 +n12988341 +n12988572 +n12989007 +n12989938 +n12990597 +n12991184 +n12991837 +n12992177 +n12992868 +n12994892 +n12995601 +n12997654 +n12997919 +n12998815 +n13000891 +n13001041 +n13001206 +n13001366 +n13001529 +n13001930 +n13002209 +n13002750 +n13002925 +n13003061 +n13003254 +n13003522 +n13003712 +n13004423 +n13004640 +n13004826 +n13004992 +n13005329 +n13005984 +n13006171 +n13006631 +n13006894 +n13007034 +n13007417 +n13007629 +n13008157 +n13008315 +n13008485 +n13008689 +n13008839 +n13009085 +n13009244 +n13009429 +n13009656 +n13010694 +n13010951 +n13011221 +n13011595 +n13012253 +n13012469 +n13012973 +n13013534 +n13013764 +n13013965 +n13014097 +n13014265 +n13014409 +n13014581 +n13014741 +n13014879 +n13015509 +n13015688 +n13016076 +n13016289 +n13017102 +n13017240 +n13017439 +n13017610 +n13017789 +n13017979 +n13018088 +n13018232 +n13018407 +n13018906 +n13019496 +n13019643 +n13019835 +n13020191 +n13020481 +n13020964 +n13021166 +n13021332 +n13021543 +n13021689 +n13021867 +n13022210 +n13022709 +n13022903 +n13023134 +n13024012 +n13024500 +n13024653 +n13025647 +n13025854 +n13026015 +n13027557 +n13027879 +n13028611 +n13028937 +n13029122 +n13029326 +n13029610 +n13029760 +n13030337 +n13030616 +n13030852 +n13031193 +n13031323 +n13031474 +n13032115 +n13032381 +n13032618 +n13032923 +n13033134 +n13033396 +n13033577 +n13033879 +n13034062 +n13034555 +n13034788 +n13035241 +n13035389 +n13035707 +n13035925 +n13036116 +n13036312 +n13036804 +n13037406 +n13037585 +n13037805 +n13038068 +n13038376 +n13038577 +n13038744 +n13039349 +n13040303 +n13040629 +n13040796 +n13041312 +n13041943 +n13042134 +n13042316 +n13042982 +n13043926 +n13044375 +n13044778 +n13045210 +n13045594 +n13045975 +n13046130 +n13046669 +n13047862 +n13048447 +n13049953 +n13050397 +n13050705 +n13050940 +n13051346 +n13052014 +n13052248 +n13052670 +n13052931 +n13053608 +n13054073 +n13054560 +n13055423 +n13055577 +n13055792 +n13055949 +n13056135 +n13056349 +n13056607 +n13056799 +n13057054 +n13057242 +n13057422 +n13057639 +n13058037 +n13058272 +n13058608 +n13059298 +n13059657 +n13060017 +n13060190 +n13061172 +n13061348 +n13061471 +n13061704 +n13062421 +n13063269 +n13063514 +n13064111 +n13064457 +n13065089 +n13065514 +n13066129 +n13066448 +n13066979 +n13067191 +n13067330 +n13067532 +n13067672 +n13068255 +n13068434 +n13068735 +n13068917 +n13069224 +n13069773 +n13070308 +n13070875 +n13071371 +n13071553 +n13071815 +n13072031 +n13072209 +n13072350 +n13072528 +n13072706 +n13072863 +n13073055 +n13073703 +n13074619 +n13074814 +n13075020 +n13075272 +n13075441 +n13075684 +n13075847 +n13076041 +n13076405 +n13076643 +n13076831 +n13077033 +n13077295 +n13078021 +n13079073 +n13079419 +n13079567 +n13080306 +n13080866 +n13081229 +n13081999 +n13082568 +n13083023 +n13083461 +n13084184 +n13084834 +n13085113 +n13085747 +n13090018 +n13090871 +n13091620 +n13091774 +n13091982 +n13092078 +n13092240 +n13092385 +n13092987 +n13093275 +n13093629 +n13094145 +n13094273 +n13095013 +n13096779 +n13098515 +n13098962 +n13099833 +n13099999 +n13100156 +n13100677 +n13102648 +n13102775 +n13103023 +n13103660 +n13103750 +n13103877 +n13104059 +n13107694 +n13107807 +n13107891 +n13108131 +n13108323 +n13108481 +n13108545 +n13108662 +n13108841 +n13109733 +n13110915 +n13111174 +n13111340 +n13111504 +n13111881 
+n13112035 +n13112201 +n13118330 +n13118707 +n13119870 +n13120211 +n13120958 +n13121104 +n13121349 +n13122364 +n13123309 +n13123431 +n13123841 +n13124358 +n13124654 +n13125117 +n13126050 +n13126856 +n13127001 +n13127303 +n13127666 +n13127843 +n13128278 +n13128582 +n13128976 +n13129078 +n13130014 +n13130161 +n13130726 +n13131028 +n13131618 +n13132034 +n13132156 +n13132338 +n13132486 +n13132656 +n13132756 +n13132940 +n13133140 +n13133233 +n13133316 +n13133613 +n13133932 +n13134302 +n13134531 +n13134844 +n13134947 +n13135692 +n13135832 +n13136316 +n13136556 +n13136781 +n13137010 +n13137225 +n13137409 +n13137672 +n13137951 +n13138155 +n13138308 +n13138658 +n13138842 +n13139055 +n13139321 +n13139482 +n13139647 +n13139837 +n13140049 +n13140367 +n13141141 +n13141415 +n13141564 +n13141797 +n13141972 +n13142182 +n13142504 +n13142907 +n13143285 +n13143758 +n13144084 +n13145040 +n13145250 +n13145444 +n13146403 +n13146583 +n13146928 +n13147153 +n13147270 +n13147386 +n13147532 +n13147689 +n13147918 +n13148208 +n13148384 +n13149296 +n13149970 +n13150378 +n13150592 +n13150894 +n13151082 +n13152339 +n13154388 +n13154494 +n13154841 +n13155095 +n13155305 +n13155611 +n13156986 +n13157137 +n13157346 +n13157481 +n13157684 +n13157971 +n13158167 +n13158512 +n13158605 +n13158714 +n13158815 +n13159357 +n13159691 +n13159890 +n13160116 +n13160254 +n13160365 +n13160604 +n13160831 +n13160938 +n13161151 +n13161254 +n13161904 +n13163553 +n13163649 +n13163991 +n13164501 +n13170840 +n13171210 +n13171797 +n13172923 +n13173132 +n13173259 +n13173488 +n13173697 +n13173882 +n13174354 +n13174670 +n13174823 +n13175682 +n13176363 +n13176714 +n13177048 +n13177529 +n13177768 +n13177884 +n13178284 +n13178707 +n13179056 +n13179804 +n13180534 +n13180875 +n13181055 +n13181244 +n13181406 +n13181811 +n13182164 +n13182338 +n13182799 +n13182937 +n13183056 +n13183489 +n13184394 +n13185269 +n13185658 +n13186388 +n13186546 +n13187367 +n13188096 +n13188268 +n13188462 +n13188767 +n13190060 +n13190747 +n13191148 +n13191620 +n13191884 +n13192625 +n13193143 +n13193269 +n13193466 +n13193642 +n13193856 +n13194036 +n13194212 +n13194572 +n13194758 +n13194918 +n13195341 +n13195761 +n13196003 +n13196234 +n13196369 +n13196738 +n13197274 +n13197507 +n13198054 +n13198482 +n13198914 +n13199717 +n13199970 +n13200193 +n13200542 +n13200651 +n13200986 +n13201423 +n13201566 +n13201969 +n13202125 +n13202355 +n13202602 +n13205058 +n13205249 +n13206178 +n13206817 +n13207094 +n13207335 +n13207572 +n13207736 +n13207923 +n13208302 +n13208705 +n13208965 +n13209129 +n13209270 +n13209460 +n13209808 +n13210350 +n13210597 +n13211020 +n13211790 +n13212025 +n13212175 +n13212379 +n13212559 +n13213066 +n13213397 +n13213577 +n13214217 +n13214340 +n13214485 +n13215258 +n13215586 +n13217005 +n13219422 +n13219833 +n13219976 +n13220122 +n13220355 +n13220525 +n13220663 +n13221529 +n13222877 +n13222985 +n13223090 +n13223588 +n13223710 +n13223843 +n13224673 +n13224922 +n13225244 +n13225365 +n13225617 +n13226320 +n13226871 +n13228017 +n13228536 +n13229543 +n13229951 +n13230190 +n13230662 +n13230843 +n13231078 +n13231678 +n13231919 +n13232106 +n13232363 +n13232779 +n13233727 +n13234114 +n13234519 +n13234678 +n13234857 +n13235011 +n13235159 +n13235319 +n13235503 +n13235766 +n13236100 +n13237188 +n13237508 +n13238375 +n13238654 +n13238988 +n13239177 +n13239736 +n13239921 +n13240362 +n13252672 +n13354021 +n13555775 +n13579829 +n13650447 +n13653902 +n13862407 +n13862552 +n13862780 +n13863020 +n13863186 +n13863473 +n13863771 +n13864035 +n13864153 +n13864965 +n13865298 +n13865483 +n13865904 
+n13866144 +n13866626 +n13866827 +n13867005 +n13867492 +n13868248 +n13868371 +n13868515 +n13868944 +n13869045 +n13869547 +n13869788 +n13869896 +n13871717 +n13872592 +n13872822 +n13873361 +n13873502 +n13873917 +n13874073 +n13874558 +n13875392 +n13875571 +n13875884 +n13876561 +n13877547 +n13877667 +n13878306 +n13879049 +n13879320 +n13879816 +n13880199 +n13880415 +n13880551 +n13880704 +n13880994 +n13881512 +n13881644 +n13882201 +n13882276 +n13882487 +n13882563 +n13882639 +n13882713 +n13882961 +n13883603 +n13883763 +n13884261 +n13884384 +n13884930 +n13885011 +n13886260 +n13888491 +n13889066 +n13889331 +n13891547 +n13891937 +n13893786 +n13894154 +n13894434 +n13895262 +n13896100 +n13896217 +n13897198 +n13897528 +n13897996 +n13898207 +n13898315 +n13898645 +n13899735 +n13900287 +n13900422 +n13901211 +n13901321 +n13901423 +n13901490 +n13901858 +n13902048 +n13902336 +n13902793 +n13903079 +n13905121 +n13905275 +n13905792 +n13906484 +n13906669 +n13906767 +n13906936 +n13907272 +n13908201 +n13908580 +n13911045 +n13912260 +n13912540 +n13914141 +n13914265 +n13914608 +n13915023 +n13915113 +n13915209 +n13915305 +n13915999 +n13916363 +n13916721 +n13917690 +n13917785 +n13918274 +n13918387 +n13918717 +n13919547 +n13919919 +n13926786 +n14131950 +n14175579 +n14564779 +n14582716 +n14583400 +n14585392 +n14592309 +n14603798 +n14633206 +n14685296 +n14696793 +n14698884 +n14714645 +n14720833 +n14765422 +n14785065 +n14786943 +n14804958 +n14810561 +n14820180 +n14821852 +n14844693 +n14853210 +n14858292 +n14867545 +n14891255 +n14899328 +n14900184 +n14900342 +n14908027 +n14909584 +n14914945 +n14915184 +n14919819 +n14938389 +n14941787 +n14942411 +n14973585 +n14974264 +n14975598 +n14976759 +n14976871 +n14977188 +n14977504 +n14992287 +n14993378 +n15005577 +n15006012 +n15019030 +n15048888 +n15060326 +n15060688 +n15062057 +n15067877 +n15075141 +n15086247 +n15089258 +n15089472 +n15089645 +n15089803 +n15090065 +n15090238 +n15090742 +n15091129 +n15091304 +n15091473 +n15091669 +n15091846 +n15092059 +n15092227 +n15092409 +n15092650 +n15092751 +n15092942 +n15093049 +n15093137 +n15093298 +n15102359 +n15102455 +n15102894 diff --git a/timm/data/dataset_factory.py b/timm/data/dataset_factory.py index 194a597ea9..c4e6cf3c9d 100644 --- a/timm/data/dataset_factory.py +++ b/timm/data/dataset_factory.py @@ -69,6 +69,7 @@ def create_dataset( * folder - default, timm folder (or tar) based ImageDataset * torch - torchvision based datasets * TFDS - Tensorflow-datasets wrapper in IterabeDataset interface via IterableImageDataset + * WDS - Webdataset * all - any of the above Args: @@ -134,6 +135,10 @@ def create_dataset( ds = IterableImageDataset( root, parser=name, split=split, is_training=is_training, download=download, batch_size=batch_size, repeats=repeats, **kwargs) + elif name.startswith('wds/'): + ds = IterableImageDataset( + root, parser=name, split=split, is_training=is_training, + batch_size=batch_size, repeats=repeats, **kwargs) else: # FIXME support more advance split cfg for ImageFolder/Tar datasets in the future if search_split and os.path.isdir(root): diff --git a/timm/data/parsers/parser_factory.py b/timm/data/parsers/parser_factory.py index 892090adb7..6dedb46782 100644 --- a/timm/data/parsers/parser_factory.py +++ b/timm/data/parsers/parser_factory.py @@ -18,6 +18,10 @@ def create_parser(name, root, split='train', **kwargs): if prefix == 'tfds': from .parser_tfds import ParserTfds # defer tensorflow import parser = ParserTfds(root, name, split=split, **kwargs) + elif prefix == 'wds': + from .parser_wds import ParserWebdataset + 
        kwargs.pop('download', False)
+        parser = ParserWebdataset(root, name, split=split, **kwargs)
     else:
         assert os.path.exists(root)
         # default fallback path (backwards compat), use image tar if root is a .tar file, otherwise image folder
diff --git a/timm/data/parsers/parser_tfds.py b/timm/data/parsers/parser_tfds.py
index 08fa6dbdb5..c79b4ba828 100644
--- a/timm/data/parsers/parser_tfds.py
+++ b/timm/data/parsers/parser_tfds.py
@@ -34,12 +34,12 @@ from timm.bits import get_global_device, is_global_device
 
 MAX_TP_SIZE = 8  # maximum TF threadpool size, only doing jpeg decodes and queuing activities
-SHUFFLE_SIZE = 8192  # examples to shuffle in DS queue
-PREFETCH_SIZE = 2048  # examples to prefetch
+SHUFFLE_SIZE = 8192  # number of samples to shuffle in DS queue
+PREFETCH_SIZE = 2048  # number of samples to prefetch
 
 
-def even_split_indices(split, n, num_examples):
-    partitions = [round(i * num_examples / n) for i in range(n + 1)]
+def even_split_indices(split, n, num_samples):
+    partitions = [round(i * num_samples / n) for i in range(n + 1)]
     return [f"{split}[{partitions[i]}:{partitions[i + 1]}]" for i in range(n)]
 
 
@@ -55,20 +55,20 @@ class ParserTfds(Parser):
     """ Wrap Tensorflow Datasets for use in PyTorch
 
     There several things to be aware of:
-      * To prevent excessive examples being dropped per epoch w/ distributed training or multiplicity of
+      * To prevent excessive samples being dropped per epoch w/ distributed training or multiplicity of
        dataloader workers, the train iterator wraps to avoid returning partial batches that trigger drop_last
        https://github.com/pytorch/pytorch/issues/33413
      * With PyTorch IterableDatasets, each worker in each replica operates in isolation, the final batch from
        each worker could be a different size. For training this is worked around by option above, for
-       validation extra examples are inserted iff distributed mode is enabled so that the batches being reduced
+       validation extra samples are inserted iff distributed mode is enabled so that the batches being reduced
        across replicas are of same size. This will slightly alter the results, distributed validation will not be
        100% correct. This is similar to common handling in DistributedSampler for normal Datasets but a bit worse
-       since there are up to N * J extra examples with IterableDatasets.
+       since there are up to N * J extra samples with IterableDatasets.
      * The sharding (splitting of dataset into TFRecord) files imposes limitations on the number of replicas and
        dataloader workers you can use. For really small datasets that only contain a few shards you may have to train
        non-distributed w/ 1-2 dataloader workers. This is likely not a huge concern as the benefit of distributed
       training or fast dataloading should be much less for small datasets.
-     * This wrapper is currently configured to return individual, decompressed image examples from the TFDS
+     * This wrapper is currently configured to return individual, decompressed image samples from the TFDS
       dataset. The augmentation (transforms) and batching is still done in PyTorch. It would be possible to specify
       TF augmentation fn and return augmented batches w/ some modifications to other downstream components.
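The sub-split arithmetic described in the docstring above is easy to sanity check in isolation. The snippet below is a standalone illustration, not part of the patch; the node and worker counts are made-up example values. It re-uses the even_split_indices helper from the hunk above together with the rank-major global worker indexing computed in _lazy_init.

def even_split_indices(split, n, num_samples):
    # same helper as in parser_tfds.py above
    partitions = [round(i * num_samples / n) for i in range(n + 1)]
    return [f"{split}[{partitions[i]}:{partitions[i + 1]}]" for i in range(n)]

dist_rank, dist_num_replicas = 1, 2    # second of two distributed nodes (hypothetical)
worker_id, num_workers = 2, 4          # third dataloader worker on that node (hypothetical)
global_num_workers = dist_num_replicas * num_workers    # 8 read pipelines in total
global_worker_id = dist_rank * num_workers + worker_id  # this pipeline is index 6

subsplits = even_split_indices('validation', global_num_workers, 50000)
print(subsplits[global_worker_id])  # validation[37500:43750], ~6250 samples for this worker

Each global worker ends up reading a disjoint slice of the split, which is why validation can avoid shard-level sharding and only needs minimal padding to even out per-replica batch counts.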
@@ -100,7 +100,7 @@ def __init__(
             name: tfds dataset name (eg `imagenet2012`)
             split: tfds dataset split (can use all TFDS split strings eg `train[:10%]`)
             is_training: training mode, shuffle enabled, dataset len rounded by batch_size
-            batch_size: batch_size to use to unsure total examples % batch_size == 0 in training across all dis nodes
+            batch_size: batch_size to use to ensure total samples % batch_size == 0 in training across all dist nodes
             download: download and build TFDS dataset if set, otherwise must use tfds CLI
             repeats: iterate through (repeat) the dataset this many times per iteration (once if 0 or 1)
             seed: common seed for shard shuffle across all distributed/worker instances
@@ -139,7 +139,7 @@ def __init__(
             self.builder.download_and_prepare()
         self.class_to_idx = get_class_labels(self.builder.info) if self.target_name == 'label' else {}
         self.split_info = self.builder.info.splits[split]
-        self.num_examples = self.split_info.num_examples
+        self.num_samples = self.split_info.num_examples
 
         # Distributed world state
         self.dist_rank = 0
@@ -157,13 +157,18 @@ def __init__(
             self.dist_num_replicas = dist.get_world_size()
 
         # Attributes that are updated in _lazy_init, including the tf.data pipeline itself
-        self.global_num_workers = 1
-        self.worker_info = None
+        self.worker_init = False  # worker info initialized
+        self.worker_id = 0
         self.worker_seed = 0  # seed unique to each work instance
+        self.num_workers = 1
+        self.global_worker_id = 0
+        self.global_num_workers = 1
         self.subsplit = None  # set when data is distributed across workers using sub-splits
         self.ds = None  # initialized lazily on each dataloader worker process
-        self.init_count = 0
-        self.reinit_each_iter = self.is_training  # FIXME need to determine if this is necessary
+        self.init_count = 0  # number of ds TF data pipeline initializations
+        # FIXME need to determine if reinit_each_iter is necessary. I don't completely trust the behaviour
+        # of `shuffle_reshuffle_each_iteration` when there are multiple workers / nodes across epochs
+        self.reinit_each_iter = self.is_training
 
     def _lazy_init(self):
         """ Lazily initialize the dataset.
@@ -177,14 +182,15 @@ def _lazy_init(self):
         before it is passed to dataloader.
         """
         # setup input context to split dataset across distributed processes
-        if self.worker_info is None:
+        if not self.worker_init:
+            # worker init done once, even if data-pipeline is re-initialized
            worker_info = torch.utils.data.get_worker_info()
-            assert worker_info is not None
-            self.worker_info = worker_info
-            self.worker_seed = worker_info.seed
-            num_workers = worker_info.num_workers
-            self.global_num_workers = self.dist_num_replicas * num_workers
-            global_worker_id = self.dist_rank * num_workers + worker_info.id
+            if worker_info is not None:
+                self.worker_id = worker_info.id
+                self.worker_seed = worker_info.seed
+                self.num_workers = worker_info.num_workers
+                self.global_worker_id = self.dist_rank * self.num_workers + self.worker_id
+                self.global_num_workers = self.dist_num_replicas * self.num_workers
 
            """ Data sharding
            InputContext will assign subset of underlying TFRecord files to each 'pipeline' if used.
@@ -194,54 +200,59 @@ def _lazy_init(self):
            I am currently using a mix of InputContext shard assignment and fine-grained sub-splits
            for distributing the data across workers. For training InputContext is used to assign shards to nodes
            unless num_shards in dataset < total number of workers.
             Otherwise sub-split API is used for datasets without enough shards or
-            for validation where we can't drop examples and need to avoid minimize uneven splits to avoid padding.
+            for validation where we can't drop samples and need to minimize uneven splits to avoid padding.
             """
             should_subsplit = self.global_num_workers > 1 and (
                 self.split_info.num_shards < self.global_num_workers or not self.is_training)
             if should_subsplit:
-                # split the dataset w/o using sharding for more even examples / worker, can result in less optimal
+                # split the dataset w/o using sharding for more even samples / worker, can result in less optimal
                 # read patterns for distributed training (overlap across shards) so better to use InputContext there
                 if has_buggy_even_splits:
                     # my even_split workaround doesn't work on subsplits, upgrade tfds!
                     if not isinstance(self.split_info, tfds.core.splits.SubSplitInfo):
-                        subsplits = even_split_indices(self.split, self.global_num_workers, self.num_examples)
-                        self.subsplit = subsplits[global_worker_id]
+                        subsplits = even_split_indices(self.split, self.global_num_workers, self.num_samples)
+                        self.subsplit = subsplits[self.global_worker_id]
                 else:
                     subsplits = tfds.even_splits(self.split, self.global_num_workers)
-                    self.subsplit = subsplits[global_worker_id]
-        else:
-            num_workers = self.worker_info.num_workers
-            global_worker_id = self.dist_rank * num_workers + self.worker_info.id
+                    self.subsplit = subsplits[self.global_worker_id]
+            self.worker_init = True
+
+        # initialize TF data pipeline
         input_context = None
         if self.global_num_workers > 1 and self.subsplit is None:
            # set input context to divide shards among distributed replicas
            input_context = tf.distribute.InputContext(
                num_input_pipelines=self.global_num_workers,
-                input_pipeline_id=global_worker_id,
+                input_pipeline_id=self.global_worker_id,
                num_replicas_in_sync=self.dist_num_replicas  # FIXME does this arg have any impact?
) read_config = tfds.ReadConfig( - shuffle_seed=self.common_seed + self.init_count, - shuffle_reshuffle_each_iteration=not self.reinit_each_iter, + shuffle_seed=self.common_seed + self.init_count, # shard shuffling seed + shuffle_reshuffle_each_iteration=not self.reinit_each_iter, # re-shuffle shards per iteration input_context=input_context) ds = self.builder.as_dataset( - split=self.subsplit or self.split, shuffle_files=self.is_training, read_config=read_config) + split=self.subsplit or self.split, + shuffle_files=self.is_training, # enable shard shuffling + read_config=read_config) + # avoid overloading threading w/ combo of TF ds threads + PyTorch workers options = tf.data.Options() thread_member = 'threading' if hasattr(options, 'threading') else 'experimental_threading' - getattr(options, thread_member).private_threadpool_size = max(1, self.max_threadpool_size // num_workers) + getattr(options, thread_member).private_threadpool_size = max(1, self.max_threadpool_size // self.num_workers) getattr(options, thread_member).max_intra_op_parallelism = 1 ds = ds.with_options(options) + if self.is_training or self.repeats > 1: # to prevent excessive drop_last batch behaviour w/ IterableDatasets # see warnings at https://pytorch.org/docs/stable/data.html#multi-process-data-loading ds = ds.repeat() # allow wrap around and break iteration manually if self.is_training: + # shuffle samples ds = ds.shuffle( - min(self.num_examples, self.shuffle_size) // self.global_num_workers, + min(self.num_samples, self.shuffle_size) // self.global_num_workers, seed=self.worker_seed + self.init_count) - ds = ds.prefetch(min(self.num_examples // self.global_num_workers, self.prefetch_size)) + ds = ds.prefetch(min(self.num_samples // self.global_num_workers, self.prefetch_size)) self.ds = tfds.as_numpy(ds) self.init_count += 1 @@ -251,10 +262,10 @@ def __iter__(self): # Compute a rounded up sample count that is used to: # 1. make batches even cross workers & replicas in distributed validation. - # This adds extra examples and will slightly alter validation results. + # This adds extra samples and will slightly alter validation results. # 2. determine loop ending condition in training w/ repeat enabled so that only full batch_size # batches are produced (underlying tfds iter wraps around) - target_example_count = math.ceil(max(1, self.repeats) * self.num_examples / self.global_num_workers) + target_example_count = math.ceil(max(1, self.repeats) * self.num_samples / self.global_num_workers) if self.is_training: # round up to nearest batch_size per worker-replica target_example_count = math.ceil(target_example_count / self.batch_size) * self.batch_size @@ -272,11 +283,11 @@ def __iter__(self): example_count += 1 if self.is_training and example_count >= target_example_count: # Need to break out of loop when repeat() is enabled for training w/ oversampling - # this results in extra examples per epoch but seems more desirable than dropping + # this results in extra samples per epoch but seems more desirable than dropping # up to N*J batches per epoch (where N = num distributed processes, and J = num worker processes) break - # Pad across distributed nodes (make counts equal by adding examples) + # Pad across distributed nodes (make counts equal by adding samples) if not self.is_training and self.dist_num_replicas > 1 and self.subsplit is not None and \ 0 < example_count < target_example_count: # Validation batch padding only done for distributed training where results are reduced across nodes. 
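To make the per-worker sample-count rounding above concrete, here is a minimal sketch with hypothetical numbers; the arithmetic mirrors the `target_example_count` logic in `__iter__`:

    import math

    num_samples = 50_000        # hypothetical split size
    global_num_workers = 16     # dist_num_replicas * dataloader workers per replica
    batch_size = 256
    repeats = 0

    target = math.ceil(max(1, repeats) * num_samples / global_num_workers)  # 3125
    # training only: round up to a full batch per worker-replica so drop_last never truncates
    target = math.ceil(target / batch_size) * batch_size                    # 3328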
@@ -288,12 +299,12 @@ def __iter__(self): example_count += 1 def __len__(self): - # this is just an estimate and does not factor in extra examples added to pad batches based on + # this is just an estimate and does not factor in extra samples added to pad batches based on # complete worker & replica info (not available until init in dataloader). - return math.ceil(max(1, self.repeats) * self.num_examples / self.dist_num_replicas) + return math.ceil(max(1, self.repeats) * self.num_samples / self.dist_num_replicas) def _filename(self, index, basename=False, absolute=False): - assert False, "Not supported" # no random access to examples + assert False, "Not supported" # no random access to samples def filenames(self, basename=False, absolute=False): """ Return all filenames in dataset, overrides base""" @@ -301,7 +312,7 @@ def filenames(self, basename=False, absolute=False): self._lazy_init() names = [] for sample in self.ds: - if len(names) > self.num_examples: + if len(names) >= self.num_samples: break # safety for ds.repeat() case if 'file_name' in sample: name = sample['file_name'] diff --git a/timm/data/parsers/parser_wds.py b/timm/data/parsers/parser_wds.py new file mode 100644 index 0000000000..f6eaeb24d8 --- /dev/null +++ b/timm/data/parsers/parser_wds.py @@ -0,0 +1,261 @@ +""" Dataset parser interface for webdataset + +Hacked together by / Copyright 2022 Ross Wightman +""" +import math +import os +import io +import json +import yaml +import random +from dataclasses import dataclass +from itertools import islice +from functools import partial +from typing import Dict, Tuple + +import torch +from PIL import Image +try: + import webdataset as wds + from webdataset.shardlists import expand_urls +except ImportError: + wds = None + expand_urls = None + +from .parser import Parser +from timm.bits import get_global_device, is_global_device + +SHUFFLE_SIZE = 8192 + + +def _load_info(root, basename='info'): + info_json = os.path.join(root, basename + '.json') + info_yaml = os.path.join(root, basename + '.yaml') + info_dict = {} + if os.path.exists(info_json): + with open(info_json, 'r') as f: + info_dict = json.load(f) + elif os.path.exists(info_yaml): + with open(info_yaml, 'r') as f: + info_dict = yaml.safe_load(f) + return info_dict + +@dataclass +class SplitInfo: + num_samples: int + filenames: Tuple[str] + shard_lengths: Tuple[int] = () + name: str = '' + + +def _parse_split_info(split: str, info: Dict): + def _info_convert(dict_info): + return SplitInfo( + num_samples=dict_info['num_samples'], + filenames=tuple(dict_info['filenames']), + shard_lengths=tuple(dict_info['shard_lengths']), + name=dict_info['name'], + ) + + if 'tar' in split or '..' 
in split: + # split in WDS string braceexpand format, sample count can be included with a | separator + # ex: `dataset-split-{0000..9999}.tar|100000` for 9999 shards, covering 100,000 samples + split = split.split('|') + num_samples = 0 + split_name = '' + if len(split) > 1: + num_samples = int(split[1]) + split = split[0] + if '::' not in split: + split_parts = split.split('-', 3) + split_idx = len(split_parts) - 1 + if split_idx and 'splits' in info and split_parts[split_idx] in info['splits']: + split_name = split_parts[split_idx] + + split_filenames = expand_urls(split) + if split_name: + split_info = info['splits'][split_name] + if not num_samples: + _fc = {f: c for f, c in zip(split_info['filenames'], split_info['shard_lengths'])} + num_samples = sum(_fc[f] for f in split_filenames) + split_info['filenames'] = tuple(_fc.keys()) + split_info['shard_lengths'] = tuple(_fc.values()) + split_info['num_samples'] = num_samples + split_info = _info_convert(split_info) + else: + split_info = SplitInfo( + name=split_name, + num_samples=num_samples, + filenames=split_filenames, + ) + else: + if split not in info['splits']: + raise RuntimeError(f"split {split} not found in info ({info['splits'].keys()})") + split = split + split_info = info['splits'][split] + split_info = _info_convert(split_info) + + return split_info + + +def _decode(sample, image_key='jpg', image_format='RGB', target_key='cls'): + """ Custom sample decode + * decode and convert PIL Image + * cls byte string label to int + * pass through JSON byte string (if it exists) without parse + """ + with io.BytesIO(sample[image_key]) as b: + img = Image.open(b) + img.load() + if image_format: + img = img.convert(image_format) + return dict(jpg=img, cls=int(sample[target_key]), json=sample.get('json', None)) + + +class ParserWebdataset(Parser): + def __init__( + self, + root, + name, + split, + is_training=False, + batch_size=None, + repeats=0, + seed=42, + input_name='image', + input_image='RGB', + target_name=None, + target_image='', + prefetch_size=None, + shuffle_size=None, + ): + super().__init__() + self.root = root + self.is_training = is_training + self.batch_size = batch_size + self.repeats = repeats + self.common_seed = seed # a seed that's fixed across all worker / distributed instances + self.shard_shuffle_size = 500 + self.sample_shuffle_size = shuffle_size or SHUFFLE_SIZE + + self.image_key = 'jpg' + self.image_format = input_image + self.target_key = 'cls' + self.filename_key = 'filename' + self.key_ext = '.JPEG' # extension to add to key for original filenames (DS specific, default ImageNet) + + self.info = _load_info(self.root) + self.split_info = _parse_split_info(split, self.info) + self.num_samples = self.split_info.num_samples + if not self.num_samples: + raise RuntimeError(f'Invalid split definition, no samples found.') + + # Distributed world state + self.dist_rank = 0 + self.dist_num_replicas = 1 + if is_global_device(): + dev_env = get_global_device() + if dev_env.distributed and dev_env.world_size > 1: + self.dist_rank = dev_env.global_rank + self.dist_num_replicas = dev_env.world_size + else: + # FIXME warn if we fallback to torch distributed? 
+ import torch.distributed as dist + if dist.is_available() and dist.is_initialized() and dist.get_world_size() > 1: + self.dist_rank = dist.get_rank() + self.dist_num_replicas = dist.get_world_size() + + # Attributes that are updated in _lazy_init + self.worker_id = 0 + self.worker_seed = seed # seed unique to each worker instance + self.num_workers = 1 + self.global_worker_id = 0 + self.global_num_workers = 1 + self.init_count = 0 + + # DataPipeline is lazy init, majority of WDS DataPipeline could be init here, BUT, shuffle seed + # is not handled in manner where it can be deterministic for each worker AND initialized up front + self.ds = None + + def _lazy_init(self): + """ Lazily initialize worker (in worker processes) + """ + worker_info = torch.utils.data.get_worker_info() + if worker_info is not None: + self.worker_id = worker_info.id + self.worker_seed = worker_info.seed + self.num_workers = worker_info.num_workers + self.global_num_workers = self.dist_num_replicas * self.num_workers + self.global_worker_id = self.dist_rank * self.num_workers + self.worker_id + + # init data pipeline + abs_shard_filenames = [os.path.join(self.root, f) for f in self.split_info.filenames] + pipeline = [wds.SimpleShardList(abs_shard_filenames)] + # at this point we have an iterator over all the shards + if self.is_training: + pipeline.extend([ + wds.detshuffle(self.shard_shuffle_size, seed=self.common_seed), + self._split_by_node_and_worker, + # at this point, we have an iterator over the shards assigned to each worker + wds.tarfile_to_samples(), + wds.shuffle( + self.sample_shuffle_size, + rng=random.Random(self.worker_seed)), # this is why we lazy-init whole DataPipeline + ]) + else: + pipeline.extend([ + self._split_by_node_and_worker, + # at this point, we have an iterator over the shards assigned to each worker + wds.tarfile_to_samples(), + ]) + pipeline.extend([ + wds.map(partial(_decode, image_key=self.image_key, image_format=self.image_format)) + ]) + self.ds = wds.DataPipeline(*pipeline) + self.init_count += 1 + + def _split_by_node_and_worker(self, src): + if self.global_num_workers > 1: + for s in islice(src, self.global_worker_id, self.global_num_workers): + yield s + else: + for s in src: + yield s + + def __iter__(self): + if not self.init_count: + self._lazy_init() + + i = 0 + num_worker_samples = math.ceil(self.num_samples / self.global_num_workers) + if self.is_training and self.batch_size is not None: + num_worker_samples = (num_worker_samples // self.batch_size) * self.batch_size + ds = self.ds.with_epoch(num_worker_samples) + for sample in ds: + yield sample[self.image_key], sample[self.target_key] + i += 1 + print('end', i) # FIXME debug + + def __len__(self): + return math.ceil(max(1, self.repeats) * self.num_samples / self.dist_num_replicas) + + def _filename(self, index, basename=False, absolute=False): + assert False, "Not supported" # no random access to examples + + def filenames(self, basename=False, absolute=False): + """ Return all filenames in dataset, overrides base""" + if not self.init_count: + self._lazy_init() + + names = [] + for sample in self.ds: + if self.filename_key in sample: + name = sample[self.filename_key] + elif '__key__' in sample: + name = sample['__key__'] + self.key_ext + else: + assert False, "No supported name field present" + names.append(name) + if len(names) >= self.num_samples: + break # safety for ds.repeat() case + return names diff --git a/train.py b/train.py index f968f0d785..4a3900934b 100755 --- a/train.py +++ b/train.py @@ -607,7 +607,7 
@@ def setup_data(args, default_cfg, dev_env: DeviceEnv, mixup_active: bool): ) eval_workers = args.workers - if 'tfds' in args.dataset: + if 'tfds' in args.dataset or 'wds' in args.dataset: # FIXME reduces validation padding issues when using TFDS w/ workers and distributed training eval_workers = min(2, args.workers) From a444d4b89179c1e1aa4ef73ecbf3798ec1d723bc Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Wed, 9 Mar 2022 10:17:12 -0800 Subject: [PATCH 50/61] Add alternative label support to WDS for imagenet22k/12k split, add 21k/22k/12k indices filters to results/ --- .../imagenet21k_goog_to_12k_rw_indices.txt | 11821 +++++++++ results/imagenet21k_goog_to_22k_indices.txt | 21841 ++++++++++++++++ results/imagenet22k_to_12k_rw_indices.txt | 11821 +++++++++ timm/data/parsers/parser_wds.py | 18 +- 4 files changed, 45498 insertions(+), 3 deletions(-) create mode 100644 results/imagenet21k_goog_to_12k_rw_indices.txt create mode 100644 results/imagenet21k_goog_to_22k_indices.txt create mode 100644 results/imagenet22k_to_12k_rw_indices.txt diff --git a/results/imagenet21k_goog_to_12k_rw_indices.txt b/results/imagenet21k_goog_to_12k_rw_indices.txt new file mode 100644 index 0000000000..953537f8cb --- /dev/null +++ b/results/imagenet21k_goog_to_12k_rw_indices.txt @@ -0,0 +1,11821 @@ +1 +3 +4 +5 +6 +7 +8 +9 +10 +11 +13 +14 +15 +16 +17 +18 +19 +20 +21 +23 +24 +26 +27 +28 +29 +30 +31 +32 +33 +34 +37 +38 +41 +43 +44 +45 +46 +47 +48 +49 +50 +51 +53 +55 +56 +57 +58 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +89 +90 +91 +93 +94 +95 +96 +97 +99 +100 +101 +102 +103 +105 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +123 +124 +125 +126 +127 +128 +129 +130 +131 +132 +133 +134 +135 +137 +138 +140 +141 +142 +143 +144 +146 +147 +148 +149 +151 +152 +153 +154 +156 +157 +158 +159 +161 +162 +164 +165 +166 +167 +168 +169 +170 +171 +172 +173 +175 +176 +179 +180 +181 +182 +184 +188 +192 +193 +195 +196 +197 +199 +200 +203 +206 +209 +210 +211 +212 +213 +214 +215 +216 +217 +218 +219 +220 +221 +222 +223 +224 +225 +226 +227 +230 +231 +235 +249 +250 +251 +252 +253 +254 +289 +292 +295 +301 +306 +307 +312 +313 +315 +317 +320 +324 +325 +326 +327 +332 +341 +343 +347 +352 +353 +354 +356 +359 +360 +366 +367 +368 +369 +370 +377 +379 +380 +382 +383 +384 +385 +386 +392 +395 +398 +402 +405 +408 +410 +411 +413 +415 +416 +418 +422 +423 +424 +430 +431 +440 +441 +451 +452 +455 +456 +457 +460 +461 +464 +465 +466 +468 +469 +470 +471 +472 +473 +474 +475 +477 +479 +482 +486 +489 +490 +491 +492 +493 +496 +499 +500 +502 +503 +505 +510 +511 +512 +513 +514 +515 +516 +520 +523 +524 +525 +526 +527 +528 +529 +530 +533 +536 +538 +539 +540 +541 +542 +543 +544 +545 +546 +547 +548 +549 +550 +552 +553 +554 +555 +556 +557 +558 +559 +560 +561 +562 +563 +564 +566 +567 +568 +569 +570 +571 +572 +573 +574 +575 +576 +577 +578 +580 +581 +583 +584 +585 +586 +587 +588 +589 +590 +591 +592 +595 +596 +598 +601 +602 +603 +604 +605 +607 +608 +609 +610 +611 +612 +613 +614 +615 +616 +618 +619 +620 +621 +623 +624 +628 +629 +630 +631 +632 +634 +635 +636 +637 +638 +639 +640 +641 +643 +644 +645 +646 +647 +648 +649 +650 +651 +653 +654 +655 +656 +657 +658 +659 +660 +661 +663 +664 +665 +666 +667 +668 +669 +670 +671 +672 +673 +674 +675 +677 +678 +679 +680 +681 +682 +683 +684 +685 +686 +687 +688 +689 +691 +692 +693 +694 +695 +696 +697 +698 +700 +701 +702 +703 +704 +705 +706 +707 +708 +710 +711 +713 +714 +715 +716 +717 +718 +719 +720 +721 +722 +723 +727 +728 +730 +732 +733 
+734 +736 +737 +738 +739 +740 +741 +742 +743 +744 +745 +746 +747 +748 +749 +751 +752 +753 +755 +757 +758 +759 +761 +762 +763 +764 +765 +766 +767 +768 +769 +770 +773 +774 +775 +776 +777 +778 +780 +781 +782 +783 +784 +785 +786 +787 +789 +790 +791 +792 +794 +796 +798 +799 +801 +804 +805 +807 +808 +809 +810 +811 +812 +813 +816 +817 +818 +822 +823 +824 +825 +826 +827 +828 +829 +830 +831 +832 +833 +834 +835 +836 +838 +839 +840 +841 +842 +843 +845 +846 +847 +848 +849 +850 +851 +852 +853 +854 +855 +856 +857 +858 +861 +862 +863 +864 +865 +866 +867 +868 +869 +870 +871 +872 +873 +874 +875 +876 +877 +878 +879 +880 +881 +882 +883 +884 +885 +886 +887 +888 +889 +891 +892 +894 +895 +896 +897 +899 +900 +901 +903 +904 +905 +908 +909 +910 +912 +913 +916 +919 +920 +922 +925 +931 +932 +933 +934 +935 +936 +939 +941 +944 +945 +946 +947 +949 +950 +951 +952 +953 +954 +955 +958 +960 +961 +963 +964 +968 +969 +970 +971 +976 +979 +983 +986 +990 +991 +992 +993 +994 +995 +996 +997 +998 +999 +1000 +1001 +1002 +1003 +1004 +1005 +1006 +1007 +1008 +1009 +1010 +1011 +1012 +1013 +1014 +1015 +1016 +1017 +1019 +1022 +1024 +1025 +1027 +1029 +1030 +1031 +1032 +1035 +1036 +1037 +1038 +1039 +1040 +1041 +1043 +1044 +1045 +1046 +1047 +1048 +1050 +1051 +1052 +1055 +1056 +1063 +1064 +1065 +1067 +1069 +1070 +1071 +1072 +1075 +1076 +1078 +1079 +1080 +1081 +1083 +1084 +1085 +1086 +1087 +1088 +1089 +1092 +1093 +1094 +1095 +1097 +1099 +1106 +1121 +1140 +1141 +1143 +1144 +1145 +1147 +1148 +1149 +1150 +1151 +1152 +1155 +1157 +1159 +1160 +1161 +1164 +1165 +1166 +1167 +1168 +1169 +1170 +1171 +1172 +1173 +1178 +1179 +1180 +1181 +1182 +1184 +1187 +1190 +1191 +1193 +1195 +1196 +1197 +1199 +1200 +1201 +1202 +1203 +1204 +1205 +1207 +1208 +1209 +1211 +1214 +1215 +1216 +1217 +1218 +1219 +1220 +1221 +1222 +1223 +1224 +1225 +1227 +1229 +1230 +1231 +1232 +1233 +1234 +1235 +1236 +1237 +1238 +1239 +1240 +1241 +1242 +1244 +1245 +1246 +1247 +1249 +1250 +1251 +1252 +1253 +1254 +1256 +1257 +1258 +1259 +1260 +1261 +1263 +1265 +1266 +1267 +1268 +1269 +1271 +1272 +1273 +1274 +1277 +1279 +1283 +1287 +1289 +1298 +1299 +1303 +1304 +1305 +1308 +1313 +1318 +1320 +1323 +1324 +1325 +1326 +1327 +1328 +1330 +1332 +1333 +1335 +1337 +1339 +1340 +1341 +1342 +1343 +1344 +1345 +1349 +1350 +1351 +1352 +1353 +1354 +1355 +1356 +1357 +1358 +1359 +1362 +1364 +1369 +1372 +1373 +1376 +1377 +1378 +1380 +1382 +1384 +1385 +1386 +1387 +1388 +1389 +1390 +1391 +1392 +1393 +1396 +1397 +1398 +1399 +1402 +1404 +1405 +1406 +1407 +1408 +1409 +1411 +1412 +1413 +1416 +1417 +1420 +1424 +1425 +1426 +1427 +1428 +1429 +1430 +1431 +1432 +1433 +1434 +1435 +1436 +1437 +1439 +1440 +1442 +1443 +1445 +1446 +1448 +1450 +1452 +1454 +1455 +1457 +1458 +1459 +1460 +1461 +1462 +1463 +1464 +1466 +1469 +1470 +1474 +1475 +1476 +1477 +1482 +1485 +1486 +1487 +1488 +1489 +1491 +1493 +1494 +1495 +1496 +1497 +1499 +1500 +1502 +1503 +1504 +1505 +1506 +1508 +1509 +1511 +1512 +1513 +1514 +1515 +1516 +1517 +1518 +1519 +1520 +1521 +1522 +1523 +1524 +1525 +1526 +1527 +1528 +1529 +1530 +1531 +1532 +1533 +1534 +1535 +1536 +1537 +1538 +1539 +1540 +1541 +1542 +1543 +1544 +1545 +1546 +1547 +1548 +1549 +1550 +1551 +1552 +1553 +1554 +1555 +1556 +1557 +1558 +1559 +1560 +1561 +1562 +1563 +1564 +1565 +1566 +1567 +1568 +1569 +1570 +1571 +1572 +1573 +1574 +1575 +1576 +1577 +1578 +1582 +1583 +1584 +1586 +1587 +1588 +1589 +1590 +1591 +1592 +1594 +1595 +1597 +1598 +1599 +1600 +1603 +1604 +1605 +1611 +1614 +1615 +1616 +1622 +1624 +1626 +1627 +1628 +1629 +1630 +1631 +1632 +1633 +1634 +1636 +1643 +1644 +1652 +1656 +1659 +1662 +1663 +1665 +1667 
+1668 +1669 +1671 +1672 +1679 +1681 +1688 +1692 +1693 +1694 +1695 +1696 +1697 +1698 +1700 +1701 +1702 +1703 +1704 +1709 +1712 +1716 +1729 +1739 +1742 +1747 +1748 +1750 +1754 +1755 +1757 +1758 +1759 +1760 +1761 +1762 +1764 +1767 +1770 +1771 +1773 +1774 +1777 +1778 +1779 +1782 +1783 +1784 +1786 +1787 +1788 +1789 +1790 +1791 +1792 +1793 +1795 +1797 +1798 +1799 +1800 +1803 +1806 +1808 +1809 +1810 +1811 +1814 +1815 +1822 +1824 +1825 +1827 +1831 +1833 +1835 +1836 +1837 +1841 +1842 +1847 +1848 +1850 +1852 +1853 +1854 +1856 +1859 +1860 +1861 +1862 +1864 +1865 +1867 +1874 +1876 +1877 +1878 +1881 +1884 +1891 +1892 +1893 +1895 +1896 +1897 +1898 +1899 +1900 +1901 +1902 +1903 +1904 +1905 +1906 +1907 +1908 +1909 +1910 +1911 +1912 +1913 +1914 +1915 +1916 +1917 +1918 +1919 +1920 +1921 +1922 +1923 +1924 +1925 +1926 +1927 +1928 +1929 +1930 +1931 +1932 +1933 +1934 +1935 +1936 +1937 +1938 +1939 +1940 +1942 +1943 +1944 +1945 +1946 +1947 +1948 +1949 +1950 +1951 +1952 +1953 +1954 +1956 +1959 +1961 +1962 +1963 +1964 +1965 +1966 +1967 +1968 +1969 +1970 +1971 +1972 +1973 +1974 +1975 +1976 +1977 +1978 +1979 +1980 +1981 +1982 +1983 +1984 +1985 +1986 +1987 +1988 +1990 +1992 +1993 +1995 +1996 +1997 +1998 +1999 +2001 +2002 +2004 +2005 +2007 +2008 +2009 +2010 +2011 +2014 +2016 +2017 +2018 +2019 +2021 +2022 +2023 +2026 +2028 +2029 +2030 +2031 +2032 +2033 +2034 +2035 +2036 +2037 +2038 +2039 +2040 +2041 +2042 +2043 +2044 +2045 +2046 +2047 +2048 +2049 +2050 +2051 +2052 +2053 +2054 +2055 +2056 +2058 +2060 +2061 +2062 +2063 +2064 +2065 +2067 +2068 +2069 +2070 +2071 +2072 +2073 +2074 +2075 +2076 +2077 +2078 +2079 +2080 +2081 +2082 +2083 +2084 +2085 +2087 +2088 +2090 +2093 +2094 +2095 +2096 +2100 +2101 +2102 +2103 +2104 +2106 +2107 +2108 +2109 +2110 +2112 +2113 +2114 +2118 +2119 +2120 +2121 +2122 +2123 +2124 +2128 +2129 +2130 +2132 +2134 +2135 +2137 +2138 +2139 +2140 +2141 +2142 +2143 +2144 +2145 +2146 +2147 +2148 +2149 +2150 +2151 +2152 +2153 +2154 +2155 +2156 +2158 +2159 +2163 +2164 +2165 +2167 +2168 +2169 +2172 +2173 +2174 +2176 +2177 +2178 +2180 +2181 +2182 +2183 +2184 +2185 +2187 +2188 +2189 +2190 +2191 +2192 +2193 +2195 +2198 +2199 +2200 +2203 +2206 +2207 +2208 +2209 +2210 +2211 +2212 +2213 +2214 +2216 +2217 +2219 +2220 +2221 +2222 +2223 +2224 +2225 +2226 +2227 +2228 +2229 +2230 +2231 +2232 +2233 +2234 +2236 +2237 +2238 +2239 +2240 +2241 +2242 +2243 +2244 +2245 +2246 +2247 +2248 +2249 +2250 +2251 +2252 +2253 +2255 +2256 +2257 +2258 +2259 +2260 +2261 +2262 +2263 +2264 +2265 +2266 +2267 +2268 +2269 +2270 +2271 +2272 +2273 +2274 +2275 +2276 +2278 +2279 +2280 +2281 +2282 +2283 +2285 +2287 +2288 +2289 +2291 +2292 +2293 +2294 +2295 +2296 +2297 +2298 +2299 +2300 +2301 +2302 +2303 +2304 +2305 +2306 +2307 +2308 +2309 +2310 +2311 +2312 +2313 +2314 +2315 +2316 +2317 +2318 +2319 +2320 +2321 +2322 +2326 +2328 +2329 +2330 +2331 +2332 +2334 +2335 +2336 +2337 +2338 +2339 +2340 +2341 +2342 +2343 +2344 +2345 +2347 +2348 +2349 +2350 +2351 +2352 +2353 +2356 +2357 +2358 +2359 +2360 +2362 +2363 +2364 +2365 +2368 +2369 +2370 +2372 +2374 +2377 +2380 +2381 +2382 +2383 +2385 +2386 +2387 +2388 +2389 +2390 +2391 +2392 +2393 +2395 +2396 +2397 +2398 +2399 +2400 +2401 +2402 +2403 +2404 +2405 +2407 +2408 +2409 +2410 +2411 +2412 +2413 +2416 +2417 +2419 +2420 +2421 +2422 +2423 +2424 +2425 +2426 +2427 +2428 +2430 +2431 +2432 +2433 +2434 +2436 +2437 +2438 +2439 +2441 +2444 +2445 +2447 +2448 +2449 +2450 +2452 +2453 +2454 +2456 +2459 +2461 +2463 +2465 +2469 +2470 +2471 +2472 +2473 +2474 +2494 +2495 +2497 +2498 +2499 +2500 +2505 +2509 +2512 +2513 +2515 +2519 
+2520 +2522 +2523 +2525 +2526 +2528 +2530 +2531 +2532 +2533 +2534 +2536 +2537 +2538 +2540 +2542 +2544 +2545 +2547 +2548 +2549 +2557 +2558 +2561 +2562 +2563 +2565 +2567 +2568 +2569 +2570 +2571 +2572 +2573 +2578 +2587 +2588 +2589 +2590 +2595 +2597 +2598 +2609 +2612 +2613 +2615 +2616 +2617 +2618 +2625 +2626 +2627 +2628 +2630 +2631 +2635 +2638 +2639 +2641 +2642 +2644 +2645 +2649 +2654 +2655 +2656 +2658 +2659 +2660 +2663 +2664 +2665 +2666 +2668 +2669 +2670 +2672 +2674 +2675 +2677 +2679 +2680 +2681 +2682 +2683 +2684 +2686 +2689 +2691 +2692 +2693 +2694 +2696 +2699 +2702 +2705 +2706 +2707 +2708 +2712 +2715 +2722 +2723 +2724 +2725 +2727 +2728 +2730 +2731 +2732 +2734 +2737 +2738 +2739 +2741 +2742 +2743 +2745 +2747 +2748 +2749 +2750 +2752 +2760 +2761 +2762 +2764 +2767 +2770 +2774 +2778 +2780 +2791 +2795 +2796 +2805 +2810 +2812 +2814 +2815 +2818 +2820 +2828 +2829 +2832 +2833 +2835 +2837 +2840 +2843 +2844 +2845 +2852 +2859 +2860 +2861 +2862 +2863 +2864 +2865 +2866 +2867 +2868 +2869 +2870 +2871 +2872 +2874 +2875 +2876 +2878 +2879 +2880 +2881 +2882 +2884 +2885 +2886 +2888 +2889 +2890 +2891 +2892 +2893 +2894 +2895 +2897 +2899 +2900 +2903 +2904 +2907 +2910 +2913 +2914 +2916 +2923 +2926 +2932 +2933 +2940 +2944 +2945 +2947 +2949 +2950 +2953 +2955 +2956 +2957 +2958 +2959 +2960 +2963 +2964 +2967 +2970 +2974 +2976 +2979 +2980 +2982 +2984 +2985 +2989 +2990 +2991 +2992 +2993 +2994 +2996 +2999 +3000 +3002 +3005 +3007 +3008 +3009 +3010 +3012 +3013 +3014 +3018 +3019 +3020 +3022 +3024 +3025 +3026 +3027 +3028 +3029 +3030 +3033 +3035 +3036 +3039 +3040 +3042 +3043 +3046 +3047 +3048 +3051 +3053 +3055 +3056 +3059 +3060 +3067 +3069 +3074 +3079 +3086 +3088 +3091 +3093 +3094 +3106 +3111 +3117 +3125 +3129 +3134 +3135 +3136 +3137 +3138 +3139 +3140 +3141 +3142 +3143 +3144 +3145 +3146 +3148 +3149 +3150 +3151 +3153 +3154 +3159 +3160 +3161 +3164 +3165 +3166 +3168 +3169 +3170 +3171 +3172 +3173 +3176 +3177 +3182 +3188 +3191 +3192 +3193 +3194 +3195 +3196 +3200 +3201 +3202 +3203 +3204 +3205 +3206 +3207 +3208 +3209 +3210 +3214 +3218 +3219 +3220 +3221 +3222 +3223 +3225 +3226 +3227 +3228 +3229 +3230 +3231 +3232 +3234 +3235 +3236 +3237 +3238 +3239 +3240 +3241 +3242 +3243 +3244 +3245 +3246 +3247 +3248 +3253 +3258 +3259 +3260 +3261 +3262 +3264 +3265 +3266 +3267 +3268 +3270 +3271 +3273 +3274 +3277 +3278 +3279 +3280 +3281 +3282 +3283 +3284 +3285 +3288 +3289 +3291 +3292 +3296 +3297 +3298 +3299 +3301 +3302 +3304 +3305 +3306 +3307 +3308 +3309 +3310 +3311 +3312 +3315 +3316 +3318 +3320 +3321 +3322 +3324 +3325 +3327 +3328 +3329 +3330 +3332 +3333 +3334 +3335 +3337 +3339 +3340 +3341 +3342 +3343 +3344 +3345 +3348 +3349 +3351 +3352 +3353 +3354 +3355 +3356 +3358 +3360 +3361 +3362 +3363 +3365 +3366 +3368 +3371 +3373 +3375 +3376 +3377 +3378 +3379 +3380 +3381 +3382 +3383 +3384 +3389 +3390 +3392 +3397 +3398 +3400 +3401 +3404 +3405 +3406 +3407 +3408 +3409 +3410 +3411 +3412 +3413 +3415 +3416 +3417 +3419 +3421 +3424 +3425 +3426 +3427 +3428 +3429 +3430 +3431 +3432 +3433 +3434 +3435 +3436 +3438 +3439 +3440 +3441 +3444 +3446 +3448 +3450 +3451 +3452 +3454 +3455 +3456 +3458 +3459 +3461 +3462 +3463 +3466 +3467 +3468 +3469 +3471 +3472 +3473 +3474 +3475 +3476 +3477 +3478 +3479 +3481 +3482 +3485 +3492 +3493 +3494 +3495 +3497 +3498 +3499 +3500 +3501 +3502 +3503 +3505 +3509 +3510 +3511 +3512 +3513 +3517 +3518 +3519 +3520 +3521 +3522 +3526 +3527 +3528 +3533 +3536 +3544 +3546 +3547 +3553 +3554 +3555 +3556 +3559 +3560 +3562 +3563 +3565 +3566 +3567 +3568 +3569 +3574 +3575 +3576 +3584 +3585 +3587 +3599 +3600 +3601 +3602 +3603 +3604 +3605 +3606 +3608 +3609 +3610 +3612 +3613 
+3614 +3615 +3616 +3619 +3622 +3623 +3624 +3625 +3627 +3628 +3629 +3630 +3632 +3633 +3634 +3635 +3636 +3638 +3640 +3641 +3644 +3646 +3649 +3650 +3651 +3655 +3656 +3659 +3660 +3662 +3663 +3665 +3671 +3673 +3674 +3683 +3684 +3686 +3687 +3688 +3689 +3690 +3692 +3694 +3695 +3702 +3705 +3707 +3709 +3711 +3714 +3715 +3716 +3720 +3725 +3727 +3731 +3733 +3736 +3737 +3738 +3744 +3746 +3747 +3750 +3753 +3756 +3758 +3761 +3763 +3764 +3765 +3766 +3767 +3768 +3769 +3770 +3771 +3772 +3773 +3774 +3775 +3782 +3785 +3787 +3790 +3798 +3801 +3803 +3812 +3814 +3815 +3816 +3817 +3818 +3819 +3825 +3826 +3827 +3828 +3829 +3832 +3833 +3836 +3837 +3838 +3840 +3842 +3844 +3845 +3846 +3852 +3853 +3854 +3855 +3858 +3860 +3864 +3865 +3867 +3868 +3873 +3874 +3877 +3882 +3883 +3884 +3887 +3888 +3889 +3890 +3894 +3899 +3900 +3901 +3902 +3904 +3908 +3910 +3916 +3918 +3920 +3925 +3928 +3936 +3937 +3939 +3943 +3947 +3948 +3949 +3950 +3951 +3956 +3962 +3963 +3968 +3969 +3970 +3971 +3972 +3974 +3975 +3976 +3977 +3984 +3986 +3988 +3991 +4001 +4005 +4006 +4007 +4009 +4018 +4019 +4020 +4021 +4022 +4023 +4024 +4026 +4028 +4030 +4031 +4032 +4033 +4036 +4038 +4039 +4040 +4041 +4042 +4043 +4062 +4063 +4065 +4066 +4067 +4068 +4071 +4073 +4074 +4075 +4089 +4090 +4094 +4096 +4097 +4099 +4100 +4101 +4102 +4104 +4105 +4107 +4109 +4110 +4112 +4118 +4120 +4129 +4136 +4137 +4138 +4139 +4140 +4141 +4142 +4143 +4144 +4148 +4150 +4151 +4152 +4153 +4154 +4155 +4158 +4159 +4161 +4165 +4167 +4171 +4174 +4176 +4178 +4179 +4181 +4182 +4183 +4185 +4187 +4189 +4190 +4191 +4192 +4198 +4202 +4203 +4204 +4205 +4206 +4207 +4208 +4210 +4211 +4212 +4213 +4214 +4215 +4216 +4217 +4219 +4221 +4222 +4223 +4226 +4227 +4230 +4232 +4233 +4235 +4237 +4242 +4244 +4248 +4249 +4250 +4251 +4252 +4253 +4254 +4255 +4256 +4259 +4261 +4262 +4263 +4264 +4265 +4266 +4267 +4269 +4270 +4272 +4273 +4274 +4276 +4277 +4278 +4280 +4281 +4282 +4283 +4284 +4285 +4290 +4292 +4296 +4297 +4298 +4299 +4301 +4304 +4306 +4307 +4308 +4309 +4310 +4311 +4312 +4313 +4315 +4317 +4318 +4321 +4323 +4324 +4325 +4326 +4327 +4328 +4329 +4330 +4331 +4332 +4334 +4335 +4336 +4338 +4340 +4341 +4344 +4345 +4346 +4349 +4350 +4351 +4352 +4354 +4355 +4356 +4358 +4361 +4362 +4363 +4365 +4366 +4369 +4373 +4374 +4378 +4379 +4380 +4386 +4389 +4390 +4391 +4395 +4396 +4399 +4400 +4401 +4403 +4404 +4406 +4407 +4408 +4410 +4412 +4414 +4416 +4417 +4418 +4419 +4420 +4421 +4423 +4425 +4426 +4427 +4428 +4430 +4431 +4432 +4434 +4435 +4436 +4438 +4439 +4440 +4441 +4442 +4444 +4445 +4450 +4451 +4453 +4454 +4455 +4456 +4458 +4459 +4462 +4463 +4464 +4465 +4466 +4467 +4468 +4469 +4470 +4471 +4473 +4474 +4475 +4476 +4477 +4478 +4479 +4481 +4483 +4484 +4485 +4486 +4487 +4489 +4490 +4491 +4493 +4494 +4495 +4496 +4497 +4498 +4499 +4500 +4501 +4504 +4505 +4506 +4508 +4509 +4510 +4511 +4512 +4515 +4518 +4519 +4521 +4522 +4529 +4530 +4531 +4533 +4536 +4538 +4539 +4540 +4542 +4543 +4544 +4545 +4546 +4547 +4549 +4550 +4551 +4552 +4555 +4556 +4559 +4560 +4561 +4562 +4565 +4567 +4568 +4569 +4570 +4571 +4572 +4574 +4576 +4577 +4579 +4580 +4583 +4585 +4587 +4588 +4591 +4594 +4595 +4596 +4599 +4600 +4603 +4604 +4605 +4606 +4608 +4609 +4610 +4611 +4612 +4613 +4614 +4617 +4618 +4619 +4620 +4621 +4622 +4623 +4624 +4625 +4626 +4627 +4628 +4629 +4631 +4632 +4633 +4634 +4635 +4636 +4639 +4640 +4641 +4642 +4646 +4647 +4648 +4649 +4650 +4651 +4652 +4655 +4656 +4662 +4663 +4664 +4665 +4666 +4667 +4668 +4669 +4670 +4671 +4672 +4676 +4677 +4678 +4679 +4680 +4681 +4683 +4685 +4686 +4687 +4688 +4690 +4691 +4692 +4694 +4695 +4696 +4699 +4702 +4705 
+4708 +4709 +4710 +4711 +4712 +4714 +4715 +4716 +4717 +4719 +4722 +4723 +4724 +4725 +4726 +4727 +4728 +4729 +4730 +4732 +4733 +4734 +4736 +4737 +4739 +4740 +4743 +4746 +4748 +4750 +4751 +4752 +4756 +4758 +4759 +4760 +4761 +4762 +4768 +4770 +4771 +4773 +4774 +4775 +4777 +4778 +4779 +4780 +4781 +4783 +4789 +4790 +4793 +4795 +4797 +4798 +4799 +4800 +4801 +4802 +4804 +4806 +4807 +4808 +4812 +4813 +4814 +4815 +4816 +4818 +4819 +4824 +4829 +4831 +4833 +4836 +4837 +4839 +4840 +4842 +4843 +4844 +4847 +4848 +4849 +4851 +4852 +4853 +4854 +4855 +4860 +4861 +4863 +4864 +4865 +4866 +4867 +4869 +4871 +4874 +4875 +4877 +4878 +4879 +4880 +4883 +4884 +4885 +4886 +4887 +4888 +4890 +4894 +4895 +4896 +4897 +4900 +4901 +4903 +4905 +4906 +4908 +4909 +4910 +4912 +4913 +4916 +4917 +4921 +4922 +4923 +4924 +4925 +4926 +4927 +4928 +4929 +4931 +4932 +4933 +4934 +4935 +4936 +4938 +4939 +4940 +4941 +4942 +4943 +4945 +4946 +4947 +4950 +4951 +4953 +4957 +4958 +4960 +4961 +4964 +4965 +4967 +4968 +4970 +4972 +4973 +4976 +4977 +4978 +4979 +4980 +4981 +4982 +4984 +4985 +4986 +4987 +4989 +4990 +4991 +4993 +4994 +4998 +4999 +5001 +5002 +5003 +5004 +5005 +5007 +5008 +5009 +5011 +5012 +5016 +5017 +5020 +5021 +5022 +5023 +5025 +5026 +5027 +5028 +5029 +5031 +5033 +5034 +5037 +5038 +5039 +5041 +5042 +5043 +5046 +5047 +5048 +5051 +5055 +5057 +5060 +5061 +5062 +5063 +5064 +5065 +5068 +5071 +5072 +5073 +5076 +5078 +5079 +5081 +5083 +5084 +5086 +5088 +5090 +5091 +5092 +5093 +5094 +5096 +5098 +5100 +5101 +5102 +5104 +5105 +5109 +5111 +5112 +5114 +5115 +5117 +5119 +5120 +5121 +5122 +5123 +5124 +5125 +5126 +5127 +5129 +5130 +5131 +5132 +5133 +5134 +5135 +5137 +5138 +5139 +5141 +5142 +5143 +5144 +5146 +5148 +5149 +5151 +5153 +5154 +5156 +5157 +5158 +5162 +5163 +5165 +5167 +5168 +5172 +5174 +5175 +5176 +5178 +5179 +5180 +5181 +5183 +5184 +5185 +5186 +5187 +5189 +5191 +5193 +5195 +5196 +5198 +5199 +5201 +5202 +5203 +5204 +5205 +5206 +5207 +5208 +5209 +5210 +5211 +5212 +5213 +5215 +5216 +5217 +5218 +5219 +5221 +5222 +5223 +5224 +5225 +5226 +5227 +5231 +5234 +5235 +5237 +5239 +5240 +5247 +5248 +5249 +5250 +5253 +5254 +5255 +5256 +5258 +5259 +5264 +5265 +5266 +5267 +5269 +5270 +5272 +5273 +5275 +5277 +5278 +5282 +5284 +5288 +5290 +5291 +5292 +5293 +5294 +5295 +5296 +5297 +5298 +5299 +5300 +5301 +5302 +5306 +5307 +5311 +5312 +5313 +5314 +5315 +5316 +5317 +5319 +5320 +5321 +5322 +5323 +5326 +5328 +5329 +5330 +5331 +5332 +5333 +5334 +5335 +5336 +5338 +5339 +5340 +5341 +5343 +5344 +5345 +5346 +5347 +5348 +5353 +5357 +5358 +5360 +5362 +5363 +5364 +5369 +5372 +5373 +5375 +5377 +5378 +5379 +5381 +5385 +5386 +5387 +5388 +5389 +5390 +5391 +5392 +5393 +5395 +5398 +5399 +5400 +5401 +5402 +5403 +5406 +5407 +5410 +5411 +5412 +5413 +5417 +5418 +5419 +5420 +5421 +5422 +5423 +5425 +5426 +5427 +5428 +5429 +5430 +5431 +5432 +5434 +5435 +5437 +5439 +5441 +5443 +5444 +5445 +5446 +5447 +5448 +5450 +5451 +5454 +5455 +5456 +5461 +5463 +5466 +5467 +5471 +5472 +5473 +5474 +5475 +5476 +5477 +5478 +5481 +5482 +5483 +5484 +5485 +5486 +5487 +5488 +5489 +5491 +5493 +5494 +5495 +5496 +5497 +5498 +5499 +5501 +5503 +5504 +5505 +5506 +5507 +5508 +5510 +5511 +5514 +5515 +5517 +5519 +5520 +5521 +5522 +5524 +5529 +5530 +5531 +5532 +5535 +5538 +5540 +5541 +5542 +5544 +5547 +5548 +5549 +5550 +5551 +5552 +5553 +5554 +5555 +5557 +5561 +5563 +5564 +5565 +5566 +5567 +5568 +5569 +5570 +5572 +5574 +5575 +5576 +5577 +5578 +5579 +5580 +5583 +5584 +5586 +5590 +5591 +5592 +5593 +5594 +5595 +5596 +5597 +5598 +5603 +5604 +5606 +5607 +5608 +5609 +5610 +5612 +5613 +5614 +5615 +5617 +5619 +5620 
+5621 +5622 +5623 +5624 +5625 +5626 +5627 +5629 +5630 +5631 +5633 +5634 +5635 +5636 +5638 +5639 +5642 +5643 +5647 +5652 +5654 +5656 +5657 +5658 +5659 +5660 +5661 +5663 +5664 +5665 +5667 +5669 +5671 +5672 +5673 +5674 +5676 +5677 +5682 +5683 +5685 +5688 +5690 +5691 +5692 +5694 +5695 +5696 +5697 +5698 +5699 +5701 +5702 +5703 +5704 +5705 +5708 +5709 +5711 +5712 +5713 +5714 +5715 +5716 +5717 +5718 +5725 +5727 +5729 +5736 +5737 +5738 +5741 +5742 +5743 +5748 +5752 +5753 +5754 +5755 +5757 +5758 +5759 +5760 +5761 +5764 +5765 +5766 +5767 +5768 +5769 +5770 +5772 +5773 +5774 +5776 +5777 +5778 +5779 +5782 +5784 +5785 +5786 +5787 +5788 +5789 +5790 +5791 +5792 +5793 +5797 +5798 +5802 +5803 +5804 +5805 +5807 +5808 +5809 +5810 +5811 +5812 +5814 +5816 +5817 +5818 +5823 +5824 +5825 +5828 +5829 +5830 +5831 +5832 +5836 +5837 +5841 +5843 +5845 +5846 +5847 +5848 +5849 +5850 +5851 +5853 +5855 +5857 +5858 +5859 +5860 +5861 +5862 +5863 +5866 +5867 +5868 +5871 +5872 +5873 +5874 +5875 +5879 +5881 +5884 +5885 +5887 +5888 +5891 +5892 +5893 +5896 +5897 +5898 +5899 +5900 +5902 +5904 +5905 +5906 +5907 +5910 +5911 +5912 +5913 +5914 +5915 +5918 +5919 +5920 +5921 +5922 +5924 +5927 +5928 +5931 +5932 +5934 +5935 +5940 +5941 +5942 +5944 +5947 +5949 +5950 +5951 +5952 +5954 +5955 +5956 +5957 +5960 +5961 +5962 +5964 +5965 +5967 +5968 +5969 +5973 +5974 +5976 +5977 +5980 +5981 +5985 +5986 +5987 +5988 +5990 +5991 +5994 +5995 +5996 +5997 +5998 +5999 +6001 +6003 +6004 +6005 +6006 +6008 +6009 +6010 +6012 +6013 +6015 +6016 +6017 +6020 +6021 +6023 +6024 +6025 +6026 +6027 +6028 +6029 +6030 +6032 +6033 +6037 +6040 +6041 +6042 +6043 +6044 +6046 +6047 +6048 +6049 +6050 +6054 +6055 +6056 +6057 +6063 +6065 +6069 +6070 +6072 +6075 +6076 +6077 +6079 +6082 +6083 +6084 +6086 +6087 +6092 +6099 +6102 +6103 +6105 +6109 +6110 +6111 +6114 +6115 +6116 +6118 +6120 +6122 +6124 +6125 +6128 +6129 +6134 +6139 +6140 +6144 +6146 +6147 +6148 +6152 +6153 +6154 +6157 +6158 +6160 +6167 +6168 +6173 +6174 +6175 +6177 +6179 +6180 +6184 +6190 +6191 +6192 +6198 +6201 +6202 +6203 +6204 +6205 +6207 +6210 +6211 +6212 +6214 +6215 +6216 +6217 +6219 +6224 +6225 +6226 +6227 +6228 +6230 +6232 +6234 +6235 +6236 +6237 +6238 +6239 +6241 +6242 +6243 +6248 +6251 +6252 +6253 +6255 +6256 +6259 +6260 +6262 +6266 +6270 +6272 +6273 +6274 +6275 +6281 +6284 +6285 +6286 +6288 +6289 +6290 +6291 +6294 +6297 +6298 +6299 +6300 +6301 +6302 +6303 +6304 +6305 +6306 +6307 +6308 +6309 +6312 +6315 +6319 +6321 +6325 +6326 +6327 +6330 +6331 +6334 +6335 +6336 +6338 +6339 +6340 +6341 +6342 +6343 +6344 +6345 +6347 +6348 +6349 +6350 +6352 +6355 +6356 +6359 +6362 +6363 +6364 +6365 +6367 +6372 +6376 +6378 +6379 +6383 +6385 +6386 +6387 +6388 +6389 +6390 +6392 +6393 +6394 +6395 +6396 +6397 +6398 +6399 +6400 +6401 +6404 +6405 +6407 +6408 +6411 +6412 +6414 +6417 +6418 +6420 +6421 +6422 +6423 +6425 +6426 +6430 +6431 +6433 +6435 +6437 +6439 +6440 +6441 +6442 +6444 +6447 +6448 +6449 +6450 +6451 +6452 +6453 +6454 +6455 +6456 +6458 +6459 +6460 +6462 +6464 +6465 +6467 +6468 +6469 +6470 +6471 +6474 +6475 +6477 +6478 +6479 +6480 +6481 +6482 +6483 +6488 +6490 +6492 +6493 +6495 +6496 +6499 +6500 +6503 +6505 +6506 +6510 +6511 +6513 +6514 +6515 +6517 +6518 +6521 +6522 +6523 +6527 +6531 +6533 +6534 +6535 +6536 +6537 +6540 +6541 +6545 +6546 +6547 +6550 +6551 +6553 +6554 +6556 +6558 +6559 +6560 +6561 +6562 +6563 +6567 +6568 +6571 +6572 +6573 +6574 +6575 +6576 +6577 +6578 +6579 +6583 +6587 +6589 +6590 +6591 +6593 +6594 +6595 +6596 +6597 +6598 +6600 +6601 +6602 +6604 +6605 +6608 +6611 +6612 +6613 +6614 +6615 +6616 +6617 +6618 
+6619 +6620 +6621 +6622 +6623 +6629 +6632 +6636 +6638 +6639 +6640 +6643 +6648 +6649 +6651 +6653 +6654 +6655 +6658 +6660 +6661 +6662 +6663 +6665 +6667 +6668 +6669 +6670 +6673 +6674 +6675 +6676 +6677 +6678 +6679 +6681 +6682 +6683 +6686 +6687 +6691 +6692 +6693 +6694 +6695 +6696 +6698 +6700 +6702 +6703 +6705 +6706 +6707 +6708 +6709 +6710 +6712 +6713 +6715 +6716 +6718 +6720 +6721 +6722 +6723 +6725 +6726 +6728 +6735 +6737 +6739 +6740 +6741 +6743 +6744 +6745 +6746 +6747 +6748 +6749 +6751 +6752 +6753 +6754 +6757 +6758 +6763 +6764 +6765 +6766 +6767 +6768 +6770 +6772 +6773 +6774 +6775 +6776 +6778 +6779 +6781 +6783 +6784 +6785 +6786 +6787 +6788 +6791 +6794 +6795 +6797 +6798 +6799 +6800 +6804 +6805 +6806 +6807 +6808 +6809 +6810 +6813 +6814 +6815 +6820 +6822 +6823 +6825 +6826 +6829 +6830 +6831 +6833 +6834 +6837 +6838 +6840 +6841 +6846 +6847 +6850 +6851 +6855 +6857 +6858 +6860 +6863 +6864 +6865 +6866 +6867 +6868 +6870 +6875 +6876 +6877 +6878 +6879 +6880 +6882 +6885 +6886 +6887 +6889 +6890 +6892 +6894 +6898 +6900 +6901 +6902 +6905 +6908 +6909 +6912 +6915 +6916 +6917 +6919 +6920 +6925 +6926 +6928 +6929 +6930 +6931 +6932 +6934 +6935 +6936 +6937 +6939 +6940 +6941 +6944 +6945 +6946 +6950 +6951 +6952 +6953 +6954 +6956 +6958 +6959 +6960 +6961 +6964 +6965 +6966 +6968 +6969 +6973 +6974 +6978 +6980 +6981 +6982 +6985 +6986 +6987 +6990 +6991 +6993 +6994 +6995 +6996 +6997 +6998 +6999 +7000 +7002 +7003 +7004 +7009 +7010 +7011 +7013 +7017 +7018 +7019 +7025 +7026 +7029 +7031 +7038 +7039 +7041 +7042 +7044 +7045 +7046 +7048 +7049 +7050 +7051 +7052 +7055 +7056 +7057 +7059 +7062 +7063 +7064 +7066 +7068 +7069 +7072 +7073 +7075 +7076 +7077 +7078 +7079 +7081 +7082 +7083 +7084 +7085 +7087 +7088 +7090 +7091 +7092 +7093 +7095 +7096 +7097 +7098 +7099 +7100 +7101 +7103 +7104 +7107 +7108 +7110 +7111 +7112 +7113 +7115 +7116 +7117 +7118 +7120 +7121 +7122 +7123 +7126 +7127 +7128 +7129 +7134 +7135 +7136 +7137 +7138 +7142 +7150 +7152 +7153 +7154 +7155 +7156 +7158 +7160 +7161 +7162 +7163 +7164 +7165 +7166 +7167 +7168 +7169 +7170 +7171 +7172 +7173 +7175 +7176 +7177 +7178 +7180 +7181 +7182 +7183 +7186 +7189 +7192 +7193 +7194 +7195 +7196 +7198 +7199 +7200 +7201 +7202 +7203 +7204 +7205 +7206 +7207 +7208 +7212 +7213 +7214 +7215 +7216 +7217 +7218 +7219 +7220 +7222 +7223 +7224 +7225 +7226 +7228 +7230 +7231 +7232 +7237 +7238 +7239 +7241 +7242 +7243 +7244 +7245 +7246 +7247 +7250 +7254 +7256 +7257 +7258 +7259 +7260 +7261 +7263 +7264 +7266 +7267 +7268 +7270 +7271 +7273 +7276 +7277 +7278 +7279 +7280 +7282 +7283 +7284 +7285 +7286 +7287 +7288 +7289 +7290 +7291 +7292 +7293 +7294 +7297 +7299 +7301 +7302 +7305 +7306 +7307 +7309 +7310 +7313 +7314 +7315 +7316 +7317 +7318 +7319 +7321 +7322 +7323 +7324 +7325 +7326 +7327 +7329 +7332 +7333 +7334 +7335 +7336 +7337 +7338 +7340 +7341 +7342 +7344 +7346 +7348 +7349 +7350 +7353 +7354 +7357 +7358 +7363 +7364 +7365 +7370 +7372 +7373 +7375 +7378 +7379 +7380 +7382 +7385 +7386 +7388 +7390 +7391 +7393 +7394 +7396 +7400 +7403 +7406 +7412 +7418 +7419 +7420 +7422 +7424 +7425 +7427 +7428 +7432 +7435 +7436 +7437 +7438 +7440 +7441 +7442 +7443 +7445 +7449 +7450 +7451 +7452 +7454 +7455 +7458 +7459 +7460 +7461 +7462 +7463 +7464 +7465 +7466 +7467 +7469 +7470 +7471 +7472 +7473 +7474 +7475 +7476 +7478 +7479 +7482 +7484 +7485 +7486 +7491 +7492 +7494 +7496 +7497 +7498 +7502 +7503 +7504 +7505 +7506 +7507 +7511 +7513 +7514 +7516 +7517 +7518 +7520 +7521 +7523 +7524 +7525 +7526 +7528 +7530 +7533 +7536 +7539 +7540 +7541 +7542 +7546 +7548 +7551 +7552 +7554 +7556 +7557 +7558 +7559 +7561 +7562 +7563 +7564 +7565 +7566 +7567 +7568 +7570 +7571 
+7573 +7574 +7575 +7578 +7584 +7585 +7587 +7590 +7591 +7592 +7595 +7596 +7597 +7601 +7603 +7604 +7606 +7607 +7608 +7610 +7612 +7613 +7616 +7617 +7619 +7622 +7623 +7625 +7626 +7628 +7629 +7630 +7631 +7634 +7637 +7638 +7641 +7642 +7644 +7646 +7650 +7651 +7652 +7655 +7656 +7657 +7658 +7659 +7660 +7661 +7663 +7664 +7665 +7666 +7671 +7672 +7673 +7674 +7679 +7681 +7682 +7685 +7686 +7688 +7690 +7691 +7693 +7694 +7696 +7698 +7703 +7704 +7705 +7707 +7708 +7710 +7711 +7712 +7713 +7715 +7716 +7717 +7718 +7719 +7721 +7722 +7723 +7724 +7725 +7727 +7728 +7729 +7730 +7731 +7732 +7733 +7734 +7736 +7738 +7739 +7740 +7741 +7742 +7746 +7749 +7751 +7753 +7755 +7756 +7757 +7758 +7759 +7760 +7763 +7764 +7768 +7769 +7770 +7773 +7775 +7777 +7778 +7779 +7783 +7785 +7786 +7787 +7788 +7789 +7792 +7793 +7794 +7795 +7798 +7799 +7801 +7805 +7806 +7810 +7813 +7815 +7818 +7820 +7824 +7828 +7830 +7832 +7834 +7835 +7837 +7841 +7843 +7844 +7849 +7852 +7854 +7855 +7856 +7858 +7860 +7862 +7864 +7867 +7868 +7871 +7872 +7873 +7874 +7876 +7878 +7881 +7882 +7884 +7886 +7887 +7889 +7891 +7892 +7894 +7895 +7896 +7902 +7903 +7904 +7905 +7906 +7908 +7911 +7913 +7914 +7915 +7917 +7918 +7919 +7920 +7921 +7923 +7924 +7927 +7928 +7929 +7931 +7934 +7935 +7937 +7938 +7939 +7940 +7941 +7942 +7943 +7944 +7949 +7950 +7951 +7952 +7953 +7954 +7955 +7959 +7962 +7963 +7964 +7966 +7969 +7972 +7973 +7976 +7977 +7981 +7982 +7983 +7984 +7987 +7988 +7989 +7990 +7991 +7992 +7994 +7995 +7997 +7998 +7999 +8000 +8001 +8004 +8005 +8006 +8007 +8008 +8009 +8012 +8017 +8019 +8020 +8021 +8022 +8023 +8024 +8025 +8027 +8028 +8029 +8031 +8033 +8034 +8035 +8036 +8037 +8038 +8039 +8040 +8042 +8043 +8044 +8045 +8046 +8050 +8051 +8052 +8054 +8056 +8060 +8061 +8062 +8064 +8065 +8066 +8068 +8070 +8071 +8072 +8074 +8077 +8078 +8080 +8081 +8082 +8084 +8086 +8087 +8089 +8090 +8093 +8098 +8099 +8101 +8104 +8105 +8106 +8110 +8112 +8113 +8114 +8115 +8116 +8119 +8120 +8121 +8124 +8125 +8126 +8127 +8129 +8131 +8133 +8136 +8138 +8139 +8140 +8141 +8142 +8144 +8145 +8147 +8149 +8150 +8151 +8153 +8154 +8155 +8156 +8157 +8159 +8161 +8162 +8163 +8164 +8166 +8168 +8170 +8171 +8172 +8173 +8174 +8175 +8177 +8178 +8179 +8182 +8183 +8184 +8186 +8191 +8193 +8195 +8197 +8198 +8199 +8201 +8202 +8203 +8204 +8205 +8206 +8207 +8208 +8210 +8211 +8212 +8213 +8215 +8216 +8218 +8220 +8221 +8222 +8225 +8229 +8230 +8231 +8232 +8233 +8236 +8237 +8239 +8240 +8242 +8243 +8244 +8245 +8246 +8250 +8251 +8252 +8254 +8255 +8256 +8257 +8258 +8259 +8261 +8263 +8264 +8267 +8268 +8271 +8272 +8273 +8275 +8276 +8278 +8281 +8282 +8285 +8286 +8288 +8289 +8290 +8294 +8295 +8297 +8298 +8299 +8300 +8303 +8307 +8309 +8310 +8312 +8313 +8315 +8318 +8320 +8322 +8325 +8326 +8327 +8328 +8329 +8330 +8332 +8333 +8335 +8337 +8345 +8346 +8347 +8348 +8352 +8354 +8360 +8362 +8364 +8365 +8368 +8371 +8375 +8376 +8378 +8380 +8381 +8382 +8386 +8388 +8389 +8390 +8392 +8393 +8394 +8396 +8397 +8398 +8399 +8400 +8401 +8402 +8403 +8404 +8405 +8407 +8408 +8409 +8410 +8412 +8414 +8416 +8417 +8418 +8419 +8420 +8421 +8422 +8426 +8428 +8430 +8432 +8433 +8434 +8435 +8436 +8437 +8439 +8440 +8446 +8447 +8448 +8449 +8450 +8451 +8452 +8453 +8454 +8456 +8460 +8462 +8463 +8464 +8467 +8468 +8469 +8470 +8472 +8473 +8474 +8477 +8478 +8481 +8482 +8483 +8484 +8485 +8486 +8490 +8491 +8492 +8493 +8494 +8495 +8496 +8497 +8498 +8500 +8501 +8502 +8503 +8505 +8506 +8508 +8509 +8510 +8511 +8512 +8513 +8516 +8521 +8522 +8524 +8526 +8529 +8531 +8532 +8536 +8538 +8539 +8540 +8541 +8542 +8543 +8547 +8548 +8549 +8552 +8553 +8555 +8556 +8557 +8560 +8561 +8562 +8564 
+8565 +8568 +8569 +8570 +8571 +8572 +8573 +8577 +8578 +8580 +8581 +8583 +8584 +8586 +8588 +8589 +8590 +8591 +8593 +8594 +8596 +8597 +8598 +8599 +8600 +8601 +8602 +8603 +8604 +8606 +8607 +8610 +8611 +8613 +8615 +8622 +8625 +8626 +8627 +8628 +8629 +8632 +8636 +8638 +8639 +8641 +8643 +8645 +8646 +8647 +8648 +8649 +8650 +8651 +8652 +8653 +8654 +8655 +8656 +8657 +8658 +8662 +8663 +8664 +8665 +8666 +8667 +8668 +8669 +8670 +8671 +8672 +8673 +8674 +8675 +8676 +8677 +8678 +8679 +8680 +8681 +8682 +8684 +8685 +8686 +8690 +8691 +8692 +8693 +8694 +8695 +8702 +8707 +8708 +8709 +8710 +8711 +8712 +8713 +8715 +8716 +8720 +8723 +8724 +8725 +8728 +8732 +8733 +8737 +8738 +8739 +8740 +8741 +8745 +8746 +8750 +8752 +8753 +8754 +8756 +8757 +8758 +8759 +8761 +8762 +8763 +8766 +8768 +8770 +8771 +8772 +8773 +8775 +8776 +8780 +8781 +8783 +8784 +8785 +8786 +8787 +8788 +8793 +8795 +8797 +8798 +8801 +8803 +8804 +8806 +8807 +8810 +8812 +8814 +8815 +8817 +8820 +8823 +8824 +8826 +8827 +8828 +8829 +8830 +8831 +8833 +8835 +8838 +8839 +8842 +8843 +8845 +8846 +8847 +8848 +8849 +8851 +8854 +8856 +8857 +8858 +8860 +8861 +8863 +8864 +8867 +8869 +8870 +8871 +8872 +8875 +8876 +8878 +8879 +8883 +8884 +8886 +8887 +8888 +8890 +8891 +8892 +8894 +8896 +8897 +8898 +8899 +8900 +8901 +8902 +8903 +8905 +8906 +8908 +8910 +8914 +8915 +8916 +8917 +8918 +8919 +8922 +8923 +8924 +8925 +8926 +8927 +8929 +8931 +8932 +8934 +8936 +8937 +8938 +8939 +8942 +8943 +8944 +8945 +8947 +8948 +8950 +8951 +8954 +8956 +8957 +8959 +8962 +8965 +8966 +8967 +8968 +8969 +8970 +8971 +8976 +8977 +8980 +8981 +8982 +8983 +8984 +8985 +8986 +8987 +8989 +8990 +8991 +8992 +8993 +8994 +8995 +9000 +9001 +9003 +9006 +9007 +9011 +9012 +9013 +9014 +9015 +9019 +9022 +9023 +9024 +9025 +9026 +9028 +9029 +9030 +9031 +9032 +9033 +9034 +9036 +9037 +9039 +9042 +9043 +9047 +9049 +9050 +9051 +9052 +9054 +9055 +9056 +9057 +9058 +9059 +9060 +9061 +9062 +9064 +9065 +9066 +9070 +9071 +9072 +9073 +9074 +9079 +9080 +9081 +9082 +9083 +9087 +9088 +9092 +9093 +9094 +9096 +9097 +9098 +9100 +9101 +9104 +9105 +9106 +9107 +9108 +9109 +9110 +9111 +9112 +9116 +9118 +9119 +9123 +9128 +9130 +9131 +9132 +9133 +9134 +9138 +9139 +9140 +9141 +9142 +9144 +9146 +9147 +9148 +9149 +9150 +9151 +9153 +9154 +9155 +9158 +9159 +9161 +9163 +9165 +9166 +9167 +9168 +9169 +9171 +9173 +9174 +9175 +9176 +9179 +9180 +9183 +9184 +9187 +9188 +9189 +9191 +9193 +9198 +9199 +9201 +9204 +9206 +9207 +9212 +9213 +9214 +9215 +9216 +9217 +9219 +9220 +9221 +9224 +9225 +9226 +9227 +9228 +9229 +9230 +9231 +9232 +9234 +9238 +9239 +9240 +9242 +9243 +9244 +9246 +9250 +9251 +9252 +9253 +9255 +9257 +9258 +9259 +9260 +9265 +9266 +9269 +9270 +9271 +9272 +9273 +9274 +9275 +9276 +9277 +9279 +9281 +9283 +9290 +9293 +9294 +9295 +9296 +9297 +9300 +9303 +9304 +9305 +9306 +9307 +9308 +9309 +9310 +9313 +9314 +9317 +9318 +9319 +9322 +9324 +9327 +9330 +9331 +9333 +9334 +9335 +9336 +9337 +9338 +9342 +9343 +9344 +9345 +9346 +9347 +9349 +9350 +9352 +9354 +9355 +9362 +9363 +9365 +9366 +9367 +9368 +9369 +9370 +9371 +9372 +9376 +9377 +9381 +9382 +9383 +9385 +9386 +9387 +9390 +9391 +9392 +9395 +9396 +9397 +9398 +9399 +9400 +9401 +9402 +9404 +9405 +9407 +9411 +9412 +9413 +9414 +9415 +9416 +9417 +9418 +9420 +9421 +9422 +9423 +9425 +9426 +9427 +9430 +9431 +9437 +9440 +9441 +9442 +9445 +9446 +9447 +9448 +9449 +9450 +9452 +9453 +9454 +9455 +9457 +9460 +9463 +9464 +9465 +9467 +9469 +9470 +9471 +9475 +9476 +9479 +9481 +9482 +9484 +9486 +9488 +9490 +9492 +9493 +9496 +9498 +9500 +9501 +9502 +9503 +9504 +9505 +9513 +9514 +9515 +9516 +9521 +9522 +9523 +9528 +9532 +9533 
+9535 +9536 +9537 +9540 +9542 +9543 +9545 +9546 +9548 +9549 +9551 +9552 +9557 +9558 +9566 +9567 +9569 +9570 +9571 +9572 +9574 +9575 +9576 +9577 +9578 +9579 +9581 +9584 +9585 +9586 +9587 +9588 +9590 +9591 +9595 +9597 +9602 +9605 +9608 +9609 +9610 +9615 +9616 +9618 +9622 +9624 +9626 +9627 +9628 +9629 +9630 +9633 +9634 +9635 +9636 +9637 +9639 +9640 +9641 +9642 +9643 +9644 +9647 +9652 +9653 +9654 +9655 +9656 +9659 +9660 +9661 +9664 +9665 +9666 +9667 +9668 +9670 +9673 +9674 +9675 +9676 +9677 +9678 +9679 +9681 +9684 +9686 +9689 +9690 +9691 +9692 +9693 +9695 +9697 +9698 +9703 +9704 +9705 +9706 +9707 +9711 +9712 +9713 +9715 +9717 +9720 +9721 +9724 +9726 +9727 +9728 +9730 +9733 +9734 +9735 +9737 +9738 +9739 +9740 +9741 +9742 +9745 +9752 +9753 +9754 +9755 +9756 +9757 +9759 +9760 +9763 +9764 +9765 +9767 +9770 +9771 +9772 +9773 +9774 +9776 +9777 +9778 +9779 +9780 +9781 +9783 +9785 +9786 +9787 +9792 +9795 +9797 +9798 +9799 +9800 +9801 +9802 +9803 +9806 +9807 +9808 +9810 +9812 +9815 +9820 +9821 +9826 +9827 +9828 +9835 +9836 +9837 +9838 +9839 +9842 +9845 +9849 +9850 +9856 +9858 +9859 +9860 +9861 +9863 +9867 +9869 +9870 +9874 +9876 +9877 +9878 +9879 +9881 +9884 +9886 +9887 +9888 +9889 +9890 +9892 +9894 +9895 +9896 +9897 +9898 +9899 +9900 +9901 +9902 +9903 +9905 +9907 +9908 +9910 +9912 +9913 +9916 +9921 +9922 +9923 +9924 +9925 +9927 +9928 +9929 +9930 +9931 +9932 +9935 +9936 +9937 +9938 +9939 +9941 +9945 +9947 +9948 +9949 +9951 +9952 +9953 +9954 +9956 +9958 +9960 +9961 +9962 +9963 +9964 +9965 +9966 +9967 +9969 +9970 +9972 +9975 +9976 +9977 +9979 +9980 +9981 +9982 +9983 +9984 +9988 +9989 +9990 +9991 +9992 +9993 +9994 +9997 +9999 +10002 +10003 +10010 +10011 +10012 +10013 +10014 +10015 +10016 +10017 +10018 +10022 +10023 +10025 +10028 +10029 +10031 +10033 +10034 +10036 +10038 +10040 +10041 +10042 +10043 +10044 +10045 +10046 +10047 +10048 +10050 +10051 +10052 +10053 +10054 +10055 +10056 +10057 +10058 +10059 +10060 +10062 +10064 +10065 +10066 +10067 +10068 +10070 +10073 +10074 +10075 +10076 +10077 +10078 +10081 +10082 +10083 +10084 +10086 +10087 +10088 +10090 +10091 +10092 +10093 +10094 +10095 +10097 +10100 +10102 +10105 +10106 +10107 +10108 +10109 +10110 +10113 +10117 +10118 +10119 +10121 +10122 +10123 +10124 +10127 +10128 +10134 +10135 +10140 +10141 +10142 +10147 +10148 +10151 +10152 +10153 +10155 +10156 +10157 +10158 +10159 +10160 +10162 +10163 +10164 +10165 +10169 +10171 +10175 +10179 +10181 +10185 +10186 +10187 +10188 +10189 +10190 +10191 +10192 +10195 +10196 +10197 +10199 +10200 +10202 +10206 +10207 +10209 +10210 +10211 +10213 +10217 +10218 +10219 +10220 +10221 +10223 +10224 +10225 +10226 +10227 +10228 +10229 +10231 +10232 +10234 +10235 +10236 +10237 +10240 +10241 +10243 +10244 +10245 +10246 +10247 +10248 +10249 +10250 +10252 +10253 +10254 +10255 +10258 +10260 +10262 +10263 +10264 +10265 +10266 +10269 +10271 +10272 +10273 +10276 +10277 +10279 +10283 +10284 +10286 +10289 +10290 +10291 +10295 +10296 +10297 +10298 +10299 +10300 +10301 +10302 +10303 +10304 +10305 +10306 +10307 +10308 +10311 +10313 +10314 +10315 +10318 +10319 +10322 +10323 +10324 +10325 +10326 +10327 +10328 +10329 +10330 +10331 +10332 +10333 +10335 +10336 +10337 +10338 +10339 +10340 +10343 +10344 +10345 +10346 +10349 +10350 +10351 +10352 +10353 +10354 +10355 +10356 +10360 +10362 +10363 +10366 +10367 +10371 +10372 +10373 +10375 +10377 +10378 +10380 +10381 +10383 +10387 +10388 +10389 +10390 +10391 +10394 +10395 +10397 +10398 +10399 +10401 +10402 +10404 +10406 +10409 +10412 +10413 +10414 +10416 +10417 +10418 +10419 +10420 +10422 +10423 +10424 
+10425 +10427 +10429 +10430 +10431 +10432 +10433 +10436 +10438 +10440 +10444 +10447 +10448 +10451 +10453 +10455 +10456 +10459 +10460 +10461 +10462 +10463 +10465 +10468 +10470 +10473 +10476 +10478 +10479 +10481 +10483 +10487 +10488 +10489 +10491 +10494 +10496 +10497 +10499 +10500 +10501 +10504 +10505 +10506 +10508 +10509 +10510 +10511 +10512 +10513 +10514 +10515 +10517 +10518 +10519 +10520 +10521 +10522 +10523 +10524 +10525 +10526 +10527 +10528 +10530 +10531 +10532 +10534 +10535 +10536 +10539 +10542 +10544 +10546 +10547 +10548 +10549 +10550 +10551 +10552 +10553 +10554 +10555 +10556 +10559 +10561 +10563 +10564 +10567 +10570 +10574 +10575 +10576 +10583 +10584 +10585 +10586 +10588 +10589 +10590 +10591 +10592 +10594 +10598 +10607 +10610 +10611 +10612 +10613 +10615 +10617 +10620 +10621 +10623 +10625 +10626 +10627 +10628 +10629 +10631 +10633 +10635 +10636 +10638 +10639 +10641 +10642 +10643 +10644 +10648 +10649 +10650 +10658 +10659 +10662 +10663 +10664 +10665 +10666 +10667 +10668 +10669 +10671 +10672 +10673 +10674 +10675 +10676 +10677 +10678 +10680 +10681 +10683 +10686 +10687 +10688 +10691 +10692 +10694 +10695 +10697 +10698 +10699 +10700 +10701 +10702 +10703 +10708 +10709 +10711 +10712 +10713 +10714 +10718 +10719 +10720 +10721 +10723 +10725 +10726 +10727 +10728 +10729 +10730 +10731 +10733 +10734 +10735 +10738 +10739 +10742 +10748 +10749 +10750 +10751 +10752 +10754 +10755 +10757 +10759 +10760 +10761 +10763 +10765 +10766 +10767 +10768 +10772 +10773 +10774 +10775 +10776 +10777 +10780 +10781 +10782 +10783 +10784 +10786 +10787 +10791 +10792 +10793 +10796 +10798 +10799 +10800 +10802 +10803 +10806 +10807 +10808 +10809 +10810 +10811 +10813 +10814 +10818 +10822 +10824 +10825 +10828 +10830 +10832 +10833 +10835 +10837 +10840 +10841 +10842 +10843 +10844 +10846 +10848 +10849 +10852 +10855 +10856 +10859 +10860 +10862 +10864 +10865 +10867 +10870 +10871 +10872 +10874 +10875 +10876 +10877 +10879 +10880 +10881 +10882 +10883 +10884 +10886 +10889 +10890 +10894 +10896 +10897 +10898 +10899 +10902 +10906 +10907 +10908 +10909 +10910 +10912 +10913 +10914 +10915 +10919 +10920 +10921 +10924 +10927 +10928 +10932 +10933 +10935 +10936 +10938 +10939 +10940 +10941 +10943 +10944 +10945 +10946 +10947 +10948 +10951 +10952 +10953 +10955 +10956 +10957 +10958 +10960 +10962 +10963 +10964 +10968 +10969 +10972 +10973 +10974 +10975 +10980 +10984 +10986 +10987 +10989 +10994 +10997 +10998 +10999 +11001 +11002 +11003 +11004 +11005 +11009 +11012 +11013 +11016 +11017 +11018 +11020 +11022 +11023 +11024 +11025 +11027 +11028 +11029 +11031 +11032 +11033 +11034 +11036 +11039 +11040 +11041 +11044 +11045 +11046 +11049 +11051 +11052 +11053 +11054 +11055 +11056 +11057 +11059 +11060 +11061 +11062 +11064 +11066 +11067 +11068 +11069 +11071 +11073 +11074 +11077 +11078 +11079 +11081 +11082 +11083 +11084 +11085 +11086 +11088 +11089 +11094 +11095 +11096 +11097 +11098 +11099 +11102 +11103 +11104 +11105 +11106 +11107 +11108 +11109 +11110 +11111 +11113 +11114 +11115 +11119 +11120 +11121 +11123 +11126 +11128 +11129 +11130 +11133 +11134 +11135 +11137 +11138 +11139 +11140 +11141 +11143 +11144 +11145 +11146 +11147 +11149 +11151 +11152 +11153 +11154 +11155 +11156 +11158 +11159 +11160 +11161 +11162 +11163 +11164 +11165 +11168 +11169 +11170 +11171 +11172 +11173 +11174 +11177 +11179 +11180 +11181 +11182 +11183 +11185 +11186 +11187 +11188 +11190 +11198 +11201 +11203 +11206 +11208 +11209 +11210 +11211 +11212 +11214 +11216 +11218 +11219 +11220 +11221 +11222 +11223 +11225 +11226 +11227 +11235 +11236 +11238 +11239 +11242 +11243 +11244 +11247 +11248 +11252 +11255 +11257 
+11259 +11260 +11262 +11263 +11266 +11269 +11270 +11273 +11274 +11275 +11276 +11277 +11279 +11282 +11283 +11284 +11292 +11293 +11294 +11296 +11298 +11299 +11301 +11304 +11305 +11306 +11307 +11309 +11310 +11311 +11313 +11316 +11318 +11319 +11320 +11323 +11325 +11326 +11327 +11329 +11330 +11331 +11332 +11334 +11335 +11339 +11340 +11341 +11343 +11344 +11345 +11347 +11349 +11350 +11351 +11352 +11354 +11355 +11356 +11357 +11361 +11363 +11365 +11367 +11370 +11372 +11374 +11375 +11377 +11378 +11379 +11382 +11383 +11384 +11385 +11386 +11387 +11388 +11389 +11390 +11391 +11392 +11394 +11396 +11397 +11398 +11400 +11401 +11402 +11403 +11404 +11405 +11407 +11409 +11410 +11412 +11414 +11416 +11418 +11421 +11424 +11427 +11428 +11429 +11431 +11433 +11436 +11437 +11439 +11440 +11441 +11442 +11443 +11447 +11449 +11454 +11455 +11456 +11460 +11464 +11465 +11466 +11468 +11469 +11470 +11472 +11473 +11474 +11477 +11478 +11479 +11482 +11483 +11484 +11485 +11487 +11488 +11489 +11490 +11491 +11492 +11493 +11494 +11495 +11497 +11498 +11499 +11501 +11503 +11504 +11507 +11508 +11514 +11515 +11516 +11517 +11518 +11520 +11521 +11522 +11524 +11527 +11529 +11532 +11536 +11537 +11538 +11539 +11540 +11541 +11542 +11543 +11544 +11549 +11551 +11554 +11556 +11557 +11558 +11560 +11562 +11563 +11566 +11570 +11571 +11572 +11573 +11574 +11575 +11577 +11578 +11579 +11580 +11582 +11584 +11588 +11589 +11590 +11591 +11592 +11593 +11594 +11596 +11597 +11598 +11599 +11600 +11604 +11605 +11606 +11609 +11611 +11612 +11613 +11614 +11615 +11618 +11619 +11620 +11621 +11622 +11623 +11624 +11627 +11628 +11629 +11630 +11631 +11632 +11633 +11634 +11636 +11637 +11638 +11640 +11641 +11643 +11644 +11646 +11647 +11648 +11649 +11650 +11651 +11652 +11653 +11654 +11655 +11656 +11657 +11658 +11659 +11660 +11662 +11664 +11668 +11670 +11671 +11673 +11674 +11675 +11679 +11682 +11683 +11687 +11688 +11689 +11690 +11692 +11693 +11695 +11696 +11697 +11698 +11700 +11701 +11704 +11705 +11708 +11709 +11710 +11711 +11712 +11713 +11715 +11716 +11718 +11721 +11723 +11725 +11726 +11727 +11728 +11729 +11730 +11732 +11733 +11734 +11735 +11736 +11737 +11738 +11740 +11743 +11744 +11747 +11750 +11751 +11753 +11754 +11756 +11757 +11760 +11761 +11763 +11764 +11765 +11769 +11770 +11771 +11773 +11774 +11777 +11778 +11780 +11781 +11782 +11783 +11787 +11788 +11790 +11791 +11793 +11795 +11798 +11799 +11800 +11801 +11802 +11804 +11809 +11810 +11811 +11814 +11815 +11819 +11820 +11821 +11822 +11823 +11827 +11829 +11835 +11836 +11837 +11838 +11839 +11840 +11841 +11842 +11843 +11845 +11846 +11847 +11848 +11849 +11851 +11852 +11856 +11857 +11858 +11862 +11863 +11864 +11865 +11866 +11867 +11868 +11869 +11870 +11871 +11872 +11873 +11875 +11876 +11877 +11878 +11879 +11880 +11881 +11882 +11883 +11884 +11887 +11889 +11890 +11891 +11892 +11894 +11896 +11897 +11898 +11899 +11900 +11902 +11903 +11904 +11905 +11907 +11909 +11910 +11914 +11917 +11918 +11920 +11921 +11922 +11923 +11925 +11927 +11928 +11929 +11930 +11931 +11933 +11937 +11939 +11940 +11941 +11942 +11944 +11947 +11948 +11950 +11951 +11952 +11953 +11954 +11955 +11958 +11960 +11961 +11962 +11964 +11965 +11966 +11968 +11970 +11971 +11972 +11975 +11976 +11979 +11980 +11982 +11984 +11985 +11987 +11990 +11991 +11994 +11995 +11996 +12000 +12002 +12009 +12010 +12011 +12012 +12013 +12014 +12015 +12016 +12018 +12019 +12020 +12021 +12022 +12023 +12024 +12025 +12026 +12027 +12028 +12029 +12031 +12032 +12033 +12034 +12035 +12036 +12037 +12038 +12039 +12040 +12041 +12042 +12044 +12045 +12047 +12048 +12049 +12050 +12051 +12052 +12054 +12055 
+12056 +12058 +12061 +12063 +12064 +12065 +12067 +12069 +12071 +12074 +12075 +12081 +12084 +12085 +12088 +12090 +12091 +12092 +12093 +12094 +12095 +12096 +12097 +12099 +12105 +12107 +12109 +12110 +12112 +12121 +12123 +12125 +12131 +12145 +12147 +12148 +12154 +12155 +12157 +12159 +12163 +12170 +12174 +12177 +12178 +12179 +12180 +12181 +12182 +12183 +12184 +12185 +12186 +12187 +12190 +12191 +12192 +12194 +12198 +12200 +12201 +12202 +12203 +12204 +12207 +12208 +12209 +12210 +12211 +12214 +12215 +12217 +12218 +12222 +12225 +12227 +12229 +12230 +12231 +12232 +12239 +12240 +12241 +12242 +12243 +12244 +12245 +12246 +12247 +12248 +12249 +12250 +12251 +12253 +12255 +12256 +12257 +12259 +12260 +12261 +12262 +12263 +12264 +12268 +12272 +12276 +12281 +12282 +12283 +12284 +12285 +12291 +12297 +12298 +12302 +12304 +12306 +12309 +12313 +12315 +12317 +12319 +12322 +12323 +12324 +12325 +12326 +12327 +12328 +12329 +12331 +12332 +12333 +12334 +12335 +12336 +12338 +12340 +12341 +12342 +12346 +12347 +12348 +12349 +12350 +12351 +12352 +12353 +12354 +12355 +12356 +12357 +12359 +12360 +12362 +12363 +12365 +12367 +12369 +12373 +12375 +12376 +12377 +12381 +12382 +12384 +12386 +12387 +12389 +12391 +12393 +12394 +12395 +12396 +12397 +12399 +12400 +12401 +12402 +12404 +12405 +12406 +12407 +12408 +12409 +12411 +12412 +12413 +12414 +12420 +12421 +12422 +12423 +12426 +12430 +12431 +12433 +12434 +12436 +12437 +12438 +12439 +12441 +12443 +12444 +12447 +12453 +12455 +12457 +12463 +12464 +12465 +12467 +12468 +12471 +12474 +12481 +12482 +12483 +12484 +12487 +12491 +12493 +12494 +12495 +12497 +12498 +12501 +12502 +12503 +12505 +12506 +12511 +12512 +12513 +12516 +12519 +12522 +12523 +12525 +12526 +12530 +12533 +12535 +12537 +12539 +12542 +12545 +12546 +12547 +12548 +12550 +12551 +12552 +12554 +12556 +12557 +12559 +12560 +12562 +12564 +12566 +12567 +12568 +12570 +12571 +12572 +12573 +12574 +12575 +12577 +12580 +12581 +12582 +12583 +12585 +12590 +12591 +12593 +12594 +12595 +12597 +12601 +12602 +12604 +12611 +12614 +12615 +12616 +12617 +12619 +12620 +12622 +12623 +12625 +12626 +12627 +12628 +12629 +12630 +12632 +12633 +12634 +12635 +12636 +12640 +12641 +12643 +12644 +12646 +12647 +12648 +12649 +12651 +12653 +12654 +12657 +12659 +12661 +12663 +12665 +12667 +12668 +12671 +12672 +12674 +12675 +12676 +12677 +12678 +12679 +12680 +12681 +12684 +12685 +12686 +12687 +12688 +12689 +12690 +12692 +12693 +12694 +12695 +12697 +12699 +12700 +12701 +12702 +12703 +12704 +12708 +12709 +12710 +12711 +12712 +12713 +12714 +12715 +12717 +12720 +12722 +12723 +12729 +12730 +12731 +12732 +12733 +12734 +12735 +12736 +12737 +12738 +12739 +12740 +12741 +12742 +12743 +12744 +12749 +12751 +12752 +12754 +12755 +12757 +12759 +12760 +12761 +12762 +12767 +12768 +12769 +12770 +12771 +12772 +12773 +12774 +12775 +12776 +12777 +12778 +12780 +12781 +12782 +12783 +12784 +12785 +12786 +12787 +12788 +12790 +12791 +12793 +12794 +12795 +12796 +12798 +12800 +12801 +12802 +12804 +12806 +12807 +12808 +12809 +12810 +12811 +12812 +12816 +12817 +12818 +12819 +12820 +12821 +12822 +12823 +12824 +12825 +12826 +12827 +12828 +12829 +12830 +12831 +12832 +12833 +12834 +12835 +12836 +12837 +12838 +12839 +12840 +12841 +12842 +12843 +12844 +12847 +12848 +12849 +12850 +12856 +12858 +12861 +12864 +12866 +12870 +12871 +12872 +12873 +12874 +12876 +12877 +12878 +12879 +12881 +12882 +12883 +12885 +12887 +12888 +12889 +12890 +12891 +12892 +12894 +12897 +12898 +12899 +12901 +12903 +12904 +12905 +12907 +12908 +12910 +12913 +12914 +12915 +12916 +12920 +12921 +12923 +12924 +12925 +12927 +12928 
+12929 +12934 +12935 +12936 +12937 +12938 +12939 +12940 +12941 +12943 +12944 +12945 +12947 +12949 +12950 +12951 +12952 +12956 +12957 +12958 +12962 +12963 +12964 +12966 +12967 +12968 +12969 +12970 +12971 +12972 +12976 +12977 +12978 +12979 +12981 +12982 +12983 +12985 +12986 +12990 +12994 +12995 +12996 +12998 +13000 +13001 +13002 +13003 +13006 +13007 +13010 +13015 +13017 +13021 +13022 +13024 +13026 +13027 +13028 +13029 +13031 +13032 +13033 +13036 +13038 +13040 +13041 +13042 +13045 +13046 +13048 +13049 +13050 +13051 +13052 +13053 +13054 +13056 +13057 +13058 +13059 +13060 +13061 +13062 +13063 +13065 +13067 +13068 +13069 +13070 +13071 +13072 +13074 +13076 +13078 +13079 +13081 +13084 +13085 +13086 +13088 +13089 +13090 +13093 +13094 +13095 +13096 +13098 +13100 +13101 +13103 +13105 +13107 +13108 +13109 +13110 +13114 +13119 +13121 +13126 +13128 +13134 +13135 +13136 +13137 +13140 +13141 +13144 +13145 +13146 +13149 +13150 +13151 +13152 +13153 +13155 +13156 +13157 +13158 +13159 +13163 +13164 +13165 +13169 +13170 +13171 +13172 +13173 +13174 +13175 +13176 +13178 +13180 +13182 +13183 +13187 +13188 +13190 +13194 +13197 +13200 +13203 +13204 +13206 +13207 +13212 +13213 +13214 +13215 +13216 +13222 +13224 +13225 +13226 +13228 +13229 +13230 +13233 +13235 +13236 +13237 +13241 +13246 +13247 +13248 +13249 +13250 +13255 +13256 +13257 +13259 +13260 +13261 +13262 +13264 +13265 +13266 +13267 +13268 +13269 +13270 +13271 +13272 +13277 +13280 +13281 +13284 +13286 +13287 +13298 +13299 +13300 +13301 +13302 +13306 +13307 +13308 +13310 +13311 +13312 +13313 +13314 +13316 +13317 +13318 +13319 +13323 +13325 +13326 +13328 +13329 +13330 +13331 +13332 +13334 +13336 +13337 +13339 +13340 +13341 +13342 +13344 +13346 +13348 +13349 +13350 +13352 +13353 +13354 +13356 +13357 +13358 +13360 +13362 +13364 +13365 +13369 +13372 +13376 +13378 +13379 +13381 +13382 +13385 +13386 +13387 +13388 +13390 +13391 +13396 +13398 +13399 +13400 +13402 +13406 +13407 +13415 +13418 +13424 +13427 +13428 +13430 +13433 +13434 +13438 +13444 +13445 +13448 +13452 +13453 +13454 +13455 +13456 +13467 +13472 +13474 +13476 +13477 +13478 +13479 +13480 +13481 +13482 +13483 +13484 +13485 +13486 +13487 +13489 +13492 +13493 +13494 +13495 +13496 +13498 +13499 +13505 +13507 +13509 +13518 +13519 +13522 +13523 +13524 +13525 +13526 +13531 +13533 +13534 +13535 +13536 +13538 +13540 +13541 +13542 +13543 +13544 +13546 +13547 +13548 +13552 +13554 +13555 +13560 +13566 +13573 +13578 +13582 +13590 +13593 +13595 +13601 +13603 +13604 +13605 +13607 +13609 +13610 +13613 +13614 +13615 +13618 +13620 +13621 +13622 +13623 +13624 +13625 +13626 +13631 +13632 +13634 +13636 +13641 +13642 +13643 +13644 +13645 +13649 +13650 +13651 +13652 +13654 +13655 +13656 +13657 +13658 +13659 +13660 +13661 +13664 +13665 +13666 +13668 +13669 +13671 +13672 +13673 +13674 +13675 +13677 +13678 +13679 +13683 +13685 +13687 +13689 +13690 +13691 +13693 +13695 +13696 +13698 +13699 +13700 +13701 +13702 +13703 +13706 +13709 +13711 +13712 +13713 +13717 +13721 +13724 +13725 +13726 +13727 +13728 +13732 +13733 +13734 +13735 +13736 +13737 +13738 +13740 +13742 +13743 +13744 +13745 +13746 +13747 +13748 +13749 +13751 +13752 +13755 +13756 +13757 +13759 +13761 +13765 +13766 +13767 +13768 +13770 +13771 +13772 +13773 +13774 +13775 +13776 +13777 +13778 +13780 +13782 +13783 +13786 +13787 +13788 +13790 +13791 +13792 +13793 +13795 +13799 +13800 +13801 +13802 +13803 +13804 +13805 +13806 +13808 +13809 +13811 +13812 +13813 +13814 +13815 +13816 +13817 +13818 +13819 +13820 +13821 +13822 +13823 +13824 +13825 +13826 +13827 +13828 +13831 +13832 
+13833 +13834 +13835 +13837 +13838 +13839 +13840 +13841 +13842 +13843 +13844 +13845 +13846 +13847 +13849 +13850 +13851 +13852 +13853 +13854 +13855 +13856 +13858 +13859 +13860 +13861 +13862 +13863 +13864 +13865 +13866 +13867 +13868 +13869 +13872 +13873 +13874 +13875 +13876 +13877 +13878 +13879 +13880 +13882 +13883 +13884 +13885 +13886 +13887 +13888 +13889 +13890 +13891 +13894 +13895 +13896 +13897 +13898 +13899 +13900 +13901 +13902 +13903 +13904 +13905 +13906 +13907 +13908 +13909 +13910 +13912 +13916 +13920 +13921 +13923 +13924 +13925 +13929 +13930 +13931 +13933 +13936 +13940 +13941 +13942 +13943 +13944 +13945 +13947 +13949 +13950 +13953 +13954 +13956 +13957 +13958 +13959 +13960 +13962 +13963 +13964 +13965 +13966 +13970 +13971 +13972 +13974 +13976 +13977 +13978 +13982 +13983 +13984 +13985 +13986 +13987 +13988 +13990 +13992 +13993 +13994 +13996 +13997 +13998 +14000 +14002 +14003 +14004 +14006 +14007 +14009 +14010 +14011 +14012 +14013 +14016 +14017 +14020 +14021 +14022 +14025 +14026 +14028 +14029 +14031 +14032 +14033 +14035 +14036 +14038 +14039 +14040 +14042 +14043 +14044 +14046 +14048 +14049 +14050 +14051 +14052 +14055 +14056 +14058 +14059 +14064 +14065 +14066 +14069 +14073 +14079 +14082 +14083 +14084 +14085 +14088 +14090 +14091 +14095 +14097 +14098 +14099 +14100 +14101 +14102 +14103 +14104 +14106 +14108 +14109 +14111 +14112 +14115 +14116 +14117 +14122 +14126 +14127 +14129 +14131 +14132 +14135 +14136 +14137 +14140 +14144 +14148 +14150 +14151 +14153 +14154 +14155 +14156 +14159 +14162 +14164 +14165 +14168 +14171 +14172 +14176 +14177 +14178 +14179 +14180 +14183 +14184 +14188 +14191 +14195 +14196 +14200 +14202 +14205 +14213 +14216 +14217 +14219 +14220 +14221 +14224 +14225 +14226 +14227 +14229 +14237 +14238 +14239 +14240 +14241 +14243 +14245 +14246 +14247 +14248 +14249 +14250 +14252 +14253 +14254 +14255 +14256 +14257 +14259 +14260 +14262 +14264 +14265 +14266 +14267 +14268 +14270 +14272 +14274 +14276 +14277 +14281 +14282 +14284 +14286 +14287 +14288 +14290 +14291 +14293 +14294 +14295 +14298 +14299 +14301 +14303 +14304 +14305 +14306 +14307 +14309 +14310 +14311 +14313 +14314 +14319 +14323 +14324 +14326 +14327 +14329 +14333 +14335 +14336 +14343 +14344 +14346 +14348 +14350 +14351 +14352 +14353 +14354 +14355 +14360 +14362 +14363 +14364 +14365 +14366 +14367 +14369 +14372 +14373 +14374 +14375 +14377 +14378 +14379 +14380 +14381 +14383 +14384 +14385 +14386 +14388 +14389 +14394 +14395 +14397 +14400 +14401 +14402 +14403 +14404 +14405 +14406 +14407 +14409 +14410 +14411 +14412 +14413 +14414 +14415 +14416 +14417 +14418 +14419 +14421 +14422 +14423 +14424 +14425 +14428 +14431 +14432 +14436 +14438 +14441 +14442 +14446 +14447 +14448 +14452 +14455 +14460 +14461 +14463 +14464 +14466 +14474 +14479 +14480 +14481 +14482 +14484 +14485 +14486 +14487 +14488 +14489 +14490 +14493 +14495 +14498 +14501 +14503 +14506 +14507 +14508 +14509 +14511 +14513 +14514 +14516 +14517 +14519 +14522 +14525 +14527 +14531 +14535 +14542 +14543 +14546 +14559 +14567 +14579 +14580 +14581 +14582 +14586 +14590 +14593 +14594 +14596 +14600 +14602 +14603 +14604 +14608 +14611 +14612 +14614 +14616 +14617 +14618 +14621 +14622 +14623 +14624 +14625 +14626 +14627 +14629 +14633 +14634 +14635 +14640 +14641 +14648 +14650 +14652 +14654 +14656 +14657 +14660 +14664 +14665 +14666 +14667 +14669 +14671 +14674 +14677 +14678 +14679 +14680 +14681 +14682 +14683 +14684 +14685 +14687 +14691 +14693 +14697 +14698 +14699 +14700 +14703 +14704 +14706 +14708 +14709 +14710 +14711 +14714 +14715 +14716 +14717 +14718 +14720 +14722 +14723 +14724 +14725 +14726 +14730 +14731 +14735 
+14737 +14741 +14744 +14745 +14747 +14749 +14750 +14754 +14760 +14761 +14766 +14767 +14768 +14769 +14770 +14772 +14774 +14775 +14776 +14778 +14780 +14784 +14788 +14791 +14793 +14794 +14796 +14798 +14803 +14805 +14807 +14808 +14810 +14814 +14815 +14819 +14822 +14824 +14835 +14842 +14843 +14852 +14853 +14855 +14856 +14860 +14864 +14865 +14866 +14868 +14869 +14870 +14878 +14881 +14885 +14888 +14893 +14894 +14901 +14902 +14903 +14907 +14908 +14913 +14917 +14920 +14923 +14925 +14928 +14929 +14930 +14931 +14937 +14938 +14939 +14942 +14943 +14944 +14948 +14953 +14954 +14958 +14959 +14963 +14964 +14965 +14966 +14967 +14968 +14969 +14974 +14975 +14976 +14978 +14979 +14981 +14982 +14984 +14985 +14987 +14990 +14991 +14993 +14996 +14998 +14999 +15002 +15007 +15009 +15010 +15014 +15015 +15017 +15019 +15022 +15025 +15030 +15031 +15032 +15034 +15035 +15050 +15063 +15064 +15065 +15067 +15069 +15070 +15072 +15073 +15074 +15075 +15079 +15081 +15082 +15087 +15089 +15095 +15098 +15099 +15101 +15103 +15108 +15109 +15110 +15112 +15113 +15114 +15116 +15118 +15121 +15123 +15124 +15128 +15129 +15130 +15131 +15135 +15138 +15140 +15141 +15142 +15145 +15146 +15148 +15151 +15161 +15167 +15168 +15169 +15170 +15171 +15172 +15173 +15175 +15176 +15177 +15181 +15184 +15188 +15189 +15190 +15191 +15193 +15194 +15197 +15199 +15200 +15204 +15205 +15206 +15208 +15209 +15211 +15212 +15213 +15214 +15217 +15220 +15222 +15225 +15232 +15236 +15238 +15239 +15241 +15242 +15243 +15244 +15246 +15251 +15252 +15257 +15259 +15262 +15265 +15274 +15283 +15293 +15301 +15302 +15305 +15306 +15308 +15309 +15311 +15312 +15320 +15322 +15323 +15324 +15340 +15342 +15343 +15344 +15345 +15346 +15347 +15349 +15352 +15355 +15368 +15369 +15372 +15374 +15376 +15379 +15385 +15396 +15398 +15401 +15405 +15407 +15410 +15412 +15418 +15423 +15424 +15425 +15432 +15434 +15436 +15437 +15441 +15442 +15444 +15446 +15448 +15450 +15451 +15454 +15458 +15460 +15461 +15468 +15470 +15478 +15479 +15480 +15482 +15491 +15495 +15508 +15521 +15522 +15523 +15527 +15536 +15537 +15539 +15540 +15542 +15544 +15545 +15546 +15547 +15551 +15552 +15554 +15558 +15559 +15563 +15565 +15566 +15568 +15573 +15574 +15575 +15578 +15579 +15580 +15581 +15583 +15584 +15590 +15591 +15599 +15601 +15602 +15603 +15604 +15610 +15614 +15619 +15625 +15626 +15639 +15646 +15650 +15654 +15655 +15660 +15661 +15668 +15669 +15674 +15678 +15681 +15684 +15685 +15687 +15690 +15691 +15696 +15702 +15705 +15706 +15707 +15709 +15711 +15713 +15716 +15717 +15718 +15719 +15720 +15721 +15722 +15723 +15724 +15725 +15726 +15728 +15729 +15731 +15732 +15737 +15739 +15744 +15747 +15749 +15750 +15757 +15761 +15764 +15766 +15768 +15775 +15776 +15777 +15780 +15783 +15787 +15788 +15799 +15802 +15804 +15805 +15809 +15814 +15823 +15826 +15829 +15830 +15831 +15833 +15838 +15839 +15842 +15843 +15847 +15850 +15852 +15857 +15861 +15865 +15867 +15869 +15872 +15874 +15880 +15881 +15883 +15884 +15885 +15889 +15893 +15894 +15900 +15901 +15912 +15914 +15919 +15922 +15923 +15924 +15931 +15936 +15938 +15939 +15941 +15942 +15945 +15946 +15947 +15949 +15959 +15960 +15961 +15962 +15964 +15977 +15978 +15983 +15984 +15991 +15994 +15995 +15996 +15998 +16002 +16003 +16004 +16005 +16007 +16008 +16011 +16014 +16015 +16016 +16019 +16020 +16028 +16029 +16039 +16041 +16044 +16046 +16051 +16056 +16060 +16061 +16062 +16071 +16072 +16074 +16079 +16082 +16085 +16092 +16093 +16094 +16096 +16097 +16100 +16101 +16103 +16107 +16108 +16109 +16116 +16117 +16119 +16121 +16129 +16133 +16140 +16141 +16143 +16144 +16146 +16147 +16149 +16150 +16152 +16155 +16156 
+16158 +16159 +16163 +16165 +16169 +16172 +16174 +16177 +16178 +16179 +16187 +16192 +16193 +16198 +16199 +16200 +16201 +16214 +16215 +16217 +16219 +16225 +16227 +16236 +16241 +16243 +16244 +16251 +16256 +16258 +16259 +16262 +16264 +16271 +16274 +16280 +16281 +16284 +16288 +16289 +16290 +16291 +16292 +16296 +16302 +16307 +16312 +16313 +16314 +16315 +16317 +16318 +16319 +16322 +16328 +16334 +16335 +16341 +16342 +16343 +16347 +16348 +16349 +16357 +16360 +16363 +16364 +16373 +16375 +16376 +16379 +16380 +16384 +16388 +16389 +16390 +16391 +16392 +16393 +16397 +16400 +16405 +16414 +16416 +16418 +16421 +16423 +16424 +16425 +16426 +16432 +16435 +16436 +16437 +16444 +16447 +16455 +16456 +16459 +16460 +16463 +16474 +16476 +16477 +16478 +16481 +16494 +16498 +16503 +16504 +16508 +16509 +16512 +16513 +16517 +16522 +16526 +16529 +16530 +16540 +16542 +16545 +16547 +16551 +16552 +16555 +16560 +16568 +16576 +16584 +16585 +16587 +16588 +16591 +16592 +16593 +16599 +16603 +16605 +16610 +16616 +16618 +16619 +16622 +16626 +16630 +16635 +16648 +16649 +16651 +16653 +16654 +16655 +16659 +16660 +16665 +16668 +16669 +16670 +16672 +16680 +16689 +16693 +16694 +16695 +16704 +16705 +16708 +16713 +16726 +16727 +16729 +16730 +16735 +16736 +16739 +16740 +16744 +16745 +16750 +16752 +16755 +16759 +16760 +16763 +16764 +16765 +16774 +16777 +16779 +16783 +16787 +16789 +16790 +16791 +16792 +16800 +16801 +16803 +16805 +16812 +16813 +16817 +16818 +16825 +16829 +16833 +16834 +16836 +16838 +16841 +16844 +16845 +16848 +16849 +16850 +16851 +16855 +16865 +16873 +16874 +16878 +16879 +16880 +16881 +16882 +16885 +16887 +16890 +16892 +16894 +16898 +16900 +16902 +16905 +16907 +16908 +16911 +16916 +16917 +16920 +16928 +16931 +16940 +16948 +16957 +16962 +16963 +16965 +16967 +16970 +16972 +16973 +16975 +16977 +16981 +16982 +16988 +16989 +16993 +16994 +16998 +17003 +17004 +17007 +17008 +17010 +17013 +17017 +17021 +17022 +17026 +17029 +17032 +17037 +17038 +17039 +17040 +17052 +17053 +17057 +17063 +17081 +17087 +17094 +17107 +17110 +17111 +17114 +17118 +17121 +17126 +17128 +17133 +17135 +17141 +17144 +17146 +17155 +17156 +17158 +17159 +17165 +17167 +17169 +17171 +17178 +17180 +17183 +17186 +17188 +17192 +17193 +17203 +17208 +17210 +17211 +17217 +17228 +17231 +17248 +17249 +17253 +17254 +17255 +17264 +17268 +17269 +17272 +17274 +17275 +17280 +17286 +17290 +17297 +17298 +17299 +17301 +17305 +17306 +17309 +17310 +17312 +17313 +17314 +17316 +17317 +17319 +17326 +17329 +17330 +17338 +17339 +17340 +17341 +17342 +17345 +17346 +17348 +17350 +17351 +17353 +17354 +17355 +17356 +17359 +17360 +17361 +17362 +17363 +17364 +17366 +17367 +17369 +17370 +17373 +17374 +17375 +17376 +17377 +17379 +17380 +17382 +17384 +17385 +17387 +17388 +17389 +17392 +17393 +17394 +17395 +17396 +17400 +17401 +17402 +17403 +17404 +17405 +17406 +17407 +17408 +17409 +17410 +17411 +17412 +17413 +17414 +17415 +17416 +17417 +17419 +17420 +17422 +17423 +17424 +17425 +17426 +17427 +17429 +17430 +17431 +17432 +17435 +17440 +17442 +17444 +17447 +17448 +17449 +17450 +17451 +17452 +17455 +17456 +17457 +17458 +17459 +17460 +17463 +17464 +17465 +17466 +17468 +17470 +17471 +17482 +17486 +17492 +17493 +17498 +17499 +17503 +17504 +17508 +17509 +17510 +17512 +17516 +17517 +17518 +17520 +17521 +17522 +17523 +17526 +17529 +17534 +17539 +17540 +17551 +17560 +17565 +17566 +17567 +17568 +17569 +17570 +17571 +17574 +17578 +17579 +17584 +17586 +17588 +17591 +17594 +17595 +17596 +17598 +17599 +17601 +17603 +17605 +17606 +17610 +17611 +17612 +17613 +17615 +17618 +17619 +17621 +17625 +17626 +17627 +17630 
+17633 +17634 +17635 +17636 +17637 +17641 +17643 +17644 +17646 +17648 +17650 +17651 +17652 +17654 +17655 +17657 +17659 +17660 +17661 +17662 +17663 +17664 +17666 +17672 +17676 +17678 +17679 +17680 +17681 +17684 +17688 +17689 +17692 +17711 +17721 +17722 +17723 +17725 +17726 +17727 +17729 +17730 +17733 +17734 +17735 +17736 +17737 +17738 +17739 +17740 +17742 +17743 +17744 +17745 +17748 +17762 +17763 +17764 +17765 +17770 +17771 +17774 +17775 +17780 +17782 +17783 +17784 +17786 +17787 +17789 +17792 +17793 +17794 +17796 +17798 +17799 +17803 +17804 +17806 +17811 +17812 +17813 +17814 +17815 +17817 +17818 +17820 +17821 +17826 +17835 +17839 +17841 +17842 +17845 +17846 +17847 +17850 +17852 +17853 +17855 +17856 +17857 +17858 +17859 +17860 +17861 +17862 +17863 +17864 +17866 +17868 +17869 +17870 +17871 +17872 +17873 +17874 +17876 +17877 +17878 +17879 +17880 +17881 +17882 +17885 +17886 +17888 +17889 +17890 +17891 +17895 +17897 +17900 +17902 +17905 +17914 +17918 +17919 +17925 +17929 +17934 +17936 +17938 +17939 +17940 +17946 +17947 +17948 +17949 +17950 +17953 +17954 +17955 +17958 +17959 +17960 +17961 +17963 +17964 +17965 +17966 +17970 +17971 +17972 +17973 +17975 +17976 +17979 +17980 +17983 +17984 +17985 +17987 +17990 +17991 +17992 +17999 +18000 +18005 +18007 +18008 +18012 +18017 +18020 +18021 +18022 +18023 +18024 +18025 +18026 +18027 +18028 +18030 +18032 +18035 +18036 +18037 +18038 +18039 +18041 +18045 +18050 +18051 +18057 +18059 +18061 +18063 +18064 +18066 +18067 +18068 +18070 +18071 +18072 +18073 +18077 +18078 +18081 +18085 +18086 +18088 +18089 +18090 +18091 +18092 +18093 +18094 +18096 +18097 +18099 +18100 +18103 +18104 +18106 +18107 +18108 +18110 +18111 +18113 +18114 +18115 +18116 +18117 +18120 +18121 +18123 +18125 +18126 +18130 +18136 +18137 +18138 +18140 +18141 +18142 +18144 +18145 +18146 +18148 +18150 +18151 +18152 +18160 +18161 +18167 +18168 +18173 +18174 +18177 +18178 +18204 +18205 +18206 +18207 +18209 +18213 +18216 +18217 +18218 +18219 +18221 +18222 +18223 +18224 +18227 +18228 +18230 +18231 +18233 +18235 +18236 +18237 +18239 +18240 +18241 +18243 +18246 +18247 +18249 +18250 +18251 +18252 +18254 +18257 +18259 +18260 +18261 +18264 +18265 +18269 +18270 +18271 +18273 +18274 +18275 +18276 +18277 +18280 +18283 +18284 +18286 +18287 +18289 +18290 +18296 +18297 +18298 +18299 +18303 +18304 +18305 +18306 +18307 +18308 +18309 +18311 +18322 +18325 +18326 +18327 +18328 +18331 +18332 +18333 +18334 +18335 +18336 +18338 +18343 +18344 +18349 +18351 +18355 +18357 +18358 +18359 +18360 +18361 +18364 +18368 +18373 +18381 +18386 +18388 +18391 +18392 +18394 +18396 +18398 +18399 +18400 +18401 +18403 +18404 +18405 +18407 +18412 +18413 +18419 +18422 +18423 +18424 +18426 +18441 +18442 +18443 +18444 +18445 +18446 +18447 +18448 +18451 +18452 +18453 +18455 +18456 +18459 +18460 +18461 +18462 +18463 +18466 +18469 +18470 +18473 +18478 +18479 +18480 +18482 +18486 +18487 +18488 +18489 +18491 +18492 +18494 +18495 +18496 +18497 +18498 +18499 +18501 +18506 +18507 +18508 +18509 +18510 +18511 +18512 +18514 +18515 +18516 +18517 +18518 +18522 +18523 +18524 +18525 +18529 +18531 +18532 +18533 +18535 +18536 +18537 +18539 +18540 +18541 +18542 +18543 +18546 +18547 +18549 +18550 +18551 +18553 +18554 +18555 +18556 +18558 +18559 +18560 +18562 +18565 +18570 +18573 +18574 +18575 +18576 +18577 +18578 +18581 +18583 +18585 +18586 +18587 +18588 +18589 +18590 +18591 +18592 +18593 +18594 +18595 +18597 +18598 +18601 +18603 +18604 +18606 +18607 +18610 +18611 +18612 +18613 +18615 +18616 +18617 +18618 +18619 +18620 +18621 +18622 +18623 +18624 +18631 +18632 
+18633 +18634 +18636 +18637 +18638 +18641 +18642 +18646 +18647 +18651 +18657 +18659 +18660 +18662 +18664 +18665 +18669 +18670 +18671 +18672 +18680 +18681 +18684 +18685 +18687 +18691 +18694 +18696 +18697 +18698 +18703 +18705 +18715 +18723 +18725 +18728 +18730 +18731 +18735 +18738 +18740 +18741 +18744 +18753 +18754 +18755 +18759 +18763 +18766 +18767 +18775 +18780 +18781 +18782 +18784 +18791 +18792 +18795 +18796 +18798 +18801 +18802 +18805 +18808 +18809 +18816 +18817 +18822 +18825 +18826 +18828 +18829 +18834 +18835 +18836 +18837 +18841 +18842 +18843 +18847 +18848 +18849 +18850 +18852 +18853 +18854 +18855 +18857 +18858 +18859 +18860 +18863 +18864 +18865 +18866 +18867 +18868 +18869 +18870 +18871 +18872 +18873 +18874 +18885 +18886 +18888 +18890 +18900 +18902 +18906 +18908 +18911 +18913 +18914 +18915 +18921 +18924 +18927 +18928 +18929 +18933 +18937 +18939 +18942 +18943 +18945 +18946 +18948 +18949 +18950 +18953 +18954 +18955 +18957 +18962 +18979 +18980 +18981 +18984 +18986 +18988 +18989 +18996 +19012 +19013 +19016 +19021 +19027 +19029 +19031 +19033 +19034 +19035 +19039 +19052 +19056 +19069 +19070 +19073 +19075 +19077 +19078 +19079 +19080 +19082 +19083 +19086 +19087 +19088 +19092 +19093 +19096 +19100 +19102 +19104 +19105 +19106 +19108 +19111 +19112 +19116 +19117 +19120 +19121 +19122 +19124 +19125 +19127 +19129 +19130 +19133 +19134 +19135 +19137 +19138 +19139 +19140 +19142 +19143 +19144 +19145 +19146 +19147 +19150 +19151 +19152 +19154 +19155 +19156 +19157 +19158 +19159 +19160 +19161 +19163 +19165 +19166 +19169 +19172 +19175 +19176 +19179 +19180 +19181 +19183 +19185 +19187 +19190 +19193 +19194 +19195 +19197 +19198 +19199 +19201 +19202 +19204 +19213 +19214 +19215 +19218 +19219 +19220 +19221 +19222 +19223 +19226 +19227 +19228 +19229 +19230 +19232 +19233 +19234 +19236 +19238 +19239 +19240 +19244 +19252 +19254 +19257 +19258 +19259 +19260 +19261 +19262 +19264 +19265 +19266 +19267 +19270 +19271 +19275 +19276 +19277 +19284 +19289 +19290 +19292 +19293 +19295 +19296 +19297 +19300 +19302 +19304 +19305 +19306 +19307 +19310 +19311 +19316 +19317 +19318 +19320 +19321 +19324 +19325 +19330 +19331 +19332 +19333 +19336 +19337 +19339 +19342 +19343 +19344 +19345 +19347 +19348 +19349 +19350 +19351 +19352 +19354 +19356 +19360 +19363 +19364 +19365 +19366 +19367 +19369 +19371 +19373 +19374 +19377 +19379 +19381 +19382 +19385 +19386 +19388 +19402 +19403 +19404 +19405 +19407 +19414 +19417 +19419 +19421 +19423 +19434 +19435 +19436 +19437 +19440 +19441 +19442 +19446 +19447 +19448 +19450 +19451 +19452 +19453 +19456 +19457 +19459 +19461 +19471 +19472 +19473 +19477 +19478 +19481 +19483 +19484 +19485 +19486 +19490 +19492 +19494 +19495 +19496 +19497 +19498 +19501 +19502 +19503 +19506 +19508 +19509 +19511 +19513 +19514 +19516 +19517 +19518 +19519 +19520 +19521 +19522 +19524 +19525 +19526 +19527 +19528 +19530 +19532 +19538 +19539 +19541 +19542 +19543 +19546 +19547 +19548 +19549 +19550 +19551 +19553 +19555 +19556 +19557 +19563 +19566 +19567 +19568 +19575 +19577 +19578 +19582 +19583 +19584 +19587 +19588 +19590 +19592 +19597 +19606 +19607 +19608 +19611 +19613 +19614 +19615 +19616 +19618 +19622 +19624 +19627 +19628 +19629 +19630 +19632 +19633 +19634 +19635 +19638 +19640 +19641 +19642 +19643 +19644 +19648 +19649 +19655 +19656 +19663 +19667 +19668 +19669 +19670 +19672 +19673 +19675 +19676 +19677 +19678 +19679 +19681 +19682 +19683 +19684 +19687 +19688 +19689 +19690 +19692 +19693 +19694 +19696 +19697 +19698 +19699 +19705 +19707 +19709 +19710 +19714 +19717 +19718 +19720 +19721 +19722 +19724 +19728 +19732 +19733 +19734 +19737 +19738 +19741 
+19745 +19746 +19756 +19757 +19758 +19759 +19763 +19764 +19767 +19769 +19770 +19773 +19774 +19777 +19782 +19792 +19795 +19797 +19798 +19801 +19802 +19805 +19813 +19817 +19819 +19831 +19833 +19835 +19837 +19838 +19839 +19840 +19841 +19845 +19852 +19858 +19861 +19862 +19863 +19864 +19865 +19866 +19867 +19871 +19872 +19873 +19874 +19879 +19882 +19887 +19889 +19894 +19895 +19896 +19897 +19902 +19905 +19907 +19908 +19909 +19910 +19914 +19916 +19917 +19918 +19922 +19924 +19926 +19927 +19928 +19929 +19930 +19933 +19935 +19938 +19943 +19945 +19946 +19947 +19952 +19953 +19956 +19957 +19964 +19967 +19968 +19969 +19970 +19971 +19972 +19973 +19974 +19975 +19976 +19978 +19985 +19988 +19990 +19992 +19997 +19998 +19999 +20001 +20008 +20010 +20018 +20019 +20020 +20021 +20024 +20025 +20026 +20029 +20030 +20032 +20037 +20042 +20043 +20044 +20045 +20047 +20049 +20050 +20053 +20054 +20055 +20056 +20057 +20058 +20061 +20062 +20063 +20064 +20066 +20068 +20070 +20071 +20074 +20081 +20082 +20084 +20086 +20087 +20092 +20093 +20097 +20098 +20100 +20102 +20103 +20104 +20108 +20110 +20111 +20115 +20116 +20117 +20118 +20122 +20123 +20126 +20127 +20128 +20129 +20140 +20141 +20143 +20145 +20147 +20148 +20149 +20153 +20154 +20155 +20162 +20163 +20165 +20171 +20172 +20174 +20178 +20185 +20187 +20192 +20193 +20195 +20198 +20199 +20207 +20209 +20211 +20214 +20215 +20216 +20218 +20219 +20220 +20221 +20222 +20223 +20224 +20225 +20226 +20228 +20230 +20231 +20232 +20233 +20236 +20239 +20240 +20248 +20250 +20251 +20257 +20262 +20265 +20266 +20267 +20269 +20271 +20278 +20280 +20281 +20282 +20283 +20284 +20286 +20287 +20288 +20289 +20290 +20291 +20295 +20296 +20297 +20300 +20303 +20305 +20311 +20312 +20317 +20318 +20319 +20325 +20326 +20327 +20328 +20330 +20333 +20335 +20337 +20347 +20348 +20349 +20355 +20356 +20357 +20358 +20360 +20362 +20363 +20364 +20365 +20366 +20367 +20368 +20373 +20374 +20376 +20379 +20381 +20385 +20386 +20390 +20392 +20394 +20397 +20398 +20399 +20400 +20405 +20406 +20407 +20408 +20409 +20410 +20413 +20414 +20415 +20416 +20417 +20418 +20420 +20422 +20424 +20425 +20427 +20437 +20441 +20444 +20446 +20449 +20452 +20456 +20457 +20460 +20462 +20471 +20472 +20473 +20475 +20476 +20478 +20480 +20481 +20485 +20489 +20492 +20493 +20494 +20495 +20498 +20499 +20500 +20504 +20505 +20506 +20511 +20512 +20515 +20516 +20517 +20518 +20519 +20520 +20523 +20524 +20526 +20528 +20529 +20530 +20534 +20538 +20542 +20543 +20546 +20549 +20550 +20552 +20553 +20554 +20555 +20557 +20558 +20562 +20563 +20566 +20568 +20569 +20572 +20575 +20577 +20578 +20581 +20583 +20586 +20589 +20590 +20591 +20592 +20594 +20595 +20596 +20597 +20598 +20601 +20603 +20604 +20606 +20607 +20609 +20613 +20614 +20617 +20618 +20619 +20621 +20622 +20624 +20626 +20628 +20629 +20630 +20631 +20635 +20639 +20641 +20642 +20643 +20644 +20651 +20652 +20654 +20656 +20660 +20661 +20662 +20663 +20664 +20665 +20666 +20667 +20668 +20669 +20670 +20672 +20679 +20680 +20682 +20684 +20687 +20688 +20689 +20690 +20692 +20693 +20694 +20695 +20696 +20701 +20702 +20704 +20705 +20706 +20707 +20710 +20711 +20712 +20713 +20714 +20715 +20716 +20717 +20719 +20720 +20724 +20725 +20727 +20728 +20732 +20733 +20734 +20738 +20742 +20749 +20754 +20755 +20756 +20757 +20763 +20764 +20767 +20768 +20777 +20783 +20786 +20787 +20788 +20789 +20790 +20791 +20792 +20793 +20794 +20797 +20798 +20800 +20806 +20808 +20809 +20812 +20814 +20819 +20823 +20824 +20825 +20830 +20831 +20833 +20836 +20837 +20839 +20840 +20843 +20847 +20848 +20850 +20851 +20852 +20853 +20854 +20855 +20859 +20860 +20861 +20863 
+20866 +20870 +20873 +20876 +20877 +20880 +20881 +20882 +20884 +20886 +20890 +20895 +20897 +20898 +20900 +20902 +20903 +20904 +20906 +20908 +20911 +20913 +20917 +20919 +20920 +20921 +20926 +20927 +20936 +20937 +20938 +20940 +20941 +20942 +20943 +20946 +20947 +20949 +20950 +20955 +20956 +20959 +20963 +20968 +20979 +20980 +20981 +20982 +20985 +20988 +20989 +20998 +21001 +21002 +21007 +21009 +21012 +21014 +21017 +21024 +21025 +21026 +21027 +21028 +21031 +21035 +21037 +21038 +21039 +21040 +21042 +21043 +21044 +21045 +21046 +21047 +21048 +21049 +21050 +21051 +21053 +21054 +21055 +21056 +21057 +21058 +21059 +21063 +21064 +21065 +21066 +21067 +21068 +21069 +21072 +21076 +21078 +21083 +21084 +21086 +21087 +21088 +21092 +21094 +21100 +21101 +21112 +21113 +21115 +21119 +21121 +21122 +21124 +21125 +21128 +21133 +21136 +21138 +21145 +21146 +21147 +21148 +21149 +21151 +21153 +21156 +21158 +21159 +21163 +21166 +21169 +21170 +21171 +21172 +21174 +21179 +21180 +21181 +21186 +21189 +21190 +21196 +21197 +21198 +21199 +21200 +21201 +21202 +21204 +21219 +21221 +21224 +21229 +21232 +21238 +21252 +21257 +21268 +21269 +21271 +21279 +21281 +21282 +21284 +21286 +21287 +21297 +21303 +21305 +21307 +21311 +21312 +21313 +21315 +21316 +21317 +21318 +21319 +21321 +21327 +21336 +21337 +21339 +21343 +21345 +21349 +21352 +21356 +21357 +21358 +21361 +21363 +21369 +21370 +21374 +21376 +21377 +21378 +21382 +21386 +21388 +21389 +21397 +21398 +21402 +21407 +21408 +21411 +21414 +21415 +21419 +21425 +21428 +21429 +21431 +21432 +21433 +21438 +21441 +21451 +21459 +21464 +21467 +21469 +21476 +21479 +21484 +21485 +21486 +21489 +21494 +21495 +21497 +21501 +21502 +21507 +21511 +21515 +21516 +21517 +21519 +21522 +21524 +21528 +21529 +21532 +21533 +21534 +21537 +21541 +21545 +21547 +21548 +21549 +21550 +21554 +21560 +21563 +21569 +21573 +21576 +21578 +21579 +21580 +21581 +21585 +21589 +21590 +21591 +21598 +21601 +21604 +21606 +21611 +21615 +21618 +21620 +21623 +21625 +21627 +21635 +21637 +21638 +21641 +21644 +21645 +21648 +21649 +21650 +21659 +21661 +21662 +21663 +21665 +21666 +21668 +21669 +21672 +21673 +21675 +21678 +21679 +21680 +21686 +21688 +21689 +21690 +21692 +21702 +21711 +21712 +21713 +21716 +21717 +21721 +21722 +21723 +21724 +21727 +21728 +21729 +21734 +21739 +21740 +21741 +21743 +21744 +21747 +21748 +21749 +21754 +21757 +21758 +21760 +21761 +21762 +21763 +21765 +21772 +21773 +21774 +21777 +21778 +21781 +21782 +21784 +21786 +21791 +21792 +21795 +21796 +21800 +21801 +21803 +21804 +21806 +21811 +21815 +21816 +21817 +21818 +21819 +21823 +21826 +21827 +21828 +21829 +21830 +21831 +21832 +21834 +21836 +21838 +21839 +21841 +21842 diff --git a/results/imagenet21k_goog_to_22k_indices.txt b/results/imagenet21k_goog_to_22k_indices.txt new file mode 100644 index 0000000000..12c05e6b11 --- /dev/null +++ b/results/imagenet21k_goog_to_22k_indices.txt @@ -0,0 +1,21841 @@ +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +97 +98 +99 +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +123 +124 +125 +126 +127 +128 +129 +130 +131 +132 +133 +134 +135 +136 +137 +138 +139 +140 +141 +142 +143 +144 +145 +146 +147 +148 +149 +150 +151 +152 +153 +154 +155 +156 +157 +158 
+159 +160 +161 +162 +163 +164 +165 +166 +167 +168 +169 +170 +171 +172 +173 +174 +175 +176 +177 +178 +179 +180 +181 +182 +183 +184 +185 +186 +187 +188 +189 +190 +191 +192 +193 +194 +195 +196 +197 +198 +199 +200 +201 +202 +203 +204 +205 +206 +207 +208 +209 +210 +211 +212 +213 +214 +215 +216 +217 +218 +219 +220 +221 +222 +223 +224 +225 +226 +227 +228 +229 +230 +231 +232 +233 +234 +235 +236 +237 +238 +239 +240 +241 +242 +243 +244 +245 +246 +247 +248 +249 +250 +251 +252 +253 +254 +255 +256 +257 +258 +259 +260 +261 +262 +263 +264 +265 +266 +267 +268 +269 +270 +271 +272 +273 +274 +275 +276 +277 +278 +279 +280 +281 +282 +283 +284 +285 +286 +287 +288 +289 +290 +291 +292 +293 +294 +295 +296 +297 +298 +299 +300 +301 +302 +303 +304 +305 +306 +307 +308 +309 +310 +311 +312 +313 +314 +315 +316 +317 +318 +319 +320 +321 +322 +323 +324 +325 +326 +327 +328 +329 +330 +331 +332 +333 +334 +335 +336 +337 +338 +339 +340 +341 +342 +343 +344 +345 +346 +347 +348 +349 +350 +351 +352 +353 +354 +355 +356 +357 +358 +359 +360 +361 +362 +363 +364 +365 +366 +367 +368 +369 +370 +371 +372 +373 +374 +375 +376 +377 +378 +379 +380 +381 +382 +383 +384 +385 +386 +387 +388 +389 +390 +391 +392 +393 +394 +395 +396 +397 +398 +399 +400 +401 +402 +403 +404 +405 +406 +407 +408 +409 +410 +411 +412 +413 +414 +415 +416 +417 +418 +419 +420 +421 +422 +423 +424 +425 +426 +427 +428 +429 +430 +431 +432 +433 +434 +435 +436 +437 +438 +439 +440 +441 +442 +443 +444 +445 +446 +447 +448 +449 +450 +451 +452 +453 +454 +455 +456 +457 +458 +459 +460 +461 +462 +463 +464 +465 +466 +467 +468 +469 +470 +471 +472 +473 +474 +475 +476 +477 +478 +479 +480 +481 +482 +483 +484 +485 +486 +487 +488 +489 +490 +491 +492 +493 +494 +495 +496 +497 +498 +499 +500 +501 +502 +503 +504 +505 +506 +507 +508 +509 +510 +511 +512 +513 +514 +515 +516 +517 +518 +519 +520 +521 +522 +523 +524 +525 +526 +527 +528 +529 +530 +531 +532 +533 +534 +535 +536 +537 +538 +539 +540 +541 +542 +543 +544 +545 +546 +547 +548 +549 +550 +551 +552 +553 +554 +555 +556 +557 +558 +559 +560 +561 +562 +563 +564 +565 +566 +567 +568 +569 +570 +571 +572 +573 +574 +575 +576 +577 +578 +579 +580 +581 +582 +583 +584 +585 +586 +587 +588 +589 +590 +591 +592 +593 +594 +595 +596 +597 +598 +599 +600 +601 +602 +603 +604 +605 +606 +607 +608 +609 +610 +611 +612 +613 +614 +615 +616 +617 +618 +619 +620 +621 +622 +623 +624 +625 +626 +627 +628 +629 +630 +631 +632 +633 +634 +635 +636 +637 +638 +639 +640 +641 +642 +643 +644 +645 +646 +647 +648 +649 +650 +651 +652 +653 +654 +655 +656 +657 +658 +659 +660 +661 +662 +663 +664 +665 +666 +667 +668 +669 +670 +671 +672 +673 +674 +675 +676 +677 +678 +679 +680 +681 +682 +683 +684 +685 +686 +687 +688 +689 +690 +691 +692 +693 +694 +695 +696 +697 +698 +699 +700 +701 +702 +703 +704 +705 +706 +707 +708 +709 +710 +711 +712 +713 +714 +715 +716 +717 +718 +719 +720 +721 +722 +723 +724 +725 +726 +727 +728 +729 +730 +731 +732 +733 +734 +735 +736 +737 +738 +739 +740 +741 +742 +743 +744 +745 +746 +747 +748 +749 +750 +751 +752 +753 +754 +755 +756 +757 +758 +759 +760 +761 +762 +763 +764 +765 +766 +767 +768 +769 +770 +771 +772 +773 +774 +775 +776 +777 +778 +779 +780 +781 +782 +783 +784 +785 +786 +787 +788 +789 +790 +791 +792 +793 +794 +795 +796 +797 +798 +799 +800 +801 +802 +803 +804 +805 +806 +807 +808 +809 +810 +811 +812 +813 +814 +815 +816 +817 +818 +819 +820 +821 +822 +823 +824 +825 +826 +827 +828 +829 +830 +831 +832 +833 +834 +835 +836 +837 +838 +839 +840 +841 +842 +843 +844 +845 +846 +847 +848 +849 +850 +851 +852 +853 +854 +855 +856 +857 +858 +859 +860 +861 +862 +863 +864 +865 +866 +867 +868 +869 
+870 +871 +872 +873 +874 +875 +876 +877 +878 +879 +880 +881 +882 +883 +884 +885 +886 +887 +888 +889 +890 +891 +892 +893 +894 +895 +896 +897 +898 +899 +900 +901 +902 +903 +904 +905 +906 +907 +908 +909 +910 +911 +912 +913 +914 +915 +916 +917 +918 +919 +920 +921 +922 +923 +924 +925 +926 +927 +928 +929 +930 +931 +932 +933 +934 +935 +936 +937 +938 +939 +940 +941 +942 +943 +944 +945 +946 +947 +948 +949 +950 +951 +952 +953 +954 +955 +956 +957 +958 +959 +960 +961 +962 +963 +964 +965 +966 +967 +968 +969 +970 +971 +972 +973 +974 +975 +976 +977 +978 +979 +980 +981 +982 +983 +984 +985 +986 +987 +988 +989 +990 +991 +992 +993 +994 +995 +996 +997 +998 +999 +1000 +1001 +1002 +1003 +1004 +1005 +1006 +1007 +1008 +1009 +1010 +1011 +1012 +1013 +1014 +1015 +1016 +1017 +1018 +1019 +1020 +1021 +1022 +1023 +1024 +1025 +1026 +1027 +1028 +1029 +1030 +1031 +1032 +1033 +1034 +1035 +1036 +1037 +1038 +1039 +1040 +1041 +1042 +1043 +1044 +1045 +1046 +1047 +1048 +1049 +1050 +1051 +1052 +1053 +1054 +1055 +1056 +1057 +1058 +1059 +1060 +1061 +1062 +1063 +1064 +1065 +1066 +1067 +1068 +1069 +1070 +1071 +1072 +1073 +1074 +1075 +1076 +1077 +1078 +1079 +1080 +1081 +1082 +1083 +1084 +1085 +1086 +1087 +1088 +1089 +1090 +1091 +1092 +1093 +1094 +1095 +1096 +1097 +1098 +1099 +1100 +1101 +1102 +1103 +1104 +1105 +1106 +1107 +1108 +1109 +1110 +1111 +1112 +1113 +1114 +1115 +1116 +1117 +1118 +1119 +1120 +1121 +1122 +1123 +1124 +1125 +1126 +1127 +1128 +1129 +1130 +1131 +1132 +1133 +1134 +1135 +1136 +1137 +1138 +1139 +1140 +1141 +1142 +1143 +1144 +1145 +1146 +1147 +1148 +1149 +1150 +1151 +1152 +1153 +1154 +1155 +1156 +1157 +1158 +1159 +1160 +1161 +1162 +1163 +1164 +1165 +1166 +1167 +1168 +1169 +1170 +1171 +1172 +1173 +1174 +1175 +1176 +1177 +1178 +1179 +1180 +1181 +1182 +1183 +1184 +1185 +1186 +1187 +1188 +1189 +1190 +1191 +1192 +1193 +1194 +1195 +1196 +1197 +1198 +1199 +1200 +1201 +1202 +1203 +1204 +1205 +1206 +1207 +1208 +1209 +1210 +1211 +1212 +1213 +1214 +1215 +1216 +1217 +1218 +1219 +1220 +1221 +1222 +1223 +1224 +1225 +1226 +1227 +1228 +1229 +1230 +1231 +1232 +1233 +1234 +1235 +1236 +1237 +1238 +1239 +1240 +1241 +1242 +1243 +1244 +1245 +1246 +1247 +1248 +1249 +1250 +1251 +1252 +1253 +1254 +1255 +1256 +1257 +1258 +1259 +1260 +1261 +1262 +1263 +1264 +1265 +1266 +1267 +1268 +1269 +1270 +1271 +1272 +1273 +1274 +1275 +1276 +1277 +1278 +1279 +1280 +1281 +1282 +1283 +1284 +1285 +1286 +1287 +1288 +1289 +1290 +1291 +1292 +1293 +1294 +1295 +1296 +1297 +1298 +1299 +1300 +1301 +1302 +1303 +1304 +1305 +1306 +1307 +1308 +1309 +1310 +1311 +1312 +1313 +1314 +1315 +1316 +1317 +1318 +1319 +1320 +1321 +1322 +1323 +1324 +1325 +1326 +1327 +1328 +1329 +1330 +1331 +1332 +1333 +1334 +1335 +1336 +1337 +1338 +1339 +1340 +1341 +1342 +1343 +1344 +1345 +1346 +1347 +1348 +1349 +1350 +1351 +1352 +1353 +1354 +1355 +1356 +1357 +1358 +1359 +1360 +1361 +1362 +1363 +1364 +1365 +1366 +1367 +1368 +1369 +1370 +1371 +1372 +1373 +1374 +1375 +1376 +1377 +1378 +1379 +1380 +1381 +1382 +1383 +1384 +1385 +1386 +1387 +1388 +1389 +1390 +1391 +1392 +1393 +1394 +1395 +1396 +1397 +1398 +1399 +1400 +1401 +1402 +1403 +1404 +1405 +1406 +1407 +1408 +1409 +1410 +1411 +1412 +1413 +1414 +1415 +1416 +1417 +1418 +1419 +1420 +1421 +1422 +1423 +1424 +1425 +1426 +1427 +1428 +1429 +1430 +1431 +1432 +1433 +1434 +1435 +1436 +1437 +1438 +1439 +1440 +1441 +1442 +1443 +1444 +1445 +1446 +1447 +1448 +1449 +1450 +1451 +1452 +1453 +1454 +1455 +1456 +1457 +1458 +1459 +1460 +1461 +1462 +1463 +1464 +1465 +1466 +1467 +1468 +1469 +1470 +1471 +1472 +1473 +1474 +1475 +1476 +1477 +1478 +1479 +1480 +1481 +1482 +1483 
+1484 +1485 +1486 +1487 +1488 +1489 +1490 +1491 +1492 +1493 +1494 +1495 +1496 +1497 +1498 +1499 +1500 +1501 +1502 +1503 +1504 +1505 +1506 +1507 +1508 +1509 +1510 +1511 +1512 +1513 +1514 +1515 +1516 +1517 +1518 +1519 +1520 +1521 +1522 +1523 +1524 +1525 +1526 +1527 +1528 +1529 +1530 +1531 +1532 +1533 +1534 +1535 +1536 +1537 +1538 +1539 +1540 +1541 +1542 +1543 +1544 +1545 +1546 +1547 +1548 +1549 +1550 +1551 +1552 +1553 +1554 +1555 +1556 +1557 +1558 +1559 +1560 +1561 +1562 +1563 +1564 +1565 +1566 +1567 +1568 +1569 +1570 +1571 +1572 +1573 +1574 +1575 +1576 +1577 +1578 +1579 +1580 +1581 +1582 +1583 +1584 +1585 +1586 +1587 +1588 +1589 +1590 +1591 +1592 +1593 +1594 +1595 +1596 +1597 +1598 +1599 +1600 +1601 +1602 +1603 +1604 +1605 +1606 +1607 +1608 +1609 +1610 +1611 +1612 +1613 +1614 +1615 +1616 +1617 +1618 +1619 +1620 +1621 +1622 +1623 +1624 +1625 +1626 +1627 +1628 +1629 +1630 +1631 +1632 +1633 +1634 +1635 +1636 +1637 +1638 +1639 +1640 +1641 +1642 +1643 +1644 +1645 +1646 +1647 +1648 +1649 +1650 +1651 +1652 +1653 +1654 +1655 +1656 +1657 +1658 +1659 +1660 +1661 +1662 +1663 +1664 +1665 +1666 +1667 +1668 +1669 +1670 +1671 +1672 +1673 +1674 +1675 +1676 +1677 +1678 +1679 +1680 +1681 +1682 +1683 +1684 +1685 +1686 +1687 +1688 +1689 +1690 +1691 +1692 +1693 +1694 +1695 +1696 +1697 +1698 +1699 +1700 +1701 +1702 +1703 +1704 +1705 +1706 +1707 +1708 +1709 +1710 +1711 +1712 +1713 +1714 +1715 +1716 +1717 +1718 +1719 +1720 +1721 +1722 +1723 +1724 +1725 +1726 +1727 +1728 +1729 +1730 +1731 +1732 +1733 +1734 +1735 +1736 +1737 +1738 +1739 +1740 +1741 +1742 +1743 +1744 +1745 +1746 +1747 +1748 +1749 +1750 +1751 +1752 +1753 +1754 +1755 +1756 +1757 +1758 +1759 +1760 +1761 +1762 +1763 +1764 +1765 +1766 +1767 +1768 +1769 +1770 +1771 +1772 +1773 +1774 +1775 +1776 +1777 +1778 +1779 +1780 +1781 +1782 +1783 +1784 +1785 +1786 +1787 +1788 +1789 +1790 +1791 +1792 +1793 +1794 +1795 +1796 +1797 +1798 +1799 +1800 +1801 +1802 +1803 +1804 +1805 +1806 +1807 +1808 +1809 +1810 +1811 +1812 +1813 +1814 +1815 +1816 +1817 +1818 +1819 +1820 +1821 +1822 +1823 +1824 +1825 +1826 +1827 +1828 +1829 +1830 +1831 +1832 +1833 +1834 +1835 +1836 +1837 +1838 +1839 +1840 +1841 +1842 +1843 +1844 +1845 +1846 +1847 +1848 +1849 +1850 +1851 +1852 +1853 +1854 +1855 +1856 +1857 +1858 +1859 +1860 +1861 +1862 +1863 +1864 +1865 +1866 +1867 +1868 +1869 +1870 +1871 +1872 +1873 +1874 +1875 +1876 +1877 +1878 +1879 +1880 +1881 +1882 +1883 +1884 +1885 +1886 +1887 +1888 +1889 +1890 +1891 +1892 +1893 +1894 +1895 +1896 +1897 +1898 +1899 +1900 +1901 +1902 +1903 +1904 +1905 +1906 +1907 +1908 +1909 +1910 +1911 +1912 +1913 +1914 +1915 +1916 +1917 +1918 +1919 +1920 +1921 +1922 +1923 +1924 +1925 +1926 +1927 +1928 +1929 +1930 +1931 +1932 +1933 +1934 +1935 +1936 +1937 +1938 +1939 +1940 +1941 +1942 +1943 +1944 +1945 +1946 +1947 +1948 +1949 +1950 +1951 +1952 +1953 +1954 +1955 +1956 +1957 +1958 +1959 +1960 +1961 +1962 +1963 +1964 +1965 +1966 +1967 +1968 +1969 +1970 +1971 +1972 +1973 +1974 +1975 +1976 +1977 +1978 +1979 +1980 +1981 +1982 +1983 +1984 +1985 +1986 +1987 +1988 +1989 +1990 +1991 +1992 +1993 +1994 +1995 +1996 +1997 +1998 +1999 +2000 +2001 +2002 +2003 +2004 +2005 +2006 +2007 +2008 +2009 +2010 +2011 +2012 +2013 +2014 +2015 +2016 +2017 +2018 +2019 +2020 +2021 +2022 +2023 +2024 +2025 +2026 +2027 +2028 +2029 +2030 +2031 +2032 +2033 +2034 +2035 +2036 +2037 +2038 +2039 +2040 +2041 +2042 +2043 +2044 +2045 +2046 +2047 +2048 +2049 +2050 +2051 +2052 +2053 +2054 +2055 +2056 +2057 +2058 +2059 +2060 +2061 +2062 +2063 +2064 +2065 +2066 +2067 +2068 +2069 +2070 +2071 +2072 +2073 +2074 +2075 
+2076 +2077 +2078 +2079 +2080 +2081 +2082 +2083 +2084 +2085 +2086 +2087 +2088 +2089 +2090 +2091 +2092 +2093 +2094 +2095 +2096 +2097 +2098 +2099 +2100 +2101 +2102 +2103 +2104 +2105 +2106 +2107 +2108 +2109 +2110 +2111 +2112 +2113 +2114 +2115 +2116 +2117 +2118 +2119 +2120 +2121 +2122 +2123 +2124 +2125 +2126 +2127 +2128 +2129 +2130 +2131 +2132 +2133 +2134 +2135 +2136 +2137 +2138 +2139 +2140 +2141 +2142 +2143 +2144 +2145 +2146 +2147 +2148 +2149 +2150 +2151 +2152 +2153 +2154 +2155 +2156 +2157 +2158 +2159 +2160 +2161 +2162 +2163 +2164 +2165 +2166 +2167 +2168 +2169 +2170 +2171 +2172 +2173 +2174 +2175 +2176 +2177 +2178 +2179 +2180 +2181 +2182 +2183 +2184 +2185 +2186 +2187 +2188 +2189 +2190 +2191 +2192 +2193 +2194 +2195 +2196 +2197 +2198 +2199 +2200 +2201 +2202 +2203 +2204 +2205 +2206 +2207 +2208 +2209 +2210 +2211 +2212 +2213 +2214 +2215 +2216 +2217 +2218 +2219 +2220 +2221 +2222 +2223 +2224 +2225 +2226 +2227 +2228 +2229 +2230 +2231 +2232 +2233 +2234 +2235 +2236 +2237 +2238 +2239 +2240 +2241 +2242 +2243 +2244 +2245 +2246 +2247 +2248 +2249 +2250 +2251 +2252 +2253 +2254 +2255 +2256 +2257 +2258 +2259 +2260 +2261 +2262 +2263 +2264 +2265 +2266 +2267 +2268 +2269 +2270 +2271 +2272 +2273 +2274 +2275 +2276 +2277 +2278 +2279 +2280 +2281 +2282 +2283 +2284 +2285 +2286 +2287 +2288 +2289 +2290 +2291 +2292 +2293 +2294 +2295 +2296 +2297 +2298 +2299 +2300 +2301 +2302 +2303 +2304 +2305 +2306 +2307 +2308 +2309 +2310 +2311 +2312 +2313 +2314 +2315 +2316 +2317 +2318 +2319 +2320 +2321 +2322 +2323 +2324 +2325 +2326 +2327 +2328 +2329 +2330 +2331 +2332 +2333 +2334 +2335 +2336 +2337 +2338 +2339 +2340 +2341 +2342 +2343 +2344 +2345 +2346 +2347 +2348 +2349 +2350 +2351 +2352 +2353 +2354 +2355 +2356 +2357 +2358 +2359 +2360 +2361 +2362 +2363 +2364 +2365 +2366 +2367 +2368 +2369 +2370 +2371 +2372 +2373 +2374 +2375 +2376 +2377 +2378 +2379 +2380 +2381 +2382 +2383 +2384 +2385 +2386 +2387 +2388 +2389 +2390 +2391 +2392 +2393 +2394 +2395 +2396 +2397 +2398 +2399 +2400 +2401 +2402 +2403 +2404 +2405 +2406 +2407 +2408 +2409 +2410 +2411 +2412 +2413 +2414 +2415 +2416 +2417 +2418 +2419 +2420 +2421 +2422 +2423 +2424 +2425 +2426 +2427 +2428 +2429 +2430 +2431 +2432 +2433 +2434 +2435 +2436 +2437 +2438 +2439 +2440 +2441 +2442 +2443 +2444 +2445 +2446 +2447 +2448 +2449 +2450 +2451 +2452 +2453 +2454 +2455 +2456 +2457 +2458 +2459 +2460 +2461 +2462 +2463 +2464 +2465 +2466 +2467 +2468 +2469 +2470 +2471 +2472 +2473 +2474 +2475 +2476 +2477 +2478 +2479 +2480 +2481 +2482 +2483 +2484 +2485 +2486 +2487 +2488 +2489 +2490 +2491 +2492 +2493 +2494 +2495 +2496 +2497 +2498 +2499 +2500 +2501 +2502 +2503 +2504 +2505 +2506 +2507 +2508 +2509 +2510 +2511 +2512 +2513 +2514 +2515 +2516 +2517 +2518 +2519 +2520 +2521 +2522 +2523 +2524 +2525 +2526 +2527 +2528 +2529 +2530 +2531 +2532 +2533 +2534 +2535 +2536 +2537 +2538 +2539 +2540 +2541 +2542 +2543 +2544 +2545 +2546 +2547 +2548 +2549 +2550 +2551 +2552 +2553 +2554 +2555 +2556 +2557 +2558 +2559 +2560 +2561 +2562 +2563 +2564 +2565 +2566 +2567 +2568 +2569 +2570 +2571 +2572 +2573 +2574 +2575 +2576 +2577 +2578 +2579 +2580 +2581 +2582 +2583 +2584 +2585 +2586 +2587 +2588 +2589 +2590 +2591 +2592 +2593 +2594 +2595 +2596 +2597 +2598 +2599 +2600 +2601 +2602 +2603 +2604 +2605 +2606 +2607 +2608 +2609 +2610 +2611 +2612 +2613 +2614 +2615 +2616 +2617 +2618 +2619 +2620 +2621 +2622 +2623 +2624 +2625 +2626 +2627 +2628 +2629 +2630 +2631 +2632 +2633 +2634 +2635 +2636 +2637 +2638 +2639 +2640 +2641 +2642 +2643 +2644 +2645 +2646 +2647 +2648 +2649 +2650 +2651 +2652 +2653 +2654 +2655 +2656 +2657 +2658 +2659 +2660 +2661 +2662 +2663 +2664 +2665 +2666 +2667 
+2668 +2669 +2670 +2671 +2672 +2673 +2674 +2675 +2676 +2677 +2678 +2679 +2680 +2681 +2682 +2683 +2684 +2685 +2686 +2687 +2688 +2689 +2690 +2691 +2692 +2693 +2694 +2695 +2696 +2697 +2698 +2699 +2700 +2701 +2702 +2703 +2704 +2705 +2706 +2707 +2708 +2709 +2710 +2711 +2712 +2713 +2714 +2715 +2716 +2717 +2718 +2719 +2720 +2721 +2722 +2723 +2724 +2725 +2726 +2727 +2728 +2729 +2730 +2731 +2732 +2733 +2734 +2735 +2736 +2737 +2738 +2739 +2740 +2741 +2742 +2743 +2744 +2745 +2746 +2747 +2748 +2749 +2750 +2751 +2752 +2753 +2754 +2755 +2756 +2757 +2758 +2759 +2760 +2761 +2762 +2763 +2764 +2765 +2766 +2767 +2768 +2769 +2770 +2771 +2772 +2773 +2774 +2775 +2776 +2777 +2778 +2779 +2780 +2781 +2782 +2783 +2784 +2785 +2786 +2787 +2788 +2789 +2790 +2791 +2792 +2793 +2794 +2795 +2796 +2797 +2798 +2799 +2800 +2801 +2802 +2803 +2804 +2805 +2806 +2807 +2808 +2809 +2810 +2811 +2812 +2813 +2814 +2815 +2816 +2817 +2818 +2819 +2820 +2821 +2822 +2823 +2824 +2825 +2826 +2827 +2828 +2829 +2830 +2831 +2832 +2833 +2834 +2835 +2836 +2837 +2838 +2839 +2840 +2841 +2842 +2843 +2844 +2845 +2846 +2847 +2848 +2849 +2850 +2851 +2852 +2853 +2854 +2855 +2856 +2857 +2858 +2859 +2860 +2861 +2862 +2863 +2864 +2865 +2866 +2867 +2868 +2869 +2870 +2871 +2872 +2873 +2874 +2875 +2876 +2877 +2878 +2879 +2880 +2881 +2882 +2883 +2884 +2885 +2886 +2887 +2888 +2889 +2890 +2891 +2892 +2893 +2894 +2895 +2896 +2897 +2898 +2899 +2900 +2901 +2902 +2903 +2904 +2905 +2906 +2907 +2908 +2909 +2910 +2911 +2912 +2913 +2914 +2915 +2916 +2917 +2918 +2919 +2920 +2921 +2922 +2923 +2924 +2925 +2926 +2927 +2928 +2929 +2930 +2931 +2932 +2933 +2934 +2935 +2936 +2937 +2938 +2939 +2940 +2941 +2942 +2943 +2944 +2945 +2946 +2947 +2948 +2949 +2950 +2951 +2952 +2953 +2954 +2955 +2956 +2957 +2958 +2959 +2960 +2961 +2962 +2963 +2964 +2965 +2966 +2967 +2968 +2969 +2970 +2971 +2972 +2973 +2974 +2975 +2976 +2977 +2978 +2979 +2980 +2981 +2982 +2983 +2984 +2985 +2986 +2987 +2988 +2989 +2990 +2991 +2992 +2993 +2994 +2995 +2996 +2997 +2998 +2999 +3000 +3001 +3002 +3003 +3004 +3005 +3006 +3007 +3008 +3009 +3010 +3011 +3012 +3013 +3014 +3015 +3016 +3017 +3018 +3019 +3020 +3021 +3022 +3023 +3024 +3025 +3026 +3027 +3028 +3029 +3030 +3031 +3032 +3033 +3034 +3035 +3036 +3037 +3038 +3039 +3040 +3041 +3042 +3043 +3044 +3045 +3046 +3047 +3048 +3049 +3050 +3051 +3052 +3053 +3054 +3055 +3056 +3057 +3058 +3059 +3060 +3061 +3062 +3063 +3064 +3065 +3066 +3067 +3068 +3069 +3070 +3071 +3072 +3073 +3074 +3075 +3076 +3077 +3078 +3079 +3080 +3081 +3082 +3083 +3084 +3085 +3086 +3087 +3088 +3089 +3090 +3091 +3092 +3093 +3094 +3095 +3096 +3097 +3098 +3099 +3100 +3101 +3102 +3103 +3104 +3105 +3106 +3107 +3108 +3109 +3110 +3111 +3112 +3113 +3114 +3115 +3116 +3117 +3118 +3119 +3120 +3121 +3122 +3123 +3124 +3125 +3126 +3127 +3128 +3129 +3130 +3131 +3132 +3133 +3134 +3135 +3136 +3137 +3138 +3139 +3140 +3141 +3142 +3143 +3144 +3145 +3146 +3147 +3148 +3149 +3150 +3151 +3152 +3153 +3154 +3155 +3156 +3157 +3158 +3159 +3160 +3161 +3162 +3163 +3164 +3165 +3166 +3167 +3168 +3169 +3170 +3171 +3172 +3173 +3174 +3175 +3176 +3177 +3178 +3179 +3180 +3181 +3182 +3183 +3184 +3185 +3186 +3187 +3188 +3189 +3190 +3191 +3192 +3193 +3194 +3195 +3196 +3197 +3198 +3199 +3200 +3201 +3202 +3203 +3204 +3205 +3206 +3207 +3208 +3209 +3210 +3211 +3212 +3213 +3214 +3215 +3216 +3217 +3218 +3219 +3220 +3221 +3222 +3223 +3224 +3225 +3226 +3227 +3228 +3229 +3230 +3231 +3232 +3233 +3234 +3235 +3236 +3237 +3238 +3239 +3240 +3241 +3242 +3243 +3244 +3245 +3246 +3247 +3248 +3249 +3250 +3251 +3252 +3253 +3254 +3255 +3256 +3257 +3258 +3259 
+14876 +14877 +14878 +14879 +14880 +14881 +14882 +14883 +14884 +14885 +14886 +14887 +14888 +14889 +14890 +14891 +14892 +14893 +14894 +14895 +14896 +14897 +14898 +14899 +14900 +14901 +14902 +14903 +14904 +14905 +14906 +14907 +14908 +14909 +14910 +14911 +14912 +14913 +14914 +14915 +14916 +14917 +14918 +14919 +14920 +14921 +14922 +14923 +14924 +14925 +14926 +14927 +14928 +14929 +14930 +14931 +14932 +14933 +14934 +14935 +14936 +14937 +14938 +14939 +14940 +14941 +14942 +14943 +14944 +14945 +14946 +14947 +14948 +14949 +14950 +14951 +14952 +14953 +14954 +14955 +14956 +14957 +14958 +14959 +14960 +14961 +14962 +14963 +14964 +14965 +14966 +14967 +14968 +14969 +14970 +14971 +14972 +14973 +14974 +14975 +14976 +14977 +14978 +14979 +14980 +14981 +14982 +14983 +14984 +14985 +14986 +14987 +14988 +14989 +14990 +14991 +14992 +14993 +14994 +14995 +14996 +14997 +14998 +14999 +15000 +15001 +15002 +15003 +15004 +15005 +15006 +15007 +15008 +15009 +15010 +15011 +15012 +15013 +15014 +15015 +15016 +15017 +15018 +15019 +15020 +15021 +15022 +15023 +15024 +15025 +15026 +15028 +15029 +15030 +15031 +15032 +15033 +15034 +15035 +15036 +15037 +15038 +15039 +15040 +15041 +15042 +15043 +15044 +15045 +15046 +15047 +15048 +15049 +15050 +15051 +15052 +15053 +15054 +15055 +15056 +15057 +15058 +15059 +15060 +15061 +15062 +15063 +15064 +15065 +15066 +15067 +15068 +15069 +15070 +15071 +15072 +15073 +15074 +15075 +15076 +15077 +15078 +15079 +15080 +15081 +15082 +15083 +15084 +15085 +15086 +15087 +15088 +15089 +15090 +15091 +15092 +15093 +15094 +15095 +15096 +15097 +15098 +15099 +15100 +15101 +15102 +15103 +15104 +15105 +15106 +15107 +15108 +15109 +15110 +15111 +15112 +15113 +15114 +15115 +15116 +15117 +15118 +15119 +15120 +15121 +15122 +15123 +15124 +15125 +15126 +15127 +15128 +15129 +15130 +15131 +15132 +15133 +15134 +15135 +15136 +15137 +15138 +15139 +15140 +15141 +15142 +15143 +15144 +15145 +15146 +15147 +15148 +15149 +15150 +15151 +15152 +15153 +15154 +15155 +15156 +15157 +15158 +15159 +15160 +15161 +15162 +15163 +15164 +15165 +15166 +15167 +15168 +15169 +15170 +15171 +15172 +15173 +15174 +15175 +15176 +15177 +15178 +15179 +15180 +15181 +15182 +15183 +15184 +15185 +15186 +15187 +15188 +15189 +15190 +15191 +15192 +15193 +15194 +15195 +15196 +15197 +15198 +15199 +15200 +15201 +15202 +15203 +15204 +15205 +15206 +15207 +15208 +15209 +15210 +15211 +15212 +15213 +15214 +15215 +15216 +15217 +15218 +15219 +15220 +15221 +15222 +15223 +15224 +15225 +15226 +15227 +15228 +15229 +15230 +15231 +15232 +15233 +15234 +15235 +15236 +15237 +15238 +15239 +15240 +15241 +15242 +15243 +15244 +15245 +15246 +15247 +15248 +15249 +15250 +15251 +15252 +15253 +15254 +15255 +15256 +15257 +15258 +15259 +15260 +15261 +15262 +15263 +15264 +15265 +15266 +15267 +15268 +15269 +15270 +15271 +15272 +15273 +15274 +15275 +15276 +15277 +15278 +15279 +15280 +15281 +15282 +15283 +15284 +15285 +15286 +15287 +15288 +15289 +15290 +15291 +15292 +15293 +15294 +15295 +15296 +15297 +15298 +15299 +15300 +15301 +15302 +15303 +15304 +15305 +15306 +15307 +15308 +15309 +15310 +15311 +15312 +15313 +15314 +15315 +15316 +15317 +15318 +15319 +15320 +15321 +15322 +15323 +15324 +15325 +15326 +15327 +15328 +15329 +15330 +15331 +15332 +15333 +15334 +15335 +15336 +15337 +15338 +15339 +15340 +15341 +15342 +15343 +15344 +15345 +15346 +15347 +15348 +15349 +15350 +15351 +15352 +15353 +15354 +15355 +15356 +15357 +15358 +15359 +15360 +15361 +15362 +15363 +15364 +15365 +15366 +15367 +15368 +15369 +15370 +15371 +15372 +15373 +15374 +15375 +15376 +15377 +15378 +15379 +15380 +15381 +15382 +15383 
+15384 +15385 +15386 +15387 +15388 +15389 +15390 +15391 +15392 +15393 +15394 +15395 +15396 +15397 +15398 +15399 +15400 +15401 +15402 +15403 +15404 +15405 +15406 +15407 +15408 +15409 +15410 +15411 +15412 +15413 +15414 +15415 +15416 +15417 +15418 +15419 +15420 +15421 +15422 +15423 +15424 +15425 +15426 +15427 +15428 +15429 +15430 +15431 +15432 +15433 +15434 +15435 +15436 +15437 +15438 +15439 +15440 +15441 +15442 +15443 +15444 +15445 +15446 +15447 +15448 +15449 +15450 +15451 +15452 +15453 +15454 +15455 +15456 +15457 +15458 +15459 +15460 +15461 +15462 +15463 +15464 +15465 +15466 +15467 +15468 +15469 +15470 +15471 +15472 +15473 +15474 +15475 +15476 +15477 +15478 +15479 +15480 +15481 +15482 +15483 +15484 +15485 +15486 +15487 +15488 +15489 +15490 +15491 +15492 +15493 +15494 +15495 +15496 +15497 +15498 +15499 +15500 +15501 +15502 +15503 +15504 +15505 +15506 +15507 +15508 +15509 +15510 +15511 +15512 +15513 +15514 +15515 +15516 +15517 +15518 +15519 +15520 +15521 +15522 +15523 +15524 +15525 +15526 +15527 +15528 +15529 +15530 +15531 +15532 +15533 +15534 +15535 +15536 +15537 +15538 +15539 +15540 +15541 +15542 +15543 +15544 +15545 +15546 +15547 +15548 +15549 +15550 +15551 +15552 +15553 +15554 +15555 +15556 +15557 +15558 +15559 +15560 +15561 +15562 +15563 +15564 +15565 +15566 +15567 +15568 +15569 +15570 +15571 +15572 +15573 +15574 +15575 +15576 +15577 +15578 +15579 +15580 +15581 +15582 +15583 +15584 +15585 +15586 +15587 +15588 +15589 +15590 +15591 +15592 +15593 +15594 +15595 +15596 +15597 +15598 +15599 +15600 +15601 +15602 +15603 +15604 +15605 +15606 +15607 +15608 +15609 +15610 +15611 +15612 +15613 +15614 +15615 +15616 +15617 +15618 +15619 +15620 +15621 +15622 +15623 +15624 +15625 +15626 +15627 +15628 +15629 +15630 +15631 +15632 +15633 +15634 +15635 +15636 +15637 +15638 +15639 +15640 +15641 +15642 +15643 +15644 +15645 +15646 +15647 +15648 +15649 +15650 +15651 +15652 +15653 +15654 +15655 +15656 +15657 +15658 +15659 +15660 +15661 +15662 +15663 +15664 +15665 +15666 +15667 +15668 +15669 +15670 +15671 +15672 +15673 +15674 +15675 +15676 +15677 +15678 +15679 +15680 +15681 +15682 +15683 +15684 +15685 +15686 +15687 +15688 +15689 +15690 +15691 +15692 +15693 +15694 +15695 +15696 +15697 +15698 +15699 +15700 +15701 +15702 +15703 +15704 +15705 +15706 +15707 +15708 +15709 +15710 +15711 +15712 +15713 +15714 +15715 +15716 +15717 +15718 +15719 +15720 +15721 +15722 +15723 +15724 +15725 +15726 +15727 +15728 +15729 +15730 +15731 +15732 +15733 +15734 +15735 +15736 +15737 +15738 +15739 +15740 +15741 +15742 +15743 +15744 +15745 +15746 +15747 +15748 +15749 +15750 +15751 +15752 +15753 +15754 +15755 +15756 +15757 +15758 +15759 +15760 +15761 +15762 +15763 +15764 +15765 +15766 +15767 +15768 +15769 +15770 +15771 +15772 +15773 +15774 +15775 +15776 +15777 +15778 +15779 +15780 +15781 +15782 +15783 +15784 +15785 +15786 +15787 +15788 +15789 +15790 +15791 +15792 +15793 +15794 +15795 +15796 +15797 +15798 +15799 +15800 +15801 +15802 +15803 +15804 +15805 +15806 +15807 +15808 +15809 +15810 +15811 +15812 +15813 +15814 +15815 +15816 +15817 +15818 +15819 +15820 +15821 +15822 +15823 +15824 +15825 +15826 +15827 +15828 +15829 +15830 +15831 +15832 +15833 +15834 +15835 +15836 +15837 +15838 +15839 +15840 +15841 +15842 +15843 +15844 +15845 +15846 +15847 +15848 +15849 +15850 +15851 +15852 +15853 +15854 +15855 +15856 +15857 +15858 +15859 +15860 +15861 +15862 +15863 +15864 +15865 +15866 +15867 +15868 +15869 +15870 +15871 +15872 +15873 +15874 +15875 +15876 +15877 +15878 +15879 +15880 +15881 +15882 +15883 +15884 +15885 +15886 +15887 +15888 +15889 +15890 
+15891 +15892 +15893 +15894 +15895 +15896 +15897 +15898 +15899 +15900 +15901 +15902 +15903 +15904 +15905 +15906 +15907 +15908 +15909 +15910 +15911 +15912 +15913 +15914 +15915 +15916 +15917 +15918 +15919 +15920 +15921 +15922 +15923 +15924 +15925 +15926 +15927 +15928 +15929 +15930 +15931 +15932 +15933 +15934 +15935 +15936 +15937 +15938 +15939 +15940 +15941 +15942 +15943 +15944 +15945 +15946 +15947 +15948 +15949 +15950 +15951 +15952 +15953 +15954 +15955 +15956 +15957 +15958 +15959 +15960 +15961 +15962 +15963 +15964 +15965 +15966 +15967 +15968 +15969 +15970 +15971 +15972 +15973 +15974 +15975 +15976 +15977 +15978 +15979 +15980 +15981 +15982 +15983 +15984 +15985 +15986 +15987 +15988 +15989 +15990 +15991 +15992 +15993 +15994 +15995 +15996 +15997 +15998 +15999 +16000 +16001 +16002 +16003 +16004 +16005 +16006 +16007 +16008 +16009 +16010 +16011 +16012 +16013 +16014 +16015 +16016 +16017 +16018 +16019 +16020 +16021 +16022 +16023 +16024 +16025 +16026 +16027 +16028 +16029 +16030 +16031 +16032 +16033 +16034 +16035 +16036 +16037 +16038 +16039 +16040 +16041 +16042 +16043 +16044 +16045 +16046 +16047 +16048 +16049 +16050 +16051 +16052 +16053 +16054 +16055 +16056 +16057 +16058 +16059 +16060 +16061 +16062 +16063 +16064 +16065 +16066 +16067 +16068 +16069 +16070 +16071 +16072 +16073 +16074 +16075 +16076 +16077 +16078 +16079 +16080 +16081 +16082 +16083 +16084 +16085 +16086 +16087 +16088 +16089 +16090 +16091 +16092 +16093 +16094 +16095 +16096 +16097 +16098 +16099 +16100 +16101 +16102 +16103 +16104 +16105 +16106 +16107 +16108 +16109 +16110 +16111 +16112 +16113 +16114 +16115 +16116 +16117 +16118 +16119 +16120 +16121 +16122 +16123 +16124 +16125 +16126 +16127 +16128 +16129 +16130 +16131 +16132 +16133 +16134 +16135 +16136 +16137 +16138 +16139 +16140 +16141 +16142 +16143 +16144 +16145 +16146 +16147 +16148 +16149 +16150 +16151 +16152 +16153 +16154 +16155 +16156 +16157 +16158 +16159 +16160 +16161 +16162 +16163 +16164 +16165 +16166 +16167 +16168 +16169 +16170 +16171 +16172 +16173 +16174 +16175 +16176 +16177 +16178 +16179 +16180 +16181 +16182 +16183 +16184 +16185 +16186 +16187 +16188 +16189 +16190 +16191 +16192 +16193 +16194 +16195 +16196 +16197 +16198 +16199 +16200 +16201 +16202 +16203 +16204 +16205 +16206 +16207 +16208 +16209 +16210 +16211 +16212 +16213 +16214 +16215 +16216 +16217 +16218 +16219 +16220 +16221 +16222 +16223 +16224 +16225 +16226 +16227 +16228 +16229 +16230 +16231 +16232 +16233 +16234 +16235 +16236 +16237 +16238 +16239 +16240 +16241 +16242 +16243 +16244 +16245 +16246 +16247 +16248 +16249 +16250 +16251 +16252 +16253 +16254 +16255 +16256 +16257 +16258 +16259 +16260 +16261 +16262 +16263 +16264 +16265 +16266 +16267 +16268 +16269 +16270 +16271 +16272 +16273 +16274 +16275 +16276 +16277 +16278 +16279 +16280 +16281 +16282 +16283 +16284 +16285 +16286 +16287 +16288 +16289 +16290 +16291 +16292 +16293 +16294 +16295 +16296 +16297 +16298 +16299 +16300 +16301 +16302 +16303 +16304 +16305 +16306 +16307 +16308 +16309 +16310 +16311 +16312 +16313 +16314 +16315 +16316 +16317 +16318 +16319 +16320 +16321 +16322 +16323 +16324 +16325 +16326 +16327 +16328 +16329 +16330 +16331 +16332 +16333 +16334 +16335 +16336 +16337 +16338 +16339 +16340 +16341 +16342 +16343 +16344 +16345 +16346 +16347 +16348 +16349 +16350 +16351 +16352 +16353 +16354 +16355 +16356 +16357 +16358 +16359 +16360 +16361 +16362 +16363 +16364 +16365 +16366 +16367 +16368 +16369 +16370 +16371 +16372 +16373 +16374 +16375 +16376 +16377 +16378 +16379 +16380 +16381 +16382 +16383 +16384 +16385 +16386 +16387 +16388 +16389 +16390 +16391 +16392 +16393 +16394 +16395 +16396 +16397 
+16398 +16399 +16400 +16401 +16402 +16403 +16404 +16405 +16406 +16407 +16408 +16409 +16410 +16411 +16412 +16413 +16414 +16415 +16416 +16417 +16418 +16419 +16420 +16421 +16422 +16423 +16424 +16425 +16426 +16427 +16428 +16429 +16430 +16431 +16432 +16433 +16434 +16435 +16436 +16437 +16438 +16439 +16440 +16441 +16442 +16443 +16444 +16445 +16446 +16447 +16448 +16449 +16450 +16451 +16452 +16453 +16454 +16455 +16456 +16457 +16458 +16459 +16460 +16461 +16462 +16463 +16464 +16465 +16466 +16467 +16468 +16469 +16470 +16471 +16472 +16473 +16474 +16475 +16476 +16477 +16478 +16479 +16480 +16481 +16482 +16483 +16484 +16485 +16486 +16487 +16488 +16489 +16490 +16491 +16492 +16493 +16494 +16495 +16496 +16497 +16498 +16499 +16500 +16501 +16502 +16503 +16504 +16505 +16506 +16507 +16508 +16509 +16510 +16511 +16512 +16513 +16514 +16515 +16516 +16517 +16518 +16519 +16520 +16521 +16522 +16523 +16524 +16525 +16526 +16527 +16528 +16529 +16530 +16531 +16532 +16533 +16534 +16535 +16536 +16537 +16538 +16539 +16540 +16541 +16542 +16543 +16544 +16545 +16546 +16547 +16548 +16549 +16550 +16551 +16552 +16553 +16554 +16555 +16556 +16557 +16558 +16559 +16560 +16561 +16562 +16563 +16564 +16565 +16566 +16567 +16568 +16569 +16570 +16571 +16572 +16573 +16574 +16575 +16576 +16577 +16578 +16579 +16580 +16581 +16582 +16583 +16584 +16585 +16586 +16587 +16588 +16589 +16590 +16591 +16592 +16593 +16594 +16595 +16596 +16597 +16598 +16599 +16600 +16601 +16602 +16603 +16604 +16605 +16606 +16607 +16608 +16609 +16610 +16611 +16612 +16613 +16614 +16615 +16616 +16617 +16618 +16619 +16620 +16621 +16622 +16623 +16624 +16625 +16626 +16627 +16628 +16629 +16630 +16631 +16632 +16633 +16634 +16635 +16636 +16637 +16638 +16639 +16640 +16641 +16642 +16643 +16644 +16645 +16646 +16647 +16648 +16649 +16650 +16651 +16652 +16653 +16654 +16655 +16656 +16657 +16658 +16659 +16660 +16661 +16662 +16663 +16664 +16665 +16666 +16667 +16668 +16669 +16670 +16671 +16672 +16673 +16674 +16675 +16676 +16677 +16678 +16679 +16680 +16681 +16682 +16683 +16684 +16685 +16686 +16687 +16688 +16689 +16690 +16691 +16692 +16693 +16694 +16695 +16696 +16697 +16698 +16699 +16700 +16701 +16702 +16703 +16704 +16705 +16706 +16707 +16708 +16709 +16710 +16711 +16712 +16713 +16714 +16715 +16716 +16717 +16718 +16719 +16720 +16721 +16722 +16723 +16724 +16725 +16726 +16727 +16728 +16729 +16730 +16731 +16732 +16733 +16734 +16735 +16736 +16737 +16738 +16739 +16740 +16741 +16742 +16743 +16744 +16745 +16746 +16747 +16748 +16749 +16750 +16751 +16752 +16753 +16754 +16755 +16756 +16757 +16758 +16759 +16760 +16761 +16762 +16763 +16764 +16765 +16766 +16767 +16768 +16769 +16770 +16771 +16772 +16773 +16774 +16775 +16776 +16777 +16778 +16779 +16780 +16781 +16782 +16783 +16784 +16785 +16786 +16787 +16788 +16789 +16790 +16791 +16792 +16793 +16794 +16795 +16796 +16797 +16798 +16799 +16800 +16801 +16802 +16803 +16804 +16805 +16806 +16807 +16808 +16809 +16810 +16811 +16812 +16813 +16814 +16815 +16816 +16817 +16818 +16819 +16820 +16821 +16822 +16823 +16824 +16825 +16826 +16827 +16828 +16829 +16830 +16831 +16832 +16833 +16834 +16835 +16836 +16837 +16838 +16839 +16840 +16841 +16842 +16843 +16844 +16845 +16846 +16847 +16848 +16849 +16850 +16851 +16852 +16853 +16854 +16855 +16856 +16857 +16858 +16859 +16860 +16861 +16862 +16863 +16864 +16865 +16866 +16867 +16868 +16869 +16870 +16871 +16872 +16873 +16874 +16875 +16876 +16877 +16878 +16879 +16880 +16881 +16882 +16883 +16884 +16885 +16886 +16887 +16888 +16889 +16890 +16891 +16892 +16893 +16894 +16895 +16896 +16897 +16898 +16899 +16900 +16901 +16902 +16903 +16904 
+16905 +16906 +16907 +16908 +16909 +16910 +16911 +16912 +16913 +16914 +16915 +16916 +16917 +16918 +16919 +16920 +16921 +16922 +16923 +16924 +16925 +16926 +16927 +16928 +16929 +16930 +16931 +16932 +16933 +16934 +16935 +16936 +16937 +16938 +16939 +16940 +16941 +16942 +16943 +16944 +16945 +16946 +16947 +16948 +16949 +16950 +16951 +16952 +16953 +16954 +16955 +16956 +16957 +16958 +16959 +16960 +16961 +16962 +16963 +16964 +16965 +16966 +16967 +16968 +16969 +16970 +16971 +16972 +16973 +16974 +16975 +16976 +16977 +16978 +16979 +16980 +16981 +16982 +16983 +16984 +16985 +16986 +16987 +16988 +16989 +16990 +16991 +16992 +16993 +16994 +16995 +16996 +16997 +16998 +16999 +17000 +17001 +17002 +17003 +17004 +17005 +17006 +17007 +17008 +17009 +17010 +17011 +17012 +17013 +17014 +17015 +17016 +17017 +17018 +17019 +17020 +17021 +17022 +17023 +17024 +17025 +17026 +17027 +17028 +17029 +17030 +17031 +17032 +17033 +17034 +17035 +17036 +17037 +17038 +17039 +17040 +17041 +17042 +17043 +17044 +17045 +17046 +17047 +17048 +17049 +17050 +17051 +17052 +17053 +17054 +17055 +17056 +17057 +17058 +17059 +17060 +17061 +17062 +17063 +17064 +17065 +17066 +17067 +17068 +17069 +17070 +17071 +17072 +17073 +17074 +17075 +17076 +17077 +17078 +17079 +17080 +17081 +17082 +17083 +17084 +17085 +17086 +17087 +17088 +17089 +17090 +17091 +17092 +17093 +17094 +17095 +17096 +17097 +17098 +17099 +17100 +17101 +17102 +17103 +17104 +17105 +17106 +17107 +17108 +17109 +17110 +17111 +17112 +17113 +17114 +17115 +17116 +17117 +17118 +17119 +17120 +17121 +17122 +17123 +17124 +17125 +17126 +17127 +17128 +17129 +17130 +17131 +17132 +17133 +17134 +17135 +17136 +17137 +17138 +17139 +17140 +17141 +17142 +17143 +17144 +17145 +17146 +17147 +17148 +17149 +17150 +17151 +17152 +17153 +17154 +17155 +17156 +17157 +17158 +17159 +17160 +17161 +17162 +17163 +17164 +17165 +17166 +17167 +17168 +17169 +17170 +17171 +17172 +17173 +17174 +17175 +17176 +17177 +17178 +17179 +17180 +17181 +17182 +17183 +17184 +17185 +17186 +17187 +17188 +17189 +17190 +17191 +17192 +17193 +17194 +17195 +17196 +17197 +17198 +17199 +17200 +17201 +17202 +17203 +17204 +17205 +17206 +17207 +17208 +17209 +17210 +17211 +17212 +17213 +17214 +17215 +17216 +17217 +17218 +17219 +17220 +17221 +17222 +17223 +17224 +17225 +17226 +17227 +17228 +17229 +17230 +17231 +17232 +17233 +17234 +17235 +17236 +17237 +17238 +17239 +17240 +17241 +17242 +17243 +17244 +17245 +17246 +17247 +17248 +17249 +17250 +17251 +17252 +17253 +17254 +17255 +17256 +17257 +17258 +17259 +17260 +17261 +17262 +17263 +17264 +17265 +17266 +17267 +17268 +17269 +17270 +17271 +17272 +17273 +17274 +17275 +17276 +17277 +17278 +17279 +17280 +17281 +17282 +17283 +17284 +17285 +17286 +17287 +17288 +17289 +17290 +17291 +17292 +17293 +17294 +17295 +17296 +17297 +17298 +17299 +17300 +17301 +17302 +17303 +17304 +17305 +17306 +17307 +17308 +17309 +17310 +17311 +17312 +17313 +17314 +17315 +17316 +17317 +17318 +17319 +17320 +17321 +17322 +17323 +17324 +17325 +17326 +17327 +17328 +17329 +17330 +17331 +17332 +17333 +17334 +17335 +17336 +17337 +17338 +17339 +17340 +17341 +17342 +17343 +17344 +17345 +17346 +17347 +17348 +17349 +17350 +17351 +17352 +17353 +17354 +17355 +17356 +17357 +17358 +17359 +17360 +17361 +17362 +17363 +17364 +17365 +17366 +17367 +17368 +17369 +17370 +17371 +17372 +17373 +17374 +17375 +17376 +17377 +17378 +17379 +17380 +17381 +17382 +17383 +17384 +17385 +17386 +17387 +17388 +17389 +17390 +17391 +17392 +17393 +17394 +17395 +17396 +17397 +17398 +17399 +17400 +17401 +17402 +17403 +17404 +17405 +17406 +17407 +17408 +17409 +17410 +17411 
+17412 +17413 +17414 +17415 +17416 +17417 +17418 +17419 +17420 +17421 +17422 +17423 +17424 +17425 +17426 +17427 +17428 +17429 +17430 +17431 +17432 +17433 +17434 +17435 +17436 +17437 +17438 +17439 +17440 +17441 +17442 +17443 +17444 +17445 +17446 +17447 +17448 +17449 +17450 +17451 +17452 +17453 +17454 +17455 +17456 +17457 +17458 +17459 +17460 +17461 +17462 +17463 +17464 +17465 +17466 +17467 +17468 +17469 +17470 +17471 +17472 +17473 +17474 +17475 +17476 +17477 +17478 +17479 +17480 +17481 +17482 +17483 +17484 +17485 +17486 +17487 +17488 +17489 +17490 +17491 +17492 +17493 +17494 +17495 +17496 +17497 +17498 +17499 +17500 +17501 +17502 +17503 +17504 +17505 +17506 +17507 +17508 +17509 +17510 +17511 +17512 +17513 +17514 +17515 +17516 +17517 +17518 +17519 +17520 +17521 +17522 +17523 +17524 +17525 +17526 +17527 +17528 +17529 +17530 +17531 +17532 +17533 +17534 +17535 +17536 +17537 +17538 +17539 +17540 +17541 +17542 +17543 +17544 +17545 +17546 +17547 +17548 +17549 +17550 +17551 +17552 +17553 +17554 +17555 +17556 +17557 +17558 +17559 +17560 +17561 +17562 +17563 +17564 +17565 +17566 +17567 +17568 +17569 +17570 +17571 +17572 +17573 +17574 +17575 +17576 +17577 +17578 +17579 +17580 +17581 +17582 +17583 +17584 +17585 +17586 +17587 +17588 +17589 +17590 +17591 +17592 +17593 +17594 +17595 +17596 +17597 +17598 +17599 +17600 +17601 +17602 +17603 +17604 +17605 +17606 +17607 +17608 +17609 +17610 +17611 +17612 +17613 +17614 +17615 +17616 +17617 +17618 +17619 +17620 +17621 +17622 +17623 +17624 +17625 +17626 +17627 +17628 +17629 +17630 +17631 +17632 +17633 +17634 +17635 +17636 +17637 +17638 +17639 +17640 +17641 +17642 +17643 +17644 +17645 +17646 +17647 +17648 +17649 +17650 +17651 +17652 +17653 +17654 +17655 +17656 +17657 +17658 +17659 +17660 +17661 +17662 +17663 +17664 +17665 +17666 +17667 +17668 +17669 +17670 +17671 +17672 +17673 +17674 +17675 +17676 +17677 +17678 +17679 +17680 +17681 +17682 +17683 +17684 +17685 +17686 +17687 +17688 +17689 +17690 +17691 +17692 +17693 +17694 +17695 +17696 +17697 +17698 +17699 +17700 +17701 +17702 +17703 +17704 +17705 +17706 +17707 +17708 +17709 +17710 +17711 +17712 +17713 +17714 +17715 +17716 +17717 +17718 +17719 +17720 +17721 +17722 +17723 +17724 +17725 +17726 +17727 +17728 +17729 +17730 +17731 +17732 +17733 +17734 +17735 +17736 +17737 +17738 +17739 +17740 +17741 +17742 +17743 +17744 +17745 +17746 +17747 +17748 +17749 +17750 +17751 +17752 +17753 +17754 +17755 +17756 +17757 +17758 +17759 +17760 +17761 +17762 +17763 +17764 +17765 +17766 +17767 +17768 +17769 +17770 +17771 +17772 +17773 +17774 +17775 +17776 +17777 +17778 +17779 +17780 +17781 +17782 +17783 +17784 +17785 +17786 +17787 +17788 +17789 +17790 +17791 +17792 +17793 +17794 +17795 +17796 +17797 +17798 +17799 +17800 +17801 +17802 +17803 +17804 +17805 +17806 +17807 +17808 +17809 +17810 +17811 +17812 +17813 +17814 +17815 +17816 +17817 +17818 +17819 +17820 +17821 +17822 +17823 +17824 +17825 +17826 +17827 +17828 +17829 +17830 +17831 +17832 +17833 +17834 +17835 +17836 +17837 +17838 +17839 +17840 +17841 +17842 +17843 +17844 +17845 +17846 +17847 +17848 +17849 +17850 +17851 +17852 +17853 +17854 +17855 +17856 +17857 +17858 +17859 +17860 +17861 +17862 +17863 +17864 +17865 +17866 +17867 +17868 +17869 +17870 +17871 +17872 +17873 +17874 +17875 +17876 +17877 +17878 +17879 +17880 +17881 +17882 +17883 +17884 +17885 +17886 +17887 +17888 +17889 +17890 +17891 +17892 +17893 +17894 +17895 +17896 +17897 +17898 +17899 +17900 +17901 +17902 +17903 +17904 +17905 +17906 +17907 +17908 +17909 +17910 +17911 +17912 +17913 +17914 +17915 +17916 +17917 +17918 
+17919 +17920 +17921 +17922 +17923 +17924 +17925 +17926 +17927 +17928 +17929 +17930 +17931 +17932 +17933 +17934 +17935 +17936 +17937 +17938 +17939 +17940 +17941 +17942 +17943 +17944 +17945 +17946 +17947 +17948 +17949 +17950 +17951 +17952 +17953 +17954 +17955 +17956 +17957 +17958 +17959 +17960 +17961 +17962 +17963 +17964 +17965 +17966 +17967 +17968 +17969 +17970 +17971 +17972 +17973 +17974 +17975 +17976 +17977 +17978 +17979 +17980 +17981 +17982 +17983 +17984 +17985 +17986 +17987 +17988 +17989 +17990 +17991 +17992 +17993 +17994 +17995 +17996 +17997 +17998 +17999 +18000 +18001 +18002 +18003 +18004 +18005 +18006 +18007 +18008 +18009 +18010 +18011 +18012 +18013 +18014 +18015 +18016 +18017 +18018 +18019 +18020 +18021 +18022 +18023 +18024 +18025 +18026 +18027 +18028 +18029 +18030 +18031 +18032 +18033 +18034 +18035 +18036 +18037 +18038 +18039 +18040 +18041 +18042 +18043 +18044 +18045 +18046 +18047 +18048 +18049 +18050 +18051 +18052 +18053 +18054 +18055 +18056 +18057 +18058 +18059 +18060 +18061 +18062 +18063 +18064 +18065 +18066 +18067 +18068 +18069 +18070 +18071 +18072 +18073 +18074 +18075 +18076 +18077 +18078 +18079 +18080 +18081 +18082 +18083 +18084 +18085 +18086 +18087 +18088 +18089 +18090 +18091 +18092 +18093 +18094 +18095 +18096 +18097 +18098 +18099 +18100 +18101 +18102 +18103 +18104 +18105 +18106 +18107 +18108 +18109 +18110 +18111 +18112 +18113 +18114 +18115 +18116 +18117 +18118 +18119 +18120 +18121 +18122 +18123 +18124 +18125 +18126 +18127 +18128 +18129 +18130 +18131 +18132 +18133 +18134 +18135 +18136 +18137 +18138 +18139 +18140 +18141 +18142 +18143 +18144 +18145 +18146 +18147 +18148 +18149 +18150 +18151 +18152 +18153 +18154 +18155 +18156 +18157 +18158 +18159 +18160 +18161 +18162 +18163 +18164 +18165 +18166 +18167 +18168 +18169 +18170 +18171 +18172 +18173 +18174 +18175 +18176 +18177 +18178 +18179 +18180 +18181 +18182 +18183 +18184 +18185 +18186 +18187 +18188 +18189 +18190 +18191 +18192 +18193 +18194 +18195 +18196 +18197 +18198 +18199 +18200 +18201 +18202 +18203 +18204 +18205 +18206 +18207 +18208 +18209 +18210 +18211 +18212 +18213 +18214 +18215 +18216 +18217 +18218 +18219 +18220 +18221 +18222 +18223 +18224 +18225 +18226 +18227 +18228 +18229 +18230 +18231 +18232 +18233 +18234 +18235 +18236 +18237 +18238 +18239 +18240 +18241 +18242 +18243 +18244 +18245 +18246 +18247 +18248 +18249 +18250 +18251 +18252 +18253 +18254 +18255 +18256 +18257 +18258 +18259 +18260 +18261 +18262 +18263 +18264 +18265 +18266 +18267 +18268 +18269 +18270 +18271 +18272 +18273 +18274 +18275 +18276 +18277 +18278 +18279 +18280 +18281 +18282 +18283 +18284 +18285 +18286 +18287 +18288 +18289 +18290 +18291 +18292 +18293 +18294 +18295 +18296 +18297 +18298 +18299 +18300 +18301 +18302 +18303 +18304 +18305 +18306 +18307 +18308 +18309 +18310 +18311 +18312 +18313 +18314 +18315 +18316 +18317 +18318 +18319 +18320 +18321 +18322 +18323 +18324 +18325 +18326 +18327 +18328 +18329 +18330 +18331 +18332 +18333 +18334 +18335 +18336 +18337 +18338 +18339 +18340 +18341 +18342 +18343 +18344 +18345 +18346 +18347 +18348 +18349 +18350 +18351 +18352 +18353 +18354 +18355 +18356 +18357 +18358 +18359 +18360 +18361 +18362 +18363 +18364 +18365 +18366 +18367 +18368 +18369 +18370 +18371 +18372 +18373 +18374 +18375 +18376 +18377 +18378 +18379 +18380 +18381 +18382 +18383 +18384 +18385 +18386 +18387 +18388 +18389 +18390 +18391 +18392 +18393 +18394 +18395 +18396 +18397 +18398 +18399 +18400 +18401 +18402 +18403 +18404 +18405 +18406 +18407 +18408 +18409 +18410 +18411 +18412 +18413 +18414 +18415 +18416 +18417 +18418 +18419 +18420 +18421 +18422 +18423 +18424 +18425 
+18426 +18427 +18428 +18429 +18430 +18431 +18432 +18433 +18434 +18435 +18436 +18437 +18438 +18439 +18440 +18441 +18442 +18443 +18444 +18445 +18446 +18447 +18448 +18449 +18450 +18451 +18452 +18453 +18454 +18455 +18456 +18457 +18458 +18459 +18460 +18461 +18462 +18463 +18464 +18465 +18466 +18467 +18468 +18469 +18470 +18471 +18472 +18473 +18474 +18475 +18476 +18477 +18478 +18479 +18480 +18481 +18482 +18483 +18484 +18485 +18486 +18487 +18488 +18489 +18490 +18491 +18492 +18493 +18494 +18495 +18496 +18497 +18498 +18499 +18500 +18501 +18502 +18503 +18504 +18505 +18506 +18507 +18508 +18509 +18510 +18511 +18512 +18513 +18514 +18515 +18516 +18517 +18518 +18519 +18520 +18521 +18522 +18523 +18524 +18525 +18526 +18527 +18528 +18529 +18530 +18531 +18532 +18533 +18534 +18535 +18536 +18537 +18538 +18539 +18540 +18541 +18542 +18543 +18544 +18545 +18546 +18547 +18548 +18549 +18550 +18551 +18552 +18553 +18554 +18555 +18556 +18557 +18558 +18559 +18560 +18561 +18562 +18563 +18564 +18565 +18566 +18567 +18568 +18569 +18570 +18571 +18572 +18573 +18574 +18575 +18576 +18577 +18578 +18579 +18580 +18581 +18582 +18583 +18584 +18585 +18586 +18587 +18588 +18589 +18590 +18591 +18592 +18593 +18594 +18595 +18596 +18597 +18598 +18599 +18600 +18601 +18602 +18603 +18604 +18605 +18606 +18607 +18608 +18609 +18610 +18611 +18612 +18613 +18614 +18615 +18616 +18617 +18618 +18619 +18620 +18621 +18622 +18623 +18624 +18625 +18626 +18627 +18628 +18629 +18630 +18631 +18632 +18633 +18634 +18635 +18636 +18637 +18638 +18639 +18640 +18641 +18642 +18643 +18644 +18645 +18646 +18647 +18648 +18649 +18650 +18651 +18652 +18653 +18654 +18655 +18656 +18657 +18658 +18659 +18660 +18661 +18662 +18663 +18664 +18665 +18666 +18667 +18668 +18669 +18670 +18671 +18672 +18673 +18674 +18675 +18676 +18677 +18678 +18679 +18680 +18681 +18682 +18683 +18684 +18685 +18686 +18687 +18688 +18689 +18690 +18691 +18692 +18693 +18694 +18695 +18696 +18697 +18698 +18699 +18700 +18701 +18702 +18703 +18704 +18705 +18706 +18707 +18708 +18709 +18710 +18711 +18712 +18713 +18714 +18715 +18716 +18717 +18718 +18719 +18720 +18721 +18722 +18723 +18724 +18725 +18726 +18727 +18728 +18729 +18730 +18731 +18732 +18733 +18734 +18735 +18736 +18737 +18738 +18739 +18740 +18741 +18742 +18743 +18744 +18745 +18746 +18747 +18748 +18749 +18750 +18751 +18752 +18753 +18754 +18755 +18756 +18757 +18758 +18759 +18760 +18761 +18762 +18763 +18764 +18765 +18766 +18767 +18768 +18769 +18770 +18771 +18772 +18773 +18774 +18775 +18776 +18777 +18778 +18779 +18780 +18781 +18782 +18783 +18784 +18785 +18786 +18787 +18788 +18789 +18790 +18791 +18792 +18793 +18794 +18795 +18796 +18797 +18798 +18799 +18800 +18801 +18802 +18803 +18804 +18805 +18806 +18807 +18808 +18809 +18810 +18811 +18812 +18813 +18814 +18815 +18816 +18817 +18818 +18819 +18820 +18821 +18822 +18823 +18824 +18825 +18826 +18827 +18828 +18829 +18830 +18831 +18832 +18833 +18834 +18835 +18836 +18837 +18838 +18839 +18840 +18841 +18842 +18843 +18844 +18845 +18846 +18847 +18848 +18849 +18850 +18851 +18852 +18853 +18854 +18855 +18856 +18857 +18858 +18859 +18860 +18861 +18862 +18863 +18864 +18865 +18866 +18867 +18868 +18869 +18870 +18871 +18872 +18873 +18874 +18875 +18876 +18877 +18878 +18879 +18880 +18881 +18882 +18883 +18884 +18885 +18886 +18887 +18888 +18889 +18890 +18891 +18892 +18893 +18894 +18895 +18896 +18897 +18898 +18899 +18900 +18901 +18902 +18903 +18904 +18905 +18906 +18907 +18908 +18909 +18910 +18911 +18912 +18913 +18914 +18915 +18916 +18917 +18918 +18919 +18920 +18921 +18922 +18923 +18924 +18925 +18926 +18927 +18928 +18929 +18930 +18931 +18932 
+18933 +18934 +18935 +18936 +18937 +18938 +18939 +18940 +18941 +18942 +18943 +18944 +18945 +18946 +18947 +18948 +18949 +18950 +18951 +18952 +18953 +18954 +18955 +18956 +18957 +18958 +18959 +18960 +18961 +18962 +18963 +18964 +18965 +18966 +18967 +18968 +18969 +18970 +18971 +18972 +18973 +18974 +18975 +18976 +18977 +18978 +18979 +18980 +18981 +18982 +18983 +18984 +18985 +18986 +18987 +18988 +18989 +18990 +18991 +18992 +18993 +18994 +18995 +18996 +18997 +18998 +18999 +19000 +19001 +19002 +19003 +19004 +19005 +19006 +19007 +19008 +19009 +19010 +19011 +19012 +19013 +19014 +19015 +19016 +19017 +19018 +19019 +19020 +19021 +19022 +19023 +19024 +19025 +19026 +19027 +19028 +19029 +19030 +19031 +19032 +19033 +19034 +19035 +19036 +19037 +19038 +19039 +19040 +19041 +19042 +19043 +19044 +19045 +19046 +19047 +19048 +19049 +19050 +19051 +19052 +19053 +19054 +19055 +19056 +19057 +19058 +19059 +19060 +19061 +19062 +19063 +19064 +19065 +19066 +19067 +19068 +19069 +19070 +19071 +19072 +19073 +19074 +19075 +19076 +19077 +19078 +19079 +19080 +19081 +19082 +19083 +19084 +19085 +19086 +19087 +19088 +19089 +19090 +19091 +19092 +19093 +19094 +19095 +19096 +19097 +19098 +19099 +19100 +19101 +19102 +19103 +19104 +19105 +19106 +19107 +19108 +19109 +19110 +19111 +19112 +19113 +19114 +19115 +19116 +19117 +19118 +19119 +19120 +19121 +19122 +19123 +19124 +19125 +19126 +19127 +19128 +19129 +19130 +19131 +19132 +19133 +19134 +19135 +19136 +19137 +19138 +19139 +19140 +19141 +19142 +19143 +19144 +19145 +19146 +19147 +19148 +19149 +19150 +19151 +19152 +19153 +19154 +19155 +19156 +19157 +19158 +19159 +19160 +19161 +19162 +19163 +19164 +19165 +19166 +19167 +19168 +19169 +19170 +19171 +19172 +19173 +19174 +19175 +19176 +19177 +19178 +19179 +19180 +19181 +19182 +19183 +19184 +19185 +19186 +19187 +19188 +19189 +19190 +19191 +19192 +19193 +19194 +19195 +19196 +19197 +19198 +19199 +19200 +19201 +19202 +19203 +19204 +19205 +19206 +19207 +19208 +19209 +19210 +19211 +19212 +19213 +19214 +19215 +19216 +19217 +19218 +19219 +19220 +19221 +19222 +19223 +19224 +19225 +19226 +19227 +19228 +19229 +19230 +19231 +19232 +19233 +19234 +19235 +19236 +19237 +19238 +19239 +19240 +19241 +19242 +19243 +19244 +19245 +19246 +19247 +19248 +19249 +19250 +19251 +19252 +19253 +19254 +19255 +19256 +19257 +19258 +19259 +19260 +19261 +19262 +19263 +19264 +19265 +19266 +19267 +19268 +19269 +19270 +19271 +19272 +19273 +19274 +19275 +19276 +19277 +19278 +19279 +19280 +19281 +19282 +19283 +19284 +19285 +19286 +19287 +19288 +19289 +19290 +19291 +19292 +19293 +19294 +19295 +19296 +19297 +19298 +19299 +19300 +19301 +19302 +19303 +19304 +19305 +19306 +19307 +19308 +19309 +19310 +19311 +19312 +19313 +19314 +19315 +19316 +19317 +19318 +19319 +19320 +19321 +19322 +19323 +19324 +19325 +19326 +19327 +19328 +19329 +19330 +19331 +19332 +19333 +19334 +19335 +19336 +19337 +19338 +19339 +19340 +19341 +19342 +19343 +19344 +19345 +19346 +19347 +19348 +19349 +19350 +19351 +19352 +19353 +19354 +19355 +19356 +19357 +19358 +19359 +19360 +19361 +19362 +19363 +19364 +19365 +19366 +19367 +19368 +19369 +19370 +19371 +19372 +19373 +19374 +19375 +19376 +19377 +19378 +19379 +19380 +19381 +19382 +19383 +19384 +19385 +19386 +19387 +19388 +19389 +19390 +19391 +19392 +19393 +19394 +19395 +19396 +19397 +19398 +19399 +19400 +19401 +19402 +19403 +19404 +19405 +19406 +19407 +19408 +19409 +19410 +19411 +19412 +19413 +19414 +19415 +19416 +19417 +19418 +19419 +19420 +19421 +19422 +19423 +19424 +19425 +19426 +19427 +19428 +19429 +19430 +19431 +19432 +19433 +19434 +19435 +19436 +19437 +19438 +19439 
+19440 +19441 +19442 +19443 +19444 +19445 +19446 +19447 +19448 +19449 +19450 +19451 +19452 +19453 +19454 +19455 +19456 +19457 +19458 +19459 +19460 +19461 +19462 +19463 +19464 +19465 +19466 +19467 +19468 +19469 +19470 +19471 +19472 +19473 +19474 +19475 +19476 +19477 +19478 +19479 +19480 +19481 +19482 +19483 +19484 +19485 +19486 +19487 +19488 +19489 +19490 +19491 +19492 +19493 +19494 +19495 +19496 +19497 +19498 +19499 +19500 +19501 +19502 +19503 +19504 +19505 +19506 +19507 +19508 +19509 +19510 +19511 +19512 +19513 +19514 +19515 +19516 +19517 +19518 +19519 +19520 +19521 +19522 +19523 +19524 +19525 +19526 +19527 +19528 +19529 +19530 +19531 +19532 +19533 +19534 +19535 +19536 +19537 +19538 +19539 +19540 +19541 +19542 +19543 +19544 +19545 +19546 +19547 +19548 +19549 +19550 +19551 +19552 +19553 +19554 +19555 +19556 +19557 +19558 +19559 +19560 +19561 +19562 +19563 +19564 +19565 +19566 +19567 +19568 +19569 +19570 +19571 +19572 +19573 +19574 +19575 +19576 +19577 +19578 +19579 +19580 +19581 +19582 +19583 +19584 +19585 +19586 +19587 +19588 +19589 +19590 +19591 +19592 +19593 +19594 +19595 +19596 +19597 +19598 +19599 +19600 +19601 +19602 +19603 +19604 +19605 +19606 +19607 +19608 +19609 +19610 +19611 +19612 +19613 +19614 +19615 +19616 +19617 +19618 +19619 +19620 +19621 +19622 +19623 +19624 +19625 +19626 +19627 +19628 +19629 +19630 +19631 +19632 +19633 +19634 +19635 +19636 +19637 +19638 +19639 +19640 +19641 +19642 +19643 +19644 +19645 +19646 +19647 +19648 +19649 +19650 +19651 +19652 +19653 +19654 +19655 +19656 +19657 +19658 +19659 +19660 +19661 +19662 +19663 +19664 +19665 +19666 +19667 +19668 +19669 +19670 +19671 +19672 +19673 +19674 +19675 +19676 +19677 +19678 +19679 +19680 +19681 +19682 +19683 +19684 +19685 +19686 +19687 +19688 +19689 +19690 +19691 +19692 +19693 +19694 +19695 +19696 +19697 +19698 +19699 +19700 +19701 +19702 +19703 +19704 +19705 +19706 +19707 +19708 +19709 +19710 +19711 +19712 +19713 +19714 +19715 +19716 +19717 +19718 +19719 +19720 +19721 +19722 +19723 +19724 +19725 +19726 +19727 +19728 +19729 +19730 +19731 +19732 +19733 +19734 +19735 +19736 +19737 +19738 +19739 +19740 +19741 +19742 +19743 +19744 +19745 +19746 +19747 +19748 +19749 +19750 +19751 +19752 +19753 +19754 +19755 +19756 +19757 +19758 +19759 +19760 +19761 +19762 +19763 +19764 +19765 +19766 +19767 +19768 +19769 +19770 +19771 +19772 +19773 +19774 +19775 +19776 +19777 +19778 +19779 +19780 +19781 +19782 +19783 +19784 +19785 +19786 +19787 +19788 +19789 +19790 +19791 +19792 +19793 +19794 +19795 +19796 +19797 +19798 +19799 +19800 +19801 +19802 +19803 +19804 +19805 +19806 +19807 +19808 +19809 +19810 +19811 +19812 +19813 +19814 +19815 +19816 +19817 +19818 +19819 +19820 +19821 +19822 +19823 +19824 +19825 +19826 +19827 +19828 +19829 +19830 +19831 +19832 +19833 +19834 +19835 +19836 +19837 +19838 +19839 +19840 +19841 +19842 +19843 +19844 +19845 +19846 +19847 +19848 +19849 +19850 +19851 +19852 +19853 +19854 +19855 +19856 +19857 +19858 +19859 +19860 +19861 +19862 +19863 +19864 +19865 +19866 +19867 +19868 +19869 +19870 +19871 +19872 +19873 +19874 +19875 +19876 +19877 +19878 +19879 +19880 +19881 +19882 +19883 +19884 +19885 +19886 +19887 +19888 +19889 +19890 +19891 +19892 +19893 +19894 +19895 +19896 +19897 +19898 +19899 +19900 +19901 +19902 +19903 +19904 +19905 +19906 +19907 +19908 +19909 +19910 +19911 +19912 +19913 +19914 +19915 +19916 +19917 +19918 +19919 +19920 +19921 +19922 +19923 +19924 +19925 +19926 +19927 +19928 +19929 +19930 +19931 +19932 +19933 +19934 +19935 +19936 +19937 +19938 +19939 +19940 +19941 +19942 +19943 +19944 +19945 +19946 
+19947 +19948 +19949 +19950 +19951 +19952 +19953 +19954 +19955 +19956 +19957 +19958 +19959 +19960 +19961 +19962 +19963 +19964 +19965 +19966 +19967 +19968 +19969 +19970 +19971 +19972 +19973 +19974 +19975 +19976 +19977 +19978 +19979 +19980 +19981 +19982 +19983 +19984 +19985 +19986 +19987 +19988 +19989 +19990 +19991 +19992 +19993 +19994 +19995 +19996 +19997 +19998 +19999 +20000 +20001 +20002 +20003 +20004 +20005 +20006 +20007 +20008 +20009 +20010 +20011 +20012 +20013 +20014 +20015 +20016 +20017 +20018 +20019 +20020 +20021 +20022 +20023 +20024 +20025 +20026 +20027 +20028 +20029 +20030 +20031 +20032 +20033 +20034 +20035 +20036 +20037 +20038 +20039 +20040 +20041 +20042 +20043 +20044 +20045 +20046 +20047 +20048 +20049 +20050 +20051 +20052 +20053 +20054 +20055 +20056 +20057 +20058 +20059 +20060 +20061 +20062 +20063 +20064 +20065 +20066 +20067 +20068 +20069 +20070 +20071 +20072 +20073 +20074 +20075 +20076 +20077 +20078 +20079 +20080 +20081 +20082 +20083 +20084 +20085 +20086 +20087 +20088 +20089 +20090 +20091 +20092 +20093 +20094 +20095 +20096 +20097 +20098 +20099 +20100 +20101 +20102 +20103 +20104 +20105 +20106 +20107 +20108 +20109 +20110 +20111 +20112 +20113 +20114 +20115 +20116 +20117 +20118 +20119 +20120 +20121 +20122 +20123 +20124 +20125 +20126 +20127 +20128 +20129 +20130 +20131 +20132 +20133 +20134 +20135 +20136 +20137 +20138 +20139 +20140 +20141 +20142 +20143 +20144 +20145 +20146 +20147 +20148 +20149 +20150 +20151 +20152 +20153 +20154 +20155 +20156 +20157 +20158 +20159 +20160 +20161 +20162 +20163 +20164 +20165 +20166 +20167 +20168 +20169 +20170 +20171 +20172 +20173 +20174 +20175 +20176 +20177 +20178 +20179 +20180 +20181 +20182 +20183 +20184 +20185 +20186 +20187 +20188 +20189 +20190 +20191 +20192 +20193 +20194 +20195 +20196 +20197 +20198 +20199 +20200 +20201 +20202 +20203 +20204 +20205 +20206 +20207 +20208 +20209 +20210 +20211 +20212 +20213 +20214 +20215 +20216 +20217 +20218 +20219 +20220 +20221 +20222 +20223 +20224 +20225 +20226 +20227 +20228 +20229 +20230 +20231 +20232 +20233 +20234 +20235 +20236 +20237 +20238 +20239 +20240 +20241 +20242 +20243 +20244 +20245 +20246 +20247 +20248 +20249 +20250 +20251 +20252 +20253 +20254 +20255 +20256 +20257 +20258 +20259 +20260 +20261 +20262 +20263 +20264 +20265 +20266 +20267 +20268 +20269 +20270 +20271 +20272 +20273 +20274 +20275 +20276 +20277 +20278 +20279 +20280 +20281 +20282 +20283 +20284 +20285 +20286 +20287 +20288 +20289 +20290 +20291 +20292 +20293 +20294 +20295 +20296 +20297 +20298 +20299 +20300 +20301 +20302 +20303 +20304 +20305 +20306 +20307 +20308 +20309 +20310 +20311 +20312 +20313 +20314 +20315 +20316 +20317 +20318 +20319 +20320 +20321 +20322 +20323 +20324 +20325 +20326 +20327 +20328 +20329 +20330 +20331 +20332 +20333 +20334 +20335 +20336 +20337 +20338 +20339 +20340 +20341 +20342 +20343 +20344 +20345 +20346 +20347 +20348 +20349 +20350 +20351 +20352 +20353 +20354 +20355 +20356 +20357 +20358 +20359 +20360 +20361 +20362 +20363 +20364 +20365 +20366 +20367 +20368 +20369 +20370 +20371 +20372 +20373 +20374 +20375 +20376 +20377 +20378 +20379 +20380 +20381 +20382 +20383 +20384 +20385 +20386 +20387 +20388 +20389 +20390 +20391 +20392 +20393 +20394 +20395 +20396 +20397 +20398 +20399 +20400 +20401 +20402 +20403 +20404 +20405 +20406 +20407 +20408 +20409 +20410 +20411 +20412 +20413 +20414 +20415 +20416 +20417 +20418 +20419 +20420 +20421 +20422 +20423 +20424 +20425 +20426 +20427 +20428 +20429 +20430 +20431 +20432 +20433 +20434 +20435 +20436 +20437 +20438 +20439 +20440 +20441 +20442 +20443 +20444 +20445 +20446 +20447 +20448 +20449 +20450 +20451 +20452 +20453 
+20454 +20455 +20456 +20457 +20458 +20459 +20460 +20461 +20462 +20463 +20464 +20465 +20466 +20467 +20468 +20469 +20470 +20471 +20472 +20473 +20474 +20475 +20476 +20477 +20478 +20479 +20480 +20481 +20482 +20483 +20484 +20485 +20486 +20487 +20488 +20489 +20490 +20491 +20492 +20493 +20494 +20495 +20496 +20497 +20498 +20499 +20500 +20501 +20502 +20503 +20504 +20505 +20506 +20507 +20508 +20509 +20510 +20511 +20512 +20513 +20514 +20515 +20516 +20517 +20518 +20519 +20520 +20521 +20522 +20523 +20524 +20525 +20526 +20527 +20528 +20529 +20530 +20531 +20532 +20533 +20534 +20535 +20536 +20537 +20538 +20539 +20540 +20541 +20542 +20543 +20544 +20545 +20546 +20547 +20548 +20549 +20550 +20551 +20552 +20553 +20554 +20555 +20556 +20557 +20558 +20559 +20560 +20561 +20562 +20563 +20564 +20565 +20566 +20567 +20568 +20569 +20570 +20571 +20572 +20573 +20574 +20575 +20576 +20577 +20578 +20579 +20580 +20581 +20582 +20583 +20584 +20585 +20586 +20587 +20588 +20589 +20590 +20591 +20592 +20593 +20594 +20595 +20596 +20597 +20598 +20599 +20600 +20601 +20602 +20603 +20604 +20605 +20606 +20607 +20608 +20609 +20610 +20611 +20612 +20613 +20614 +20615 +20616 +20617 +20618 +20619 +20620 +20621 +20622 +20623 +20624 +20625 +20626 +20627 +20628 +20629 +20630 +20631 +20632 +20633 +20634 +20635 +20636 +20637 +20638 +20639 +20640 +20641 +20642 +20643 +20644 +20645 +20646 +20647 +20648 +20649 +20650 +20651 +20652 +20653 +20654 +20655 +20656 +20657 +20658 +20659 +20660 +20661 +20662 +20663 +20664 +20665 +20666 +20667 +20668 +20669 +20670 +20671 +20672 +20673 +20674 +20675 +20676 +20677 +20678 +20679 +20680 +20681 +20682 +20683 +20684 +20685 +20686 +20687 +20688 +20689 +20690 +20691 +20692 +20693 +20694 +20695 +20696 +20697 +20698 +20699 +20700 +20701 +20702 +20703 +20704 +20705 +20706 +20707 +20708 +20709 +20710 +20711 +20712 +20713 +20714 +20715 +20716 +20717 +20718 +20719 +20720 +20721 +20722 +20723 +20724 +20725 +20726 +20727 +20728 +20729 +20730 +20731 +20732 +20733 +20734 +20735 +20736 +20737 +20738 +20739 +20740 +20741 +20742 +20743 +20744 +20745 +20746 +20747 +20748 +20749 +20750 +20751 +20752 +20753 +20754 +20755 +20756 +20757 +20758 +20759 +20760 +20761 +20762 +20763 +20764 +20765 +20766 +20767 +20768 +20769 +20770 +20771 +20772 +20773 +20774 +20775 +20776 +20777 +20778 +20779 +20780 +20781 +20782 +20783 +20784 +20785 +20786 +20787 +20788 +20789 +20790 +20791 +20792 +20793 +20794 +20795 +20796 +20797 +20798 +20799 +20800 +20801 +20802 +20803 +20804 +20805 +20806 +20807 +20808 +20809 +20810 +20811 +20812 +20813 +20814 +20815 +20816 +20817 +20818 +20819 +20820 +20821 +20822 +20823 +20824 +20825 +20826 +20827 +20828 +20829 +20830 +20831 +20832 +20833 +20834 +20835 +20836 +20837 +20838 +20839 +20840 +20841 +20842 +20843 +20844 +20845 +20846 +20847 +20848 +20849 +20850 +20851 +20852 +20853 +20854 +20855 +20856 +20857 +20858 +20859 +20860 +20861 +20862 +20863 +20864 +20865 +20866 +20867 +20868 +20869 +20870 +20871 +20872 +20873 +20874 +20875 +20876 +20877 +20878 +20879 +20880 +20881 +20882 +20883 +20884 +20885 +20886 +20887 +20888 +20889 +20890 +20891 +20892 +20893 +20894 +20895 +20896 +20897 +20898 +20899 +20900 +20901 +20902 +20903 +20904 +20905 +20906 +20907 +20908 +20909 +20910 +20911 +20912 +20913 +20914 +20915 +20916 +20917 +20918 +20919 +20920 +20921 +20922 +20923 +20924 +20925 +20926 +20927 +20928 +20929 +20930 +20931 +20932 +20933 +20934 +20935 +20936 +20937 +20938 +20939 +20940 +20941 +20942 +20943 +20944 +20945 +20946 +20947 +20948 +20949 +20950 +20951 +20952 +20953 +20954 +20955 +20956 +20957 +20958 +20959 +20960 
+20961 +20962 +20963 +20964 +20965 +20966 +20967 +20968 +20969 +20970 +20971 +20972 +20973 +20974 +20975 +20976 +20977 +20978 +20979 +20980 +20981 +20982 +20983 +20984 +20985 +20986 +20987 +20988 +20989 +20990 +20991 +20992 +20993 +20994 +20995 +20996 +20997 +20998 +20999 +21000 +21001 +21002 +21003 +21004 +21005 +21006 +21007 +21008 +21009 +21010 +21011 +21012 +21013 +21014 +21015 +21016 +21017 +21018 +21019 +21020 +21021 +21022 +21023 +21024 +21025 +21026 +21027 +21028 +21029 +21030 +21031 +21032 +21033 +21034 +21035 +21036 +21037 +21038 +21039 +21040 +21041 +21042 +21043 +21044 +21045 +21046 +21047 +21048 +21049 +21050 +21051 +21052 +21053 +21054 +21055 +21056 +21057 +21058 +21059 +21060 +21061 +21062 +21063 +21064 +21065 +21066 +21067 +21068 +21069 +21070 +21071 +21072 +21073 +21074 +21075 +21076 +21077 +21078 +21079 +21080 +21081 +21082 +21083 +21084 +21085 +21086 +21087 +21088 +21089 +21090 +21091 +21092 +21093 +21094 +21095 +21096 +21097 +21098 +21099 +21100 +21101 +21102 +21103 +21104 +21105 +21106 +21107 +21108 +21109 +21110 +21111 +21112 +21113 +21114 +21115 +21116 +21117 +21118 +21119 +21120 +21121 +21122 +21123 +21124 +21125 +21126 +21127 +21128 +21129 +21130 +21131 +21132 +21133 +21134 +21135 +21136 +21137 +21138 +21139 +21140 +21141 +21142 +21143 +21144 +21145 +21146 +21147 +21148 +21149 +21150 +21151 +21152 +21153 +21154 +21155 +21156 +21157 +21158 +21159 +21160 +21161 +21162 +21163 +21164 +21165 +21166 +21167 +21168 +21169 +21170 +21171 +21172 +21173 +21174 +21175 +21176 +21177 +21178 +21179 +21180 +21181 +21182 +21183 +21184 +21185 +21186 +21187 +21188 +21189 +21190 +21191 +21192 +21193 +21194 +21195 +21196 +21197 +21198 +21199 +21200 +21201 +21202 +21203 +21204 +21205 +21206 +21207 +21208 +21209 +21210 +21211 +21212 +21213 +21214 +21215 +21216 +21217 +21218 +21219 +21220 +21221 +21222 +21223 +21224 +21225 +21226 +21227 +21228 +21229 +21230 +21231 +21232 +21233 +21234 +21235 +21236 +21237 +21238 +21239 +21240 +21241 +21242 +21243 +21244 +21245 +21246 +21247 +21248 +21249 +21250 +21251 +21252 +21253 +21254 +21255 +21256 +21257 +21258 +21259 +21260 +21261 +21262 +21263 +21264 +21265 +21266 +21267 +21268 +21269 +21270 +21271 +21272 +21273 +21274 +21275 +21276 +21277 +21278 +21279 +21280 +21281 +21282 +21283 +21284 +21285 +21286 +21287 +21288 +21289 +21290 +21291 +21292 +21293 +21294 +21295 +21296 +21297 +21298 +21299 +21300 +21301 +21302 +21303 +21304 +21305 +21306 +21307 +21308 +21309 +21310 +21311 +21312 +21313 +21314 +21315 +21316 +21317 +21318 +21319 +21320 +21321 +21322 +21323 +21324 +21325 +21326 +21327 +21328 +21329 +21330 +21331 +21332 +21333 +21334 +21335 +21336 +21337 +21338 +21339 +21340 +21341 +21342 +21343 +21344 +21345 +21346 +21347 +21348 +21349 +21350 +21351 +21352 +21353 +21354 +21355 +21356 +21357 +21358 +21359 +21360 +21361 +21362 +21363 +21364 +21365 +21366 +21367 +21368 +21369 +21370 +21371 +21372 +21373 +21374 +21375 +21376 +21377 +21378 +21379 +21380 +21381 +21382 +21383 +21384 +21385 +21386 +21387 +21388 +21389 +21390 +21391 +21392 +21393 +21394 +21395 +21396 +21397 +21398 +21399 +21400 +21401 +21402 +21403 +21404 +21405 +21406 +21407 +21408 +21409 +21410 +21411 +21412 +21413 +21414 +21415 +21416 +21417 +21418 +21419 +21420 +21421 +21422 +21423 +21424 +21425 +21426 +21427 +21428 +21429 +21430 +21431 +21432 +21433 +21434 +21435 +21436 +21437 +21438 +21439 +21440 +21441 +21442 +21443 +21444 +21445 +21446 +21447 +21448 +21449 +21450 +21451 +21452 +21453 +21454 +21455 +21456 +21457 +21458 +21459 +21460 +21461 +21462 +21463 +21464 +21465 +21466 +21467 
+21468 +21469 +21470 +21471 +21472 +21473 +21474 +21475 +21476 +21477 +21478 +21479 +21480 +21481 +21482 +21483 +21484 +21485 +21486 +21487 +21488 +21489 +21490 +21491 +21492 +21493 +21494 +21495 +21496 +21497 +21498 +21499 +21500 +21501 +21502 +21503 +21504 +21505 +21506 +21507 +21508 +21509 +21510 +21511 +21512 +21513 +21514 +21515 +21516 +21517 +21518 +21519 +21520 +21521 +21522 +21523 +21524 +21525 +21526 +21527 +21528 +21529 +21530 +21531 +21532 +21533 +21534 +21535 +21536 +21537 +21538 +21539 +21540 +21541 +21542 +21543 +21544 +21545 +21546 +21547 +21548 +21549 +21550 +21551 +21552 +21553 +21554 +21555 +21556 +21557 +21558 +21559 +21560 +21561 +21562 +21563 +21564 +21565 +21566 +21567 +21568 +21569 +21570 +21571 +21572 +21573 +21574 +21575 +21576 +21577 +21578 +21579 +21580 +21581 +21582 +21583 +21584 +21585 +21586 +21587 +21588 +21589 +21590 +21591 +21592 +21593 +21594 +21595 +21596 +21597 +21598 +21599 +21600 +21601 +21602 +21603 +21604 +21605 +21606 +21607 +21608 +21609 +21610 +21611 +21612 +21613 +21614 +21615 +21616 +21617 +21618 +21619 +21620 +21621 +21622 +21623 +21624 +21625 +21626 +21627 +21628 +21629 +21630 +21631 +21632 +21633 +21634 +21635 +21636 +21637 +21638 +21639 +21640 +21641 +21642 +21643 +21644 +21645 +21646 +21647 +21648 +21649 +21650 +21651 +21652 +21653 +21654 +21655 +21656 +21657 +21658 +21659 +21660 +21661 +21662 +21663 +21664 +21665 +21666 +21667 +21668 +21669 +21670 +21671 +21672 +21673 +21674 +21675 +21676 +21677 +21678 +21679 +21680 +21681 +21682 +21683 +21684 +21685 +21686 +21687 +21688 +21689 +21690 +21691 +21692 +21693 +21694 +21695 +21696 +21697 +21698 +21699 +21700 +21701 +21702 +21703 +21704 +21705 +21706 +21707 +21708 +21709 +21710 +21711 +21712 +21713 +21714 +21715 +21716 +21717 +21718 +21719 +21720 +21721 +21722 +21723 +21724 +21725 +21726 +21727 +21728 +21729 +21730 +21731 +21732 +21733 +21734 +21735 +21736 +21737 +21738 +21739 +21740 +21741 +21742 +21743 +21744 +21745 +21746 +21747 +21748 +21749 +21750 +21751 +21752 +21753 +21754 +21755 +21756 +21757 +21758 +21759 +21760 +21761 +21762 +21763 +21764 +21765 +21766 +21767 +21768 +21769 +21770 +21771 +21772 +21773 +21774 +21775 +21776 +21777 +21778 +21779 +21780 +21781 +21782 +21783 +21784 +21785 +21786 +21787 +21788 +21789 +21790 +21791 +21792 +21793 +21794 +21795 +21796 +21797 +21798 +21799 +21800 +21801 +21802 +21803 +21804 +21805 +21806 +21807 +21808 +21809 +21810 +21811 +21812 +21813 +21814 +21815 +21816 +21817 +21818 +21819 +21820 +21821 +21822 +21823 +21824 +21825 +21826 +21827 +21828 +21829 +21830 +21831 +21832 +21833 +21834 +21835 +21836 +21837 +21838 +21839 +21840 +21841 +21842 diff --git a/results/imagenet22k_to_12k_rw_indices.txt b/results/imagenet22k_to_12k_rw_indices.txt new file mode 100644 index 0000000000..a63bc984b9 --- /dev/null +++ b/results/imagenet22k_to_12k_rw_indices.txt @@ -0,0 +1,11821 @@ +1 +3 +4 +5 +6 +7 +8 +9 +10 +11 +13 +14 +15 +16 +17 +18 +19 +20 +21 +23 +24 +26 +27 +28 +29 +30 +31 +32 +33 +34 +37 +38 +41 +43 +44 +45 +46 +47 +48 +49 +50 +51 +53 +55 +56 +57 +58 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +89 +90 +91 +93 +94 +95 +96 +97 +99 +100 +101 +102 +103 +105 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +123 +124 +125 +126 +127 +128 +129 +130 +131 +132 +133 +134 +135 +137 +138 +140 +141 +142 +143 +144 +146 +147 +148 +149 +151 +152 +153 +154 +156 +157 +158 +159 +161 +162 +164 +165 +166 +167 +168 +169 +170 +171 +172 +173 +175 +176 +179 +180 +181 +182 +184 +188 +192 +193 
+195 +196 +197 +199 +200 +203 +206 +209 +210 +211 +212 +213 +214 +215 +216 +217 +218 +219 +220 +221 +222 +223 +224 +225 +226 +227 +230 +231 +235 +249 +250 +251 +252 +253 +254 +289 +292 +295 +301 +306 +307 +312 +313 +315 +317 +320 +324 +325 +326 +327 +332 +341 +343 +347 +352 +353 +354 +356 +359 +360 +366 +367 +368 +369 +370 +377 +379 +380 +382 +383 +384 +385 +386 +392 +395 +398 +402 +405 +408 +410 +411 +413 +415 +416 +418 +422 +423 +424 +430 +431 +440 +441 +451 +452 +455 +456 +457 +460 +461 +464 +465 +466 +468 +469 +470 +471 +472 +473 +474 +475 +477 +479 +482 +486 +489 +490 +491 +492 +493 +496 +499 +500 +502 +503 +505 +510 +511 +512 +513 +514 +515 +516 +520 +523 +524 +525 +526 +527 +528 +529 +530 +533 +536 +538 +539 +540 +541 +542 +543 +544 +545 +546 +547 +548 +549 +550 +552 +553 +554 +555 +556 +557 +558 +559 +560 +561 +562 +563 +564 +566 +567 +568 +569 +570 +571 +572 +573 +574 +575 +576 +577 +578 +580 +581 +583 +584 +585 +586 +587 +588 +589 +590 +591 +592 +595 +596 +598 +601 +602 +603 +604 +605 +607 +608 +609 +610 +611 +612 +613 +614 +615 +616 +618 +619 +620 +621 +623 +624 +628 +629 +630 +631 +632 +634 +635 +636 +637 +638 +639 +640 +641 +643 +644 +645 +646 +647 +648 +649 +650 +651 +653 +654 +655 +656 +657 +658 +659 +660 +661 +663 +664 +665 +666 +667 +668 +669 +670 +671 +672 +673 +674 +675 +677 +678 +679 +680 +681 +682 +683 +684 +685 +686 +687 +688 +689 +691 +692 +693 +694 +695 +696 +697 +698 +700 +701 +702 +703 +704 +705 +706 +707 +708 +710 +711 +713 +714 +715 +716 +717 +718 +719 +720 +721 +722 +723 +727 +728 +730 +732 +733 +734 +736 +737 +738 +739 +740 +741 +742 +743 +744 +745 +746 +747 +748 +749 +751 +752 +753 +755 +757 +758 +759 +761 +762 +763 +764 +765 +766 +767 +768 +769 +770 +773 +774 +775 +776 +777 +778 +780 +781 +782 +783 +784 +785 +786 +787 +789 +790 +791 +792 +794 +796 +798 +799 +801 +804 +805 +807 +808 +809 +810 +811 +812 +813 +816 +817 +818 +822 +823 +824 +825 +826 +827 +828 +829 +830 +831 +832 +833 +834 +835 +836 +838 +839 +840 +841 +842 +843 +845 +846 +847 +848 +849 +850 +851 +852 +853 +854 +855 +856 +857 +858 +861 +862 +863 +864 +865 +866 +867 +868 +869 +870 +871 +872 +873 +874 +875 +876 +877 +878 +879 +880 +881 +882 +883 +884 +885 +886 +887 +888 +889 +891 +892 +894 +895 +896 +897 +899 +900 +901 +903 +904 +905 +908 +909 +910 +912 +913 +916 +919 +920 +922 +925 +931 +932 +933 +934 +935 +936 +939 +941 +944 +945 +946 +947 +949 +950 +951 +952 +953 +954 +955 +958 +960 +961 +963 +964 +968 +969 +970 +971 +976 +979 +983 +986 +990 +991 +992 +993 +994 +995 +996 +997 +998 +999 +1000 +1001 +1002 +1003 +1004 +1005 +1006 +1007 +1008 +1009 +1010 +1011 +1012 +1013 +1014 +1015 +1016 +1017 +1019 +1022 +1024 +1025 +1027 +1029 +1030 +1031 +1032 +1035 +1036 +1037 +1038 +1039 +1040 +1041 +1043 +1044 +1045 +1046 +1047 +1048 +1050 +1051 +1052 +1055 +1056 +1063 +1064 +1065 +1067 +1069 +1070 +1071 +1072 +1075 +1076 +1078 +1079 +1080 +1081 +1083 +1084 +1085 +1086 +1087 +1088 +1089 +1092 +1093 +1094 +1095 +1097 +1099 +1106 +1121 +1140 +1141 +1143 +1144 +1145 +1147 +1148 +1149 +1150 +1151 +1152 +1155 +1157 +1159 +1160 +1161 +1164 +1165 +1166 +1167 +1168 +1169 +1170 +1171 +1172 +1173 +1178 +1179 +1180 +1181 +1182 +1184 +1187 +1190 +1191 +1193 +1195 +1196 +1197 +1199 +1200 +1201 +1202 +1203 +1204 +1205 +1207 +1208 +1209 +1211 +1214 +1215 +1216 +1217 +1218 +1219 +1220 +1221 +1222 +1223 +1224 +1225 +1227 +1229 +1230 +1231 +1232 +1233 +1234 +1235 +1236 +1237 +1238 +1239 +1240 +1241 +1242 +1244 +1245 +1246 +1247 +1249 +1250 +1251 +1252 +1253 +1254 +1256 +1257 +1258 +1259 +1260 +1261 +1263 +1265 +1266 +1267 
+1268 +1269 +1271 +1272 +1273 +1274 +1277 +1279 +1283 +1287 +1289 +1298 +1299 +1303 +1304 +1305 +1308 +1313 +1318 +1320 +1323 +1324 +1325 +1326 +1327 +1328 +1330 +1332 +1333 +1335 +1337 +1339 +1340 +1341 +1342 +1343 +1344 +1345 +1349 +1350 +1351 +1352 +1353 +1354 +1355 +1356 +1357 +1358 +1359 +1362 +1364 +1369 +1372 +1373 +1376 +1377 +1378 +1380 +1382 +1384 +1385 +1386 +1387 +1388 +1389 +1390 +1391 +1392 +1393 +1396 +1397 +1398 +1399 +1402 +1404 +1405 +1406 +1407 +1408 +1409 +1411 +1412 +1413 +1416 +1417 +1420 +1424 +1425 +1426 +1427 +1428 +1429 +1430 +1431 +1432 +1433 +1434 +1435 +1436 +1437 +1439 +1440 +1442 +1443 +1445 +1446 +1448 +1450 +1452 +1454 +1455 +1457 +1458 +1459 +1460 +1461 +1462 +1463 +1464 +1466 +1469 +1470 +1474 +1475 +1476 +1477 +1482 +1485 +1486 +1487 +1488 +1489 +1491 +1493 +1494 +1495 +1496 +1497 +1499 +1500 +1502 +1503 +1504 +1505 +1506 +1508 +1509 +1511 +1512 +1513 +1514 +1515 +1516 +1517 +1518 +1519 +1520 +1521 +1522 +1523 +1524 +1525 +1526 +1527 +1528 +1529 +1530 +1531 +1532 +1533 +1534 +1535 +1536 +1537 +1538 +1539 +1540 +1541 +1542 +1543 +1544 +1545 +1546 +1547 +1548 +1549 +1550 +1551 +1552 +1553 +1554 +1555 +1556 +1557 +1558 +1559 +1560 +1561 +1562 +1563 +1564 +1565 +1566 +1567 +1568 +1569 +1570 +1571 +1572 +1573 +1574 +1575 +1576 +1577 +1578 +1582 +1583 +1584 +1586 +1587 +1588 +1589 +1590 +1591 +1592 +1594 +1595 +1597 +1598 +1599 +1600 +1603 +1604 +1605 +1611 +1614 +1615 +1616 +1622 +1624 +1626 +1627 +1628 +1629 +1630 +1631 +1632 +1633 +1634 +1636 +1643 +1644 +1652 +1656 +1659 +1662 +1663 +1665 +1667 +1668 +1669 +1671 +1672 +1679 +1681 +1688 +1692 +1693 +1694 +1695 +1696 +1697 +1698 +1700 +1701 +1702 +1703 +1704 +1709 +1712 +1716 +1729 +1739 +1742 +1747 +1748 +1750 +1754 +1755 +1757 +1758 +1759 +1760 +1761 +1762 +1764 +1767 +1770 +1771 +1773 +1774 +1777 +1778 +1779 +1782 +1783 +1784 +1786 +1787 +1788 +1789 +1790 +1791 +1792 +1793 +1795 +1797 +1798 +1799 +1800 +1803 +1806 +1808 +1809 +1810 +1811 +1814 +1815 +1822 +1824 +1825 +1827 +1831 +1833 +1835 +1836 +1837 +1841 +1842 +1847 +1848 +1850 +1852 +1853 +1854 +1856 +1859 +1860 +1861 +1862 +1864 +1865 +1867 +1874 +1876 +1877 +1878 +1881 +1884 +1891 +1892 +1893 +1895 +1896 +1897 +1898 +1899 +1900 +1901 +1902 +1903 +1904 +1905 +1906 +1907 +1908 +1909 +1910 +1911 +1912 +1913 +1914 +1915 +1916 +1917 +1918 +1919 +1920 +1921 +1922 +1923 +1924 +1925 +1926 +1927 +1928 +1929 +1930 +1931 +1932 +1933 +1934 +1935 +1936 +1937 +1938 +1939 +1940 +1942 +1943 +1944 +1945 +1946 +1947 +1948 +1949 +1950 +1951 +1952 +1953 +1954 +1956 +1959 +1961 +1962 +1963 +1964 +1965 +1966 +1967 +1968 +1969 +1970 +1971 +1972 +1973 +1974 +1975 +1976 +1977 +1978 +1979 +1980 +1981 +1982 +1983 +1984 +1985 +1986 +1987 +1988 +1990 +1992 +1993 +1995 +1996 +1997 +1998 +1999 +2001 +2002 +2004 +2005 +2007 +2008 +2009 +2010 +2011 +2014 +2016 +2017 +2018 +2019 +2021 +2022 +2023 +2026 +2028 +2029 +2030 +2031 +2032 +2033 +2034 +2035 +2036 +2037 +2038 +2039 +2040 +2041 +2042 +2043 +2044 +2045 +2046 +2047 +2048 +2049 +2050 +2051 +2052 +2053 +2054 +2055 +2056 +2058 +2060 +2061 +2062 +2063 +2064 +2065 +2067 +2068 +2069 +2070 +2071 +2072 +2073 +2074 +2075 +2076 +2077 +2078 +2079 +2080 +2081 +2082 +2083 +2084 +2085 +2087 +2088 +2090 +2093 +2094 +2095 +2096 +2100 +2101 +2102 +2103 +2104 +2106 +2107 +2108 +2109 +2110 +2112 +2113 +2114 +2118 +2119 +2120 +2121 +2122 +2123 +2124 +2128 +2129 +2130 +2132 +2134 +2135 +2137 +2138 +2139 +2140 +2141 +2142 +2143 +2144 +2145 +2146 +2147 +2148 +2149 +2150 +2151 +2152 +2153 +2154 +2155 +2156 +2158 +2159 +2163 +2164 +2165 +2167 +2168 
+2169 +2172 +2173 +2174 +2176 +2177 +2178 +2180 +2181 +2182 +2183 +2184 +2185 +2187 +2188 +2189 +2190 +2191 +2192 +2193 +2195 +2198 +2199 +2200 +2203 +2206 +2207 +2208 +2209 +2210 +2211 +2212 +2213 +2214 +2216 +2217 +2219 +2220 +2221 +2222 +2223 +2224 +2225 +2226 +2227 +2228 +2229 +2230 +2231 +2232 +2233 +2234 +2236 +2237 +2238 +2239 +2240 +2241 +2242 +2243 +2244 +2245 +2246 +2247 +2248 +2249 +2250 +2251 +2252 +2253 +2255 +2256 +2257 +2258 +2259 +2260 +2261 +2262 +2263 +2264 +2265 +2266 +2267 +2268 +2269 +2270 +2271 +2272 +2273 +2274 +2275 +2276 +2278 +2279 +2280 +2281 +2282 +2283 +2285 +2287 +2288 +2289 +2291 +2292 +2293 +2294 +2295 +2296 +2297 +2298 +2299 +2300 +2301 +2302 +2303 +2304 +2305 +2306 +2307 +2308 +2309 +2310 +2311 +2312 +2313 +2314 +2315 +2316 +2317 +2318 +2319 +2320 +2321 +2322 +2326 +2328 +2329 +2330 +2331 +2332 +2334 +2335 +2336 +2337 +2338 +2339 +2340 +2341 +2342 +2343 +2344 +2345 +2347 +2348 +2349 +2350 +2351 +2352 +2353 +2356 +2357 +2358 +2359 +2360 +2362 +2363 +2364 +2365 +2368 +2369 +2370 +2372 +2374 +2377 +2380 +2381 +2382 +2383 +2385 +2386 +2387 +2388 +2389 +2390 +2391 +2392 +2393 +2395 +2396 +2397 +2398 +2399 +2400 +2401 +2402 +2403 +2404 +2405 +2407 +2408 +2409 +2410 +2411 +2412 +2413 +2416 +2417 +2419 +2420 +2421 +2422 +2423 +2424 +2425 +2426 +2427 +2428 +2430 +2431 +2432 +2433 +2434 +2436 +2437 +2438 +2439 +2441 +2444 +2445 +2447 +2448 +2449 +2450 +2452 +2453 +2454 +2456 +2459 +2461 +2463 +2465 +2469 +2470 +2471 +2472 +2473 +2474 +2494 +2495 +2497 +2498 +2499 +2500 +2505 +2509 +2512 +2513 +2515 +2519 +2520 +2522 +2523 +2525 +2526 +2528 +2530 +2531 +2532 +2533 +2534 +2536 +2537 +2538 +2540 +2542 +2544 +2545 +2547 +2548 +2549 +2557 +2558 +2561 +2562 +2563 +2565 +2567 +2568 +2569 +2570 +2571 +2572 +2573 +2578 +2587 +2588 +2589 +2590 +2595 +2597 +2598 +2609 +2612 +2613 +2615 +2616 +2617 +2618 +2625 +2626 +2627 +2628 +2630 +2631 +2635 +2638 +2639 +2641 +2642 +2644 +2645 +2649 +2654 +2655 +2656 +2658 +2659 +2660 +2663 +2664 +2665 +2666 +2668 +2669 +2670 +2672 +2674 +2675 +2677 +2679 +2680 +2681 +2682 +2683 +2684 +2686 +2689 +2691 +2692 +2693 +2694 +2696 +2699 +2702 +2705 +2706 +2707 +2708 +2712 +2715 +2722 +2723 +2724 +2725 +2727 +2728 +2730 +2731 +2732 +2734 +2737 +2738 +2739 +2741 +2742 +2743 +2745 +2747 +2748 +2749 +2750 +2752 +2760 +2761 +2762 +2764 +2767 +2770 +2774 +2778 +2780 +2791 +2795 +2796 +2805 +2810 +2812 +2814 +2815 +2818 +2820 +2828 +2829 +2832 +2833 +2835 +2837 +2840 +2843 +2844 +2845 +2852 +2859 +2860 +2861 +2862 +2863 +2864 +2865 +2866 +2867 +2868 +2869 +2870 +2871 +2872 +2874 +2875 +2876 +2878 +2879 +2880 +2881 +2882 +2884 +2885 +2886 +2888 +2889 +2890 +2891 +2892 +2893 +2894 +2895 +2897 +2899 +2900 +2903 +2904 +2907 +2910 +2913 +2914 +2916 +2923 +2926 +2932 +2933 +2940 +2944 +2945 +2947 +2949 +2950 +2953 +2955 +2956 +2957 +2958 +2959 +2960 +2963 +2964 +2967 +2970 +2974 +2976 +2979 +2980 +2982 +2984 +2985 +2989 +2990 +2991 +2992 +2993 +2994 +2996 +2999 +3000 +3002 +3005 +3007 +3008 +3009 +3010 +3012 +3013 +3014 +3018 +3019 +3020 +3022 +3024 +3025 +3026 +3027 +3028 +3029 +3030 +3033 +3035 +3036 +3039 +3040 +3042 +3043 +3046 +3047 +3048 +3051 +3053 +3055 +3056 +3059 +3060 +3067 +3069 +3074 +3079 +3086 +3088 +3091 +3093 +3094 +3106 +3111 +3117 +3125 +3129 +3134 +3135 +3136 +3137 +3138 +3139 +3140 +3141 +3142 +3143 +3144 +3145 +3146 +3148 +3149 +3150 +3151 +3153 +3154 +3159 +3160 +3161 +3164 +3165 +3166 +3168 +3169 +3170 +3171 +3172 +3173 +3176 +3177 +3182 +3188 +3191 +3192 +3193 +3194 +3195 +3196 +3200 +3201 +3202 +3203 +3204 +3205 +3206 +3207 +3208 
+3209 +3210 +3214 +3218 +3219 +3220 +3221 +3222 +3223 +3225 +3226 +3227 +3228 +3229 +3230 +3231 +3232 +3234 +3235 +3236 +3237 +3238 +3239 +3240 +3241 +3242 +3243 +3244 +3245 +3246 +3247 +3248 +3253 +3258 +3259 +3260 +3261 +3262 +3264 +3265 +3266 +3267 +3268 +3270 +3271 +3273 +3274 +3277 +3278 +3279 +3280 +3281 +3282 +3283 +3284 +3285 +3288 +3289 +3291 +3292 +3296 +3297 +3298 +3299 +3301 +3302 +3304 +3305 +3306 +3307 +3308 +3309 +3310 +3311 +3312 +3315 +3316 +3318 +3320 +3321 +3322 +3324 +3325 +3327 +3328 +3329 +3330 +3332 +3333 +3334 +3335 +3337 +3339 +3340 +3341 +3342 +3343 +3344 +3345 +3348 +3349 +3351 +3352 +3353 +3354 +3355 +3356 +3358 +3360 +3361 +3362 +3363 +3365 +3366 +3368 +3371 +3373 +3375 +3376 +3377 +3378 +3379 +3380 +3381 +3382 +3383 +3384 +3389 +3390 +3392 +3397 +3398 +3400 +3401 +3404 +3405 +3406 +3407 +3408 +3409 +3410 +3411 +3412 +3413 +3415 +3416 +3417 +3419 +3421 +3424 +3425 +3426 +3427 +3428 +3429 +3430 +3431 +3432 +3433 +3434 +3435 +3436 +3438 +3439 +3440 +3441 +3444 +3446 +3448 +3450 +3451 +3452 +3454 +3455 +3456 +3458 +3459 +3461 +3462 +3463 +3466 +3467 +3468 +3469 +3471 +3472 +3473 +3474 +3475 +3476 +3477 +3478 +3479 +3481 +3482 +3485 +3492 +3493 +3494 +3495 +3497 +3498 +3499 +3500 +3501 +3502 +3503 +3505 +3509 +3510 +3511 +3512 +3513 +3517 +3518 +3519 +3520 +3521 +3522 +3526 +3527 +3528 +3533 +3536 +3544 +3546 +3547 +3553 +3554 +3555 +3556 +3559 +3560 +3562 +3563 +3565 +3566 +3567 +3568 +3569 +3574 +3575 +3576 +3584 +3585 +3587 +3599 +3600 +3601 +3602 +3603 +3604 +3605 +3606 +3608 +3609 +3610 +3612 +3613 +3614 +3615 +3616 +3619 +3622 +3623 +3624 +3625 +3627 +3628 +3629 +3630 +3632 +3633 +3634 +3635 +3636 +3638 +3640 +3641 +3644 +3646 +3649 +3650 +3651 +3655 +3656 +3659 +3660 +3662 +3663 +3665 +3671 +3673 +3674 +3683 +3684 +3686 +3687 +3688 +3689 +3690 +3692 +3694 +3695 +3702 +3705 +3707 +3709 +3711 +3714 +3715 +3716 +3720 +3725 +3727 +3731 +3733 +3736 +3737 +3738 +3744 +3746 +3747 +3750 +3753 +3756 +3758 +3761 +3763 +3764 +3765 +3766 +3767 +3768 +3769 +3770 +3771 +3772 +3773 +3774 +3775 +3782 +3785 +3787 +3790 +3798 +3801 +3803 +3812 +3814 +3815 +3816 +3817 +3818 +3819 +3825 +3826 +3827 +3828 +3829 +3832 +3833 +3836 +3837 +3838 +3840 +3842 +3844 +3845 +3846 +3852 +3853 +3854 +3855 +3858 +3860 +3864 +3865 +3867 +3868 +3873 +3874 +3877 +3882 +3883 +3884 +3887 +3888 +3889 +3890 +3894 +3899 +3900 +3901 +3902 +3904 +3908 +3910 +3916 +3918 +3920 +3925 +3928 +3936 +3937 +3939 +3943 +3947 +3948 +3949 +3950 +3951 +3956 +3962 +3963 +3968 +3969 +3970 +3971 +3972 +3974 +3975 +3976 +3977 +3984 +3986 +3988 +3991 +4001 +4005 +4006 +4007 +4009 +4018 +4019 +4020 +4021 +4022 +4023 +4024 +4026 +4028 +4030 +4031 +4032 +4033 +4036 +4038 +4039 +4040 +4041 +4042 +4043 +4062 +4063 +4065 +4066 +4067 +4068 +4071 +4073 +4074 +4075 +4089 +4090 +4094 +4096 +4097 +4099 +4100 +4101 +4102 +4104 +4105 +4107 +4109 +4110 +4112 +4118 +4120 +4129 +4136 +4137 +4138 +4139 +4140 +4141 +4142 +4143 +4144 +4148 +4150 +4151 +4152 +4153 +4154 +4155 +4158 +4159 +4161 +4165 +4167 +4171 +4174 +4176 +4178 +4179 +4181 +4182 +4183 +4185 +4187 +4189 +4190 +4191 +4192 +4198 +4202 +4203 +4204 +4205 +4206 +4207 +4208 +4210 +4211 +4212 +4213 +4214 +4215 +4216 +4217 +4219 +4221 +4222 +4223 +4226 +4227 +4230 +4232 +4233 +4235 +4237 +4242 +4244 +4248 +4249 +4250 +4251 +4252 +4253 +4254 +4255 +4256 +4259 +4261 +4262 +4263 +4264 +4265 +4266 +4267 +4269 +4270 +4272 +4273 +4274 +4276 +4277 +4278 +4280 +4281 +4282 +4283 +4284 +4285 +4290 +4292 +4296 +4297 +4298 +4299 +4301 +4304 +4306 +4307 +4308 +4309 +4310 +4311 +4312 +4313 
+4315 +4317 +4318 +4321 +4323 +4324 +4325 +4326 +4327 +4328 +4329 +4330 +4331 +4332 +4334 +4335 +4336 +4338 +4340 +4341 +4344 +4345 +4346 +4349 +4350 +4351 +4352 +4354 +4355 +4356 +4358 +4361 +4362 +4363 +4365 +4366 +4369 +4373 +4374 +4378 +4379 +4380 +4386 +4389 +4390 +4391 +4395 +4396 +4399 +4400 +4401 +4403 +4404 +4406 +4407 +4408 +4410 +4412 +4414 +4416 +4417 +4418 +4419 +4420 +4421 +4423 +4425 +4426 +4427 +4428 +4430 +4431 +4432 +4434 +4435 +4436 +4438 +4439 +4440 +4441 +4442 +4444 +4445 +4450 +4451 +4453 +4454 +4455 +4456 +4458 +4459 +4462 +4463 +4464 +4465 +4466 +4467 +4468 +4469 +4470 +4471 +4473 +4474 +4475 +4476 +4477 +4478 +4479 +4481 +4483 +4484 +4485 +4486 +4487 +4489 +4490 +4491 +4493 +4494 +4495 +4496 +4497 +4498 +4499 +4500 +4501 +4504 +4505 +4506 +4508 +4509 +4510 +4511 +4512 +4515 +4518 +4519 +4521 +4522 +4529 +4530 +4531 +4533 +4536 +4538 +4539 +4540 +4542 +4543 +4544 +4545 +4546 +4547 +4549 +4550 +4551 +4552 +4555 +4556 +4559 +4560 +4561 +4562 +4565 +4567 +4568 +4569 +4570 +4571 +4572 +4574 +4576 +4577 +4579 +4580 +4583 +4585 +4587 +4588 +4591 +4594 +4595 +4596 +4599 +4600 +4603 +4604 +4605 +4606 +4608 +4609 +4610 +4611 +4612 +4613 +4614 +4617 +4618 +4619 +4620 +4621 +4622 +4623 +4624 +4625 +4626 +4627 +4628 +4629 +4631 +4632 +4633 +4634 +4635 +4636 +4639 +4640 +4641 +4642 +4646 +4647 +4648 +4649 +4650 +4651 +4652 +4655 +4656 +4662 +4663 +4664 +4665 +4666 +4667 +4668 +4669 +4670 +4671 +4672 +4676 +4677 +4678 +4679 +4680 +4681 +4683 +4685 +4686 +4687 +4688 +4690 +4691 +4692 +4694 +4695 +4696 +4699 +4702 +4705 +4708 +4709 +4710 +4711 +4712 +4714 +4715 +4716 +4717 +4719 +4722 +4723 +4724 +4725 +4726 +4727 +4728 +4729 +4730 +4732 +4733 +4734 +4736 +4737 +4739 +4740 +4743 +4746 +4748 +4750 +4751 +4752 +4756 +4758 +4759 +4760 +4761 +4762 +4768 +4770 +4771 +4773 +4774 +4775 +4777 +4778 +4779 +4780 +4781 +4783 +4789 +4790 +4793 +4795 +4797 +4798 +4799 +4800 +4801 +4802 +4804 +4806 +4807 +4808 +4812 +4813 +4814 +4815 +4816 +4818 +4819 +4824 +4829 +4831 +4833 +4836 +4837 +4839 +4840 +4842 +4843 +4844 +4847 +4848 +4849 +4851 +4852 +4853 +4854 +4855 +4860 +4861 +4863 +4864 +4865 +4866 +4867 +4869 +4871 +4874 +4875 +4877 +4878 +4879 +4880 +4883 +4884 +4885 +4886 +4887 +4888 +4890 +4894 +4895 +4896 +4897 +4900 +4901 +4903 +4905 +4906 +4908 +4909 +4910 +4912 +4913 +4916 +4917 +4921 +4922 +4923 +4924 +4925 +4926 +4927 +4928 +4929 +4931 +4932 +4933 +4934 +4935 +4936 +4938 +4939 +4940 +4941 +4942 +4943 +4945 +4946 +4947 +4950 +4951 +4953 +4957 +4958 +4960 +4961 +4964 +4965 +4967 +4968 +4970 +4972 +4973 +4976 +4977 +4978 +4979 +4980 +4981 +4982 +4984 +4985 +4986 +4987 +4989 +4990 +4991 +4993 +4994 +4998 +4999 +5001 +5002 +5003 +5004 +5005 +5007 +5008 +5009 +5011 +5012 +5016 +5017 +5020 +5021 +5022 +5023 +5025 +5026 +5027 +5028 +5029 +5031 +5033 +5034 +5037 +5038 +5039 +5041 +5042 +5043 +5046 +5047 +5048 +5051 +5055 +5057 +5060 +5061 +5062 +5063 +5064 +5065 +5068 +5071 +5072 +5073 +5076 +5078 +5079 +5081 +5083 +5084 +5086 +5088 +5090 +5091 +5092 +5093 +5094 +5096 +5098 +5100 +5101 +5102 +5104 +5105 +5109 +5111 +5112 +5114 +5115 +5117 +5119 +5120 +5121 +5122 +5123 +5124 +5125 +5126 +5127 +5129 +5130 +5131 +5132 +5133 +5134 +5135 +5137 +5138 +5139 +5141 +5142 +5143 +5144 +5146 +5148 +5149 +5151 +5153 +5154 +5156 +5157 +5158 +5162 +5163 +5165 +5167 +5168 +5172 +5174 +5175 +5176 +5178 +5179 +5180 +5181 +5183 +5184 +5185 +5186 +5187 +5189 +5191 +5193 +5195 +5196 +5198 +5199 +5201 +5202 +5203 +5204 +5205 +5206 +5207 +5208 +5209 +5210 +5211 +5212 +5213 +5215 +5216 +5217 +5218 +5219 +5221 +5222 
+5223 +5224 +5225 +5226 +5227 +5231 +5234 +5235 +5237 +5239 +5240 +5247 +5248 +5249 +5250 +5253 +5254 +5255 +5256 +5258 +5259 +5264 +5265 +5266 +5267 +5269 +5270 +5272 +5273 +5275 +5277 +5278 +5282 +5284 +5288 +5290 +5291 +5292 +5293 +5294 +5295 +5296 +5297 +5298 +5299 +5300 +5301 +5302 +5306 +5307 +5311 +5312 +5313 +5314 +5315 +5316 +5317 +5319 +5320 +5321 +5322 +5323 +5326 +5328 +5329 +5330 +5331 +5332 +5333 +5334 +5335 +5336 +5338 +5339 +5340 +5341 +5343 +5344 +5345 +5346 +5347 +5348 +5353 +5357 +5358 +5360 +5362 +5363 +5364 +5369 +5372 +5373 +5375 +5377 +5378 +5379 +5381 +5385 +5386 +5387 +5388 +5389 +5390 +5391 +5392 +5393 +5395 +5398 +5399 +5400 +5401 +5402 +5403 +5406 +5407 +5410 +5411 +5412 +5413 +5417 +5418 +5419 +5420 +5421 +5422 +5423 +5425 +5426 +5427 +5428 +5429 +5430 +5431 +5432 +5434 +5435 +5437 +5439 +5441 +5443 +5444 +5445 +5446 +5447 +5448 +5450 +5451 +5454 +5455 +5456 +5461 +5463 +5466 +5467 +5471 +5472 +5473 +5474 +5475 +5476 +5477 +5478 +5481 +5482 +5483 +5484 +5485 +5486 +5487 +5488 +5489 +5491 +5493 +5494 +5495 +5496 +5497 +5498 +5499 +5501 +5503 +5504 +5505 +5506 +5507 +5508 +5510 +5511 +5514 +5515 +5517 +5519 +5520 +5521 +5522 +5524 +5529 +5530 +5531 +5532 +5535 +5538 +5540 +5541 +5542 +5544 +5547 +5548 +5549 +5550 +5551 +5552 +5553 +5554 +5555 +5557 +5561 +5563 +5564 +5565 +5566 +5567 +5568 +5569 +5570 +5572 +5574 +5575 +5576 +5577 +5578 +5579 +5580 +5583 +5584 +5586 +5590 +5591 +5592 +5593 +5594 +5595 +5596 +5597 +5598 +5603 +5604 +5606 +5607 +5608 +5609 +5610 +5612 +5613 +5614 +5615 +5617 +5619 +5620 +5621 +5622 +5623 +5624 +5625 +5626 +5627 +5629 +5630 +5631 +5633 +5634 +5635 +5636 +5638 +5639 +5642 +5643 +5647 +5652 +5654 +5656 +5657 +5658 +5659 +5660 +5661 +5663 +5664 +5665 +5667 +5669 +5671 +5672 +5673 +5674 +5676 +5677 +5682 +5683 +5685 +5688 +5690 +5691 +5692 +5694 +5695 +5696 +5697 +5698 +5699 +5701 +5702 +5703 +5704 +5705 +5708 +5709 +5711 +5712 +5713 +5714 +5715 +5716 +5717 +5718 +5725 +5727 +5729 +5736 +5737 +5738 +5741 +5742 +5743 +5748 +5752 +5753 +5754 +5755 +5757 +5758 +5759 +5760 +5761 +5764 +5765 +5766 +5767 +5768 +5769 +5770 +5772 +5773 +5774 +5776 +5777 +5778 +5779 +5782 +5784 +5785 +5786 +5787 +5788 +5789 +5790 +5791 +5792 +5793 +5797 +5798 +5802 +5803 +5804 +5805 +5807 +5808 +5809 +5810 +5811 +5812 +5814 +5816 +5817 +5818 +5823 +5824 +5825 +5828 +5829 +5830 +5831 +5832 +5836 +5837 +5841 +5843 +5845 +5846 +5847 +5848 +5849 +5850 +5851 +5853 +5855 +5857 +5858 +5859 +5860 +5861 +5862 +5863 +5866 +5867 +5868 +5871 +5872 +5873 +5874 +5875 +5879 +5881 +5884 +5885 +5887 +5888 +5891 +5892 +5893 +5896 +5897 +5898 +5899 +5900 +5902 +5904 +5905 +5906 +5907 +5910 +5911 +5912 +5913 +5914 +5915 +5918 +5919 +5920 +5921 +5922 +5924 +5927 +5928 +5931 +5932 +5934 +5935 +5940 +5941 +5942 +5944 +5947 +5949 +5950 +5951 +5952 +5954 +5955 +5956 +5957 +5960 +5961 +5962 +5964 +5965 +5967 +5968 +5969 +5973 +5974 +5976 +5977 +5980 +5981 +5985 +5986 +5987 +5988 +5990 +5991 +5994 +5995 +5996 +5997 +5998 +5999 +6001 +6003 +6004 +6005 +6006 +6008 +6009 +6010 +6012 +6013 +6015 +6016 +6017 +6020 +6021 +6023 +6024 +6025 +6026 +6027 +6028 +6029 +6030 +6032 +6033 +6037 +6040 +6041 +6042 +6043 +6044 +6046 +6047 +6048 +6049 +6050 +6054 +6055 +6056 +6057 +6063 +6065 +6069 +6070 +6072 +6075 +6076 +6077 +6079 +6082 +6083 +6084 +6086 +6087 +6092 +6099 +6102 +6103 +6105 +6109 +6110 +6111 +6114 +6115 +6116 +6118 +6120 +6122 +6124 +6125 +6128 +6129 +6134 +6139 +6140 +6144 +6146 +6147 +6148 +6152 +6153 +6154 +6157 +6158 +6160 +6167 +6168 +6173 +6174 +6175 +6177 +6179 +6180 +6184 +6190 
+6191 +6192 +6198 +6201 +6202 +6203 +6204 +6205 +6207 +6210 +6211 +6212 +6214 +6215 +6216 +6217 +6219 +6224 +6225 +6226 +6227 +6228 +6230 +6232 +6234 +6235 +6236 +6237 +6238 +6239 +6241 +6242 +6243 +6248 +6251 +6252 +6253 +6255 +6256 +6259 +6260 +6262 +6266 +6270 +6272 +6273 +6274 +6275 +6281 +6284 +6285 +6286 +6288 +6289 +6290 +6291 +6294 +6297 +6298 +6299 +6300 +6301 +6302 +6303 +6304 +6305 +6306 +6307 +6308 +6309 +6312 +6315 +6319 +6321 +6325 +6326 +6327 +6330 +6331 +6334 +6335 +6336 +6338 +6339 +6340 +6341 +6342 +6343 +6344 +6345 +6347 +6348 +6349 +6350 +6352 +6355 +6356 +6359 +6362 +6363 +6364 +6365 +6367 +6372 +6376 +6378 +6379 +6383 +6385 +6386 +6387 +6388 +6389 +6390 +6392 +6393 +6394 +6395 +6396 +6397 +6398 +6399 +6400 +6401 +6404 +6405 +6407 +6408 +6411 +6412 +6414 +6417 +6418 +6420 +6421 +6422 +6423 +6425 +6426 +6430 +6431 +6433 +6435 +6437 +6439 +6440 +6441 +6442 +6444 +6447 +6448 +6449 +6450 +6451 +6452 +6453 +6454 +6455 +6456 +6458 +6459 +6460 +6462 +6464 +6465 +6467 +6468 +6469 +6470 +6471 +6474 +6475 +6477 +6478 +6479 +6480 +6481 +6482 +6483 +6488 +6490 +6492 +6493 +6495 +6496 +6499 +6500 +6503 +6505 +6506 +6510 +6511 +6513 +6514 +6515 +6517 +6518 +6521 +6522 +6523 +6527 +6531 +6533 +6534 +6535 +6536 +6537 +6540 +6541 +6545 +6546 +6547 +6550 +6551 +6553 +6554 +6556 +6558 +6559 +6560 +6561 +6562 +6563 +6567 +6568 +6571 +6572 +6573 +6574 +6575 +6576 +6577 +6578 +6579 +6583 +6587 +6589 +6590 +6591 +6593 +6594 +6595 +6596 +6597 +6598 +6600 +6601 +6602 +6604 +6605 +6608 +6611 +6612 +6613 +6614 +6615 +6616 +6617 +6618 +6619 +6620 +6621 +6622 +6623 +6629 +6632 +6636 +6638 +6639 +6640 +6643 +6648 +6649 +6651 +6653 +6654 +6655 +6658 +6660 +6661 +6662 +6663 +6665 +6667 +6668 +6669 +6670 +6673 +6674 +6675 +6676 +6677 +6678 +6679 +6681 +6682 +6683 +6686 +6687 +6691 +6692 +6693 +6694 +6695 +6696 +6698 +6700 +6702 +6703 +6705 +6706 +6707 +6708 +6709 +6710 +6712 +6713 +6715 +6716 +6718 +6720 +6721 +6722 +6723 +6725 +6726 +6728 +6735 +6737 +6739 +6740 +6741 +6743 +6744 +6745 +6746 +6747 +6748 +6749 +6751 +6752 +6753 +6754 +6757 +6758 +6763 +6764 +6765 +6766 +6767 +6768 +6770 +6772 +6773 +6774 +6775 +6776 +6778 +6779 +6781 +6783 +6784 +6785 +6786 +6787 +6788 +6791 +6794 +6795 +6797 +6798 +6799 +6800 +6804 +6805 +6806 +6807 +6808 +6809 +6810 +6813 +6814 +6815 +6820 +6822 +6823 +6825 +6826 +6829 +6830 +6831 +6833 +6834 +6837 +6838 +6840 +6841 +6846 +6847 +6850 +6851 +6855 +6857 +6858 +6860 +6863 +6864 +6865 +6866 +6867 +6868 +6870 +6875 +6876 +6877 +6878 +6879 +6880 +6882 +6885 +6886 +6887 +6889 +6890 +6892 +6894 +6898 +6900 +6901 +6902 +6905 +6908 +6909 +6912 +6915 +6916 +6917 +6919 +6920 +6925 +6926 +6928 +6929 +6930 +6931 +6932 +6934 +6935 +6936 +6937 +6939 +6940 +6941 +6944 +6945 +6946 +6950 +6951 +6952 +6953 +6954 +6956 +6958 +6959 +6960 +6961 +6964 +6965 +6966 +6968 +6969 +6973 +6974 +6978 +6980 +6981 +6982 +6985 +6986 +6987 +6990 +6991 +6993 +6994 +6995 +6996 +6997 +6998 +6999 +7000 +7002 +7003 +7004 +7009 +7010 +7011 +7013 +7017 +7018 +7019 +7025 +7026 +7029 +7031 +7038 +7039 +7041 +7042 +7044 +7045 +7046 +7048 +7049 +7050 +7051 +7052 +7055 +7056 +7057 +7059 +7062 +7063 +7064 +7066 +7068 +7069 +7072 +7073 +7075 +7076 +7077 +7078 +7079 +7081 +7082 +7083 +7084 +7085 +7087 +7088 +7090 +7091 +7092 +7093 +7095 +7096 +7097 +7098 +7099 +7100 +7101 +7103 +7104 +7107 +7108 +7110 +7111 +7112 +7113 +7115 +7116 +7117 +7118 +7120 +7121 +7122 +7123 +7126 +7127 +7128 +7129 +7134 +7135 +7136 +7137 +7138 +7142 +7150 +7152 +7153 +7154 +7155 +7156 +7158 +7160 +7161 +7162 +7163 +7164 +7165 +7166 +7167 
+7168 +7169 +7170 +7171 +7172 +7173 +7175 +7176 +7177 +7178 +7180 +7181 +7182 +7183 +7186 +7189 +7192 +7193 +7194 +7195 +7196 +7198 +7199 +7200 +7201 +7202 +7203 +7204 +7205 +7206 +7207 +7208 +7212 +7213 +7214 +7215 +7216 +7217 +7218 +7219 +7220 +7222 +7223 +7224 +7225 +7226 +7228 +7230 +7231 +7232 +7237 +7238 +7239 +7241 +7242 +7243 +7244 +7245 +7246 +7247 +7250 +7254 +7256 +7257 +7258 +7259 +7260 +7261 +7263 +7264 +7266 +7267 +7268 +7270 +7271 +7273 +7276 +7277 +7278 +7279 +7280 +7282 +7283 +7284 +7285 +7286 +7287 +7288 +7289 +7290 +7291 +7292 +7293 +7294 +7297 +7299 +7301 +7302 +7305 +7306 +7307 +7309 +7310 +7313 +7314 +7315 +7316 +7317 +7318 +7319 +7321 +7322 +7323 +7324 +7325 +7326 +7327 +7329 +7332 +7333 +7334 +7335 +7336 +7337 +7338 +7340 +7341 +7342 +7344 +7346 +7348 +7349 +7350 +7353 +7354 +7357 +7358 +7363 +7364 +7365 +7370 +7372 +7373 +7375 +7378 +7379 +7380 +7382 +7385 +7386 +7388 +7390 +7391 +7393 +7394 +7396 +7400 +7403 +7406 +7412 +7418 +7419 +7420 +7422 +7424 +7425 +7427 +7428 +7432 +7435 +7436 +7437 +7438 +7440 +7441 +7442 +7443 +7445 +7449 +7450 +7451 +7452 +7454 +7455 +7458 +7459 +7460 +7461 +7462 +7463 +7464 +7465 +7466 +7467 +7469 +7470 +7471 +7472 +7473 +7474 +7475 +7476 +7478 +7479 +7482 +7484 +7485 +7486 +7491 +7492 +7494 +7496 +7497 +7498 +7502 +7503 +7504 +7505 +7506 +7507 +7511 +7513 +7514 +7516 +7517 +7518 +7520 +7521 +7523 +7524 +7525 +7526 +7528 +7530 +7533 +7536 +7539 +7540 +7541 +7542 +7546 +7548 +7551 +7552 +7554 +7556 +7557 +7558 +7559 +7561 +7562 +7563 +7564 +7565 +7566 +7567 +7568 +7570 +7571 +7573 +7574 +7575 +7578 +7584 +7585 +7587 +7590 +7591 +7592 +7595 +7596 +7597 +7601 +7603 +7604 +7606 +7607 +7608 +7610 +7612 +7613 +7616 +7617 +7619 +7622 +7623 +7625 +7626 +7628 +7629 +7630 +7631 +7634 +7637 +7638 +7641 +7642 +7644 +7646 +7650 +7651 +7652 +7655 +7656 +7657 +7658 +7659 +7660 +7661 +7663 +7664 +7665 +7666 +7671 +7672 +7673 +7674 +7679 +7681 +7682 +7685 +7686 +7688 +7690 +7691 +7693 +7694 +7696 +7698 +7703 +7704 +7705 +7707 +7708 +7710 +7711 +7712 +7713 +7715 +7716 +7717 +7718 +7719 +7721 +7722 +7723 +7724 +7725 +7727 +7728 +7729 +7730 +7731 +7732 +7733 +7734 +7736 +7738 +7739 +7740 +7741 +7742 +7746 +7749 +7751 +7753 +7755 +7756 +7757 +7758 +7759 +7760 +7763 +7764 +7768 +7769 +7770 +7773 +7775 +7777 +7778 +7779 +7783 +7785 +7786 +7787 +7788 +7789 +7792 +7793 +7794 +7795 +7798 +7799 +7801 +7805 +7806 +7810 +7813 +7815 +7818 +7820 +7824 +7828 +7830 +7832 +7834 +7835 +7837 +7841 +7843 +7844 +7849 +7852 +7854 +7855 +7856 +7858 +7860 +7862 +7864 +7867 +7868 +7871 +7872 +7873 +7874 +7876 +7878 +7881 +7882 +7884 +7886 +7887 +7889 +7891 +7892 +7894 +7895 +7896 +7902 +7903 +7904 +7905 +7906 +7908 +7911 +7913 +7914 +7915 +7917 +7918 +7919 +7920 +7921 +7923 +7924 +7927 +7928 +7929 +7931 +7934 +7935 +7937 +7938 +7939 +7940 +7941 +7942 +7943 +7944 +7949 +7950 +7951 +7952 +7953 +7954 +7955 +7959 +7962 +7963 +7964 +7966 +7969 +7972 +7973 +7976 +7977 +7981 +7982 +7983 +7984 +7987 +7988 +7989 +7990 +7991 +7992 +7994 +7995 +7997 +7998 +7999 +8000 +8001 +8004 +8005 +8006 +8007 +8008 +8009 +8012 +8017 +8019 +8020 +8021 +8022 +8023 +8024 +8025 +8027 +8028 +8029 +8031 +8033 +8034 +8035 +8036 +8037 +8038 +8039 +8040 +8042 +8043 +8044 +8045 +8046 +8050 +8051 +8052 +8054 +8056 +8060 +8061 +8062 +8064 +8065 +8066 +8068 +8070 +8071 +8072 +8074 +8077 +8078 +8080 +8081 +8082 +8084 +8086 +8087 +8089 +8090 +8093 +8098 +8099 +8101 +8104 +8105 +8106 +8110 +8112 +8113 +8114 +8115 +8116 +8119 +8120 +8121 +8124 +8125 +8126 +8127 +8129 +8131 +8133 +8136 +8138 +8139 +8140 +8141 +8142 
+8144 +8145 +8147 +8149 +8150 +8151 +8153 +8154 +8155 +8156 +8157 +8159 +8161 +8162 +8163 +8164 +8166 +8168 +8170 +8171 +8172 +8173 +8174 +8175 +8177 +8178 +8179 +8182 +8183 +8184 +8186 +8191 +8193 +8195 +8197 +8198 +8199 +8201 +8202 +8203 +8204 +8205 +8206 +8207 +8208 +8210 +8211 +8212 +8213 +8215 +8216 +8218 +8220 +8221 +8222 +8225 +8229 +8230 +8231 +8232 +8233 +8236 +8237 +8239 +8240 +8242 +8243 +8244 +8245 +8246 +8250 +8251 +8252 +8254 +8255 +8256 +8257 +8258 +8259 +8261 +8263 +8264 +8267 +8268 +8271 +8272 +8273 +8275 +8276 +8278 +8281 +8282 +8285 +8286 +8288 +8289 +8290 +8294 +8295 +8297 +8298 +8299 +8300 +8303 +8307 +8309 +8310 +8312 +8313 +8315 +8318 +8320 +8322 +8325 +8326 +8327 +8328 +8329 +8330 +8332 +8333 +8335 +8337 +8345 +8346 +8347 +8348 +8352 +8354 +8360 +8362 +8364 +8365 +8368 +8371 +8375 +8376 +8378 +8380 +8381 +8382 +8386 +8388 +8389 +8390 +8392 +8393 +8394 +8396 +8397 +8398 +8399 +8400 +8401 +8402 +8403 +8404 +8405 +8407 +8408 +8409 +8410 +8412 +8414 +8416 +8417 +8418 +8419 +8420 +8421 +8422 +8426 +8428 +8430 +8432 +8433 +8434 +8435 +8436 +8437 +8439 +8440 +8446 +8447 +8448 +8449 +8450 +8451 +8452 +8453 +8454 +8456 +8460 +8462 +8463 +8464 +8467 +8468 +8469 +8470 +8472 +8473 +8474 +8477 +8478 +8481 +8482 +8483 +8484 +8485 +8486 +8490 +8491 +8492 +8493 +8494 +8495 +8496 +8497 +8498 +8500 +8501 +8502 +8503 +8505 +8506 +8508 +8509 +8510 +8511 +8512 +8513 +8516 +8521 +8522 +8524 +8526 +8529 +8531 +8532 +8536 +8538 +8539 +8540 +8541 +8542 +8543 +8547 +8548 +8549 +8552 +8553 +8555 +8556 +8557 +8560 +8561 +8562 +8564 +8565 +8568 +8569 +8570 +8571 +8572 +8573 +8577 +8578 +8580 +8581 +8583 +8584 +8586 +8588 +8589 +8590 +8591 +8593 +8594 +8596 +8597 +8598 +8599 +8600 +8601 +8602 +8603 +8604 +8606 +8607 +8610 +8611 +8613 +8615 +8622 +8625 +8626 +8627 +8628 +8629 +8632 +8636 +8638 +8639 +8641 +8643 +8645 +8646 +8647 +8648 +8649 +8650 +8651 +8652 +8653 +8654 +8655 +8656 +8657 +8658 +8662 +8663 +8664 +8665 +8666 +8667 +8668 +8669 +8670 +8671 +8672 +8673 +8674 +8675 +8676 +8677 +8678 +8679 +8680 +8681 +8682 +8684 +8685 +8686 +8690 +8691 +8692 +8693 +8694 +8695 +8702 +8707 +8708 +8709 +8710 +8711 +8712 +8713 +8715 +8716 +8720 +8723 +8724 +8725 +8728 +8732 +8733 +8737 +8738 +8739 +8740 +8741 +8745 +8746 +8750 +8752 +8753 +8754 +8756 +8757 +8758 +8759 +8761 +8762 +8763 +8766 +8768 +8770 +8771 +8772 +8773 +8775 +8776 +8780 +8781 +8783 +8784 +8785 +8786 +8787 +8788 +8793 +8795 +8797 +8798 +8801 +8803 +8804 +8806 +8807 +8810 +8812 +8814 +8815 +8817 +8820 +8823 +8824 +8826 +8827 +8828 +8829 +8830 +8831 +8833 +8835 +8838 +8839 +8842 +8843 +8845 +8846 +8847 +8848 +8849 +8851 +8854 +8856 +8857 +8858 +8860 +8861 +8863 +8864 +8867 +8869 +8870 +8871 +8872 +8875 +8876 +8878 +8879 +8883 +8884 +8886 +8887 +8888 +8890 +8891 +8892 +8894 +8896 +8897 +8898 +8899 +8900 +8901 +8902 +8903 +8905 +8906 +8908 +8910 +8914 +8915 +8916 +8917 +8918 +8919 +8922 +8923 +8924 +8925 +8926 +8927 +8929 +8931 +8932 +8934 +8936 +8937 +8938 +8939 +8942 +8943 +8944 +8945 +8947 +8948 +8950 +8951 +8954 +8956 +8957 +8959 +8962 +8965 +8966 +8967 +8968 +8969 +8970 +8971 +8976 +8977 +8980 +8981 +8982 +8983 +8984 +8985 +8986 +8987 +8989 +8990 +8991 +8992 +8993 +8994 +8995 +9000 +9001 +9003 +9006 +9007 +9011 +9012 +9013 +9014 +9015 +9019 +9022 +9023 +9024 +9025 +9026 +9028 +9029 +9030 +9031 +9032 +9033 +9034 +9036 +9037 +9039 +9042 +9043 +9047 +9049 +9050 +9051 +9052 +9054 +9055 +9056 +9057 +9058 +9059 +9060 +9061 +9062 +9064 +9065 +9066 +9070 +9071 +9072 +9073 +9074 +9079 +9080 +9081 +9082 +9083 +9087 +9088 +9092 +9093 +9094 +9096 
+9097 +9098 +9100 +9101 +9104 +9105 +9106 +9107 +9108 +9109 +9110 +9111 +9112 +9116 +9118 +9119 +9123 +9128 +9130 +9131 +9132 +9133 +9134 +9138 +9139 +9140 +9141 +9142 +9144 +9146 +9147 +9148 +9149 +9150 +9151 +9153 +9154 +9155 +9158 +9159 +9161 +9163 +9165 +9166 +9167 +9168 +9169 +9171 +9173 +9174 +9175 +9176 +9179 +9180 +9183 +9184 +9187 +9188 +9189 +9191 +9193 +9198 +9199 +9201 +9204 +9205 +9206 +9211 +9212 +9213 +9214 +9215 +9216 +9218 +9219 +9220 +9223 +9224 +9225 +9226 +9227 +9228 +9229 +9230 +9231 +9233 +9237 +9238 +9239 +9241 +9242 +9243 +9245 +9249 +9250 +9251 +9252 +9254 +9256 +9257 +9258 +9259 +9264 +9265 +9268 +9269 +9270 +9271 +9272 +9273 +9274 +9275 +9276 +9278 +9280 +9282 +9289 +9292 +9293 +9294 +9295 +9296 +9299 +9302 +9303 +9304 +9305 +9306 +9307 +9308 +9309 +9312 +9313 +9316 +9317 +9318 +9321 +9323 +9326 +9329 +9330 +9332 +9333 +9334 +9335 +9336 +9337 +9341 +9342 +9343 +9344 +9345 +9346 +9348 +9349 +9351 +9353 +9354 +9361 +9362 +9364 +9365 +9366 +9367 +9368 +9369 +9370 +9371 +9375 +9376 +9380 +9381 +9382 +9384 +9385 +9386 +9389 +9390 +9391 +9394 +9395 +9396 +9397 +9398 +9399 +9400 +9401 +9403 +9404 +9406 +9410 +9411 +9412 +9413 +9414 +9415 +9416 +9417 +9419 +9420 +9421 +9422 +9424 +9425 +9426 +9429 +9430 +9436 +9439 +9440 +9441 +9444 +9445 +9446 +9447 +9448 +9449 +9451 +9452 +9453 +9454 +9456 +9459 +9462 +9463 +9464 +9466 +9468 +9469 +9470 +9474 +9475 +9478 +9480 +9481 +9483 +9485 +9487 +9489 +9491 +9492 +9495 +9497 +9499 +9500 +9501 +9502 +9503 +9504 +9512 +9513 +9514 +9515 +9520 +9521 +9522 +9527 +9531 +9532 +9534 +9535 +9536 +9539 +9541 +9542 +9544 +9545 +9547 +9548 +9550 +9551 +9556 +9557 +9565 +9566 +9568 +9569 +9570 +9571 +9573 +9574 +9575 +9576 +9577 +9578 +9580 +9583 +9584 +9585 +9586 +9587 +9589 +9590 +9594 +9596 +9601 +9604 +9607 +9608 +9609 +9614 +9615 +9617 +9621 +9623 +9625 +9626 +9627 +9628 +9629 +9632 +9633 +9634 +9635 +9636 +9638 +9639 +9640 +9641 +9642 +9643 +9646 +9651 +9652 +9653 +9654 +9655 +9658 +9659 +9660 +9663 +9664 +9665 +9666 +9667 +9669 +9672 +9673 +9674 +9675 +9676 +9677 +9678 +9680 +9683 +9685 +9688 +9689 +9690 +9691 +9692 +9694 +9696 +9697 +9702 +9703 +9704 +9705 +9706 +9710 +9711 +9712 +9714 +9716 +9719 +9720 +9723 +9725 +9726 +9727 +9729 +9732 +9733 +9734 +9736 +9737 +9738 +9739 +9740 +9741 +9744 +9751 +9752 +9753 +9754 +9755 +9756 +9758 +9759 +9762 +9763 +9764 +9766 +9769 +9770 +9771 +9772 +9773 +9775 +9776 +9777 +9778 +9779 +9780 +9782 +9784 +9785 +9786 +9791 +9794 +9796 +9797 +9798 +9799 +9800 +9801 +9802 +9805 +9806 +9807 +9809 +9811 +9814 +9819 +9820 +9825 +9826 +9827 +9834 +9835 +9836 +9837 +9838 +9841 +9844 +9848 +9849 +9855 +9857 +9858 +9859 +9860 +9862 +9866 +9868 +9869 +9873 +9875 +9876 +9877 +9878 +9880 +9883 +9885 +9886 +9887 +9888 +9889 +9891 +9893 +9894 +9895 +9896 +9897 +9898 +9899 +9900 +9901 +9902 +9904 +9906 +9907 +9909 +9911 +9912 +9915 +9920 +9921 +9922 +9923 +9924 +9926 +9927 +9928 +9929 +9930 +9931 +9934 +9935 +9936 +9937 +9938 +9940 +9944 +9946 +9947 +9948 +9950 +9951 +9952 +9953 +9955 +9957 +9959 +9960 +9961 +9962 +9963 +9964 +9965 +9966 +9968 +9969 +9971 +9974 +9975 +9976 +9978 +9979 +9980 +9981 +9982 +9983 +9987 +9988 +9989 +9990 +9991 +9992 +9993 +9996 +9998 +10001 +10002 +10009 +10010 +10011 +10012 +10013 +10014 +10015 +10016 +10017 +10021 +10022 +10024 +10027 +10028 +10030 +10032 +10033 +10035 +10037 +10039 +10040 +10041 +10042 +10043 +10044 +10045 +10046 +10047 +10049 +10050 +10051 +10052 +10053 +10054 +10055 +10056 +10057 +10058 +10059 +10061 +10063 +10064 +10065 +10066 +10067 +10069 +10072 +10073 +10074 
+10075 +10076 +10077 +10080 +10081 +10082 +10083 +10085 +10086 +10087 +10089 +10090 +10091 +10092 +10093 +10094 +10096 +10099 +10101 +10104 +10105 +10106 +10107 +10108 +10109 +10112 +10116 +10117 +10118 +10120 +10121 +10122 +10123 +10126 +10127 +10133 +10134 +10139 +10140 +10141 +10146 +10147 +10150 +10151 +10152 +10154 +10155 +10156 +10157 +10158 +10159 +10161 +10162 +10163 +10164 +10168 +10170 +10174 +10178 +10180 +10184 +10185 +10186 +10187 +10188 +10189 +10190 +10191 +10194 +10195 +10196 +10198 +10199 +10201 +10205 +10206 +10208 +10209 +10210 +10212 +10216 +10217 +10218 +10219 +10220 +10222 +10223 +10224 +10225 +10226 +10227 +10228 +10230 +10231 +10233 +10234 +10235 +10236 +10239 +10240 +10242 +10243 +10244 +10245 +10246 +10247 +10248 +10249 +10251 +10252 +10253 +10254 +10257 +10259 +10261 +10262 +10263 +10264 +10265 +10268 +10270 +10271 +10272 +10275 +10276 +10278 +10282 +10283 +10285 +10288 +10289 +10290 +10294 +10295 +10296 +10297 +10298 +10299 +10300 +10301 +10302 +10303 +10304 +10305 +10306 +10307 +10310 +10312 +10313 +10314 +10317 +10318 +10321 +10322 +10323 +10324 +10325 +10326 +10327 +10328 +10329 +10330 +10331 +10332 +10334 +10335 +10336 +10337 +10338 +10339 +10342 +10343 +10344 +10345 +10348 +10349 +10350 +10351 +10352 +10353 +10354 +10355 +10359 +10361 +10362 +10365 +10366 +10370 +10371 +10372 +10374 +10376 +10377 +10379 +10380 +10382 +10386 +10387 +10388 +10389 +10390 +10393 +10394 +10396 +10397 +10398 +10400 +10401 +10403 +10405 +10408 +10411 +10412 +10413 +10415 +10416 +10417 +10418 +10419 +10421 +10422 +10423 +10424 +10426 +10428 +10429 +10430 +10431 +10432 +10435 +10437 +10439 +10443 +10446 +10447 +10450 +10452 +10454 +10455 +10458 +10459 +10460 +10461 +10462 +10464 +10467 +10469 +10472 +10475 +10477 +10478 +10480 +10482 +10486 +10487 +10488 +10490 +10493 +10495 +10496 +10498 +10499 +10500 +10503 +10504 +10505 +10507 +10508 +10509 +10510 +10511 +10512 +10513 +10514 +10516 +10517 +10518 +10519 +10520 +10521 +10522 +10523 +10524 +10525 +10526 +10527 +10529 +10530 +10531 +10533 +10534 +10535 +10538 +10541 +10543 +10545 +10546 +10547 +10548 +10549 +10550 +10551 +10552 +10553 +10554 +10555 +10558 +10560 +10562 +10563 +10566 +10569 +10573 +10574 +10575 +10582 +10583 +10584 +10585 +10587 +10588 +10589 +10590 +10591 +10593 +10597 +10606 +10609 +10610 +10611 +10612 +10614 +10616 +10619 +10620 +10622 +10624 +10625 +10626 +10627 +10628 +10630 +10632 +10634 +10635 +10637 +10638 +10640 +10641 +10642 +10643 +10647 +10648 +10649 +10657 +10658 +10661 +10662 +10663 +10664 +10665 +10666 +10667 +10668 +10670 +10671 +10672 +10673 +10674 +10675 +10676 +10677 +10679 +10680 +10682 +10685 +10686 +10687 +10690 +10691 +10693 +10694 +10696 +10697 +10698 +10699 +10700 +10701 +10702 +10707 +10708 +10710 +10711 +10712 +10713 +10717 +10718 +10719 +10720 +10722 +10724 +10725 +10726 +10727 +10728 +10729 +10730 +10732 +10733 +10734 +10737 +10738 +10741 +10747 +10748 +10749 +10750 +10751 +10753 +10754 +10756 +10758 +10759 +10760 +10762 +10764 +10765 +10766 +10767 +10771 +10772 +10773 +10774 +10775 +10776 +10779 +10780 +10781 +10782 +10783 +10785 +10786 +10790 +10791 +10792 +10795 +10797 +10798 +10799 +10801 +10802 +10805 +10806 +10807 +10808 +10809 +10810 +10812 +10813 +10817 +10821 +10823 +10824 +10827 +10829 +10831 +10832 +10834 +10836 +10839 +10840 +10841 +10842 +10843 +10845 +10847 +10848 +10851 +10854 +10855 +10858 +10859 +10861 +10863 +10864 +10866 +10869 +10870 +10871 +10873 +10874 +10875 +10876 +10878 +10879 +10880 +10881 +10882 +10883 +10885 +10888 +10889 +10893 +10895 +10896 +10897 +10898 
+10901 +10905 +10906 +10907 +10908 +10909 +10911 +10912 +10913 +10914 +10918 +10919 +10920 +10923 +10926 +10927 +10931 +10932 +10934 +10935 +10937 +10938 +10939 +10940 +10942 +10943 +10944 +10945 +10946 +10947 +10950 +10951 +10952 +10954 +10955 +10956 +10957 +10959 +10961 +10962 +10963 +10967 +10968 +10971 +10972 +10973 +10974 +10979 +10983 +10985 +10986 +10988 +10993 +10996 +10997 +10998 +11000 +11001 +11002 +11003 +11004 +11008 +11011 +11012 +11015 +11016 +11017 +11019 +11021 +11022 +11023 +11024 +11026 +11027 +11028 +11030 +11031 +11032 +11033 +11035 +11038 +11039 +11040 +11043 +11044 +11045 +11048 +11050 +11051 +11052 +11053 +11054 +11055 +11056 +11058 +11059 +11060 +11061 +11063 +11065 +11066 +11067 +11068 +11070 +11072 +11073 +11076 +11077 +11078 +11080 +11081 +11082 +11083 +11084 +11085 +11087 +11088 +11093 +11094 +11095 +11096 +11097 +11098 +11101 +11102 +11103 +11104 +11105 +11106 +11107 +11108 +11109 +11110 +11112 +11113 +11114 +11118 +11119 +11120 +11122 +11125 +11127 +11128 +11129 +11132 +11133 +11134 +11136 +11137 +11138 +11139 +11140 +11142 +11143 +11144 +11145 +11146 +11148 +11150 +11151 +11152 +11153 +11154 +11155 +11157 +11158 +11159 +11160 +11161 +11162 +11163 +11164 +11167 +11168 +11169 +11170 +11171 +11172 +11173 +11176 +11178 +11179 +11180 +11181 +11182 +11184 +11185 +11186 +11187 +11189 +11197 +11200 +11202 +11205 +11207 +11208 +11209 +11210 +11211 +11213 +11215 +11217 +11218 +11219 +11220 +11221 +11222 +11224 +11225 +11226 +11234 +11235 +11237 +11238 +11241 +11242 +11243 +11246 +11247 +11251 +11254 +11256 +11258 +11259 +11261 +11262 +11265 +11268 +11269 +11272 +11273 +11274 +11275 +11276 +11278 +11281 +11282 +11283 +11291 +11292 +11293 +11295 +11297 +11298 +11300 +11303 +11304 +11305 +11306 +11308 +11309 +11310 +11312 +11315 +11317 +11318 +11319 +11322 +11324 +11325 +11326 +11328 +11329 +11330 +11331 +11333 +11334 +11338 +11339 +11340 +11342 +11343 +11344 +11346 +11348 +11349 +11350 +11351 +11353 +11354 +11355 +11356 +11360 +11362 +11364 +11366 +11369 +11371 +11373 +11374 +11376 +11377 +11378 +11381 +11382 +11383 +11384 +11385 +11386 +11387 +11388 +11389 +11390 +11391 +11393 +11395 +11396 +11397 +11399 +11400 +11401 +11402 +11403 +11404 +11406 +11408 +11409 +11411 +11413 +11415 +11417 +11420 +11423 +11426 +11427 +11428 +11430 +11432 +11435 +11436 +11438 +11439 +11440 +11441 +11442 +11446 +11448 +11453 +11454 +11455 +11459 +11463 +11464 +11465 +11467 +11468 +11469 +11471 +11472 +11473 +11476 +11477 +11478 +11481 +11482 +11483 +11484 +11486 +11487 +11488 +11489 +11490 +11491 +11492 +11493 +11494 +11496 +11497 +11498 +11500 +11502 +11503 +11506 +11507 +11513 +11514 +11515 +11516 +11517 +11519 +11520 +11521 +11523 +11526 +11528 +11531 +11535 +11536 +11537 +11538 +11539 +11540 +11541 +11542 +11543 +11548 +11550 +11553 +11555 +11556 +11557 +11559 +11561 +11562 +11565 +11569 +11570 +11571 +11572 +11573 +11574 +11576 +11577 +11578 +11579 +11581 +11583 +11587 +11588 +11589 +11590 +11591 +11592 +11593 +11595 +11596 +11597 +11598 +11599 +11603 +11604 +11605 +11608 +11610 +11611 +11612 +11613 +11614 +11617 +11618 +11619 +11620 +11621 +11622 +11623 +11626 +11627 +11628 +11629 +11630 +11631 +11632 +11633 +11635 +11636 +11637 +11639 +11640 +11642 +11643 +11645 +11646 +11647 +11648 +11649 +11650 +11651 +11652 +11653 +11654 +11655 +11656 +11657 +11658 +11659 +11661 +11663 +11667 +11669 +11670 +11672 +11673 +11674 +11678 +11681 +11682 +11686 +11687 +11688 +11689 +11691 +11692 +11694 +11695 +11696 +11697 +11699 +11700 +11703 +11704 +11707 +11708 +11709 +11710 +11711 +11712 +11714 
+11715 +11717 +11720 +11722 +11724 +11725 +11726 +11727 +11728 +11729 +11731 +11732 +11733 +11734 +11735 +11736 +11737 +11739 +11742 +11743 +11746 +11749 +11750 +11752 +11753 +11755 +11756 +11759 +11760 +11762 +11763 +11764 +11768 +11769 +11770 +11772 +11773 +11776 +11777 +11779 +11780 +11781 +11782 +11786 +11787 +11789 +11790 +11792 +11794 +11797 +11798 +11799 +11800 +11801 +11803 +11808 +11809 +11810 +11813 +11814 +11818 +11819 +11820 +11821 +11822 +11826 +11828 +11834 +11835 +11836 +11837 +11838 +11839 +11840 +11841 +11842 +11844 +11845 +11846 +11847 +11848 +11850 +11851 +11855 +11856 +11857 +11861 +11862 +11863 +11864 +11865 +11866 +11867 +11868 +11869 +11870 +11871 +11872 +11874 +11875 +11876 +11877 +11878 +11879 +11880 +11881 +11882 +11883 +11886 +11888 +11889 +11890 +11891 +11893 +11895 +11896 +11897 +11898 +11899 +11901 +11902 +11903 +11904 +11906 +11908 +11909 +11913 +11916 +11917 +11919 +11920 +11921 +11922 +11924 +11926 +11927 +11928 +11929 +11930 +11932 +11936 +11938 +11939 +11940 +11941 +11943 +11946 +11947 +11949 +11950 +11951 +11952 +11953 +11954 +11957 +11959 +11960 +11961 +11963 +11964 +11965 +11967 +11969 +11970 +11971 +11974 +11975 +11978 +11979 +11981 +11983 +11984 +11986 +11989 +11990 +11993 +11994 +11995 +11999 +12001 +12008 +12009 +12010 +12011 +12012 +12013 +12014 +12015 +12017 +12018 +12019 +12020 +12021 +12022 +12023 +12024 +12025 +12026 +12027 +12028 +12030 +12031 +12032 +12033 +12034 +12035 +12036 +12037 +12038 +12039 +12040 +12041 +12043 +12044 +12046 +12047 +12048 +12049 +12050 +12051 +12053 +12054 +12055 +12057 +12060 +12062 +12063 +12064 +12066 +12068 +12070 +12073 +12074 +12080 +12083 +12084 +12087 +12089 +12090 +12091 +12092 +12093 +12094 +12095 +12096 +12098 +12104 +12106 +12108 +12109 +12111 +12120 +12122 +12124 +12130 +12144 +12146 +12147 +12153 +12154 +12156 +12158 +12162 +12169 +12173 +12176 +12177 +12178 +12179 +12180 +12181 +12182 +12183 +12184 +12185 +12186 +12189 +12190 +12191 +12193 +12197 +12199 +12200 +12201 +12202 +12203 +12206 +12207 +12208 +12209 +12210 +12213 +12214 +12216 +12217 +12221 +12224 +12226 +12228 +12229 +12230 +12231 +12238 +12239 +12240 +12241 +12242 +12243 +12244 +12245 +12246 +12247 +12248 +12249 +12250 +12252 +12254 +12255 +12256 +12258 +12259 +12260 +12261 +12262 +12263 +12267 +12271 +12275 +12280 +12281 +12282 +12283 +12284 +12290 +12296 +12297 +12301 +12303 +12305 +12308 +12312 +12314 +12316 +12318 +12321 +12322 +12323 +12324 +12325 +12326 +12327 +12328 +12330 +12331 +12332 +12333 +12334 +12335 +12337 +12339 +12340 +12341 +12345 +12346 +12347 +12348 +12349 +12350 +12351 +12352 +12353 +12354 +12355 +12356 +12358 +12359 +12361 +12362 +12364 +12366 +12368 +12372 +12374 +12375 +12376 +12380 +12381 +12383 +12385 +12386 +12388 +12390 +12392 +12393 +12394 +12395 +12396 +12398 +12399 +12400 +12401 +12403 +12404 +12405 +12406 +12407 +12408 +12410 +12411 +12412 +12413 +12419 +12420 +12421 +12422 +12425 +12429 +12430 +12432 +12433 +12435 +12436 +12437 +12438 +12440 +12442 +12443 +12446 +12452 +12454 +12456 +12462 +12463 +12464 +12466 +12467 +12470 +12473 +12480 +12481 +12482 +12483 +12486 +12490 +12492 +12493 +12494 +12496 +12497 +12500 +12501 +12502 +12504 +12505 +12510 +12511 +12512 +12515 +12518 +12521 +12522 +12524 +12525 +12529 +12532 +12534 +12536 +12538 +12541 +12544 +12545 +12546 +12547 +12549 +12550 +12551 +12553 +12555 +12556 +12558 +12559 +12561 +12563 +12565 +12566 +12567 +12569 +12570 +12571 +12572 +12573 +12574 +12576 +12579 +12580 +12581 +12582 +12584 +12589 +12590 +12592 +12593 +12594 +12596 +12600 +12601 +12603 
+12610 +12613 +12614 +12615 +12616 +12618 +12619 +12621 +12622 +12624 +12625 +12626 +12627 +12628 +12629 +12631 +12632 +12633 +12634 +12635 +12639 +12640 +12642 +12643 +12645 +12646 +12647 +12648 +12650 +12652 +12653 +12656 +12658 +12660 +12662 +12664 +12666 +12667 +12670 +12671 +12673 +12674 +12675 +12676 +12677 +12678 +12679 +12680 +12683 +12684 +12685 +12686 +12687 +12688 +12689 +12691 +12692 +12693 +12694 +12696 +12698 +12699 +12700 +12701 +12702 +12703 +12707 +12708 +12709 +12710 +12711 +12712 +12713 +12714 +12716 +12719 +12721 +12722 +12728 +12729 +12730 +12731 +12732 +12733 +12734 +12735 +12736 +12737 +12738 +12739 +12740 +12741 +12742 +12743 +12748 +12750 +12751 +12753 +12754 +12756 +12758 +12759 +12760 +12761 +12766 +12767 +12768 +12769 +12770 +12771 +12772 +12773 +12774 +12775 +12776 +12777 +12779 +12780 +12781 +12782 +12783 +12784 +12785 +12786 +12787 +12789 +12790 +12792 +12793 +12794 +12795 +12797 +12799 +12800 +12801 +12803 +12805 +12806 +12807 +12808 +12809 +12810 +12811 +12815 +12816 +12817 +12818 +12819 +12820 +12821 +12822 +12823 +12824 +12825 +12826 +12827 +12828 +12829 +12830 +12831 +12832 +12833 +12834 +12835 +12836 +12837 +12838 +12839 +12840 +12841 +12842 +12843 +12846 +12847 +12848 +12849 +12855 +12857 +12860 +12863 +12865 +12869 +12870 +12871 +12872 +12873 +12875 +12876 +12877 +12878 +12880 +12881 +12882 +12884 +12886 +12887 +12888 +12889 +12890 +12891 +12893 +12896 +12897 +12898 +12900 +12902 +12903 +12904 +12906 +12907 +12909 +12912 +12913 +12914 +12915 +12919 +12920 +12922 +12923 +12924 +12926 +12927 +12928 +12933 +12934 +12935 +12936 +12937 +12938 +12939 +12940 +12942 +12943 +12944 +12946 +12948 +12949 +12950 +12951 +12955 +12956 +12957 +12961 +12962 +12963 +12965 +12966 +12967 +12968 +12969 +12970 +12971 +12975 +12976 +12977 +12978 +12980 +12981 +12982 +12984 +12985 +12989 +12993 +12994 +12995 +12997 +12999 +13000 +13001 +13002 +13005 +13006 +13009 +13014 +13016 +13020 +13021 +13023 +13025 +13026 +13027 +13028 +13030 +13031 +13032 +13035 +13037 +13039 +13040 +13041 +13044 +13045 +13047 +13048 +13049 +13050 +13051 +13052 +13053 +13055 +13056 +13057 +13058 +13059 +13060 +13061 +13062 +13064 +13066 +13067 +13068 +13069 +13070 +13071 +13073 +13075 +13077 +13078 +13080 +13083 +13084 +13085 +13087 +13088 +13089 +13092 +13093 +13094 +13095 +13097 +13099 +13100 +13102 +13104 +13106 +13107 +13108 +13109 +13113 +13118 +13120 +13125 +13127 +13133 +13134 +13135 +13136 +13139 +13140 +13143 +13144 +13145 +13148 +13149 +13150 +13151 +13152 +13154 +13155 +13156 +13157 +13158 +13162 +13163 +13164 +13168 +13169 +13170 +13171 +13172 +13173 +13174 +13175 +13177 +13179 +13181 +13182 +13186 +13187 +13189 +13193 +13196 +13199 +13202 +13203 +13205 +13206 +13211 +13212 +13213 +13214 +13215 +13221 +13223 +13224 +13225 +13227 +13228 +13229 +13232 +13234 +13235 +13236 +13240 +13245 +13246 +13247 +13248 +13249 +13254 +13255 +13256 +13258 +13259 +13260 +13261 +13263 +13264 +13265 +13266 +13267 +13268 +13269 +13270 +13271 +13276 +13279 +13280 +13283 +13285 +13286 +13297 +13298 +13299 +13300 +13301 +13305 +13306 +13307 +13309 +13310 +13311 +13312 +13313 +13315 +13316 +13317 +13318 +13322 +13324 +13325 +13327 +13328 +13329 +13330 +13331 +13333 +13335 +13336 +13338 +13339 +13340 +13341 +13343 +13345 +13347 +13348 +13349 +13351 +13352 +13353 +13355 +13356 +13357 +13359 +13361 +13363 +13364 +13368 +13371 +13375 +13377 +13378 +13380 +13381 +13384 +13385 +13386 +13387 +13389 +13390 +13395 +13397 +13398 +13399 +13401 +13405 +13406 +13414 +13417 +13423 +13426 +13427 +13429 +13432 +13433 +13437 
+13443 +13444 +13447 +13451 +13452 +13453 +13454 +13455 +13466 +13471 +13473 +13475 +13476 +13477 +13478 +13479 +13480 +13481 +13482 +13483 +13484 +13485 +13486 +13488 +13491 +13492 +13493 +13494 +13495 +13497 +13498 +13504 +13506 +13508 +13517 +13518 +13521 +13522 +13523 +13524 +13525 +13530 +13532 +13533 +13534 +13535 +13537 +13539 +13540 +13541 +13542 +13543 +13545 +13546 +13547 +13551 +13553 +13554 +13559 +13565 +13572 +13577 +13581 +13589 +13592 +13594 +13600 +13602 +13603 +13604 +13606 +13608 +13609 +13612 +13613 +13614 +13617 +13619 +13620 +13621 +13622 +13623 +13624 +13625 +13630 +13631 +13633 +13635 +13640 +13641 +13642 +13643 +13644 +13648 +13649 +13650 +13651 +13653 +13654 +13655 +13656 +13657 +13658 +13659 +13660 +13663 +13664 +13665 +13667 +13668 +13670 +13671 +13672 +13673 +13674 +13676 +13677 +13678 +13682 +13684 +13686 +13688 +13689 +13690 +13692 +13694 +13695 +13697 +13698 +13699 +13700 +13701 +13702 +13705 +13708 +13710 +13711 +13712 +13716 +13720 +13723 +13724 +13725 +13726 +13727 +13731 +13732 +13733 +13734 +13735 +13736 +13737 +13739 +13741 +13742 +13743 +13744 +13745 +13746 +13747 +13748 +13750 +13751 +13754 +13755 +13756 +13758 +13760 +13764 +13765 +13766 +13767 +13769 +13770 +13771 +13772 +13773 +13774 +13775 +13776 +13777 +13779 +13781 +13782 +13785 +13786 +13787 +13789 +13790 +13791 +13792 +13794 +13798 +13799 +13800 +13801 +13802 +13803 +13804 +13805 +13807 +13808 +13810 +13811 +13812 +13813 +13814 +13815 +13816 +13817 +13818 +13819 +13820 +13821 +13822 +13823 +13824 +13825 +13826 +13827 +13830 +13831 +13832 +13833 +13834 +13836 +13837 +13838 +13839 +13840 +13841 +13842 +13843 +13844 +13845 +13846 +13848 +13849 +13850 +13851 +13852 +13853 +13854 +13855 +13857 +13858 +13859 +13860 +13861 +13862 +13863 +13864 +13865 +13866 +13867 +13868 +13871 +13872 +13873 +13874 +13875 +13876 +13877 +13878 +13879 +13881 +13882 +13883 +13884 +13885 +13886 +13887 +13888 +13889 +13890 +13893 +13894 +13895 +13896 +13897 +13898 +13899 +13900 +13901 +13902 +13903 +13904 +13905 +13906 +13907 +13908 +13909 +13911 +13915 +13919 +13920 +13922 +13923 +13924 +13928 +13929 +13930 +13932 +13935 +13939 +13940 +13941 +13942 +13943 +13944 +13946 +13948 +13949 +13952 +13953 +13955 +13956 +13957 +13958 +13959 +13961 +13962 +13963 +13964 +13965 +13969 +13970 +13971 +13973 +13975 +13976 +13977 +13981 +13982 +13983 +13984 +13985 +13986 +13987 +13989 +13991 +13992 +13993 +13995 +13996 +13997 +13999 +14001 +14002 +14003 +14005 +14006 +14008 +14009 +14010 +14011 +14012 +14015 +14016 +14019 +14020 +14021 +14024 +14025 +14027 +14028 +14030 +14031 +14032 +14034 +14035 +14037 +14038 +14039 +14041 +14042 +14043 +14045 +14047 +14048 +14049 +14050 +14051 +14054 +14055 +14057 +14058 +14063 +14064 +14065 +14068 +14072 +14078 +14081 +14082 +14083 +14084 +14087 +14089 +14090 +14094 +14096 +14097 +14098 +14099 +14100 +14101 +14102 +14103 +14105 +14107 +14108 +14110 +14111 +14114 +14115 +14116 +14121 +14125 +14126 +14128 +14130 +14131 +14134 +14135 +14136 +14139 +14143 +14147 +14149 +14150 +14152 +14153 +14154 +14155 +14158 +14161 +14163 +14164 +14167 +14170 +14171 +14175 +14176 +14177 +14178 +14179 +14182 +14183 +14187 +14190 +14194 +14195 +14199 +14201 +14204 +14212 +14215 +14216 +14218 +14219 +14220 +14223 +14224 +14225 +14226 +14228 +14236 +14237 +14238 +14239 +14240 +14242 +14244 +14245 +14246 +14247 +14248 +14249 +14251 +14252 +14253 +14254 +14255 +14256 +14258 +14259 +14261 +14263 +14264 +14265 +14266 +14267 +14269 +14271 +14273 +14275 +14276 +14280 +14281 +14283 +14285 +14286 +14287 +14289 +14290 +14292 
+14293 +14294 +14297 +14298 +14300 +14302 +14303 +14304 +14305 +14306 +14308 +14309 +14310 +14312 +14313 +14318 +14322 +14323 +14325 +14326 +14328 +14332 +14334 +14335 +14342 +14343 +14345 +14347 +14349 +14350 +14351 +14352 +14353 +14354 +14359 +14361 +14362 +14363 +14364 +14365 +14366 +14368 +14371 +14372 +14373 +14374 +14376 +14377 +14378 +14379 +14380 +14382 +14383 +14384 +14385 +14387 +14388 +14393 +14394 +14396 +14399 +14400 +14401 +14402 +14403 +14404 +14405 +14406 +14408 +14409 +14410 +14411 +14412 +14413 +14414 +14415 +14416 +14417 +14418 +14420 +14421 +14422 +14423 +14424 +14427 +14430 +14431 +14435 +14437 +14440 +14441 +14445 +14446 +14447 +14451 +14454 +14459 +14460 +14462 +14463 +14465 +14473 +14478 +14479 +14480 +14481 +14483 +14484 +14485 +14486 +14487 +14488 +14489 +14492 +14494 +14497 +14500 +14502 +14505 +14506 +14507 +14508 +14510 +14512 +14513 +14515 +14516 +14518 +14521 +14524 +14526 +14530 +14534 +14541 +14542 +14545 +14558 +14566 +14578 +14579 +14580 +14581 +14585 +14589 +14592 +14593 +14595 +14599 +14601 +14602 +14603 +14607 +14610 +14611 +14613 +14615 +14616 +14617 +14620 +14621 +14622 +14623 +14624 +14625 +14626 +14628 +14632 +14633 +14634 +14639 +14640 +14647 +14649 +14651 +14653 +14655 +14656 +14659 +14663 +14664 +14665 +14666 +14668 +14670 +14673 +14676 +14677 +14678 +14679 +14680 +14681 +14682 +14683 +14684 +14686 +14690 +14692 +14696 +14697 +14698 +14699 +14702 +14703 +14705 +14707 +14708 +14709 +14710 +14713 +14714 +14715 +14716 +14717 +14719 +14721 +14722 +14723 +14724 +14725 +14729 +14730 +14734 +14736 +14740 +14743 +14744 +14746 +14748 +14749 +14753 +14759 +14760 +14765 +14766 +14767 +14768 +14769 +14771 +14773 +14774 +14775 +14777 +14779 +14783 +14787 +14790 +14792 +14793 +14795 +14797 +14802 +14804 +14806 +14807 +14809 +14813 +14814 +14818 +14821 +14823 +14834 +14841 +14842 +14851 +14852 +14854 +14855 +14859 +14863 +14864 +14865 +14867 +14868 +14869 +14877 +14880 +14884 +14887 +14892 +14893 +14900 +14901 +14902 +14906 +14907 +14912 +14916 +14919 +14922 +14924 +14927 +14928 +14929 +14930 +14936 +14937 +14938 +14941 +14942 +14943 +14947 +14952 +14953 +14957 +14958 +14962 +14963 +14964 +14965 +14966 +14967 +14968 +14973 +14974 +14975 +14977 +14978 +14980 +14981 +14983 +14984 +14986 +14989 +14990 +14992 +14995 +14997 +14998 +15001 +15006 +15008 +15009 +15013 +15014 +15016 +15018 +15021 +15024 +15028 +15029 +15030 +15032 +15033 +15048 +15061 +15062 +15063 +15065 +15067 +15068 +15070 +15071 +15072 +15073 +15077 +15079 +15080 +15085 +15087 +15093 +15096 +15097 +15099 +15101 +15106 +15107 +15108 +15110 +15111 +15112 +15114 +15116 +15119 +15121 +15122 +15126 +15127 +15128 +15129 +15133 +15136 +15138 +15139 +15140 +15143 +15144 +15146 +15149 +15159 +15165 +15166 +15167 +15168 +15169 +15170 +15171 +15173 +15174 +15175 +15179 +15182 +15186 +15187 +15188 +15189 +15191 +15192 +15195 +15197 +15198 +15202 +15203 +15204 +15206 +15207 +15209 +15210 +15211 +15212 +15215 +15218 +15220 +15223 +15230 +15234 +15236 +15237 +15239 +15240 +15241 +15242 +15244 +15249 +15250 +15255 +15257 +15260 +15263 +15272 +15281 +15291 +15299 +15300 +15303 +15304 +15306 +15307 +15309 +15310 +15318 +15320 +15321 +15322 +15338 +15340 +15341 +15342 +15343 +15344 +15345 +15347 +15350 +15353 +15366 +15367 +15370 +15372 +15374 +15377 +15383 +15394 +15396 +15399 +15403 +15405 +15408 +15410 +15416 +15421 +15422 +15423 +15430 +15432 +15434 +15435 +15439 +15440 +15442 +15444 +15446 +15448 +15449 +15452 +15456 +15458 +15459 +15466 +15468 +15476 +15477 +15478 +15480 +15489 +15493 +15506 +15519 +15520 
+15521 +15525 +15534 +15535 +15537 +15538 +15540 +15542 +15543 +15544 +15545 +15549 +15550 +15552 +15556 +15557 +15561 +15563 +15564 +15566 +15571 +15572 +15573 +15576 +15577 +15578 +15579 +15581 +15582 +15588 +15589 +15597 +15599 +15600 +15601 +15602 +15608 +15612 +15617 +15623 +15624 +15637 +15644 +15648 +15652 +15653 +15658 +15659 +15666 +15667 +15672 +15676 +15679 +15682 +15683 +15685 +15688 +15689 +15694 +15700 +15703 +15704 +15705 +15707 +15709 +15711 +15714 +15715 +15716 +15717 +15718 +15719 +15720 +15721 +15722 +15723 +15724 +15726 +15727 +15729 +15730 +15735 +15737 +15742 +15745 +15747 +15748 +15755 +15759 +15762 +15764 +15766 +15773 +15774 +15775 +15778 +15781 +15785 +15786 +15797 +15800 +15802 +15803 +15807 +15812 +15821 +15824 +15827 +15828 +15829 +15831 +15836 +15837 +15840 +15841 +15845 +15848 +15850 +15855 +15859 +15863 +15865 +15867 +15870 +15872 +15878 +15879 +15881 +15882 +15883 +15887 +15891 +15892 +15898 +15899 +15910 +15912 +15917 +15920 +15921 +15922 +15929 +15934 +15936 +15937 +15939 +15940 +15943 +15944 +15945 +15947 +15957 +15958 +15959 +15960 +15962 +15975 +15976 +15981 +15982 +15989 +15992 +15993 +15994 +15996 +16000 +16001 +16002 +16003 +16005 +16006 +16009 +16012 +16013 +16014 +16017 +16018 +16026 +16027 +16037 +16039 +16042 +16044 +16049 +16054 +16058 +16059 +16060 +16069 +16070 +16072 +16077 +16080 +16083 +16090 +16091 +16092 +16094 +16095 +16098 +16099 +16101 +16105 +16106 +16107 +16114 +16115 +16117 +16119 +16127 +16131 +16138 +16139 +16141 +16142 +16144 +16145 +16147 +16148 +16150 +16153 +16154 +16156 +16157 +16161 +16163 +16167 +16170 +16172 +16175 +16176 +16177 +16185 +16190 +16191 +16196 +16197 +16198 +16199 +16212 +16213 +16215 +16217 +16223 +16225 +16234 +16239 +16241 +16242 +16249 +16254 +16256 +16257 +16260 +16262 +16269 +16272 +16278 +16279 +16282 +16286 +16287 +16288 +16289 +16290 +16294 +16300 +16305 +16310 +16311 +16312 +16313 +16315 +16316 +16317 +16320 +16326 +16332 +16333 +16339 +16340 +16341 +16345 +16346 +16347 +16355 +16358 +16361 +16362 +16371 +16373 +16374 +16377 +16378 +16382 +16386 +16387 +16388 +16389 +16390 +16391 +16395 +16398 +16403 +16412 +16414 +16416 +16419 +16421 +16422 +16423 +16424 +16430 +16433 +16434 +16435 +16442 +16445 +16453 +16454 +16457 +16458 +16461 +16472 +16474 +16475 +16476 +16479 +16492 +16496 +16501 +16502 +16506 +16507 +16510 +16511 +16515 +16520 +16524 +16527 +16528 +16538 +16540 +16543 +16545 +16549 +16550 +16553 +16558 +16566 +16574 +16582 +16583 +16585 +16586 +16589 +16590 +16591 +16597 +16601 +16603 +16608 +16614 +16616 +16617 +16620 +16624 +16628 +16633 +16646 +16647 +16649 +16651 +16652 +16653 +16657 +16658 +16663 +16666 +16667 +16668 +16670 +16678 +16687 +16691 +16692 +16693 +16702 +16703 +16706 +16711 +16724 +16725 +16727 +16728 +16733 +16734 +16737 +16738 +16742 +16743 +16748 +16750 +16753 +16757 +16758 +16761 +16762 +16763 +16772 +16775 +16777 +16781 +16785 +16787 +16788 +16789 +16790 +16798 +16799 +16801 +16803 +16810 +16811 +16815 +16816 +16823 +16827 +16831 +16832 +16834 +16836 +16839 +16842 +16843 +16846 +16847 +16848 +16849 +16853 +16863 +16871 +16872 +16876 +16877 +16878 +16879 +16880 +16883 +16885 +16888 +16890 +16892 +16896 +16898 +16900 +16903 +16905 +16906 +16909 +16914 +16915 +16918 +16926 +16929 +16938 +16946 +16955 +16960 +16961 +16963 +16965 +16968 +16970 +16971 +16973 +16975 +16979 +16980 +16986 +16987 +16991 +16992 +16996 +17001 +17002 +17005 +17006 +17008 +17011 +17015 +17019 +17020 +17024 +17027 +17030 +17035 +17036 +17037 +17038 +17050 +17051 +17055 +17061 +17079 +17085 +17092 
+17105 +17108 +17109 +17112 +17116 +17119 +17124 +17126 +17131 +17133 +17139 +17142 +17144 +17153 +17154 +17156 +17157 +17163 +17165 +17167 +17169 +17176 +17178 +17181 +17184 +17186 +17190 +17191 +17201 +17206 +17208 +17209 +17215 +17226 +17229 +17246 +17247 +17251 +17252 +17253 +17262 +17266 +17267 +17270 +17272 +17273 +17278 +17284 +17288 +17295 +17296 +17297 +17299 +17303 +17304 +17307 +17308 +17310 +17311 +17312 +17314 +17315 +17317 +17324 +17327 +17328 +17336 +17337 +17338 +17339 +17340 +17343 +17344 +17346 +17348 +17349 +17351 +17352 +17353 +17354 +17357 +17358 +17359 +17360 +17361 +17362 +17364 +17365 +17367 +17368 +17371 +17372 +17373 +17374 +17375 +17377 +17378 +17380 +17382 +17383 +17385 +17386 +17387 +17390 +17391 +17392 +17393 +17394 +17398 +17399 +17400 +17401 +17402 +17403 +17404 +17405 +17406 +17407 +17408 +17409 +17410 +17411 +17412 +17413 +17414 +17415 +17417 +17418 +17420 +17421 +17422 +17423 +17424 +17425 +17427 +17428 +17429 +17430 +17433 +17438 +17440 +17442 +17445 +17446 +17447 +17448 +17449 +17450 +17453 +17454 +17455 +17456 +17457 +17458 +17461 +17462 +17463 +17464 +17466 +17468 +17469 +17480 +17484 +17490 +17491 +17496 +17497 +17501 +17502 +17506 +17507 +17508 +17510 +17514 +17515 +17516 +17518 +17519 +17520 +17521 +17524 +17527 +17532 +17537 +17538 +17549 +17558 +17563 +17564 +17565 +17566 +17567 +17568 +17569 +17572 +17576 +17577 +17582 +17584 +17586 +17589 +17592 +17593 +17594 +17596 +17597 +17599 +17601 +17603 +17604 +17608 +17609 +17610 +17611 +17613 +17616 +17617 +17619 +17623 +17624 +17625 +17628 +17631 +17632 +17633 +17634 +17635 +17639 +17641 +17642 +17644 +17646 +17648 +17649 +17650 +17652 +17653 +17655 +17657 +17658 +17659 +17660 +17661 +17662 +17664 +17670 +17674 +17676 +17677 +17678 +17679 +17682 +17686 +17687 +17690 +17709 +17719 +17720 +17721 +17723 +17724 +17725 +17727 +17728 +17731 +17732 +17733 +17734 +17735 +17736 +17737 +17738 +17740 +17741 +17742 +17743 +17746 +17760 +17761 +17762 +17763 +17768 +17769 +17772 +17773 +17778 +17780 +17781 +17782 +17784 +17785 +17787 +17790 +17791 +17792 +17794 +17796 +17797 +17801 +17802 +17804 +17809 +17810 +17811 +17812 +17813 +17815 +17816 +17818 +17819 +17824 +17833 +17837 +17839 +17840 +17843 +17844 +17845 +17848 +17850 +17851 +17853 +17854 +17855 +17856 +17857 +17858 +17859 +17860 +17861 +17862 +17864 +17866 +17867 +17868 +17869 +17870 +17871 +17872 +17874 +17875 +17876 +17877 +17878 +17879 +17880 +17883 +17884 +17886 +17887 +17888 +17889 +17893 +17895 +17898 +17900 +17903 +17912 +17916 +17917 +17923 +17927 +17932 +17934 +17936 +17937 +17938 +17944 +17945 +17946 +17947 +17948 +17951 +17952 +17953 +17956 +17957 +17958 +17959 +17961 +17962 +17963 +17964 +17968 +17969 +17970 +17971 +17973 +17974 +17977 +17978 +17981 +17982 +17983 +17985 +17988 +17989 +17990 +17997 +17998 +18003 +18005 +18006 +18010 +18015 +18018 +18019 +18020 +18021 +18022 +18023 +18024 +18025 +18026 +18028 +18030 +18033 +18034 +18035 +18036 +18037 +18039 +18043 +18048 +18049 +18055 +18057 +18059 +18061 +18062 +18064 +18065 +18066 +18068 +18069 +18070 +18071 +18075 +18076 +18079 +18083 +18084 +18086 +18087 +18088 +18089 +18090 +18091 +18092 +18094 +18095 +18097 +18098 +18101 +18102 +18104 +18105 +18106 +18108 +18109 +18111 +18112 +18113 +18114 +18115 +18118 +18119 +18121 +18123 +18124 +18128 +18134 +18135 +18136 +18138 +18139 +18140 +18142 +18143 +18144 +18146 +18148 +18149 +18150 +18158 +18159 +18165 +18166 +18171 +18172 +18175 +18176 +18202 +18203 +18204 +18205 +18207 +18211 +18214 +18215 +18216 +18217 +18219 +18220 +18221 +18222 +18225 
+18226 +18228 +18229 +18231 +18233 +18234 +18235 +18237 +18238 +18239 +18241 +18244 +18245 +18247 +18248 +18249 +18250 +18252 +18255 +18257 +18258 +18259 +18262 +18263 +18267 +18268 +18269 +18271 +18272 +18273 +18274 +18275 +18278 +18281 +18282 +18284 +18285 +18287 +18288 +18294 +18295 +18296 +18297 +18301 +18302 +18303 +18304 +18305 +18306 +18307 +18309 +18320 +18323 +18324 +18325 +18326 +18329 +18330 +18331 +18332 +18333 +18334 +18336 +18341 +18342 +18347 +18349 +18353 +18355 +18356 +18357 +18358 +18359 +18362 +18366 +18371 +18379 +18384 +18386 +18389 +18390 +18392 +18394 +18396 +18397 +18398 +18399 +18401 +18402 +18403 +18405 +18410 +18411 +18417 +18420 +18421 +18422 +18424 +18439 +18440 +18441 +18442 +18443 +18444 +18445 +18446 +18449 +18450 +18451 +18453 +18454 +18457 +18458 +18459 +18460 +18461 +18464 +18467 +18468 +18471 +18476 +18477 +18478 +18480 +18484 +18485 +18486 +18487 +18489 +18490 +18492 +18493 +18494 +18495 +18496 +18497 +18499 +18504 +18505 +18506 +18507 +18508 +18509 +18510 +18512 +18513 +18514 +18515 +18516 +18520 +18521 +18522 +18523 +18527 +18529 +18530 +18531 +18533 +18534 +18535 +18537 +18538 +18539 +18540 +18541 +18544 +18545 +18547 +18548 +18549 +18551 +18552 +18553 +18554 +18556 +18557 +18558 +18560 +18563 +18568 +18571 +18572 +18573 +18574 +18575 +18576 +18579 +18581 +18583 +18584 +18585 +18586 +18587 +18588 +18589 +18590 +18591 +18592 +18593 +18595 +18596 +18599 +18601 +18602 +18604 +18605 +18608 +18609 +18610 +18611 +18613 +18614 +18615 +18616 +18617 +18618 +18619 +18620 +18621 +18622 +18629 +18630 +18631 +18632 +18634 +18635 +18636 +18639 +18640 +18644 +18645 +18649 +18655 +18657 +18658 +18660 +18662 +18663 +18667 +18668 +18669 +18670 +18678 +18679 +18682 +18683 +18685 +18689 +18692 +18694 +18695 +18696 +18701 +18703 +18713 +18721 +18723 +18726 +18728 +18729 +18733 +18736 +18738 +18739 +18742 +18751 +18752 +18753 +18757 +18761 +18764 +18765 +18773 +18778 +18779 +18780 +18782 +18789 +18790 +18793 +18794 +18796 +18799 +18800 +18803 +18806 +18807 +18814 +18815 +18820 +18823 +18824 +18826 +18827 +18832 +18833 +18834 +18835 +18839 +18840 +18841 +18845 +18846 +18847 +18848 +18850 +18851 +18852 +18853 +18855 +18856 +18857 +18858 +18861 +18862 +18863 +18864 +18865 +18866 +18867 +18868 +18869 +18870 +18871 +18872 +18883 +18884 +18886 +18888 +18898 +18900 +18904 +18906 +18909 +18911 +18912 +18913 +18919 +18922 +18925 +18926 +18927 +18931 +18935 +18937 +18940 +18941 +18943 +18944 +18946 +18947 +18948 +18951 +18952 +18953 +18955 +18960 +18977 +18978 +18979 +18982 +18984 +18986 +18987 +18994 +19010 +19011 +19014 +19019 +19025 +19027 +19029 +19031 +19032 +19033 +19037 +19050 +19054 +19067 +19068 +19071 +19073 +19075 +19076 +19077 +19078 +19080 +19081 +19084 +19085 +19086 +19090 +19091 +19094 +19098 +19100 +19102 +19103 +19104 +19106 +19109 +19110 +19114 +19115 +19118 +19119 +19120 +19122 +19123 +19125 +19127 +19128 +19131 +19132 +19133 +19135 +19136 +19137 +19138 +19140 +19141 +19142 +19143 +19144 +19145 +19148 +19149 +19150 +19152 +19153 +19154 +19155 +19156 +19157 +19158 +19159 +19161 +19163 +19164 +19167 +19170 +19173 +19174 +19177 +19178 +19179 +19181 +19183 +19185 +19188 +19191 +19192 +19193 +19195 +19196 +19197 +19199 +19200 +19202 +19211 +19212 +19213 +19216 +19217 +19218 +19219 +19220 +19221 +19224 +19225 +19226 +19227 +19228 +19230 +19231 +19232 +19234 +19236 +19237 +19238 +19242 +19250 +19252 +19255 +19256 +19257 +19258 +19259 +19260 +19262 +19263 +19264 +19265 +19268 +19269 +19273 +19274 +19275 +19282 +19287 +19288 +19290 +19291 +19293 +19294 +19295 +19298 
+19300 +19302 +19303 +19304 +19305 +19308 +19309 +19314 +19315 +19316 +19318 +19319 +19322 +19323 +19328 +19329 +19330 +19331 +19334 +19335 +19337 +19340 +19341 +19342 +19343 +19345 +19346 +19347 +19348 +19349 +19350 +19352 +19354 +19358 +19361 +19362 +19363 +19364 +19365 +19367 +19369 +19371 +19372 +19375 +19377 +19379 +19380 +19383 +19384 +19386 +19400 +19401 +19402 +19403 +19405 +19412 +19415 +19417 +19419 +19421 +19432 +19433 +19434 +19435 +19438 +19439 +19440 +19444 +19445 +19446 +19448 +19449 +19450 +19451 +19454 +19455 +19457 +19459 +19469 +19470 +19471 +19475 +19476 +19479 +19481 +19482 +19483 +19484 +19488 +19490 +19492 +19493 +19494 +19495 +19496 +19499 +19500 +19501 +19504 +19506 +19507 +19509 +19511 +19512 +19514 +19515 +19516 +19517 +19518 +19519 +19520 +19522 +19523 +19524 +19525 +19526 +19528 +19530 +19536 +19537 +19539 +19540 +19541 +19544 +19545 +19546 +19547 +19548 +19549 +19551 +19553 +19554 +19555 +19561 +19564 +19565 +19566 +19573 +19575 +19576 +19580 +19581 +19582 +19585 +19586 +19588 +19590 +19595 +19604 +19605 +19606 +19609 +19611 +19612 +19613 +19614 +19616 +19620 +19622 +19625 +19626 +19627 +19628 +19630 +19631 +19632 +19633 +19636 +19638 +19639 +19640 +19641 +19642 +19646 +19647 +19653 +19654 +19661 +19665 +19666 +19667 +19668 +19670 +19671 +19673 +19674 +19675 +19676 +19677 +19679 +19680 +19681 +19682 +19685 +19686 +19687 +19688 +19690 +19691 +19692 +19694 +19695 +19696 +19697 +19703 +19705 +19707 +19708 +19712 +19715 +19716 +19718 +19719 +19720 +19722 +19726 +19730 +19731 +19732 +19735 +19736 +19739 +19743 +19744 +19754 +19755 +19756 +19757 +19761 +19762 +19765 +19767 +19768 +19771 +19772 +19775 +19780 +19790 +19793 +19795 +19796 +19799 +19800 +19803 +19811 +19815 +19817 +19829 +19831 +19833 +19835 +19836 +19837 +19838 +19839 +19843 +19850 +19856 +19859 +19860 +19861 +19862 +19863 +19864 +19865 +19869 +19870 +19871 +19872 +19877 +19880 +19885 +19887 +19892 +19893 +19894 +19895 +19900 +19903 +19905 +19906 +19907 +19908 +19912 +19914 +19915 +19916 +19920 +19922 +19924 +19925 +19926 +19927 +19928 +19931 +19933 +19936 +19941 +19943 +19944 +19945 +19950 +19951 +19954 +19955 +19962 +19965 +19966 +19967 +19968 +19969 +19970 +19971 +19972 +19973 +19974 +19976 +19983 +19986 +19988 +19990 +19995 +19996 +19997 +19999 +20006 +20008 +20016 +20017 +20018 +20019 +20022 +20023 +20024 +20027 +20028 +20030 +20035 +20040 +20041 +20042 +20043 +20045 +20047 +20048 +20051 +20052 +20053 +20054 +20055 +20056 +20059 +20060 +20061 +20062 +20064 +20066 +20068 +20069 +20072 +20079 +20080 +20082 +20084 +20085 +20090 +20091 +20095 +20096 +20098 +20100 +20101 +20102 +20106 +20108 +20109 +20113 +20114 +20115 +20116 +20120 +20121 +20124 +20125 +20126 +20127 +20138 +20139 +20141 +20143 +20145 +20146 +20147 +20151 +20152 +20153 +20160 +20161 +20163 +20169 +20170 +20172 +20176 +20183 +20185 +20190 +20191 +20193 +20196 +20197 +20205 +20207 +20209 +20212 +20213 +20214 +20216 +20217 +20218 +20219 +20220 +20221 +20222 +20223 +20224 +20226 +20228 +20229 +20230 +20231 +20234 +20237 +20238 +20246 +20248 +20249 +20255 +20260 +20263 +20264 +20265 +20267 +20269 +20276 +20278 +20279 +20280 +20281 +20282 +20284 +20285 +20286 +20287 +20288 +20289 +20293 +20294 +20295 +20298 +20301 +20303 +20309 +20310 +20315 +20316 +20317 +20323 +20324 +20325 +20326 +20328 +20331 +20333 +20335 +20345 +20346 +20347 +20353 +20354 +20355 +20356 +20358 +20360 +20361 +20362 +20363 +20364 +20365 +20366 +20371 +20372 +20374 +20377 +20379 +20383 +20384 +20388 +20390 +20392 +20395 +20396 +20397 +20398 +20403 +20404 +20405 +20406 
+20407 +20408 +20411 +20412 +20413 +20414 +20415 +20416 +20418 +20420 +20422 +20423 +20425 +20435 +20439 +20442 +20444 +20447 +20450 +20454 +20455 +20458 +20460 +20469 +20470 +20471 +20473 +20474 +20476 +20478 +20479 +20483 +20487 +20490 +20491 +20492 +20493 +20496 +20497 +20498 +20502 +20503 +20504 +20509 +20510 +20513 +20514 +20515 +20516 +20517 +20518 +20521 +20522 +20524 +20526 +20527 +20528 +20532 +20536 +20540 +20541 +20544 +20547 +20548 +20550 +20551 +20552 +20553 +20555 +20556 +20560 +20561 +20564 +20566 +20567 +20570 +20573 +20575 +20576 +20579 +20581 +20584 +20587 +20588 +20589 +20590 +20592 +20593 +20594 +20595 +20596 +20599 +20601 +20602 +20604 +20605 +20607 +20611 +20612 +20615 +20616 +20617 +20619 +20620 +20622 +20624 +20626 +20627 +20628 +20629 +20633 +20637 +20639 +20640 +20641 +20642 +20649 +20650 +20652 +20654 +20658 +20659 +20660 +20661 +20662 +20663 +20664 +20665 +20666 +20667 +20668 +20670 +20677 +20678 +20680 +20682 +20685 +20686 +20687 +20688 +20690 +20691 +20692 +20693 +20694 +20699 +20700 +20702 +20703 +20704 +20705 +20708 +20709 +20710 +20711 +20712 +20713 +20714 +20715 +20717 +20718 +20722 +20723 +20725 +20726 +20730 +20731 +20732 +20736 +20740 +20747 +20752 +20753 +20754 +20755 +20761 +20762 +20765 +20766 +20775 +20781 +20784 +20785 +20786 +20787 +20788 +20789 +20790 +20791 +20792 +20795 +20796 +20798 +20804 +20806 +20807 +20810 +20812 +20817 +20821 +20822 +20823 +20828 +20829 +20831 +20834 +20835 +20837 +20838 +20841 +20845 +20846 +20848 +20849 +20850 +20851 +20852 +20853 +20857 +20858 +20859 +20861 +20864 +20868 +20871 +20874 +20875 +20878 +20879 +20880 +20882 +20884 +20888 +20893 +20895 +20896 +20898 +20900 +20901 +20902 +20904 +20906 +20909 +20911 +20915 +20917 +20918 +20919 +20924 +20925 +20934 +20935 +20936 +20938 +20939 +20940 +20941 +20944 +20945 +20947 +20948 +20953 +20954 +20957 +20961 +20966 +20977 +20978 +20979 +20980 +20983 +20986 +20987 +20996 +20999 +21000 +21005 +21007 +21010 +21012 +21015 +21022 +21023 +21024 +21025 +21026 +21029 +21033 +21035 +21036 +21037 +21038 +21040 +21041 +21042 +21043 +21044 +21045 +21046 +21047 +21048 +21049 +21051 +21052 +21053 +21054 +21055 +21056 +21057 +21061 +21062 +21063 +21064 +21065 +21066 +21067 +21070 +21074 +21076 +21081 +21082 +21084 +21085 +21086 +21090 +21092 +21098 +21099 +21110 +21111 +21113 +21117 +21119 +21120 +21122 +21123 +21126 +21131 +21134 +21136 +21143 +21144 +21145 +21146 +21147 +21149 +21151 +21154 +21156 +21157 +21161 +21164 +21167 +21168 +21169 +21170 +21172 +21177 +21178 +21179 +21184 +21187 +21188 +21194 +21195 +21196 +21197 +21198 +21199 +21200 +21202 +21217 +21219 +21222 +21227 +21230 +21236 +21250 +21255 +21266 +21267 +21269 +21277 +21279 +21280 +21282 +21284 +21285 +21295 +21301 +21303 +21305 +21309 +21310 +21311 +21313 +21314 +21315 +21316 +21317 +21319 +21325 +21334 +21335 +21337 +21341 +21343 +21347 +21350 +21354 +21355 +21356 +21359 +21361 +21367 +21368 +21372 +21374 +21375 +21376 +21380 +21384 +21386 +21387 +21395 +21396 +21400 +21405 +21406 +21409 +21412 +21413 +21417 +21423 +21426 +21427 +21429 +21430 +21431 +21436 +21439 +21449 +21457 +21462 +21465 +21467 +21474 +21477 +21482 +21483 +21484 +21487 +21492 +21493 +21495 +21499 +21500 +21505 +21509 +21513 +21514 +21515 +21517 +21520 +21522 +21526 +21527 +21530 +21531 +21532 +21535 +21539 +21543 +21545 +21546 +21547 +21548 +21552 +21558 +21561 +21567 +21571 +21574 +21576 +21577 +21578 +21579 +21583 +21587 +21588 +21589 +21596 +21599 +21602 +21604 +21609 +21613 +21616 +21618 +21621 +21623 +21625 +21633 +21635 +21636 +21639 +21642 
+21643 +21646 +21647 +21648 +21657 +21659 +21660 +21661 +21663 +21664 +21666 +21667 +21670 +21671 +21673 +21676 +21677 +21678 +21684 +21686 +21687 +21688 +21690 +21700 +21709 +21710 +21711 +21714 +21715 +21719 +21720 +21721 +21722 +21725 +21726 +21727 +21732 +21737 +21738 +21739 +21741 +21742 +21745 +21746 +21747 +21752 +21755 +21756 +21758 +21759 +21760 +21761 +21763 +21770 +21771 +21772 +21775 +21776 +21779 +21780 +21782 +21784 +21789 +21790 +21793 +21794 +21798 +21799 +21801 +21802 +21804 +21809 +21813 +21814 +21815 +21816 +21817 +21821 +21824 +21825 +21826 +21827 +21828 +21829 +21830 +21832 +21834 +21836 +21837 +21839 +21840 diff --git a/timm/data/parsers/parser_wds.py b/timm/data/parsers/parser_wds.py index f6eaeb24d8..83df8929fd 100644 --- a/timm/data/parsers/parser_wds.py +++ b/timm/data/parsers/parser_wds.py @@ -45,6 +45,7 @@ class SplitInfo: num_samples: int filenames: Tuple[str] shard_lengths: Tuple[int] = () + alt_label: str = '' name: str = '' @@ -54,6 +55,7 @@ def _info_convert(dict_info): num_samples=dict_info['num_samples'], filenames=tuple(dict_info['filenames']), shard_lengths=tuple(dict_info['shard_lengths']), + alt_label=dict_info.get('alt_label', ''), name=dict_info['name'], ) @@ -98,7 +100,7 @@ def _info_convert(dict_info): return split_info -def _decode(sample, image_key='jpg', image_format='RGB', target_key='cls'): +def _decode(sample, image_key='jpg', image_format='RGB', target_key='cls', alt_label=''): """ Custom sample decode * decode and convert PIL Image * cls byte string label to int @@ -109,7 +111,13 @@ def _decode(sample, image_key='jpg', image_format='RGB', target_key='cls'): img.load() if image_format: img = img.convert(image_format) - return dict(jpg=img, cls=int(sample[target_key]), json=sample.get('json', None)) + if alt_label: + # alternative labels are encoded in json metadata + assert 'json' in sample + meta = json.loads(sample['json']) + return dict(jpg=img, cls=int(meta[alt_label]), json=meta) + else: + return dict(jpg=img, cls=int(sample[target_key]), json=sample.get('json', None)) class ParserWebdataset(Parser): @@ -209,7 +217,11 @@ def _lazy_init(self): wds.tarfile_to_samples(), ]) pipeline.extend([ - wds.map(partial(_decode, image_key=self.image_key, image_format=self.image_format)) + wds.map(partial( + _decode, + image_key=self.image_key, + image_format=self.image_format, + alt_label=self.split_info.alt_label)) ]) self.ds = wds.DataPipeline(*pipeline) self.init_count += 1 From 229ac6b8d88e3523db741ba74b5ac9da5b30720c Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 11 Mar 2022 19:16:04 -0800 Subject: [PATCH 51/61] Fix alternate label handling in WDS parser to skip invalid alt labels --- timm/data/parsers/parser_wds.py | 53 ++++++++++++++++++++++++++------- 1 file changed, 43 insertions(+), 10 deletions(-) diff --git a/timm/data/parsers/parser_wds.py b/timm/data/parsers/parser_wds.py index 83df8929fd..47ad6184ef 100644 --- a/timm/data/parsers/parser_wds.py +++ b/timm/data/parsers/parser_wds.py @@ -106,18 +106,51 @@ def _decode(sample, image_key='jpg', image_format='RGB', target_key='cls', alt_l * cls byte string label to int * pass through JSON byte string (if it exists) without parse """ + # decode class label, skip if alternate label not valid + if alt_label: + # alternative labels are encoded in json metadata + meta = json.loads(sample['json']) + class_label = int(meta[alt_label]) + if class_label < 0: + # skipped labels currently encoded as -1, may change to a null/None value + return None + else: + class_label = 
int(sample[target_key]) + + # decode image with io.BytesIO(sample[image_key]) as b: img = Image.open(b) img.load() if image_format: img = img.convert(image_format) - if alt_label: - # alternative labels are encoded in json metadata - assert 'json' in sample - meta = json.loads(sample['json']) - return dict(jpg=img, cls=int(meta[alt_label]), json=meta) - else: - return dict(jpg=img, cls=int(sample[target_key]), json=sample.get('json', None)) + + # json passed through in undecoded state + return dict(jpg=img, cls=class_label, json=sample.get('json', None)) + + +def _decode_samples( + data, + image_key='jpg', + image_format='RGB', + target_key='cls', + alt_label='', + handler=wds.reraise_exception): + """Decode samples with skip.""" + for sample in data: + try: + result = _decode( + sample, image_key=image_key, image_format=image_format, target_key=target_key, alt_label=alt_label) + except Exception as exn: + if handler(exn): + continue + else: + break + + # null results are skipped + if result is not None: + if isinstance(sample, dict) and isinstance(result, dict): + result["__key__"] = sample.get("__key__") + yield result class ParserWebdataset(Parser): @@ -217,11 +250,11 @@ def _lazy_init(self): wds.tarfile_to_samples(), ]) pipeline.extend([ - wds.map(partial( - _decode, + partial( + _decode_samples, image_key=self.image_key, image_format=self.image_format, - alt_label=self.split_info.alt_label)) + alt_label=self.split_info.alt_label) ]) self.ds = wds.DataPipeline(*pipeline) self.init_count += 1 From 7eeaf521a083c6acc4d0678dd3cef67b059c0278 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 11 Mar 2022 20:42:39 -0800 Subject: [PATCH 52/61] use gopen in wds to open info file in case it's at a url/gs location --- timm/data/parsers/parser_wds.py | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/timm/data/parsers/parser_wds.py b/timm/data/parsers/parser_wds.py index 47ad6184ef..0bf3fb7ab7 100644 --- a/timm/data/parsers/parser_wds.py +++ b/timm/data/parsers/parser_wds.py @@ -31,14 +31,24 @@ def _load_info(root, basename='info'): info_json = os.path.join(root, basename + '.json') info_yaml = os.path.join(root, basename + '.yaml') - info_dict = {} - if os.path.exists(info_json): - with open(info_json, 'r') as f: + err_str = '' + try: + with wds.gopen.gopen(info_json) as f: info_dict = json.load(f) - elif os.path.exists(info_yaml): - with open(info_yaml, 'r') as f: + return info_dict + except Exception: + pass + try: + with wds.gopen.gopen(info_yaml) as f: info_dict = yaml.safe_load(f) - return info_dict + return info_dict + except Exception as e: + err_str = str(e) + # FIXME change to log + print(f'Dataset info file not found at {info_json} or {info_yaml}. Error: {err_str}. 
' + f'Falling back to provided split and size arg.') + return {} + @dataclass class SplitInfo: @@ -171,6 +181,9 @@ def __init__( shuffle_size=None, ): super().__init__() + if wds is None: + raise RuntimeError( + 'Please install webdataset 0.2.x package `pip install git+https://github.com/webdataset/webdataset`.') self.root = root self.is_training = is_training self.batch_size = batch_size From ab16a358bb5ae7d7e2cdd78c90dcdd01d972963a Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Wed, 16 Mar 2022 11:44:29 -0700 Subject: [PATCH 53/61] Add log and continue handler for WDS errors, fix args.num_gpu for validation script fallback --- timm/data/parsers/parser_wds.py | 21 ++++++++++++++++----- validate.py | 1 + 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/timm/data/parsers/parser_wds.py b/timm/data/parsers/parser_wds.py index 0bf3fb7ab7..7011d9673e 100644 --- a/timm/data/parsers/parser_wds.py +++ b/timm/data/parsers/parser_wds.py @@ -6,8 +6,10 @@ import os import io import json -import yaml +import logging import random +import yaml + from dataclasses import dataclass from itertools import islice from functools import partial @@ -25,6 +27,8 @@ from .parser import Parser from timm.bits import get_global_device, is_global_device +_logger = logging.getLogger(__name__) + SHUFFLE_SIZE = 8192 @@ -110,6 +114,12 @@ def _info_convert(dict_info): return split_info +def log_and_continue(exn): + """Call in an exception handler to ignore any exception, isssue a warning, and continue.""" + _logger.warning(f'Handling webdataset error ({repr(exn)}). Ignoring.') + return True + + def _decode(sample, image_key='jpg', image_format='RGB', target_key='cls', alt_label=''): """ Custom sample decode * decode and convert PIL Image @@ -135,7 +145,8 @@ def _decode(sample, image_key='jpg', image_format='RGB', target_key='cls', alt_l img = img.convert(image_format) # json passed through in undecoded state - return dict(jpg=img, cls=class_label, json=sample.get('json', None)) + decoded = dict(jpg=img, cls=class_label, json=sample.get('json', None)) + return decoded def _decode_samples( @@ -144,7 +155,7 @@ def _decode_samples( image_format='RGB', target_key='cls', alt_label='', - handler=wds.reraise_exception): + handler=log_and_continue): """Decode samples with skip.""" for sample in data: try: @@ -251,7 +262,7 @@ def _lazy_init(self): wds.detshuffle(self.shard_shuffle_size, seed=self.common_seed), self._split_by_node_and_worker, # at this point, we have an iterator over the shards assigned to each worker - wds.tarfile_to_samples(), + wds.tarfile_to_samples(handler=log_and_continue), wds.shuffle( self.sample_shuffle_size, rng=random.Random(self.worker_seed)), # this is why we lazy-init whole DataPipeline @@ -260,7 +271,7 @@ def _lazy_init(self): pipeline.extend([ self._split_by_node_and_worker, # at this point, we have an iterator over the shards assigned to each worker - wds.tarfile_to_samples(), + wds.tarfile_to_samples(handler=log_and_continue), ]) pipeline.extend([ partial( diff --git a/validate.py b/validate.py index d2eca03efe..aa2555fc51 100755 --- a/validate.py +++ b/validate.py @@ -262,6 +262,7 @@ def main(): batch_size = start_batch_size args.model = m args.checkpoint = c + args.num_gpu = 1 # FIXME support data-parallel? 
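# --- Editor's note (not part of the patch): a minimal usage sketch showing how the
# parser_wds.py pieces above compose into a webdataset pipeline. It assumes
# webdataset 0.2.x; the shard pattern and the 'label_12k' alt-label key are
# illustrative placeholders, while log_and_continue, _decode_samples,
# wds.tarfile_to_samples and wds.DataPipeline are taken from the diffs above.
from functools import partial
import webdataset as wds
from timm.data.parsers.parser_wds import _decode_samples, log_and_continue

pipeline = wds.DataPipeline(
    wds.SimpleShardList('train-{000000..000099}.tar'),   # hypothetical shard spec
    wds.tarfile_to_samples(handler=log_and_continue),    # corrupt tar members are logged and skipped
    partial(_decode_samples, alt_label='label_12k'),     # samples whose alt label is < 0 decode to None and are dropped
)
for sample in pipeline:
    img, cls = sample['jpg'], sample['cls']              # PIL.Image and int label, per _decode()
    break
# --- end editor's note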
result = OrderedDict(model=args.model) r = {} while not r and batch_size >= 1: From ef57561d5124f831051e5996d8346e95ded69c14 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Wed, 16 Mar 2022 14:55:36 -0700 Subject: [PATCH 54/61] Fix some TPU (XLA) issues with swin transformer v2 --- timm/models/swin_transformer_v2_cr.py | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/timm/models/swin_transformer_v2_cr.py b/timm/models/swin_transformer_v2_cr.py index 39ea993e89..d3bf8c85dd 100644 --- a/timm/models/swin_transformer_v2_cr.py +++ b/timm/models/swin_transformer_v2_cr.py @@ -392,13 +392,15 @@ def _shifted_window_attn(self, x): x = x.view(B, H, W, C) # cyclic shift + sh, sw = self.shift_size if any(self.shift_size): - shifted_x = torch.roll(x, shifts=(-self.shift_size[0], -self.shift_size[1]), dims=(1, 2)) - else: - shifted_x = x + # FIXME PyTorch XLA needs cat impl, roll not lowered + # x = torch.cat([x[:, sh:], x[:, :sh]], dim=1) + # x = torch.cat([x[:, :, sw:], x[:, :, :sw]], dim=2) + x = torch.roll(x, shifts=(-sh, -sw), dims=(1, 2)) # partition windows - x_windows = window_partition(shifted_x, self.window_size) # num_windows * B, window_size, window_size, C + x_windows = window_partition(x, self.window_size) # num_windows * B, window_size, window_size, C x_windows = x_windows.view(-1, self.window_size[0] * self.window_size[1], C) # W-MSA/SW-MSA @@ -406,13 +408,14 @@ def _shifted_window_attn(self, x): # merge windows attn_windows = attn_windows.view(-1, self.window_size[0], self.window_size[1], C) - shifted_x = window_reverse(attn_windows, self.window_size, self.feat_size) # B H' W' C + x = window_reverse(attn_windows, self.window_size, self.feat_size) # B H' W' C # reverse cyclic shift if any(self.shift_size): - x = torch.roll(shifted_x, shifts=self.shift_size, dims=(1, 2)) - else: - x = shifted_x + # FIXME PyTorch XLA needs cat impl, roll not lowered + # x = torch.cat([x[:, -sh:], x[:, :-sh]], dim=1) + # x = torch.cat([x[:, :, -sw:], x[:, :, :-sw]], dim=2) + x = torch.roll(x, shifts=(sh, sw), dims=(1, 2)) x = x.view(B, L, C) return x @@ -452,8 +455,10 @@ def forward(self, x: torch.Tensor) -> torch.Tensor: Returns: output (torch.Tensor): Output tensor of the shape [B, 2 * C, H // 2, W // 2] """ - x = bchw_to_bhwc(x).unfold(dimension=1, size=2, step=2).unfold(dimension=2, size=2, step=2) - x = x.permute(0, 1, 2, 5, 4, 3).flatten(3) # permute maintains compat with ch order in official swin impl + B, C, H, W = x.shape + # unfold + BCHW -> BHWC together + # ordering, 5, 3, 1 instead of 3, 5, 1 maintains compat with original swin v1 merge + x = x.reshape(B, C, H // 2, 2, W // 2, 2).permute(0, 2, 4, 5, 3, 1).flatten(3) x = self.norm(x) x = bhwc_to_bchw(self.reduction(x)) return x From 59ffab537c7913dcbc555bed0cca87f9a2770924 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Thu, 17 Mar 2022 14:42:40 -0700 Subject: [PATCH 55/61] Fix mistake in wds sample slicing --- timm/data/parsers/parser_wds.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/timm/data/parsers/parser_wds.py b/timm/data/parsers/parser_wds.py index 7011d9673e..02ce2794be 100644 --- a/timm/data/parsers/parser_wds.py +++ b/timm/data/parsers/parser_wds.py @@ -285,7 +285,7 @@ def _lazy_init(self): def _split_by_node_and_worker(self, src): if self.global_num_workers > 1: - for s in islice(src, self.global_worker_id, self.global_num_workers): + for s in islice(src, self.global_worker_id, None, self.global_num_workers): yield s else: for s in src: From 
5e1be34a60d117d2fb12425819f3c728bbb19858 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Thu, 17 Mar 2022 17:02:39 -0700 Subject: [PATCH 56/61] Add ImageNet-22k/12k TFDS dataset defs --- timm/data/parsers/tfds/__init__.py | 1 + timm/data/parsers/tfds/imagenet22k.py | 6935 +++++++++++++++++++++++++ 2 files changed, 6936 insertions(+) create mode 100644 timm/data/parsers/tfds/__init__.py create mode 100644 timm/data/parsers/tfds/imagenet22k.py diff --git a/timm/data/parsers/tfds/__init__.py b/timm/data/parsers/tfds/__init__.py new file mode 100644 index 0000000000..baf2f6658b --- /dev/null +++ b/timm/data/parsers/tfds/__init__.py @@ -0,0 +1 @@ +from .imagenet22k import Imagenet22k, Imagenet12k, imagenet12k_synsets, imagenet22k_synsets diff --git a/timm/data/parsers/tfds/imagenet22k.py b/timm/data/parsers/tfds/imagenet22k.py new file mode 100644 index 0000000000..9f5dde0b34 --- /dev/null +++ b/timm/data/parsers/tfds/imagenet22k.py @@ -0,0 +1,6935 @@ +""" ImageNet-22K (and 12K subset) TFDS dataset definitions.""" +import io +import os +import time +import re +import operator +from itertools import chain + +import numpy as np +from PIL import Image + +import tensorflow_datasets as tfds + +MIN_DIM = 56 +MAX_DIM = 448 + +_DESCRIPTION = """ +Description is **formatted** as markdown. + +It should also contain any processing which has been applied (if any), +(e.g. corrupted example skipped, images cropped,...): +""" + +_CITATION = """ +""" + +_TRAIN_12K_CSV = 'train_12k.csv' +_TRAIN_22K_CSV = 'train_22k.csv' + +# both datasets use same 12k validation samples (but different labels), there are few samples in the rest +_VALIDATION_12K_CSV = 'val_12k.csv' +_VALIDATION_22K_CSV = 'val_12k.csv' + + +class Imagenet22k(tfds.core.GeneratorBasedBuilder): + """DatasetBuilder for imagenet22k dataset.""" + + VERSION = tfds.core.Version('1.0.0') + RELEASE_NOTES = { + '1.0.0': 'Initial release.', + } + MANUAL_DOWNLOAD_INSTRUCTIONS = """\ + manual_dir must be specified and contain train & validation csv files and images. 
+ """ + _TRAIN_CSV = _TRAIN_22K_CSV + _VALIDATION_CSV = _VALIDATION_22K_CSV + + def _labels(self): + return imagenet22k_synsets() + + def _info(self) -> tfds.core.DatasetInfo: + """Returns the dataset metadata.""" + return tfds.core.DatasetInfo( + builder=self, + description=_DESCRIPTION, + features=tfds.features.FeaturesDict( + { + 'image': tfds.features.Image(encoding_format='jpeg'), + 'label': tfds.features.ClassLabel(names=self._labels()), + 'file_name': tfds.features.Text(), # Eg: ''n15075141',_54.JPEG' + }), + supervised_keys=('image', 'label'), + homepage='http://image-net.org/', + citation=_CITATION, + ) + + def _split_generators(self, dl_manager: tfds.download.DownloadManager): + """Returns SplitGenerators.""" + manual_dir = dl_manager.manual_dir + train_records, val_records = _load_records( + train_csv=os.path.join(manual_dir, self._TRAIN_CSV), + validation_csv=os.path.join(manual_dir, self._VALIDATION_CSV), + labels=self._labels(), + ) + return { + 'train': self._generate_examples(train_records, manual_dir), + 'validation': self._generate_examples(val_records, manual_dir), + } + + def _generate_examples(self, records, manual_dir, alt_label=None, resize_short=True, max_img_size=MAX_DIM): + """Yields examples.""" + for r in records: + try: + filename, output_record = _process_record( + r, manual_dir, alt_label=alt_label, resize_short=resize_short, max_img_size=max_img_size) + yield filename, output_record + except Exception as e: + print('Exception:', e) + continue + + +class Imagenet12k(Imagenet22k): + """DatasetBuilder for imagenet12k dataset.""" + + VERSION = tfds.core.Version('1.0.0') + RELEASE_NOTES = { + '1.0.0': 'Initial release.', + } + MANUAL_DOWNLOAD_INSTRUCTIONS = """\ + manual_dir must be specified and contain train & validation csv files and images. 
+ """ + _TRAIN_CSV = _TRAIN_12K_CSV + + def _labels(self): + return imagenet12k_synsets() + + +def _natural_key(string_): + """See http://www.codinghorror.com/blog/archives/001018.html""" + return [int(s) if s.isdigit() else s for s in re.split(r'(\d+)', string_.lower())] + + +def _load_records( + train_csv, + validation_csv, + labels, + alt_labels=None, + alt_label_name='', + min_img_size=MIN_DIM, +): + pd = tfds.core.lazy_imports.pandas + + train_record_df = pd.read_csv(train_csv, index_col=None) + train_record_df = train_record_df[ + ~((train_record_df.height < min_img_size) | (train_record_df.width < min_img_size))] + + val_record_df = pd.read_csv(validation_csv, index_col=None) + val_record_df = val_record_df[ + ~((val_record_df.height < min_img_size) | (val_record_df.width < min_img_size))] + + class_to_idx = {k: i for i, k in enumerate(labels)} + print('class to idx:', len(class_to_idx)) + + train_record_df['label'] = train_record_df['cls'].map(class_to_idx).astype(int) + train_record_df = train_record_df[['filename', 'label']] + train_record_df = train_record_df.sample(frac=1, random_state=42) + print('num train records:', len(train_record_df.index)) + + val_record_df['label'] = val_record_df['cls'].map(class_to_idx).astype(int) + val_record_df = val_record_df[['filename', 'label']] + val_record_df = val_record_df.sample(frac=1, random_state=42) + print('num val records:', len(val_record_df.index)) + + train_records = train_record_df.to_records(index=False) + val_records = val_record_df.to_records(index=False) + return train_records, val_records + + +def _process_record(input_record, input_dir, resize_short=True, max_img_size=600): + filename = input_record['filename'] + label = input_record['label'] + label = label.item() if isinstance(label, np.integer) else int(label) + output_record = dict(label=label) + + f = os.path.join(input_dir, filename) + img_stream = io.BytesIO(open(f, 'rb').read()) + img = Image.open(img_stream) + img.load() + + format_changed = img.format != 'JPEG' + mode_changed = False + if img.mode != "RGB": + mode_changed = True + img = img.convert('RGB') + + w, h = img.size + resized = False + if max_img_size is not None: + cmp_fn = min if resize_short else max + cmp_size = cmp_fn(img.size) + if cmp_size > max_img_size: + scale = max_img_size / float(cmp_size) + if scale != 1.0: + wn, hn = tuple(round(d * scale) for d in (w, h)) + img = img.resize((wn, hn), Image.BICUBIC, reducing_gap=3) + resized = True + + if resized or mode_changed or format_changed: + extra = {} + if 'icc_profile' in img.info: + extra['icc_profile'] = img.info['icc_profile'] + if 'exif' in img.info: + extra['exif'] = img.info['exif'] + del img_stream + img_stream = io.BytesIO() + img.save(img_stream, 'JPEG', subsampling=1, quality=90, **extra) + + img_stream.seek(0) + output_record['image'] = img_stream.read() + output_record['file_name'] = filename + return os.path.basename(filename), output_record + + +def imagenet12k_synsets(): + return [ + 'n00005787', 'n00006484', 'n00007846', 'n00015388', 'n00017222', + 'n00021265', 'n00021939', 'n00120010', 'n00141669', 'n00288000', + 'n00288384', 'n00324978', 'n00326094', 'n00433458', 'n00433661', + 'n00433802', 'n00434075', 'n00439826', 'n00440039', 'n00440382', + 'n00440509', 'n00440747', 'n00440941', 'n00441073', 'n00441824', + 'n00442115', 'n00442437', 'n00442847', 'n00442981', 'n00443231', + 'n00443692', 'n00443803', 'n00444340', 'n00444651', 'n00444846', + 'n00444937', 'n00445055', 'n00445226', 'n00445351', 'n00445685', + 'n00445802', 
'n00446311', 'n00446493', 'n00446804', 'n00446980', + 'n00447073', 'n00447221', 'n00447463', 'n00447540', 'n00447957', + 'n00448126', 'n00448232', 'n00448466', 'n00448640', 'n00448748', + 'n00448872', 'n00448958', 'n00449054', 'n00449168', 'n00449295', + 'n00449517', 'n00449695', 'n00449796', 'n00449892', 'n00449977', + 'n00450070', 'n00450335', 'n00450700', 'n00450866', 'n00450998', + 'n00451186', 'n00451370', 'n00451563', 'n00451635', 'n00452034', + 'n00452152', 'n00452293', 'n00452864', 'n00453126', 'n00453313', + 'n00453396', 'n00453478', 'n00453935', 'n00454237', 'n00454395', + 'n00454493', 'n00454624', 'n00454983', 'n00455173', 'n00456465', + 'n00463246', 'n00463543', 'n00464277', 'n00464478', 'n00464651', + 'n00464894', 'n00466273', 'n00466377', 'n00466524', 'n00466630', + 'n00466712', 'n00466880', 'n00467320', 'n00467536', 'n00467719', + 'n00467995', 'n00468299', 'n00468480', 'n00469651', 'n00470554', + 'n00470682', 'n00470830', 'n00470966', 'n00471437', 'n00471613', + 'n00474568', 'n00474657', 'n00474881', 'n00475014', 'n00475273', + 'n00475403', 'n00475535', 'n00475661', 'n00475787', 'n00476235', + 'n00476389', 'n00477392', 'n00477639', 'n00478262', 'n00479076', + 'n00479440', 'n00479616', 'n00479887', 'n00480211', 'n00480366', + 'n00480508', 'n00480993', 'n00481803', 'n00482122', 'n00482298', + 'n00483205', 'n00483313', 'n00483409', 'n00483508', 'n00483605', + 'n00483705', 'n00483848', 'n00523513', 'n00825773', 'n00887544', + 'n01055165', 'n01314388', 'n01314663', 'n01314781', 'n01315213', + 'n01316422', 'n01317089', 'n01317294', 'n01317541', 'n01317813', + 'n01317916', 'n01318279', 'n01318381', 'n01318894', 'n01319467', + 'n01321123', 'n01321230', 'n01321456', 'n01321579', 'n01321770', + 'n01321854', 'n01322221', 'n01322343', 'n01322508', 'n01322604', + 'n01322685', 'n01322898', 'n01322983', 'n01323068', 'n01323155', + 'n01323261', 'n01323355', 'n01323493', 'n01323599', 'n01324431', + 'n01324610', 'n01326291', 'n01338685', 'n01339083', 'n01339336', + 'n01339471', 'n01339801', 'n01340014', 'n01379389', 'n01381044', + 'n01384164', 'n01386354', 'n01392275', 'n01392380', 'n01395254', + 'n01396048', 'n01397114', 'n01397871', 'n01402600', 'n01405007', + 'n01405616', 'n01407798', 'n01410457', 'n01415626', 'n01421807', + 'n01422335', 'n01424420', 'n01438581', 'n01439121', 'n01439514', + 'n01440160', 'n01440764', 'n01441117', 'n01442972', 'n01443243', + 'n01443537', 'n01443831', 'n01444339', 'n01446760', 'n01447331', + 'n01447658', 'n01448291', 'n01448594', 'n01448951', 'n01449374', + 'n01449712', 'n01451426', 'n01453087', 'n01454545', 'n01455778', + 'n01456756', 'n01457852', 'n01458842', 'n01459791', 'n01461315', + 'n01462042', 'n01462544', 'n01464844', 'n01468238', 'n01468712', + 'n01469103', 'n01471682', 'n01472303', 'n01477525', 'n01477875', + 'n01482071', 'n01482330', 'n01483830', 'n01484097', 'n01484285', + 'n01484850', 'n01485479', 'n01486838', 'n01487506', 'n01488038', + 'n01489501', 'n01489709', 'n01489920', 'n01490112', 'n01490360', + 'n01490670', 'n01491006', 'n01491361', 'n01491874', 'n01492569', + 'n01493146', 'n01494475', 'n01495006', 'n01495493', 'n01495701', + 'n01496331', 'n01497118', 'n01498041', 'n01498989', 'n01499396', + 'n01500091', 'n01500476', 'n01501160', 'n01503061', 'n01503976', + 'n01504179', 'n01504344', 'n01514668', 'n01514752', 'n01514859', + 'n01515303', 'n01517565', 'n01517966', 'n01518878', 'n01519563', + 'n01519873', 'n01520576', 'n01521399', 'n01521756', 'n01524359', + 'n01526521', 'n01527194', 'n01527347', 'n01527617', 'n01527917', + 'n01528396', 
'n01528654', 'n01528845', 'n01529672', 'n01530439', + 'n01530575', 'n01531178', 'n01531344', 'n01531512', 'n01531811', + 'n01531971', 'n01532325', 'n01532511', 'n01532829', 'n01533000', + 'n01533339', 'n01533481', 'n01533651', 'n01533893', 'n01534155', + 'n01534433', 'n01534582', 'n01535140', 'n01535469', 'n01535690', + 'n01536035', 'n01536186', 'n01536334', 'n01536644', 'n01536780', + 'n01537134', 'n01537544', 'n01537895', 'n01538059', 'n01538200', + 'n01538630', 'n01538955', 'n01539573', 'n01539925', 'n01540090', + 'n01540233', 'n01540566', 'n01540832', 'n01541102', 'n01541386', + 'n01541760', 'n01541922', 'n01542786', 'n01543175', 'n01543632', + 'n01544389', 'n01544704', 'n01545574', 'n01546039', 'n01546506', + 'n01547832', 'n01548301', 'n01548492', 'n01548694', 'n01548865', + 'n01549053', 'n01549430', 'n01549641', 'n01549886', 'n01550172', + 'n01551080', 'n01551300', 'n01551711', 'n01552034', 'n01552813', + 'n01553142', 'n01554448', 'n01555004', 'n01555305', 'n01555809', + 'n01556182', 'n01557185', 'n01557962', 'n01558149', 'n01558307', + 'n01558461', 'n01558594', 'n01558765', 'n01558993', 'n01559477', + 'n01559639', 'n01559804', 'n01560105', 'n01560280', 'n01560419', + 'n01560636', 'n01560793', 'n01560935', 'n01561452', 'n01561732', + 'n01562014', 'n01562265', 'n01562451', 'n01563128', 'n01563449', + 'n01563746', 'n01563945', 'n01564217', 'n01564394', 'n01564773', + 'n01564914', 'n01565078', 'n01565345', 'n01565599', 'n01565930', + 'n01566207', 'n01566645', 'n01567133', 'n01567678', 'n01567879', + 'n01568294', 'n01568720', 'n01568892', 'n01569060', 'n01569262', + 'n01569423', 'n01569566', 'n01569836', 'n01569971', 'n01570267', + 'n01570421', 'n01570676', 'n01570839', 'n01571904', 'n01572328', + 'n01572489', 'n01572654', 'n01572782', 'n01573074', 'n01573240', + 'n01573360', 'n01573898', 'n01574045', 'n01574390', 'n01574560', + 'n01574801', 'n01575117', 'n01575401', 'n01575745', 'n01576076', + 'n01576695', 'n01577035', 'n01577659', 'n01577941', 'n01578180', + 'n01578575', 'n01579028', 'n01579149', 'n01579260', 'n01579410', + 'n01579578', 'n01579729', 'n01580077', 'n01580870', 'n01581166', + 'n01581730', 'n01581984', 'n01582220', 'n01582398', 'n01582856', + 'n01583209', 'n01583495', 'n01583828', 'n01584225', 'n01584695', + 'n01584853', 'n01585121', 'n01585287', 'n01585422', 'n01585715', + 'n01586020', 'n01586374', 'n01586941', 'n01587526', 'n01587834', + 'n01588002', 'n01588725', 'n01589286', 'n01589718', 'n01589893', + 'n01591005', 'n01591123', 'n01591301', 'n01591697', 'n01592084', + 'n01592257', 'n01592387', 'n01592540', 'n01592694', 'n01593028', + 'n01594004', 'n01594372', 'n01594787', 'n01594968', 'n01595168', + 'n01595450', 'n01595974', 'n01596273', 'n01596608', 'n01597022', + 'n01597336', 'n01597737', 'n01597906', 'n01598074', 'n01598588', + 'n01598988', 'n01599159', 'n01599269', 'n01599556', 'n01600085', + 'n01600657', 'n01601068', 'n01601694', 'n01602630', 'n01602832', + 'n01603152', 'n01603600', 'n01603812', 'n01603953', 'n01604330', + 'n01604968', 'n01605630', 'n01606522', 'n01606672', 'n01606809', + 'n01607600', 'n01607812', 'n01607962', 'n01608265', 'n01608432', + 'n01608814', 'n01609062', 'n01609391', 'n01609751', 'n01609956', + 'n01610100', 'n01610226', 'n01610552', 'n01610955', 'n01611472', + 'n01611800', 'n01611969', 'n01612122', 'n01612275', 'n01612476', + 'n01612628', 'n01613177', 'n01613294', 'n01613615', 'n01613807', + 'n01614038', 'n01614343', 'n01614556', 'n01614925', 'n01615121', + 'n01615303', 'n01615458', 'n01615703', 'n01616086', 'n01616318', + 'n01617095', 
'n01617443', 'n01617766', 'n01618082', 'n01618503', + 'n01618922', 'n01619310', 'n01619536', 'n01619835', 'n01620135', + 'n01620414', 'n01620735', 'n01621127', 'n01621635', 'n01622120', + 'n01622352', 'n01622483', 'n01622779', 'n01622959', 'n01623110', + 'n01623425', 'n01623615', 'n01623706', 'n01623880', 'n01624115', + 'n01624212', 'n01624305', 'n01624537', 'n01624833', 'n01625562', + 'n01627424', 'n01628770', 'n01629276', 'n01629819', 'n01629962', + 'n01630284', 'n01630670', 'n01630901', 'n01631354', 'n01631512', + 'n01631663', 'n01632458', 'n01632601', 'n01632777', 'n01633406', + 'n01633781', 'n01635027', 'n01636127', 'n01636352', 'n01636829', + 'n01637615', 'n01639765', 'n01640846', 'n01641206', 'n01641391', + 'n01641577', 'n01641739', 'n01642257', 'n01642539', 'n01643507', + 'n01643896', 'n01644373', 'n01644900', 'n01645776', 'n01646292', + 'n01646388', 'n01646555', 'n01646648', 'n01646802', 'n01646902', + 'n01647303', 'n01647640', 'n01648139', 'n01648620', 'n01649170', + 'n01650167', 'n01650690', 'n01650901', 'n01651059', 'n01652026', + 'n01653223', 'n01654637', 'n01661091', 'n01662622', 'n01662784', + 'n01663401', 'n01663782', 'n01664065', 'n01664369', 'n01664492', + 'n01664674', 'n01664990', 'n01665541', 'n01665932', 'n01666228', + 'n01666585', 'n01667114', 'n01667432', 'n01667778', 'n01668091', + 'n01668436', 'n01668665', 'n01668892', 'n01669191', 'n01669372', + 'n01669654', 'n01670092', 'n01670535', 'n01670802', 'n01671125', + 'n01671479', 'n01672032', 'n01673282', 'n01674464', 'n01674990', + 'n01675722', 'n01677366', 'n01677747', 'n01678043', 'n01678343', + 'n01679307', 'n01679626', 'n01679962', 'n01680264', 'n01680478', + 'n01680655', 'n01680813', 'n01681328', 'n01681653', 'n01681940', + 'n01682172', 'n01682435', 'n01682714', 'n01683558', 'n01684133', + 'n01684578', 'n01685808', 'n01686044', 'n01687665', 'n01687978', + 'n01688243', 'n01689081', 'n01689811', 'n01690149', 'n01690466', + 'n01691217', 'n01692333', 'n01692523', 'n01693175', 'n01693334', + 'n01693783', 'n01694178', 'n01694709', 'n01694955', 'n01695060', + 'n01696633', 'n01697178', 'n01697457', 'n01697611', 'n01698434', + 'n01698640', 'n01698782', 'n01699040', 'n01699675', 'n01701859', + 'n01704323', 'n01713764', 'n01726692', 'n01727646', 'n01728572', + 'n01728920', 'n01729322', 'n01729977', 'n01730185', 'n01730307', + 'n01730563', 'n01730812', 'n01730960', 'n01731545', 'n01731941', + 'n01732244', 'n01732614', 'n01732789', 'n01733466', 'n01733757', + 'n01733957', 'n01734104', 'n01734418', 'n01734637', 'n01734808', + 'n01735189', 'n01735439', 'n01735577', 'n01737021', 'n01737472', + 'n01737728', 'n01737875', 'n01738065', 'n01738601', 'n01739381', + 'n01740131', 'n01740551', 'n01741232', 'n01741562', 'n01741943', + 'n01742172', 'n01742821', 'n01743086', 'n01743605', 'n01743936', + 'n01744100', 'n01744270', 'n01744401', 'n01745125', 'n01745484', + 'n01745902', 'n01746359', 'n01747589', 'n01747885', 'n01748264', + 'n01748389', 'n01748686', 'n01748906', 'n01749244', 'n01749582', + 'n01749742', 'n01749939', 'n01750167', 'n01750437', 'n01751036', + 'n01751472', 'n01751748', 'n01752165', 'n01752585', 'n01752736', + 'n01753032', 'n01753180', 'n01753488', 'n01753959', 'n01754370', + 'n01754533', 'n01754876', 'n01755581', 'n01755740', 'n01756089', + 'n01756291', 'n01756508', 'n01756733', 'n01757115', 'n01757343', + 'n01757677', 'n01757901', 'n01758141', 'n01758757', 'n01767661', + 'n01768244', 'n01769347', 'n01770081', 'n01770393', 'n01770795', + 'n01771417', 'n01772222', 'n01772664', 'n01773157', 'n01773549', + 'n01773797', 
'n01774384', 'n01774750', 'n01775062', 'n01775370', + 'n01776313', 'n01777304', 'n01778217', 'n01779148', 'n01779629', + 'n01782209', 'n01782516', 'n01784675', 'n01785667', 'n01786646', + 'n01787835', 'n01789740', 'n01790711', 'n01791107', 'n01791463', + 'n01791625', 'n01791954', 'n01792042', 'n01792158', 'n01792429', + 'n01792640', 'n01792955', 'n01793085', 'n01793249', 'n01793435', + 'n01793715', 'n01794158', 'n01794344', 'n01794651', 'n01795088', + 'n01795545', 'n01795735', 'n01796340', 'n01796519', 'n01796729', + 'n01797020', 'n01797307', 'n01797601', 'n01797886', 'n01798168', + 'n01798484', 'n01798706', 'n01798839', 'n01799679', 'n01800424', + 'n01801876', 'n01803078', 'n01803362', 'n01804163', 'n01804478', + 'n01804653', 'n01805070', 'n01805801', 'n01806143', 'n01806297', + 'n01806364', 'n01806467', 'n01806567', 'n01806847', 'n01807105', + 'n01807496', 'n01807828', 'n01808140', 'n01809106', 'n01809371', + 'n01809752', 'n01810268', 'n01811909', 'n01812337', 'n01812662', + 'n01812866', 'n01813088', 'n01813385', 'n01813532', 'n01813948', + 'n01814217', 'n01814370', 'n01814755', 'n01814921', 'n01815601', + 'n01816887', 'n01817263', 'n01817346', 'n01817953', 'n01818299', + 'n01818515', 'n01818832', 'n01819115', 'n01819313', 'n01819465', + 'n01819734', 'n01820052', 'n01820348', 'n01820546', 'n01821076', + 'n01821203', 'n01821869', 'n01822300', 'n01823013', 'n01823414', + 'n01824035', 'n01824575', 'n01825278', 'n01826364', 'n01826680', + 'n01827403', 'n01827793', 'n01828096', 'n01828556', 'n01828970', + 'n01829413', 'n01829869', 'n01830042', 'n01830915', 'n01832167', + 'n01832493', 'n01833805', 'n01834177', 'n01834540', 'n01835276', + 'n01837072', 'n01838598', 'n01839086', 'n01839330', 'n01839598', + 'n01839750', 'n01840120', 'n01840775', 'n01841102', 'n01841288', + 'n01841441', 'n01841679', 'n01842235', 'n01842504', 'n01843065', + 'n01843383', 'n01843719', 'n01844231', 'n01844551', 'n01844917', + 'n01845132', 'n01846331', 'n01847000', 'n01847089', 'n01847170', + 'n01847253', 'n01847407', 'n01847806', 'n01847978', 'n01848123', + 'n01848323', 'n01848453', 'n01848555', 'n01848648', 'n01848840', + 'n01848976', 'n01849157', 'n01849466', 'n01849676', 'n01849863', + 'n01850192', 'n01850373', 'n01850553', 'n01850873', 'n01851038', + 'n01851207', 'n01851375', 'n01851573', 'n01851731', 'n01851895', + 'n01852142', 'n01852329', 'n01852400', 'n01852671', 'n01852861', + 'n01853195', 'n01853498', 'n01853666', 'n01853870', 'n01854415', + 'n01854700', 'n01854838', 'n01855032', 'n01855188', 'n01855476', + 'n01855672', 'n01856072', 'n01856155', 'n01856380', 'n01856553', + 'n01856890', 'n01857079', 'n01857325', 'n01857512', 'n01857632', + 'n01857851', 'n01858281', 'n01858441', 'n01858780', 'n01858845', + 'n01858906', 'n01859190', 'n01859325', 'n01859496', 'n01859689', + 'n01859852', 'n01860002', 'n01860187', 'n01860497', 'n01861778', + 'n01862399', 'n01871265', 'n01871875', 'n01872401', 'n01872772', + 'n01873310', 'n01874434', 'n01874928', 'n01875313', 'n01876034', + 'n01876326', 'n01877134', 'n01877606', 'n01877812', 'n01878061', + 'n01878929', 'n01879217', 'n01879509', 'n01881171', 'n01882125', + 'n01882714', 'n01883070', 'n01884834', 'n01885498', 'n01886756', + 'n01887474', 'n01887623', 'n01887787', 'n01887896', 'n01888045', + 'n01888181', 'n01888264', 'n01888411', 'n01889520', 'n01891633', + 'n01892030', 'n01893825', 'n01896844', 'n01897536', 'n01899894', + 'n01900150', 'n01903346', 'n01904029', 'n01904806', 'n01904886', + 'n01905661', 'n01906749', 'n01909906', 'n01910747', 'n01913166', + 'n01914609', 
'n01914830', 'n01915700', 'n01915811', 'n01916187', + 'n01916388', 'n01916481', 'n01916925', 'n01917289', 'n01917611', + 'n01917882', 'n01918744', 'n01922303', 'n01923025', 'n01924916', + 'n01930112', 'n01934440', 'n01935395', 'n01937909', 'n01938454', + 'n01940736', 'n01942869', 'n01943087', 'n01943899', 'n01944118', + 'n01944390', 'n01944812', 'n01944955', 'n01945143', 'n01945685', + 'n01946630', 'n01947396', 'n01947997', 'n01948573', 'n01949085', + 'n01950731', 'n01951274', 'n01951613', 'n01953361', 'n01953594', + 'n01953762', 'n01955084', 'n01955933', 'n01956344', 'n01956481', + 'n01956764', 'n01957335', 'n01958038', 'n01958346', 'n01958531', + 'n01959492', 'n01959985', 'n01960177', 'n01960459', 'n01961985', + 'n01963317', 'n01963571', 'n01964049', 'n01964271', 'n01964441', + 'n01965529', 'n01965889', 'n01968897', 'n01970164', 'n01970667', + 'n01971280', 'n01972541', 'n01974773', 'n01976146', 'n01976868', + 'n01976957', 'n01978287', 'n01978455', 'n01979874', 'n01980166', + 'n01981276', 'n01982068', 'n01982347', 'n01982650', 'n01983481', + 'n01984245', 'n01984695', 'n01985128', 'n01985493', 'n01986214', + 'n01986806', 'n01987545', 'n01990007', 'n01990800', 'n01991028', + 'n01991520', 'n01992773', 'n01994910', 'n01998183', 'n01998741', + 'n01999186', 'n02000954', 'n02002075', 'n02002556', 'n02002724', + 'n02003037', 'n02003204', 'n02003577', 'n02003839', 'n02004131', + 'n02004492', 'n02004855', 'n02005399', 'n02005790', 'n02006063', + 'n02006364', 'n02006656', 'n02006985', 'n02007284', 'n02007558', + 'n02008041', 'n02008497', 'n02008643', 'n02008796', 'n02009229', + 'n02009380', 'n02009508', 'n02009750', 'n02009912', 'n02010272', + 'n02010453', 'n02010728', 'n02011016', 'n02011281', 'n02011460', + 'n02011805', 'n02011943', 'n02012185', 'n02012849', 'n02013177', + 'n02013567', 'n02013706', 'n02014237', 'n02014524', 'n02014941', + 'n02015357', 'n02015554', 'n02016066', 'n02016358', 'n02016659', + 'n02016816', 'n02016956', 'n02017213', 'n02017475', 'n02017725', + 'n02018027', 'n02018207', 'n02018368', 'n02018795', 'n02019190', + 'n02019929', 'n02021050', 'n02021795', 'n02022684', 'n02023341', + 'n02023855', 'n02023992', 'n02024185', 'n02024479', 'n02024763', + 'n02025043', 'n02025239', 'n02025389', 'n02026059', 'n02026629', + 'n02026948', 'n02027075', 'n02027357', 'n02027492', 'n02027897', + 'n02028035', 'n02028175', 'n02028342', 'n02028451', 'n02028727', + 'n02028900', 'n02029087', 'n02029378', 'n02029706', 'n02030035', + 'n02030287', 'n02030837', 'n02030996', 'n02031585', 'n02031934', + 'n02032222', 'n02032355', 'n02032480', 'n02033041', 'n02033208', + 'n02033561', 'n02033779', 'n02034129', 'n02034295', 'n02034661', + 'n02034971', 'n02035210', 'n02036053', 'n02036711', 'n02037110', + 'n02037464', 'n02037869', 'n02038466', 'n02038993', 'n02039171', + 'n02040266', 'n02041085', 'n02041246', 'n02041678', 'n02041875', + 'n02042046', 'n02042180', 'n02042472', 'n02042759', 'n02043063', + 'n02043333', 'n02043808', 'n02044178', 'n02044517', 'n02044778', + 'n02044908', 'n02045369', 'n02045596', 'n02045864', 'n02046171', + 'n02046759', 'n02046939', 'n02047045', 'n02047260', 'n02047411', + 'n02047517', 'n02047614', 'n02047975', 'n02048115', 'n02048353', + 'n02049088', 'n02050004', 'n02050313', 'n02050442', 'n02050586', + 'n02050809', 'n02051059', 'n02051845', 'n02052204', 'n02052365', + 'n02052775', 'n02053083', 'n02053425', 'n02053584', 'n02054036', + 'n02054502', 'n02054711', 'n02055107', 'n02055658', 'n02055803', + 'n02056228', 'n02056570', 'n02056728', 'n02057035', 'n02057330', + 'n02057731', 
'n02058221', 'n02058594', 'n02059162', 'n02060133', + 'n02060411', 'n02060569', 'n02060889', 'n02062017', 'n02062430', + 'n02062744', 'n02063224', 'n02063662', 'n02064338', 'n02064816', + 'n02065026', 'n02065263', 'n02065407', 'n02066245', 'n02066707', + 'n02067240', 'n02068541', 'n02068974', 'n02069412', 'n02069701', + 'n02069974', 'n02070174', 'n02070430', 'n02071294', 'n02071636', + 'n02072040', 'n02072798', 'n02073831', 'n02074367', 'n02075296', + 'n02075612', 'n02075927', 'n02076196', 'n02076402', 'n02076779', + 'n02077152', 'n02077384', 'n02077658', 'n02077787', 'n02077923', + 'n02078292', 'n02078574', 'n02078738', 'n02079005', 'n02079389', + 'n02079851', 'n02080146', 'n02080415', 'n02080713', 'n02081571', + 'n02081798', 'n02082791', 'n02083346', 'n02083672', 'n02084071', + 'n02084732', 'n02084861', 'n02085272', 'n02085374', 'n02085620', + 'n02085936', 'n02086079', 'n02086240', 'n02086478', 'n02086646', + 'n02086753', 'n02086910', 'n02087046', 'n02087122', 'n02087394', + 'n02087551', 'n02088094', 'n02088238', 'n02088364', 'n02088466', + 'n02088632', 'n02088839', 'n02089232', 'n02089468', 'n02089555', + 'n02089973', 'n02090379', 'n02090475', 'n02090622', 'n02090721', + 'n02090827', 'n02091032', 'n02091134', 'n02091244', 'n02091467', + 'n02091831', 'n02092002', 'n02092339', 'n02092468', 'n02093056', + 'n02093256', 'n02093428', 'n02093647', 'n02093754', 'n02093859', + 'n02093991', 'n02094114', 'n02094258', 'n02094433', 'n02094562', + 'n02094721', 'n02094931', 'n02095050', 'n02095314', 'n02095412', + 'n02095570', 'n02095727', 'n02095889', 'n02096051', 'n02096177', + 'n02096294', 'n02096437', 'n02096585', 'n02096756', 'n02097047', + 'n02097130', 'n02097209', 'n02097298', 'n02097474', 'n02097658', + 'n02097786', 'n02098105', 'n02098286', 'n02098413', 'n02098550', + 'n02098806', 'n02098906', 'n02099029', 'n02099267', 'n02099429', + 'n02099601', 'n02099712', 'n02099849', 'n02099997', 'n02100236', + 'n02100399', 'n02100583', 'n02100735', 'n02100877', 'n02101006', + 'n02101108', 'n02101388', 'n02101556', 'n02101861', 'n02102040', + 'n02102177', 'n02102318', 'n02102480', 'n02102605', 'n02102973', + 'n02103406', 'n02103841', 'n02104029', 'n02104280', 'n02104365', + 'n02104523', 'n02104882', 'n02105056', 'n02105162', 'n02105251', + 'n02105412', 'n02105505', 'n02105641', 'n02105855', 'n02106030', + 'n02106166', 'n02106382', 'n02106550', 'n02106662', 'n02106854', + 'n02106966', 'n02107142', 'n02107312', 'n02107420', 'n02107574', + 'n02107683', 'n02107908', 'n02108000', 'n02108089', 'n02108254', + 'n02108422', 'n02108551', 'n02108672', 'n02108915', 'n02109047', + 'n02109525', 'n02109811', 'n02109961', 'n02110063', 'n02110185', + 'n02110341', 'n02110627', 'n02110806', 'n02110958', 'n02111129', + 'n02111277', 'n02111500', 'n02111626', 'n02111889', 'n02112018', + 'n02112137', 'n02112350', 'n02112497', 'n02112826', 'n02113023', + 'n02113186', 'n02113335', 'n02113624', 'n02113712', 'n02113799', + 'n02114100', 'n02114367', 'n02114548', 'n02114712', 'n02114855', + 'n02115096', 'n02115335', 'n02115641', 'n02115913', 'n02116738', + 'n02117135', 'n02117512', 'n02117900', 'n02118333', 'n02119022', + 'n02119477', 'n02119634', 'n02119789', 'n02120079', 'n02120505', + 'n02120997', 'n02121620', 'n02121808', 'n02122298', 'n02122430', + 'n02122510', 'n02122580', 'n02122725', 'n02122878', 'n02122948', + 'n02123045', 'n02123159', 'n02123242', 'n02123394', 'n02123478', + 'n02123597', 'n02123785', 'n02123917', 'n02124075', 'n02124313', + 'n02124484', 'n02124623', 'n02125010', 'n02125081', 'n02125311', + 'n02125494', 
'n02126028', 'n02126139', 'n02126640', 'n02126787', + 'n02127052', 'n02127292', 'n02127381', 'n02127482', 'n02127586', + 'n02127678', 'n02127808', 'n02128385', 'n02128669', 'n02128757', + 'n02128925', 'n02129165', 'n02129463', 'n02129604', 'n02129837', + 'n02129923', 'n02129991', 'n02130308', 'n02131653', 'n02132136', + 'n02132466', 'n02132580', 'n02132788', 'n02133161', 'n02133704', + 'n02134084', 'n02134418', 'n02135220', 'n02136103', 'n02136452', + 'n02137015', 'n02137549', 'n02138441', 'n02138647', 'n02138777', + 'n02139199', 'n02139671', 'n02140049', 'n02146371', 'n02146700', + 'n02147173', 'n02147328', 'n02147591', 'n02147947', 'n02149420', + 'n02150482', 'n02152740', 'n02152881', 'n02153109', 'n02156871', + 'n02157206', 'n02159955', 'n02160947', 'n02161338', 'n02161457', + 'n02162561', 'n02163297', 'n02164464', 'n02165105', 'n02165456', + 'n02165877', 'n02166567', 'n02166826', 'n02167151', 'n02167820', + 'n02168245', 'n02168699', 'n02169023', 'n02169497', 'n02169705', + 'n02169974', 'n02172182', 'n02172518', 'n02172870', 'n02173113', + 'n02173373', 'n02174001', 'n02174659', 'n02175014', 'n02175569', + 'n02175916', 'n02176261', 'n02176439', 'n02176747', 'n02177972', + 'n02180875', 'n02181235', 'n02181477', 'n02181724', 'n02183096', + 'n02183857', 'n02184473', 'n02188699', 'n02190166', 'n02190790', + 'n02191773', 'n02191979', 'n02192252', 'n02192513', 'n02195526', + 'n02195819', 'n02196119', 'n02196344', 'n02197185', 'n02197689', + 'n02198859', 'n02200198', 'n02200509', 'n02200850', 'n02201000', + 'n02201626', 'n02202006', 'n02203152', 'n02204907', 'n02205219', + 'n02205673', 'n02206856', 'n02207179', 'n02207345', 'n02207805', + 'n02208280', 'n02208498', 'n02208848', 'n02209111', 'n02209354', + 'n02209624', 'n02210427', 'n02211444', 'n02211627', 'n02212062', + 'n02212958', 'n02213107', 'n02213239', 'n02213543', 'n02213663', + 'n02213788', 'n02214341', 'n02214773', 'n02215621', 'n02215770', + 'n02216211', 'n02216365', 'n02217563', 'n02218371', 'n02219486', + 'n02220518', 'n02220804', 'n02221083', 'n02221414', 'n02222035', + 'n02223266', 'n02226429', 'n02226821', 'n02226970', 'n02227247', + 'n02227966', 'n02228341', 'n02229156', 'n02229544', 'n02229765', + 'n02230187', 'n02231052', 'n02231487', 'n02231803', 'n02233338', + 'n02233943', 'n02234355', 'n02234848', 'n02236044', 'n02236241', + 'n02236355', 'n02236896', 'n02237581', 'n02239774', 'n02240068', + 'n02240517', 'n02241426', 'n02242137', 'n02243562', 'n02244797', + 'n02246628', 'n02247216', 'n02250822', 'n02251775', 'n02252226', + 'n02254697', 'n02256656', 'n02257284', 'n02257985', 'n02258198', + 'n02259212', 'n02259708', 'n02262449', 'n02262803', 'n02264232', + 'n02264363', 'n02264885', 'n02266050', 'n02266864', 'n02268148', + 'n02268443', 'n02268853', 'n02270623', 'n02272871', 'n02273392', + 'n02274024', 'n02274259', 'n02274822', 'n02275560', 'n02275773', + 'n02276078', 'n02276258', 'n02276355', 'n02276749', 'n02276902', + 'n02277094', 'n02277268', 'n02277742', 'n02278024', 'n02278210', + 'n02278839', 'n02278980', 'n02279257', 'n02279637', 'n02279972', + 'n02280649', 'n02281015', 'n02281136', 'n02281406', 'n02281787', + 'n02282257', 'n02282385', 'n02282553', 'n02282903', 'n02283077', + 'n02283201', 'n02283951', 'n02284611', 'n02284884', 'n02285801', + 'n02286089', 'n02287004', 'n02287799', 'n02288268', 'n02288789', + 'n02289610', 'n02291748', 'n02292692', 'n02295064', 'n02295390', + 'n02297442', 'n02298218', 'n02298541', 'n02299157', 'n02299505', + 'n02299846', 'n02300797', 'n02301935', 'n02302244', 'n02302459', + 'n02302620', 
'n02302969', 'n02303284', 'n02304036', 'n02304432', + 'n02305085', 'n02305929', 'n02307325', 'n02307681', 'n02308139', + 'n02308471', 'n02308735', 'n02309242', 'n02309337', 'n02310334', + 'n02310585', 'n02310717', 'n02310941', 'n02311060', 'n02311617', + 'n02312006', 'n02312427', 'n02312640', 'n02313008', 'n02315487', + 'n02316707', 'n02317335', 'n02317781', 'n02318167', 'n02319095', + 'n02319308', 'n02319555', 'n02321170', 'n02321529', 'n02322047', + 'n02323449', 'n02324045', 'n02324431', 'n02324514', 'n02324587', + 'n02324850', 'n02325366', 'n02325722', 'n02326432', 'n02326862', + 'n02327028', 'n02327656', 'n02327842', 'n02328150', 'n02328429', + 'n02329401', 'n02330245', 'n02331046', 'n02332156', 'n02332755', + 'n02333190', 'n02333546', 'n02333909', 'n02334201', 'n02336641', + 'n02337001', 'n02338145', 'n02339376', 'n02341475', 'n02341974', + 'n02342885', 'n02343320', 'n02343772', 'n02346627', 'n02348173', + 'n02350105', 'n02352591', 'n02353861', 'n02355227', 'n02355477', + 'n02356381', 'n02356612', 'n02356798', 'n02356977', 'n02357111', + 'n02357401', 'n02357585', 'n02357911', 'n02358091', 'n02358390', + 'n02358584', 'n02358890', 'n02359047', 'n02359324', 'n02359556', + 'n02359915', 'n02360282', 'n02361337', 'n02361587', 'n02361706', + 'n02363005', 'n02363245', 'n02363351', 'n02364520', 'n02364673', + 'n02364840', 'n02365108', 'n02365480', 'n02366002', 'n02366959', + 'n02367492', 'n02369293', 'n02370806', 'n02372584', 'n02372952', + 'n02373336', 'n02374149', 'n02374451', 'n02375302', 'n02376542', + 'n02376679', 'n02376791', 'n02376918', 'n02377063', 'n02377181', + 'n02377291', 'n02377388', 'n02377480', 'n02377603', 'n02377703', + 'n02378541', 'n02378969', 'n02379081', 'n02379183', 'n02379329', + 'n02379430', 'n02379630', 'n02379908', 'n02380052', 'n02380335', + 'n02380464', 'n02380583', 'n02380745', 'n02380875', 'n02381004', + 'n02381261', 'n02381364', 'n02381460', 'n02381609', 'n02381831', + 'n02382039', 'n02382132', 'n02382204', 'n02382338', 'n02382437', + 'n02382635', 'n02382750', 'n02382850', 'n02382948', 'n02383231', + 'n02385214', 'n02386014', 'n02386141', 'n02386224', 'n02386310', + 'n02386496', 'n02386853', 'n02386968', 'n02387093', 'n02387254', + 'n02387346', 'n02387722', 'n02387887', 'n02388143', 'n02388276', + 'n02388735', 'n02388832', 'n02388917', 'n02389026', 'n02389128', + 'n02389261', 'n02389346', 'n02389559', 'n02389779', 'n02390015', + 'n02390101', 'n02390454', 'n02390640', 'n02391049', 'n02391234', + 'n02391373', 'n02391508', 'n02391994', 'n02392434', 'n02392824', + 'n02393161', 'n02393580', 'n02393807', 'n02393940', 'n02394477', + 'n02395003', 'n02395406', 'n02395694', 'n02396014', 'n02396088', + 'n02396427', 'n02397096', 'n02397529', 'n02397744', 'n02398521', + 'n02399000', 'n02402010', 'n02402175', 'n02402425', 'n02403003', + 'n02403231', 'n02403325', 'n02403454', 'n02403740', 'n02403920', + 'n02404186', 'n02404432', 'n02404573', 'n02404906', 'n02405101', + 'n02405302', 'n02405440', 'n02405799', 'n02405929', 'n02406174', + 'n02406432', 'n02406533', 'n02406647', 'n02406749', 'n02406859', + 'n02407071', 'n02407276', 'n02407390', 'n02407521', 'n02407625', + 'n02407959', 'n02408429', 'n02408817', 'n02409508', 'n02410011', + 'n02410509', 'n02410702', 'n02410900', 'n02411206', 'n02411705', + 'n02411999', 'n02412080', 'n02412210', 'n02412440', 'n02412629', + 'n02413050', 'n02413131', 'n02413593', 'n02414209', 'n02414290', + 'n02414578', 'n02414763', 'n02415253', 'n02415435', 'n02415577', + 'n02415829', 'n02416104', 'n02416519', 'n02416820', 'n02416880', + 'n02416964', 
'n02417070', 'n02417387', 'n02417534', 'n02417663', + 'n02417914', 'n02418465', 'n02419336', 'n02419634', 'n02419796', + 'n02420509', 'n02420828', 'n02421136', 'n02421449', 'n02421792', + 'n02422106', 'n02422391', 'n02422699', 'n02423022', 'n02423218', + 'n02423589', 'n02424085', 'n02424305', 'n02424486', 'n02424909', + 'n02425228', 'n02425887', 'n02426481', 'n02426813', 'n02427032', + 'n02427470', 'n02427576', 'n02427724', 'n02428349', 'n02428508', + 'n02429456', 'n02430045', 'n02430559', 'n02430830', 'n02431122', + 'n02431337', 'n02431441', 'n02431628', 'n02431785', 'n02431976', + 'n02432291', 'n02432511', 'n02432704', 'n02432983', 'n02433318', + 'n02433546', 'n02433925', 'n02434190', 'n02434954', 'n02437136', + 'n02437312', 'n02437482', 'n02437616', 'n02438173', 'n02438272', + 'n02438580', 'n02439033', 'n02439398', 'n02441326', 'n02441942', + 'n02442336', 'n02442845', 'n02443015', 'n02443114', 'n02443346', + 'n02443484', 'n02444819', 'n02445004', 'n02445171', 'n02445394', + 'n02445715', 'n02446206', 'n02447366', 'n02447762', 'n02448060', + 'n02449350', 'n02450295', 'n02453108', 'n02454379', 'n02454794', + 'n02456962', 'n02457408', 'n02457945', 'n02458135', 'n02460009', + 'n02460451', 'n02461128', 'n02461830', 'n02469248', 'n02469472', + 'n02469914', 'n02470238', 'n02470325', 'n02472293', 'n02472987', + 'n02473307', 'n02474777', 'n02475078', 'n02475669', 'n02480153', + 'n02480495', 'n02480855', 'n02481103', 'n02481235', 'n02481366', + 'n02481500', 'n02481823', 'n02482286', 'n02482474', 'n02482650', + 'n02483362', 'n02483708', 'n02484322', 'n02484473', 'n02484975', + 'n02485536', 'n02486261', 'n02486410', 'n02486657', 'n02486908', + 'n02487347', 'n02487547', 'n02487675', 'n02487847', 'n02488291', + 'n02488415', 'n02488702', 'n02488894', 'n02489166', 'n02490219', + 'n02490811', 'n02491107', 'n02492035', 'n02492660', 'n02493509', + 'n02493793', 'n02494079', 'n02496913', 'n02497673', 'n02499022', + 'n02499316', 'n02499808', 'n02500267', 'n02501583', 'n02503517', + 'n02504013', 'n02504458', 'n02508021', 'n02508213', 'n02508742', + 'n02509197', 'n02509515', 'n02509815', 'n02510455', 'n02512053', + 'n02512830', 'n02512938', 'n02514041', 'n02516188', 'n02517442', + 'n02518324', 'n02519148', 'n02519686', 'n02519862', 'n02520147', + 'n02522399', 'n02523427', 'n02524202', 'n02525382', 'n02526121', + 'n02527057', 'n02527271', 'n02527622', 'n02530421', 'n02530831', + 'n02530999', 'n02532028', 'n02532602', 'n02533209', 'n02533834', + 'n02534734', 'n02535163', 'n02535258', 'n02535537', 'n02535759', + 'n02536165', 'n02536456', 'n02536864', 'n02537085', 'n02537319', + 'n02537525', 'n02537716', 'n02538010', 'n02538216', 'n02540412', + 'n02541687', 'n02542432', 'n02543565', 'n02548247', 'n02549248', + 'n02549989', 'n02555863', 'n02556846', 'n02557182', 'n02557318', + 'n02557591', 'n02557749', 'n02557909', 'n02560110', 'n02561108', + 'n02561381', 'n02561514', 'n02561661', 'n02562315', 'n02562796', + 'n02563182', 'n02563648', 'n02563792', 'n02564270', 'n02564720', + 'n02565072', 'n02565324', 'n02565573', 'n02568087', 'n02568447', + 'n02568959', 'n02569484', 'n02570164', 'n02570838', 'n02572196', + 'n02572484', 'n02573704', 'n02574271', 'n02576575', 'n02576906', + 'n02577403', 'n02578771', 'n02578928', 'n02579303', 'n02579928', + 'n02580336', 'n02580679', 'n02580830', 'n02581957', 'n02583890', + 'n02584145', 'n02584449', 'n02585872', 'n02586543', 'n02587618', + 'n02588286', 'n02589623', 'n02590094', 'n02590702', 'n02592055', + 'n02593019', 'n02595702', 'n02596067', 'n02596381', 'n02597608', + 'n02598573', 
'n02598878', 'n02599052', 'n02599347', 'n02599557', + 'n02601344', 'n02603317', 'n02603540', 'n02605316', 'n02605703', + 'n02605936', 'n02606052', 'n02606384', 'n02607072', 'n02607201', + 'n02607470', 'n02607862', 'n02610066', 'n02610664', 'n02611561', + 'n02613181', 'n02616851', 'n02618827', 'n02619165', 'n02619550', + 'n02620167', 'n02624167', 'n02624551', 'n02624807', 'n02624987', + 'n02625258', 'n02625612', 'n02625851', 'n02626265', 'n02626762', + 'n02627292', 'n02627532', 'n02627835', 'n02628062', 'n02629230', + 'n02630281', 'n02630615', 'n02630739', 'n02631041', 'n02631330', + 'n02631475', 'n02639087', 'n02639605', 'n02640242', 'n02640626', + 'n02640857', 'n02641379', 'n02643112', 'n02643566', 'n02643836', + 'n02644113', 'n02649546', 'n02650050', 'n02652132', 'n02653145', + 'n02653497', 'n02654112', 'n02654425', 'n02654745', 'n02655020', + 'n02655848', 'n02656032', 'n02656670', 'n02657368', 'n02657694', + 'n02658531', 'n02660208', 'n02660640', 'n02663211', 'n02666196', + 'n02666501', 'n02666624', 'n02666943', 'n02667093', 'n02667244', + 'n02667379', 'n02667478', 'n02667576', 'n02669295', 'n02669534', + 'n02669723', 'n02670186', 'n02670382', 'n02670683', 'n02670935', + 'n02672371', 'n02672831', 'n02675219', 'n02676566', 'n02676938', + 'n02677718', 'n02678897', 'n02679257', 'n02680110', 'n02680512', + 'n02680754', 'n02681392', 'n02682311', 'n02682569', 'n02682922', + 'n02683323', 'n02683454', 'n02683558', 'n02683791', 'n02685082', + 'n02685995', 'n02686121', 'n02686227', 'n02686379', 'n02686568', + 'n02687172', 'n02687423', 'n02687821', 'n02687992', 'n02688273', + 'n02688443', 'n02689144', 'n02689274', 'n02689434', 'n02689748', + 'n02690373', 'n02691156', 'n02692086', 'n02692232', 'n02692877', + 'n02693246', 'n02694045', 'n02694426', 'n02694662', 'n02695627', + 'n02696165', 'n02697221', 'n02697675', 'n02698634', 'n02699494', + 'n02699629', 'n02699770', 'n02699915', 'n02700064', 'n02700258', + 'n02700895', 'n02701002', 'n02702989', 'n02703275', 'n02704645', + 'n02704792', 'n02704949', 'n02705201', 'n02705429', 'n02705944', + 'n02706806', 'n02708093', 'n02708433', 'n02708555', 'n02708711', + 'n02709101', 'n02709367', 'n02709637', 'n02709908', 'n02710044', + 'n02710201', 'n02710324', 'n02710429', 'n02710600', 'n02713003', + 'n02713364', 'n02714751', 'n02715229', 'n02715513', 'n02715712', + 'n02720048', 'n02723165', 'n02725872', 'n02726017', 'n02726210', + 'n02726305', 'n02726681', 'n02727016', 'n02727141', 'n02727426', + 'n02728440', 'n02729837', 'n02729965', 'n02730930', 'n02731398', + 'n02731629', 'n02731900', 'n02732072', 'n02732572', 'n02732827', + 'n02733213', 'n02733524', 'n02734725', 'n02734835', 'n02735361', + 'n02735538', 'n02735688', 'n02736798', 'n02737660', 'n02738031', + 'n02738535', 'n02738741', 'n02738859', 'n02739427', 'n02739550', + 'n02739668', 'n02739889', 'n02740300', 'n02740533', 'n02740764', + 'n02741475', 'n02742322', 'n02742468', 'n02742753', 'n02744323', + 'n02744844', 'n02745611', 'n02746365', 'n02746595', 'n02747177', + 'n02747672', 'n02747802', 'n02749479', 'n02749953', 'n02750070', + 'n02750169', 'n02751215', 'n02751295', 'n02752496', 'n02752615', + 'n02752810', 'n02753044', 'n02753394', 'n02754103', 'n02754656', + 'n02755140', 'n02755529', 'n02755823', 'n02756098', 'n02756977', + 'n02757061', 'n02757337', 'n02757462', 'n02757714', 'n02757810', + 'n02758134', 'n02758863', 'n02758960', 'n02759257', 'n02759387', + 'n02759963', 'n02760099', 'n02760199', 'n02760429', 'n02760658', + 'n02760855', 'n02761206', 'n02761392', 'n02761557', 'n02761696', + 'n02761834', 
'n02762371', 'n02762508', 'n02763306', 'n02763604', + 'n02763901', 'n02764044', 'n02764398', 'n02764505', 'n02764779', + 'n02764935', 'n02766320', 'n02766534', 'n02766792', 'n02767038', + 'n02767147', 'n02767433', 'n02767665', 'n02767956', 'n02768114', + 'n02768226', 'n02768655', 'n02768973', 'n02769075', 'n02769290', + 'n02769669', 'n02769748', 'n02769963', 'n02770211', 'n02770721', + 'n02770830', 'n02771004', 'n02771166', 'n02771286', 'n02771750', + 'n02772101', 'n02772435', 'n02772700', 'n02773037', 'n02773838', + 'n02774152', 'n02774630', 'n02774921', 'n02775039', 'n02775178', + 'n02775483', 'n02775897', 'n02776007', 'n02776205', 'n02776631', + 'n02776825', 'n02776978', 'n02777100', 'n02777292', 'n02777734', + 'n02778294', 'n02778456', 'n02778669', 'n02779435', 'n02780704', + 'n02780815', 'n02781121', 'n02781338', 'n02782093', 'n02782602', + 'n02782681', 'n02782778', 'n02783161', 'n02783324', 'n02783459', + 'n02783900', 'n02783994', 'n02784124', 'n02785648', 'n02786058', + 'n02786198', 'n02786331', 'n02786736', 'n02786837', 'n02787435', + 'n02787622', 'n02788021', 'n02788148', 'n02788572', 'n02789487', + 'n02790669', 'n02790823', 'n02790996', 'n02791124', 'n02791270', + 'n02791665', 'n02792409', 'n02792552', 'n02793089', 'n02793199', + 'n02793495', 'n02793842', 'n02794008', 'n02794156', 'n02794664', + 'n02795169', 'n02795528', 'n02795670', 'n02796207', 'n02796318', + 'n02796995', 'n02797295', 'n02797535', 'n02797692', 'n02799071', + 'n02799175', 'n02799323', 'n02799897', 'n02800213', 'n02800497', + 'n02800675', 'n02801184', 'n02801450', 'n02801525', 'n02801823', + 'n02801938', 'n02802215', 'n02802426', 'n02802544', 'n02802721', + 'n02802990', 'n02803349', 'n02803539', 'n02803666', 'n02803934', + 'n02804123', 'n02804252', 'n02804414', 'n02804515', 'n02804610', + 'n02805983', 'n02806088', 'n02806379', 'n02806530', 'n02807133', + 'n02807523', 'n02807616', 'n02807731', 'n02808185', 'n02808304', + 'n02808440', 'n02809105', 'n02809241', 'n02810270', 'n02810471', + 'n02810782', 'n02811059', 'n02811204', 'n02811350', 'n02811468', + 'n02811618', 'n02811719', 'n02811936', 'n02812201', 'n02812949', + 'n02813252', 'n02813399', 'n02813544', 'n02813645', 'n02813752', + 'n02814116', 'n02814428', 'n02814533', 'n02814774', 'n02814860', + 'n02815749', 'n02815834', 'n02815950', 'n02816656', 'n02816768', + 'n02817031', 'n02817516', 'n02818135', 'n02818832', 'n02820210', + 'n02820556', 'n02820675', 'n02821202', 'n02821415', 'n02821627', + 'n02821943', 'n02822064', 'n02822220', 'n02822579', 'n02823124', + 'n02823335', 'n02823428', 'n02823510', 'n02823586', 'n02823750', + 'n02823848', 'n02823964', 'n02824058', 'n02824319', 'n02824448', + 'n02825153', 'n02825442', 'n02825657', 'n02825961', 'n02826068', + 'n02826589', 'n02826886', 'n02827606', 'n02828299', 'n02828427', + 'n02828884', 'n02829596', 'n02831237', 'n02831335', 'n02831595', + 'n02831724', 'n02831894', 'n02833793', 'n02834397', 'n02834506', + 'n02834778', 'n02835271', 'n02835412', 'n02835724', 'n02835829', + 'n02835915', 'n02836035', 'n02836174', 'n02836392', 'n02837789', + 'n02837887', 'n02838345', 'n02838728', 'n02839110', 'n02839351', + 'n02839592', 'n02839910', 'n02840134', 'n02840245', 'n02840619', + 'n02841187', 'n02841315', 'n02841506', 'n02842573', 'n02842809', + 'n02843029', 'n02843158', 'n02843276', 'n02843553', 'n02843684', + 'n02844307', 'n02846141', 'n02846511', 'n02846733', 'n02847631', + 'n02847852', 'n02848216', 'n02848523', 'n02848921', 'n02849154', + 'n02849885', 'n02850732', 'n02850950', 'n02851099', 'n02851939', + 'n02852043', 
'n02852173', 'n02852360', 'n02853016', 'n02854378', + 'n02854532', 'n02854739', 'n02854926', 'n02855089', 'n02855390', + 'n02855701', 'n02855925', 'n02856237', 'n02857477', 'n02857644', + 'n02858304', 'n02859184', 'n02859343', 'n02859443', 'n02859955', + 'n02860415', 'n02860640', 'n02860847', 'n02861022', 'n02861147', + 'n02861387', 'n02861886', 'n02862048', 'n02862916', 'n02863014', + 'n02863426', 'n02863536', 'n02863750', 'n02864504', 'n02864593', + 'n02865351', 'n02865665', 'n02865931', 'n02866386', 'n02866578', + 'n02867715', 'n02867966', 'n02868638', 'n02868975', 'n02869155', + 'n02869249', 'n02869563', 'n02869737', 'n02869837', 'n02870526', + 'n02870676', 'n02870880', 'n02871005', 'n02871147', 'n02871314', + 'n02871439', 'n02871525', 'n02871824', 'n02871963', 'n02872333', + 'n02872529', 'n02872752', 'n02873520', 'n02873733', 'n02873839', + 'n02874086', 'n02874442', 'n02874537', 'n02874750', 'n02876084', + 'n02876326', 'n02876657', 'n02877266', 'n02877765', 'n02877962', + 'n02878222', 'n02878425', 'n02878628', 'n02879087', 'n02879309', + 'n02879718', 'n02880189', 'n02880393', 'n02880546', 'n02880842', + 'n02880940', 'n02881193', 'n02881757', 'n02881906', 'n02882190', + 'n02882301', 'n02882647', 'n02882894', 'n02883004', 'n02883205', + 'n02883344', 'n02884994', 'n02885108', 'n02885338', 'n02885462', + 'n02885882', 'n02886321', 'n02886434', 'n02887079', 'n02887209', + 'n02887489', 'n02887970', 'n02888270', 'n02889425', 'n02889646', + 'n02890188', 'n02890351', 'n02890513', 'n02890662', 'n02890940', + 'n02891188', 'n02891788', 'n02892201', 'n02892304', 'n02892499', + 'n02892767', 'n02892948', 'n02893608', 'n02893692', 'n02893941', + 'n02894158', 'n02894337', 'n02894605', 'n02895154', 'n02895328', + 'n02895438', 'n02896442', 'n02897097', 'n02897820', 'n02898269', + 'n02898369', 'n02898585', 'n02898711', 'n02899439', 'n02900160', + 'n02900705', 'n02901114', 'n02901259', 'n02901377', 'n02901793', + 'n02902079', 'n02902687', 'n02902916', 'n02903126', 'n02903204', + 'n02903852', 'n02904233', 'n02904640', 'n02904803', 'n02904927', + 'n02905036', 'n02905152', 'n02906734', 'n02907082', 'n02907391', + 'n02907656', 'n02907873', 'n02908217', 'n02908773', 'n02909285', + 'n02909870', 'n02910145', 'n02910353', 'n02910542', 'n02910864', + 'n02911332', 'n02911485', 'n02912065', 'n02912319', 'n02912557', + 'n02912894', 'n02913152', 'n02914991', 'n02915904', 'n02916179', + 'n02916350', 'n02916936', 'n02917067', 'n02917377', 'n02917521', + 'n02917607', 'n02917964', 'n02918112', 'n02918330', 'n02918595', + 'n02918831', 'n02918964', 'n02919148', 'n02919414', 'n02919792', + 'n02919890', 'n02920083', 'n02920259', 'n02920369', 'n02920658', + 'n02921029', 'n02921195', 'n02921756', 'n02921884', 'n02922292', + 'n02922578', 'n02922798', 'n02923682', 'n02924116', 'n02925009', + 'n02925107', 'n02925519', 'n02925666', 'n02926426', 'n02926591', + 'n02927161', 'n02927764', 'n02927887', 'n02928049', 'n02928299', + 'n02928608', 'n02929289', 'n02929582', 'n02930080', 'n02930214', + 'n02930645', 'n02930766', 'n02931148', 'n02931294', 'n02931417', + 'n02931836', 'n02932019', 'n02932400', 'n02932523', 'n02932693', + 'n02932891', 'n02933112', 'n02933340', 'n02933462', 'n02933649', + 'n02933990', 'n02934168', 'n02934451', 'n02935017', 'n02935387', + 'n02935658', 'n02935891', 'n02936176', 'n02936281', 'n02936402', + 'n02936570', 'n02936714', 'n02937958', 'n02938886', 'n02939185', + 'n02939866', 'n02940385', 'n02940570', 'n02942349', 'n02942460', + 'n02942699', 'n02943241', 'n02943871', 'n02943964', 'n02944075', + 'n02944146', 
'n02944459', 'n02944579', 'n02946127', 'n02946270', + 'n02946348', 'n02946509', 'n02946824', 'n02946921', 'n02947660', + 'n02947818', 'n02948072', 'n02948403', 'n02948557', 'n02949202', + 'n02949542', 'n02950256', 'n02950632', 'n02950826', 'n02950943', + 'n02951358', 'n02951585', 'n02951703', 'n02951843', 'n02952109', + 'n02952237', 'n02952374', 'n02952485', 'n02952585', 'n02952674', + 'n02953197', 'n02953455', 'n02954163', 'n02954340', 'n02954938', + 'n02955065', 'n02955247', 'n02955540', 'n02955767', 'n02956699', + 'n02956795', 'n02956883', 'n02957008', 'n02957135', 'n02957755', + 'n02958343', 'n02959942', 'n02960352', 'n02960690', 'n02960903', + 'n02961035', 'n02961225', 'n02961451', 'n02961544', 'n02962061', + 'n02962200', 'n02962414', 'n02962843', 'n02963159', 'n02963302', + 'n02963503', 'n02963692', 'n02963821', 'n02963987', 'n02964843', + 'n02965216', 'n02965300', 'n02965783', 'n02966193', 'n02966545', + 'n02966687', 'n02967294', 'n02967626', 'n02967782', 'n02968074', + 'n02968333', 'n02968473', 'n02969010', 'n02969323', 'n02970408', + 'n02970534', 'n02970685', 'n02970849', 'n02971167', 'n02971356', + 'n02971473', 'n02971579', 'n02971691', 'n02972397', 'n02973017', + 'n02973236', 'n02973805', 'n02973904', 'n02974003', 'n02974348', + 'n02974697', 'n02975212', 'n02976123', 'n02976249', 'n02976350', + 'n02976455', 'n02976939', 'n02977058', 'n02977330', 'n02977438', + 'n02977619', 'n02977936', 'n02978055', 'n02978367', 'n02978478', + 'n02978753', 'n02978881', 'n02979074', 'n02979186', 'n02979290', + 'n02979399', 'n02979836', 'n02980036', 'n02980441', 'n02981024', + 'n02981321', 'n02981792', 'n02981911', 'n02982232', 'n02982416', + 'n02982515', 'n02982599', 'n02983189', 'n02983357', 'n02984061', + 'n02984203', 'n02984469', 'n02985963', 'n02986160', 'n02987379', + 'n02987492', 'n02988066', 'n02988156', 'n02988304', 'n02988486', + 'n02988679', 'n02988963', 'n02989099', 'n02990373', 'n02991302', + 'n02991847', 'n02992032', 'n02992211', 'n02992368', 'n02992529', + 'n02992795', 'n02993194', 'n02993368', 'n02994573', 'n02995345', + 'n02995871', 'n02995998', 'n02997391', 'n02997607', 'n02997910', + 'n02998003', 'n02998563', 'n02998841', 'n02999138', 'n02999410', + 'n02999936', 'n03000134', 'n03000247', 'n03000684', 'n03001115', + 'n03001627', 'n03002096', 'n03002341', 'n03002711', 'n03002816', + 'n03002948', 'n03003091', 'n03004275', 'n03004824', 'n03005033', + 'n03005147', 'n03005285', 'n03006626', 'n03007130', 'n03007444', + 'n03007591', 'n03008177', 'n03008976', 'n03009794', 'n03010473', + 'n03010656', 'n03010795', 'n03010915', 'n03011018', 'n03011355', + 'n03011741', 'n03012013', 'n03012373', 'n03012897', 'n03013438', + 'n03013580', 'n03013850', 'n03014440', 'n03014705', 'n03015149', + 'n03015254', 'n03015478', 'n03015851', 'n03016389', 'n03016609', + 'n03016737', 'n03016868', 'n03016953', 'n03017070', 'n03017168', + 'n03018209', 'n03018349', 'n03018712', 'n03019434', 'n03019685', + 'n03019806', 'n03019938', 'n03020034', 'n03020416', 'n03020692', + 'n03021228', 'n03024064', 'n03025165', 'n03025250', 'n03026506', + 'n03026907', 'n03027001', 'n03027108', 'n03027250', 'n03027625', + 'n03028079', 'n03028596', 'n03028785', 'n03029197', 'n03029445', + 'n03029925', 'n03030262', 'n03030353', 'n03030557', 'n03030880', + 'n03031012', 'n03031152', 'n03031422', 'n03032252', 'n03032453', + 'n03032811', 'n03033362', 'n03033986', 'n03034244', 'n03034405', + 'n03034663', 'n03035252', 'n03035832', 'n03036022', 'n03036469', + 'n03037404', 'n03037709', 'n03038281', 'n03038480', 'n03038685', + 'n03038870', 
'n03039015', 'n03039259', 'n03039493', 'n03039827', + 'n03039947', 'n03040376', 'n03041114', 'n03041449', 'n03041632', + 'n03041810', 'n03042139', 'n03042490', 'n03042697', 'n03043423', + 'n03043693', 'n03043958', 'n03044934', 'n03045228', 'n03045337', + 'n03045698', 'n03046029', 'n03046133', 'n03046257', 'n03046802', + 'n03046921', 'n03047052', 'n03047690', 'n03047799', 'n03047941', + 'n03048883', 'n03049066', 'n03049782', 'n03049924', 'n03050453', + 'n03050546', 'n03050655', 'n03050864', 'n03051041', 'n03051249', + 'n03051396', 'n03051540', 'n03054901', 'n03055418', 'n03055857', + 'n03057021', 'n03057541', 'n03057636', 'n03057920', 'n03058107', + 'n03058603', 'n03059685', 'n03061211', 'n03061345', 'n03061505', + 'n03061674', 'n03061893', 'n03062015', 'n03062122', 'n03062245', + 'n03062336', 'n03062985', 'n03063073', 'n03063199', 'n03063338', + 'n03063485', 'n03063599', 'n03063689', 'n03063968', 'n03064250', + 'n03064350', 'n03064758', 'n03064935', 'n03065243', 'n03065424', + 'n03066359', 'n03066849', 'n03067093', 'n03067212', 'n03067339', + 'n03067518', 'n03068181', 'n03068998', 'n03069752', 'n03070059', + 'n03070193', 'n03071021', 'n03071160', 'n03072201', 'n03072440', + 'n03072682', 'n03073296', 'n03073545', 'n03073694', 'n03073977', + 'n03074380', 'n03074855', 'n03075097', 'n03075370', 'n03075634', + 'n03075768', 'n03075946', 'n03077616', 'n03077741', 'n03078287', + 'n03078802', 'n03078995', 'n03079136', 'n03079230', 'n03079494', + 'n03080497', 'n03080633', 'n03081986', 'n03082280', 'n03082656', + 'n03082807', 'n03082979', 'n03084420', 'n03084834', 'n03085013', + 'n03085219', 'n03085602', 'n03085915', 'n03086457', 'n03086580', + 'n03086670', 'n03086868', 'n03087069', 'n03087245', 'n03087366', + 'n03087816', 'n03088389', 'n03088580', 'n03089624', 'n03089753', + 'n03089879', 'n03090000', 'n03090172', 'n03091044', 'n03091374', + 'n03092166', 'n03092314', 'n03092656', 'n03092883', 'n03094159', + 'n03094503', 'n03095699', 'n03096960', 'n03097362', 'n03097535', + 'n03097673', 'n03098140', 'n03098688', 'n03098959', 'n03099147', + 'n03099274', 'n03099454', 'n03099945', 'n03100240', 'n03100346', + 'n03100490', 'n03100897', 'n03101156', 'n03101517', 'n03101664', + 'n03101796', 'n03101986', 'n03102371', 'n03102654', 'n03103396', + 'n03103563', 'n03104512', 'n03105088', 'n03105306', 'n03105467', + 'n03106898', 'n03107046', 'n03107488', 'n03108455', 'n03108853', + 'n03109150', 'n03109253', 'n03109693', 'n03109881', 'n03110669', + 'n03111041', 'n03111177', 'n03111296', 'n03112719', 'n03112869', + 'n03113152', 'n03113657', 'n03113835', 'n03114236', 'n03114379', + 'n03114504', 'n03115180', 'n03115400', 'n03115762', 'n03115897', + 'n03116530', 'n03116767', 'n03118969', 'n03119203', 'n03119396', + 'n03119510', 'n03120491', 'n03120778', 'n03121298', 'n03121431', + 'n03121897', 'n03122073', 'n03122202', 'n03122295', 'n03123553', + 'n03123809', 'n03123917', 'n03124043', 'n03124170', 'n03124474', + 'n03124590', 'n03125057', 'n03125729', 'n03125870', 'n03126385', + 'n03126580', 'n03126707', 'n03127203', 'n03127408', 'n03127747', + 'n03127925', 'n03128085', 'n03128248', 'n03128427', 'n03128519', + 'n03129001', 'n03129471', 'n03129753', 'n03129848', 'n03130761', + 'n03131574', 'n03131669', 'n03131967', 'n03132076', 'n03132261', + 'n03132666', 'n03132776', 'n03133050', 'n03133415', 'n03133878', + 'n03134739', 'n03134853', 'n03135030', 'n03135532', 'n03136369', + 'n03137473', 'n03138344', 'n03138669', 'n03139464', 'n03140126', + 'n03140292', 'n03140431', 'n03140652', 'n03141065', 'n03141327', + 'n03141455', 
'n03141702', 'n03141823', 'n03142679', 'n03145147', + 'n03145522', 'n03145719', 'n03146219', 'n03146687', 'n03146777', + 'n03146846', 'n03147280', 'n03147509', 'n03148324', 'n03148727', + 'n03149135', 'n03149686', 'n03150232', 'n03150511', 'n03151077', + 'n03152303', 'n03154073', 'n03154895', 'n03155178', 'n03156279', + 'n03156767', 'n03157348', 'n03158186', 'n03158885', 'n03159535', + 'n03159640', 'n03160309', 'n03160740', 'n03161450', 'n03163222', + 'n03163381', 'n03164344', 'n03164605', 'n03164722', 'n03165096', + 'n03165466', 'n03165616', 'n03166514', 'n03167978', 'n03168107', + 'n03168217', 'n03169176', 'n03170635', 'n03170872', 'n03171228', + 'n03171356', 'n03171635', 'n03172038', 'n03173270', 'n03173387', + 'n03173929', 'n03174450', 'n03174731', 'n03175081', 'n03175189', + 'n03175457', 'n03176386', 'n03176594', 'n03176763', 'n03177059', + 'n03177165', 'n03178000', 'n03178430', 'n03178674', 'n03179701', + 'n03179910', 'n03180011', 'n03180384', 'n03180504', 'n03180865', + 'n03180969', 'n03181293', 'n03183080', 'n03186285', 'n03186818', + 'n03187037', 'n03187268', 'n03187595', 'n03188531', 'n03188725', + 'n03189083', 'n03191286', 'n03192543', 'n03193107', 'n03193260', + 'n03193423', 'n03193597', 'n03195332', 'n03195959', 'n03196062', + 'n03196217', 'n03196598', 'n03196990', 'n03197201', 'n03197337', + 'n03198500', 'n03199647', 'n03199775', 'n03199901', 'n03200231', + 'n03200357', 'n03200539', 'n03200701', 'n03200906', 'n03201035', + 'n03201208', 'n03201529', 'n03201638', 'n03201776', 'n03202354', + 'n03202940', 'n03204306', 'n03204558', 'n03205458', 'n03205574', + 'n03205669', 'n03206158', 'n03206282', 'n03206718', 'n03206908', + 'n03207305', 'n03207630', 'n03207743', 'n03207835', 'n03207941', + 'n03208556', 'n03208938', 'n03209359', 'n03209477', 'n03209910', + 'n03210245', 'n03210372', 'n03210552', 'n03211117', 'n03211789', + 'n03212114', 'n03212811', 'n03213538', 'n03213715', 'n03213826', + 'n03214253', 'n03214582', 'n03215508', 'n03216402', 'n03216710', + 'n03216828', 'n03218198', 'n03219010', 'n03219135', 'n03219483', + 'n03219612', 'n03219859', 'n03219966', 'n03220237', 'n03220513', + 'n03220692', 'n03221059', 'n03221351', 'n03221540', 'n03221720', + 'n03222176', 'n03222318', 'n03222516', 'n03223162', 'n03223299', + 'n03223553', 'n03223686', 'n03224603', 'n03224753', 'n03225108', + 'n03225777', 'n03225988', 'n03226254', 'n03226375', 'n03226538', + 'n03226880', 'n03227184', 'n03227317', 'n03228254', 'n03228365', + 'n03228692', 'n03228967', 'n03229244', 'n03231160', 'n03231368', + 'n03231819', 'n03232309', 'n03232543', 'n03233123', 'n03233624', + 'n03233744', 'n03233905', 'n03234164', 'n03234952', 'n03235042', + 'n03235180', 'n03235327', 'n03235796', 'n03236093', 'n03236217', + 'n03236423', 'n03236735', 'n03237340', 'n03237416', 'n03237839', + 'n03237992', 'n03238131', 'n03238286', 'n03238586', 'n03239054', + 'n03239259', 'n03239726', 'n03240140', 'n03240683', 'n03240892', + 'n03241093', 'n03241335', 'n03241496', 'n03242506', 'n03243218', + 'n03244047', 'n03244231', 'n03244775', 'n03244919', 'n03245724', + 'n03245889', 'n03246454', 'n03246933', 'n03247083', 'n03249342', + 'n03249569', 'n03250089', 'n03250279', 'n03250405', 'n03250847', + 'n03250952', 'n03251533', 'n03251766', 'n03251932', 'n03252637', + 'n03253279', 'n03253796', 'n03253886', 'n03254046', 'n03254189', + 'n03254374', 'n03254862', 'n03255030', 'n03255899', 'n03256032', + 'n03256166', 'n03256788', 'n03256928', 'n03257210', 'n03257586', + 'n03258330', 'n03258577', 'n03258905', 'n03259009', 'n03259280', + 'n03259401', 
'n03259505', 'n03260849', 'n03261019', 'n03261603', + 'n03261776', 'n03262072', 'n03262248', 'n03262519', 'n03262717', + 'n03262809', 'n03262932', 'n03263076', 'n03264906', 'n03266371', + 'n03266749', 'n03267113', 'n03267468', 'n03267821', 'n03268142', + 'n03268311', 'n03268645', 'n03268790', 'n03268918', 'n03269203', + 'n03269401', 'n03270165', 'n03270854', 'n03271030', 'n03271574', + 'n03272010', 'n03272125', 'n03272239', 'n03272383', 'n03272562', + 'n03272810', 'n03272940', 'n03273061', 'n03273551', 'n03273740', + 'n03273913', 'n03274265', 'n03274435', 'n03275681', 'n03276696', + 'n03277459', 'n03277771', 'n03278248', 'n03278914', 'n03279508', + 'n03280644', 'n03281145', 'n03281673', 'n03282295', 'n03282401', + 'n03283221', 'n03284308', 'n03284743', 'n03284886', 'n03284981', + 'n03285578', 'n03285912', 'n03287351', 'n03287733', 'n03288003', + 'n03288500', 'n03288886', 'n03289660', 'n03289985', 'n03290096', + 'n03290195', 'n03290653', 'n03291413', 'n03291741', 'n03291819', + 'n03291963', 'n03292475', 'n03292603', 'n03293741', 'n03293863', + 'n03294048', 'n03294604', 'n03294833', 'n03295012', 'n03295246', + 'n03296081', 'n03296328', 'n03296478', 'n03297103', 'n03297226', + 'n03297495', 'n03297644', 'n03297735', 'n03298089', 'n03298716', + 'n03298858', 'n03300216', 'n03300443', 'n03301291', 'n03301568', + 'n03301833', 'n03301940', 'n03302671', 'n03302938', 'n03303217', + 'n03303831', 'n03306385', 'n03307037', 'n03307792', 'n03308152', + 'n03308481', 'n03309110', 'n03309356', 'n03309465', 'n03309687', + 'n03309808', 'n03313333', 'n03314227', 'n03314608', 'n03314780', + 'n03314884', 'n03315644', 'n03316105', 'n03316406', 'n03317788', + 'n03317889', 'n03318136', 'n03318294', 'n03318865', 'n03318983', + 'n03319457', 'n03319745', 'n03320046', 'n03320262', 'n03320421', + 'n03320519', 'n03320959', 'n03321103', 'n03321563', 'n03321954', + 'n03322570', 'n03322704', 'n03322836', 'n03322940', 'n03323096', + 'n03323703', 'n03324928', 'n03325088', 'n03325403', 'n03325584', + 'n03325691', 'n03325941', 'n03326660', 'n03326795', 'n03326948', + 'n03327133', 'n03327234', 'n03327553', 'n03327691', 'n03329302', + 'n03329536', 'n03329663', 'n03331077', 'n03331599', 'n03332005', + 'n03332271', 'n03332393', 'n03332989', 'n03333129', 'n03333252', + 'n03333610', 'n03333711', 'n03334291', 'n03334382', 'n03334912', + 'n03335030', 'n03336282', 'n03336575', 'n03337140', 'n03337383', + 'n03338821', 'n03339529', 'n03339643', 'n03340723', 'n03341153', + 'n03341297', 'n03341606', 'n03342015', 'n03342127', 'n03342262', + 'n03342657', 'n03343354', 'n03343560', 'n03343737', 'n03343853', + 'n03344305', 'n03344393', 'n03344642', 'n03345487', 'n03345837', + 'n03346135', 'n03346455', 'n03347037', 'n03347617', 'n03348868', + 'n03349469', 'n03349771', 'n03349892', 'n03350204', 'n03350602', + 'n03351434', 'n03351979', 'n03352628', 'n03353951', 'n03354207', + 'n03354903', 'n03355768', 'n03355925', 'n03356858', 'n03356982', + 'n03357267', 'n03357716', 'n03358172', 'n03358380', 'n03358726', + 'n03359137', 'n03359285', 'n03359436', 'n03359566', 'n03360300', + 'n03360431', 'n03360622', 'n03361297', 'n03361380', 'n03361550', + 'n03362890', 'n03363363', 'n03363549', 'n03363749', 'n03364008', + 'n03364599', 'n03365231', 'n03365374', 'n03365592', 'n03365991', + 'n03366823', 'n03366974', 'n03367059', 'n03367410', 'n03367545', + 'n03368352', 'n03369276', 'n03370387', 'n03371875', 'n03372029', + 'n03372549', 'n03373237', 'n03373611', 'n03373943', 'n03374372', + 'n03374473', 'n03374649', 'n03374838', 'n03375171', 'n03375329', + 'n03375575', 
'n03376159', 'n03376279', 'n03376595', 'n03376938', + 'n03378005', 'n03378174', 'n03379051', 'n03379204', 'n03379343', + 'n03379828', 'n03380724', 'n03380867', 'n03381126', 'n03382292', + 'n03382413', 'n03382856', 'n03383099', 'n03384352', 'n03384891', + 'n03385557', 'n03386011', 'n03386544', 'n03386726', 'n03386870', + 'n03387653', 'n03388043', 'n03388183', 'n03388323', 'n03388549', + 'n03389611', 'n03389761', 'n03389889', 'n03390075', 'n03390786', + 'n03390983', 'n03391301', 'n03391770', 'n03392741', 'n03393017', + 'n03393761', 'n03393912', 'n03394272', 'n03394480', 'n03394649', + 'n03394916', 'n03395256', 'n03395514', 'n03395859', 'n03396074', + 'n03396580', 'n03396654', 'n03397087', 'n03397266', 'n03397532', + 'n03397947', 'n03398153', 'n03398228', 'n03399677', 'n03399761', + 'n03399971', 'n03400231', 'n03400972', 'n03401129', 'n03401279', + 'n03402188', 'n03402369', 'n03402941', 'n03403643', 'n03404149', + 'n03404251', 'n03404360', 'n03404449', 'n03405111', 'n03405265', + 'n03405595', 'n03405725', 'n03406966', 'n03407369', 'n03407865', + 'n03408054', 'n03408444', 'n03409297', 'n03409393', 'n03409591', + 'n03410423', 'n03410571', 'n03410740', 'n03410938', 'n03411079', + 'n03412058', 'n03413684', 'n03414029', 'n03414162', 'n03414676', + 'n03415252', 'n03415486', 'n03415749', 'n03416094', 'n03416489', + 'n03416640', 'n03416775', 'n03416900', 'n03417042', 'n03417202', + 'n03417345', 'n03417749', 'n03417970', 'n03418158', 'n03418242', + 'n03418402', 'n03418618', 'n03418915', 'n03419014', 'n03420345', + 'n03420801', 'n03421117', 'n03421324', 'n03421485', 'n03421669', + 'n03422072', 'n03422771', 'n03423306', 'n03423479', 'n03423568', + 'n03423719', 'n03423877', 'n03424325', 'n03424489', 'n03424630', + 'n03424862', 'n03425241', 'n03425325', 'n03425413', 'n03425595', + 'n03425769', 'n03426134', 'n03426285', 'n03427202', 'n03427296', + 'n03428090', 'n03428226', 'n03428349', 'n03429003', 'n03429137', + 'n03429288', 'n03429682', 'n03429914', 'n03430091', 'n03430313', + 'n03430418', 'n03430551', 'n03431243', 'n03431745', 'n03432061', + 'n03432129', 'n03433877', 'n03434188', 'n03434285', 'n03435593', + 'n03435743', 'n03435991', 'n03436075', 'n03436182', 'n03436417', + 'n03436549', 'n03436891', 'n03437430', 'n03437741', 'n03437829', + 'n03437941', 'n03438071', 'n03438257', 'n03438661', 'n03438863', + 'n03439348', 'n03439814', 'n03440216', 'n03440682', 'n03441112', + 'n03441345', 'n03441582', 'n03442597', 'n03442756', 'n03443005', + 'n03443149', 'n03443371', 'n03443912', 'n03444034', 'n03445326', + 'n03445617', 'n03445777', 'n03445924', 'n03446070', 'n03446268', + 'n03446832', 'n03447075', 'n03447358', 'n03447447', 'n03447721', + 'n03448590', 'n03448956', 'n03449309', 'n03449451', 'n03450230', + 'n03450516', 'n03450734', 'n03450974', 'n03451120', 'n03451711', + 'n03451798', 'n03452267', 'n03452449', 'n03452594', 'n03452741', + 'n03453231', 'n03453443', 'n03454110', 'n03454211', 'n03454442', + 'n03454536', 'n03454707', 'n03454885', 'n03455488', 'n03456024', + 'n03456186', 'n03456299', 'n03456447', 'n03456548', 'n03456665', + 'n03457008', 'n03457686', 'n03457902', 'n03458271', 'n03459328', + 'n03459775', 'n03460040', 'n03460147', 'n03460297', 'n03461288', + 'n03461385', 'n03461988', 'n03462110', 'n03463381', 'n03463666', + 'n03464053', 'n03465151', 'n03465426', 'n03465500', 'n03465718', + 'n03466493', 'n03466600', 'n03466839', 'n03467068', 'n03467517', + 'n03467796', 'n03467984', 'n03468696', 'n03468821', 'n03469175', + 'n03469493', 'n03469903', 'n03470629', 'n03471190', 'n03472232', + 'n03473227', 
'n03474779', 'n03474896', 'n03475581', 'n03475823', + 'n03476083', 'n03476313', 'n03476684', 'n03476991', 'n03477512', + 'n03478589', 'n03478756', 'n03478907', 'n03479121', 'n03479397', + 'n03479502', 'n03480579', 'n03480719', 'n03481172', 'n03482252', + 'n03482405', 'n03482523', 'n03482877', 'n03483230', 'n03483316', + 'n03483823', 'n03483971', 'n03484083', 'n03484487', 'n03484576', + 'n03484809', 'n03484931', 'n03485198', 'n03485309', 'n03485407', + 'n03485794', 'n03487090', 'n03487331', 'n03487444', 'n03487533', + 'n03487642', 'n03487774', 'n03487886', 'n03488188', 'n03488438', + 'n03488887', 'n03489162', 'n03490006', 'n03490119', 'n03490884', + 'n03491032', 'n03491988', 'n03492250', 'n03492542', 'n03492922', + 'n03494278', 'n03494537', 'n03494706', 'n03495039', 'n03495258', + 'n03495570', 'n03496296', 'n03496612', 'n03496892', 'n03497352', + 'n03497657', 'n03498441', 'n03498662', 'n03498781', 'n03498962', + 'n03499354', 'n03499468', 'n03499907', 'n03500209', 'n03500389', + 'n03500699', 'n03501152', 'n03501614', 'n03502200', 'n03502331', + 'n03502509', 'n03503233', 'n03503477', 'n03503997', 'n03504205', + 'n03504723', 'n03505133', 'n03505383', 'n03505504', 'n03505667', + 'n03506028', 'n03506184', 'n03506370', 'n03506560', 'n03506727', + 'n03506880', 'n03507241', 'n03507458', 'n03507963', 'n03508101', + 'n03508881', 'n03509394', 'n03509608', 'n03510244', 'n03511175', + 'n03511333', 'n03512147', 'n03512911', 'n03513137', 'n03513376', + 'n03514451', 'n03514693', 'n03514894', 'n03516367', 'n03516844', + 'n03516996', 'n03517647', 'n03517760', 'n03517899', 'n03518135', + 'n03518305', 'n03518445', 'n03518943', 'n03519081', 'n03519387', + 'n03520493', 'n03521076', 'n03521544', 'n03521675', 'n03521899', + 'n03522003', 'n03522100', 'n03522634', 'n03523134', 'n03523987', + 'n03524150', 'n03524574', 'n03524745', 'n03525074', 'n03525454', + 'n03527149', 'n03527444', 'n03527565', 'n03528263', 'n03528523', + 'n03528901', 'n03529175', 'n03529444', 'n03529629', 'n03529860', + 'n03530511', 'n03530642', 'n03530910', 'n03531281', 'n03532342', + 'n03532672', 'n03532919', 'n03533014', 'n03534580', 'n03534776', + 'n03535024', 'n03535780', 'n03536122', 'n03536761', 'n03537241', + 'n03537412', 'n03538037', 'n03538179', 'n03538406', 'n03538634', + 'n03539433', 'n03539546', 'n03539678', 'n03540090', 'n03540267', + 'n03540595', 'n03540914', 'n03541091', 'n03541269', 'n03541537', + 'n03541696', 'n03541923', 'n03542333', 'n03542605', 'n03542860', + 'n03543012', 'n03543112', 'n03543254', 'n03543394', 'n03543603', + 'n03543735', 'n03543945', 'n03544143', 'n03544238', 'n03544360', + 'n03545150', 'n03545470', 'n03545756', 'n03546112', 'n03546235', + 'n03546340', 'n03547054', 'n03547229', 'n03548086', 'n03548402', + 'n03548626', 'n03549199', 'n03549473', 'n03549589', 'n03549732', + 'n03549897', 'n03550153', 'n03550289', 'n03551395', 'n03551582', + 'n03552749', 'n03553019', 'n03553248', 'n03554460', 'n03555006', + 'n03555426', 'n03555564', 'n03555662', 'n03556679', 'n03556992', + 'n03557270', 'n03557360', 'n03557590', 'n03557692', 'n03558176', + 'n03558404', 'n03558633', 'n03558739', 'n03559999', 'n03560430', + 'n03561047', 'n03563200', 'n03563460', 'n03565288', 'n03565830', + 'n03566193', 'n03566730', 'n03567066', 'n03568117', 'n03569293', + 'n03571280', 'n03571625', 'n03571942', 'n03572107', 'n03572321', + 'n03574243', 'n03574555', 'n03574816', 'n03577090', 'n03577672', + 'n03578055', 'n03578251', 'n03578656', 'n03579538', 'n03580518', + 'n03580845', 'n03581125', 'n03582508', 'n03582959', 'n03584254', + 'n03584400', 
'n03584829', 'n03585073', 'n03585438', 'n03585682', + 'n03586219', 'n03586631', 'n03587205', 'n03588841', 'n03588951', + 'n03589513', 'n03589791', 'n03590306', 'n03590588', 'n03590841', + 'n03590932', 'n03592245', 'n03592669', 'n03592773', 'n03592931', + 'n03593122', 'n03593526', 'n03594148', 'n03594523', 'n03594734', + 'n03594945', 'n03595264', 'n03595409', 'n03595523', 'n03595614', + 'n03595860', 'n03596285', 'n03596543', 'n03597916', 'n03598151', + 'n03598299', 'n03598515', 'n03598930', 'n03599486', 'n03600285', + 'n03600475', 'n03600722', 'n03600977', 'n03601442', 'n03601638', + 'n03601840', 'n03602081', 'n03602883', 'n03603442', 'n03603594', + 'n03603722', 'n03604156', 'n03604311', 'n03604400', 'n03604843', + 'n03605598', 'n03605722', 'n03605915', 'n03606251', 'n03607029', + 'n03607659', 'n03607923', 'n03609235', 'n03609397', 'n03610098', + 'n03610418', 'n03610524', 'n03610682', 'n03612010', 'n03612814', + 'n03612965', 'n03613294', 'n03613592', 'n03614007', 'n03614532', + 'n03614782', 'n03615300', 'n03615406', 'n03615563', 'n03615655', + 'n03615790', 'n03616428', 'n03616763', 'n03616979', 'n03617095', + 'n03617312', 'n03617480', 'n03618101', 'n03618982', 'n03619196', + 'n03619275', 'n03619396', 'n03619650', 'n03619793', 'n03619890', + 'n03620052', 'n03620967', 'n03621049', 'n03621377', 'n03622058', + 'n03622526', 'n03622839', 'n03622931', 'n03623198', 'n03623338', + 'n03623556', 'n03624134', 'n03624400', 'n03625355', 'n03625539', + 'n03625646', 'n03625943', 'n03626115', 'n03626760', 'n03627232', + 'n03627954', 'n03628215', 'n03628511', 'n03629100', 'n03629231', + 'n03629520', 'n03630262', 'n03630383', 'n03631177', 'n03631922', + 'n03632577', 'n03632729', 'n03632852', 'n03633091', 'n03633886', + 'n03634034', 'n03635032', 'n03635108', 'n03635330', 'n03635668', + 'n03636248', 'n03636649', 'n03637181', 'n03637318', 'n03637898', + 'n03638883', 'n03639077', 'n03639497', 'n03640850', 'n03640988', + 'n03641569', 'n03642444', 'n03642806', 'n03643149', 'n03643253', + 'n03643491', 'n03643737', 'n03644378', 'n03644858', 'n03645011', + 'n03645577', 'n03646020', 'n03646148', 'n03646296', 'n03646916', + 'n03647520', 'n03648431', 'n03649161', 'n03649674', 'n03649797', + 'n03649909', 'n03650551', 'n03651388', 'n03651843', 'n03652100', + 'n03652729', 'n03652932', 'n03653110', 'n03653220', 'n03653583', + 'n03653740', 'n03653833', 'n03653975', 'n03654576', 'n03655072', + 'n03655720', 'n03656484', 'n03656957', 'n03657121', 'n03657511', + 'n03658185', 'n03658858', 'n03659292', 'n03659686', 'n03659809', + 'n03659950', 'n03660124', 'n03660909', 'n03661043', 'n03661340', + 'n03662601', 'n03662719', 'n03662887', 'n03663531', 'n03664943', + 'n03665366', 'n03665924', 'n03666362', 'n03666591', 'n03666917', + 'n03667235', 'n03667552', 'n03667664', 'n03667829', 'n03668067', + 'n03668279', 'n03668488', 'n03668803', 'n03669534', 'n03669886', + 'n03670208', 'n03671914', 'n03672827', 'n03673027', 'n03673450', + 'n03674270', 'n03674440', 'n03674731', 'n03675235', 'n03676087', + 'n03676483', 'n03676623', 'n03676759', 'n03677115', 'n03678558', + 'n03678729', 'n03679384', 'n03679712', 'n03680355', 'n03680512', + 'n03680734', 'n03680858', 'n03680942', 'n03682487', 'n03682877', + 'n03683079', 'n03683457', 'n03683606', 'n03683708', 'n03683995', + 'n03684143', 'n03684224', 'n03684611', 'n03684823', 'n03685307', + 'n03685820', 'n03686130', 'n03686924', 'n03687137', 'n03687928', + 'n03688192', 'n03688405', 'n03688605', 'n03688943', 'n03689157', + 'n03690279', 'n03690473', 'n03690938', 'n03691459', 'n03691817', + 'n03692379', 
'n03692522', 'n03693293', 'n03693474', 'n03693707', + 'n03693860', 'n03694639', 'n03695452', 'n03695753', 'n03695857', + 'n03696065', 'n03696301', 'n03696568', 'n03697007', 'n03697552', + 'n03697913', 'n03698360', 'n03698604', 'n03698723', 'n03698815', + 'n03699280', 'n03699591', 'n03699975', 'n03700963', 'n03701391', + 'n03701790', 'n03703730', 'n03703862', 'n03703945', 'n03704549', + 'n03706229', 'n03706653', 'n03708036', 'n03708843', 'n03709206', + 'n03709363', 'n03709823', 'n03710193', 'n03710637', 'n03710721', + 'n03711044', 'n03711999', 'n03712111', 'n03712337', 'n03713069', + 'n03713436', 'n03714235', 'n03715114', 'n03715386', 'n03715669', + 'n03715892', 'n03716887', 'n03716966', 'n03717131', 'n03717285', + 'n03717447', 'n03717622', 'n03718212', 'n03718335', 'n03718458', + 'n03718581', 'n03718789', 'n03718935', 'n03719053', 'n03719343', + 'n03719743', 'n03720163', 'n03720891', 'n03721047', 'n03721252', + 'n03721384', 'n03721590', 'n03722007', 'n03722288', 'n03723267', + 'n03723781', 'n03724066', 'n03724417', 'n03724538', 'n03724623', + 'n03724756', 'n03724870', 'n03725035', 'n03725600', 'n03725717', + 'n03726760', 'n03726993', 'n03727067', 'n03727465', 'n03727605', + 'n03727837', 'n03727946', 'n03728437', 'n03728982', 'n03729308', + 'n03729826', 'n03730153', 'n03730334', 'n03730494', 'n03730893', + 'n03731019', 'n03731483', 'n03731695', 'n03732020', 'n03732114', + 'n03732458', 'n03733131', 'n03733281', 'n03733644', 'n03733805', + 'n03733925', 'n03735637', 'n03735963', 'n03736064', 'n03736470', + 'n03736970', 'n03738066', 'n03738241', 'n03738472', 'n03739518', + 'n03739693', 'n03742019', 'n03742115', 'n03743016', 'n03743279', + 'n03743902', 'n03744276', 'n03744840', 'n03745146', 'n03745571', + 'n03746005', 'n03746155', 'n03746330', 'n03746486', 'n03748162', + 'n03749807', 'n03751269', 'n03751458', 'n03751757', 'n03752185', + 'n03753077', 'n03757604', 'n03758089', 'n03759243', 'n03759661', + 'n03759954', 'n03760310', 'n03760671', 'n03760944', 'n03761084', + 'n03762332', 'n03762434', 'n03762602', 'n03763968', 'n03764276', + 'n03764736', 'n03764822', 'n03764995', 'n03765561', 'n03765934', + 'n03766044', 'n03766322', 'n03766508', 'n03766935', 'n03767112', + 'n03767203', 'n03767459', 'n03767745', 'n03767966', 'n03768916', + 'n03769610', 'n03769881', 'n03770085', 'n03770316', 'n03770439', + 'n03770679', 'n03770954', 'n03772077', 'n03772269', 'n03772584', + 'n03773035', 'n03773504', 'n03774327', 'n03774461', 'n03775071', + 'n03775199', 'n03775388', 'n03775546', 'n03775636', 'n03775747', + 'n03775847', 'n03776460', 'n03776877', 'n03777568', 'n03777754', + 'n03778817', 'n03779128', 'n03781244', 'n03781683', 'n03781787', + 'n03782006', 'n03782190', 'n03782794', 'n03783430', 'n03784270', + 'n03784896', 'n03785016', 'n03785237', 'n03785721', 'n03786194', + 'n03786313', 'n03786621', 'n03786715', 'n03786901', 'n03787032', + 'n03787523', 'n03788047', 'n03788195', 'n03788365', 'n03788498', + 'n03788601', 'n03788914', 'n03789171', 'n03789946', 'n03790230', + 'n03790512', 'n03790755', 'n03790953', 'n03791053', 'n03791235', + 'n03792048', 'n03792334', 'n03792526', 'n03792782', 'n03792972', + 'n03793489', 'n03793850', 'n03794056', 'n03794136', 'n03794798', + 'n03795123', 'n03795269', 'n03795758', 'n03795976', 'n03796401', + 'n03796522', 'n03796605', 'n03797182', 'n03797264', 'n03797390', + 'n03797896', 'n03798061', 'n03798442', 'n03799876', 'n03800933', + 'n03801353', 'n03801533', 'n03801671', 'n03801760', 'n03801880', + 'n03802007', 'n03802393', 'n03802643', 'n03803284', 'n03804744', + 'n03805180', 
'n03805280', 'n03805725', 'n03809312', 'n03809603', + 'n03810952', 'n03811295', 'n03811444', 'n03811847', 'n03811965', + 'n03812924', 'n03813078', 'n03814639', 'n03814817', 'n03814906', + 'n03815149', 'n03815482', 'n03815615', 'n03816005', 'n03816136', + 'n03816530', 'n03816849', 'n03817191', 'n03817647', 'n03818343', + 'n03819336', 'n03819448', 'n03819595', 'n03819994', 'n03820318', + 'n03820728', 'n03821518', 'n03822171', 'n03822504', 'n03822656', + 'n03822767', 'n03823111', 'n03823216', 'n03823312', 'n03824381', + 'n03824713', 'n03825080', 'n03825271', 'n03825788', 'n03826039', + 'n03826186', 'n03827536', 'n03828020', 'n03829954', 'n03831382', + 'n03832144', 'n03832673', 'n03834040', 'n03835197', 'n03836062', + 'n03836451', 'n03836906', 'n03836976', 'n03837422', 'n03837606', + 'n03837698', 'n03837869', 'n03838298', 'n03838899', 'n03839424', + 'n03839671', 'n03840681', 'n03840823', 'n03841143', 'n03841290', + 'n03841666', 'n03842012', 'n03842156', 'n03842377', 'n03842986', + 'n03843316', 'n03843438', 'n03843555', 'n03844045', 'n03844233', + 'n03844673', 'n03844815', 'n03845190', 'n03846100', 'n03846234', + 'n03846431', 'n03846677', 'n03847471', 'n03847823', 'n03848168', + 'n03848348', 'n03849679', 'n03849814', 'n03850053', 'n03850245', + 'n03850492', 'n03851341', 'n03851787', 'n03852280', 'n03852688', + 'n03853924', 'n03854065', 'n03854421', 'n03854506', 'n03854722', + 'n03854815', 'n03855214', 'n03855333', 'n03855604', 'n03855756', + 'n03856012', 'n03856465', 'n03857291', 'n03857687', 'n03857828', + 'n03858085', 'n03858183', 'n03858418', 'n03859000', 'n03859170', + 'n03859280', 'n03859495', 'n03859608', 'n03859958', 'n03860404', + 'n03861271', 'n03861430', 'n03861842', 'n03862676', 'n03862862', + 'n03863108', 'n03863262', 'n03863923', 'n03864139', 'n03864356', + 'n03864692', 'n03865371', 'n03865557', 'n03865949', 'n03866082', + 'n03868242', 'n03868406', 'n03868643', 'n03868863', 'n03870105', + 'n03870672', 'n03870980', 'n03871083', 'n03871371', 'n03871524', + 'n03871628', 'n03871724', 'n03873416', 'n03873699', 'n03874138', + 'n03874293', 'n03874487', 'n03874599', 'n03874823', 'n03875218', + 'n03875806', 'n03875955', 'n03876231', 'n03877351', 'n03877472', + 'n03877674', 'n03877845', 'n03878066', 'n03878211', 'n03878828', + 'n03878963', 'n03879705', 'n03880323', 'n03880531', 'n03882611', + 'n03882960', 'n03883054', 'n03883385', 'n03883524', 'n03884397', + 'n03884778', 'n03884926', 'n03885028', 'n03885194', 'n03885293', + 'n03885535', 'n03885669', 'n03885788', 'n03885904', 'n03886053', + 'n03886641', 'n03886762', 'n03887185', 'n03887330', 'n03887697', + 'n03888257', 'n03888605', 'n03889503', 'n03889726', 'n03889871', + 'n03890093', 'n03890233', 'n03890514', 'n03891051', 'n03891251', + 'n03891332', 'n03891538', 'n03892178', 'n03892425', 'n03892557', + 'n03892728', 'n03894051', 'n03894379', 'n03894677', 'n03895866', + 'n03896103', 'n03896233', 'n03896419', 'n03896526', 'n03897943', + 'n03898129', 'n03898271', 'n03898395', 'n03898633', 'n03899768', + 'n03899933', 'n03900393', 'n03900979', 'n03901229', 'n03901750', + 'n03901974', 'n03902125', 'n03902482', 'n03902756', 'n03903424', + 'n03903733', 'n03903868', 'n03904060', 'n03904183', 'n03904433', + 'n03904657', 'n03904782', 'n03904909', 'n03905947', 'n03906224', + 'n03906463', 'n03906997', 'n03908204', 'n03908618', 'n03908714', + 'n03909020', 'n03909160', 'n03909406', 'n03911513', 'n03911658', + 'n03911767', 'n03911866', 'n03912218', 'n03913343', 'n03914106', + 'n03914337', 'n03914438', 'n03914583', 'n03914831', 'n03915118', + 'n03915437', 
'n03915900', 'n03916031', 'n03916470', 'n03916720', + 'n03917198', 'n03917814', 'n03918480', 'n03918737', 'n03919096', + 'n03919289', 'n03919430', 'n03920288', 'n03920641', 'n03920737', + 'n03920867', 'n03923379', 'n03923918', 'n03924069', 'n03924679', + 'n03926148', 'n03927091', 'n03927299', 'n03927539', 'n03928116', + 'n03928814', 'n03929660', 'n03929855', 'n03930313', 'n03930630', + 'n03931765', 'n03931885', 'n03933933', 'n03934042', 'n03934229', + 'n03934311', 'n03934565', 'n03934656', 'n03935116', 'n03935234', + 'n03935335', 'n03936466', 'n03937543', 'n03937835', 'n03937931', + 'n03938037', 'n03938244', 'n03938401', 'n03938522', 'n03938725', + 'n03939178', 'n03939677', 'n03939844', 'n03940256', 'n03941013', + 'n03941231', 'n03941417', 'n03941684', 'n03942813', 'n03942920', + 'n03943115', 'n03943266', 'n03943714', 'n03943920', 'n03944024', + 'n03944138', 'n03944341', 'n03946076', 'n03946162', 'n03947466', + 'n03947798', 'n03947888', 'n03948242', 'n03948459', 'n03948830', + 'n03948950', 'n03949145', 'n03949317', 'n03950228', 'n03950537', + 'n03950899', 'n03952576', 'n03953901', 'n03954393', 'n03954731', + 'n03955296', 'n03955489', 'n03956157', 'n03956623', 'n03956785', + 'n03956922', 'n03957315', 'n03957420', 'n03957762', 'n03957991', + 'n03958227', 'n03958752', 'n03959014', 'n03959701', 'n03960374', + 'n03960490', 'n03961711', 'n03961939', 'n03962852', 'n03963198', + 'n03963294', 'n03963645', 'n03964495', 'n03964611', 'n03965456', + 'n03965907', 'n03966206', 'n03966976', 'n03967270', 'n03967396', + 'n03967562', 'n03967942', 'n03968293', 'n03968581', 'n03968728', + 'n03970156', 'n03970546', 'n03971218', 'n03973285', 'n03973402', + 'n03973628', 'n03973839', 'n03973945', 'n03974070', 'n03974915', + 'n03975035', 'n03975657', 'n03975788', 'n03976467', 'n03976657', + 'n03977592', 'n03977966', 'n03978421', 'n03978686', 'n03978815', + 'n03978966', 'n03980026', 'n03980478', 'n03980874', 'n03981340', + 'n03981566', 'n03981760', 'n03981924', 'n03982232', 'n03982331', + 'n03982430', 'n03982642', 'n03982895', 'n03983396', 'n03983612', + 'n03984234', 'n03984381', 'n03984643', 'n03984759', 'n03985069', + 'n03985232', 'n03985441', 'n03985881', 'n03986224', 'n03986355', + 'n03986562', 'n03986704', 'n03986949', 'n03987266', 'n03987376', + 'n03987990', 'n03988170', 'n03989665', 'n03990474', 'n03991062', + 'n03991202', 'n03991646', 'n03991837', 'n03992325', 'n03992436', + 'n03992509', 'n03992703', 'n03993053', 'n03993180', 'n03993403', + 'n03993703', 'n03994008', 'n03994614', 'n03995265', 'n03995372', + 'n03995535', 'n03995856', 'n03996145', 'n03996416', 'n03996849', + 'n03998194', 'n03998333', 'n03999160', 'n03999992', 'n04000311', + 'n04000592', 'n04000998', 'n04001265', 'n04001499', 'n04001845', + 'n04002262', 'n04003241', 'n04003856', 'n04004210', 'n04004475', + 'n04004767', 'n04004990', 'n04005197', 'n04005630', 'n04008385', + 'n04008634', 'n04009552', 'n04009801', 'n04011827', 'n04012084', + 'n04012482', 'n04013729', 'n04015908', 'n04016240', 'n04016576', + 'n04016684', 'n04016846', 'n04018155', 'n04018667', 'n04019101', + 'n04019541', 'n04019696', 'n04020087', 'n04020298', 'n04020912', + 'n04021028', 'n04021798', 'n04022332', 'n04023695', 'n04023962', + 'n04024274', 'n04024862', 'n04024983', 'n04025508', 'n04026053', + 'n04026180', 'n04026417', 'n04026813', 'n04026918', 'n04027023', + 'n04027706', 'n04028074', 'n04028221', 'n04028315', 'n04028581', + 'n04028764', 'n04029647', 'n04029734', 'n04030274', 'n04030518', + 'n04032603', 'n04033425', 'n04033901', 'n04033995', 'n04034262', + 'n04035836', 
'n04035912', 'n04036303', 'n04037220', 'n04037443', + 'n04037964', 'n04038231', 'n04038338', 'n04038440', 'n04038727', + 'n04039381', 'n04039742', 'n04039848', 'n04040247', 'n04040373', + 'n04040759', 'n04041069', 'n04041243', 'n04041408', 'n04041544', + 'n04041747', 'n04042358', 'n04043411', 'n04043733', 'n04044307', + 'n04044498', 'n04044716', 'n04045255', 'n04045397', 'n04045644', + 'n04046091', 'n04046277', 'n04046400', 'n04046590', 'n04046974', + 'n04047401', 'n04048441', 'n04049303', 'n04049405', 'n04049585', + 'n04049753', 'n04050066', 'n04050313', 'n04050933', 'n04051549', + 'n04051825', 'n04052442', 'n04052658', 'n04052757', 'n04053508', + 'n04053677', 'n04054361', 'n04054670', 'n04055180', 'n04056180', + 'n04056413', 'n04056932', 'n04057047', 'n04057215', 'n04057981', + 'n04058096', 'n04058239', 'n04058594', 'n04059157', 'n04059516', + 'n04059947', 'n04060647', 'n04061681', 'n04061793', 'n04061969', + 'n04062428', 'n04063154', 'n04063373', 'n04063868', 'n04064401', + 'n04064747', 'n04064862', 'n04065272', 'n04065464', 'n04065789', + 'n04066270', 'n04067472', 'n04067658', 'n04067818', 'n04067921', + 'n04068441', 'n04068601', 'n04069276', 'n04069434', 'n04070003', + 'n04070207', 'n04070415', 'n04070727', 'n04071263', 'n04071393', + 'n04072193', 'n04072551', 'n04072960', 'n04073948', 'n04074185', + 'n04074963', 'n04075291', 'n04075468', 'n04075715', 'n04075916', + 'n04076284', 'n04076713', 'n04077430', 'n04078574', 'n04079244', + 'n04079933', 'n04080138', 'n04080454', 'n04080705', 'n04080833', + 'n04081281', 'n04081699', 'n04082562', 'n04082710', 'n04082886', + 'n04083309', 'n04083800', 'n04084889', 'n04086273', 'n04086446', + 'n04087432', 'n04087709', 'n04087826', 'n04089376', 'n04089666', + 'n04089836', 'n04089976', 'n04090263', 'n04091097', 'n04091693', + 'n04093625', 'n04093775', 'n04094720', 'n04095109', 'n04095210', + 'n04095342', 'n04095577', 'n04096066', 'n04097373', 'n04097760', + 'n04097866', 'n04098513', 'n04099003', 'n04099175', 'n04099429', + 'n04099969', 'n04100519', 'n04101701', 'n04102037', 'n04102162', + 'n04102285', 'n04102406', 'n04102618', 'n04102872', 'n04103094', + 'n04103206', 'n04103364', 'n04103665', 'n04103769', 'n04103918', + 'n04104147', 'n04104384', 'n04104500', 'n04104770', 'n04105068', + 'n04105704', 'n04105893', 'n04107743', 'n04108268', 'n04108822', + 'n04110178', 'n04110955', 'n04111190', 'n04111414', 'n04111531', + 'n04111668', 'n04112147', 'n04112252', 'n04112430', 'n04112579', + 'n04112654', 'n04112752', 'n04113194', 'n04113316', 'n04113406', + 'n04113641', 'n04113765', 'n04114069', 'n04114844', 'n04115144', + 'n04115256', 'n04115456', 'n04115802', 'n04115996', 'n04116098', + 'n04116294', 'n04116512', 'n04117464', 'n04118021', 'n04118538', + 'n04118635', 'n04118776', 'n04119091', 'n04119230', 'n04119360', + 'n04119478', 'n04119751', 'n04120489', 'n04120842', 'n04121426', + 'n04121511', 'n04121728', 'n04122349', 'n04122492', 'n04122578', + 'n04122685', 'n04122825', 'n04123026', 'n04123448', 'n04123567', + 'n04123740', 'n04124098', 'n04124202', 'n04124370', 'n04124488', + 'n04125021', 'n04125257', 'n04125853', 'n04126066', 'n04127249', + 'n04127395', 'n04127521', 'n04127633', 'n04127904', 'n04128413', + 'n04128499', 'n04128710', 'n04128837', 'n04130143', 'n04130257', + 'n04130907', 'n04131208', 'n04131368', 'n04131690', 'n04131929', + 'n04132158', 'n04132603', 'n04132985', 'n04133789', 'n04134008', + 'n04134170', 'n04134523', 'n04134632', 'n04135024', 'n04135118', + 'n04135315', 'n04135710', 'n04136045', 'n04136161', 'n04136333', + 'n04136510', 
'n04136800', 'n04137089', 'n04137217', 'n04137355', + 'n04137444', 'n04137773', 'n04137897', 'n04138261', 'n04138977', + 'n04139140', 'n04139395', 'n04139859', 'n04140064', 'n04140631', + 'n04141076', 'n04141198', 'n04141327', 'n04141712', 'n04141838', + 'n04141975', 'n04142434', 'n04142731', 'n04142999', 'n04143140', + 'n04143897', 'n04144241', 'n04144539', 'n04145863', 'n04146050', + 'n04146343', 'n04146504', 'n04146614', 'n04146862', 'n04147183', + 'n04147793', 'n04148054', 'n04148579', 'n04148703', 'n04149083', + 'n04149374', 'n04149813', 'n04150153', 'n04150980', 'n04152387', + 'n04152593', 'n04153025', 'n04153751', 'n04154152', 'n04154340', + 'n04154565', 'n04154938', 'n04155068', 'n04156040', 'n04156140', + 'n04156946', 'n04157099', 'n04157320', 'n04158807', 'n04158956', + 'n04160372', 'n04160586', 'n04160847', 'n04161358', 'n04161981', + 'n04162433', 'n04162706', 'n04163530', 'n04164002', 'n04164406', + 'n04164757', 'n04164868', 'n04165409', 'n04166281', 'n04167346', + 'n04168199', 'n04169437', 'n04170037', 'n04170933', 'n04171208', + 'n04171459', 'n04171629', 'n04171831', 'n04172107', 'n04172230', + 'n04172342', 'n04172776', 'n04172904', 'n04173046', 'n04173511', + 'n04173907', 'n04174101', 'n04175039', 'n04175147', 'n04176068', + 'n04176190', 'n04176295', 'n04177041', 'n04177755', 'n04177820', + 'n04177931', 'n04178190', 'n04178329', 'n04179126', 'n04179712', + 'n04179824', 'n04179913', 'n04180063', 'n04180229', 'n04180888', + 'n04181228', 'n04181561', 'n04182152', 'n04182322', 'n04183217', + 'n04183329', 'n04184316', 'n04184435', 'n04184880', 'n04185071', + 'n04185529', 'n04185804', 'n04185946', 'n04186051', 'n04186268', + 'n04186455', 'n04186848', 'n04187061', 'n04187233', 'n04187547', + 'n04187970', 'n04188179', 'n04189282', 'n04189651', 'n04189816', + 'n04190052', 'n04190376', 'n04190997', 'n04191595', 'n04191943', + 'n04192238', 'n04192698', 'n04192858', 'n04193377', 'n04194127', + 'n04194289', 'n04196502', 'n04197110', 'n04197391', 'n04197781', + 'n04198355', 'n04198453', 'n04198562', 'n04198722', 'n04198797', + 'n04199027', 'n04200000', 'n04200258', 'n04200537', 'n04200800', + 'n04200908', 'n04201064', 'n04201297', 'n04201733', 'n04202417', + 'n04204081', 'n04204238', 'n04204347', 'n04205318', 'n04205505', + 'n04206225', 'n04206356', 'n04206570', 'n04206790', 'n04207151', + 'n04207343', 'n04207596', 'n04207763', 'n04207903', 'n04208065', + 'n04208210', 'n04208427', 'n04208760', 'n04208936', 'n04209133', + 'n04209239', 'n04209509', 'n04209613', 'n04210120', 'n04210288', + 'n04210390', 'n04210591', 'n04211219', 'n04211356', 'n04211528', + 'n04211857', 'n04211970', 'n04212165', 'n04212282', 'n04212467', + 'n04213353', 'n04214046', 'n04214282', 'n04215153', 'n04215402', + 'n04216634', 'n04216860', 'n04216963', 'n04217546', 'n04217882', + 'n04218564', 'n04219185', 'n04219424', 'n04220250', 'n04221823', + 'n04222210', 'n04222307', 'n04222470', 'n04222723', 'n04223170', + 'n04223299', 'n04224543', 'n04224842', 'n04225031', 'n04225729', + 'n04225987', 'n04226464', 'n04226826', 'n04227144', 'n04227900', + 'n04228054', 'n04228215', 'n04228581', 'n04228693', 'n04229007', + 'n04229107', 'n04229480', 'n04229737', 'n04229816', 'n04229959', + 'n04230387', 'n04230603', 'n04230808', 'n04231272', 'n04231693', + 'n04231905', 'n04232153', 'n04232800', 'n04233124', 'n04233715', + 'n04234455', 'n04234887', 'n04235291', 'n04235860', 'n04236377', + 'n04236809', 'n04236935', 'n04237423', 'n04238128', 'n04238321', + 'n04238617', 'n04238763', 'n04239074', 'n04239436', 'n04239786', + 'n04240752', 
'n04241249', 'n04241573', 'n04242084', 'n04242408', + 'n04242704', 'n04243546', 'n04243941', 'n04244379', 'n04244997', + 'n04245508', 'n04246060', 'n04246271', 'n04246731', 'n04246855', + 'n04247011', 'n04247630', 'n04247736', 'n04247876', 'n04248396', + 'n04248507', 'n04248851', 'n04249415', 'n04249582', 'n04249882', + 'n04250224', 'n04250473', 'n04250692', 'n04250850', 'n04251144', + 'n04251701', 'n04251791', 'n04252077', 'n04252225', 'n04252331', + 'n04252560', 'n04252653', 'n04253057', 'n04253168', 'n04253931', + 'n04254009', 'n04254120', 'n04254680', 'n04254777', 'n04255163', + 'n04255586', 'n04255899', 'n04256520', 'n04256891', 'n04257223', + 'n04257684', 'n04257790', 'n04257986', 'n04258138', 'n04258333', + 'n04258438', 'n04258618', 'n04258732', 'n04258859', 'n04259630', + 'n04260364', 'n04261116', 'n04261281', 'n04261638', 'n04262161', + 'n04263257', 'n04263336', 'n04263502', 'n04264628', 'n04264765', + 'n04264914', 'n04265275', 'n04265904', 'n04266014', 'n04266162', + 'n04266375', 'n04266486', 'n04266968', 'n04267435', 'n04269270', + 'n04269822', 'n04269944', 'n04270147', 'n04270371', 'n04270891', + 'n04271531', 'n04272054', 'n04272389', 'n04272928', 'n04273285', + 'n04273569', 'n04273659', 'n04273796', 'n04273972', 'n04274985', + 'n04275175', 'n04275548', 'n04275661', 'n04277352', 'n04277493', + 'n04277826', 'n04278247', 'n04278353', 'n04278447', 'n04279172', + 'n04279353', 'n04279462', 'n04281260', 'n04281375', 'n04282231', + 'n04282494', 'n04282872', 'n04282992', 'n04283096', 'n04283255', + 'n04283378', 'n04283585', 'n04283905', 'n04284002', 'n04284341', + 'n04284438', 'n04284572', 'n04284869', 'n04285008', 'n04285146', + 'n04285803', 'n04285965', 'n04286575', 'n04287451', 'n04287747', + 'n04287898', 'n04288272', 'n04288533', 'n04289027', 'n04289195', + 'n04289576', 'n04289690', 'n04289827', 'n04290079', 'n04290259', + 'n04290507', 'n04290615', 'n04291992', 'n04292080', 'n04292414', + 'n04292572', 'n04292921', 'n04293119', 'n04294426', 'n04294614', + 'n04294879', 'n04295081', 'n04295571', 'n04295881', 'n04296562', + 'n04297098', 'n04297750', 'n04297847', 'n04298053', 'n04298661', + 'n04299215', 'n04299370', 'n04299963', 'n04300643', 'n04301000', + 'n04301760', 'n04303357', 'n04303497', 'n04304215', 'n04304375', + 'n04304680', 'n04305210', 'n04305323', 'n04305572', 'n04306080', + 'n04306592', 'n04306847', 'n04307767', 'n04307986', 'n04308084', + 'n04308273', 'n04308397', 'n04309049', 'n04309348', 'n04309548', + 'n04309833', 'n04310018', 'n04310157', 'n04310721', 'n04310904', + 'n04311004', 'n04311174', 'n04311595', 'n04312154', 'n04312432', + 'n04313220', 'n04313503', 'n04313628', 'n04314522', 'n04314914', + 'n04315342', 'n04315713', 'n04315948', 'n04316498', 'n04317063', + 'n04317175', 'n04317325', 'n04317420', 'n04317833', 'n04317976', + 'n04318787', 'n04318892', 'n04319937', 'n04320973', 'n04321453', + 'n04322026', 'n04322801', 'n04323819', 'n04324297', 'n04324387', + 'n04325041', 'n04325704', 'n04326547', 'n04326676', 'n04326799', + 'n04326896', 'n04327204', 'n04327682', 'n04328186', 'n04328329', + 'n04328946', 'n04329834', 'n04329958', 'n04330267', 'n04330340', + 'n04330746', 'n04330998', 'n04331277', 'n04331639', 'n04332074', + 'n04332243', 'n04332580', 'n04333129', 'n04333869', 'n04334105', + 'n04334365', 'n04334599', 'n04335209', 'n04335435', 'n04335693', + 'n04335886', 'n04336792', 'n04337287', 'n04338517', 'n04338963', + 'n04339879', 'n04340521', 'n04340750', 'n04340935', 'n04341133', + 'n04341686', 'n04344003', 'n04344734', 'n04344873', 'n04345028', + 'n04345201', 
'n04346003', 'n04346157', 'n04346328', 'n04346428', + 'n04347119', 'n04347519', 'n04347754', 'n04348359', 'n04349306', + 'n04349401', 'n04350458', 'n04350581', 'n04350769', 'n04350905', + 'n04351699', 'n04353573', 'n04354026', 'n04354182', 'n04354487', + 'n04354589', 'n04355115', 'n04355267', 'n04355338', 'n04355511', + 'n04355933', 'n04356056', 'n04356595', 'n04356925', 'n04357121', + 'n04357314', 'n04357531', 'n04358117', 'n04358491', 'n04358707', + 'n04358874', 'n04359335', 'n04359500', 'n04360798', 'n04360914', + 'n04361095', 'n04361260', 'n04363082', 'n04363777', 'n04363991', + 'n04364160', 'n04364545', 'n04365328', 'n04366033', 'n04366116', + 'n04366367', 'n04367011', 'n04367371', 'n04367480', 'n04367746', + 'n04367950', 'n04368496', 'n04369025', 'n04369282', 'n04370048', + 'n04370288', 'n04370456', 'n04370774', 'n04371050', 'n04371430', + 'n04371563', 'n04371774', 'n04372370', 'n04373089', 'n04373428', + 'n04373704', 'n04373795', 'n04373894', 'n04374315', 'n04374735', + 'n04375241', 'n04375405', 'n04375615', 'n04376400', 'n04376876', + 'n04377057', 'n04378956', 'n04379243', 'n04379964', 'n04380255', + 'n04380346', 'n04380533', 'n04380916', 'n04381073', 'n04381587', + 'n04381724', 'n04381860', 'n04381994', 'n04382438', 'n04382695', + 'n04382880', 'n04383015', 'n04383130', 'n04383839', 'n04384593', + 'n04384910', 'n04385536', 'n04385799', 'n04386051', 'n04386664', + 'n04386792', 'n04387095', 'n04387201', 'n04387261', 'n04387400', + 'n04387706', 'n04387932', 'n04388743', 'n04389033', 'n04389430', + 'n04389521', 'n04389718', 'n04389854', 'n04390577', 'n04390873', + 'n04390977', 'n04391445', 'n04391838', 'n04392113', 'n04392526', + 'n04392764', 'n04392985', 'n04393095', 'n04393549', 'n04393808', + 'n04393913', 'n04394630', 'n04395024', 'n04395106', 'n04395651', + 'n04396335', 'n04396808', 'n04396902', 'n04397027', 'n04397452', + 'n04397645', 'n04397768', 'n04398044', 'n04398497', 'n04398688', + 'n04398834', 'n04398951', 'n04399158', 'n04399537', 'n04399846', + 'n04400109', 'n04400289', 'n04400737', 'n04401088', 'n04401578', + 'n04401680', 'n04401828', 'n04401949', 'n04402057', 'n04402449', + 'n04402580', 'n04402746', 'n04402984', 'n04403413', 'n04403524', + 'n04403638', 'n04403925', 'n04404412', 'n04404817', 'n04404997', + 'n04405540', 'n04405762', 'n04405907', 'n04406239', 'n04406817', + 'n04407435', 'n04407686', 'n04408871', 'n04409011', 'n04409128', + 'n04409384', 'n04409515', 'n04409625', 'n04409806', 'n04410086', + 'n04411264', 'n04412097', 'n04412416', 'n04413969', 'n04414199', + 'n04414319', 'n04414476', 'n04414675', 'n04414909', 'n04415663', + 'n04416005', 'n04417086', 'n04417180', 'n04417361', 'n04417672', + 'n04417809', 'n04418357', 'n04419073', 'n04419642', 'n04419868', + 'n04421872', 'n04422409', 'n04422727', 'n04422875', 'n04423845', + 'n04424692', 'n04425804', 'n04426316', 'n04426427', 'n04427715', + 'n04428191', 'n04428634', 'n04429376', 'n04430475', 'n04430896', + 'n04431025', 'n04431745', 'n04432203', 'n04432662', 'n04433585', + 'n04434207', 'n04434531', 'n04434932', 'n04435180', 'n04435653', + 'n04436012', 'n04436185', 'n04436329', 'n04437953', 'n04438304', + 'n04438507', 'n04438897', 'n04439585', 'n04439712', 'n04440963', + 'n04441662', 'n04441790', 'n04442312', 'n04442441', 'n04442741', + 'n04443164', 'n04443257', 'n04443766', 'n04444749', 'n04445040', + 'n04445154', 'n04445327', 'n04445952', 'n04446276', 'n04446844', + 'n04447028', 'n04447276', 'n04447443', 'n04447861', 'n04448070', + 'n04448361', 'n04449290', 'n04449966', 'n04450133', 'n04450243', + 'n04450640', 
'n04450749', 'n04450994', 'n04451318', 'n04451818', + 'n04452528', 'n04452615', 'n04452757', 'n04453037', 'n04453156', + 'n04453390', 'n04453666', 'n04454908', 'n04455250', 'n04455652', + 'n04456115', 'n04457157', 'n04457474', 'n04457767', 'n04457910', + 'n04458633', 'n04458843', 'n04459018', 'n04459362', 'n04459610', + 'n04459773', 'n04459909', 'n04460130', 'n04461437', 'n04461570', + 'n04461696', 'n04461879', 'n04462011', 'n04462240', 'n04463679', + 'n04464615', 'n04464852', 'n04465050', 'n04465358', 'n04465501', + 'n04465666', 'n04466871', 'n04467099', 'n04467307', 'n04467665', + 'n04468005', 'n04469003', 'n04469514', 'n04469813', 'n04471148', + 'n04471632', 'n04472563', 'n04473108', 'n04474035', 'n04474187', + 'n04474466', 'n04475411', 'n04475631', 'n04476116', 'n04476259', + 'n04476831', 'n04476972', 'n04477219', 'n04477387', 'n04477548', + 'n04478512', 'n04479046', 'n04479823', 'n04479939', 'n04480033', + 'n04480853', 'n04482177', 'n04482297', 'n04482393', 'n04483073', + 'n04483307', 'n04483925', 'n04484432', 'n04485082', 'n04485423', + 'n04485884', 'n04486054', 'n04486213', 'n04486934', 'n04487081', + 'n04487394', 'n04487724', 'n04488202', 'n04488427', 'n04488530', + 'n04488742', 'n04488857', 'n04489008', 'n04489695', 'n04489817', + 'n04490091', 'n04491388', 'n04491638', 'n04491769', 'n04492060', + 'n04492375', 'n04492749', 'n04493381', 'n04494204', 'n04495698', + 'n04495843', 'n04496614', 'n04496726', 'n04496872', 'n04497442', + 'n04497570', 'n04497801', 'n04498389', 'n04499062', 'n04499446', + 'n04500060', 'n04501370', 'n04501550', 'n04501837', 'n04501947', + 'n04502059', 'n04502197', 'n04502502', 'n04502670', 'n04502851', + 'n04503413', 'n04503593', 'n04504141', 'n04505036', 'n04505345', + 'n04505470', 'n04506289', 'n04506506', 'n04506688', 'n04507155', + 'n04508163', 'n04508489', 'n04508949', 'n04509171', 'n04509260', + 'n04509417', 'n04510706', 'n04511002', 'n04513827', 'n04513998', + 'n04514241', 'n04515003', 'n04516116', 'n04516214', 'n04516354', + 'n04516672', 'n04517211', 'n04517408', 'n04517823', 'n04518132', + 'n04518343', 'n04518643', 'n04518764', 'n04519153', 'n04520170', + 'n04520382', 'n04520784', 'n04521863', 'n04522168', 'n04523525', + 'n04523831', 'n04524142', 'n04524313', 'n04524941', 'n04525038', + 'n04525191', 'n04525305', 'n04525417', 'n04525584', 'n04525821', + 'n04526964', 'n04527648', 'n04528079', 'n04528968', 'n04529108', + 'n04529681', 'n04529962', 'n04530283', 'n04530566', 'n04531098', + 'n04531873', 'n04532106', 'n04532398', 'n04532670', 'n04532831', + 'n04533199', 'n04533499', 'n04533594', 'n04533700', 'n04533802', + 'n04533946', 'n04534127', 'n04534359', 'n04534520', 'n04534895', + 'n04535252', 'n04535370', 'n04535524', 'n04536153', 'n04536335', + 'n04536595', 'n04536866', 'n04538552', 'n04539053', 'n04539203', + 'n04539794', 'n04540053', 'n04540255', 'n04541320', 'n04541987', + 'n04542095', 'n04542715', 'n04542858', 'n04542943', 'n04543158', + 'n04543636', 'n04543772', 'n04543996', 'n04544325', 'n04544450', + 'n04545305', 'n04545748', 'n04545858', 'n04546194', 'n04546340', + 'n04547592', 'n04548280', 'n04548362', 'n04549028', 'n04549122', + 'n04549629', 'n04549919', 'n04550184', 'n04551055', 'n04552348', + 'n04552696', 'n04553561', 'n04553703', 'n04554211', 'n04554406', + 'n04554684', 'n04554871', 'n04555291', 'n04555400', 'n04555600', + 'n04555700', 'n04555897', 'n04556408', 'n04556533', 'n04556948', + 'n04557648', 'n04557751', 'n04558478', 'n04559166', 'n04559451', + 'n04559730', 'n04559910', 'n04560113', 'n04560292', 'n04560804', + 'n04560882', 
'n04561287', 'n04561422', 'n04561734', 'n04562262', + 'n04562496', 'n04562935', 'n04563204', 'n04563413', 'n04564278', + 'n04564581', 'n04565375', 'n04566257', 'n04566561', 'n04566756', + 'n04568069', 'n04568557', 'n04568841', 'n04569063', 'n04569822', + 'n04570214', 'n04570815', 'n04570958', 'n04571292', 'n04571566', + 'n04571686', 'n04571958', 'n04573281', 'n04573379', 'n04573513', + 'n04573937', 'n04574067', 'n04574999', 'n04575723', 'n04575824', + 'n04576002', 'n04576211', 'n04577426', 'n04577769', 'n04578934', + 'n04579056', 'n04579145', 'n04579230', 'n04579432', 'n04579667', + 'n04579986', 'n04580493', 'n04581102', 'n04581829', 'n04582205', + 'n04582349', 'n04582771', 'n04582869', 'n04583212', 'n04583620', + 'n04584207', 'n04584373', 'n04585128', 'n04585745', 'n04585980', + 'n04586072', 'n04586581', 'n04586932', 'n04587327', 'n04587404', + 'n04587559', 'n04587648', 'n04588739', 'n04589190', 'n04589325', + 'n04589593', 'n04589890', 'n04590021', 'n04590129', 'n04590263', + 'n04590553', 'n04590746', 'n04590933', 'n04591056', 'n04591157', + 'n04591517', 'n04591713', 'n04591887', 'n04592005', 'n04592099', + 'n04592465', 'n04592741', 'n04593077', 'n04593185', 'n04593376', + 'n04593524', 'n04593866', 'n04594114', 'n04594218', 'n04594489', + 'n04594828', 'n04595028', 'n04595285', 'n04595855', 'n04596742', + 'n04596852', 'n04597309', 'n04597400', 'n04597804', 'n04597913', + 'n04598318', 'n04598582', 'n04598965', 'n04599124', 'n04599235', + 'n04600312', 'n04600912', 'n04602762', 'n04602956', 'n04603399', + 'n04603729', 'n04603872', 'n04604644', 'n04605163', 'n04605321', + 'n04605572', 'n04605726', 'n04606251', 'n04606574', 'n04607035', + 'n04607242', 'n04607869', 'n04608329', 'n04608435', 'n04608567', + 'n04608923', 'n04609531', 'n04609651', 'n04610013', 'n04610274', + 'n04610503', 'n04610676', 'n04611916', 'n04612026', 'n04612373', + 'n04612504', 'n04613015', 'n04613696', 'n04613939', 'n04614655', + 'n04615226', 'n04615644', 'n04950952', 'n04951071', 'n04951186', + 'n04953296', 'n04955160', 'n04959672', 'n04960277', 'n04960582', + 'n04961062', 'n04961331', 'n04961691', 'n04962062', 'n04962240', + 'n04963307', 'n04963588', 'n04963740', 'n04964001', 'n04964799', + 'n04964878', 'n04965179', 'n04965451', 'n04965661', 'n04966543', + 'n04966941', 'n04967191', 'n04967674', 'n04967801', 'n04967882', + 'n04968056', 'n04968139', 'n04968749', 'n04968895', 'n04969242', + 'n04969540', 'n04969798', 'n04969952', 'n04970059', 'n04970398', + 'n04970470', 'n04970916', 'n04971211', 'n04971313', 'n04972350', + 'n04972451', 'n04972801', 'n04973291', 'n04973386', 'n04973585', + 'n04973816', 'n04974859', 'n04976319', 'n04976952', 'n04977412', + 'n04979002', 'n04981658', 'n05218119', 'n05238282', 'n05239437', + 'n05242928', 'n05244934', 'n05245192', 'n05258051', 'n05259914', + 'n05260127', 'n05260240', 'n05261310', 'n05262422', 'n05262534', + 'n05262698', 'n05263183', 'n05263448', 'n05282652', 'n05302499', + 'n05399034', 'n05399243', 'n05418717', 'n05450617', 'n05451384', + 'n05453657', 'n05458576', 'n05486510', 'n05526957', 'n05538625', + 'n05578095', 'n05581932', 'n05586759', 'n05716342', 'n06255081', + 'n06263609', 'n06266633', 'n06266973', 'n06267145', 'n06267564', + 'n06267655', 'n06267758', 'n06267893', 'n06267991', 'n06271778', + 'n06272290', 'n06272612', 'n06272803', 'n06273414', 'n06273555', + 'n06273743', 'n06273986', 'n06274760', 'n06275095', 'n06275353', + 'n06275471', 'n06276501', 'n06276697', 'n06277135', 'n06277280', + 'n06278338', 'n06278475', 'n06281040', 'n06359193', 'n06359467', + 'n06415688', 
'n06417096', 'n06470073', 'n06592281', 'n06595351', + 'n06596364', 'n06596474', 'n06596607', 'n06596727', 'n06785654', + 'n06793231', 'n06794110', 'n06874185', 'n06883725', 'n06892775', + 'n06998748', 'n07005523', 'n07248320', 'n07273802', 'n07461050', + 'n07556406', 'n07556637', 'n07556970', 'n07557434', 'n07560193', + 'n07560331', 'n07560542', 'n07560652', 'n07560903', 'n07561112', + 'n07561590', 'n07561848', 'n07562495', 'n07563207', 'n07564008', + 'n07564796', 'n07564971', 'n07565083', 'n07565161', 'n07565259', + 'n07566340', 'n07567707', 'n07567980', 'n07568502', 'n07568818', + 'n07569106', 'n07569644', 'n07570720', 'n07572616', 'n07572957', + 'n07573347', 'n07573696', 'n07574176', 'n07574426', 'n07574504', + 'n07574602', 'n07574780', 'n07574923', 'n07575076', 'n07575392', + 'n07575510', 'n07575726', 'n07575984', 'n07576182', 'n07576438', + 'n07576781', 'n07577144', 'n07577374', 'n07577538', 'n07578093', + 'n07579575', 'n07579688', 'n07579787', 'n07579917', 'n07580053', + 'n07580253', 'n07580359', 'n07580470', 'n07580592', 'n07581249', + 'n07581346', 'n07581775', 'n07581931', 'n07582152', 'n07582277', + 'n07582609', 'n07582892', 'n07583066', 'n07584110', 'n07584332', + 'n07584423', 'n07584593', 'n07585107', 'n07585208', 'n07585557', + 'n07585758', 'n07585906', 'n07586099', 'n07586318', 'n07586604', + 'n07586718', 'n07586894', 'n07587023', 'n07587111', 'n07587331', + 'n07587441', 'n07587618', 'n07587700', 'n07587962', 'n07588111', + 'n07588193', 'n07588299', 'n07588419', 'n07588574', 'n07588817', + 'n07588947', 'n07589458', 'n07589543', 'n07590320', 'n07590502', + 'n07590611', 'n07590752', 'n07591049', 'n07591473', 'n07591586', + 'n07591961', 'n07592094', 'n07592400', 'n07592481', 'n07592656', + 'n07592768', 'n07593004', 'n07593199', 'n07593471', 'n07594066', + 'n07595180', 'n07595649', 'n07595914', 'n07596684', 'n07596967', + 'n07597145', 'n07597365', 'n07598256', 'n07598734', 'n07599161', + 'n07599911', 'n07599998', 'n07600177', 'n07600285', 'n07600696', + 'n07601290', 'n07601572', 'n07601686', 'n07601809', 'n07604956', + 'n07605040', 'n07605380', 'n07605474', 'n07605597', 'n07605804', + 'n07605944', 'n07606538', 'n07606669', 'n07606764', 'n07607138', + 'n07607605', 'n07607967', 'n07608098', 'n07608339', 'n07608429', + 'n07608866', 'n07609215', 'n07609407', 'n07609632', 'n07609840', + 'n07610620', 'n07611046', 'n07611148', 'n07611267', 'n07611358', + 'n07611839', 'n07611991', 'n07612137', 'n07612367', 'n07612632', + 'n07612996', 'n07613266', 'n07613480', 'n07613815', 'n07614198', + 'n07614500', 'n07614730', 'n07614825', 'n07615190', 'n07615289', + 'n07615460', 'n07615569', 'n07615671', 'n07615774', 'n07616046', + 'n07616386', 'n07616487', 'n07616590', 'n07616748', 'n07617051', + 'n07617611', 'n07617708', 'n07617932', 'n07618029', 'n07618119', + 'n07618432', 'n07619004', 'n07619208', 'n07619409', 'n07620689', + 'n07621618', 'n07623136', 'n07624466', 'n07624666', 'n07624924', + 'n07625061', 'n07627931', 'n07628068', 'n07631926', 'n07639069', + 'n07641928', 'n07642361', 'n07642471', 'n07642742', 'n07642933', + 'n07643026', 'n07643200', 'n07643306', 'n07643474', 'n07643891', + 'n07643981', 'n07648913', 'n07648997', 'n07650903', 'n07651025', + 'n07654148', 'n07654298', 'n07655263', 'n07665438', 'n07666176', + 'n07678729', 'n07679034', 'n07679356', 'n07680313', 'n07680517', + 'n07680761', 'n07680932', 'n07681450', 'n07681691', 'n07681926', + 'n07682197', 'n07682316', 'n07682477', 'n07682624', 'n07682808', + 'n07682952', 'n07683039', 'n07683360', 'n07683490', 'n07683617', + 'n07683786', 
'n07684084', 'n07684164', 'n07684289', 'n07684517', + 'n07684600', 'n07684938', 'n07685031', 'n07685218', 'n07685399', + 'n07685546', 'n07685730', 'n07685918', 'n07686021', 'n07686202', + 'n07686720', 'n07686873', 'n07687053', 'n07687211', 'n07687381', + 'n07687469', 'n07687626', 'n07687789', 'n07688130', 'n07688624', + 'n07688898', 'n07689003', 'n07689842', 'n07690019', 'n07690152', + 'n07690273', 'n07690431', 'n07690511', 'n07690585', 'n07690739', + 'n07690892', 'n07691091', 'n07691237', 'n07691539', 'n07691650', + 'n07691758', 'n07691863', 'n07691954', 'n07692614', 'n07693048', + 'n07693223', 'n07693590', 'n07693725', 'n07693972', 'n07694403', + 'n07694516', 'n07694659', 'n07694839', 'n07695652', 'n07695742', + 'n07695878', 'n07695965', 'n07696403', 'n07696527', 'n07696625', + 'n07696728', 'n07696839', 'n07696977', 'n07697100', 'n07697313', + 'n07697537', 'n07697699', 'n07697825', 'n07698250', 'n07698401', + 'n07698543', 'n07698672', 'n07698782', 'n07700003', 'n07704054', + 'n07704205', 'n07705931', 'n07707451', 'n07708124', 'n07708398', + 'n07708685', 'n07709046', 'n07709172', 'n07709333', 'n07709881', + 'n07710283', 'n07710616', 'n07710952', 'n07711080', 'n07711232', + 'n07711371', 'n07711569', 'n07712063', 'n07712267', 'n07712382', + 'n07712559', 'n07712748', 'n07712856', 'n07712959', 'n07713074', + 'n07713267', 'n07713395', 'n07713763', 'n07713895', 'n07714078', + 'n07714188', 'n07714287', 'n07714448', 'n07714571', 'n07714802', + 'n07714895', 'n07714990', 'n07715103', 'n07715221', 'n07715407', + 'n07715561', 'n07715721', 'n07716034', 'n07716203', 'n07716358', + 'n07716504', 'n07716906', 'n07717070', 'n07717410', 'n07717556', + 'n07718472', 'n07718747', 'n07719213', 'n07719616', 'n07719839', + 'n07720277', 'n07720442', 'n07720615', 'n07720875', 'n07721018', + 'n07721195', 'n07721325', 'n07721456', 'n07721678', 'n07721942', + 'n07722052', 'n07722217', 'n07722485', 'n07722763', 'n07722888', + 'n07723039', 'n07723177', 'n07723330', 'n07723559', 'n07723968', + 'n07724269', 'n07724492', 'n07724654', 'n07724943', 'n07725255', + 'n07725376', 'n07725531', 'n07725789', 'n07725888', 'n07726095', + 'n07726525', 'n07726672', 'n07726796', 'n07727048', 'n07727458', + 'n07727578', 'n07727868', 'n07728053', 'n07728181', 'n07728391', + 'n07728585', 'n07728708', 'n07729384', 'n07729485', 'n07729828', + 'n07729926', 'n07730033', 'n07730207', 'n07730320', 'n07730406', + 'n07730708', 'n07730855', 'n07731006', 'n07731284', 'n07731587', + 'n07731767', 'n07731952', 'n07732168', 'n07732636', 'n07732747', + 'n07732904', 'n07733394', 'n07733567', 'n07733712', 'n07734017', + 'n07734183', 'n07734292', 'n07734417', 'n07734555', 'n07734744', + 'n07734879', 'n07735404', 'n07735510', 'n07735687', 'n07735803', + 'n07736087', 'n07736256', 'n07736371', 'n07736692', 'n07736813', + 'n07737745', 'n07739125', 'n07739344', 'n07739506', 'n07740033', + 'n07740220', 'n07740342', 'n07740461', 'n07740597', 'n07740954', + 'n07741138', 'n07741461', 'n07742012', 'n07742313', 'n07742704', + 'n07743224', 'n07743544', 'n07743902', 'n07744057', 'n07744246', + 'n07744430', 'n07744682', 'n07744811', 'n07745046', 'n07745466', + 'n07745940', 'n07746186', 'n07746334', 'n07746551', 'n07747055', + 'n07747607', 'n07747951', 'n07748157', 'n07748276', 'n07748416', + 'n07748574', 'n07748753', 'n07748912', 'n07749192', 'n07749312', + 'n07749446', 'n07749582', 'n07749731', 'n07749870', 'n07749969', + 'n07750146', 'n07750449', 'n07750736', 'n07750872', 'n07751004', + 'n07751148', 'n07751280', 'n07751451', 'n07751858', 'n07752109', + 'n07752377', 
'n07752514', 'n07752664', 'n07752966', 'n07753113', + 'n07753275', 'n07753592', 'n07753743', 'n07753980', 'n07754451', + 'n07754684', 'n07754894', 'n07755089', 'n07755411', 'n07755707', + 'n07755929', 'n07756325', 'n07756641', 'n07756951', 'n07757132', + 'n07757312', 'n07757511', 'n07757990', 'n07758680', 'n07759194', + 'n07759816', 'n07760153', 'n07760859', 'n07761141', 'n07761309', + 'n07761611', 'n07762114', 'n07762244', 'n07762740', 'n07762913', + 'n07763107', 'n07763629', 'n07763792', 'n07763987', 'n07764155', + 'n07764315', 'n07764630', 'n07764847', 'n07765073', 'n07765208', + 'n07765361', 'n07765862', 'n07765999', 'n07766173', 'n07766891', + 'n07767002', 'n07767171', 'n07767344', 'n07767549', 'n07767709', + 'n07767847', 'n07768068', 'n07768230', 'n07768423', 'n07768694', + 'n07768858', 'n07769584', 'n07769731', 'n07770034', 'n07770763', + 'n07771212', 'n07771731', 'n07772147', 'n07772274', 'n07772788', + 'n07772935', 'n07774596', 'n07774719', 'n07774842', 'n07775050', + 'n07775197', 'n07800740', 'n07801091', 'n07801342', 'n07801508', + 'n07801779', 'n07801892', 'n07802026', 'n07802417', 'n07802863', + 'n07802963', 'n07803093', 'n07803545', 'n07804323', 'n07804543', + 'n07804657', 'n07804771', 'n07804900', 'n07805594', 'n07805731', + 'n07805966', 'n07806120', 'n07806221', 'n07806633', 'n07806774', + 'n07807002', 'n07807171', 'n07807317', 'n07807472', 'n07807594', + 'n07807710', 'n07807834', 'n07807922', 'n07808022', 'n07808587', + 'n07808904', 'n07809096', 'n07810907', 'n07812046', 'n07812184', + 'n07814203', 'n07814390', 'n07814487', 'n07814634', 'n07814790', + 'n07815424', 'n07815588', 'n07815839', 'n07816052', 'n07816164', + 'n07816296', 'n07816398', 'n07816575', 'n07816839', 'n07817024', + 'n07817160', 'n07817315', 'n07817871', 'n07818133', 'n07818277', + 'n07818572', 'n07818689', 'n07818825', 'n07818995', 'n07819166', + 'n07819480', 'n07819769', 'n07819896', 'n07820145', 'n07820297', + 'n07820497', 'n07820683', 'n07820960', 'n07821260', 'n07821610', + 'n07821758', 'n07821919', 'n07822197', 'n07822323', 'n07822518', + 'n07822845', 'n07823105', 'n07823280', 'n07823460', 'n07823698', + 'n07823951', 'n07824191', 'n07824702', 'n07825194', 'n07825717', + 'n07825972', 'n07826091', 'n07826340', 'n07826453', 'n07826930', + 'n07827130', 'n07827284', 'n07827410', 'n07827750', 'n07827896', + 'n07828642', 'n07829248', 'n07829331', 'n07829412', 'n07830593', + 'n07831146', 'n07831267', 'n07832416', 'n07832902', 'n07834065', + 'n07834507', 'n07834618', 'n07834872', 'n07835331', 'n07835457', + 'n07835921', 'n07836838', 'n07837002', 'n07837362', 'n07837912', + 'n07838073', 'n07838233', 'n07838441', 'n07838551', 'n07840027', + 'n07840520', 'n07840804', 'n07841345', 'n07841495', 'n07841639', + 'n07841800', 'n07841907', 'n07842044', 'n07842130', 'n07842202', + 'n07842308', 'n07842433', 'n07842605', 'n07842753', 'n07843117', + 'n07843464', 'n07843636', 'n07843775', 'n07844042', 'n07844604', + 'n07844867', 'n07845087', 'n07845702', 'n07845863', 'n07846143', + 'n07847198', 'n07847453', 'n07847827', 'n07847917', 'n07848093', + 'n07848196', 'n07848338', 'n07849336', 'n07849619', 'n07849733', + 'n07849912', 'n07850083', 'n07850329', 'n07851298', 'n07851443', + 'n07851554', 'n07851641', 'n07851767', 'n07852045', 'n07852229', + 'n07852302', 'n07852614', 'n07852833', 'n07852919', 'n07853560', + 'n07854184', 'n07854982', 'n07855510', 'n07855907', 'n07857170', + 'n07857731', 'n07858114', 'n07858978', 'n07859284', 'n07859583', + 'n07859796', 'n07860103', 'n07860331', 'n07860447', 'n07860805', + 'n07860988', 
'n07861158', 'n07861557', 'n07861813', 'n07861983', + 'n07862095', 'n07862244', 'n07862348', 'n07862461', 'n07862611', + 'n07863374', 'n07863547', 'n07863802', 'n07864065', 'n07864756', + 'n07864934', 'n07865105', 'n07865196', 'n07865484', 'n07866015', + 'n07866151', 'n07866277', 'n07866409', 'n07866723', 'n07866868', + 'n07867021', 'n07867164', 'n07867324', 'n07867421', 'n07867616', + 'n07867751', 'n07868200', 'n07868340', 'n07868508', 'n07868830', + 'n07868955', 'n07869291', 'n07869391', 'n07869522', 'n07869611', + 'n07869775', 'n07870069', 'n07870167', 'n07870313', 'n07870894', + 'n07871234', 'n07871436', 'n07871720', 'n07871810', 'n07872593', + 'n07873057', 'n07873348', 'n07873464', 'n07873807', 'n07874063', + 'n07874159', 'n07874259', 'n07874343', 'n07874441', 'n07874780', + 'n07875152', 'n07875436', 'n07875560', 'n07875693', 'n07876189', + 'n07876651', 'n07877187', 'n07877299', 'n07877675', 'n07877849', + 'n07877961', 'n07878647', 'n07878785', 'n07878926', 'n07879072', + 'n07879174', 'n07879350', 'n07879450', 'n07879659', 'n07879953', + 'n07880080', 'n07880213', 'n07880325', 'n07880458', 'n07880751', + 'n07880880', 'n07880968', 'n07881205', 'n07881404', 'n07881800', + 'n07882420', 'n07882497', 'n07883031', 'n07883251', 'n07884567', + 'n07885705', 'n07886057', 'n07886176', 'n07886463', 'n07886572', + 'n07886849', 'n07887099', 'n07887192', 'n07887304', 'n07887461', + 'n07887634', 'n07887967', 'n07888229', 'n07888465', 'n07888816', + 'n07889274', 'n07889510', 'n07889814', 'n07890068', 'n07890226', + 'n07890352', 'n07890540', 'n07890750', 'n07891189', 'n07891309', + 'n07891433', 'n07891726', 'n07892418', 'n07892512', 'n07892813', + 'n07893253', 'n07893528', 'n07893642', 'n07893891', 'n07894102', + 'n07894298', 'n07894451', 'n07894551', 'n07894703', 'n07894799', + 'n07894965', 'n07895100', 'n07895237', 'n07895435', 'n07895595', + 'n07895710', 'n07895839', 'n07895962', 'n07896060', 'n07896165', + 'n07896287', 'n07896661', 'n07896765', 'n07896893', 'n07896994', + 'n07897116', 'n07897438', 'n07897600', 'n07897750', 'n07897865', + 'n07897975', 'n07898117', 'n07898247', 'n07898333', 'n07898443', + 'n07898617', 'n07898745', 'n07899003', 'n07899108', 'n07899292', + 'n07899434', 'n07899533', 'n07899660', 'n07899769', 'n07899899', + 'n07900225', 'n07900406', 'n07900616', 'n07900734', 'n07900825', + 'n07900958', 'n07901355', 'n07901457', 'n07901587', 'n07902121', + 'n07902336', 'n07902443', 'n07902799', 'n07902937', 'n07903101', + 'n07903208', 'n07903543', 'n07903643', 'n07903731', 'n07903841', + 'n07903962', 'n07904293', 'n07904395', 'n07904637', 'n07904760', + 'n07904865', 'n07904934', 'n07905038', 'n07905296', 'n07905386', + 'n07905474', 'n07905979', 'n07906111', 'n07906284', 'n07906572', + 'n07906718', 'n07906877', 'n07907037', 'n07907161', 'n07907342', + 'n07907429', 'n07907548', 'n07907831', 'n07907943', 'n07908411', + 'n07908567', 'n07908647', 'n07908812', 'n07909129', 'n07909593', + 'n07910048', 'n07910152', 'n07910379', 'n07910538', 'n07910656', + 'n07911249', 'n07911371', 'n07911677', 'n07912211', 'n07913393', + 'n07913882', 'n07914006', 'n07914128', 'n07914271', 'n07914413', + 'n07914586', 'n07914777', 'n07914995', 'n07915094', 'n07915491', + 'n07915618', 'n07915918', 'n07916041', 'n07916183', 'n07916319', + 'n07916437', 'n07917133', 'n07917272', 'n07917392', 'n07917507', + 'n07917618', 'n07918028', 'n07918193', 'n07918309', 'n07918879', + 'n07919310', 'n07919441', 'n07919572', 'n07920052', 'n07920222', + 'n07920349', 'n07920540', 'n07920663', 'n07920872', 'n07920989', + 'n07921239', 
'n07921455', 'n07921615', 'n07921834', 'n07922041', + 'n07922147', 'n07922512', 'n07922764', 'n07923748', 'n07924033', + 'n07924276', 'n07924443', 'n07924560', 'n07924747', 'n07924834', + 'n07924955', 'n07925116', 'n07925229', 'n07925500', 'n07925608', + 'n07925966', 'n07926250', 'n07926346', 'n07926785', 'n07926920', + 'n07927197', 'n07927512', 'n07927836', 'n07927931', 'n07928163', + 'n07928367', 'n07928488', 'n07928696', 'n07928790', 'n07928887', + 'n07929172', 'n07929351', 'n07929519', 'n07930062', 'n07930315', + 'n07930433', 'n07930554', 'n07930864', 'n07931001', 'n07931452', + 'n07931612', 'n07931870', 'n07932039', 'n07932841', 'n07933154', + 'n07933274', 'n07933799', 'n07934282', 'n07935043', 'n07935379', + 'n07935504', 'n07935737', 'n07935878', 'n07936263', 'n07936548', + 'n07936745', 'n07937461', 'n07938007', 'n07938149', 'n07938313', + 'n07938594', 'n07942152', 'n07951464', 'n07954211', 'n07977870', + 'n08182379', 'n08242223', 'n08249459', 'n08256735', 'n08376250', + 'n08492461', 'n08494231', 'n08495908', 'n08505018', 'n08517676', + 'n08518171', 'n08521623', 'n08524735', 'n08539072', 'n08547468', + 'n08547544', 'n08551296', 'n08555710', 'n08560295', 'n08571898', + 'n08573842', 'n08578517', 'n08579352', 'n08580944', 'n08583292', + 'n08583455', 'n08584914', 'n08596076', 'n08598301', 'n08598568', + 'n08611339', 'n08614632', 'n08616050', 'n08628141', 'n08633683', + 'n08640531', 'n08640739', 'n08640962', 'n08645104', 'n08645212', + 'n08649711', 'n08658309', 'n08659446', 'n08659861', 'n08663703', + 'n08673039', 'n08677424', 'n08896327', 'n09189157', 'n09191635', + 'n09193705', 'n09194227', 'n09199101', 'n09205509', 'n09206896', + 'n09206985', 'n09208496', 'n09210862', 'n09217230', 'n09218315', + 'n09218494', 'n09218641', 'n09219233', 'n09224725', 'n09228055', + 'n09229709', 'n09230041', 'n09230202', 'n09231117', 'n09233446', + 'n09238926', 'n09239302', 'n09242389', 'n09245515', 'n09246464', + 'n09247410', 'n09248399', 'n09249034', 'n09251407', 'n09256479', + 'n09257843', 'n09259025', 'n09259219', 'n09260907', 'n09263912', + 'n09265620', 'n09267854', 'n09269341', 'n09269472', 'n09270735', + 'n09274152', 'n09279986', 'n09282208', 'n09283193', 'n09283405', + 'n09283767', 'n09283866', 'n09287968', 'n09288635', 'n09289331', + 'n09290444', 'n09294877', 'n09295946', 'n09300905', 'n09302616', + 'n09303008', 'n09303528', 'n09304750', 'n09305898', 'n09308572', + 'n09308743', 'n09309168', 'n09309292', 'n09326662', 'n09331251', + 'n09332890', 'n09335809', 'n09336555', 'n09337253', 'n09344324', + 'n09348460', 'n09349648', 'n09359803', 'n09361517', 'n09362945', + 'n09366317', 'n09376198', 'n09376526', 'n09376786', 'n09381242', + 'n09382099', 'n09384106', 'n09392402', 'n09393605', 'n09396465', + 'n09396608', 'n09398076', 'n09398677', 'n09399592', 'n09400987', + 'n09403211', 'n09403427', 'n09403734', 'n09405078', 'n09406793', + 'n09409512', 'n09409752', 'n09410224', 'n09411189', 'n09415584', + 'n09415671', 'n09416076', 'n09416890', 'n09421799', 'n09421951', + 'n09428293', 'n09428628', 'n09432283', 'n09433442', 'n09433839', + 'n09435739', 'n09436444', 'n09436708', 'n09437454', 'n09438844', + 'n09438940', 'n09439213', 'n09442595', 'n09443281', 'n09443641', + 'n09444783', 'n09445008', 'n09445289', 'n09447666', 'n09448690', + 'n09450163', 'n09451237', 'n09452395', 'n09452760', 'n09453008', + 'n09454153', 'n09454412', 'n09457979', 'n09460046', 'n09461069', + 'n09466678', 'n09468604', 'n09472413', 'n09472597', 'n09475044', + 'n09475179', 'n09475925', 'n09481120', 'n09505153', 'n09606527', + 'n09607630', 
'n09607903', 'n09608709', 'n09610405', 'n09616922', + 'n09618760', 'n09618880', 'n09618957', 'n09619168', 'n09620078', + 'n09620794', 'n09621232', 'n09622049', 'n09622302', 'n09624168', + 'n09624559', 'n09626238', 'n09627906', 'n09629752', 'n09632518', + 'n09635534', 'n09636339', 'n09637339', 'n09638454', 'n09638875', + 'n09639919', 'n09640715', 'n09641002', 'n09643799', 'n09644152', + 'n09648743', 'n09650729', 'n09651123', 'n09652149', 'n09654518', + 'n09656077', 'n09659039', 'n09659188', 'n09661873', 'n09666883', + 'n09670521', 'n09675922', 'n09676021', 'n09676247', 'n09676884', + 'n09679170', 'n09681234', 'n09683757', 'n09683924', 'n09684901', + 'n09686401', 'n09688804', 'n09689435', 'n09689958', 'n09690621', + 'n09691729', 'n09691858', 'n09692915', 'n09693982', 'n09694664', + 'n09694771', 'n09695514', 'n09695620', 'n09695979', 'n09696456', + 'n09696585', 'n09696763', 'n09697401', 'n09698644', 'n09700964', + 'n09701148', 'n09701833', 'n09703485', 'n09703708', 'n09705124', + 'n09705784', 'n09706255', 'n09707289', 'n09708750', 'n09708889', + 'n09710041', 'n09711435', 'n09712324', 'n09712448', 'n09712696', + 'n09713108', 'n09714694', 'n09715427', 'n09717233', 'n09718217', + 'n09718811', 'n09718936', 'n09719309', 'n09719794', 'n09720033', + 'n09720256', 'n09720595', 'n09720842', 'n09722658', 'n09723067', + 'n09724533', 'n09724656', 'n09724785', 'n09725000', 'n09725653', + 'n09725772', 'n09726621', 'n09727440', 'n09727826', 'n09728137', + 'n09728285', 'n09730077', 'n09730204', 'n09730824', 'n09731343', + 'n09731436', 'n09732170', 'n09733793', 'n09734185', 'n09734450', + 'n09734535', 'n09734639', 'n09736798', 'n09736945', 'n09738121', + 'n09740724', 'n09741816', 'n09742101', 'n09742315', 'n09743487', + 'n09743792', 'n09744161', 'n09744834', 'n09747191', 'n09747495', + 'n09749386', 'n09750282', 'n09750641', 'n09750770', 'n09750891', + 'n09751496', 'n09751895', 'n09752023', 'n09752519', 'n09753792', + 'n09754217', 'n09755241', 'n09756049', 'n09757449', 'n09758885', + 'n09759501', 'n09760609', 'n09761068', 'n09763784', 'n09764598', + 'n09764900', 'n09765118', 'n09767197', 'n09770179', 'n09770359', + 'n09772930', 'n09774783', 'n09776346', 'n09779790', 'n09782167', + 'n09782397', 'n09785659', 'n09785891', 'n09787534', 'n09787765', + 'n09789566', 'n09791014', 'n09791419', 'n09791816', 'n09792555', + 'n09792969', 'n09793141', 'n09796809', 'n09797873', 'n09800964', + 'n09801533', 'n09805151', 'n09805324', 'n09809538', 'n09809749', + 'n09809925', 'n09811852', 'n09813219', 'n09814660', 'n09816771', + 'n09818022', 'n09820263', 'n09822830', 'n09823502', 'n09823832', + 'n09824135', 'n09824609', 'n09827246', 'n09827363', 'n09828216', + 'n09830194', 'n09830400', 'n09830629', 'n09832456', 'n09833441', + 'n09833536', 'n09834378', 'n09834699', 'n09835230', 'n09835348', + 'n09835506', 'n09836160', 'n09836343', 'n09836519', 'n09836786', + 'n09838621', 'n09839702', 'n09840217', 'n09840520', 'n09841188', + 'n09841696', 'n09842047', 'n09842395', 'n09842528', 'n09843443', + 'n09843824', 'n09844457', 'n09845401', 'n09846469', 'n09846755', + 'n09846894', 'n09847543', 'n09850760', 'n09851165', 'n09851575', + 'n09854218', 'n09854421', 'n09855433', 'n09856671', 'n09858165', + 'n09859152', 'n09861599', 'n09861863', 'n09861946', 'n09862621', + 'n09863031', 'n09866817', 'n09871229', 'n09871681', 'n09871867', + 'n09872066', 'n09873348', 'n09873473', 'n09873899', 'n09874428', + 'n09874725', 'n09874862', 'n09877288', 'n09877750', 'n09877951', + 'n09881265', 'n09881895', 'n09886403', 'n09889065', 'n09889170', + 'n09889941', 
'n09890749', 'n09893191', 'n09893344', 'n09893502', + 'n09894143', 'n09894445', 'n09894654', 'n09895222', 'n09895561', + 'n09896170', 'n09896401', 'n09896685', 'n09899671', 'n09899782', + 'n09899929', 'n09901337', 'n09901921', 'n09902731', 'n09902954', + 'n09903153', 'n09903501', 'n09904208', 'n09904837', 'n09905185', + 'n09906449', 'n09911226', 'n09913455', 'n09913593', 'n09915434', + 'n09915651', 'n09916348', 'n09917214', 'n09917345', 'n09917593', + 'n09918248', 'n09918554', 'n09919451', 'n09920283', 'n09923186', + 'n09923418', 'n09923561', 'n09923673', 'n09924106', 'n09924195', + 'n09924996', 'n09927451', 'n09928136', 'n09929298', 'n09929577', + 'n09930257', 'n09930876', 'n09931165', 'n09931640', 'n09932098', + 'n09932336', 'n09932508', 'n09933098', 'n09934337', 'n09934774', + 'n09936825', 'n09938449', 'n09941089', 'n09941787', 'n09941964', + 'n09942970', 'n09943239', 'n09943811', 'n09944022', 'n09944430', + 'n09945745', 'n09946814', 'n09951274', 'n09951616', 'n09953350', + 'n09954639', 'n09959142', 'n09964202', 'n09967967', 'n09970822', + 'n09971273', 'n09972010', 'n09972458', 'n09974648', 'n09975425', + 'n09976283', 'n09976429', 'n09980985', 'n09981278', 'n09981540', + 'n09981939', 'n09988063', 'n09988493', 'n09988703', 'n09989502', + 'n09990415', 'n09990690', 'n09990777', 'n09991867', 'n09993252', + 'n09994673', 'n10001217', 'n10001481', 'n10002760', 'n10004718', + 'n10005934', 'n10007684', 'n10009276', 'n10013811', 'n10015485', + 'n10017272', 'n10019072', 'n10019406', 'n10020670', 'n10020890', + 'n10024362', 'n10025635', 'n10026976', 'n10027246', 'n10033412', + 'n10033663', 'n10034201', 'n10034614', 'n10036692', 'n10036929', + 'n10037385', 'n10037922', 'n10038409', 'n10039271', 'n10039946', + 'n10040945', 'n10042845', 'n10043491', 'n10043643', 'n10048612', + 'n10049363', 'n10053439', 'n10053808', 'n10054657', 'n10055410', + 'n10058962', 'n10060352', 'n10063635', 'n10069296', 'n10069981', + 'n10070108', 'n10070711', 'n10075693', 'n10076224', 'n10076604', + 'n10076957', 'n10077593', 'n10078131', 'n10078719', 'n10078806', + 'n10079399', 'n10080869', 'n10081204', 'n10082043', 'n10082687', + 'n10082997', 'n10084295', 'n10085869', 'n10086383', 'n10087434', + 'n10091450', 'n10091564', 'n10091651', 'n10092488', 'n10092643', + 'n10092794', 'n10092978', 'n10093475', 'n10093818', 'n10095769', + 'n10095869', 'n10098245', 'n10098517', 'n10098624', 'n10098710', + 'n10098862', 'n10102800', 'n10104064', 'n10105733', 'n10107303', + 'n10108018', 'n10112129', 'n10115430', 'n10116702', 'n10117739', + 'n10117851', 'n10120330', 'n10120671', 'n10123122', 'n10123844', + 'n10127689', 'n10129825', 'n10131151', 'n10131815', 'n10132035', + 'n10134178', 'n10134982', 'n10135129', 'n10137825', 'n10140597', + 'n10140929', 'n10141364', 'n10141732', 'n10142391', 'n10142747', + 'n10143172', 'n10144338', 'n10145239', 'n10145340', 'n10145480', + 'n10145590', 'n10145774', 'n10145902', 'n10146002', 'n10146104', + 'n10146416', 'n10146816', 'n10147121', 'n10147262', 'n10147935', + 'n10148035', 'n10150071', 'n10150940', 'n10151760', 'n10152763', + 'n10153414', 'n10153594', 'n10155849', 'n10157128', 'n10159045', + 'n10159533', 'n10160280', 'n10164233', 'n10164492', 'n10165448', + 'n10167152', 'n10167838', 'n10168837', 'n10169147', 'n10173410', + 'n10173771', 'n10174330', 'n10174445', 'n10175248', 'n10178216', + 'n10182190', 'n10183931', 'n10185483', 'n10185793', 'n10186068', + 'n10186216', 'n10187491', 'n10187990', 'n10188957', 'n10189278', + 'n10191001', 'n10192839', 'n10194231', 'n10195593', 'n10198437', + 'n10200781', 
'n10202624', 'n10203949', 'n10205231', 'n10205457', + 'n10207169', 'n10208189', 'n10208950', 'n10209082', 'n10209731', + 'n10210911', 'n10212231', 'n10212501', 'n10215623', 'n10216106', + 'n10221312', 'n10222170', 'n10223177', 'n10225219', 'n10225931', + 'n10226413', 'n10227985', 'n10229883', 'n10233248', 'n10235024', + 'n10235385', 'n10236304', 'n10237069', 'n10237196', 'n10237464', + 'n10237676', 'n10241300', 'n10242328', 'n10243137', 'n10243273', + 'n10243664', 'n10247358', 'n10247880', 'n10249270', 'n10249459', + 'n10252222', 'n10253122', 'n10253296', 'n10253479', 'n10253703', + 'n10258786', 'n10259348', 'n10259780', 'n10259997', 'n10260706', + 'n10260800', 'n10261624', 'n10262445', 'n10262561', 'n10262655', + 'n10263411', 'n10263790', 'n10267311', 'n10267865', 'n10274815', + 'n10275395', 'n10276477', 'n10277027', 'n10279018', 'n10280674', + 'n10282482', 'n10282672', 'n10283170', 'n10288964', 'n10289039', + 'n10289462', 'n10290919', 'n10291822', 'n10293332', 'n10296176', + 'n10296444', 'n10297234', 'n10297531', 'n10297841', 'n10298647', + 'n10298912', 'n10299250', 'n10300154', 'n10300303', 'n10300500', + 'n10303814', 'n10304086', 'n10304914', 'n10305802', 'n10308168', + 'n10308732', 'n10313000', 'n10313239', 'n10313724', 'n10314054', + 'n10314517', 'n10314836', 'n10315456', 'n10315561', 'n10316360', + 'n10317007', 'n10317500', 'n10318293', 'n10318607', 'n10320863', + 'n10321340', 'n10323634', 'n10324560', 'n10325774', 'n10327987', + 'n10328123', 'n10328328', 'n10331167', 'n10332385', 'n10332861', + 'n10333439', 'n10333601', 'n10333838', 'n10334009', 'n10339717', + 'n10340312', 'n10341343', 'n10341573', 'n10342992', 'n10343355', + 'n10345015', 'n10346015', 'n10347446', 'n10348526', 'n10353016', + 'n10355142', 'n10355449', 'n10355688', 'n10356877', 'n10357613', + 'n10359546', 'n10360747', 'n10362319', 'n10362557', 'n10364198', + 'n10366276', 'n10366966', 'n10368291', 'n10368528', 'n10368624', + 'n10369317', 'n10370955', 'n10373390', 'n10375052', 'n10375314', + 'n10375402', 'n10376523', 'n10377021', 'n10377185', 'n10377291', + 'n10378026', 'n10380672', 'n10382710', 'n10382825', 'n10384392', + 'n10384496', 'n10385566', 'n10386984', 'n10387196', 'n10387324', + 'n10393909', 'n10395073', 'n10395828', 'n10396106', 'n10400108', + 'n10400437', 'n10400618', 'n10401331', 'n10401639', 'n10403876', + 'n10405694', 'n10406266', 'n10406391', 'n10406765', 'n10407310', + 'n10407954', 'n10410246', 'n10411551', 'n10415037', 'n10418735', + 'n10419472', 'n10419785', 'n10420507', 'n10421016', 'n10421470', + 'n10421956', 'n10422405', 'n10427764', 'n10431625', 'n10432189', + 'n10432441', 'n10435169', 'n10435988', 'n10438842', 'n10439373', + 'n10439851', 'n10441037', 'n10441962', 'n10449664', 'n10450161', + 'n10450303', 'n10451450', 'n10453184', 'n10461060', 'n10464052', + 'n10465451', 'n10465831', 'n10467179', 'n10467395', 'n10469874', + 'n10470779', 'n10472129', 'n10473917', 'n10474645', 'n10476467', + 'n10477713', 'n10481268', 'n10482220', 'n10483138', 'n10483799', + 'n10485883', 'n10486166', 'n10487182', 'n10488656', 'n10493685', + 'n10495756', 'n10498816', 'n10498986', 'n10499232', 'n10499355', + 'n10500217', 'n10500419', 'n10500603', 'n10502329', 'n10504206', + 'n10505613', 'n10506915', 'n10508141', 'n10508710', 'n10509063', + 'n10510245', 'n10512372', 'n10513823', 'n10514429', 'n10521100', + 'n10521662', 'n10522035', 'n10522759', 'n10523341', 'n10524076', + 'n10525436', 'n10525617', 'n10528023', 'n10529231', 'n10530150', + 'n10530383', 'n10530959', 'n10536416', 'n10540114', 'n10542608', + 'n10542761', 
'n10542888', 'n10548537', 'n10548681', 'n10550369', + 'n10553235', 'n10559288', 'n10559508', 'n10559996', 'n10560106', + 'n10562135', 'n10562283', 'n10563314', 'n10563403', 'n10565667', + 'n10566072', 'n10568358', 'n10568608', 'n10569179', 'n10572706', + 'n10572889', 'n10574538', 'n10574840', 'n10575463', 'n10577284', + 'n10578021', 'n10578471', 'n10580030', 'n10581890', 'n10582746', + 'n10583387', 'n10583790', 'n10585077', 'n10588074', 'n10588357', + 'n10588965', 'n10590146', 'n10592811', 'n10593521', 'n10595164', + 'n10595647', 'n10598181', 'n10599806', 'n10602470', 'n10602985', + 'n10603851', 'n10604380', 'n10604979', 'n10607291', 'n10607478', + 'n10610465', 'n10610850', 'n10611267', 'n10611613', 'n10613996', + 'n10618342', 'n10620586', 'n10620758', 'n10622053', 'n10624074', + 'n10624310', 'n10624437', 'n10624540', 'n10627252', 'n10628644', + 'n10629939', 'n10630188', 'n10631309', 'n10633450', 'n10634849', + 'n10635788', 'n10638922', 'n10639359', 'n10639637', 'n10642596', + 'n10644598', 'n10645017', 'n10646140', 'n10649197', 'n10652605', + 'n10655594', 'n10657835', 'n10661563', 'n10665587', 'n10665698', + 'n10667477', 'n10667863', 'n10669991', 'n10671613', 'n10671736', + 'n10672371', 'n10672662', 'n10674713', 'n10675010', 'n10678937', + 'n10679174', 'n10680609', 'n10680796', 'n10682953', 'n10685398', + 'n10686073', 'n10686885', 'n10688356', 'n10689306', 'n10690648', + 'n10692482', 'n10693824', 'n10694258', 'n10696508', 'n10698368', + 'n10699981', 'n10701180', 'n10701644', 'n10701962', 'n10702167', + 'n10707134', 'n10707233', 'n10709529', 'n10711766', 'n10718131', + 'n10719132', 'n10721321', 'n10726031', 'n10727171', 'n10727458', + 'n10728624', 'n10730728', 'n10732010', 'n10734394', 'n10734891', + 'n10737103', 'n10738111', 'n10739391', 'n10740868', 'n10741367', + 'n10744164', 'n10745006', 'n10746931', 'n10747119', 'n10748620', + 'n10750031', 'n10750640', 'n10751152', 'n10753442', 'n10754189', + 'n10755080', 'n10755648', 'n10756148', 'n10757050', 'n10757492', + 'n10761190', 'n10763075', 'n10763383', 'n10763620', 'n10765679', + 'n10772092', 'n10773665', 'n10780284', 'n10780632', 'n10782471', + 'n10782791', 'n10782940', 'n10787470', 'n10791115', 'n10791221', + 'n10792335', 'n10792856', 'n10793570', 'n10802507', 'n10804287', + 'n10806113', 'n11448153', 'n11487732', 'n11508382', 'n11524451', + 'n11532682', 'n11533212', 'n11536673', 'n11537327', 'n11542137', + 'n11542640', 'n11544015', 'n11545524', 'n11545714', 'n11547855', + 'n11552133', 'n11552806', 'n11552976', 'n11599324', 'n11600372', + 'n11601177', 'n11601333', 'n11601918', 'n11602873', 'n11603246', + 'n11603835', 'n11608250', 'n11609475', 'n11609862', 'n11610047', + 'n11610215', 'n11610437', 'n11611087', 'n11611233', 'n11611356', + 'n11611561', 'n11611758', 'n11612018', 'n11612349', 'n11612575', + 'n11613219', 'n11613459', 'n11614039', 'n11614250', 'n11614420', + 'n11614713', 'n11615026', 'n11615387', 'n11615607', 'n11615967', + 'n11616486', 'n11616662', 'n11617090', 'n11617272', 'n11617631', + 'n11618290', 'n11618525', 'n11618861', 'n11619227', 'n11619455', + 'n11620389', 'n11620673', 'n11621029', 'n11621281', 'n11621547', + 'n11621727', 'n11621950', 'n11622184', 'n11622368', 'n11622591', + 'n11622771', 'n11623105', 'n11623815', 'n11623967', 'n11624192', + 'n11624531', 'n11625003', 'n11625223', 'n11625632', 'n11625804', + 'n11626152', 'n11626409', 'n11626585', 'n11626826', 'n11627168', + 'n11627512', 'n11627908', 'n11628087', 'n11628456', 'n11628793', + 'n11630017', 'n11631854', 'n11632167', 'n11632619', 'n11634736', + 'n11635152', 
'n11635433', 'n11635830', 'n11636204', 'n11636835', + 'n11639445', 'n11640132', 'n11643835', 'n11644046', 'n11644226', + 'n11644462', 'n11645590', 'n11645914', 'n11646167', 'n11646344', + 'n11646694', 'n11647306', 'n11647703', 'n11650558', 'n11652376', + 'n11653904', 'n11654293', 'n11655974', 'n11656123', 'n11658331', + 'n11658544', 'n11660300', 'n11661372', 'n11661909', 'n11662371', + 'n11664418', 'n11665372', 'n11666854', 'n11669786', 'n11669921', + 'n11672269', 'n11672400', 'n11675025', 'n11676500', 'n11678010', + 'n11680596', 'n11682659', 'n11686912', 'n11689483', 'n11690254', + 'n11690455', 'n11691046', 'n11691857', 'n11692265', 'n11692792', + 'n11693981', 'n11694664', 'n11695599', 'n11695974', 'n11698042', + 'n11699442', 'n11700058', 'n11701066', 'n11703669', 'n11704093', + 'n11704620', 'n11705171', 'n11705387', 'n11705776', 'n11706761', + 'n11707229', 'n11707827', 'n11709205', 'n11709674', 'n11710136', + 'n11710393', 'n11710827', 'n11711537', 'n11711764', 'n11712282', + 'n11714382', 'n11715430', 'n11715678', 'n11717577', 'n11719286', + 'n11720353', 'n11720643', 'n11720891', 'n11721337', 'n11722466', + 'n11722982', 'n11723227', 'n11723770', 'n11724109', 'n11725015', + 'n11725311', 'n11725480', 'n11725821', 'n11725973', 'n11726269', + 'n11726707', 'n11727091', 'n11727358', 'n11727540', 'n11727738', + 'n11728099', 'n11728945', 'n11730602', 'n11731659', 'n11732567', + 'n11733054', 'n11733312', 'n11733548', 'n11735053', 'n11736694', + 'n11736851', 'n11737534', 'n11748811', 'n11752937', 'n11753143', + 'n11753355', 'n11753700', 'n11754893', 'n11756092', 'n11756669', + 'n11756870', 'n11757653', 'n11757851', 'n11758122', 'n11758276', + 'n11758483', 'n11758799', 'n11759224', 'n11759404', 'n11759853', + 'n11760785', 'n11761202', 'n11761650', 'n11762433', 'n11769176', + 'n11769621', 'n11769803', 'n11770256', 'n11772408', 'n11772879', + 'n11773987', 'n11774513', 'n11777080', 'n11778257', 'n11779300', + 'n11780148', 'n11781176', 'n11782036', 'n11782761', 'n11783920', + 'n11784126', 'n11784497', 'n11785668', 'n11786131', 'n11786539', + 'n11788727', 'n11789066', 'n11789589', 'n11791341', 'n11791569', + 'n11792029', 'n11792341', 'n11792742', 'n11793779', 'n11794024', + 'n11794519', 'n11795049', 'n11797321', 'n11800236', 'n11801891', + 'n11802586', 'n11802800', 'n11805544', 'n11805956', 'n11806219', + 'n11806679', 'n11807108', 'n11807525', 'n11807979', 'n11808299', + 'n11808468', 'n11808721', 'n11808932', 'n11809094', 'n11809271', + 'n11809437', 'n11809594', 'n11809754', 'n11810358', 'n11811473', + 'n11811706', 'n11811921', 'n11812094', 'n11812910', 'n11813077', + 'n11814584', 'n11815491', 'n11815721', 'n11815918', 'n11816121', + 'n11816336', 'n11816649', 'n11816829', 'n11817914', 'n11818069', + 'n11819509', 'n11819912', 'n11820965', 'n11821184', 'n11823436', + 'n11824146', 'n11825351', 'n11826198', 'n11828577', 'n11830906', + 'n11832214', 'n11832480', 'n11834654', 'n11836722', 'n11837970', + 'n11838916', 'n11839568', 'n11839823', 'n11840067', 'n11844371', + 'n11844892', 'n11845557', 'n11845793', 'n11845913', 'n11846765', + 'n11847169', 'n11848479', 'n11849467', 'n11849871', 'n11849983', + 'n11850521', 'n11851258', 'n11851578', 'n11851839', 'n11852028', + 'n11853356', 'n11853813', 'n11854479', 'n11855274', 'n11855553', + 'n11855842', 'n11857875', 'n11858077', 'n11859275', 'n11859472', + 'n11859737', 'n11860555', 'n11861641', 'n11861853', 'n11862835', + 'n11865874', 'n11866248', 'n11869689', 'n11870418', 'n11870747', + 'n11872146', 'n11874081', 'n11875523', 'n11875691', 'n11875938', + 'n11876204', 
'n11876432', 'n11876634', 'n11876803', 'n11877193', + 'n11877283', 'n11877646', 'n11878101', 'n11879054', 'n11879722', + 'n11879895', 'n11881189', 'n11882074', 'n11882426', 'n11883328', + 'n11885856', 'n11887119', 'n11888800', 'n11889619', 'n11890150', + 'n11891175', 'n11892029', 'n11892637', 'n11892817', 'n11893640', + 'n11894327', 'n11894558', 'n11894770', 'n11895092', 'n11896722', + 'n11897116', 'n11898775', 'n11900569', 'n11901294', 'n11901597', + 'n11901759', 'n11901977', 'n11902200', 'n11902389', 'n11902709', + 'n11902982', 'n11903671', 'n11904109', 'n11905392', 'n11905749', + 'n11906917', 'n11907100', 'n11907689', 'n11908549', 'n11908846', + 'n11910271', 'n11910460', 'n11915214', 'n11915658', 'n11915899', + 'n11916467', 'n11916696', 'n11918286', 'n11918473', 'n11919447', + 'n11919975', 'n11920133', 'n11921395', 'n11923174', 'n11923397', + 'n11923637', 'n11924445', 'n11924849', 'n11925303', 'n11925898', + 'n11926365', 'n11926833', 'n11927215', 'n11928352', 'n11928858', + 'n11929743', 'n11931540', 'n11931918', 'n11933546', 'n11933728', + 'n11934616', 'n11934807', 'n11935330', 'n11935469', 'n11939180', + 'n11939491', 'n11939699', 'n11940006', 'n11940599', 'n11941924', + 'n11943407', 'n11943660', 'n11943992', 'n11944196', 'n11944954', + 'n11945367', 'n11945514', 'n11945783', 'n11946727', 'n11946918', + 'n11947629', 'n11947802', 'n11948264', 'n11948864', 'n11949015', + 'n11949402', 'n11950345', 'n11950686', 'n11950877', 'n11951511', + 'n11952541', 'n11953038', 'n11953610', 'n11953884', 'n11954161', + 'n11954345', 'n11954642', 'n11955153', 'n11955896', 'n11956348', + 'n11956850', 'n11957678', 'n11958080', 'n11959632', 'n11959862', + 'n11960245', 'n11961100', 'n11961446', 'n11961871', 'n11962272', + 'n11962667', 'n11963932', 'n11965218', 'n11965627', 'n11966083', + 'n11966215', 'n11966617', 'n11966896', 'n11968704', 'n11968931', + 'n11969166', 'n11969607', 'n11970586', 'n11971248', 'n11971406', + 'n11971783', 'n11971927', 'n11972291', 'n11972759', 'n11973341', + 'n11977303', 'n11978233', 'n11978551', 'n11978713', 'n11978961', + 'n11979527', 'n11979715', 'n11979964', 'n11980318', 'n11980682', + 'n11981192', 'n11982115', 'n11984144', 'n11984542', 'n11986511', + 'n11987126', 'n11988596', 'n11989087', 'n11989393', 'n11989869', + 'n11990167', 'n11990313', 'n11991263', 'n11992806', 'n11995092', + 'n11998888', 'n12001707', 'n12002428', 'n12003167', 'n12003696', + 'n12004547', 'n12005656', 'n12006766', 'n12006930', 'n12007196', + 'n12007406', 'n12008252', 'n12008487', 'n12008749', 'n12009420', + 'n12011620', 'n12012111', 'n12014085', 'n12015221', 'n12015525', + 'n12015959', 'n12016567', 'n12018760', 'n12019035', 'n12019827', + 'n12020184', 'n12020507', 'n12020736', 'n12020941', 'n12022054', + 'n12023108', 'n12023407', 'n12023726', 'n12024445', 'n12024690', + 'n12026018', 'n12026476', 'n12026981', 'n12027222', 'n12027658', + 'n12029635', 'n12030908', 'n12031139', 'n12031927', 'n12033709', + 'n12034141', 'n12034384', 'n12035631', 'n12036939', 'n12037499', + 'n12037691', 'n12038038', 'n12038406', 'n12038585', 'n12038898', + 'n12039317', 'n12041446', 'n12043444', 'n12043673', 'n12043836', + 'n12044467', 'n12046028', 'n12046428', 'n12046815', 'n12047345', + 'n12047884', 'n12048056', 'n12048399', 'n12049282', 'n12049562', + 'n12050533', 'n12050959', 'n12051103', 'n12052447', 'n12052787', + 'n12053405', 'n12053690', 'n12055516', 'n12056217', 'n12056601', + 'n12056758', 'n12057211', 'n12057447', 'n12057660', 'n12058192', + 'n12058630', 'n12058822', 'n12059314', 'n12059625', 'n12061380', + 'n12061614', 
'n12062468', 'n12062626', 'n12062781', 'n12063639', + 'n12064389', 'n12064591', 'n12065316', 'n12065777', 'n12066018', + 'n12066261', 'n12066630', 'n12067193', 'n12068432', 'n12069217', + 'n12069679', 'n12070016', 'n12070381', 'n12070583', 'n12070712', + 'n12071744', 'n12072722', 'n12073554', 'n12073991', 'n12074408', + 'n12074867', 'n12075010', 'n12075151', 'n12075299', 'n12075830', + 'n12076223', 'n12076577', 'n12076852', 'n12077944', 'n12078172', + 'n12079120', 'n12079963', 'n12080395', 'n12080820', 'n12081215', + 'n12083113', 'n12083591', 'n12083847', 'n12084158', 'n12084555', + 'n12084890', 'n12085267', 'n12085664', 'n12086012', 'n12086192', + 'n12086539', 'n12086778', 'n12087961', 'n12088223', 'n12090890', + 'n12091213', 'n12091377', 'n12091550', 'n12091953', 'n12092262', + 'n12092417', 'n12093329', 'n12093600', 'n12094612', 'n12095020', + 'n12095647', 'n12097396', 'n12098403', 'n12098524', 'n12099342', + 'n12101870', 'n12102133', 'n12104238', 'n12104501', 'n12104734', + 'n12105125', 'n12107710', 'n12107970', 'n12108871', 'n12109365', + 'n12110085', 'n12110778', 'n12112008', 'n12112609', 'n12112918', + 'n12113195', 'n12115180', 'n12116429', 'n12119238', 'n12121610', + 'n12122725', 'n12123741', 'n12124627', 'n12124818', 'n12126084', + 'n12127460', 'n12127768', 'n12128071', 'n12129134', 'n12133462', + 'n12133682', 'n12134025', 'n12135049', 'n12136392', 'n12137120', + 'n12137569', 'n12139575', 'n12140903', 'n12141167', 'n12141385', + 'n12142085', 'n12144313', 'n12144580', 'n12145477', 'n12146311', + 'n12146654', 'n12148757', 'n12150722', 'n12151615', 'n12152532', + 'n12152722', 'n12154773', 'n12155009', 'n12157056', 'n12158031', + 'n12158443', 'n12159055', 'n12159388', 'n12160303', 'n12160490', + 'n12160857', 'n12161056', 'n12161969', 'n12162181', 'n12162425', + 'n12164363', 'n12164656', 'n12164881', 'n12165170', 'n12165758', + 'n12166128', 'n12166424', 'n12166793', 'n12167075', 'n12167436', + 'n12167602', 'n12168565', 'n12171098', 'n12171316', 'n12171966', + 'n12172364', 'n12172481', 'n12172906', 'n12173069', 'n12173664', + 'n12173912', 'n12174311', 'n12174521', 'n12174926', 'n12178896', + 'n12179122', 'n12180168', 'n12180885', 'n12184912', 'n12185859', + 'n12187247', 'n12187891', 'n12189429', 'n12189987', 'n12190410', + 'n12190869', 'n12194147', 'n12195533', 'n12196336', 'n12196527', + 'n12196694', 'n12198286', 'n12199790', 'n12200143', 'n12201331', + 'n12201580', 'n12202936', 'n12203529', 'n12203896', 'n12204032', + 'n12204175', 'n12205694', 'n12214789', 'n12215022', 'n12215579', + 'n12217453', 'n12223569', 'n12223764', 'n12224978', 'n12225563', + 'n12227658', 'n12228229', 'n12228387', 'n12230794', 'n12237486', + 'n12237641', 'n12238913', 'n12240477', 'n12242409', 'n12243109', + 'n12244153', 'n12244650', 'n12244819', 'n12245319', 'n12246232', + 'n12249542', 'n12252168', 'n12256920', 'n12257570', 'n12258885', + 'n12260799', 'n12261571', 'n12261808', 'n12262018', 'n12262185', + 'n12263038', 'n12263204', 'n12263738', 'n12263987', 'n12264512', + 'n12265600', 'n12266217', 'n12266796', 'n12267411', 'n12267677', + 'n12268246', 'n12269241', 'n12269406', 'n12270027', 'n12270741', + 'n12270946', 'n12271933', 'n12272239', 'n12272883', 'n12273114', + 'n12273344', 'n12273768', 'n12273939', 'n12274358', 'n12274863', + 'n12275131', 'n12275675', 'n12275888', 'n12276110', 'n12276477', + 'n12276628', 'n12276872', 'n12277150', 'n12277578', 'n12277800', + 'n12278107', 'n12278371', 'n12278650', 'n12278865', 'n12279458', + 'n12279772', 'n12280060', 'n12281241', 'n12281788', 'n12281974', + 'n12282235', 
'n12282527', 'n12282737', 'n12282933', 'n12283147', + 'n12283542', 'n12284262', 'n12284821', 'n12285369', 'n12285900', + 'n12286826', 'n12286988', 'n12287836', 'n12288005', 'n12288823', + 'n12289433', 'n12290748', 'n12291143', 'n12291959', 'n12293723', + 'n12294124', 'n12294331', 'n12294723', 'n12294871', 'n12295033', + 'n12295429', 'n12295796', 'n12296432', 'n12300840', 'n12301180', + 'n12301445', 'n12302071', 'n12302248', 'n12302565', 'n12303083', + 'n12303462', 'n12304115', 'n12304703', 'n12304899', 'n12305089', + 'n12305293', 'n12305475', 'n12305819', 'n12305986', 'n12306089', + 'n12306717', 'n12307076', 'n12307240', 'n12307756', 'n12309277', + 'n12311579', 'n12312728', 'n12315598', 'n12315999', 'n12316444', + 'n12316572', 'n12317296', 'n12318378', 'n12318965', 'n12319204', + 'n12319414', 'n12320010', 'n12320806', 'n12321077', 'n12322099', + 'n12322501', 'n12322699', 'n12325234', 'n12328398', 'n12328567', + 'n12329260', 'n12329473', 'n12330469', 'n12330587', 'n12330891', + 'n12331655', 'n12332030', 'n12332555', 'n12333053', 'n12333530', + 'n12333771', 'n12334293', 'n12334891', 'n12336092', 'n12336224', + 'n12336333', 'n12336727', 'n12336973', 'n12337391', 'n12337617', + 'n12338258', 'n12338454', 'n12338655', 'n12338796', 'n12339831', + 'n12340383', 'n12340755', 'n12342299', 'n12342498', 'n12342852', + 'n12343480', 'n12344283', 'n12344483', 'n12344700', 'n12344837', + 'n12345280', 'n12345899', 'n12346813', 'n12347158', 'n12350758', + 'n12352287', 'n12352639', 'n12352844', 'n12352990', 'n12353203', + 'n12353754', 'n12356023', 'n12356960', 'n12357485', 'n12360108', + 'n12360684', 'n12360958', 'n12361135', 'n12361946', 'n12362274', + 'n12362668', 'n12367611', 'n12368028', 'n12368257', 'n12368451', + 'n12369309', 'n12371439', 'n12373100', 'n12374418', 'n12374862', + 'n12377198', 'n12383894', 'n12384037', 'n12384227', 'n12384375', + 'n12384839', 'n12385429', 'n12385566', 'n12387633', 'n12387839', + 'n12388143', 'n12388858', 'n12388989', 'n12389130', 'n12389501', + 'n12390099', 'n12390314', 'n12392549', 'n12393269', 'n12397431', + 'n12399132', 'n12399384', 'n12400489', 'n12400720', 'n12401684', + 'n12402051', 'n12402348', 'n12402596', 'n12402840', 'n12403994', + 'n12405714', 'n12406488', 'n12406715', 'n12406902', 'n12407079', + 'n12407222', 'n12407715', 'n12407890', 'n12408077', 'n12408717', + 'n12409231', 'n12409470', 'n12409840', 'n12412355', 'n12412606', + 'n12413165', 'n12413301', 'n12413419', 'n12413642', 'n12413880', + 'n12414035', 'n12414159', 'n12414449', 'n12414818', 'n12414932', + 'n12415595', 'n12416073', 'n12416703', 'n12418221', 'n12420722', + 'n12421137', 'n12421683', 'n12421917', 'n12422129', 'n12426623', + 'n12426749', 'n12427184', 'n12427391', 'n12427566', 'n12427757', + 'n12428076', 'n12428412', 'n12428747', 'n12429352', 'n12432356', + 'n12433081', 'n12433178', 'n12433769', 'n12435152', 'n12435649', + 'n12435777', 'n12437513', 'n12437769', 'n12437930', 'n12441183', + 'n12441390', 'n12441958', 'n12443323', 'n12446519', 'n12448700', + 'n12449296', 'n12449526', 'n12450344', 'n12450840', 'n12451070', + 'n12451240', 'n12451399', 'n12451915', 'n12452836', 'n12453186', + 'n12454159', 'n12454436', 'n12454556', 'n12454705', 'n12454949', + 'n12455950', 'n12457091', 'n12458550', 'n12459629', 'n12460697', + 'n12460957', 'n12461109', 'n12461466', 'n12461673', 'n12462805', + 'n12463134', 'n12465557', 'n12466727', 'n12469517', 'n12472024', + 'n12473608', 'n12473840', 'n12474167', 'n12475035', 'n12475242', + 'n12476510', 'n12477163', 'n12477401', 'n12477583', 'n12477747', + 'n12478768', 
'n12479537', 'n12480456', 'n12480895', 'n12481458', + 'n12482437', 'n12482668', 'n12482893', 'n12483427', 'n12483625', + 'n12483841', 'n12484784', 'n12485653', 'n12485981', 'n12486574', + 'n12489815', 'n12491017', 'n12491826', 'n12492106', 'n12493208', + 'n12494794', 'n12495146', 'n12495895', 'n12496427', 'n12496949', + 'n12498055', 'n12499979', 'n12501202', 'n12504570', 'n12504783', + 'n12506341', 'n12506991', 'n12508309', 'n12509476', 'n12509665', + 'n12513172', 'n12513613', 'n12513933', 'n12514138', 'n12515711', + 'n12515925', 'n12516828', 'n12517445', 'n12517642', 'n12519089', + 'n12519563', 'n12521394', 'n12523475', 'n12527738', 'n12528549', + 'n12528974', 'n12529220', 'n12530629', 'n12530818', 'n12532564', + 'n12537253', 'n12539306', 'n12540250', 'n12544539', 'n12545635', + 'n12546183', 'n12546617', 'n12546962', 'n12547215', 'n12547503', + 'n12548280', 'n12549192', 'n12552309', 'n12554911', 'n12556656', + 'n12557064', 'n12557438', 'n12557556', 'n12557681', 'n12558230', + 'n12558425', 'n12560282', 'n12560621', 'n12560775', 'n12561169', + 'n12562785', 'n12564083', 'n12566954', 'n12568186', 'n12570394', + 'n12570703', 'n12570972', 'n12571781', 'n12573474', 'n12574320', + 'n12574866', 'n12575322', 'n12575812', 'n12576323', 'n12577895', + 'n12578626', 'n12578916', 'n12579038', 'n12580654', 'n12580896', + 'n12582231', 'n12582665', 'n12582846', 'n12583126', 'n12583401', + 'n12584191', 'n12584715', 'n12585629', 'n12587132', 'n12587803', + 'n12588320', 'n12588780', 'n12590232', 'n12590499', 'n12591017', + 'n12591351', 'n12593994', 'n12595699', 'n12595964', 'n12596148', + 'n12596345', 'n12596709', 'n12596849', 'n12597134', 'n12597466', + 'n12597798', 'n12598027', 'n12599435', 'n12602262', 'n12602980', + 'n12603449', 'n12604228', 'n12606438', 'n12606545', 'n12607456', + 'n12610328', 'n12614477', 'n12615232', 'n12620196', 'n12620546', + 'n12620969', 'n12621410', 'n12622297', 'n12622875', 'n12623077', + 'n12624381', 'n12624568', 'n12625383', 'n12627119', 'n12628986', + 'n12629305', 'n12629666', 'n12630763', 'n12631331', 'n12631932', + 'n12632335', 'n12633638', 'n12633994', 'n12634211', 'n12634429', + 'n12634734', 'n12634986', 'n12635532', 'n12635744', 'n12635955', + 'n12636224', 'n12637123', 'n12638218', 'n12638753', 'n12638964', + 'n12639584', 'n12640839', 'n12641007', 'n12641413', 'n12642090', + 'n12642200', 'n12643313', 'n12643473', 'n12644902', 'n12645174', + 'n12646072', 'n12646397', 'n12646605', 'n12646740', 'n12647560', + 'n12647893', 'n12648045', 'n12648888', 'n12649065', 'n12649317', + 'n12649539', 'n12650379', 'n12650556', 'n12651229', 'n12651611', + 'n12651821', 'n12653218', 'n12655869', 'n12656369', 'n12656685', + 'n12657082', 'n12658118', 'n12658308', 'n12658481', 'n12659064', + 'n12659356', 'n12659539', 'n12662772', 'n12663023', 'n12663359', + 'n12665048', 'n12665271', 'n12665857', 'n12666965', 'n12670758', + 'n12671651', 'n12674895', 'n12675299', 'n12675876', 'n12676534', + 'n12676703', 'n12679593', 'n12680402', 'n12680864', 'n12681893', + 'n12682411', 'n12682668', 'n12683096', 'n12683407', 'n12683571', + 'n12683791', 'n12684379', 'n12685431', 'n12685831', 'n12686077', + 'n12686274', 'n12686676', 'n12687044', 'n12687462', 'n12687698', + 'n12687957', 'n12688716', 'n12691428', 'n12691661', 'n12694486', + 'n12695975', 'n12696492', 'n12698598', 'n12700088', 'n12703190', + 'n12703383', 'n12703557', 'n12703856', 'n12704343', 'n12706410', + 'n12707781', 'n12708293', 'n12708654', 'n12708941', 'n12709103', + 'n12709688', 'n12709901', 'n12710295', 'n12710415', 'n12710577', + 'n12710693', 
'n12711596', 'n12711817', 'n12711984', 'n12713063', + 'n12713866', 'n12714755', 'n12717072', 'n12717224', 'n12719684', + 'n12719944', 'n12720200', 'n12723610', 'n12724942', 'n12725521', + 'n12725738', 'n12726159', 'n12726670', 'n12727101', 'n12727518', + 'n12729315', 'n12729521', 'n12729729', 'n12731029', 'n12731401', + 'n12731835', 'n12732009', 'n12732491', 'n12732756', 'n12732966', + 'n12733218', 'n12733428', 'n12733647', 'n12733870', 'n12734070', + 'n12737383', 'n12737898', 'n12739332', 'n12741222', 'n12741792', + 'n12743009', 'n12743352', 'n12744387', 'n12745386', 'n12746884', + 'n12749049', 'n12749456', 'n12749679', 'n12749852', 'n12752205', + 'n12753007', 'n12753245', 'n12753573', 'n12753762', 'n12754003', + 'n12754468', 'n12754648', 'n12754781', 'n12754981', 'n12755225', + 'n12755387', 'n12755727', 'n12756457', 'n12757303', 'n12757458', + 'n12757816', 'n12759273', 'n12761284', 'n12762049', 'n12762896', + 'n12764202', 'n12765115', 'n12766595', 'n12766869', 'n12767648', + 'n12768682', 'n12771192', 'n12771390', 'n12771597', 'n12772753', + 'n12772908', 'n12773651', 'n12774299', 'n12774641', 'n12775919', + 'n12777680', 'n12778398', 'n12778605', 'n12779603', 'n12779851', + 'n12781940', 'n12782530', 'n12782915', 'n12784889', 'n12785724', + 'n12785889', 'n12788854', 'n12789054', 'n12790430', 'n12791064', + 'n12791329', 'n12793015', 'n12793284', 'n12793494', 'n12794135', + 'n12794367', 'n12794985', 'n12795352', 'n12795555', 'n12796022', + 'n12797860', 'n12799776', 'n12801520', 'n12801781', 'n12803754', + 'n12805146', 'n12805561', 'n12806015', 'n12806732', 'n12807251', + 'n12807409', 'n12807773', 'n12808007', 'n12810595', 'n12811027', + 'n12812478', 'n12813189', 'n12814643', 'n12815198', 'n12816508', + 'n12817464', 'n12817694', 'n12818346', 'n12818966', 'n12819728', + 'n12820853', 'n12821505', 'n12821895', 'n12822115', 'n12822769', + 'n12822955', 'n12823717', 'n12823859', 'n12824053', 'n12825497', + 'n12827270', 'n12827537', 'n12828220', 'n12828379', 'n12828791', + 'n12830222', 'n12830568', 'n12831932', 'n12832315', 'n12832538', + 'n12833149', 'n12833985', 'n12834798', 'n12835331', 'n12836212', + 'n12836337', 'n12836508', 'n12836862', 'n12837803', 'n12840362', + 'n12840749', 'n12841007', 'n12841193', 'n12841354', 'n12843557', + 'n12843970', 'n12844939', 'n12845413', 'n12847008', 'n12847374', + 'n12847927', 'n12848499', 'n12849061', 'n12849279', 'n12849416', + 'n12849952', 'n12850168', 'n12850336', 'n12850906', 'n12851469', + 'n12853482', 'n12854048', 'n12854600', 'n12855494', 'n12856091', + 'n12856287', 'n12856479', 'n12856680', 'n12857779', 'n12858150', + 'n12858397', 'n12858618', 'n12858871', 'n12859986', 'n12860365', + 'n12861345', 'n12861541', 'n12861892', 'n12862512', 'n12863624', + 'n12864160', 'n12865037', 'n12865562', 'n12865708', 'n12865824', + 'n12866002', 'n12866162', 'n12866459', 'n12866635', 'n12867826', + 'n12868019', 'n12869061', 'n12869478', 'n12870535', 'n12870682', + 'n12870891', 'n12872458', 'n12875269', 'n12877838', 'n12879527', + 'n12879963', 'n12880244', 'n12880462', 'n12882779', 'n12882945', + 'n12884100', 'n12884260', 'n12887293', 'n12889219', 'n12889713', + 'n12890265', 'n12890490', 'n12890685', 'n12890928', 'n12891093', + 'n12891305', 'n12891469', 'n12891643', 'n12893463', 'n12893993', + 'n12895811', 'n12898774', 'n12899537', 'n12899752', 'n12901724', + 'n12902662', 'n12904314', 'n12905412', 'n12906214', 'n12906498', + 'n12908093', 'n12908645', 'n12909421', 'n12909917', 'n12911079', + 'n12911440', 'n12911673', 'n12912670', 'n12913791', 'n12914923', + 'n12915568', 
'n12915811', 'n12916179', 'n12916511', 'n12917901', + 'n12918609', 'n12919403', 'n12919646', 'n12919847', 'n12920204', + 'n12920955', 'n12921868', 'n12922763', 'n12924623', 'n12925179', + 'n12926480', 'n12926689', 'n12927013', 'n12927494', 'n12928071', + 'n12929403', 'n12931542', 'n12932173', 'n12932365', 'n12932966', + 'n12934036', 'n12934174', 'n12934479', 'n12934985', 'n12935609', + 'n12937130', 'n12938193', 'n12939282', 'n12939874', 'n12940226', + 'n12940609', 'n12942395', 'n12942572', 'n12946849', 'n12947313', + 'n12947544', 'n12947895', 'n12948053', 'n12948251', 'n12948495', + 'n12950126', 'n12950314', 'n12951146', 'n12951835', 'n12953206', + 'n12953484', 'n12954799', 'n12956367', 'n12957924', 'n12961879', + 'n12963628', 'n12964920', 'n12965626', 'n12966945', 'n12969131', + 'n12969425', 'n12973443', 'n12974987', 'n12975804', 'n12979829', + 'n12980840', 'n12981443', 'n12982468', 'n12983048', 'n12985420', + 'n12985773', 'n12985857', 'n12986227', 'n12987056', 'n12988158', + 'n12989938', 'n12991184', 'n12991837', 'n12992177', 'n12992868', + 'n12995601', 'n12997654', 'n12997919', 'n12998815', 'n13000891', + 'n13001041', 'n13001206', 'n13001366', 'n13001529', 'n13001930', + 'n13002750', 'n13002925', 'n13003061', 'n13003254', 'n13003522', + 'n13003712', 'n13004423', 'n13005329', 'n13005984', 'n13006171', + 'n13006631', 'n13006894', 'n13007034', 'n13007417', 'n13008315', + 'n13009085', 'n13009429', 'n13011595', 'n13012253', 'n13012973', + 'n13013534', 'n13013764', 'n13014409', 'n13014741', 'n13017102', + 'n13017240', 'n13019835', 'n13020191', 'n13020964', 'n13021689', + 'n13022210', 'n13022709', 'n13023134', 'n13024012', 'n13025647', + 'n13028611', 'n13029326', 'n13029760', 'n13032115', 'n13032381', + 'n13032618', 'n13032923', 'n13033134', 'n13033577', 'n13034062', + 'n13035241', 'n13035707', 'n13035925', 'n13037406', 'n13038068', + 'n13038744', 'n13039349', 'n13040303', 'n13040629', 'n13041312', + 'n13043926', 'n13044375', 'n13044778', 'n13046669', 'n13049953', + 'n13050397', 'n13052670', 'n13052931', 'n13053608', 'n13054073', + 'n13054560', 'n13055423', 'n13055577', 'n13055949', 'n13060190', + 'n13061348', 'n13062421', 'n13065089', 'n13066448', 'n13068255', + 'n13072528', 'n13074619', 'n13077033', 'n13077295', 'n13079073', + 'n13083023', 'n13084184', 'n13084834', 'n13085747', 'n13090871', + 'n13091620', 'n13094273', 'n13099999', 'n13100677', 'n13102775', + 'n13103877', 'n13104059', 'n13107694', 'n13107891', 'n13108131', + 'n13108323', 'n13108481', 'n13108545', 'n13108841', 'n13111881', + 'n13121349', 'n13122364', 'n13123431', 'n13125117', 'n13126856', + 'n13127843', 'n13128976', 'n13130726', 'n13131028', 'n13131618', + 'n13132338', 'n13132656', 'n13133613', 'n13133932', 'n13134947', + 'n13135832', 'n13136316', 'n13136556', 'n13137409', 'n13138308', + 'n13138842', 'n13139055', 'n13141415', 'n13141564', 'n13142504', + 'n13145040', 'n13145250', 'n13146583', 'n13147270', 'n13147386', + 'n13148208', 'n13150894', 'n13154388', 'n13154494', 'n13155095', + 'n13155305', 'n13155611', 'n13157684', 'n13158512', 'n13160604', + 'n13163991', 'n13172923', 'n13173488', 'n13173882', 'n13177048', + 'n13177884', 'n13180534', 'n13180875', 'n13181055', 'n13181811', + 'n13183056', 'n13183489', 'n13185269', 'n13187367', 'n13188096', + 'n13190747', 'n13192625', 'n13193642', 'n13193856', 'n13194036', + 'n13194572', 'n13195341', 'n13196003', 'n13197274', 'n13197507', + 'n13198914', 'n13199717', 'n13199970', 'n13200651', 'n13201969', + 'n13205058', 'n13206178', 'n13206817', 'n13207094', 'n13207335', + 'n13208302', 
'n13209808', 'n13211020', 'n13213066', 'n13214340', + 'n13215586', 'n13219422', 'n13219833', 'n13219976', 'n13220122', + 'n13221529', 'n13223588', 'n13223710', 'n13223843', 'n13226871', + 'n13229543', 'n13230662', 'n13231078', 'n13232779', 'n13234678', + 'n13235159', 'n13235503', 'n13237188', 'n13238375', 'n13238988', + 'n13579829', 'n13653902', 'n13862407', 'n13863020', 'n13863771', + 'n13864035', 'n13865298', 'n13865483', 'n13865904', 'n13868944', + 'n13869547', 'n13869788', 'n13869896', 'n13872592', 'n13872822', + 'n13873502', 'n13873917', 'n13875392', 'n13875571', 'n13876561', + 'n13878306', 'n13879049', 'n13879320', 'n13880994', 'n13881644', + 'n13882201', 'n13882276', 'n13882563', 'n13886260', 'n13895262', + 'n13896100', 'n13896217', 'n13897996', 'n13898207', 'n13900287', + 'n13900422', 'n13901211', 'n13901321', 'n13901858', 'n13902048', + 'n13902336', 'n13905792', 'n13907272', 'n13908201', 'n13908580', + 'n13912260', 'n13912540', 'n13914608', 'n13915023', 'n13915113', + 'n13916721', 'n13918274', 'n13918387', 'n13919547', 'n13919919', + 'n13926786', 'n14131950', 'n14564779', 'n14685296', 'n14696793', + 'n14698884', 'n14765422', 'n14785065', 'n14810561', 'n14820180', + 'n14844693', 'n14858292', 'n14900342', 'n14908027', 'n14915184', + 'n14919819', 'n14973585', 'n14974264', 'n14976759', 'n14976871', + 'n14977504', 'n15019030', 'n15062057', 'n15067877', 'n15075141', + 'n15086247', 'n15089258', 'n15090065', 'n15091129', 'n15091304', + 'n15091473', 'n15091669', 'n15091846', 'n15092059', 'n15092227', + 'n15092650', 'n15092942', 'n15093137', 'n15093298', 'n15102455', + 'n15102894', + ] + + +def imagenet22k_synsets(): + return [ + 'n00004475', 'n00005787', 'n00006024', 'n00006484', 'n00007846', + 'n00015388', 'n00017222', 'n00021265', 'n00021939', 'n00120010', + 'n00141669', 'n00288000', 'n00288190', 'n00288384', 'n00324978', + 'n00326094', 'n00433458', 'n00433661', 'n00433802', 'n00434075', + 'n00439826', 'n00440039', 'n00440218', 'n00440382', 'n00440509', + 'n00440643', 'n00440747', 'n00440941', 'n00441073', 'n00441824', + 'n00442115', 'n00442437', 'n00442847', 'n00442981', 'n00443231', + 'n00443375', 'n00443517', 'n00443692', 'n00443803', 'n00443917', + 'n00444142', 'n00444340', 'n00444490', 'n00444651', 'n00444846', + 'n00444937', 'n00445055', 'n00445226', 'n00445351', 'n00445685', + 'n00445802', 'n00446311', 'n00446411', 'n00446493', 'n00446632', + 'n00446804', 'n00446980', 'n00447073', 'n00447221', 'n00447361', + 'n00447463', 'n00447540', 'n00447957', 'n00448126', 'n00448232', + 'n00448466', 'n00448640', 'n00448748', 'n00448872', 'n00448958', + 'n00449054', 'n00449168', 'n00449295', 'n00449517', 'n00449695', + 'n00449796', 'n00449892', 'n00449977', 'n00450070', 'n00450335', + 'n00450700', 'n00450866', 'n00450998', 'n00451186', 'n00451370', + 'n00451563', 'n00451635', 'n00451768', 'n00451866', 'n00452034', + 'n00452152', 'n00452293', 'n00452734', 'n00452864', 'n00453126', + 'n00453313', 'n00453396', 'n00453478', 'n00453631', 'n00453935', + 'n00454237', 'n00454395', 'n00454493', 'n00454624', 'n00454855', + 'n00454983', 'n00455076', 'n00455173', 'n00456465', 'n00463246', + 'n00463543', 'n00464277', 'n00464478', 'n00464651', 'n00464894', + 'n00466273', 'n00466377', 'n00466524', 'n00466630', 'n00466712', + 'n00466880', 'n00467320', 'n00467536', 'n00467719', 'n00467995', + 'n00468299', 'n00468480', 'n00469651', 'n00470554', 'n00470682', + 'n00470830', 'n00470966', 'n00471437', 'n00471613', 'n00474568', + 'n00474657', 'n00474769', 'n00474881', 'n00475014', 'n00475142', + 'n00475273', 
'n00475403', 'n00475535', 'n00475661', 'n00475787', + 'n00476140', 'n00476235', 'n00476389', 'n00477392', 'n00477639', + 'n00477827', 'n00478262', 'n00479076', 'n00479440', 'n00479616', + 'n00479734', 'n00479887', 'n00480211', 'n00480366', 'n00480508', + 'n00480885', 'n00480993', 'n00481803', 'n00481938', 'n00482122', + 'n00482298', 'n00483205', 'n00483313', 'n00483409', 'n00483508', + 'n00483605', 'n00483705', 'n00483848', 'n00523513', 'n00812526', + 'n00825773', 'n00887544', 'n01035504', 'n01035667', 'n01055165', + 'n01314388', 'n01314663', 'n01314781', 'n01314910', 'n01315213', + 'n01315330', 'n01315581', 'n01315805', 'n01316422', 'n01316579', + 'n01316734', 'n01316949', 'n01317089', 'n01317294', 'n01317391', + 'n01317541', 'n01317813', 'n01317916', 'n01318053', 'n01318279', + 'n01318381', 'n01318478', 'n01318660', 'n01318894', 'n01319001', + 'n01319187', 'n01319467', 'n01319685', 'n01320872', 'n01321123', + 'n01321230', 'n01321456', 'n01321579', 'n01321770', 'n01321854', + 'n01322221', 'n01322343', 'n01322508', 'n01322604', 'n01322685', + 'n01322898', 'n01322983', 'n01323068', 'n01323155', 'n01323261', + 'n01323355', 'n01323493', 'n01323599', 'n01323781', 'n01324305', + 'n01324431', 'n01324610', 'n01324799', 'n01324916', 'n01325060', + 'n01326291', 'n01327909', 'n01329186', 'n01330126', 'n01330497', + 'n01332181', 'n01333082', 'n01333483', 'n01333610', 'n01334217', + 'n01334690', 'n01335218', 'n01337191', 'n01337734', 'n01338685', + 'n01339083', 'n01339336', 'n01339471', 'n01339801', 'n01340014', + 'n01340522', 'n01340785', 'n01340935', 'n01341090', 'n01342269', + 'n01347583', 'n01349735', 'n01350226', 'n01350701', 'n01351170', + 'n01351315', 'n01357328', 'n01357507', 'n01358572', 'n01359762', + 'n01362336', 'n01363719', 'n01365474', 'n01365885', 'n01366700', + 'n01367772', 'n01368672', 'n01369358', 'n01369484', 'n01374703', + 'n01374846', 'n01375204', 'n01376237', 'n01376437', 'n01376543', + 'n01377278', 'n01377510', 'n01377694', 'n01378545', 'n01379389', + 'n01380610', 'n01380754', 'n01381044', 'n01382033', 'n01384084', + 'n01384164', 'n01384687', 'n01385017', 'n01385330', 'n01386007', + 'n01386182', 'n01386354', 'n01387065', 'n01389507', 'n01390123', + 'n01390763', 'n01392275', 'n01392380', 'n01393486', 'n01394040', + 'n01394492', 'n01394771', 'n01395254', 'n01396048', 'n01396617', + 'n01397114', 'n01397690', 'n01397871', 'n01400247', 'n01400391', + 'n01402600', 'n01403457', 'n01404365', 'n01404495', 'n01405007', + 'n01405616', 'n01407798', 'n01410457', 'n01411450', 'n01412694', + 'n01413457', 'n01414216', 'n01415626', 'n01415920', 'n01416213', + 'n01418498', 'n01418620', 'n01419332', 'n01419573', 'n01419888', + 'n01421333', 'n01421807', 'n01422185', 'n01422335', 'n01422450', + 'n01423302', 'n01423617', 'n01424420', 'n01425223', 'n01427399', + 'n01429172', 'n01438208', 'n01438581', 'n01439121', 'n01439514', + 'n01439808', 'n01440160', 'n01440242', 'n01440467', 'n01440764', + 'n01441117', 'n01441272', 'n01441425', 'n01441910', 'n01442450', + 'n01442710', 'n01442972', 'n01443243', 'n01443537', 'n01443831', + 'n01444339', 'n01444783', 'n01445429', 'n01445593', 'n01445857', + 'n01446152', 'n01446589', 'n01446760', 'n01447139', 'n01447331', + 'n01447658', 'n01447946', 'n01448291', 'n01448594', 'n01448951', + 'n01449374', 'n01449712', 'n01449980', 'n01450661', 'n01450950', + 'n01451115', 'n01451295', 'n01451426', 'n01451863', 'n01452345', + 'n01453087', 'n01453475', 'n01453742', 'n01454545', 'n01454856', + 'n01455317', 'n01455461', 'n01455778', 'n01456137', 'n01456454', + 'n01456756', 
'n01457082', 'n01457407', 'n01457852', 'n01458746', + 'n01458842', 'n01459791', 'n01460303', 'n01461315', 'n01461646', + 'n01462042', 'n01462544', 'n01462803', 'n01464844', 'n01466257', + 'n01467336', 'n01467804', 'n01468238', 'n01468712', 'n01469103', + 'n01469723', 'n01470145', 'n01470479', 'n01470733', 'n01470895', + 'n01471682', 'n01472303', 'n01472502', 'n01473806', 'n01474283', + 'n01474864', 'n01475232', 'n01475940', 'n01476418', 'n01477080', + 'n01477525', 'n01477875', 'n01478511', 'n01478969', 'n01479213', + 'n01479820', 'n01480106', 'n01480516', 'n01480880', 'n01481331', + 'n01481498', 'n01482071', 'n01482330', 'n01483021', 'n01483522', + 'n01483830', 'n01484097', 'n01484285', 'n01484447', 'n01484562', + 'n01484850', 'n01485479', 'n01486010', 'n01486540', 'n01486838', + 'n01487506', 'n01488038', 'n01488918', 'n01489501', 'n01489709', + 'n01489920', 'n01490112', 'n01490360', 'n01490670', 'n01491006', + 'n01491361', 'n01491661', 'n01491874', 'n01492357', 'n01492569', + 'n01492708', 'n01492860', 'n01493146', 'n01493541', 'n01493829', + 'n01494041', 'n01494475', 'n01494757', 'n01494882', 'n01495006', + 'n01495493', 'n01495701', 'n01496331', 'n01497118', 'n01497413', + 'n01497738', 'n01498041', 'n01498406', 'n01498699', 'n01498989', + 'n01499396', 'n01499732', 'n01500091', 'n01500476', 'n01500854', + 'n01501160', 'n01501641', 'n01501777', 'n01501948', 'n01502101', + 'n01503061', 'n01503976', 'n01504179', 'n01504344', 'n01514668', + 'n01514752', 'n01514859', 'n01514926', 'n01515078', 'n01515217', + 'n01515303', 'n01516212', 'n01517389', 'n01517565', 'n01517966', + 'n01518878', 'n01519563', 'n01519873', 'n01520576', 'n01521399', + 'n01521756', 'n01522450', 'n01523105', 'n01524359', 'n01524761', + 'n01525720', 'n01526521', 'n01526766', 'n01527194', 'n01527347', + 'n01527617', 'n01527917', 'n01528396', 'n01528654', 'n01528845', + 'n01529672', 'n01530439', 'n01530575', 'n01531178', 'n01531344', + 'n01531512', 'n01531639', 'n01531811', 'n01531971', 'n01532325', + 'n01532511', 'n01532829', 'n01533000', 'n01533339', 'n01533481', + 'n01533651', 'n01533893', 'n01534155', 'n01534433', 'n01534582', + 'n01534762', 'n01535140', 'n01535469', 'n01535690', 'n01536035', + 'n01536186', 'n01536334', 'n01536644', 'n01536780', 'n01537134', + 'n01537544', 'n01537895', 'n01538059', 'n01538200', 'n01538362', + 'n01538630', 'n01538955', 'n01539272', 'n01539573', 'n01539925', + 'n01540090', 'n01540233', 'n01540566', 'n01540832', 'n01541102', + 'n01541386', 'n01541760', 'n01541922', 'n01542168', 'n01542433', + 'n01542786', 'n01543175', 'n01543383', 'n01543632', 'n01543936', + 'n01544208', 'n01544389', 'n01544704', 'n01545574', 'n01546039', + 'n01546506', 'n01546921', 'n01547832', 'n01548301', 'n01548492', + 'n01548694', 'n01548865', 'n01549053', 'n01549430', 'n01549641', + 'n01549886', 'n01550172', 'n01550761', 'n01551080', 'n01551300', + 'n01551711', 'n01552034', 'n01552333', 'n01552813', 'n01553142', + 'n01553527', 'n01553762', 'n01554017', 'n01554448', 'n01555004', + 'n01555305', 'n01555809', 'n01556182', 'n01556514', 'n01557185', + 'n01557962', 'n01558149', 'n01558307', 'n01558461', 'n01558594', + 'n01558765', 'n01558993', 'n01559160', 'n01559477', 'n01559639', + 'n01559804', 'n01560105', 'n01560280', 'n01560419', 'n01560636', + 'n01560793', 'n01560935', 'n01561181', 'n01561452', 'n01561732', + 'n01562014', 'n01562265', 'n01562451', 'n01563128', 'n01563449', + 'n01563746', 'n01563945', 'n01564101', 'n01564217', 'n01564394', + 'n01564773', 'n01564914', 'n01565078', 'n01565345', 'n01565599', + 'n01565930', 
'n01566207', 'n01566645', 'n01567133', 'n01567678', + 'n01567879', 'n01568132', 'n01568294', 'n01568720', 'n01568892', + 'n01569060', 'n01569262', 'n01569423', 'n01569566', 'n01569836', + 'n01569971', 'n01570267', 'n01570421', 'n01570676', 'n01570839', + 'n01571410', 'n01571904', 'n01572328', 'n01572489', 'n01572654', + 'n01572782', 'n01573074', 'n01573240', 'n01573360', 'n01573627', + 'n01573898', 'n01574045', 'n01574390', 'n01574560', 'n01574801', + 'n01575117', 'n01575401', 'n01575745', 'n01576076', 'n01576358', + 'n01576695', 'n01577035', 'n01577458', 'n01577659', 'n01577941', + 'n01578180', 'n01578575', 'n01579028', 'n01579149', 'n01579260', + 'n01579410', 'n01579578', 'n01579729', 'n01580077', 'n01580379', + 'n01580490', 'n01580772', 'n01580870', 'n01581166', 'n01581434', + 'n01581730', 'n01581874', 'n01581984', 'n01582220', 'n01582398', + 'n01582498', 'n01582856', 'n01583209', 'n01583495', 'n01583828', + 'n01584225', 'n01584695', 'n01584853', 'n01585121', 'n01585287', + 'n01585422', 'n01585715', 'n01586020', 'n01586374', 'n01586941', + 'n01587278', 'n01587526', 'n01587834', 'n01588002', 'n01588431', + 'n01588725', 'n01588996', 'n01589286', 'n01589718', 'n01589893', + 'n01590220', 'n01591005', 'n01591123', 'n01591301', 'n01591697', + 'n01592084', 'n01592257', 'n01592387', 'n01592540', 'n01592694', + 'n01593028', 'n01593282', 'n01593553', 'n01594004', 'n01594372', + 'n01594787', 'n01594968', 'n01595168', 'n01595450', 'n01595624', + 'n01595974', 'n01596273', 'n01596608', 'n01597022', 'n01597336', + 'n01597737', 'n01597906', 'n01598074', 'n01598271', 'n01598588', + 'n01598988', 'n01599159', 'n01599269', 'n01599388', 'n01599556', + 'n01599741', 'n01600085', 'n01600341', 'n01600657', 'n01601068', + 'n01601410', 'n01601694', 'n01602080', 'n01602209', 'n01602630', + 'n01602832', 'n01603000', 'n01603152', 'n01603600', 'n01603812', + 'n01603953', 'n01604330', 'n01604968', 'n01605630', 'n01606097', + 'n01606177', 'n01606522', 'n01606672', 'n01606809', 'n01606978', + 'n01607309', 'n01607429', 'n01607600', 'n01607812', 'n01607962', + 'n01608265', 'n01608432', 'n01608814', 'n01609062', 'n01609391', + 'n01609751', 'n01609956', 'n01610100', 'n01610226', 'n01610552', + 'n01610955', 'n01611472', 'n01611674', 'n01611800', 'n01611969', + 'n01612122', 'n01612275', 'n01612476', 'n01612628', 'n01612955', + 'n01613177', 'n01613294', 'n01613615', 'n01613807', 'n01614038', + 'n01614343', 'n01614556', 'n01614925', 'n01615121', 'n01615303', + 'n01615458', 'n01615703', 'n01616086', 'n01616318', 'n01616551', + 'n01616764', 'n01617095', 'n01617443', 'n01617766', 'n01618082', + 'n01618503', 'n01618922', 'n01619310', 'n01619536', 'n01619835', + 'n01620135', 'n01620414', 'n01620735', 'n01621127', 'n01621635', + 'n01622120', 'n01622352', 'n01622483', 'n01622779', 'n01622959', + 'n01623110', 'n01623425', 'n01623615', 'n01623706', 'n01623880', + 'n01624115', 'n01624212', 'n01624305', 'n01624537', 'n01624833', + 'n01625121', 'n01625562', 'n01627424', 'n01628331', 'n01628770', + 'n01629276', 'n01629819', 'n01629962', 'n01630148', 'n01630284', + 'n01630670', 'n01630901', 'n01631175', 'n01631354', 'n01631512', + 'n01631663', 'n01632047', 'n01632308', 'n01632458', 'n01632601', + 'n01632777', 'n01632952', 'n01633406', 'n01633781', 'n01634227', + 'n01634522', 'n01635027', 'n01635176', 'n01635480', 'n01636127', + 'n01636352', 'n01636510', 'n01636829', 'n01637112', 'n01637338', + 'n01637615', 'n01637932', 'n01638194', 'n01638329', 'n01638722', + 'n01639187', 'n01639765', 'n01640846', 'n01641206', 'n01641391', + 'n01641577', 
'n01641739', 'n01641930', 'n01642097', 'n01642257', + 'n01642391', 'n01642539', 'n01642943', 'n01643255', 'n01643507', + 'n01643896', 'n01644373', 'n01644900', 'n01645466', 'n01645776', + 'n01646292', 'n01646388', 'n01646555', 'n01646648', 'n01646802', + 'n01646902', 'n01647033', 'n01647180', 'n01647303', 'n01647466', + 'n01647640', 'n01648139', 'n01648356', 'n01648620', 'n01649170', + 'n01649412', 'n01649556', 'n01649726', 'n01650167', 'n01650690', + 'n01650901', 'n01651059', 'n01651285', 'n01651487', 'n01651641', + 'n01651778', 'n01652026', 'n01652297', 'n01653026', 'n01653223', + 'n01653509', 'n01653773', 'n01654083', 'n01654637', 'n01654863', + 'n01655344', 'n01661091', 'n01661592', 'n01661818', 'n01662060', + 'n01662622', 'n01662784', 'n01663401', 'n01663782', 'n01664065', + 'n01664369', 'n01664492', 'n01664674', 'n01664990', 'n01665541', + 'n01665932', 'n01666228', 'n01666585', 'n01667114', 'n01667432', + 'n01667778', 'n01668091', 'n01668436', 'n01668665', 'n01668892', + 'n01669191', 'n01669372', 'n01669654', 'n01670092', 'n01670535', + 'n01670802', 'n01671125', 'n01671479', 'n01671705', 'n01672032', + 'n01672432', 'n01672611', 'n01673282', 'n01674216', 'n01674464', + 'n01674990', 'n01675352', 'n01675722', 'n01676755', 'n01677366', + 'n01677747', 'n01678043', 'n01678343', 'n01678657', 'n01679005', + 'n01679307', 'n01679626', 'n01679962', 'n01680264', 'n01680478', + 'n01680655', 'n01680813', 'n01680983', 'n01681328', 'n01681653', + 'n01681940', 'n01682172', 'n01682435', 'n01682714', 'n01683201', + 'n01683558', 'n01684133', 'n01684578', 'n01684741', 'n01685439', + 'n01685808', 'n01686044', 'n01686220', 'n01686403', 'n01686609', + 'n01686808', 'n01687128', 'n01687290', 'n01687665', 'n01687978', + 'n01688243', 'n01688961', 'n01689081', 'n01689411', 'n01689811', + 'n01690149', 'n01690466', 'n01691217', 'n01691652', 'n01691951', + 'n01692333', 'n01692523', 'n01692864', 'n01693175', 'n01693334', + 'n01693783', 'n01694178', 'n01694311', 'n01694709', 'n01694955', + 'n01695060', 'n01696633', 'n01697178', 'n01697457', 'n01697611', + 'n01697749', 'n01697978', 'n01698434', 'n01698640', 'n01698782', + 'n01699040', 'n01699254', 'n01699675', 'n01701551', 'n01701859', + 'n01702256', 'n01702479', 'n01703011', 'n01703161', 'n01703569', + 'n01704103', 'n01704323', 'n01704626', 'n01705010', 'n01705591', + 'n01705934', 'n01707294', 'n01708106', 'n01708998', 'n01709484', + 'n01709876', 'n01710177', 'n01711160', 'n01712008', 'n01712752', + 'n01713170', 'n01713764', 'n01714231', 'n01715888', 'n01717016', + 'n01717229', 'n01717467', 'n01718096', 'n01718414', 'n01719403', + 'n01721174', 'n01721898', 'n01722670', 'n01722998', 'n01723579', + 'n01724231', 'n01724840', 'n01725086', 'n01725713', 'n01726203', + 'n01726692', 'n01727646', 'n01728266', 'n01728572', 'n01728920', + 'n01729322', 'n01729672', 'n01729977', 'n01730185', 'n01730307', + 'n01730563', 'n01730812', 'n01730960', 'n01731137', 'n01731277', + 'n01731545', 'n01731764', 'n01731941', 'n01732093', 'n01732244', + 'n01732614', 'n01732789', 'n01732989', 'n01733214', 'n01733466', + 'n01733757', 'n01733957', 'n01734104', 'n01734418', 'n01734637', + 'n01734808', 'n01735189', 'n01735439', 'n01735577', 'n01735728', + 'n01736032', 'n01736375', 'n01736796', 'n01737021', 'n01737472', + 'n01737728', 'n01737875', 'n01738065', 'n01738306', 'n01738601', + 'n01738731', 'n01739094', 'n01739381', 'n01739647', 'n01739871', + 'n01740131', 'n01740551', 'n01740885', 'n01741232', 'n01741442', + 'n01741562', 'n01741943', 'n01742172', 'n01742447', 'n01742821', + 'n01743086', 
'n01743605', 'n01743936', 'n01744100', 'n01744270', + 'n01744401', 'n01744555', 'n01745125', 'n01745484', 'n01745902', + 'n01746191', 'n01746359', 'n01746952', 'n01747285', 'n01747589', + 'n01747885', 'n01748264', 'n01748389', 'n01748686', 'n01748906', + 'n01749244', 'n01749582', 'n01749742', 'n01749939', 'n01750167', + 'n01750437', 'n01750743', 'n01751036', 'n01751215', 'n01751472', + 'n01751748', 'n01752165', 'n01752585', 'n01752736', 'n01753032', + 'n01753180', 'n01753488', 'n01753959', 'n01754370', 'n01754533', + 'n01754876', 'n01755581', 'n01755740', 'n01755952', 'n01756089', + 'n01756291', 'n01756508', 'n01756733', 'n01756916', 'n01757115', + 'n01757343', 'n01757677', 'n01757901', 'n01758141', 'n01758757', + 'n01758895', 'n01767661', 'n01768244', 'n01769347', 'n01770081', + 'n01770393', 'n01770795', 'n01771100', 'n01771417', 'n01771766', + 'n01772222', 'n01772664', 'n01773157', 'n01773549', 'n01773797', + 'n01774097', 'n01774384', 'n01774750', 'n01775062', 'n01775370', + 'n01775730', 'n01776192', 'n01776313', 'n01776705', 'n01777304', + 'n01777467', 'n01777649', 'n01777909', 'n01778217', 'n01778487', + 'n01778621', 'n01778801', 'n01779148', 'n01779463', 'n01779629', + 'n01779939', 'n01780142', 'n01780426', 'n01780696', 'n01781071', + 'n01781570', 'n01781698', 'n01781875', 'n01782209', 'n01782516', + 'n01783017', 'n01783706', 'n01784293', 'n01784675', 'n01785667', + 'n01786646', 'n01787006', 'n01787191', 'n01787835', 'n01788291', + 'n01788579', 'n01788864', 'n01789386', 'n01789740', 'n01790171', + 'n01790304', 'n01790398', 'n01790557', 'n01790711', 'n01790812', + 'n01791107', 'n01791314', 'n01791388', 'n01791463', 'n01791625', + 'n01791954', 'n01792042', 'n01792158', 'n01792429', 'n01792530', + 'n01792640', 'n01792808', 'n01792955', 'n01793085', 'n01793159', + 'n01793249', 'n01793340', 'n01793435', 'n01793565', 'n01793715', + 'n01794158', 'n01794344', 'n01794651', 'n01795088', 'n01795545', + 'n01795735', 'n01795900', 'n01796019', 'n01796105', 'n01796340', + 'n01796519', 'n01796729', 'n01797020', 'n01797307', 'n01797601', + 'n01797886', 'n01798168', 'n01798484', 'n01798706', 'n01798839', + 'n01798979', 'n01799302', 'n01799679', 'n01800195', 'n01800424', + 'n01800633', 'n01801088', 'n01801479', 'n01801672', 'n01801876', + 'n01802159', 'n01802721', 'n01803078', 'n01803362', 'n01803641', + 'n01803893', 'n01804163', 'n01804478', 'n01804653', 'n01804921', + 'n01805070', 'n01805321', 'n01805801', 'n01806061', 'n01806143', + 'n01806297', 'n01806364', 'n01806467', 'n01806567', 'n01806847', + 'n01807105', 'n01807496', 'n01807828', 'n01808140', 'n01808291', + 'n01808596', 'n01809106', 'n01809371', 'n01809752', 'n01810268', + 'n01810700', 'n01811243', 'n01811909', 'n01812187', 'n01812337', + 'n01812662', 'n01812866', 'n01813088', 'n01813385', 'n01813532', + 'n01813658', 'n01813948', 'n01814217', 'n01814370', 'n01814549', + 'n01814620', 'n01814755', 'n01814921', 'n01815036', 'n01815270', + 'n01815601', 'n01816017', 'n01816140', 'n01816474', 'n01816887', + 'n01817263', 'n01817346', 'n01817953', 'n01818299', 'n01818515', + 'n01818832', 'n01819115', 'n01819313', 'n01819465', 'n01819734', + 'n01820052', 'n01820348', 'n01820546', 'n01820801', 'n01821076', + 'n01821203', 'n01821554', 'n01821869', 'n01822300', 'n01822602', + 'n01823013', 'n01823414', 'n01823740', 'n01824035', 'n01824344', + 'n01824575', 'n01824749', 'n01825278', 'n01825930', 'n01826364', + 'n01826680', 'n01826844', 'n01827403', 'n01827793', 'n01828096', + 'n01828556', 'n01828970', 'n01829413', 'n01829869', 'n01830042', + 'n01830479', 
'n01830915', 'n01831360', 'n01831712', 'n01832167', + 'n01832493', 'n01832813', 'n01833112', 'n01833415', 'n01833805', + 'n01834177', 'n01834540', 'n01835276', 'n01835769', 'n01835918', + 'n01836087', 'n01836673', 'n01837072', 'n01837526', 'n01838038', + 'n01838598', 'n01839086', 'n01839330', 'n01839598', 'n01839750', + 'n01839949', 'n01840120', 'n01840412', 'n01840775', 'n01841102', + 'n01841288', 'n01841441', 'n01841679', 'n01841943', 'n01842235', + 'n01842504', 'n01842788', 'n01843065', 'n01843383', 'n01843719', + 'n01844231', 'n01844551', 'n01844746', 'n01844917', 'n01845132', + 'n01845477', 'n01846331', 'n01847000', 'n01847089', 'n01847170', + 'n01847253', 'n01847407', 'n01847806', 'n01847978', 'n01848123', + 'n01848323', 'n01848453', 'n01848555', 'n01848648', 'n01848840', + 'n01848976', 'n01849157', 'n01849466', 'n01849676', 'n01849863', + 'n01850192', 'n01850373', 'n01850553', 'n01850873', 'n01851038', + 'n01851207', 'n01851375', 'n01851573', 'n01851731', 'n01851895', + 'n01852142', 'n01852329', 'n01852400', 'n01852671', 'n01852861', + 'n01853195', 'n01853498', 'n01853666', 'n01853870', 'n01854415', + 'n01854700', 'n01854838', 'n01855032', 'n01855188', 'n01855476', + 'n01855672', 'n01856072', 'n01856155', 'n01856380', 'n01856553', + 'n01856890', 'n01857079', 'n01857325', 'n01857512', 'n01857632', + 'n01857851', 'n01858281', 'n01858441', 'n01858780', 'n01858845', + 'n01858906', 'n01859190', 'n01859325', 'n01859496', 'n01859689', + 'n01859852', 'n01860002', 'n01860187', 'n01860497', 'n01860864', + 'n01861148', 'n01861330', 'n01861778', 'n01862399', 'n01871265', + 'n01871543', 'n01871875', 'n01872401', 'n01872772', 'n01873310', + 'n01874434', 'n01874928', 'n01875313', 'n01875610', 'n01876034', + 'n01876326', 'n01876667', 'n01877134', 'n01877606', 'n01877812', + 'n01878061', 'n01878335', 'n01878639', 'n01878929', 'n01879217', + 'n01879509', 'n01879837', 'n01880152', 'n01880473', 'n01880716', + 'n01880813', 'n01881171', 'n01881564', 'n01881857', 'n01882125', + 'n01882714', 'n01883070', 'n01883513', 'n01883920', 'n01884104', + 'n01884203', 'n01884476', 'n01884834', 'n01885158', 'n01885498', + 'n01886045', 'n01886756', 'n01887474', 'n01887623', 'n01887787', + 'n01887896', 'n01888045', 'n01888181', 'n01888264', 'n01888411', + 'n01889074', 'n01889520', 'n01889849', 'n01890144', 'n01890564', + 'n01890860', 'n01891013', 'n01891274', 'n01891633', 'n01892030', + 'n01892145', 'n01892385', 'n01892551', 'n01892744', 'n01893021', + 'n01893164', 'n01893399', 'n01893825', 'n01894207', 'n01894522', + 'n01894956', 'n01896844', 'n01897257', 'n01897426', 'n01897536', + 'n01897667', 'n01898593', 'n01899894', 'n01900150', 'n01903234', + 'n01903346', 'n01903498', 'n01904029', 'n01904806', 'n01904886', + 'n01905321', 'n01905661', 'n01906749', 'n01907287', 'n01907738', + 'n01908042', 'n01908958', 'n01909422', 'n01909788', 'n01909906', + 'n01910252', 'n01910747', 'n01911063', 'n01911403', 'n01911839', + 'n01912152', 'n01912454', 'n01912809', 'n01913166', 'n01913346', + 'n01913440', 'n01914163', 'n01914609', 'n01914830', 'n01915700', + 'n01915811', 'n01916187', 'n01916388', 'n01916481', 'n01916588', + 'n01916925', 'n01917289', 'n01917611', 'n01917882', 'n01918744', + 'n01919385', 'n01920051', 'n01920438', 'n01921059', 'n01922303', + 'n01922717', 'n01922948', 'n01923025', 'n01923404', 'n01923890', + 'n01924800', 'n01924916', 'n01925270', 'n01925695', 'n01925916', + 'n01926379', 'n01926689', 'n01927159', 'n01927456', 'n01927928', + 'n01928215', 'n01928517', 'n01928865', 'n01929186', 'n01930112', + 'n01930852', 
'n01931140', 'n01931520', 'n01931714', 'n01932151', + 'n01932936', 'n01933151', 'n01933478', 'n01933988', 'n01934440', + 'n01934844', 'n01935176', 'n01935395', 'n01936391', 'n01936671', + 'n01936858', 'n01937579', 'n01937909', 'n01938454', 'n01938735', + 'n01940736', 'n01941223', 'n01941340', 'n01942177', 'n01942869', + 'n01943087', 'n01943541', 'n01943899', 'n01944118', 'n01944390', + 'n01944812', 'n01944955', 'n01945143', 'n01945340', 'n01945685', + 'n01945845', 'n01946277', 'n01946630', 'n01946827', 'n01947139', + 'n01947396', 'n01947997', 'n01948446', 'n01948573', 'n01949085', + 'n01949499', 'n01949973', 'n01950731', 'n01951274', 'n01951613', + 'n01952029', 'n01952712', 'n01953361', 'n01953594', 'n01953762', + 'n01954516', 'n01955084', 'n01955933', 'n01956344', 'n01956481', + 'n01956764', 'n01957335', 'n01958038', 'n01958346', 'n01958435', + 'n01958531', 'n01959029', 'n01959492', 'n01959985', 'n01960177', + 'n01960459', 'n01961234', 'n01961600', 'n01961985', 'n01962506', + 'n01962788', 'n01963317', 'n01963479', 'n01963571', 'n01964049', + 'n01964271', 'n01964441', 'n01964957', 'n01965252', 'n01965529', + 'n01965889', 'n01966377', 'n01966586', 'n01967094', 'n01967308', + 'n01967963', 'n01968315', 'n01968897', 'n01969726', 'n01970164', + 'n01970667', 'n01971094', 'n01971280', 'n01971620', 'n01971850', + 'n01972131', 'n01972541', 'n01973148', 'n01974773', 'n01975687', + 'n01976146', 'n01976868', 'n01976957', 'n01977485', 'n01978010', + 'n01978136', 'n01978287', 'n01978455', 'n01978587', 'n01978930', + 'n01979269', 'n01979526', 'n01979874', 'n01980166', 'n01980655', + 'n01981276', 'n01981702', 'n01982068', 'n01982347', 'n01982650', + 'n01983048', 'n01983481', 'n01983674', 'n01983829', 'n01984245', + 'n01984695', 'n01985128', 'n01985493', 'n01985797', 'n01986214', + 'n01986806', 'n01987076', 'n01987545', 'n01987727', 'n01988203', + 'n01988701', 'n01988869', 'n01989516', 'n01989869', 'n01990007', + 'n01990516', 'n01990800', 'n01991028', 'n01991520', 'n01992262', + 'n01992423', 'n01992773', 'n01993525', 'n01993830', 'n01994910', + 'n01995514', 'n01995686', 'n01996280', 'n01996585', 'n01997119', + 'n01997825', 'n01998183', 'n01998741', 'n01999186', 'n01999767', + 'n02000954', 'n02002075', 'n02002556', 'n02002724', 'n02003037', + 'n02003204', 'n02003577', 'n02003839', 'n02004131', 'n02004492', + 'n02004855', 'n02005399', 'n02005790', 'n02006063', 'n02006364', + 'n02006656', 'n02006985', 'n02007284', 'n02007558', 'n02008041', + 'n02008497', 'n02008643', 'n02008796', 'n02009229', 'n02009380', + 'n02009508', 'n02009750', 'n02009912', 'n02010272', 'n02010453', + 'n02010728', 'n02011016', 'n02011281', 'n02011460', 'n02011805', + 'n02011943', 'n02012185', 'n02012849', 'n02013177', 'n02013567', + 'n02013706', 'n02014237', 'n02014524', 'n02014941', 'n02015357', + 'n02015554', 'n02015797', 'n02016066', 'n02016358', 'n02016659', + 'n02016816', 'n02016956', 'n02017213', 'n02017475', 'n02017725', + 'n02018027', 'n02018207', 'n02018368', 'n02018795', 'n02019190', + 'n02019438', 'n02019929', 'n02020219', 'n02020578', 'n02021050', + 'n02021281', 'n02021795', 'n02022684', 'n02023341', 'n02023855', + 'n02023992', 'n02024185', 'n02024479', 'n02024763', 'n02025043', + 'n02025239', 'n02025389', 'n02026059', 'n02026629', 'n02026948', + 'n02027075', 'n02027357', 'n02027492', 'n02027897', 'n02028035', + 'n02028175', 'n02028342', 'n02028451', 'n02028727', 'n02028900', + 'n02029087', 'n02029378', 'n02029706', 'n02030035', 'n02030224', + 'n02030287', 'n02030568', 'n02030837', 'n02030996', 'n02031298', + 'n02031585', 
'n02031934', 'n02032222', 'n02032355', 'n02032480', + 'n02032769', 'n02033041', 'n02033208', 'n02033324', 'n02033561', + 'n02033779', 'n02033882', 'n02034129', 'n02034295', 'n02034661', + 'n02034971', 'n02035210', 'n02035402', 'n02035656', 'n02036053', + 'n02036228', 'n02036711', 'n02037110', 'n02037464', 'n02037869', + 'n02038141', 'n02038466', 'n02038993', 'n02039171', 'n02039497', + 'n02039780', 'n02040266', 'n02040505', 'n02041085', 'n02041246', + 'n02041678', 'n02041875', 'n02042046', 'n02042180', 'n02042472', + 'n02042759', 'n02043063', 'n02043333', 'n02043808', 'n02044178', + 'n02044517', 'n02044778', 'n02044908', 'n02045369', 'n02045596', + 'n02045864', 'n02046171', 'n02046759', 'n02046939', 'n02047045', + 'n02047260', 'n02047411', 'n02047517', 'n02047614', 'n02047975', + 'n02048115', 'n02048353', 'n02048698', 'n02049088', 'n02049532', + 'n02050004', 'n02050313', 'n02050442', 'n02050586', 'n02050809', + 'n02051059', 'n02051474', 'n02051845', 'n02052204', 'n02052365', + 'n02052775', 'n02053083', 'n02053425', 'n02053584', 'n02054036', + 'n02054502', 'n02054711', 'n02055107', 'n02055658', 'n02055803', + 'n02056228', 'n02056570', 'n02056728', 'n02057035', 'n02057330', + 'n02057731', 'n02057898', 'n02058221', 'n02058594', 'n02058747', + 'n02059162', 'n02059541', 'n02059852', 'n02060133', 'n02060411', + 'n02060569', 'n02060889', 'n02061217', 'n02061560', 'n02061853', + 'n02062017', 'n02062430', 'n02062744', 'n02063224', 'n02063662', + 'n02064000', 'n02064338', 'n02064816', 'n02065026', 'n02065263', + 'n02065407', 'n02065726', 'n02066245', 'n02066707', 'n02067240', + 'n02067603', 'n02067768', 'n02068206', 'n02068541', 'n02068974', + 'n02069412', 'n02069701', 'n02069974', 'n02070174', 'n02070430', + 'n02070624', 'n02070776', 'n02071028', 'n02071294', 'n02071636', + 'n02072040', 'n02072493', 'n02072798', 'n02073250', 'n02073831', + 'n02074367', 'n02074726', 'n02075296', 'n02075612', 'n02075927', + 'n02076196', 'n02076402', 'n02076779', 'n02077152', 'n02077384', + 'n02077658', 'n02077787', 'n02077923', 'n02078292', 'n02078574', + 'n02078738', 'n02079005', 'n02079389', 'n02079851', 'n02080146', + 'n02080415', 'n02080713', 'n02081060', 'n02081571', 'n02081798', + 'n02081927', 'n02082056', 'n02082190', 'n02082791', 'n02083346', + 'n02083672', 'n02083780', 'n02084071', 'n02084732', 'n02084861', + 'n02085019', 'n02085118', 'n02085272', 'n02085374', 'n02085620', + 'n02085782', 'n02085936', 'n02086079', 'n02086240', 'n02086346', + 'n02086478', 'n02086646', 'n02086753', 'n02086910', 'n02087046', + 'n02087122', 'n02087314', 'n02087394', 'n02087551', 'n02088094', + 'n02088238', 'n02088364', 'n02088466', 'n02088632', 'n02088745', + 'n02088839', 'n02088992', 'n02089078', 'n02089232', 'n02089468', + 'n02089555', 'n02089725', 'n02089867', 'n02089973', 'n02090129', + 'n02090253', 'n02090379', 'n02090475', 'n02090622', 'n02090721', + 'n02090827', 'n02091032', 'n02091134', 'n02091244', 'n02091467', + 'n02091635', 'n02091831', 'n02092002', 'n02092173', 'n02092339', + 'n02092468', 'n02093056', 'n02093256', 'n02093428', 'n02093647', + 'n02093754', 'n02093859', 'n02093991', 'n02094114', 'n02094258', + 'n02094433', 'n02094562', 'n02094721', 'n02094931', 'n02095050', + 'n02095212', 'n02095314', 'n02095412', 'n02095570', 'n02095727', + 'n02095889', 'n02096051', 'n02096177', 'n02096294', 'n02096437', + 'n02096585', 'n02096756', 'n02097047', 'n02097130', 'n02097209', + 'n02097298', 'n02097474', 'n02097658', 'n02097786', 'n02097967', + 'n02098105', 'n02098286', 'n02098413', 'n02098550', 'n02098806', + 'n02098906', 
'n02099029', 'n02099267', 'n02099429', 'n02099601', + 'n02099712', 'n02099849', 'n02099997', 'n02100236', 'n02100399', + 'n02100583', 'n02100735', 'n02100877', 'n02101006', 'n02101108', + 'n02101388', 'n02101556', 'n02101670', 'n02101861', 'n02102040', + 'n02102177', 'n02102318', 'n02102480', 'n02102605', 'n02102806', + 'n02102973', 'n02103181', 'n02103406', 'n02103841', 'n02104029', + 'n02104184', 'n02104280', 'n02104365', 'n02104523', 'n02104882', + 'n02105056', 'n02105162', 'n02105251', 'n02105412', 'n02105505', + 'n02105641', 'n02105855', 'n02106030', 'n02106166', 'n02106382', + 'n02106550', 'n02106662', 'n02106854', 'n02106966', 'n02107142', + 'n02107312', 'n02107420', 'n02107574', 'n02107683', 'n02107908', + 'n02108000', 'n02108089', 'n02108254', 'n02108422', 'n02108551', + 'n02108672', 'n02108915', 'n02109047', 'n02109150', 'n02109256', + 'n02109391', 'n02109525', 'n02109687', 'n02109811', 'n02109961', + 'n02110063', 'n02110185', 'n02110341', 'n02110532', 'n02110627', + 'n02110806', 'n02110958', 'n02111129', 'n02111277', 'n02111500', + 'n02111626', 'n02111889', 'n02112018', 'n02112137', 'n02112350', + 'n02112497', 'n02112706', 'n02112826', 'n02113023', 'n02113186', + 'n02113335', 'n02113624', 'n02113712', 'n02113799', 'n02113892', + 'n02113978', 'n02114100', 'n02114367', 'n02114548', 'n02114712', + 'n02114855', 'n02115012', 'n02115096', 'n02115335', 'n02115641', + 'n02115913', 'n02116185', 'n02116450', 'n02116738', 'n02117135', + 'n02117512', 'n02117646', 'n02117900', 'n02118176', 'n02118333', + 'n02118643', 'n02118707', 'n02119022', 'n02119247', 'n02119359', + 'n02119477', 'n02119634', 'n02119789', 'n02120079', 'n02120278', + 'n02120505', 'n02120997', 'n02121620', 'n02121808', 'n02122298', + 'n02122430', 'n02122510', 'n02122580', 'n02122725', 'n02122810', + 'n02122878', 'n02122948', 'n02123045', 'n02123159', 'n02123242', + 'n02123394', 'n02123478', 'n02123597', 'n02123785', 'n02123917', + 'n02124075', 'n02124157', 'n02124313', 'n02124484', 'n02124623', + 'n02125010', 'n02125081', 'n02125311', 'n02125494', 'n02125689', + 'n02125872', 'n02126028', 'n02126139', 'n02126317', 'n02126640', + 'n02126787', 'n02127052', 'n02127292', 'n02127381', 'n02127482', + 'n02127586', 'n02127678', 'n02127808', 'n02128385', 'n02128598', + 'n02128669', 'n02128757', 'n02128925', 'n02129165', 'n02129463', + 'n02129530', 'n02129604', 'n02129837', 'n02129923', 'n02129991', + 'n02130086', 'n02130308', 'n02130545', 'n02130925', 'n02131653', + 'n02132136', 'n02132320', 'n02132466', 'n02132580', 'n02132788', + 'n02133161', 'n02133400', 'n02133704', 'n02134084', 'n02134418', + 'n02134971', 'n02135220', 'n02135610', 'n02135844', 'n02136103', + 'n02136285', 'n02136452', 'n02136794', 'n02137015', 'n02137302', + 'n02137549', 'n02137722', 'n02137888', 'n02138169', 'n02138441', + 'n02138647', 'n02138777', 'n02139199', 'n02139671', 'n02140049', + 'n02140179', 'n02140268', 'n02140491', 'n02140858', 'n02141306', + 'n02141611', 'n02141713', 'n02142407', 'n02142734', 'n02142898', + 'n02143142', 'n02143439', 'n02143891', 'n02144251', 'n02144593', + 'n02144936', 'n02145424', 'n02145910', 'n02146201', 'n02146371', + 'n02146700', 'n02146879', 'n02147173', 'n02147328', 'n02147591', + 'n02147947', 'n02148088', 'n02148512', 'n02148835', 'n02148991', + 'n02149420', 'n02149653', 'n02149861', 'n02150134', 'n02150482', + 'n02150885', 'n02151230', 'n02152740', 'n02152881', 'n02152991', + 'n02153109', 'n02153203', 'n02153809', 'n02156732', 'n02156871', + 'n02157206', 'n02157285', 'n02159955', 'n02160947', 'n02161225', + 'n02161338', 
'n02161457', 'n02161588', 'n02162561', 'n02163008', + 'n02163297', 'n02164464', 'n02165105', 'n02165456', 'n02165877', + 'n02166229', 'n02166567', 'n02166826', 'n02167151', 'n02167505', + 'n02167820', 'n02167944', 'n02168245', 'n02168427', 'n02168699', + 'n02169023', 'n02169218', 'n02169497', 'n02169705', 'n02169974', + 'n02170400', 'n02170599', 'n02170738', 'n02170993', 'n02171164', + 'n02171453', 'n02171869', 'n02172182', 'n02172518', 'n02172678', + 'n02172761', 'n02172870', 'n02173113', 'n02173373', 'n02173784', + 'n02174001', 'n02174355', 'n02174659', 'n02175014', 'n02175569', + 'n02175916', 'n02176261', 'n02176439', 'n02176747', 'n02176916', + 'n02177196', 'n02177506', 'n02177775', 'n02177972', 'n02178411', + 'n02178717', 'n02179012', 'n02179192', 'n02179340', 'n02179891', + 'n02180233', 'n02180427', 'n02180875', 'n02181235', 'n02181477', + 'n02181724', 'n02182045', 'n02182355', 'n02182642', 'n02182930', + 'n02183096', 'n02183507', 'n02183857', 'n02184473', 'n02184589', + 'n02184720', 'n02185167', 'n02185481', 'n02186153', 'n02186717', + 'n02187150', 'n02187279', 'n02187554', 'n02187900', 'n02188699', + 'n02189363', 'n02189670', 'n02190166', 'n02190790', 'n02191273', + 'n02191773', 'n02191979', 'n02192252', 'n02192513', 'n02192814', + 'n02193009', 'n02193163', 'n02194249', 'n02194750', 'n02195091', + 'n02195526', 'n02195819', 'n02196119', 'n02196344', 'n02196896', + 'n02197185', 'n02197689', 'n02197877', 'n02198129', 'n02198532', + 'n02198859', 'n02199170', 'n02199502', 'n02200198', 'n02200509', + 'n02200630', 'n02200850', 'n02201000', 'n02201497', 'n02201626', + 'n02202006', 'n02202124', 'n02202287', 'n02202678', 'n02203152', + 'n02203592', 'n02203978', 'n02204249', 'n02204722', 'n02204907', + 'n02205219', 'n02205673', 'n02206270', 'n02206856', 'n02207179', + 'n02207345', 'n02207449', 'n02207647', 'n02207805', 'n02208280', + 'n02208498', 'n02208848', 'n02208979', 'n02209111', 'n02209354', + 'n02209624', 'n02209964', 'n02210427', 'n02210921', 'n02211444', + 'n02211627', 'n02211896', 'n02212062', 'n02212602', 'n02212958', + 'n02213107', 'n02213239', 'n02213543', 'n02213663', 'n02213788', + 'n02214096', 'n02214341', 'n02214499', 'n02214660', 'n02214773', + 'n02215161', 'n02215621', 'n02215770', 'n02216211', 'n02216365', + 'n02216740', 'n02217563', 'n02217839', 'n02218134', 'n02218371', + 'n02218713', 'n02219015', 'n02219486', 'n02220055', 'n02220225', + 'n02220518', 'n02220804', 'n02221083', 'n02221414', 'n02221571', + 'n02221715', 'n02221820', 'n02222035', 'n02222321', 'n02222582', + 'n02223266', 'n02223520', 'n02224023', 'n02224713', 'n02225081', + 'n02225798', 'n02226183', 'n02226429', 'n02226821', 'n02226970', + 'n02227247', 'n02227604', 'n02227966', 'n02228341', 'n02228697', + 'n02229156', 'n02229544', 'n02229765', 'n02230023', 'n02230187', + 'n02230480', 'n02230634', 'n02231052', 'n02231487', 'n02231803', + 'n02232223', 'n02233338', 'n02233943', 'n02234355', 'n02234570', + 'n02234848', 'n02235205', 'n02236044', 'n02236241', 'n02236355', + 'n02236896', 'n02237424', 'n02237581', 'n02237868', 'n02238235', + 'n02238358', 'n02238594', 'n02238887', 'n02239192', 'n02239528', + 'n02239774', 'n02240068', 'n02240517', 'n02241008', 'n02241426', + 'n02241569', 'n02241799', 'n02242137', 'n02242455', 'n02243209', + 'n02243562', 'n02243878', 'n02244173', 'n02244515', 'n02244797', + 'n02245111', 'n02245443', 'n02246011', 'n02246628', 'n02246941', + 'n02247216', 'n02247511', 'n02247655', 'n02248062', 'n02248368', + 'n02248510', 'n02248887', 'n02249134', 'n02249515', 'n02249809', + 'n02250280', 
'n02250822', 'n02251067', 'n02251233', 'n02251593', + 'n02251775', 'n02252226', 'n02252799', 'n02252972', 'n02253127', + 'n02253264', 'n02253494', 'n02253715', 'n02253913', 'n02254246', + 'n02254697', 'n02254901', 'n02255023', 'n02255391', 'n02256172', + 'n02256656', 'n02257003', 'n02257284', 'n02257715', 'n02257985', + 'n02258198', 'n02258508', 'n02258629', 'n02259212', 'n02259377', + 'n02259708', 'n02259987', 'n02260421', 'n02260863', 'n02261063', + 'n02261419', 'n02261757', 'n02262178', 'n02262449', 'n02262803', + 'n02263378', 'n02264021', 'n02264232', 'n02264363', 'n02264591', + 'n02264885', 'n02265330', 'n02266050', 'n02266269', 'n02266421', + 'n02266864', 'n02267208', 'n02267483', 'n02268148', 'n02268443', + 'n02268853', 'n02269196', 'n02269340', 'n02269522', 'n02269657', + 'n02270011', 'n02270200', 'n02270623', 'n02270945', 'n02271222', + 'n02271570', 'n02271897', 'n02272286', 'n02272552', 'n02272871', + 'n02273392', 'n02274024', 'n02274259', 'n02274822', 'n02275560', + 'n02275773', 'n02276078', 'n02276258', 'n02276355', 'n02276749', + 'n02276902', 'n02277094', 'n02277268', 'n02277422', 'n02277742', + 'n02278024', 'n02278210', 'n02278463', 'n02278839', 'n02278980', + 'n02279257', 'n02279637', 'n02279972', 'n02280458', 'n02280649', + 'n02281015', 'n02281136', 'n02281267', 'n02281406', 'n02281787', + 'n02282257', 'n02282385', 'n02282553', 'n02282903', 'n02283077', + 'n02283201', 'n02283617', 'n02283951', 'n02284224', 'n02284611', + 'n02284884', 'n02285179', 'n02285548', 'n02285801', 'n02286089', + 'n02286425', 'n02286654', 'n02287004', 'n02287352', 'n02287622', + 'n02287799', 'n02287987', 'n02288122', 'n02288268', 'n02288789', + 'n02289307', 'n02289610', 'n02289988', 'n02290340', 'n02290664', + 'n02290870', 'n02291220', 'n02291572', 'n02291748', 'n02292085', + 'n02292401', 'n02292692', 'n02293352', 'n02293868', 'n02294097', + 'n02294407', 'n02294577', 'n02295064', 'n02295390', 'n02295870', + 'n02296021', 'n02296276', 'n02296612', 'n02296912', 'n02297294', + 'n02297442', 'n02297819', 'n02297938', 'n02298095', 'n02298218', + 'n02298541', 'n02299039', 'n02299157', 'n02299378', 'n02299505', + 'n02299846', 'n02300173', 'n02300554', 'n02300797', 'n02301452', + 'n02301935', 'n02302244', 'n02302459', 'n02302620', 'n02302969', + 'n02303284', 'n02303585', 'n02303777', 'n02304036', 'n02304432', + 'n02304657', 'n02304797', 'n02305085', 'n02305407', 'n02305636', + 'n02305929', 'n02306433', 'n02306825', 'n02307176', 'n02307325', + 'n02307515', 'n02307681', 'n02307910', 'n02308033', 'n02308139', + 'n02308471', 'n02308618', 'n02308735', 'n02309120', 'n02309242', + 'n02309337', 'n02309841', 'n02310000', 'n02310149', 'n02310334', + 'n02310585', 'n02310717', 'n02310941', 'n02311060', 'n02311617', + 'n02311748', 'n02312006', 'n02312175', 'n02312325', 'n02312427', + 'n02312640', 'n02312912', 'n02313008', 'n02313360', 'n02313709', + 'n02315487', 'n02315821', 'n02316707', 'n02317335', 'n02317781', + 'n02318167', 'n02318687', 'n02319095', 'n02319308', 'n02319555', + 'n02319829', 'n02320127', 'n02320465', 'n02321170', 'n02321529', + 'n02322047', 'n02322992', 'n02323449', 'n02323902', 'n02324045', + 'n02324431', 'n02324514', 'n02324587', 'n02324850', 'n02325366', + 'n02325722', 'n02325884', 'n02326074', 'n02326432', 'n02326763', + 'n02326862', 'n02327028', 'n02327175', 'n02327435', 'n02327656', + 'n02327842', 'n02328009', 'n02328150', 'n02328429', 'n02328820', + 'n02328942', 'n02329401', 'n02330245', 'n02331046', 'n02331309', + 'n02331842', 'n02332156', 'n02332447', 'n02332755', 'n02332954', + 'n02333190', 
'n02333546', 'n02333733', 'n02333819', 'n02333909', + 'n02334201', 'n02334460', 'n02334728', 'n02335127', 'n02335231', + 'n02336011', 'n02336275', 'n02336641', 'n02336826', 'n02337001', + 'n02337171', 'n02337332', 'n02337598', 'n02337902', 'n02338145', + 'n02338449', 'n02338722', 'n02338901', 'n02339282', 'n02339376', + 'n02339922', 'n02340186', 'n02340358', 'n02340640', 'n02340930', + 'n02341288', 'n02341475', 'n02341616', 'n02341974', 'n02342250', + 'n02342534', 'n02342885', 'n02343058', 'n02343320', 'n02343772', + 'n02344175', 'n02344270', 'n02344408', 'n02344528', 'n02344918', + 'n02345078', 'n02345340', 'n02345600', 'n02345774', 'n02345997', + 'n02346170', 'n02346627', 'n02346998', 'n02347274', 'n02347573', + 'n02347744', 'n02348173', 'n02348788', 'n02349205', 'n02349390', + 'n02349557', 'n02349847', 'n02350105', 'n02350357', 'n02350670', + 'n02350989', 'n02351343', 'n02351870', 'n02352002', 'n02352290', + 'n02352591', 'n02352932', 'n02353172', 'n02353411', 'n02353861', + 'n02354162', 'n02354320', 'n02354621', 'n02354781', 'n02355227', + 'n02355477', 'n02356381', 'n02356612', 'n02356798', 'n02356977', + 'n02357111', 'n02357401', 'n02357585', 'n02357911', 'n02358091', + 'n02358390', 'n02358584', 'n02358712', 'n02358890', 'n02359047', + 'n02359324', 'n02359556', 'n02359667', 'n02359915', 'n02360282', + 'n02360480', 'n02360781', 'n02360933', 'n02361090', 'n02361337', + 'n02361587', 'n02361706', 'n02361850', 'n02362194', 'n02363005', + 'n02363245', 'n02363351', 'n02363996', 'n02364520', 'n02364673', + 'n02364840', 'n02365108', 'n02365480', 'n02366002', 'n02366301', + 'n02366579', 'n02366959', 'n02367492', 'n02367812', 'n02368116', + 'n02368399', 'n02368821', 'n02369293', 'n02369555', 'n02369680', + 'n02369935', 'n02370137', 'n02370525', 'n02370806', 'n02371344', + 'n02372140', 'n02372584', 'n02372952', 'n02373336', 'n02374149', + 'n02374451', 'n02375302', 'n02375438', 'n02375757', 'n02375862', + 'n02376542', 'n02376679', 'n02376791', 'n02376918', 'n02377063', + 'n02377181', 'n02377291', 'n02377388', 'n02377480', 'n02377603', + 'n02377703', 'n02378149', 'n02378299', 'n02378415', 'n02378541', + 'n02378625', 'n02378755', 'n02378870', 'n02378969', 'n02379081', + 'n02379183', 'n02379329', 'n02379430', 'n02379630', 'n02379743', + 'n02379908', 'n02380052', 'n02380335', 'n02380464', 'n02380583', + 'n02380745', 'n02380875', 'n02381004', 'n02381119', 'n02381261', + 'n02381364', 'n02381460', 'n02381609', 'n02381831', 'n02382039', + 'n02382132', 'n02382204', 'n02382338', 'n02382437', 'n02382635', + 'n02382750', 'n02382850', 'n02382948', 'n02383231', 'n02384741', + 'n02384858', 'n02385002', 'n02385098', 'n02385214', 'n02385580', + 'n02385676', 'n02385776', 'n02385898', 'n02386014', 'n02386141', + 'n02386224', 'n02386310', 'n02386496', 'n02386746', 'n02386853', + 'n02386968', 'n02387093', 'n02387254', 'n02387346', 'n02387452', + 'n02387722', 'n02387887', 'n02387983', 'n02388143', 'n02388276', + 'n02388453', 'n02388588', 'n02388735', 'n02388832', 'n02388917', + 'n02389026', 'n02389128', 'n02389261', 'n02389346', 'n02389559', + 'n02389779', 'n02389865', 'n02389943', 'n02390015', 'n02390101', + 'n02390258', 'n02390454', 'n02390640', 'n02390738', 'n02390834', + 'n02390938', 'n02391049', 'n02391234', 'n02391373', 'n02391508', + 'n02391617', 'n02391994', 'n02392434', 'n02392555', 'n02392824', + 'n02393161', 'n02393580', 'n02393807', 'n02393940', 'n02394477', + 'n02395003', 'n02395406', 'n02395694', 'n02395855', 'n02395931', + 'n02396014', 'n02396088', 'n02396157', 'n02396427', 'n02396796', + 'n02397096', 
'n02397529', 'n02397744', 'n02397987', 'n02398521', + 'n02399000', 'n02401031', 'n02402010', 'n02402175', 'n02402425', + 'n02403003', 'n02403153', 'n02403231', 'n02403325', 'n02403454', + 'n02403740', 'n02403820', 'n02403920', 'n02404028', 'n02404186', + 'n02404432', 'n02404573', 'n02404906', 'n02405101', 'n02405302', + 'n02405440', 'n02405577', 'n02405692', 'n02405799', 'n02405929', + 'n02406046', 'n02406174', 'n02406432', 'n02406533', 'n02406647', + 'n02406749', 'n02406859', 'n02406952', 'n02407071', 'n02407172', + 'n02407276', 'n02407390', 'n02407521', 'n02407625', 'n02407763', + 'n02407959', 'n02408429', 'n02408660', 'n02408817', 'n02409038', + 'n02409202', 'n02409508', 'n02409870', 'n02410011', 'n02410141', + 'n02410509', 'n02410702', 'n02410900', 'n02411206', 'n02411705', + 'n02411999', 'n02412080', 'n02412210', 'n02412440', 'n02412629', + 'n02412700', 'n02412787', 'n02412909', 'n02412977', 'n02413050', + 'n02413131', 'n02413484', 'n02413593', 'n02413717', 'n02413824', + 'n02413917', 'n02414043', 'n02414209', 'n02414290', 'n02414442', + 'n02414578', 'n02414763', 'n02414904', 'n02415130', 'n02415253', + 'n02415435', 'n02415577', 'n02415829', 'n02416104', 'n02416519', + 'n02416820', 'n02416880', 'n02416964', 'n02417070', 'n02417242', + 'n02417387', 'n02417534', 'n02417663', 'n02417785', 'n02417914', + 'n02418064', 'n02418465', 'n02418770', 'n02419056', 'n02419336', + 'n02419634', 'n02419796', 'n02420509', 'n02420828', 'n02421136', + 'n02421449', 'n02421792', 'n02422106', 'n02422391', 'n02422699', + 'n02423022', 'n02423218', 'n02423362', 'n02423589', 'n02424085', + 'n02424305', 'n02424486', 'n02424589', 'n02424695', 'n02424909', + 'n02425086', 'n02425228', 'n02425532', 'n02425887', 'n02426176', + 'n02426481', 'n02426813', 'n02427032', 'n02427183', 'n02427470', + 'n02427576', 'n02427724', 'n02428089', 'n02428349', 'n02428508', + 'n02428842', 'n02429456', 'n02430045', 'n02430559', 'n02430643', + 'n02430748', 'n02430830', 'n02431122', 'n02431337', 'n02431441', + 'n02431542', 'n02431628', 'n02431785', 'n02431976', 'n02432291', + 'n02432511', 'n02432704', 'n02432983', 'n02433318', 'n02433546', + 'n02433729', 'n02433925', 'n02434190', 'n02434415', 'n02434712', + 'n02434954', 'n02435216', 'n02435517', 'n02435853', 'n02436224', + 'n02436353', 'n02436645', 'n02437136', 'n02437312', 'n02437482', + 'n02437616', 'n02437971', 'n02438173', 'n02438272', 'n02438580', + 'n02439033', 'n02439398', 'n02441326', 'n02441942', 'n02442172', + 'n02442336', 'n02442446', 'n02442572', 'n02442668', 'n02442845', + 'n02443015', 'n02443114', 'n02443346', 'n02443484', 'n02443808', + 'n02443959', 'n02444251', 'n02444819', 'n02445004', 'n02445171', + 'n02445394', 'n02445715', 'n02446206', 'n02446352', 'n02446645', + 'n02447021', 'n02447366', 'n02447762', 'n02448060', 'n02448318', + 'n02448633', 'n02448885', 'n02449183', 'n02449350', 'n02449699', + 'n02450034', 'n02450295', 'n02450426', 'n02450561', 'n02450677', + 'n02450829', 'n02451125', 'n02451415', 'n02451575', 'n02453108', + 'n02453611', 'n02454379', 'n02454794', 'n02455135', 'n02455428', + 'n02455720', 'n02456008', 'n02456275', 'n02456962', 'n02457408', + 'n02457945', 'n02458135', 'n02458517', 'n02459190', 'n02460009', + 'n02460451', 'n02460817', 'n02461128', 'n02461830', 'n02462213', + 'n02469248', 'n02469472', 'n02469914', 'n02470238', 'n02470325', + 'n02470709', 'n02470899', 'n02471300', 'n02471762', 'n02472293', + 'n02472987', 'n02473307', 'n02473554', 'n02473720', 'n02473857', + 'n02473983', 'n02474110', 'n02474282', 'n02474605', 'n02474777', + 'n02475078', 
'n02475358', 'n02475669', 'n02476219', 'n02476567', + 'n02476870', 'n02477028', 'n02477187', 'n02477329', 'n02477516', + 'n02477782', 'n02478239', 'n02478875', 'n02479332', 'n02480153', + 'n02480495', 'n02480855', 'n02481103', 'n02481235', 'n02481366', + 'n02481500', 'n02481823', 'n02482060', 'n02482286', 'n02482474', + 'n02482650', 'n02483092', 'n02483362', 'n02483708', 'n02484322', + 'n02484473', 'n02484975', 'n02485225', 'n02485371', 'n02485536', + 'n02485688', 'n02485988', 'n02486261', 'n02486410', 'n02486657', + 'n02486908', 'n02487079', 'n02487347', 'n02487547', 'n02487675', + 'n02487847', 'n02488003', 'n02488291', 'n02488415', 'n02488702', + 'n02488894', 'n02489166', 'n02489589', 'n02490219', 'n02490597', + 'n02490811', 'n02491107', 'n02491329', 'n02491474', 'n02492035', + 'n02492356', 'n02492660', 'n02492948', 'n02493224', 'n02493509', + 'n02493793', 'n02494079', 'n02494383', 'n02495242', 'n02496052', + 'n02496913', 'n02497673', 'n02498153', 'n02498743', 'n02499022', + 'n02499316', 'n02499568', 'n02499808', 'n02500267', 'n02500596', + 'n02501583', 'n02501923', 'n02502006', 'n02502514', 'n02502807', + 'n02503127', 'n02503517', 'n02503756', 'n02504013', 'n02504458', + 'n02504770', 'n02505063', 'n02505238', 'n02505485', 'n02505998', + 'n02506947', 'n02507148', 'n02507649', 'n02508021', 'n02508213', + 'n02508346', 'n02508742', 'n02509197', 'n02509515', 'n02509815', + 'n02510455', 'n02511730', 'n02512053', 'n02512752', 'n02512830', + 'n02512938', 'n02513248', 'n02513355', 'n02513560', 'n02513727', + 'n02513805', 'n02513939', 'n02514041', 'n02515214', 'n02515713', + 'n02516188', 'n02516776', 'n02517442', 'n02517938', 'n02518324', + 'n02518622', 'n02519148', 'n02519340', 'n02519472', 'n02519686', + 'n02519862', 'n02520147', 'n02520525', 'n02520810', 'n02521646', + 'n02522399', 'n02522637', 'n02522722', 'n02522866', 'n02523110', + 'n02523427', 'n02523877', 'n02524202', 'n02524524', 'n02524659', + 'n02524928', 'n02525382', 'n02525703', 'n02526121', 'n02526425', + 'n02526818', 'n02527057', 'n02527271', 'n02527622', 'n02528163', + 'n02529293', 'n02529772', 'n02530052', 'n02530188', 'n02530421', + 'n02530637', 'n02530831', 'n02530999', 'n02531114', 'n02531625', + 'n02532028', 'n02532272', 'n02532451', 'n02532602', 'n02532786', + 'n02532918', 'n02533209', 'n02533545', 'n02533834', 'n02534165', + 'n02534559', 'n02534734', 'n02535080', 'n02535163', 'n02535258', + 'n02535537', 'n02535759', 'n02536165', 'n02536456', 'n02536864', + 'n02537085', 'n02537319', 'n02537525', 'n02537716', 'n02538010', + 'n02538216', 'n02538406', 'n02538562', 'n02538985', 'n02539424', + 'n02539573', 'n02539894', 'n02540412', 'n02540983', 'n02541257', + 'n02541687', 'n02542017', 'n02542432', 'n02542958', 'n02543255', + 'n02543565', 'n02544274', 'n02545841', 'n02546028', 'n02546331', + 'n02546627', 'n02547014', 'n02547733', 'n02548247', 'n02548689', + 'n02548884', 'n02549248', 'n02549376', 'n02549989', 'n02550203', + 'n02550460', 'n02550655', 'n02551134', 'n02551668', 'n02552171', + 'n02553028', 'n02554730', 'n02555863', 'n02556373', 'n02556846', + 'n02557182', 'n02557318', 'n02557591', 'n02557749', 'n02557909', + 'n02558206', 'n02558860', 'n02559144', 'n02559383', 'n02559862', + 'n02560110', 'n02561108', 'n02561381', 'n02561514', 'n02561661', + 'n02561803', 'n02561937', 'n02562315', 'n02562796', 'n02562971', + 'n02563079', 'n02563182', 'n02563648', 'n02563792', 'n02563949', + 'n02564270', 'n02564403', 'n02564720', 'n02564935', 'n02565072', + 'n02565324', 'n02565573', 'n02566109', 'n02566489', 'n02566665', + 'n02567334', 
'n02567633', 'n02568087', 'n02568447', 'n02568959', + 'n02569484', 'n02569631', 'n02569905', 'n02570164', 'n02570484', + 'n02570838', 'n02571167', 'n02571652', 'n02571810', 'n02572196', + 'n02572484', 'n02573249', 'n02573704', 'n02574271', 'n02574910', + 'n02575325', 'n02575590', 'n02576223', 'n02576575', 'n02576906', + 'n02577041', 'n02577164', 'n02577403', 'n02577662', 'n02577952', + 'n02578233', 'n02578454', 'n02578771', 'n02578928', 'n02579303', + 'n02579557', 'n02579762', 'n02579928', 'n02580336', 'n02580679', + 'n02580830', 'n02581108', 'n02581482', 'n02581642', 'n02581957', + 'n02582220', 'n02582349', 'n02582721', 'n02583567', 'n02583890', + 'n02584145', 'n02584449', 'n02585872', 'n02586238', 'n02586543', + 'n02587051', 'n02587300', 'n02587479', 'n02587618', 'n02587877', + 'n02588286', 'n02588794', 'n02588945', 'n02589062', 'n02589196', + 'n02589316', 'n02589623', 'n02589796', 'n02590094', 'n02590495', + 'n02590702', 'n02590987', 'n02591330', 'n02591613', 'n02591911', + 'n02592055', 'n02592371', 'n02592734', 'n02593019', 'n02593191', + 'n02593453', 'n02593679', 'n02594250', 'n02594942', 'n02595056', + 'n02595339', 'n02595702', 'n02596067', 'n02596252', 'n02596381', + 'n02596720', 'n02597004', 'n02597367', 'n02597608', 'n02597818', + 'n02597972', 'n02598134', 'n02598573', 'n02598878', 'n02599052', + 'n02599347', 'n02599557', 'n02599958', 'n02600298', 'n02600503', + 'n02600798', 'n02601344', 'n02601767', 'n02601921', 'n02602059', + 'n02602405', 'n02602760', 'n02603317', 'n02603540', 'n02603862', + 'n02604157', 'n02604480', 'n02604954', 'n02605316', 'n02605703', + 'n02605936', 'n02606052', 'n02606384', 'n02606751', 'n02607072', + 'n02607201', 'n02607470', 'n02607862', 'n02608284', 'n02608547', + 'n02608860', 'n02608996', 'n02609302', 'n02609823', 'n02610066', + 'n02610373', 'n02610664', 'n02610980', 'n02611561', 'n02611898', + 'n02612167', 'n02613181', 'n02613572', 'n02613820', 'n02614140', + 'n02614482', 'n02614653', 'n02614978', 'n02615298', 'n02616128', + 'n02616397', 'n02616851', 'n02617537', 'n02618094', 'n02618513', + 'n02618827', 'n02619165', 'n02619550', 'n02619861', 'n02620167', + 'n02620578', 'n02621258', 'n02621908', 'n02622249', 'n02622547', + 'n02622712', 'n02622955', 'n02623445', 'n02624167', 'n02624551', + 'n02624807', 'n02624987', 'n02625258', 'n02625612', 'n02625851', + 'n02626089', 'n02626265', 'n02626471', 'n02626762', 'n02627037', + 'n02627292', 'n02627532', 'n02627835', 'n02628062', 'n02628259', + 'n02628600', 'n02629230', 'n02629716', 'n02630281', 'n02630615', + 'n02630739', 'n02631041', 'n02631330', 'n02631475', 'n02631628', + 'n02631775', 'n02632039', 'n02632494', 'n02633422', 'n02633677', + 'n02633977', 'n02634545', 'n02635154', 'n02635580', 'n02636170', + 'n02636405', 'n02636550', 'n02636854', 'n02637179', 'n02637475', + 'n02637977', 'n02638596', 'n02639087', 'n02639605', 'n02639922', + 'n02640242', 'n02640626', 'n02640857', 'n02641379', 'n02642107', + 'n02642644', 'n02643112', 'n02643316', 'n02643566', 'n02643836', + 'n02644113', 'n02644360', 'n02644501', 'n02644665', 'n02644817', + 'n02645538', 'n02645691', 'n02645953', 'n02646667', 'n02646892', + 'n02648035', 'n02648625', 'n02648916', 'n02649218', 'n02649546', + 'n02650050', 'n02650413', 'n02650541', 'n02651060', 'n02652132', + 'n02652668', 'n02653145', 'n02653497', 'n02653786', 'n02654112', + 'n02654425', 'n02654745', 'n02655020', 'n02655523', 'n02655848', + 'n02656032', 'n02656301', 'n02656670', 'n02656969', 'n02657368', + 'n02657694', 'n02658079', 'n02658531', 'n02658811', 'n02659176', + 'n02659478', 
'n02659808', 'n02660091', 'n02660208', 'n02660519', + 'n02660640', 'n02661017', 'n02661473', 'n02661618', 'n02662239', + 'n02662397', 'n02662559', 'n02662825', 'n02662993', 'n02663211', + 'n02663485', 'n02663849', 'n02664285', 'n02664642', 'n02665250', + 'n02665985', 'n02666196', 'n02666501', 'n02666624', 'n02666943', + 'n02667093', 'n02667244', 'n02667379', 'n02667478', 'n02667576', + 'n02667693', 'n02668393', 'n02668613', 'n02669295', 'n02669442', + 'n02669534', 'n02669723', 'n02670186', 'n02670382', 'n02670683', + 'n02670935', 'n02671780', 'n02672152', 'n02672371', 'n02672831', + 'n02675077', 'n02675219', 'n02675522', 'n02676097', 'n02676261', + 'n02676566', 'n02676670', 'n02676938', 'n02677028', 'n02677136', + 'n02677436', 'n02677718', 'n02678010', 'n02678384', 'n02678897', + 'n02679142', 'n02679257', 'n02679961', 'n02680110', 'n02680512', + 'n02680638', 'n02680754', 'n02681392', 'n02682311', 'n02682407', + 'n02682569', 'n02682811', 'n02682922', 'n02683183', 'n02683323', + 'n02683454', 'n02683558', 'n02683791', 'n02684248', 'n02684356', + 'n02684515', 'n02684649', 'n02684962', 'n02685082', 'n02685253', + 'n02685365', 'n02685701', 'n02685995', 'n02686121', 'n02686227', + 'n02686379', 'n02686568', 'n02687172', 'n02687423', 'n02687682', + 'n02687821', 'n02687992', 'n02688273', 'n02688443', 'n02689144', + 'n02689274', 'n02689434', 'n02689748', 'n02689819', 'n02690373', + 'n02690715', 'n02691156', 'n02692086', 'n02692232', 'n02692513', + 'n02692680', 'n02692877', 'n02693246', 'n02693413', 'n02693540', + 'n02694045', 'n02694279', 'n02694426', 'n02694662', 'n02694966', + 'n02695627', 'n02695762', 'n02696165', 'n02696246', 'n02696569', + 'n02696843', 'n02697022', 'n02697221', 'n02697576', 'n02697675', + 'n02697876', 'n02698244', 'n02698473', 'n02698634', 'n02699494', + 'n02699629', 'n02699770', 'n02699915', 'n02700064', 'n02700258', + 'n02700895', 'n02701002', 'n02701260', 'n02701730', 'n02702989', + 'n02703124', 'n02703275', 'n02704645', 'n02704792', 'n02704949', + 'n02705201', 'n02705429', 'n02705944', 'n02706221', 'n02706806', + 'n02708093', 'n02708224', 'n02708433', 'n02708555', 'n02708711', + 'n02708885', 'n02709101', 'n02709367', 'n02709637', 'n02709763', + 'n02709908', 'n02710044', 'n02710201', 'n02710324', 'n02710429', + 'n02710600', 'n02711237', 'n02711780', 'n02712545', 'n02712643', + 'n02713003', 'n02713218', 'n02713364', 'n02713496', 'n02714315', + 'n02714535', 'n02714751', 'n02715229', 'n02715513', 'n02715712', + 'n02716626', 'n02720048', 'n02720576', 'n02721813', 'n02723165', + 'n02724722', 'n02725872', 'n02726017', 'n02726210', 'n02726305', + 'n02726681', 'n02727016', 'n02727141', 'n02727426', 'n02727825', + 'n02728440', 'n02729222', 'n02729837', 'n02729965', 'n02730265', + 'n02730568', 'n02730930', 'n02731251', 'n02731398', 'n02731629', + 'n02731900', 'n02732072', 'n02732572', 'n02732827', 'n02733213', + 'n02733524', 'n02734725', 'n02734835', 'n02735268', 'n02735361', + 'n02735538', 'n02735688', 'n02736396', 'n02736798', 'n02737351', + 'n02737660', 'n02738031', 'n02738271', 'n02738449', 'n02738535', + 'n02738741', 'n02738859', 'n02738978', 'n02739123', 'n02739427', + 'n02739550', 'n02739668', 'n02739889', 'n02740061', 'n02740300', + 'n02740533', 'n02740764', 'n02741367', 'n02741475', 'n02742070', + 'n02742194', 'n02742322', 'n02742468', 'n02742753', 'n02743426', + 'n02744323', 'n02744844', 'n02744961', 'n02745492', 'n02745611', + 'n02745816', 'n02746008', 'n02746225', 'n02746365', 'n02746595', + 'n02746683', 'n02746978', 'n02747063', 'n02747177', 'n02747672', + 'n02747802', 
'n02748183', 'n02748359', 'n02748491', 'n02749169', + 'n02749292', 'n02749479', 'n02749670', 'n02749790', 'n02749953', + 'n02750070', 'n02750169', 'n02750320', 'n02750652', 'n02751067', + 'n02751215', 'n02751295', 'n02751490', 'n02752199', 'n02752496', + 'n02752615', 'n02752810', 'n02752917', 'n02753044', 'n02753394', + 'n02753710', 'n02754103', 'n02754656', 'n02755140', 'n02755352', + 'n02755529', 'n02755675', 'n02755823', 'n02755984', 'n02756098', + 'n02756854', 'n02756977', 'n02757061', 'n02757337', 'n02757462', + 'n02757714', 'n02757810', 'n02757927', 'n02758134', 'n02758490', + 'n02758863', 'n02758960', 'n02759257', 'n02759387', 'n02759700', + 'n02759963', 'n02760099', 'n02760199', 'n02760298', 'n02760429', + 'n02760658', 'n02760855', 'n02761034', 'n02761206', 'n02761392', + 'n02761557', 'n02761696', 'n02761834', 'n02762169', 'n02762371', + 'n02762508', 'n02762725', 'n02762909', 'n02763083', 'n02763198', + 'n02763306', 'n02763604', 'n02763714', 'n02763901', 'n02764044', + 'n02764398', 'n02764505', 'n02764614', 'n02764779', 'n02764935', + 'n02765028', 'n02766168', 'n02766320', 'n02766534', 'n02766792', + 'n02767038', 'n02767147', 'n02767433', 'n02767665', 'n02767956', + 'n02768114', 'n02768226', 'n02768433', 'n02768655', 'n02768973', + 'n02769075', 'n02769290', 'n02769669', 'n02769748', 'n02769963', + 'n02770078', 'n02770211', 'n02770585', 'n02770721', 'n02770830', + 'n02771004', 'n02771166', 'n02771286', 'n02771547', 'n02771750', + 'n02772101', 'n02772435', 'n02772554', 'n02772700', 'n02773037', + 'n02773838', 'n02774152', 'n02774630', 'n02774921', 'n02775039', + 'n02775178', 'n02775483', 'n02775689', 'n02775813', 'n02775897', + 'n02776007', 'n02776205', 'n02776505', 'n02776631', 'n02776825', + 'n02776978', 'n02777100', 'n02777292', 'n02777402', 'n02777638', + 'n02777734', 'n02777927', 'n02778131', 'n02778294', 'n02778456', + 'n02778588', 'n02778669', 'n02779435', 'n02779609', 'n02779719', + 'n02779971', 'n02780315', 'n02780445', 'n02780588', 'n02780704', + 'n02780815', 'n02781121', 'n02781213', 'n02781338', 'n02781517', + 'n02781764', 'n02782093', 'n02782432', 'n02782602', 'n02782681', + 'n02782778', 'n02783035', 'n02783161', 'n02783324', 'n02783459', + 'n02783900', 'n02783994', 'n02784124', 'n02784998', 'n02785648', + 'n02786058', 'n02786198', 'n02786331', 'n02786463', 'n02786611', + 'n02786736', 'n02786837', 'n02787120', 'n02787269', 'n02787435', + 'n02787622', 'n02788021', 'n02788148', 'n02788386', 'n02788462', + 'n02788572', 'n02788689', 'n02789487', 'n02790669', 'n02790823', + 'n02790996', 'n02791124', 'n02791270', 'n02791532', 'n02791665', + 'n02791795', 'n02792409', 'n02792552', 'n02792948', 'n02793089', + 'n02793199', 'n02793296', 'n02793414', 'n02793495', 'n02793684', + 'n02793842', 'n02793930', 'n02794008', 'n02794156', 'n02794368', + 'n02794474', 'n02794664', 'n02794779', 'n02794972', 'n02795169', + 'n02795528', 'n02795670', 'n02795783', 'n02795978', 'n02796207', + 'n02796318', 'n02796412', 'n02796623', 'n02796995', 'n02797295', + 'n02797535', 'n02797692', 'n02797881', 'n02799071', 'n02799175', + 'n02799323', 'n02799897', 'n02800213', 'n02800497', 'n02800675', + 'n02800940', 'n02801047', 'n02801184', 'n02801450', 'n02801525', + 'n02801823', 'n02801938', 'n02802215', 'n02802426', 'n02802544', + 'n02802721', 'n02802990', 'n02803349', 'n02803539', 'n02803666', + 'n02803809', 'n02803934', 'n02804123', 'n02804252', 'n02804414', + 'n02804515', 'n02804610', 'n02805283', 'n02805845', 'n02805983', + 'n02806088', 'n02806379', 'n02806530', 'n02806762', 'n02806875', + 'n02806992', 
'n02807133', 'n02807523', 'n02807616', 'n02807731', + 'n02808185', 'n02808304', 'n02808440', 'n02808829', 'n02808968', + 'n02809105', 'n02809241', 'n02809364', 'n02809491', 'n02809605', + 'n02809736', 'n02810139', 'n02810270', 'n02810471', 'n02810782', + 'n02811059', 'n02811204', 'n02811350', 'n02811468', 'n02811618', + 'n02811719', 'n02811936', 'n02812201', 'n02812342', 'n02812631', + 'n02812785', 'n02812949', 'n02813252', 'n02813399', 'n02813544', + 'n02813645', 'n02813752', 'n02813981', 'n02814116', 'n02814338', + 'n02814428', 'n02814533', 'n02814774', 'n02814860', 'n02815478', + 'n02815749', 'n02815834', 'n02815950', 'n02816494', 'n02816656', + 'n02816768', 'n02817031', 'n02817251', 'n02817386', 'n02817516', + 'n02817650', 'n02817799', 'n02818135', 'n02818254', 'n02818687', + 'n02818832', 'n02819697', 'n02820085', 'n02820210', 'n02820556', + 'n02820675', 'n02821202', 'n02821415', 'n02821543', 'n02821627', + 'n02821943', 'n02822064', 'n02822220', 'n02822399', 'n02822579', + 'n02822762', 'n02822865', 'n02823124', 'n02823335', 'n02823428', + 'n02823510', 'n02823586', 'n02823750', 'n02823848', 'n02823964', + 'n02824058', 'n02824152', 'n02824319', 'n02824448', 'n02825153', + 'n02825240', 'n02825442', 'n02825657', 'n02825872', 'n02825961', + 'n02826068', 'n02826259', 'n02826459', 'n02826589', 'n02826683', + 'n02826812', 'n02826886', 'n02827148', 'n02827606', 'n02828115', + 'n02828299', 'n02828427', 'n02828884', 'n02829246', 'n02829353', + 'n02829510', 'n02829596', 'n02830157', 'n02831237', 'n02831335', + 'n02831595', 'n02831724', 'n02831894', 'n02831998', 'n02833040', + 'n02833140', 'n02833275', 'n02833403', 'n02833793', 'n02834027', + 'n02834397', 'n02834506', 'n02834642', 'n02834778', 'n02835271', + 'n02835412', 'n02835551', 'n02835724', 'n02835829', 'n02835915', + 'n02836035', 'n02836174', 'n02836268', 'n02836392', 'n02836513', + 'n02836607', 'n02836900', 'n02837134', 'n02837567', 'n02837789', + 'n02837887', 'n02838014', 'n02838178', 'n02838345', 'n02838577', + 'n02838728', 'n02838958', 'n02839110', 'n02839351', 'n02839592', + 'n02839910', 'n02840134', 'n02840245', 'n02840515', 'n02840619', + 'n02841063', 'n02841187', 'n02841315', 'n02841506', 'n02841641', + 'n02841847', 'n02842133', 'n02842573', 'n02842809', 'n02843029', + 'n02843158', 'n02843276', 'n02843465', 'n02843553', 'n02843684', + 'n02843777', 'n02843909', 'n02844056', 'n02844214', 'n02844307', + 'n02844714', 'n02845130', 'n02845293', 'n02845985', 'n02846141', + 'n02846260', 'n02846511', 'n02846619', 'n02846733', 'n02846874', + 'n02847461', 'n02847631', 'n02847852', 'n02848118', 'n02848216', + 'n02848523', 'n02848806', 'n02848921', 'n02849154', 'n02849885', + 'n02850060', 'n02850358', 'n02850732', 'n02850950', 'n02851099', + 'n02851795', 'n02851939', 'n02852043', 'n02852173', 'n02852360', + 'n02853016', 'n02853218', 'n02853336', 'n02853745', 'n02853870', + 'n02854378', 'n02854532', 'n02854630', 'n02854739', 'n02854926', + 'n02855089', 'n02855390', 'n02855701', 'n02855793', 'n02855925', + 'n02856013', 'n02856237', 'n02856362', 'n02857365', 'n02857477', + 'n02857644', 'n02857907', 'n02858304', 'n02859184', 'n02859343', + 'n02859443', 'n02859557', 'n02859729', 'n02859955', 'n02860415', + 'n02860640', 'n02860847', 'n02861022', 'n02861147', 'n02861286', + 'n02861387', 'n02861509', 'n02861658', 'n02861777', 'n02861886', + 'n02862048', 'n02862916', 'n02863014', 'n02863176', 'n02863340', + 'n02863426', 'n02863536', 'n02863638', 'n02863750', 'n02864122', + 'n02864504', 'n02864593', 'n02864987', 'n02865351', 'n02865665', + 'n02865931', 
'n02866106', 'n02866386', 'n02866578', 'n02867401', + 'n02867592', 'n02867715', 'n02867966', 'n02868240', 'n02868429', + 'n02868546', 'n02868638', 'n02868975', 'n02869155', 'n02869249', + 'n02869563', 'n02869737', 'n02869837', 'n02870526', 'n02870676', + 'n02870772', 'n02870880', 'n02871005', 'n02871147', 'n02871314', + 'n02871439', 'n02871525', 'n02871631', 'n02871824', 'n02871963', + 'n02872333', 'n02872529', 'n02872752', 'n02873520', 'n02873623', + 'n02873733', 'n02873839', 'n02874086', 'n02874214', 'n02874336', + 'n02874442', 'n02874537', 'n02874642', 'n02874750', 'n02875436', + 'n02875626', 'n02875948', 'n02876084', 'n02876326', 'n02876457', + 'n02876657', 'n02877266', 'n02877513', 'n02877642', 'n02877765', + 'n02877962', 'n02878107', 'n02878222', 'n02878425', 'n02878534', + 'n02878628', 'n02878796', 'n02879087', 'n02879309', 'n02879422', + 'n02879517', 'n02879718', 'n02880189', 'n02880393', 'n02880546', + 'n02880842', 'n02880940', 'n02881193', 'n02881546', 'n02881757', + 'n02881906', 'n02882190', 'n02882301', 'n02882483', 'n02882647', + 'n02882894', 'n02883004', 'n02883101', 'n02883205', 'n02883344', + 'n02884225', 'n02884450', 'n02884859', 'n02884994', 'n02885108', + 'n02885233', 'n02885338', 'n02885462', 'n02885882', 'n02886321', + 'n02886434', 'n02886599', 'n02887079', 'n02887209', 'n02887489', + 'n02887832', 'n02887970', 'n02888270', 'n02888429', 'n02888569', + 'n02888898', 'n02889425', 'n02889646', 'n02889856', 'n02889996', + 'n02890188', 'n02890351', 'n02890513', 'n02890662', 'n02890804', + 'n02890940', 'n02891188', 'n02891788', 'n02892201', 'n02892304', + 'n02892392', 'n02892499', 'n02892626', 'n02892767', 'n02892948', + 'n02893269', 'n02893418', 'n02893608', 'n02893692', 'n02893941', + 'n02894024', 'n02894158', 'n02894337', 'n02894605', 'n02894847', + 'n02895008', 'n02895154', 'n02895328', 'n02895438', 'n02896074', + 'n02896294', 'n02896442', 'n02896694', 'n02896856', 'n02896949', + 'n02897097', 'n02897389', 'n02897820', 'n02898093', 'n02898173', + 'n02898269', 'n02898369', 'n02898585', 'n02898711', 'n02899439', + 'n02900160', 'n02900459', 'n02900594', 'n02900705', 'n02900857', + 'n02900987', 'n02901114', 'n02901259', 'n02901377', 'n02901481', + 'n02901620', 'n02901793', 'n02901901', 'n02902079', 'n02902687', + 'n02902816', 'n02902916', 'n02903006', 'n02903126', 'n02903204', + 'n02903727', 'n02903852', 'n02904109', 'n02904233', 'n02904505', + 'n02904640', 'n02904803', 'n02904927', 'n02905036', 'n02905152', + 'n02905886', 'n02906734', 'n02906963', 'n02907082', 'n02907296', + 'n02907391', 'n02907656', 'n02907873', 'n02908123', 'n02908217', + 'n02908773', 'n02908951', 'n02909053', 'n02909165', 'n02909285', + 'n02909706', 'n02909870', 'n02910145', 'n02910241', 'n02910353', + 'n02910542', 'n02910701', 'n02910864', 'n02910964', 'n02911332', + 'n02911485', 'n02912065', 'n02912319', 'n02912557', 'n02912894', + 'n02913152', 'n02914991', 'n02915904', 'n02916065', 'n02916179', + 'n02916350', 'n02916936', 'n02917067', 'n02917377', 'n02917521', + 'n02917607', 'n02917742', 'n02917964', 'n02918112', 'n02918330', + 'n02918455', 'n02918595', 'n02918831', 'n02918964', 'n02919148', + 'n02919308', 'n02919414', 'n02919648', 'n02919792', 'n02919890', + 'n02919976', 'n02920083', 'n02920164', 'n02920259', 'n02920369', + 'n02920503', 'n02920658', 'n02921029', 'n02921195', 'n02921292', + 'n02921406', 'n02921592', 'n02921756', 'n02921884', 'n02922159', + 'n02922292', 'n02922461', 'n02922578', 'n02922798', 'n02922877', + 'n02923129', 'n02923535', 'n02923682', 'n02923915', 'n02924116', + 'n02925009', 
'n02925107', 'n02925385', 'n02925519', 'n02925666', + 'n02926426', 'n02926591', 'n02927053', 'n02927161', 'n02927764', + 'n02927887', 'n02928049', 'n02928299', 'n02928413', 'n02928608', + 'n02929184', 'n02929289', 'n02929462', 'n02929582', 'n02929923', + 'n02930080', 'n02930214', 'n02930339', 'n02930645', 'n02930766', + 'n02931013', 'n02931148', 'n02931294', 'n02931417', 'n02931836', + 'n02932019', 'n02932400', 'n02932523', 'n02932693', 'n02932891', + 'n02933112', 'n02933340', 'n02933462', 'n02933649', 'n02933750', + 'n02933990', 'n02934168', 'n02934451', 'n02935017', 'n02935387', + 'n02935490', 'n02935658', 'n02935891', 'n02936176', 'n02936281', + 'n02936402', 'n02936570', 'n02936714', 'n02936921', 'n02937010', + 'n02937336', 'n02937958', 'n02938218', 'n02938321', 'n02938886', + 'n02939185', 'n02939763', 'n02939866', 'n02940289', 'n02940385', + 'n02940570', 'n02940706', 'n02941095', 'n02941228', 'n02941845', + 'n02942015', 'n02942147', 'n02942349', 'n02942460', 'n02942699', + 'n02943241', 'n02943465', 'n02943686', 'n02943871', 'n02943964', + 'n02944075', 'n02944146', 'n02944256', 'n02944459', 'n02944579', + 'n02944826', 'n02945161', 'n02945813', 'n02945964', 'n02946127', + 'n02946270', 'n02946348', 'n02946509', 'n02946753', 'n02946824', + 'n02946921', 'n02947212', 'n02947660', 'n02947818', 'n02947977', + 'n02948072', 'n02948293', 'n02948403', 'n02948557', 'n02948834', + 'n02948942', 'n02949084', 'n02949202', 'n02949356', 'n02949542', + 'n02950018', 'n02950120', 'n02950186', 'n02950256', 'n02950482', + 'n02950632', 'n02950826', 'n02950943', 'n02951358', 'n02951585', + 'n02951703', 'n02951843', 'n02952109', 'n02952237', 'n02952374', + 'n02952485', 'n02952585', 'n02952674', 'n02952798', 'n02952935', + 'n02953056', 'n02953197', 'n02953455', 'n02953552', 'n02953673', + 'n02953850', 'n02954163', 'n02954340', 'n02954938', 'n02955065', + 'n02955247', 'n02955540', 'n02955767', 'n02956393', 'n02956699', + 'n02956795', 'n02956883', 'n02957008', 'n02957135', 'n02957252', + 'n02957427', 'n02957755', 'n02957862', 'n02958343', 'n02959942', + 'n02960352', 'n02960690', 'n02960903', 'n02961035', 'n02961225', + 'n02961451', 'n02961544', 'n02961947', 'n02962061', 'n02962200', + 'n02962414', 'n02962843', 'n02962938', 'n02963159', 'n02963302', + 'n02963503', 'n02963692', 'n02963821', 'n02963987', 'n02964075', + 'n02964196', 'n02964295', 'n02964634', 'n02964843', 'n02964934', + 'n02965024', 'n02965122', 'n02965216', 'n02965300', 'n02965529', + 'n02965783', 'n02966068', 'n02966193', 'n02966545', 'n02966687', + 'n02966786', 'n02966942', 'n02967081', 'n02967170', 'n02967294', + 'n02967407', 'n02967540', 'n02967626', 'n02967782', 'n02967991', + 'n02968074', 'n02968210', 'n02968333', 'n02968473', 'n02969010', + 'n02969163', 'n02969323', 'n02969527', 'n02969634', 'n02969886', + 'n02970408', 'n02970534', 'n02970685', 'n02970849', 'n02971167', + 'n02971356', 'n02971473', 'n02971579', 'n02971691', 'n02971940', + 'n02972397', 'n02972714', 'n02972934', 'n02973017', 'n02973236', + 'n02973805', 'n02973904', 'n02974003', 'n02974348', 'n02974454', + 'n02974565', 'n02974697', 'n02975212', 'n02975589', 'n02975994', + 'n02976123', 'n02976249', 'n02976350', 'n02976455', 'n02976552', + 'n02976641', 'n02976815', 'n02976939', 'n02977058', 'n02977330', + 'n02977438', 'n02977619', 'n02977936', 'n02978055', 'n02978205', + 'n02978367', 'n02978478', 'n02978753', 'n02978881', 'n02979074', + 'n02979186', 'n02979290', 'n02979399', 'n02979516', 'n02979836', + 'n02980036', 'n02980203', 'n02980441', 'n02980625', 'n02981024', + 'n02981198', 
'n02981321', 'n02981565', 'n02981792', 'n02981911', + 'n02982232', 'n02982416', 'n02982515', 'n02982599', 'n02983072', + 'n02983189', 'n02983357', 'n02983507', 'n02983904', 'n02984061', + 'n02984203', 'n02984469', 'n02984699', 'n02985137', 'n02985606', + 'n02985828', 'n02985963', 'n02986066', 'n02986160', 'n02986348', + 'n02987047', 'n02987379', 'n02987492', 'n02987706', 'n02987823', + 'n02987950', 'n02988066', 'n02988156', 'n02988304', 'n02988486', + 'n02988679', 'n02988963', 'n02989099', 'n02990373', 'n02990758', + 'n02991048', 'n02991302', 'n02991847', 'n02992032', 'n02992211', + 'n02992368', 'n02992529', 'n02992795', 'n02993194', 'n02993368', + 'n02993546', 'n02994573', 'n02994743', 'n02995345', 'n02995871', + 'n02995998', 'n02997391', 'n02997607', 'n02997910', 'n02998003', + 'n02998107', 'n02998563', 'n02998696', 'n02998841', 'n02999138', + 'n02999410', 'n02999936', 'n03000134', 'n03000247', 'n03000530', + 'n03000684', 'n03001115', 'n03001282', 'n03001540', 'n03001627', + 'n03002096', 'n03002210', 'n03002341', 'n03002555', 'n03002711', + 'n03002816', 'n03002948', 'n03003091', 'n03003633', 'n03004275', + 'n03004409', 'n03004531', 'n03004620', 'n03004713', 'n03004824', + 'n03005033', 'n03005147', 'n03005285', 'n03005515', 'n03005619', + 'n03006626', 'n03006788', 'n03006903', 'n03007130', 'n03007297', + 'n03007444', 'n03007591', 'n03008177', 'n03008817', 'n03008976', + 'n03009111', 'n03009269', 'n03009794', 'n03010473', 'n03010656', + 'n03010795', 'n03010915', 'n03011018', 'n03011355', 'n03011741', + 'n03012013', 'n03012159', 'n03012373', 'n03012499', 'n03012644', + 'n03012734', 'n03012897', 'n03013006', 'n03013438', 'n03013580', + 'n03013850', 'n03014440', 'n03014705', 'n03015149', 'n03015254', + 'n03015478', 'n03015631', 'n03015851', 'n03016209', 'n03016389', + 'n03016609', 'n03016737', 'n03016868', 'n03016953', 'n03017070', + 'n03017168', 'n03017698', 'n03017835', 'n03018209', 'n03018349', + 'n03018614', 'n03018712', 'n03018848', 'n03019198', 'n03019304', + 'n03019434', 'n03019685', 'n03019806', 'n03019938', 'n03020034', + 'n03020416', 'n03020692', 'n03021228', 'n03024064', 'n03024233', + 'n03024333', 'n03024518', 'n03025070', 'n03025165', 'n03025250', + 'n03025886', 'n03026506', 'n03026907', 'n03027001', 'n03027108', + 'n03027250', 'n03027505', 'n03027625', 'n03028079', 'n03028596', + 'n03028785', 'n03029066', 'n03029197', 'n03029296', 'n03029445', + 'n03029925', 'n03030262', 'n03030353', 'n03030557', 'n03030880', + 'n03031012', 'n03031152', 'n03031422', 'n03031756', 'n03032252', + 'n03032453', 'n03032811', 'n03033267', 'n03033362', 'n03033986', + 'n03034244', 'n03034405', 'n03034516', 'n03034663', 'n03035252', + 'n03035510', 'n03035715', 'n03035832', 'n03036022', 'n03036149', + 'n03036244', 'n03036341', 'n03036469', 'n03036701', 'n03036866', + 'n03037108', 'n03037228', 'n03037404', 'n03037590', 'n03037709', + 'n03038041', 'n03038281', 'n03038480', 'n03038685', 'n03038870', + 'n03039015', 'n03039259', 'n03039353', 'n03039493', 'n03039827', + 'n03039947', 'n03040229', 'n03040376', 'n03040836', 'n03041114', + 'n03041265', 'n03041449', 'n03041632', 'n03041810', 'n03042139', + 'n03042384', 'n03042490', 'n03042697', 'n03042829', 'n03042984', + 'n03043173', 'n03043274', 'n03043423', 'n03043693', 'n03043798', + 'n03043958', 'n03044671', 'n03044801', 'n03044934', 'n03045074', + 'n03045228', 'n03045337', 'n03045698', 'n03045800', 'n03046029', + 'n03046133', 'n03046257', 'n03046802', 'n03046921', 'n03047052', + 'n03047171', 'n03047690', 'n03047799', 'n03047941', 'n03048883', + 'n03049066', 
'n03049326', 'n03049457', 'n03049782', 'n03049924', + 'n03050026', 'n03050453', 'n03050546', 'n03050655', 'n03050864', + 'n03051041', 'n03051249', 'n03051396', 'n03051540', 'n03052464', + 'n03052917', 'n03053047', 'n03053976', 'n03054491', 'n03054605', + 'n03054901', 'n03055159', 'n03055418', 'n03055670', 'n03055857', + 'n03056097', 'n03056215', 'n03056288', 'n03056493', 'n03056583', + 'n03056873', 'n03057021', 'n03057541', 'n03057636', 'n03057724', + 'n03057841', 'n03057920', 'n03058107', 'n03058603', 'n03058949', + 'n03059103', 'n03059236', 'n03059366', 'n03059685', 'n03059934', + 'n03060728', 'n03061050', 'n03061211', 'n03061345', 'n03061505', + 'n03061674', 'n03061819', 'n03061893', 'n03062015', 'n03062122', + 'n03062245', 'n03062336', 'n03062651', 'n03062798', 'n03062985', + 'n03063073', 'n03063199', 'n03063338', 'n03063485', 'n03063599', + 'n03063689', 'n03063834', 'n03063968', 'n03064250', 'n03064350', + 'n03064562', 'n03064758', 'n03064935', 'n03065243', 'n03065424', + 'n03065708', 'n03066232', 'n03066359', 'n03066464', 'n03066849', + 'n03067093', 'n03067212', 'n03067339', 'n03067518', 'n03068181', + 'n03068998', 'n03069752', 'n03070059', 'n03070193', 'n03070396', + 'n03070587', 'n03070854', 'n03071021', 'n03071160', 'n03071288', + 'n03071552', 'n03072056', 'n03072201', 'n03072440', 'n03072682', + 'n03073296', 'n03073384', 'n03073545', 'n03073694', 'n03073977', + 'n03074380', 'n03074855', 'n03075097', 'n03075248', 'n03075370', + 'n03075500', 'n03075634', 'n03075768', 'n03075946', 'n03076411', + 'n03076623', 'n03076708', 'n03077442', 'n03077616', 'n03077741', + 'n03078287', 'n03078506', 'n03078670', 'n03078802', 'n03078995', + 'n03079136', 'n03079230', 'n03079494', 'n03079616', 'n03079741', + 'n03080309', 'n03080497', 'n03080633', 'n03080731', 'n03080904', + 'n03081859', 'n03081986', 'n03082127', 'n03082280', 'n03082450', + 'n03082656', 'n03082807', 'n03082979', 'n03084420', 'n03084834', + 'n03085013', 'n03085219', 'n03085333', 'n03085602', 'n03085781', + 'n03085915', 'n03086183', 'n03086457', 'n03086580', 'n03086670', + 'n03086868', 'n03087069', 'n03087245', 'n03087366', 'n03087521', + 'n03087643', 'n03087816', 'n03088389', 'n03088580', 'n03088707', + 'n03089477', 'n03089624', 'n03089753', 'n03089879', 'n03090000', + 'n03090172', 'n03090437', 'n03090710', 'n03090856', 'n03091044', + 'n03091223', 'n03091374', 'n03091907', 'n03092053', 'n03092166', + 'n03092314', 'n03092476', 'n03092656', 'n03092883', 'n03093427', + 'n03093792', 'n03094159', 'n03094503', 'n03095699', 'n03095965', + 'n03096439', 'n03096960', 'n03097362', 'n03097535', 'n03097673', + 'n03098140', 'n03098515', 'n03098688', 'n03098806', 'n03098959', + 'n03099147', 'n03099274', 'n03099454', 'n03099622', 'n03099771', + 'n03099945', 'n03100240', 'n03100346', 'n03100490', 'n03100897', + 'n03101156', 'n03101302', 'n03101375', 'n03101517', 'n03101664', + 'n03101796', 'n03101986', 'n03102371', 'n03102516', 'n03102654', + 'n03102859', 'n03103128', 'n03103396', 'n03103563', 'n03103904', + 'n03104019', 'n03104512', 'n03105088', 'n03105214', 'n03105306', + 'n03105467', 'n03105645', 'n03105810', 'n03105974', 'n03106722', + 'n03106898', 'n03107046', 'n03107488', 'n03107716', 'n03108455', + 'n03108624', 'n03108759', 'n03108853', 'n03109033', 'n03109150', + 'n03109253', 'n03109693', 'n03109881', 'n03110202', 'n03110669', + 'n03111041', 'n03111177', 'n03111296', 'n03111690', 'n03112240', + 'n03112719', 'n03112869', 'n03113152', 'n03113505', 'n03113657', + 'n03113835', 'n03114041', 'n03114236', 'n03114379', 'n03114504', + 'n03114743', 
'n03114839', 'n03115014', 'n03115180', 'n03115400', + 'n03115663', 'n03115762', 'n03115897', 'n03116008', 'n03116163', + 'n03116530', 'n03116767', 'n03117199', 'n03117642', 'n03118346', + 'n03118969', 'n03119203', 'n03119396', 'n03119510', 'n03120198', + 'n03120491', 'n03120778', 'n03121040', 'n03121190', 'n03121298', + 'n03121431', 'n03121897', 'n03122073', 'n03122202', 'n03122295', + 'n03122748', 'n03123553', 'n03123666', 'n03123809', 'n03123917', + 'n03124043', 'n03124170', 'n03124313', 'n03124474', 'n03124590', + 'n03125057', 'n03125588', 'n03125729', 'n03125870', 'n03126090', + 'n03126385', 'n03126580', 'n03126707', 'n03126927', 'n03127024', + 'n03127203', 'n03127408', 'n03127531', 'n03127747', 'n03127925', + 'n03128085', 'n03128248', 'n03128427', 'n03128519', 'n03129001', + 'n03129471', 'n03129636', 'n03129753', 'n03129848', 'n03130066', + 'n03130233', 'n03130563', 'n03130761', 'n03130866', 'n03131193', + 'n03131574', 'n03131669', 'n03131967', 'n03132076', 'n03132261', + 'n03132438', 'n03132666', 'n03132776', 'n03133050', 'n03133415', + 'n03133878', 'n03134118', 'n03134232', 'n03134394', 'n03134739', + 'n03134853', 'n03135030', 'n03135532', 'n03135656', 'n03135788', + 'n03135917', 'n03136051', 'n03136254', 'n03136369', 'n03136504', + 'n03137473', 'n03137579', 'n03138128', 'n03138217', 'n03138344', + 'n03138669', 'n03139089', 'n03139464', 'n03139640', 'n03139998', + 'n03140126', 'n03140292', 'n03140431', 'n03140546', 'n03140652', + 'n03140771', 'n03140900', 'n03141065', 'n03141327', 'n03141455', + 'n03141612', 'n03141702', 'n03141823', 'n03142099', 'n03142205', + 'n03142325', 'n03142431', 'n03142679', 'n03143400', 'n03143572', + 'n03143754', 'n03144156', 'n03144873', 'n03144982', 'n03145147', + 'n03145277', 'n03145384', 'n03145522', 'n03145719', 'n03145843', + 'n03146219', 'n03146342', 'n03146449', 'n03146560', 'n03146687', + 'n03146777', 'n03146846', 'n03147084', 'n03147156', 'n03147280', + 'n03147509', 'n03148324', 'n03148518', 'n03148727', 'n03148808', + 'n03149135', 'n03149401', 'n03149686', 'n03149810', 'n03150232', + 'n03150511', 'n03150661', 'n03150795', 'n03151077', 'n03152303', + 'n03152951', 'n03153246', 'n03153585', 'n03153948', 'n03154073', + 'n03154316', 'n03154446', 'n03154616', 'n03154745', 'n03154895', + 'n03155178', 'n03155502', 'n03155915', 'n03156071', 'n03156279', + 'n03156405', 'n03156767', 'n03157348', 'n03158186', 'n03158414', + 'n03158668', 'n03158796', 'n03158885', 'n03159535', 'n03159640', + 'n03160001', 'n03160186', 'n03160309', 'n03160740', 'n03161016', + 'n03161450', 'n03161893', 'n03162297', 'n03162460', 'n03162556', + 'n03162714', 'n03162818', 'n03163222', 'n03163381', 'n03163488', + 'n03163798', 'n03163973', 'n03164192', 'n03164344', 'n03164605', + 'n03164722', 'n03164929', 'n03165096', 'n03165211', 'n03165466', + 'n03165616', 'n03165823', 'n03165955', 'n03166120', 'n03166514', + 'n03166600', 'n03166685', 'n03166809', 'n03166951', 'n03167153', + 'n03167978', 'n03168107', 'n03168217', 'n03168543', 'n03168663', + 'n03168774', 'n03168933', 'n03169063', 'n03169176', 'n03170292', + 'n03170459', 'n03170635', 'n03170872', 'n03171228', 'n03171356', + 'n03171635', 'n03171910', 'n03172038', 'n03172738', 'n03172965', + 'n03173270', 'n03173387', 'n03173929', 'n03174079', 'n03174450', + 'n03174731', 'n03175081', 'n03175189', 'n03175301', 'n03175457', + 'n03175604', 'n03175843', 'n03175983', 'n03176238', 'n03176386', + 'n03176594', 'n03176763', 'n03177059', 'n03177165', 'n03177708', + 'n03178000', 'n03178173', 'n03178430', 'n03178538', 'n03178674', + 'n03179701', 
'n03179910', 'n03180011', 'n03180384', 'n03180504', + 'n03180732', 'n03180865', 'n03180969', 'n03181293', 'n03181667', + 'n03182140', 'n03182232', 'n03182912', 'n03183080', 'n03185868', + 'n03186199', 'n03186285', 'n03186818', 'n03187037', 'n03187153', + 'n03187268', 'n03187595', 'n03187751', 'n03188290', 'n03188531', + 'n03188725', 'n03188871', 'n03189083', 'n03189311', 'n03189818', + 'n03190458', 'n03191286', 'n03191451', 'n03191561', 'n03191776', + 'n03192543', 'n03192907', 'n03193107', 'n03193260', 'n03193423', + 'n03193597', 'n03193754', 'n03194170', 'n03194297', 'n03194812', + 'n03194992', 'n03195332', 'n03195485', 'n03195799', 'n03195959', + 'n03196062', 'n03196217', 'n03196324', 'n03196598', 'n03196990', + 'n03197201', 'n03197337', 'n03197446', 'n03198223', 'n03198500', + 'n03199358', 'n03199488', 'n03199647', 'n03199775', 'n03199901', + 'n03200231', 'n03200357', 'n03200539', 'n03200701', 'n03200906', + 'n03201035', 'n03201208', 'n03201529', 'n03201638', 'n03201776', + 'n03201895', 'n03201996', 'n03202354', 'n03202481', 'n03202760', + 'n03202940', 'n03203089', 'n03203806', 'n03204134', 'n03204306', + 'n03204436', 'n03204558', 'n03204955', 'n03205143', 'n03205304', + 'n03205458', 'n03205574', 'n03205669', 'n03205903', 'n03206023', + 'n03206158', 'n03206282', 'n03206405', 'n03206602', 'n03206718', + 'n03206908', 'n03207305', 'n03207548', 'n03207630', 'n03207743', + 'n03207835', 'n03207941', 'n03208556', 'n03208938', 'n03209359', + 'n03209477', 'n03209666', 'n03209910', 'n03210245', 'n03210372', + 'n03210552', 'n03210683', 'n03211117', 'n03211413', 'n03211616', + 'n03211789', 'n03212114', 'n03212247', 'n03212406', 'n03212811', + 'n03213014', 'n03213361', 'n03213538', 'n03213715', 'n03213826', + 'n03214253', 'n03214450', 'n03214582', 'n03214966', 'n03215076', + 'n03215191', 'n03215337', 'n03215508', 'n03215749', 'n03215930', + 'n03216199', 'n03216402', 'n03216562', 'n03216710', 'n03216828', + 'n03217653', 'n03217739', 'n03217889', 'n03218198', 'n03218446', + 'n03219010', 'n03219135', 'n03219483', 'n03219612', 'n03219859', + 'n03219966', 'n03220095', 'n03220237', 'n03220513', 'n03220692', + 'n03221059', 'n03221351', 'n03221540', 'n03221720', 'n03222176', + 'n03222318', 'n03222516', 'n03222722', 'n03222857', 'n03223162', + 'n03223299', 'n03223441', 'n03223553', 'n03223686', 'n03223923', + 'n03224490', 'n03224603', 'n03224753', 'n03224893', 'n03225108', + 'n03225458', 'n03225616', 'n03225777', 'n03225988', 'n03226090', + 'n03226254', 'n03226375', 'n03226538', 'n03226880', 'n03227010', + 'n03227184', 'n03227317', 'n03227721', 'n03227856', 'n03228016', + 'n03228254', 'n03228365', 'n03228533', 'n03228692', 'n03228796', + 'n03228967', 'n03229115', 'n03229244', 'n03229526', 'n03231160', + 'n03231368', 'n03231819', 'n03232309', 'n03232417', 'n03232543', + 'n03232815', 'n03232923', 'n03233123', 'n03233624', 'n03233744', + 'n03233905', 'n03234164', 'n03234952', 'n03235042', 'n03235180', + 'n03235327', 'n03235796', 'n03235979', 'n03236093', 'n03236217', + 'n03236423', 'n03236580', 'n03236735', 'n03237212', 'n03237340', + 'n03237416', 'n03237639', 'n03237839', 'n03237992', 'n03238131', + 'n03238286', 'n03238586', 'n03238762', 'n03238879', 'n03239054', + 'n03239259', 'n03239607', 'n03239726', 'n03240140', 'n03240683', + 'n03240892', 'n03241093', 'n03241335', 'n03241496', 'n03241903', + 'n03242120', 'n03242264', 'n03242390', 'n03242506', 'n03242995', + 'n03243218', 'n03243625', 'n03244047', 'n03244231', 'n03244388', + 'n03244775', 'n03244919', 'n03245271', 'n03245421', 'n03245724', + 'n03245889', 
'n03246197', 'n03246312', 'n03246454', 'n03246653', + 'n03246933', 'n03247083', 'n03247351', 'n03247495', 'n03248835', + 'n03249342', 'n03249569', 'n03249956', 'n03250089', 'n03250279', + 'n03250405', 'n03250588', 'n03250847', 'n03250952', 'n03251100', + 'n03251280', 'n03251533', 'n03251766', 'n03251932', 'n03252231', + 'n03252324', 'n03252422', 'n03252637', 'n03252787', 'n03253071', + 'n03253187', 'n03253279', 'n03253714', 'n03253796', 'n03253886', + 'n03254046', 'n03254189', 'n03254374', 'n03254625', 'n03254737', + 'n03254862', 'n03255030', 'n03255167', 'n03255322', 'n03255488', + 'n03255899', 'n03256032', 'n03256166', 'n03256472', 'n03256631', + 'n03256788', 'n03256928', 'n03257065', 'n03257210', 'n03257586', + 'n03258192', 'n03258330', 'n03258456', 'n03258577', 'n03258905', + 'n03259009', 'n03259280', 'n03259401', 'n03259505', 'n03260206', + 'n03260504', 'n03260733', 'n03260849', 'n03261019', 'n03261263', + 'n03261395', 'n03261603', 'n03261776', 'n03262072', 'n03262248', + 'n03262519', 'n03262717', 'n03262809', 'n03262932', 'n03263076', + 'n03263338', 'n03263640', 'n03263758', 'n03264906', 'n03265032', + 'n03265754', 'n03266195', 'n03266371', 'n03266620', 'n03266749', + 'n03267113', 'n03267468', 'n03267696', 'n03267821', 'n03268142', + 'n03268311', 'n03268645', 'n03268790', 'n03268918', 'n03269073', + 'n03269203', 'n03269401', 'n03270165', 'n03270695', 'n03270854', + 'n03271030', 'n03271260', 'n03271376', 'n03271574', 'n03271765', + 'n03271865', 'n03272010', 'n03272125', 'n03272239', 'n03272383', + 'n03272562', 'n03272810', 'n03272940', 'n03273061', 'n03273551', + 'n03273740', 'n03273913', 'n03274265', 'n03274435', 'n03274561', + 'n03274796', 'n03275125', 'n03275311', 'n03275566', 'n03275681', + 'n03275864', 'n03276179', 'n03276696', 'n03276839', 'n03277004', + 'n03277149', 'n03277459', 'n03277602', 'n03277771', 'n03278248', + 'n03278914', 'n03279153', 'n03279364', 'n03279508', 'n03279804', + 'n03279918', 'n03280216', 'n03280394', 'n03280644', 'n03281145', + 'n03281524', 'n03281673', 'n03282060', 'n03282295', 'n03282401', + 'n03283221', 'n03283413', 'n03283827', 'n03284308', 'n03284482', + 'n03284743', 'n03284886', 'n03284981', 'n03285578', 'n03285730', + 'n03285912', 'n03286572', 'n03287351', 'n03287733', 'n03288003', + 'n03288500', 'n03288643', 'n03288742', 'n03288886', 'n03289660', + 'n03289985', 'n03290096', 'n03290195', 'n03290653', 'n03291413', + 'n03291551', 'n03291741', 'n03291819', 'n03291963', 'n03292085', + 'n03292362', 'n03292475', 'n03292603', 'n03292736', 'n03292960', + 'n03293095', 'n03293741', 'n03293863', 'n03294048', 'n03294604', + 'n03294833', 'n03295012', 'n03295140', 'n03295246', 'n03295928', + 'n03296081', 'n03296217', 'n03296328', 'n03296478', 'n03296963', + 'n03297103', 'n03297226', 'n03297495', 'n03297644', 'n03297735', + 'n03298089', 'n03298352', 'n03298716', 'n03298858', 'n03299406', + 'n03300216', 'n03300443', 'n03301175', 'n03301291', 'n03301389', + 'n03301568', 'n03301833', 'n03301940', 'n03302671', 'n03302790', + 'n03302938', 'n03303217', 'n03303669', 'n03303831', 'n03304197', + 'n03304323', 'n03304465', 'n03305300', 'n03305522', 'n03305953', + 'n03306385', 'n03306869', 'n03307037', 'n03307573', 'n03307792', + 'n03308152', 'n03308481', 'n03308614', 'n03309110', 'n03309356', + 'n03309465', 'n03309687', 'n03309808', 'n03313333', 'n03314227', + 'n03314378', 'n03314608', 'n03314780', 'n03314884', 'n03315644', + 'n03315805', 'n03315990', 'n03316105', 'n03316406', 'n03316873', + 'n03317233', 'n03317510', 'n03317673', 'n03317788', 'n03317889', + 'n03318136', 
'n03318294', 'n03318865', 'n03318983', 'n03319167', + 'n03319457', 'n03319576', 'n03319745', 'n03320046', 'n03320262', + 'n03320421', 'n03320519', 'n03320845', 'n03320959', 'n03321103', + 'n03321419', 'n03321563', 'n03321843', 'n03321954', 'n03322570', + 'n03322704', 'n03322836', 'n03322940', 'n03323096', 'n03323211', + 'n03323319', 'n03323703', 'n03324629', 'n03324814', 'n03324928', + 'n03325088', 'n03325288', 'n03325403', 'n03325584', 'n03325691', + 'n03325941', 'n03326073', 'n03326371', 'n03326475', 'n03326660', + 'n03326795', 'n03326948', 'n03327133', 'n03327234', 'n03327553', + 'n03327691', 'n03327841', 'n03328201', 'n03329302', 'n03329536', + 'n03329663', 'n03330002', 'n03330665', 'n03330792', 'n03330947', + 'n03331077', 'n03331244', 'n03331599', 'n03332005', 'n03332173', + 'n03332271', 'n03332393', 'n03332591', 'n03332784', 'n03332989', + 'n03333129', 'n03333252', 'n03333349', 'n03333610', 'n03333711', + 'n03333851', 'n03334017', 'n03334291', 'n03334382', 'n03334492', + 'n03334912', 'n03335030', 'n03335333', 'n03335461', 'n03335846', + 'n03336168', 'n03336282', 'n03336575', 'n03336742', 'n03336839', + 'n03337140', 'n03337383', 'n03337494', 'n03337822', 'n03338287', + 'n03338821', 'n03339296', 'n03339529', 'n03339643', 'n03340009', + 'n03340723', 'n03340923', 'n03341035', 'n03341153', 'n03341297', + 'n03341606', 'n03342015', 'n03342127', 'n03342262', 'n03342432', + 'n03342657', 'n03342863', 'n03342961', 'n03343047', 'n03343234', + 'n03343354', 'n03343560', 'n03343737', 'n03343853', 'n03344305', + 'n03344393', 'n03344509', 'n03344642', 'n03344784', 'n03344935', + 'n03345487', 'n03345837', 'n03346135', 'n03346289', 'n03346455', + 'n03347037', 'n03347472', 'n03347617', 'n03348142', 'n03348868', + 'n03349020', 'n03349296', 'n03349367', 'n03349469', 'n03349599', + 'n03349771', 'n03349892', 'n03350204', 'n03350352', 'n03350456', + 'n03350602', 'n03351151', 'n03351262', 'n03351434', 'n03351979', + 'n03352232', 'n03352366', 'n03352628', 'n03352961', 'n03353281', + 'n03353951', 'n03354207', 'n03354903', 'n03355468', 'n03355768', + 'n03355925', 'n03356038', 'n03356279', 'n03356446', 'n03356559', + 'n03356858', 'n03356982', 'n03357081', 'n03357267', 'n03357716', + 'n03358172', 'n03358380', 'n03358726', 'n03358841', 'n03359137', + 'n03359285', 'n03359436', 'n03359566', 'n03360133', 'n03360300', + 'n03360431', 'n03360622', 'n03360731', 'n03361109', 'n03361297', + 'n03361380', 'n03361550', 'n03361683', 'n03362639', 'n03362771', + 'n03362890', 'n03363363', 'n03363549', 'n03363749', 'n03364008', + 'n03364156', 'n03364599', 'n03364937', 'n03365231', 'n03365374', + 'n03365592', 'n03365991', 'n03366464', 'n03366721', 'n03366823', + 'n03366974', 'n03367059', 'n03367321', 'n03367410', 'n03367545', + 'n03367875', 'n03367969', 'n03368048', 'n03368352', 'n03369276', + 'n03369407', 'n03369512', 'n03369866', 'n03370387', 'n03370646', + 'n03371875', 'n03372029', 'n03372549', 'n03372822', 'n03372933', + 'n03373237', 'n03373611', 'n03373943', 'n03374102', 'n03374282', + 'n03374372', 'n03374473', 'n03374570', 'n03374649', 'n03374838', + 'n03375171', 'n03375329', 'n03375575', 'n03376159', 'n03376279', + 'n03376595', 'n03376771', 'n03376938', 'n03378005', 'n03378174', + 'n03378342', 'n03378442', 'n03378593', 'n03378765', 'n03379051', + 'n03379204', 'n03379343', 'n03379719', 'n03379828', 'n03379989', + 'n03380301', 'n03380647', 'n03380724', 'n03380867', 'n03381126', + 'n03381231', 'n03381450', 'n03381565', 'n03381776', 'n03382104', + 'n03382292', 'n03382413', 'n03382533', 'n03382708', 'n03382856', + 'n03382969', 
'n03383099', 'n03383211', 'n03383378', 'n03383468', + 'n03383562', 'n03383821', 'n03384167', 'n03384352', 'n03384891', + 'n03385295', 'n03385557', 'n03386011', 'n03386343', 'n03386544', + 'n03386726', 'n03386870', 'n03387323', 'n03387653', 'n03388043', + 'n03388183', 'n03388323', 'n03388549', 'n03388711', 'n03388990', + 'n03389611', 'n03389761', 'n03389889', 'n03389983', 'n03390075', + 'n03390327', 'n03390673', 'n03390786', 'n03390983', 'n03391301', + 'n03391613', 'n03391770', 'n03392648', 'n03392741', 'n03393017', + 'n03393199', 'n03393324', 'n03393761', 'n03393912', 'n03394149', + 'n03394272', 'n03394480', 'n03394649', 'n03394916', 'n03395256', + 'n03395401', 'n03395514', 'n03395859', 'n03396074', 'n03396580', + 'n03396654', 'n03396997', 'n03397087', 'n03397266', 'n03397412', + 'n03397532', 'n03397947', 'n03398153', 'n03398228', 'n03399579', + 'n03399677', 'n03399761', 'n03399971', 'n03400231', 'n03400972', + 'n03401129', 'n03401279', 'n03401721', 'n03402188', 'n03402369', + 'n03402511', 'n03402785', 'n03402941', 'n03403643', 'n03404012', + 'n03404149', 'n03404251', 'n03404360', 'n03404449', 'n03404900', + 'n03405111', 'n03405265', 'n03405595', 'n03405725', 'n03406759', + 'n03406966', 'n03407369', 'n03407865', 'n03408054', 'n03408264', + 'n03408340', 'n03408444', 'n03409297', 'n03409393', 'n03409591', + 'n03409920', 'n03410022', 'n03410147', 'n03410303', 'n03410423', + 'n03410571', 'n03410740', 'n03410938', 'n03411079', 'n03411208', + 'n03411339', 'n03411927', 'n03412058', 'n03412220', 'n03412387', + 'n03412511', 'n03412906', 'n03413124', 'n03413264', 'n03413428', + 'n03413684', 'n03413828', 'n03414029', 'n03414162', 'n03414676', + 'n03415252', 'n03415486', 'n03415626', 'n03415749', 'n03415868', + 'n03416094', 'n03416489', 'n03416640', 'n03416775', 'n03416900', + 'n03417042', 'n03417202', 'n03417345', 'n03417749', 'n03417970', + 'n03418158', 'n03418242', 'n03418402', 'n03418618', 'n03418749', + 'n03418915', 'n03419014', 'n03420345', 'n03420801', 'n03420935', + 'n03421117', 'n03421324', 'n03421485', 'n03421669', 'n03421768', + 'n03421960', 'n03422072', 'n03422484', 'n03422589', 'n03422771', + 'n03423099', 'n03423224', 'n03423306', 'n03423479', 'n03423568', + 'n03423719', 'n03423877', 'n03424204', 'n03424325', 'n03424489', + 'n03424630', 'n03424862', 'n03425241', 'n03425325', 'n03425413', + 'n03425595', 'n03425769', 'n03426134', 'n03426285', 'n03426462', + 'n03426574', 'n03426871', 'n03427202', 'n03427296', 'n03428090', + 'n03428226', 'n03428349', 'n03429003', 'n03429137', 'n03429288', + 'n03429682', 'n03429771', 'n03429914', 'n03430091', 'n03430313', + 'n03430418', 'n03430551', 'n03430959', 'n03431243', 'n03431570', + 'n03431745', 'n03432061', 'n03432129', 'n03432360', 'n03432509', + 'n03433247', 'n03433637', 'n03433877', 'n03434188', 'n03434285', + 'n03434830', 'n03435593', 'n03435743', 'n03435991', 'n03436075', + 'n03436182', 'n03436417', 'n03436549', 'n03436656', 'n03436772', + 'n03436891', 'n03436990', 'n03437184', 'n03437295', 'n03437430', + 'n03437581', 'n03437741', 'n03437829', 'n03437941', 'n03438071', + 'n03438257', 'n03438661', 'n03438780', 'n03438863', 'n03439348', + 'n03439631', 'n03439814', 'n03440216', 'n03440682', 'n03440876', + 'n03441112', 'n03441345', 'n03441465', 'n03441582', 'n03442288', + 'n03442487', 'n03442597', 'n03442756', 'n03443005', 'n03443149', + 'n03443371', 'n03443543', 'n03443912', 'n03444034', 'n03445326', + 'n03445617', 'n03445777', 'n03445924', 'n03446070', 'n03446268', + 'n03446832', 'n03447075', 'n03447358', 'n03447447', 'n03447721', + 'n03447894', 
'n03448031', 'n03448590', 'n03448696', 'n03448956', + 'n03449217', 'n03449309', 'n03449451', 'n03449564', 'n03449858', + 'n03450230', 'n03450516', 'n03450734', 'n03450881', 'n03450974', + 'n03451120', 'n03451253', 'n03451365', 'n03451711', 'n03451798', + 'n03452267', 'n03452449', 'n03452594', 'n03452741', 'n03453231', + 'n03453320', 'n03453443', 'n03454110', 'n03454211', 'n03454442', + 'n03454536', 'n03454707', 'n03454885', 'n03455355', 'n03455488', + 'n03455642', 'n03455802', 'n03456024', 'n03456186', 'n03456299', + 'n03456447', 'n03456548', 'n03456665', 'n03457008', 'n03457451', + 'n03457686', 'n03457902', 'n03458271', 'n03458422', 'n03459328', + 'n03459591', 'n03459775', 'n03459914', 'n03460040', 'n03460147', + 'n03460297', 'n03460455', 'n03460899', 'n03461288', 'n03461385', + 'n03461651', 'n03461882', 'n03461988', 'n03462110', 'n03462315', + 'n03462747', 'n03462972', 'n03463185', 'n03463381', 'n03463666', + 'n03464053', 'n03464467', 'n03464628', 'n03464952', 'n03465040', + 'n03465151', 'n03465320', 'n03465426', 'n03465500', 'n03465605', + 'n03465718', 'n03465818', 'n03466162', 'n03466493', 'n03466600', + 'n03466839', 'n03466947', 'n03467068', 'n03467254', 'n03467380', + 'n03467517', 'n03467796', 'n03467887', 'n03467984', 'n03468570', + 'n03468696', 'n03468821', 'n03469031', 'n03469175', 'n03469493', + 'n03469832', 'n03469903', 'n03470005', 'n03470222', 'n03470387', + 'n03470629', 'n03470948', 'n03471030', 'n03471190', 'n03471347', + 'n03471779', 'n03472232', 'n03472535', 'n03472672', 'n03472796', + 'n03472937', 'n03473078', 'n03473227', 'n03473465', 'n03473817', + 'n03473966', 'n03474167', 'n03474352', 'n03474779', 'n03474896', + 'n03475581', 'n03475674', 'n03475823', 'n03475961', 'n03476083', + 'n03476313', 'n03476542', 'n03476684', 'n03476991', 'n03477143', + 'n03477303', 'n03477410', 'n03477512', 'n03477773', 'n03477902', + 'n03478589', 'n03478756', 'n03478907', 'n03479121', 'n03479266', + 'n03479397', 'n03479502', 'n03480579', 'n03480719', 'n03480973', + 'n03481172', 'n03481521', 'n03482001', 'n03482128', 'n03482252', + 'n03482405', 'n03482523', 'n03482877', 'n03483086', 'n03483230', + 'n03483316', 'n03483531', 'n03483637', 'n03483823', 'n03483971', + 'n03484083', 'n03484487', 'n03484576', 'n03484809', 'n03484931', + 'n03485198', 'n03485309', 'n03485407', 'n03485575', 'n03485794', + 'n03487090', 'n03487331', 'n03487444', 'n03487533', 'n03487642', + 'n03487774', 'n03487886', 'n03488111', 'n03488188', 'n03488438', + 'n03488603', 'n03488784', 'n03488887', 'n03489048', 'n03489162', + 'n03490006', 'n03490119', 'n03490324', 'n03490449', 'n03490649', + 'n03490784', 'n03490884', 'n03491032', 'n03491724', 'n03491988', + 'n03492087', 'n03492250', 'n03492542', 'n03492922', 'n03493219', + 'n03493792', 'n03493911', 'n03494278', 'n03494537', 'n03494706', + 'n03495039', 'n03495258', 'n03495570', 'n03495671', 'n03495941', + 'n03496183', 'n03496296', 'n03496486', 'n03496612', 'n03496892', + 'n03497100', 'n03497352', 'n03497657', 'n03498441', 'n03498536', + 'n03498662', 'n03498781', 'n03498866', 'n03498962', 'n03499354', + 'n03499468', 'n03499907', 'n03500090', 'n03500209', 'n03500295', + 'n03500389', 'n03500457', 'n03500557', 'n03500699', 'n03500838', + 'n03500971', 'n03501152', 'n03501288', 'n03501520', 'n03501614', + 'n03502200', 'n03502331', 'n03502509', 'n03502777', 'n03502897', + 'n03503097', 'n03503233', 'n03503358', 'n03503477', 'n03503567', + 'n03503718', 'n03503997', 'n03504205', 'n03504293', 'n03504723', + 'n03505015', 'n03505133', 'n03505383', 'n03505504', 'n03505667', + 'n03505764', 
'n03506028', 'n03506184', 'n03506370', 'n03506560', + 'n03506727', 'n03506880', 'n03507241', 'n03507458', 'n03507658', + 'n03507963', 'n03508101', 'n03508485', 'n03508881', 'n03509394', + 'n03509608', 'n03509843', 'n03510072', 'n03510244', 'n03510384', + 'n03510487', 'n03510583', 'n03510866', 'n03510987', 'n03511175', + 'n03511333', 'n03512030', 'n03512147', 'n03512452', 'n03512624', + 'n03512911', 'n03513137', 'n03513376', 'n03514129', 'n03514340', + 'n03514451', 'n03514693', 'n03514894', 'n03515338', 'n03515934', + 'n03516266', 'n03516367', 'n03516647', 'n03516844', 'n03516996', + 'n03517509', 'n03517647', 'n03517760', 'n03517899', 'n03517982', + 'n03518135', 'n03518230', 'n03518305', 'n03518445', 'n03518631', + 'n03518829', 'n03518943', 'n03519081', 'n03519226', 'n03519387', + 'n03519674', 'n03519848', 'n03520493', 'n03521076', 'n03521431', + 'n03521544', 'n03521675', 'n03521771', 'n03521899', 'n03522003', + 'n03522100', 'n03522634', 'n03522863', 'n03522990', 'n03523134', + 'n03523398', 'n03523506', 'n03523987', 'n03524150', 'n03524287', + 'n03524425', 'n03524574', 'n03524745', 'n03524976', 'n03525074', + 'n03525252', 'n03525454', 'n03525693', 'n03525827', 'n03526062', + 'n03527149', 'n03527444', 'n03527565', 'n03527675', 'n03528100', + 'n03528263', 'n03528523', 'n03528901', 'n03529175', 'n03529444', + 'n03529629', 'n03529860', 'n03530189', 'n03530511', 'n03530642', + 'n03530910', 'n03531281', 'n03531447', 'n03531546', 'n03531691', + 'n03531982', 'n03532342', 'n03532672', 'n03532919', 'n03533014', + 'n03533392', 'n03533486', 'n03533654', 'n03533845', 'n03534580', + 'n03534695', 'n03534776', 'n03535024', 'n03535284', 'n03535647', + 'n03535780', 'n03536122', 'n03536568', 'n03536761', 'n03537085', + 'n03537241', 'n03537412', 'n03537550', 'n03538037', 'n03538179', + 'n03538300', 'n03538406', 'n03538542', 'n03538634', 'n03538817', + 'n03538957', 'n03539103', 'n03539293', 'n03539433', 'n03539546', + 'n03539678', 'n03539754', 'n03540090', 'n03540267', 'n03540476', + 'n03540595', 'n03540914', 'n03541091', 'n03541269', 'n03541393', + 'n03541537', 'n03541696', 'n03541923', 'n03542333', 'n03542605', + 'n03542727', 'n03542860', 'n03543012', 'n03543112', 'n03543254', + 'n03543394', 'n03543511', 'n03543603', 'n03543735', 'n03543945', + 'n03544143', 'n03544238', 'n03544360', 'n03545150', 'n03545470', + 'n03545585', 'n03545756', 'n03545961', 'n03546112', 'n03546235', + 'n03546340', 'n03547054', 'n03547229', 'n03547397', 'n03547530', + 'n03547861', 'n03548086', 'n03548195', 'n03548320', 'n03548402', + 'n03548533', 'n03548626', 'n03548930', 'n03549199', 'n03549350', + 'n03549473', 'n03549589', 'n03549732', 'n03549897', 'n03550153', + 'n03550289', 'n03550420', 'n03551084', 'n03551395', 'n03551582', + 'n03551790', 'n03552001', 'n03552449', 'n03552749', 'n03553019', + 'n03553248', 'n03553486', 'n03554375', 'n03554460', 'n03554645', + 'n03555006', 'n03555217', 'n03555426', 'n03555564', 'n03555662', + 'n03555862', 'n03555996', 'n03556173', 'n03556679', 'n03556811', + 'n03556992', 'n03557270', 'n03557360', 'n03557590', 'n03557692', + 'n03557840', 'n03558007', 'n03558176', 'n03558404', 'n03558633', + 'n03558739', 'n03559373', 'n03559531', 'n03559999', 'n03560430', + 'n03560860', 'n03561047', 'n03561169', 'n03561573', 'n03562565', + 'n03563200', 'n03563460', 'n03563710', 'n03563967', 'n03564849', + 'n03565288', 'n03565565', 'n03565710', 'n03565830', 'n03565991', + 'n03566193', 'n03566329', 'n03566555', 'n03566730', 'n03566860', + 'n03567066', 'n03567635', 'n03567788', 'n03567912', 'n03568117', + 'n03568818', 
'n03569014', 'n03569174', 'n03569293', 'n03569494', + 'n03571280', 'n03571439', 'n03571625', 'n03571853', 'n03571942', + 'n03572107', 'n03572205', 'n03572321', 'n03572631', 'n03573574', + 'n03573848', 'n03574243', 'n03574416', 'n03574555', 'n03574816', + 'n03575958', 'n03576215', 'n03576443', 'n03576955', 'n03577090', + 'n03577312', 'n03577474', 'n03577672', 'n03577818', 'n03578055', + 'n03578251', 'n03578656', 'n03578981', 'n03579538', 'n03579982', + 'n03580518', 'n03580615', 'n03580845', 'n03580990', 'n03581125', + 'n03581531', 'n03581897', 'n03582508', 'n03582959', 'n03583419', + 'n03583621', 'n03584254', 'n03584400', 'n03584829', 'n03585073', + 'n03585337', 'n03585438', 'n03585551', 'n03585682', 'n03585778', + 'n03585875', 'n03586219', 'n03586631', 'n03586911', 'n03587205', + 'n03588216', 'n03588841', 'n03588951', 'n03589313', 'n03589513', + 'n03589672', 'n03589791', 'n03590306', 'n03590475', 'n03590588', + 'n03590841', 'n03590932', 'n03591116', 'n03591313', 'n03591592', + 'n03591798', 'n03591901', 'n03592245', 'n03592669', 'n03592773', + 'n03592931', 'n03593122', 'n03593222', 'n03593526', 'n03593862', + 'n03594010', 'n03594148', 'n03594277', 'n03594523', 'n03594734', + 'n03594945', 'n03595055', 'n03595264', 'n03595409', 'n03595523', + 'n03595614', 'n03595860', 'n03596099', 'n03596285', 'n03596543', + 'n03597147', 'n03597317', 'n03597916', 'n03598151', 'n03598299', + 'n03598385', 'n03598515', 'n03598646', 'n03598783', 'n03598930', + 'n03599486', 'n03599964', 'n03600285', 'n03600475', 'n03600722', + 'n03600977', 'n03601442', 'n03601638', 'n03601840', 'n03602081', + 'n03602194', 'n03602365', 'n03602686', 'n03602790', 'n03602883', + 'n03603442', 'n03603594', 'n03603722', 'n03604156', 'n03604311', + 'n03604400', 'n03604536', 'n03604629', 'n03604763', 'n03604843', + 'n03605417', 'n03605504', 'n03605598', 'n03605722', 'n03605915', + 'n03606106', 'n03606251', 'n03606347', 'n03606465', 'n03607029', + 'n03607186', 'n03607527', 'n03607659', 'n03607923', 'n03608504', + 'n03609147', 'n03609235', 'n03609397', 'n03609542', 'n03609786', + 'n03609959', 'n03610098', 'n03610418', 'n03610524', 'n03610682', + 'n03610836', 'n03610992', 'n03612010', 'n03612814', 'n03612965', + 'n03613294', 'n03613592', 'n03614007', 'n03614383', 'n03614532', + 'n03614782', 'n03614887', 'n03615300', 'n03615406', 'n03615563', + 'n03615655', 'n03615790', 'n03616091', 'n03616225', 'n03616428', + 'n03616763', 'n03616979', 'n03617095', 'n03617312', 'n03617480', + 'n03617594', 'n03617834', 'n03618101', 'n03618339', 'n03618546', + 'n03618678', 'n03618797', 'n03618982', 'n03619050', 'n03619196', + 'n03619275', 'n03619396', 'n03619650', 'n03619793', 'n03619890', + 'n03620052', 'n03620353', 'n03620967', 'n03621049', 'n03621377', + 'n03621694', 'n03622058', 'n03622401', 'n03622526', 'n03622839', + 'n03622931', 'n03623198', 'n03623338', 'n03623556', 'n03624134', + 'n03624400', 'n03624767', 'n03625355', 'n03625539', 'n03625646', + 'n03625943', 'n03626115', 'n03626272', 'n03626418', 'n03626502', + 'n03626760', 'n03627232', 'n03627954', 'n03628071', 'n03628215', + 'n03628421', 'n03628511', 'n03628728', 'n03628831', 'n03628984', + 'n03629100', 'n03629231', 'n03629520', 'n03629643', 'n03630262', + 'n03630383', 'n03631177', 'n03631811', 'n03631922', 'n03632100', + 'n03632577', 'n03632729', 'n03632852', 'n03632963', 'n03633091', + 'n03633341', 'n03633632', 'n03633886', 'n03634034', 'n03634899', + 'n03635032', 'n03635108', 'n03635330', 'n03635516', 'n03635668', + 'n03635932', 'n03636248', 'n03636649', 'n03637027', 'n03637181', + 'n03637318', 
'n03637480', 'n03637787', 'n03637898', 'n03638014', + 'n03638180', 'n03638623', 'n03638743', 'n03638883', 'n03639077', + 'n03639230', 'n03639497', 'n03639675', 'n03639880', 'n03640850', + 'n03640988', 'n03641569', 'n03641947', 'n03642144', 'n03642341', + 'n03642444', 'n03642573', 'n03642806', 'n03643149', 'n03643253', + 'n03643491', 'n03643737', 'n03643907', 'n03644073', 'n03644378', + 'n03644858', 'n03645011', 'n03645168', 'n03645290', 'n03645577', + 'n03646020', 'n03646148', 'n03646296', 'n03646809', 'n03646916', + 'n03647423', 'n03647520', 'n03648219', 'n03648431', 'n03648667', + 'n03649003', 'n03649161', 'n03649288', 'n03649674', 'n03649797', + 'n03649909', 'n03650551', 'n03651388', 'n03651605', 'n03651843', + 'n03652100', 'n03652389', 'n03652729', 'n03652826', 'n03652932', + 'n03653110', 'n03653220', 'n03653454', 'n03653583', 'n03653740', + 'n03653833', 'n03653975', 'n03654576', 'n03654826', 'n03655072', + 'n03655470', 'n03655720', 'n03656484', 'n03656957', 'n03657121', + 'n03657239', 'n03657511', 'n03658102', 'n03658185', 'n03658635', + 'n03658858', 'n03659292', 'n03659686', 'n03659809', 'n03659950', + 'n03660124', 'n03660562', 'n03660909', 'n03661043', 'n03661340', + 'n03662301', 'n03662452', 'n03662601', 'n03662719', 'n03662887', + 'n03663433', 'n03663531', 'n03663910', 'n03664159', 'n03664675', + 'n03664840', 'n03664943', 'n03665232', 'n03665366', 'n03665851', + 'n03665924', 'n03666238', 'n03666362', 'n03666591', 'n03666917', + 'n03667060', 'n03667235', 'n03667552', 'n03667664', 'n03667829', + 'n03668067', 'n03668279', 'n03668488', 'n03668803', 'n03669245', + 'n03669534', 'n03669886', 'n03670208', 'n03671914', 'n03672521', + 'n03672827', 'n03673027', 'n03673270', 'n03673450', 'n03673767', + 'n03674270', 'n03674440', 'n03674731', 'n03674842', 'n03675076', + 'n03675235', 'n03675445', 'n03675558', 'n03675907', 'n03676087', + 'n03676483', 'n03676623', 'n03676759', 'n03677115', 'n03677682', + 'n03677766', 'n03678558', 'n03678729', 'n03678879', 'n03679384', + 'n03679712', 'n03680248', 'n03680355', 'n03680512', 'n03680734', + 'n03680858', 'n03680942', 'n03681477', 'n03681813', 'n03682380', + 'n03682487', 'n03682877', 'n03683079', 'n03683341', 'n03683457', + 'n03683606', 'n03683708', 'n03683995', 'n03684143', 'n03684224', + 'n03684489', 'n03684611', 'n03684740', 'n03684823', 'n03685307', + 'n03685486', 'n03685640', 'n03685820', 'n03686130', 'n03686363', + 'n03686470', 'n03686924', 'n03687137', 'n03687928', 'n03688066', + 'n03688192', 'n03688405', 'n03688504', 'n03688605', 'n03688707', + 'n03688832', 'n03688943', 'n03689157', 'n03689570', 'n03690168', + 'n03690279', 'n03690473', 'n03690851', 'n03690938', 'n03691459', + 'n03691817', 'n03692004', 'n03692136', 'n03692272', 'n03692379', + 'n03692522', 'n03692842', 'n03693293', 'n03693474', 'n03693707', + 'n03693860', 'n03694196', 'n03694356', 'n03694639', 'n03694761', + 'n03694949', 'n03695122', 'n03695452', 'n03695616', 'n03695753', + 'n03695857', 'n03695957', 'n03696065', 'n03696301', 'n03696445', + 'n03696568', 'n03696746', 'n03696909', 'n03697007', 'n03697366', + 'n03697552', 'n03697812', 'n03697913', 'n03698123', 'n03698226', + 'n03698360', 'n03698604', 'n03698723', 'n03698815', 'n03699280', + 'n03699591', 'n03699754', 'n03699975', 'n03700963', 'n03701191', + 'n03701391', 'n03701640', 'n03701790', 'n03702248', 'n03702440', + 'n03702582', 'n03703075', 'n03703203', 'n03703463', 'n03703590', + 'n03703730', 'n03703862', 'n03703945', 'n03704549', 'n03704834', + 'n03705379', 'n03705808', 'n03706229', 'n03706415', 'n03706653', + 'n03706939', 
'n03707171', 'n03707372', 'n03707597', 'n03707766', + 'n03708036', 'n03708425', 'n03708843', 'n03708962', 'n03709206', + 'n03709363', 'n03709545', 'n03709644', 'n03709823', 'n03709960', + 'n03710079', 'n03710193', 'n03710294', 'n03710421', 'n03710528', + 'n03710637', 'n03710721', 'n03710937', 'n03711044', 'n03711711', + 'n03711999', 'n03712111', 'n03712337', 'n03712444', 'n03712887', + 'n03712981', 'n03713069', 'n03713151', 'n03713436', 'n03714235', + 'n03715114', 'n03715275', 'n03715386', 'n03715669', 'n03715892', + 'n03716228', 'n03716887', 'n03716966', 'n03717131', 'n03717285', + 'n03717447', 'n03717622', 'n03718212', 'n03718335', 'n03718458', + 'n03718581', 'n03718699', 'n03718789', 'n03718935', 'n03719053', + 'n03719343', 'n03719560', 'n03719743', 'n03720005', 'n03720163', + 'n03720665', 'n03720891', 'n03721047', 'n03721252', 'n03721384', + 'n03721590', 'n03722007', 'n03722288', 'n03722646', 'n03722944', + 'n03723153', 'n03723267', 'n03723439', 'n03723781', 'n03723885', + 'n03724066', 'n03724176', 'n03724417', 'n03724538', 'n03724623', + 'n03724756', 'n03724870', 'n03725035', 'n03725506', 'n03725600', + 'n03725717', 'n03725869', 'n03726116', 'n03726233', 'n03726371', + 'n03726516', 'n03726760', 'n03726993', 'n03727067', 'n03727465', + 'n03727605', 'n03727837', 'n03727946', 'n03728437', 'n03728982', + 'n03729131', 'n03729308', 'n03729402', 'n03729482', 'n03729647', + 'n03729826', 'n03729951', 'n03730153', 'n03730334', 'n03730494', + 'n03730655', 'n03730788', 'n03730893', 'n03731019', 'n03731483', + 'n03731695', 'n03731882', 'n03732020', 'n03732114', 'n03732458', + 'n03732543', 'n03732658', 'n03733131', 'n03733281', 'n03733465', + 'n03733547', 'n03733644', 'n03733805', 'n03733925', 'n03735637', + 'n03735963', 'n03736064', 'n03736147', 'n03736269', 'n03736372', + 'n03736470', 'n03736970', 'n03738066', 'n03738241', 'n03738472', + 'n03739518', 'n03739693', 'n03742019', 'n03742115', 'n03742238', + 'n03743016', 'n03743279', 'n03743902', 'n03744276', 'n03744684', + 'n03744840', 'n03745146', 'n03745487', 'n03745571', 'n03746005', + 'n03746155', 'n03746330', 'n03746486', 'n03748162', 'n03749504', + 'n03749634', 'n03749807', 'n03750206', 'n03750437', 'n03750614', + 'n03751065', 'n03751269', 'n03751458', 'n03751590', 'n03751757', + 'n03752071', 'n03752185', 'n03752398', 'n03752922', 'n03753077', + 'n03753514', 'n03757604', 'n03758089', 'n03758220', 'n03758894', + 'n03758992', 'n03759243', 'n03759432', 'n03759661', 'n03759954', + 'n03760310', 'n03760671', 'n03760944', 'n03761084', 'n03761588', + 'n03761731', 'n03762238', 'n03762332', 'n03762434', 'n03762602', + 'n03762982', 'n03763727', 'n03763968', 'n03764276', 'n03764606', + 'n03764736', 'n03764822', 'n03764995', 'n03765128', 'n03765467', + 'n03765561', 'n03765934', 'n03766044', 'n03766218', 'n03766322', + 'n03766508', 'n03766600', 'n03766697', 'n03766935', 'n03767112', + 'n03767203', 'n03767459', 'n03767745', 'n03767966', 'n03768132', + 'n03768683', 'n03768823', 'n03768916', 'n03769610', 'n03769722', + 'n03769881', 'n03770085', 'n03770224', 'n03770316', 'n03770439', + 'n03770520', 'n03770679', 'n03770834', 'n03770954', 'n03772077', + 'n03772269', 'n03772584', 'n03772674', 'n03773035', 'n03773504', + 'n03773835', 'n03774327', 'n03774461', 'n03775071', 'n03775199', + 'n03775388', 'n03775546', 'n03775636', 'n03775747', 'n03775847', + 'n03776167', 'n03776460', 'n03776877', 'n03776997', 'n03777126', + 'n03777568', 'n03777754', 'n03778459', 'n03778817', 'n03779000', + 'n03779128', 'n03779246', 'n03779370', 'n03779884', 'n03780047', + 'n03780799', 
'n03781055', 'n03781244', 'n03781467', 'n03781594', + 'n03781683', 'n03781787', 'n03782006', 'n03782190', 'n03782794', + 'n03782929', 'n03783304', 'n03783430', 'n03783575', 'n03783873', + 'n03784139', 'n03784270', 'n03784793', 'n03784896', 'n03785016', + 'n03785142', 'n03785237', 'n03785499', 'n03785721', 'n03786096', + 'n03786194', 'n03786313', 'n03786621', 'n03786715', 'n03786901', + 'n03787032', 'n03787523', 'n03788047', 'n03788195', 'n03788365', + 'n03788498', 'n03788601', 'n03788914', 'n03789171', 'n03789400', + 'n03789603', 'n03789794', 'n03789946', 'n03790230', 'n03790512', + 'n03790755', 'n03790953', 'n03791053', 'n03791235', 'n03792048', + 'n03792334', 'n03792526', 'n03792782', 'n03792972', 'n03793489', + 'n03793850', 'n03794056', 'n03794136', 'n03794798', 'n03795123', + 'n03795269', 'n03795758', 'n03795976', 'n03796181', 'n03796401', + 'n03796522', 'n03796605', 'n03796848', 'n03796974', 'n03797062', + 'n03797182', 'n03797264', 'n03797390', 'n03797896', 'n03798061', + 'n03798442', 'n03798610', 'n03798982', 'n03799113', 'n03799240', + 'n03799375', 'n03799610', 'n03799876', 'n03800371', 'n03800485', + 'n03800563', 'n03800772', 'n03800933', 'n03801353', 'n03801533', + 'n03801671', 'n03801760', 'n03801880', 'n03802007', 'n03802228', + 'n03802393', 'n03802643', 'n03802800', 'n03802973', 'n03803116', + 'n03803284', 'n03803780', 'n03804211', 'n03804744', 'n03805180', + 'n03805280', 'n03805374', 'n03805503', 'n03805725', 'n03805933', + 'n03807334', 'n03809211', 'n03809312', 'n03809603', 'n03809686', + 'n03809802', 'n03810412', 'n03810952', 'n03811295', 'n03811444', + 'n03811847', 'n03811965', 'n03812263', 'n03812382', 'n03812789', + 'n03812924', 'n03813078', 'n03813176', 'n03813946', 'n03814528', + 'n03814639', 'n03814727', 'n03814817', 'n03814906', 'n03815149', + 'n03815278', 'n03815482', 'n03815615', 'n03816005', 'n03816136', + 'n03816394', 'n03816530', 'n03816849', 'n03817191', 'n03817331', + 'n03817522', 'n03817647', 'n03818001', 'n03818343', 'n03819047', + 'n03819336', 'n03819448', 'n03819595', 'n03819994', 'n03820154', + 'n03820318', 'n03820728', 'n03820950', 'n03821145', 'n03821424', + 'n03821518', 'n03822171', 'n03822361', 'n03822504', 'n03822656', + 'n03822767', 'n03823111', 'n03823216', 'n03823312', 'n03823673', + 'n03823906', 'n03824197', 'n03824284', 'n03824381', 'n03824589', + 'n03824713', 'n03824999', 'n03825080', 'n03825271', 'n03825442', + 'n03825673', 'n03825788', 'n03825913', 'n03826039', 'n03826186', + 'n03827420', 'n03827536', 'n03828020', 'n03829340', 'n03829857', + 'n03829954', 'n03831203', 'n03831382', 'n03831757', 'n03832144', + 'n03832673', 'n03833907', 'n03834040', 'n03834472', 'n03834604', + 'n03835197', 'n03835729', 'n03835941', 'n03836062', 'n03836451', + 'n03836602', 'n03836906', 'n03836976', 'n03837422', 'n03837606', + 'n03837698', 'n03837869', 'n03838024', 'n03838298', 'n03838748', + 'n03838899', 'n03839172', 'n03839276', 'n03839424', 'n03839671', + 'n03839795', 'n03840327', 'n03840681', 'n03840823', 'n03841011', + 'n03841143', 'n03841290', 'n03841666', 'n03842012', 'n03842156', + 'n03842276', 'n03842377', 'n03842585', 'n03842754', 'n03842986', + 'n03843092', 'n03843316', 'n03843438', 'n03843555', 'n03843883', + 'n03844045', 'n03844233', 'n03844550', 'n03844673', 'n03844815', + 'n03844965', 'n03845107', 'n03845190', 'n03845990', 'n03846100', + 'n03846234', 'n03846431', 'n03846677', 'n03846772', 'n03846970', + 'n03847471', 'n03847823', 'n03848033', 'n03848168', 'n03848348', + 'n03848537', 'n03849275', 'n03849412', 'n03849679', 'n03849814', + 'n03849943', 
'n03850053', 'n03850245', 'n03850492', 'n03850613', + 'n03851341', 'n03851787', 'n03852280', 'n03852544', 'n03852688', + 'n03853291', 'n03853924', 'n03854065', 'n03854421', 'n03854506', + 'n03854722', 'n03854815', 'n03855214', 'n03855333', 'n03855464', + 'n03855604', 'n03855756', 'n03855908', 'n03856012', 'n03856335', + 'n03856465', 'n03856728', 'n03857026', 'n03857156', 'n03857291', + 'n03857687', 'n03857828', 'n03858085', 'n03858183', 'n03858418', + 'n03858533', 'n03858837', 'n03859000', 'n03859170', 'n03859280', + 'n03859495', 'n03859608', 'n03859958', 'n03860234', 'n03860404', + 'n03861048', 'n03861271', 'n03861430', 'n03861596', 'n03861842', + 'n03862379', 'n03862676', 'n03862862', 'n03863108', 'n03863262', + 'n03863657', 'n03863783', 'n03863923', 'n03864139', 'n03864356', + 'n03864692', 'n03865288', 'n03865371', 'n03865557', 'n03865820', + 'n03865949', 'n03866082', 'n03867854', 'n03868044', 'n03868242', + 'n03868324', 'n03868406', 'n03868643', 'n03868763', 'n03868863', + 'n03869838', 'n03869976', 'n03870105', 'n03870290', 'n03870546', + 'n03870672', 'n03870980', 'n03871083', 'n03871371', 'n03871524', + 'n03871628', 'n03871724', 'n03871860', 'n03872016', 'n03872167', + 'n03872273', 'n03873416', 'n03873699', 'n03873848', 'n03873996', + 'n03874138', 'n03874293', 'n03874487', 'n03874599', 'n03874823', + 'n03875218', 'n03875806', 'n03875955', 'n03876111', 'n03876231', + 'n03877351', 'n03877472', 'n03877674', 'n03877845', 'n03878066', + 'n03878211', 'n03878294', 'n03878418', 'n03878511', 'n03878674', + 'n03878828', 'n03878963', 'n03879456', 'n03879705', 'n03880032', + 'n03880129', 'n03880323', 'n03880531', 'n03881305', 'n03881404', + 'n03881534', 'n03882611', 'n03882960', 'n03883054', 'n03883385', + 'n03883524', 'n03883664', 'n03883773', 'n03883944', 'n03884397', + 'n03884554', 'n03884639', 'n03884778', 'n03884926', 'n03885028', + 'n03885194', 'n03885293', 'n03885410', 'n03885535', 'n03885669', + 'n03885788', 'n03885904', 'n03886053', 'n03886641', 'n03886762', + 'n03886940', 'n03887185', 'n03887330', 'n03887512', 'n03887697', + 'n03887899', 'n03888022', 'n03888257', 'n03888605', 'n03888808', + 'n03888998', 'n03889397', 'n03889503', 'n03889626', 'n03889726', + 'n03889871', 'n03890093', 'n03890233', 'n03890358', 'n03890514', + 'n03891051', 'n03891251', 'n03891332', 'n03891538', 'n03892178', + 'n03892425', 'n03892557', 'n03892728', 'n03893935', 'n03894051', + 'n03894379', 'n03894677', 'n03894933', 'n03895038', 'n03895170', + 'n03895866', 'n03896103', 'n03896233', 'n03896419', 'n03896526', + 'n03896628', 'n03896984', 'n03897130', 'n03897634', 'n03897943', + 'n03898129', 'n03898271', 'n03898395', 'n03898633', 'n03898787', + 'n03899100', 'n03899612', 'n03899768', 'n03899933', 'n03900028', + 'n03900194', 'n03900301', 'n03900393', 'n03900979', 'n03901229', + 'n03901338', 'n03901750', 'n03901974', 'n03902125', 'n03902220', + 'n03902482', 'n03902756', 'n03903133', 'n03903290', 'n03903424', + 'n03903733', 'n03903868', 'n03904060', 'n03904183', 'n03904433', + 'n03904657', 'n03904782', 'n03904909', 'n03905361', 'n03905540', + 'n03905730', 'n03905947', 'n03906106', 'n03906224', 'n03906463', + 'n03906590', 'n03906789', 'n03906894', 'n03906997', 'n03907475', + 'n03907654', 'n03907908', 'n03908111', 'n03908204', 'n03908456', + 'n03908618', 'n03908714', 'n03909020', 'n03909160', 'n03909406', + 'n03909516', 'n03909658', 'n03911406', 'n03911513', 'n03911658', + 'n03911767', 'n03911866', 'n03912218', 'n03912821', 'n03913343', + 'n03913930', 'n03914106', 'n03914337', 'n03914438', 'n03914583', + 'n03914831', 
'n03915118', 'n03915320', 'n03915437', 'n03915900', + 'n03916031', 'n03916289', 'n03916385', 'n03916470', 'n03916720', + 'n03917048', 'n03917198', 'n03917327', 'n03917814', 'n03918074', + 'n03918480', 'n03918737', 'n03919096', 'n03919289', 'n03919430', + 'n03919808', 'n03920288', 'n03920384', 'n03920641', 'n03920737', + 'n03920867', 'n03923379', 'n03923564', 'n03923692', 'n03923918', + 'n03924069', 'n03924407', 'n03924532', 'n03924679', 'n03926148', + 'n03926412', 'n03926876', 'n03927091', 'n03927299', 'n03927539', + 'n03927792', 'n03928116', 'n03928589', 'n03928814', 'n03928994', + 'n03929091', 'n03929202', 'n03929443', 'n03929660', 'n03929855', + 'n03930229', 'n03930313', 'n03930431', 'n03930515', 'n03930630', + 'n03931765', 'n03931885', 'n03931980', 'n03932080', 'n03932670', + 'n03933391', 'n03933933', 'n03934042', 'n03934229', 'n03934311', + 'n03934565', 'n03934656', 'n03934890', 'n03935116', 'n03935234', + 'n03935335', 'n03935883', 'n03936269', 'n03936466', 'n03937543', + 'n03937835', 'n03937931', 'n03938037', 'n03938244', 'n03938401', + 'n03938522', 'n03938725', 'n03939062', 'n03939178', 'n03939281', + 'n03939440', 'n03939565', 'n03939677', 'n03939844', 'n03940256', + 'n03940894', 'n03941013', 'n03941231', 'n03941417', 'n03941586', + 'n03941684', 'n03941887', 'n03942028', 'n03942600', 'n03942813', + 'n03942920', 'n03943115', 'n03943266', 'n03943623', 'n03943714', + 'n03943833', 'n03943920', 'n03944024', 'n03944138', 'n03944341', + 'n03945459', 'n03945615', 'n03945817', 'n03945928', 'n03946076', + 'n03946162', 'n03947111', 'n03947343', 'n03947466', 'n03947798', + 'n03947888', 'n03948242', 'n03948459', 'n03948830', 'n03948950', + 'n03949145', 'n03949317', 'n03949761', 'n03950228', 'n03950359', + 'n03950537', 'n03950647', 'n03950899', 'n03951068', 'n03951213', + 'n03951453', 'n03951800', 'n03951971', 'n03952150', 'n03952576', + 'n03953020', 'n03953416', 'n03953901', 'n03954393', 'n03954731', + 'n03955296', 'n03955489', 'n03955809', 'n03955941', 'n03956157', + 'n03956331', 'n03956531', 'n03956623', 'n03956785', 'n03956922', + 'n03957315', 'n03957420', 'n03957762', 'n03957991', 'n03958227', + 'n03958338', 'n03958630', 'n03958752', 'n03959014', 'n03959123', + 'n03959227', 'n03959701', 'n03960374', 'n03960490', 'n03961394', + 'n03961630', 'n03961711', 'n03961828', 'n03961939', 'n03962525', + 'n03962685', 'n03962852', 'n03962932', 'n03963028', 'n03963198', + 'n03963294', 'n03963483', 'n03963645', 'n03964495', 'n03964611', + 'n03965456', 'n03965907', 'n03966206', 'n03966325', 'n03966582', + 'n03966751', 'n03966976', 'n03967270', 'n03967396', 'n03967562', + 'n03967942', 'n03968293', 'n03968479', 'n03968581', 'n03968728', + 'n03969510', 'n03970156', 'n03970363', 'n03970546', 'n03971218', + 'n03971321', 'n03971960', 'n03972146', 'n03972372', 'n03972524', + 'n03973003', 'n03973285', 'n03973402', 'n03973520', 'n03973628', + 'n03973839', 'n03973945', 'n03974070', 'n03974915', 'n03975035', + 'n03975657', 'n03975788', 'n03975926', 'n03976105', 'n03976268', + 'n03976467', 'n03976657', 'n03977158', 'n03977266', 'n03977430', + 'n03977592', 'n03977966', 'n03978421', 'n03978575', 'n03978686', + 'n03978815', 'n03978966', 'n03979377', 'n03979492', 'n03980026', + 'n03980478', 'n03980874', 'n03980986', 'n03981094', 'n03981340', + 'n03981566', 'n03981760', 'n03981924', 'n03982232', 'n03982331', + 'n03982430', 'n03982642', 'n03982767', 'n03982895', 'n03983396', + 'n03983499', 'n03983612', 'n03983712', 'n03983928', 'n03984125', + 'n03984234', 'n03984381', 'n03984643', 'n03984759', 'n03985069', + 'n03985232', 
'n03985441', 'n03985881', 'n03986071', 'n03986224', + 'n03986355', 'n03986562', 'n03986704', 'n03986857', 'n03986949', + 'n03987266', 'n03987376', 'n03987674', 'n03987865', 'n03987990', + 'n03988170', 'n03988758', 'n03988926', 'n03989199', 'n03989349', + 'n03989447', 'n03989665', 'n03989777', 'n03989898', 'n03990474', + 'n03991062', 'n03991202', 'n03991321', 'n03991443', 'n03991646', + 'n03991837', 'n03992325', 'n03992436', 'n03992509', 'n03992703', + 'n03992975', 'n03993053', 'n03993180', 'n03993403', 'n03993703', + 'n03993878', 'n03994008', 'n03994297', 'n03994417', 'n03994614', + 'n03994757', 'n03995018', 'n03995265', 'n03995372', 'n03995535', + 'n03995661', 'n03995856', 'n03996004', 'n03996145', 'n03996416', + 'n03996849', 'n03997274', 'n03997484', 'n03997875', 'n03998194', + 'n03998333', 'n03998673', 'n03999064', 'n03999160', 'n03999621', + 'n03999992', 'n04000311', 'n04000480', 'n04000592', 'n04000716', + 'n04000998', 'n04001132', 'n04001265', 'n04001397', 'n04001499', + 'n04001661', 'n04001845', 'n04002262', 'n04002371', 'n04002629', + 'n04003241', 'n04003359', 'n04003856', 'n04004099', 'n04004210', + 'n04004475', 'n04004767', 'n04004990', 'n04005197', 'n04005630', + 'n04005912', 'n04006067', 'n04006227', 'n04006330', 'n04006411', + 'n04007415', 'n04007664', 'n04008385', 'n04008634', 'n04009552', + 'n04009801', 'n04009923', 'n04010057', 'n04010779', 'n04010927', + 'n04011827', 'n04012084', 'n04012482', 'n04012665', 'n04013060', + 'n04013176', 'n04013600', 'n04013729', 'n04014297', 'n04015204', + 'n04015786', 'n04015908', 'n04016240', 'n04016479', 'n04016576', + 'n04016684', 'n04016846', 'n04017571', 'n04017807', 'n04018155', + 'n04018399', 'n04018667', 'n04019101', 'n04019335', 'n04019541', + 'n04019696', 'n04019881', 'n04020087', 'n04020298', 'n04020744', + 'n04020912', 'n04021028', 'n04021164', 'n04021362', 'n04021503', + 'n04021704', 'n04021798', 'n04022332', 'n04022434', 'n04022708', + 'n04022866', 'n04023021', 'n04023119', 'n04023249', 'n04023422', + 'n04023695', 'n04023962', 'n04024137', 'n04024274', 'n04024862', + 'n04024983', 'n04025508', 'n04025633', 'n04026053', 'n04026180', + 'n04026417', 'n04026813', 'n04026918', 'n04027023', 'n04027367', + 'n04027706', 'n04027820', 'n04027935', 'n04028074', 'n04028221', + 'n04028315', 'n04028581', 'n04028764', 'n04029416', 'n04029647', + 'n04029734', 'n04029913', 'n04030054', 'n04030161', 'n04030274', + 'n04030414', 'n04030518', 'n04030846', 'n04030965', 'n04031884', + 'n04032509', 'n04032603', 'n04032936', 'n04033287', 'n04033425', + 'n04033557', 'n04033801', 'n04033901', 'n04033995', 'n04034262', + 'n04034367', 'n04035231', 'n04035634', 'n04035748', 'n04035836', + 'n04035912', 'n04036155', 'n04036303', 'n04036776', 'n04036963', + 'n04037076', 'n04037220', 'n04037298', 'n04037443', 'n04037873', + 'n04037964', 'n04038231', 'n04038338', 'n04038440', 'n04038727', + 'n04039041', 'n04039209', 'n04039381', 'n04039742', 'n04039848', + 'n04040247', 'n04040373', 'n04040540', 'n04040759', 'n04041069', + 'n04041243', 'n04041408', 'n04041544', 'n04041747', 'n04042076', + 'n04042204', 'n04042358', 'n04042632', 'n04042795', 'n04042985', + 'n04043168', 'n04043411', 'n04043733', 'n04044307', 'n04044498', + 'n04044716', 'n04044955', 'n04045085', 'n04045255', 'n04045397', + 'n04045644', 'n04045787', 'n04045941', 'n04046091', 'n04046277', + 'n04046400', 'n04046590', 'n04046974', 'n04047139', 'n04047401', + 'n04047733', 'n04047834', 'n04048441', 'n04049303', 'n04049405', + 'n04049585', 'n04049753', 'n04050066', 'n04050313', 'n04050600', + 'n04050933', 
'n04051269', 'n04051439', 'n04051549', 'n04051705', + 'n04051825', 'n04052235', 'n04052346', 'n04052442', 'n04052658', + 'n04052757', 'n04053508', 'n04053677', 'n04053767', 'n04054361', + 'n04054566', 'n04054670', 'n04055180', 'n04055447', 'n04055700', + 'n04055861', 'n04056073', 'n04056180', 'n04056413', 'n04056932', + 'n04057047', 'n04057215', 'n04057435', 'n04057673', 'n04057846', + 'n04057981', 'n04058096', 'n04058239', 'n04058486', 'n04058594', + 'n04058721', 'n04059157', 'n04059298', 'n04059399', 'n04059516', + 'n04059947', 'n04060198', 'n04060448', 'n04060647', 'n04060904', + 'n04061681', 'n04061793', 'n04061969', 'n04062179', 'n04062428', + 'n04062644', 'n04062807', 'n04063154', 'n04063373', 'n04063868', + 'n04064213', 'n04064401', 'n04064747', 'n04064862', 'n04065272', + 'n04065464', 'n04065789', 'n04065909', 'n04066023', 'n04066270', + 'n04066388', 'n04066476', 'n04066767', 'n04067143', 'n04067231', + 'n04067353', 'n04067472', 'n04067658', 'n04067818', 'n04067921', + 'n04068441', 'n04068601', 'n04069166', 'n04069276', 'n04069434', + 'n04069582', 'n04069777', 'n04070003', 'n04070207', 'n04070415', + 'n04070545', 'n04070727', 'n04070964', 'n04071102', 'n04071263', + 'n04071393', 'n04072193', 'n04072551', 'n04072960', 'n04073425', + 'n04073948', 'n04074185', 'n04074963', 'n04075291', 'n04075468', + 'n04075715', 'n04075813', 'n04075916', 'n04076052', 'n04076284', + 'n04076713', 'n04077430', 'n04077594', 'n04077734', 'n04077889', + 'n04078002', 'n04078574', 'n04078955', 'n04079106', 'n04079244', + 'n04079603', 'n04079933', 'n04080138', 'n04080454', 'n04080705', + 'n04080833', 'n04081281', 'n04081699', 'n04081844', 'n04082344', + 'n04082562', 'n04082710', 'n04082886', 'n04083113', 'n04083309', + 'n04083649', 'n04083800', 'n04084517', 'n04084682', 'n04084889', + 'n04085017', 'n04085574', 'n04085873', 'n04086066', 'n04086273', + 'n04086446', 'n04086663', 'n04086794', 'n04086937', 'n04087126', + 'n04087432', 'n04087709', 'n04087826', 'n04088229', 'n04088343', + 'n04088441', 'n04088696', 'n04088797', 'n04089152', 'n04089376', + 'n04089666', 'n04089836', 'n04089976', 'n04090263', 'n04090548', + 'n04090781', 'n04091097', 'n04091466', 'n04091584', 'n04091693', + 'n04092168', 'n04093157', 'n04093223', 'n04093625', 'n04093775', + 'n04093915', 'n04094060', 'n04094250', 'n04094438', 'n04094608', + 'n04094720', 'n04094859', 'n04095109', 'n04095210', 'n04095342', + 'n04095577', 'n04095938', 'n04096066', 'n04096733', 'n04096848', + 'n04097085', 'n04097373', 'n04097622', 'n04097760', 'n04097866', + 'n04098169', 'n04098260', 'n04098399', 'n04098513', 'n04098795', + 'n04099003', 'n04099175', 'n04099429', 'n04099969', 'n04100174', + 'n04100519', 'n04101375', 'n04101497', 'n04101701', 'n04101860', + 'n04102037', 'n04102162', 'n04102285', 'n04102406', 'n04102618', + 'n04102760', 'n04102872', 'n04102962', 'n04103094', 'n04103206', + 'n04103364', 'n04103665', 'n04103769', 'n04103918', 'n04104147', + 'n04104384', 'n04104500', 'n04104770', 'n04104925', 'n04105068', + 'n04105438', 'n04105704', 'n04105893', 'n04107598', 'n04107743', + 'n04107984', 'n04108268', 'n04108822', 'n04108999', 'n04110068', + 'n04110178', 'n04110281', 'n04110439', 'n04110654', 'n04110841', + 'n04110955', 'n04111190', 'n04111414', 'n04111531', 'n04111668', + 'n04111962', 'n04112147', 'n04112252', 'n04112430', 'n04112579', + 'n04112654', 'n04112752', 'n04112921', 'n04113038', 'n04113194', + 'n04113316', 'n04113406', 'n04113641', 'n04113765', 'n04113968', + 'n04114069', 'n04114301', 'n04114428', 'n04114719', 'n04114844', + 'n04114996', 
'n04115144', 'n04115256', 'n04115456', 'n04115542', + 'n04115802', 'n04115996', 'n04116098', 'n04116294', 'n04116389', + 'n04116512', 'n04117216', 'n04117464', 'n04117639', 'n04118021', + 'n04118538', 'n04118635', 'n04118776', 'n04119091', 'n04119230', + 'n04119360', 'n04119478', 'n04119630', 'n04119751', 'n04120489', + 'n04120695', 'n04120842', 'n04121228', 'n04121342', 'n04121426', + 'n04121511', 'n04121728', 'n04122262', 'n04122349', 'n04122492', + 'n04122578', 'n04122685', 'n04122825', 'n04123026', 'n04123123', + 'n04123228', 'n04123317', 'n04123448', 'n04123567', 'n04123740', + 'n04124098', 'n04124202', 'n04124370', 'n04124488', 'n04124573', + 'n04124887', 'n04125021', 'n04125116', 'n04125257', 'n04125541', + 'n04125692', 'n04125853', 'n04126066', 'n04126244', 'n04126541', + 'n04126659', 'n04126852', 'n04126980', 'n04127117', 'n04127249', + 'n04127395', 'n04127521', 'n04127633', 'n04127904', 'n04128413', + 'n04128499', 'n04128710', 'n04128837', 'n04129490', 'n04129688', + 'n04129766', 'n04130143', 'n04130257', 'n04130566', 'n04130907', + 'n04131015', 'n04131113', 'n04131208', 'n04131368', 'n04131499', + 'n04131690', 'n04131811', 'n04131929', 'n04132158', 'n04132465', + 'n04132603', 'n04132829', 'n04132985', 'n04133114', 'n04133789', + 'n04134008', 'n04134170', 'n04134523', 'n04134632', 'n04135024', + 'n04135118', 'n04135315', 'n04135710', 'n04135933', 'n04136045', + 'n04136161', 'n04136333', 'n04136510', 'n04136800', 'n04137089', + 'n04137217', 'n04137355', 'n04137444', 'n04137773', 'n04137897', + 'n04138131', 'n04138261', 'n04138869', 'n04138977', 'n04139140', + 'n04139395', 'n04139859', 'n04140064', 'n04140539', 'n04140631', + 'n04140777', 'n04140853', 'n04141076', 'n04141198', 'n04141327', + 'n04141712', 'n04141838', 'n04141975', 'n04142175', 'n04142327', + 'n04142434', 'n04142731', 'n04142999', 'n04143140', 'n04143365', + 'n04143897', 'n04144241', 'n04144539', 'n04144651', 'n04145863', + 'n04146050', 'n04146343', 'n04146504', 'n04146614', 'n04146862', + 'n04146976', 'n04147183', 'n04147291', 'n04147495', 'n04147793', + 'n04147916', 'n04148054', 'n04148285', 'n04148464', 'n04148579', + 'n04148703', 'n04149083', 'n04149374', 'n04149813', 'n04150153', + 'n04150273', 'n04150371', 'n04150980', 'n04151108', 'n04151581', + 'n04151940', 'n04152387', 'n04152593', 'n04153025', 'n04153330', + 'n04153751', 'n04154152', 'n04154340', 'n04154565', 'n04154753', + 'n04154854', 'n04154938', 'n04155068', 'n04155177', 'n04155457', + 'n04155625', 'n04155735', 'n04155889', 'n04156040', 'n04156140', + 'n04156297', 'n04156411', 'n04156591', 'n04156814', 'n04156946', + 'n04157099', 'n04157320', 'n04158002', 'n04158138', 'n04158250', + 'n04158672', 'n04158807', 'n04158956', 'n04160036', 'n04160261', + 'n04160372', 'n04160586', 'n04160847', 'n04161010', 'n04161358', + 'n04161981', 'n04162433', 'n04162706', 'n04163530', 'n04164002', + 'n04164199', 'n04164406', 'n04164757', 'n04164868', 'n04165409', + 'n04165675', 'n04165945', 'n04166111', 'n04166281', 'n04166436', + 'n04167346', 'n04167489', 'n04167661', 'n04168084', 'n04168199', + 'n04168472', 'n04168541', 'n04168840', 'n04169437', 'n04169597', + 'n04170037', 'n04170384', 'n04170515', 'n04170694', 'n04170933', + 'n04171208', 'n04171459', 'n04171629', 'n04171831', 'n04172107', + 'n04172230', 'n04172342', 'n04172512', 'n04172607', 'n04172776', + 'n04172904', 'n04173046', 'n04173172', 'n04173511', 'n04173907', + 'n04174026', 'n04174101', 'n04174234', 'n04174500', 'n04174705', + 'n04175039', 'n04175147', 'n04175574', 'n04176068', 'n04176190', + 'n04176295', 
'n04176528', 'n04177041', 'n04177329', 'n04177545', + 'n04177654', 'n04177755', 'n04177820', 'n04177931', 'n04178190', + 'n04178329', 'n04178668', 'n04179126', 'n04179712', 'n04179824', + 'n04179913', 'n04180063', 'n04180229', 'n04180888', 'n04181083', + 'n04181228', 'n04181561', 'n04181718', 'n04182152', 'n04182322', + 'n04183217', 'n04183329', 'n04183957', 'n04184095', 'n04184316', + 'n04184435', 'n04184600', 'n04184880', 'n04185071', 'n04185529', + 'n04185804', 'n04185946', 'n04186051', 'n04186268', 'n04186455', + 'n04186624', 'n04186848', 'n04187061', 'n04187233', 'n04187547', + 'n04187751', 'n04187885', 'n04187970', 'n04188064', 'n04188179', + 'n04189092', 'n04189282', 'n04189651', 'n04189816', 'n04190052', + 'n04190376', 'n04190464', 'n04190747', 'n04190997', 'n04191150', + 'n04191595', 'n04191943', 'n04192238', 'n04192361', 'n04192521', + 'n04192698', 'n04192858', 'n04193179', 'n04193377', 'n04193742', + 'n04193883', 'n04194009', 'n04194127', 'n04194289', 'n04196080', + 'n04196502', 'n04196803', 'n04196925', 'n04197110', 'n04197391', + 'n04197781', 'n04197878', 'n04198015', 'n04198233', 'n04198355', + 'n04198453', 'n04198562', 'n04198722', 'n04198797', 'n04199027', + 'n04200000', 'n04200258', 'n04200537', 'n04200800', 'n04200908', + 'n04201064', 'n04201297', 'n04201733', 'n04202142', 'n04202282', + 'n04202417', 'n04203356', 'n04204081', 'n04204238', 'n04204347', + 'n04204755', 'n04205062', 'n04205318', 'n04205505', 'n04205613', + 'n04206070', 'n04206225', 'n04206356', 'n04206570', 'n04206790', + 'n04207151', 'n04207343', 'n04207596', 'n04207763', 'n04207903', + 'n04208065', 'n04208210', 'n04208427', 'n04208582', 'n04208760', + 'n04208936', 'n04209133', 'n04209239', 'n04209509', 'n04209613', + 'n04209811', 'n04210012', 'n04210120', 'n04210288', 'n04210390', + 'n04210591', 'n04210858', 'n04211001', 'n04211219', 'n04211356', + 'n04211528', 'n04211857', 'n04211970', 'n04212165', 'n04212282', + 'n04212467', 'n04212810', 'n04213105', 'n04213264', 'n04213353', + 'n04213530', 'n04214046', 'n04214282', 'n04214413', 'n04214649', + 'n04215153', 'n04215402', 'n04215588', 'n04215800', 'n04215910', + 'n04216634', 'n04216860', 'n04216963', 'n04217387', 'n04217546', + 'n04217718', 'n04217882', 'n04218564', 'n04218921', 'n04219185', + 'n04219424', 'n04219580', 'n04220250', 'n04220805', 'n04221076', + 'n04221673', 'n04221823', 'n04222210', 'n04222307', 'n04222470', + 'n04222723', 'n04222847', 'n04223066', 'n04223170', 'n04223299', + 'n04224395', 'n04224543', 'n04224842', 'n04225031', 'n04225222', + 'n04225729', 'n04225987', 'n04226322', 'n04226464', 'n04226537', + 'n04226826', 'n04226962', 'n04227050', 'n04227144', 'n04227519', + 'n04227787', 'n04227900', 'n04228054', 'n04228215', 'n04228422', + 'n04228581', 'n04228693', 'n04229007', 'n04229107', 'n04229480', + 'n04229620', 'n04229737', 'n04229816', 'n04229959', 'n04230387', + 'n04230487', 'n04230603', 'n04230707', 'n04230808', 'n04231272', + 'n04231693', 'n04231905', 'n04232153', 'n04232312', 'n04232437', + 'n04232800', 'n04233027', 'n04233124', 'n04233295', 'n04233715', + 'n04233832', 'n04234160', 'n04234260', 'n04234455', 'n04234670', + 'n04234763', 'n04234887', 'n04235291', 'n04235646', 'n04235771', + 'n04235860', 'n04236001', 'n04236377', 'n04236702', 'n04236809', + 'n04236935', 'n04237174', 'n04237287', 'n04237423', 'n04238128', + 'n04238321', 'n04238617', 'n04238763', 'n04238953', 'n04239074', + 'n04239218', 'n04239333', 'n04239436', 'n04239639', 'n04239786', + 'n04239900', 'n04240434', 'n04240752', 'n04240867', 'n04241042', + 'n04241249', 
'n04241394', 'n04241573', 'n04242084', 'n04242315', + 'n04242408', 'n04242587', 'n04242704', 'n04243003', 'n04243142', + 'n04243251', 'n04243546', 'n04243941', 'n04244379', 'n04244847', + 'n04244997', 'n04245218', 'n04245412', 'n04245508', 'n04245847', + 'n04246060', 'n04246271', 'n04246459', 'n04246731', 'n04246855', + 'n04247011', 'n04247440', 'n04247544', 'n04247630', 'n04247736', + 'n04247876', 'n04248209', 'n04248396', 'n04248507', 'n04248851', + 'n04249415', 'n04249582', 'n04249882', 'n04250224', 'n04250473', + 'n04250599', 'n04250692', 'n04250850', 'n04251144', 'n04251701', + 'n04251791', 'n04252077', 'n04252225', 'n04252331', 'n04252560', + 'n04252653', 'n04253057', 'n04253168', 'n04253304', 'n04253931', + 'n04254009', 'n04254120', 'n04254450', 'n04254680', 'n04254777', + 'n04255163', 'n04255346', 'n04255499', 'n04255586', 'n04255670', + 'n04255768', 'n04255899', 'n04256318', 'n04256520', 'n04256758', + 'n04256891', 'n04257223', 'n04257684', 'n04257790', 'n04257986', + 'n04258138', 'n04258333', 'n04258438', 'n04258618', 'n04258732', + 'n04258859', 'n04259202', 'n04259468', 'n04259630', 'n04260192', + 'n04260364', 'n04260589', 'n04261116', 'n04261281', 'n04261369', + 'n04261506', 'n04261638', 'n04261767', 'n04261868', 'n04262161', + 'n04262530', 'n04262678', 'n04262869', 'n04263257', 'n04263336', + 'n04263502', 'n04263760', 'n04263950', 'n04264134', 'n04264233', + 'n04264361', 'n04264485', 'n04264628', 'n04264765', 'n04264914', + 'n04265275', 'n04265428', 'n04265904', 'n04266014', 'n04266162', + 'n04266375', 'n04266486', 'n04266849', 'n04266968', 'n04267091', + 'n04267165', 'n04267246', 'n04267435', 'n04267577', 'n04267985', + 'n04268142', 'n04268275', 'n04268418', 'n04268565', 'n04268799', + 'n04269086', 'n04269270', 'n04269502', 'n04269668', 'n04269822', + 'n04269944', 'n04270147', 'n04270371', 'n04270576', 'n04270891', + 'n04271148', 'n04271531', 'n04271793', 'n04271891', 'n04272054', + 'n04272389', 'n04272782', 'n04272928', 'n04273064', 'n04273285', + 'n04273569', 'n04273659', 'n04273796', 'n04273972', 'n04274686', + 'n04274985', 'n04275093', 'n04275175', 'n04275283', 'n04275548', + 'n04275661', 'n04275904', 'n04277352', 'n04277493', 'n04277669', + 'n04277826', 'n04278247', 'n04278353', 'n04278447', 'n04278605', + 'n04278932', 'n04279063', 'n04279172', 'n04279353', 'n04279462', + 'n04279858', 'n04279987', 'n04280259', 'n04280373', 'n04280487', + 'n04280845', 'n04280970', 'n04281260', 'n04281375', 'n04281571', + 'n04281998', 'n04282231', 'n04282494', 'n04282872', 'n04282992', + 'n04283096', 'n04283255', 'n04283378', 'n04283585', 'n04283784', + 'n04283905', 'n04284002', 'n04284341', 'n04284438', 'n04284572', + 'n04284869', 'n04285008', 'n04285146', 'n04285622', 'n04285803', + 'n04285965', 'n04286128', 'n04286575', 'n04286960', 'n04287351', + 'n04287451', 'n04287747', 'n04287898', 'n04287986', 'n04288165', + 'n04288272', 'n04288533', 'n04288673', 'n04289027', 'n04289195', + 'n04289449', 'n04289576', 'n04289690', 'n04289827', 'n04290079', + 'n04290259', 'n04290507', 'n04290615', 'n04290762', 'n04291069', + 'n04291242', 'n04291759', 'n04291992', 'n04292080', 'n04292221', + 'n04292414', 'n04292572', 'n04292921', 'n04293119', 'n04293258', + 'n04293744', 'n04294212', 'n04294426', 'n04294614', 'n04294879', + 'n04295081', 'n04295353', 'n04295571', 'n04295777', 'n04295881', + 'n04296562', 'n04297098', 'n04297750', 'n04297847', 'n04298053', + 'n04298661', 'n04298765', 'n04299215', 'n04299370', 'n04299963', + 'n04300358', 'n04300509', 'n04300643', 'n04301000', 'n04301242', + 'n04301474', 
'n04301760', 'n04302200', 'n04302863', 'n04302988', + 'n04303095', 'n04303258', 'n04303357', 'n04303497', 'n04304215', + 'n04304375', 'n04304680', 'n04305016', 'n04305210', 'n04305323', + 'n04305471', 'n04305572', 'n04305947', 'n04306080', 'n04306592', + 'n04306847', 'n04307419', 'n04307767', 'n04307878', 'n04307986', + 'n04308084', 'n04308273', 'n04308397', 'n04308583', 'n04308807', + 'n04308915', 'n04309049', 'n04309348', 'n04309548', 'n04309833', + 'n04310018', 'n04310157', 'n04310507', 'n04310604', 'n04310721', + 'n04310904', 'n04311004', 'n04311174', 'n04311595', 'n04312020', + 'n04312154', 'n04312432', 'n04312654', 'n04312756', 'n04312916', + 'n04313220', 'n04313503', 'n04313628', 'n04314107', 'n04314216', + 'n04314522', 'n04314632', 'n04314914', 'n04315342', 'n04315713', + 'n04315828', 'n04315948', 'n04316498', 'n04316815', 'n04316924', + 'n04317063', 'n04317175', 'n04317325', 'n04317420', 'n04317833', + 'n04317976', 'n04318131', 'n04318787', 'n04318892', 'n04318982', + 'n04319545', 'n04319774', 'n04319937', 'n04320405', 'n04320598', + 'n04320871', 'n04320973', 'n04321121', 'n04321453', 'n04322026', + 'n04322531', 'n04322692', 'n04322801', 'n04323519', 'n04323819', + 'n04324120', 'n04324297', 'n04324387', 'n04324515', 'n04325041', + 'n04325208', 'n04325704', 'n04325804', 'n04325968', 'n04326547', + 'n04326676', 'n04326799', 'n04326896', 'n04327204', 'n04327544', + 'n04327682', 'n04328054', 'n04328186', 'n04328329', 'n04328580', + 'n04328703', 'n04328946', 'n04329477', 'n04329681', 'n04329834', + 'n04329958', 'n04330109', 'n04330189', 'n04330267', 'n04330340', + 'n04330669', 'n04330746', 'n04330896', 'n04330998', 'n04331277', + 'n04331443', 'n04331639', 'n04331765', 'n04331892', 'n04332074', + 'n04332243', 'n04332580', 'n04332987', 'n04333129', 'n04333869', + 'n04334105', 'n04334365', 'n04334504', 'n04334599', 'n04335209', + 'n04335435', 'n04335693', 'n04335886', 'n04336792', 'n04337157', + 'n04337287', 'n04337503', 'n04337650', 'n04338517', 'n04338963', + 'n04339062', 'n04339191', 'n04339638', 'n04339879', 'n04340019', + 'n04340521', 'n04340750', 'n04340935', 'n04341133', 'n04341288', + 'n04341414', 'n04341686', 'n04343511', 'n04343630', 'n04343740', + 'n04344003', 'n04344734', 'n04344873', 'n04345028', 'n04345201', + 'n04345787', 'n04346003', 'n04346157', 'n04346328', 'n04346428', + 'n04346511', 'n04346679', 'n04346855', 'n04347119', 'n04347519', + 'n04347754', 'n04348070', 'n04348184', 'n04348359', 'n04348988', + 'n04349189', 'n04349306', 'n04349401', 'n04349913', 'n04350104', + 'n04350235', 'n04350458', 'n04350581', 'n04350688', 'n04350769', + 'n04350905', 'n04351550', 'n04351699', 'n04353573', 'n04354026', + 'n04354182', 'n04354387', 'n04354487', 'n04354589', 'n04355115', + 'n04355267', 'n04355338', 'n04355511', 'n04355684', 'n04355821', + 'n04355933', 'n04356056', 'n04356595', 'n04356772', 'n04356925', + 'n04357121', 'n04357314', 'n04357531', 'n04357930', 'n04358117', + 'n04358256', 'n04358491', 'n04358707', 'n04358874', 'n04359034', + 'n04359124', 'n04359217', 'n04359335', 'n04359500', 'n04359589', + 'n04360501', 'n04360798', 'n04360914', 'n04361095', 'n04361260', + 'n04361937', 'n04362624', 'n04362821', 'n04362972', 'n04363082', + 'n04363210', 'n04363412', 'n04363671', 'n04363777', 'n04363874', + 'n04363991', 'n04364160', 'n04364397', 'n04364545', 'n04364827', + 'n04364994', 'n04365112', 'n04365229', 'n04365328', 'n04365484', + 'n04365751', 'n04366033', 'n04366116', 'n04366367', 'n04366832', + 'n04367011', 'n04367371', 'n04367480', 'n04367746', 'n04367950', + 'n04368109', 
'n04368235', 'n04368365', 'n04368496', 'n04368695', + 'n04368840', 'n04369025', 'n04369282', 'n04369485', 'n04369618', + 'n04370048', 'n04370288', 'n04370456', 'n04370600', 'n04370774', + 'n04370955', 'n04371050', 'n04371430', 'n04371563', 'n04371774', + 'n04371979', 'n04372370', 'n04373089', 'n04373428', 'n04373563', + 'n04373704', 'n04373795', 'n04373894', 'n04374315', 'n04374521', + 'n04374735', 'n04374907', 'n04375080', 'n04375241', 'n04375405', + 'n04375615', 'n04375775', 'n04375926', 'n04376400', 'n04376876', + 'n04377057', 'n04378489', 'n04378651', 'n04378956', 'n04379096', + 'n04379243', 'n04379964', 'n04380255', 'n04380346', 'n04380533', + 'n04380916', 'n04381073', 'n04381450', 'n04381587', 'n04381724', + 'n04381860', 'n04381994', 'n04382334', 'n04382438', 'n04382537', + 'n04382695', 'n04382880', 'n04383015', 'n04383130', 'n04383301', + 'n04383839', 'n04383923', 'n04384593', 'n04384910', 'n04385079', + 'n04385157', 'n04385536', 'n04385799', 'n04386051', 'n04386456', + 'n04386664', 'n04386792', 'n04387095', 'n04387201', 'n04387261', + 'n04387400', 'n04387531', 'n04387706', 'n04387932', 'n04388040', + 'n04388162', 'n04388473', 'n04388574', 'n04388743', 'n04389033', + 'n04389430', 'n04389521', 'n04389718', 'n04389854', 'n04389999', + 'n04390483', 'n04390577', 'n04390873', 'n04390977', 'n04391445', + 'n04391838', 'n04392113', 'n04392526', 'n04392764', 'n04392985', + 'n04393095', 'n04393301', 'n04393549', 'n04393808', 'n04393913', + 'n04394031', 'n04394261', 'n04394421', 'n04394630', 'n04395024', + 'n04395106', 'n04395332', 'n04395651', 'n04395875', 'n04396226', + 'n04396335', 'n04396650', 'n04396808', 'n04396902', 'n04397027', + 'n04397168', 'n04397261', 'n04397452', 'n04397645', 'n04397768', + 'n04397860', 'n04398044', 'n04398497', 'n04398688', 'n04398834', + 'n04398951', 'n04399046', 'n04399158', 'n04399537', 'n04399846', + 'n04400109', 'n04400289', 'n04400499', 'n04400737', 'n04400899', + 'n04401088', 'n04401578', 'n04401680', 'n04401828', 'n04401949', + 'n04402057', 'n04402342', 'n04402449', 'n04402580', 'n04402746', + 'n04402984', 'n04403413', 'n04403524', 'n04403638', 'n04403925', + 'n04404072', 'n04404200', 'n04404412', 'n04404817', 'n04404997', + 'n04405540', 'n04405762', 'n04405907', 'n04406239', 'n04406552', + 'n04406687', 'n04406817', 'n04407257', 'n04407435', 'n04407686', + 'n04408871', 'n04409011', 'n04409128', 'n04409279', 'n04409384', + 'n04409515', 'n04409625', 'n04409806', 'n04409911', 'n04410086', + 'n04410365', 'n04410485', 'n04410565', 'n04410663', 'n04410760', + 'n04410886', 'n04411019', 'n04411264', 'n04411835', 'n04411966', + 'n04412097', 'n04412300', 'n04412416', 'n04413151', 'n04413419', + 'n04413969', 'n04414101', 'n04414199', 'n04414319', 'n04414476', + 'n04414675', 'n04414909', 'n04415257', 'n04415663', 'n04415815', + 'n04416005', 'n04416901', 'n04417086', 'n04417180', 'n04417361', + 'n04417672', 'n04417809', 'n04418357', 'n04418644', 'n04419073', + 'n04419642', 'n04419868', 'n04420024', 'n04420720', 'n04421083', + 'n04421258', 'n04421417', 'n04421582', 'n04421740', 'n04421872', + 'n04422409', 'n04422566', 'n04422727', 'n04422875', 'n04423552', + 'n04423687', 'n04423845', 'n04424692', 'n04425804', 'n04425977', + 'n04426184', 'n04426316', 'n04426427', 'n04427216', 'n04427473', + 'n04427559', 'n04427715', 'n04427857', 'n04428008', 'n04428191', + 'n04428382', 'n04428634', 'n04429038', 'n04429376', 'n04430475', + 'n04430605', 'n04430896', 'n04431025', 'n04431436', 'n04431648', + 'n04431745', 'n04431925', 'n04432043', 'n04432203', 'n04432662', + 'n04432785', 
'n04433377', 'n04433585', 'n04434207', 'n04434531', + 'n04434932', 'n04435180', 'n04435552', 'n04435653', 'n04435759', + 'n04435870', 'n04436012', 'n04436185', 'n04436329', 'n04436401', + 'n04436542', 'n04436832', 'n04436992', 'n04437276', 'n04437380', + 'n04437670', 'n04437953', 'n04438304', 'n04438507', 'n04438643', + 'n04438897', 'n04439505', 'n04439585', 'n04439712', 'n04440597', + 'n04440963', 'n04441093', 'n04441528', 'n04441662', 'n04441790', + 'n04442312', 'n04442441', 'n04442582', 'n04442741', 'n04443164', + 'n04443257', 'n04443433', 'n04443766', 'n04444121', 'n04444218', + 'n04444749', 'n04444953', 'n04445040', 'n04445154', 'n04445327', + 'n04445610', 'n04445782', 'n04445952', 'n04446162', 'n04446276', + 'n04446844', 'n04447028', 'n04447156', 'n04447276', 'n04447443', + 'n04447861', 'n04448070', 'n04448185', 'n04448361', 'n04449290', + 'n04449449', 'n04449550', 'n04449700', 'n04449966', 'n04450133', + 'n04450243', 'n04450465', 'n04450640', 'n04450749', 'n04450994', + 'n04451139', 'n04451318', 'n04451636', 'n04451818', 'n04452528', + 'n04452615', 'n04452757', 'n04452848', 'n04453037', 'n04453156', + 'n04453390', 'n04453666', 'n04453910', 'n04454654', 'n04454792', + 'n04454908', 'n04455048', 'n04455250', 'n04455579', 'n04455652', + 'n04456011', 'n04456115', 'n04456472', 'n04456734', 'n04457157', + 'n04457326', 'n04457474', 'n04457638', 'n04457767', 'n04457910', + 'n04458201', 'n04458633', 'n04458843', 'n04459018', 'n04459122', + 'n04459243', 'n04459362', 'n04459610', 'n04459773', 'n04459909', + 'n04460130', 'n04461437', 'n04461570', 'n04461696', 'n04461879', + 'n04462011', 'n04462240', 'n04462576', 'n04463679', 'n04464125', + 'n04464615', 'n04464852', 'n04465050', 'n04465203', 'n04465358', + 'n04465501', 'n04465666', 'n04466871', 'n04467099', 'n04467307', + 'n04467506', 'n04467665', 'n04467899', 'n04468005', 'n04469003', + 'n04469251', 'n04469514', 'n04469684', 'n04469813', 'n04470741', + 'n04471148', 'n04471315', 'n04471632', 'n04471912', 'n04472243', + 'n04472563', 'n04472726', 'n04472961', 'n04473108', 'n04473275', + 'n04473884', 'n04474035', 'n04474187', 'n04474466', 'n04475309', + 'n04475411', 'n04475496', 'n04475631', 'n04475749', 'n04475900', + 'n04476116', 'n04476259', 'n04476526', 'n04476831', 'n04476972', + 'n04477219', 'n04477387', 'n04477548', 'n04477725', 'n04478066', + 'n04478383', 'n04478512', 'n04478657', 'n04479046', 'n04479287', + 'n04479405', 'n04479526', 'n04479694', 'n04479823', 'n04479939', + 'n04480033', 'n04480141', 'n04480303', 'n04480527', 'n04480853', + 'n04480995', 'n04481524', 'n04481642', 'n04482177', 'n04482297', + 'n04482393', 'n04482975', 'n04483073', 'n04483307', 'n04483925', + 'n04484024', 'n04484432', 'n04485082', 'n04485423', 'n04485586', + 'n04485750', 'n04485884', 'n04486054', 'n04486213', 'n04486322', + 'n04486616', 'n04486934', 'n04487081', 'n04487394', 'n04487724', + 'n04487894', 'n04488202', 'n04488427', 'n04488530', 'n04488742', + 'n04488857', 'n04489008', 'n04489695', 'n04489817', 'n04490091', + 'n04491312', 'n04491388', 'n04491638', 'n04491769', 'n04491934', + 'n04492060', 'n04492157', 'n04492375', 'n04492749', 'n04493109', + 'n04493259', 'n04493381', 'n04494204', 'n04495051', 'n04495183', + 'n04495310', 'n04495450', 'n04495555', 'n04495698', 'n04495843', + 'n04496614', 'n04496726', 'n04496872', 'n04497249', 'n04497442', + 'n04497570', 'n04497801', 'n04498275', 'n04498389', 'n04498523', + 'n04498873', 'n04499062', 'n04499300', 'n04499446', 'n04499554', + 'n04499810', 'n04500060', 'n04500390', 'n04501127', 'n04501281', + 'n04501370', 
'n04501550', 'n04501837', 'n04501947', 'n04502059', + 'n04502197', 'n04502502', 'n04502670', 'n04502851', 'n04502989', + 'n04503073', 'n04503155', 'n04503269', 'n04503413', 'n04503499', + 'n04503593', 'n04503705', 'n04504038', 'n04504141', 'n04504770', + 'n04505036', 'n04505345', 'n04505470', 'n04505888', 'n04506289', + 'n04506402', 'n04506506', 'n04506688', 'n04506895', 'n04506994', + 'n04507155', 'n04507326', 'n04507453', 'n04507689', 'n04508163', + 'n04508489', 'n04508949', 'n04509171', 'n04509260', 'n04509417', + 'n04509592', 'n04510706', 'n04511002', 'n04513827', 'n04513998', + 'n04514095', 'n04514241', 'n04514648', 'n04515003', 'n04515444', + 'n04515729', 'n04515890', 'n04516116', 'n04516214', 'n04516354', + 'n04516672', 'n04517211', 'n04517408', 'n04517823', 'n04517999', + 'n04518132', 'n04518343', 'n04518643', 'n04518764', 'n04519153', + 'n04519536', 'n04519728', 'n04519887', 'n04520170', 'n04520382', + 'n04520784', 'n04520962', 'n04521571', 'n04521863', 'n04521987', + 'n04522168', 'n04523525', 'n04523831', 'n04524142', 'n04524313', + 'n04524594', 'n04524716', 'n04524941', 'n04525038', 'n04525191', + 'n04525305', 'n04525417', 'n04525584', 'n04525821', 'n04526520', + 'n04526800', 'n04526964', 'n04527648', 'n04528079', 'n04528968', + 'n04529108', 'n04529681', 'n04529962', 'n04530283', 'n04530456', + 'n04530566', 'n04531098', 'n04531873', 'n04532022', 'n04532106', + 'n04532398', 'n04532504', 'n04532670', 'n04532831', 'n04533042', + 'n04533199', 'n04533499', 'n04533594', 'n04533700', 'n04533802', + 'n04533946', 'n04534127', 'n04534359', 'n04534520', 'n04534895', + 'n04535252', 'n04535370', 'n04535524', 'n04536153', 'n04536335', + 'n04536465', 'n04536595', 'n04536765', 'n04536866', 'n04537436', + 'n04538249', 'n04538403', 'n04538552', 'n04538878', 'n04539053', + 'n04539203', 'n04539407', 'n04539794', 'n04540053', 'n04540255', + 'n04540397', 'n04540761', 'n04541136', 'n04541320', 'n04541662', + 'n04541777', 'n04541987', 'n04542095', 'n04542329', 'n04542474', + 'n04542595', 'n04542715', 'n04542858', 'n04542943', 'n04543158', + 'n04543509', 'n04543636', 'n04543772', 'n04543924', 'n04543996', + 'n04544325', 'n04544450', 'n04545305', 'n04545471', 'n04545748', + 'n04545858', 'n04545984', 'n04546081', 'n04546194', 'n04546340', + 'n04546595', 'n04546855', 'n04547592', 'n04548280', 'n04548362', + 'n04549028', 'n04549122', 'n04549629', 'n04549721', 'n04549919', + 'n04550184', 'n04550676', 'n04551055', 'n04551833', 'n04552097', + 'n04552348', 'n04552551', 'n04552696', 'n04553389', 'n04553561', + 'n04553703', 'n04554211', 'n04554406', 'n04554684', 'n04554871', + 'n04554998', 'n04555291', 'n04555400', 'n04555600', 'n04555700', + 'n04555897', 'n04556408', 'n04556533', 'n04556664', 'n04556948', + 'n04557308', 'n04557522', 'n04557648', 'n04557751', 'n04558059', + 'n04558199', 'n04558478', 'n04558804', 'n04559023', 'n04559166', + 'n04559451', 'n04559620', 'n04559730', 'n04559910', 'n04559994', + 'n04560113', 'n04560292', 'n04560502', 'n04560619', 'n04560804', + 'n04560882', 'n04561010', 'n04561287', 'n04561422', 'n04561734', + 'n04561857', 'n04561965', 'n04562122', 'n04562262', 'n04562496', + 'n04562935', 'n04563020', 'n04563204', 'n04563413', 'n04563560', + 'n04563790', 'n04564278', 'n04564581', 'n04565039', 'n04565375', + 'n04566257', 'n04566561', 'n04566756', 'n04567098', 'n04567593', + 'n04567746', 'n04568069', 'n04568557', 'n04568713', 'n04568841', + 'n04569063', 'n04569520', 'n04569822', 'n04570118', 'n04570214', + 'n04570416', 'n04570532', 'n04570815', 'n04570958', 'n04571292', + 'n04571566', 
'n04571686', 'n04571800', 'n04571958', 'n04572121', + 'n04572235', 'n04572935', 'n04573045', 'n04573281', 'n04573379', + 'n04573513', 'n04573625', 'n04573832', 'n04573937', 'n04574067', + 'n04574348', 'n04574471', 'n04574606', 'n04574999', 'n04575723', + 'n04575824', 'n04576002', 'n04576211', 'n04576971', 'n04577139', + 'n04577293', 'n04577426', 'n04577567', 'n04577769', 'n04578112', + 'n04578329', 'n04578559', 'n04578708', 'n04578801', 'n04578934', + 'n04579056', 'n04579145', 'n04579230', 'n04579432', 'n04579667', + 'n04579986', 'n04580493', 'n04581102', 'n04581595', 'n04581829', + 'n04582205', 'n04582349', 'n04582771', 'n04582869', 'n04583022', + 'n04583212', 'n04583620', 'n04583888', 'n04583967', 'n04584056', + 'n04584207', 'n04584373', 'n04585128', 'n04585318', 'n04585456', + 'n04585626', 'n04585745', 'n04585980', 'n04586072', 'n04586581', + 'n04586932', 'n04587327', 'n04587404', 'n04587559', 'n04587648', + 'n04588739', 'n04589190', 'n04589325', 'n04589434', 'n04589593', + 'n04589890', 'n04590021', 'n04590129', 'n04590263', 'n04590553', + 'n04590746', 'n04590933', 'n04591056', 'n04591157', 'n04591249', + 'n04591359', 'n04591517', 'n04591631', 'n04591713', 'n04591887', + 'n04592005', 'n04592099', 'n04592356', 'n04592465', 'n04592596', + 'n04592741', 'n04593077', 'n04593185', 'n04593376', 'n04593524', + 'n04593629', 'n04593866', 'n04594114', 'n04594218', 'n04594489', + 'n04594742', 'n04594828', 'n04594919', 'n04595028', 'n04595285', + 'n04595501', 'n04595611', 'n04595762', 'n04595855', 'n04596116', + 'n04596492', 'n04596742', 'n04596852', 'n04597066', 'n04597309', + 'n04597400', 'n04597804', 'n04597913', 'n04598136', 'n04598318', + 'n04598416', 'n04598582', 'n04598965', 'n04599124', 'n04599235', + 'n04600312', 'n04600486', 'n04600912', 'n04601041', 'n04601159', + 'n04601938', 'n04602762', 'n04602840', 'n04602956', 'n04603399', + 'n04603729', 'n04603872', 'n04604276', 'n04604644', 'n04604806', + 'n04605057', 'n04605163', 'n04605321', 'n04605446', 'n04605572', + 'n04605726', 'n04606251', 'n04606574', 'n04607035', 'n04607242', + 'n04607640', 'n04607759', 'n04607869', 'n04607982', 'n04608329', + 'n04608435', 'n04608567', 'n04608809', 'n04608923', 'n04609531', + 'n04609651', 'n04609811', 'n04610013', 'n04610176', 'n04610274', + 'n04610503', 'n04610676', 'n04611351', 'n04611795', 'n04611916', + 'n04612026', 'n04612159', 'n04612257', 'n04612373', 'n04612504', + 'n04612840', 'n04613015', 'n04613158', 'n04613696', 'n04613939', + 'n04614505', 'n04614655', 'n04614844', 'n04615149', 'n04615226', + 'n04615644', 'n04682018', 'n04950713', 'n04950952', 'n04951071', + 'n04951186', 'n04951373', 'n04951716', 'n04951875', 'n04953296', + 'n04953678', 'n04955160', 'n04957356', 'n04957589', 'n04958634', + 'n04958865', 'n04959061', 'n04959230', 'n04959672', 'n04960277', + 'n04960582', 'n04961062', 'n04961331', 'n04961691', 'n04962062', + 'n04962240', 'n04963111', 'n04963307', 'n04963588', 'n04963740', + 'n04964001', 'n04964799', 'n04964878', 'n04965179', 'n04965451', + 'n04965661', 'n04966543', 'n04966941', 'n04967191', 'n04967561', + 'n04967674', 'n04967801', 'n04967882', 'n04968056', 'n04968139', + 'n04968749', 'n04968895', 'n04969242', 'n04969540', 'n04969798', + 'n04969952', 'n04970059', 'n04970312', 'n04970398', 'n04970470', + 'n04970631', 'n04970916', 'n04971211', 'n04971313', 'n04972350', + 'n04972451', 'n04972801', 'n04973020', 'n04973291', 'n04973386', + 'n04973585', 'n04973669', 'n04973816', 'n04974145', 'n04974340', + 'n04974859', 'n04975739', 'n04976319', 'n04976952', 'n04977412', + 'n04978561', 
'n04979002', 'n04979307', 'n04981658', 'n05102764', + 'n05218119', 'n05233741', 'n05235879', 'n05238282', 'n05239437', + 'n05241218', 'n05241485', 'n05241662', 'n05242070', 'n05242239', + 'n05242928', 'n05244421', 'n05244755', 'n05244934', 'n05245192', + 'n05257476', 'n05257967', 'n05258051', 'n05258627', 'n05259914', + 'n05260127', 'n05260240', 'n05261310', 'n05262422', 'n05262534', + 'n05262698', 'n05263183', 'n05263316', 'n05263448', 'n05265736', + 'n05266096', 'n05266879', 'n05278922', 'n05279953', 'n05282652', + 'n05285623', 'n05302499', 'n05314075', 'n05399034', 'n05399243', + 'n05399356', 'n05418717', 'n05427346', 'n05442594', 'n05447757', + 'n05448704', 'n05448827', 'n05449196', 'n05449661', 'n05449959', + 'n05450617', 'n05451099', 'n05451384', 'n05453412', 'n05453657', + 'n05453815', 'n05454833', 'n05454978', 'n05455113', 'n05458173', + 'n05458576', 'n05459101', 'n05459457', 'n05459769', 'n05460759', + 'n05464534', 'n05467054', 'n05467758', 'n05468098', 'n05468739', + 'n05469664', 'n05469861', 'n05475397', 'n05482922', 'n05486510', + 'n05491154', 'n05526957', 'n05538625', 'n05539947', 'n05541509', + 'n05542893', 'n05545879', 'n05571341', 'n05578095', 'n05581932', + 'n05584746', 'n05586759', 'n05604434', 'n05716342', 'n06008896', + 'n06209940', 'n06254669', 'n06255081', 'n06255613', 'n06259898', + 'n06262567', 'n06262943', 'n06263202', 'n06263369', 'n06263609', + 'n06263762', 'n06263895', 'n06266417', 'n06266633', 'n06266710', + 'n06266878', 'n06266973', 'n06267145', 'n06267564', 'n06267655', + 'n06267758', 'n06267893', 'n06267991', 'n06271778', 'n06272290', + 'n06272612', 'n06272803', 'n06273207', 'n06273294', 'n06273414', + 'n06273555', 'n06273743', 'n06273890', 'n06273986', 'n06274092', + 'n06274292', 'n06274546', 'n06274760', 'n06274921', 'n06275095', + 'n06275353', 'n06275471', 'n06276501', 'n06276697', 'n06276902', + 'n06277025', 'n06277135', 'n06277280', 'n06278338', 'n06278475', + 'n06281040', 'n06281175', 'n06340977', 'n06359193', 'n06359467', + 'n06359657', 'n06415688', 'n06417096', 'n06418693', 'n06419354', + 'n06423496', 'n06470073', 'n06591815', 'n06592078', 'n06592281', + 'n06592421', 'n06595351', 'n06596179', 'n06596364', 'n06596474', + 'n06596607', 'n06596727', 'n06596845', 'n06613686', 'n06614901', + 'n06616216', 'n06618653', 'n06625062', 'n06785654', 'n06793231', + 'n06794110', 'n06874185', 'n06883725', 'n06892775', 'n06998748', + 'n07005523', 'n07248320', 'n07273802', 'n07461050', 'n07556406', + 'n07556637', 'n07556872', 'n07556970', 'n07557165', 'n07557434', + 'n07560193', 'n07560331', 'n07560422', 'n07560542', 'n07560652', + 'n07560903', 'n07561112', 'n07561590', 'n07561848', 'n07562017', + 'n07562172', 'n07562379', 'n07562495', 'n07562651', 'n07562881', + 'n07562984', 'n07563207', 'n07563366', 'n07563642', 'n07563800', + 'n07564008', 'n07564101', 'n07564292', 'n07564515', 'n07564629', + 'n07564796', 'n07564971', 'n07565083', 'n07565161', 'n07565259', + 'n07565608', 'n07565725', 'n07565945', 'n07566092', 'n07566231', + 'n07566340', 'n07566863', 'n07567039', 'n07567139', 'n07567390', + 'n07567611', 'n07567707', 'n07567980', 'n07568095', 'n07568241', + 'n07568389', 'n07568502', 'n07568625', 'n07568818', 'n07568991', + 'n07569106', 'n07569423', 'n07569543', 'n07569644', 'n07569873', + 'n07570021', 'n07570530', 'n07570720', 'n07572353', 'n07572616', + 'n07572858', 'n07572957', 'n07573103', 'n07573347', 'n07573453', + 'n07573563', 'n07573696', 'n07574176', 'n07574426', 'n07574504', + 'n07574602', 'n07574780', 'n07574923', 'n07575076', 'n07575226', + 'n07575392', 
'n07575510', 'n07575726', 'n07575984', 'n07576182', + 'n07576438', 'n07576577', 'n07576781', 'n07576969', 'n07577144', + 'n07577374', 'n07577538', 'n07577657', 'n07577772', 'n07577918', + 'n07578093', 'n07579575', 'n07579688', 'n07579787', 'n07579917', + 'n07580053', 'n07580253', 'n07580359', 'n07580470', 'n07580592', + 'n07581249', 'n07581346', 'n07581607', 'n07581775', 'n07581931', + 'n07582027', 'n07582152', 'n07582277', 'n07582441', 'n07582609', + 'n07582811', 'n07582892', 'n07582970', 'n07583066', 'n07583197', + 'n07583865', 'n07583978', 'n07584110', 'n07584228', 'n07584332', + 'n07584423', 'n07584593', 'n07584859', 'n07584938', 'n07585015', + 'n07585107', 'n07585208', 'n07585474', 'n07585557', 'n07585644', + 'n07585758', 'n07585906', 'n07585997', 'n07586099', 'n07586179', + 'n07586318', 'n07586485', 'n07586604', 'n07586718', 'n07586894', + 'n07587023', 'n07587111', 'n07587206', 'n07587331', 'n07587441', + 'n07587618', 'n07587700', 'n07587819', 'n07587962', 'n07588111', + 'n07588193', 'n07588299', 'n07588419', 'n07588574', 'n07588688', + 'n07588817', 'n07588947', 'n07589458', 'n07589543', 'n07589724', + 'n07589872', 'n07589967', 'n07590068', 'n07590177', 'n07590320', + 'n07590502', 'n07590611', 'n07590752', 'n07590841', 'n07590974', + 'n07591049', 'n07591162', 'n07591236', 'n07591330', 'n07591473', + 'n07591586', 'n07591813', 'n07591961', 'n07592094', 'n07592317', + 'n07592400', 'n07592481', 'n07592656', 'n07592768', 'n07592922', + 'n07593004', 'n07593107', 'n07593199', 'n07593471', 'n07593774', + 'n07593972', 'n07594066', 'n07594155', 'n07594250', 'n07594737', + 'n07594840', 'n07595051', 'n07595180', 'n07595368', 'n07595649', + 'n07595751', 'n07595914', 'n07596046', 'n07596160', 'n07596362', + 'n07596452', 'n07596566', 'n07596684', 'n07596967', 'n07597145', + 'n07597263', 'n07597365', 'n07598256', 'n07598529', 'n07598622', + 'n07598734', 'n07598928', 'n07599068', 'n07599161', 'n07599242', + 'n07599383', 'n07599468', 'n07599554', 'n07599649', 'n07599783', + 'n07599911', 'n07599998', 'n07600177', 'n07600285', 'n07600394', + 'n07600506', 'n07600696', 'n07600895', 'n07601025', 'n07601175', + 'n07601290', 'n07601407', 'n07601572', 'n07601686', 'n07601809', + 'n07602650', 'n07604956', 'n07605040', 'n07605198', 'n07605282', + 'n07605380', 'n07605474', 'n07605597', 'n07605693', 'n07605804', + 'n07605944', 'n07606058', 'n07606191', 'n07606278', 'n07606419', + 'n07606538', 'n07606669', 'n07606764', 'n07606933', 'n07607027', + 'n07607138', 'n07607361', 'n07607492', 'n07607605', 'n07607707', + 'n07607832', 'n07607967', 'n07608098', 'n07608245', 'n07608339', + 'n07608429', 'n07608533', 'n07608641', 'n07608721', 'n07608866', + 'n07608980', 'n07609083', 'n07609215', 'n07609316', 'n07609407', + 'n07609549', 'n07609632', 'n07609728', 'n07609840', 'n07610295', + 'n07610502', 'n07610620', 'n07610746', 'n07610890', 'n07611046', + 'n07611148', 'n07611267', 'n07611358', 'n07611733', 'n07611839', + 'n07611991', 'n07612137', 'n07612273', 'n07612367', 'n07612530', + 'n07612632', 'n07612996', 'n07613158', 'n07613266', 'n07613480', + 'n07613671', 'n07613815', 'n07614103', 'n07614198', 'n07614348', + 'n07614500', 'n07614730', 'n07614825', 'n07615052', 'n07615190', + 'n07615289', 'n07615460', 'n07615569', 'n07615671', 'n07615774', + 'n07615954', 'n07616046', 'n07616174', 'n07616265', 'n07616386', + 'n07616487', 'n07616590', 'n07616748', 'n07616906', 'n07617051', + 'n07617188', 'n07617344', 'n07617447', 'n07617526', 'n07617611', + 'n07617708', 'n07617839', 'n07617932', 'n07618029', 'n07618119', + 'n07618281', 
'n07618432', 'n07618587', 'n07618684', 'n07618871', + 'n07619004', 'n07619208', 'n07619301', 'n07619409', 'n07619508', + 'n07619881', 'n07620047', 'n07620145', 'n07620327', 'n07620597', + 'n07620689', 'n07621264', 'n07621497', 'n07621618', 'n07623136', + 'n07624466', 'n07624666', 'n07624757', 'n07624924', 'n07625061', + 'n07625324', 'n07627931', 'n07628068', 'n07628181', 'n07631926', + 'n07639069', 'n07641928', 'n07642361', 'n07642471', 'n07642742', + 'n07642833', 'n07642933', 'n07643026', 'n07643200', 'n07643306', + 'n07643474', 'n07643577', 'n07643679', 'n07643764', 'n07643891', + 'n07643981', 'n07644244', 'n07648913', 'n07648997', 'n07650792', + 'n07650903', 'n07651025', 'n07654148', 'n07654298', 'n07655067', + 'n07655263', 'n07663899', 'n07665438', 'n07666176', 'n07672914', + 'n07678586', 'n07678729', 'n07678953', 'n07679034', 'n07679140', + 'n07679356', 'n07680168', 'n07680313', 'n07680416', 'n07680517', + 'n07680655', 'n07680761', 'n07680932', 'n07681264', 'n07681355', + 'n07681450', 'n07681691', 'n07681805', 'n07681926', 'n07682197', + 'n07682316', 'n07682477', 'n07682624', 'n07682808', 'n07682952', + 'n07683039', 'n07683138', 'n07683265', 'n07683360', 'n07683490', + 'n07683617', 'n07683786', 'n07684084', 'n07684164', 'n07684289', + 'n07684422', 'n07684517', 'n07684600', 'n07684938', 'n07685031', + 'n07685118', 'n07685218', 'n07685303', 'n07685399', 'n07685546', + 'n07685730', 'n07685918', 'n07686021', 'n07686202', 'n07686299', + 'n07686461', 'n07686634', 'n07686720', 'n07686873', 'n07687053', + 'n07687211', 'n07687381', 'n07687469', 'n07687626', 'n07687789', + 'n07688021', 'n07688130', 'n07688265', 'n07688412', 'n07688624', + 'n07688757', 'n07688898', 'n07689003', 'n07689217', 'n07689313', + 'n07689490', 'n07689624', 'n07689757', 'n07689842', 'n07690019', + 'n07690152', 'n07690273', 'n07690431', 'n07690511', 'n07690585', + 'n07690739', 'n07690892', 'n07691091', 'n07691237', 'n07691539', + 'n07691650', 'n07691758', 'n07691863', 'n07691954', 'n07692114', + 'n07692248', 'n07692405', 'n07692517', 'n07692614', 'n07692887', + 'n07693048', 'n07693223', 'n07693439', 'n07693590', 'n07693725', + 'n07693889', 'n07693972', 'n07694169', 'n07694403', 'n07694516', + 'n07694659', 'n07694839', 'n07695187', 'n07695284', 'n07695410', + 'n07695504', 'n07695652', 'n07695742', 'n07695878', 'n07695965', + 'n07696403', 'n07696527', 'n07696625', 'n07696728', 'n07696839', + 'n07696977', 'n07697100', 'n07697313', 'n07697408', 'n07697537', + 'n07697699', 'n07697825', 'n07698250', 'n07698401', 'n07698543', + 'n07698672', 'n07698782', 'n07700003', 'n07703889', 'n07704054', + 'n07704205', 'n07704305', 'n07705931', 'n07707451', 'n07708124', + 'n07708398', 'n07708512', 'n07708685', 'n07708798', 'n07709046', + 'n07709172', 'n07709333', 'n07709701', 'n07709881', 'n07710007', + 'n07710283', 'n07710616', 'n07710952', 'n07711080', 'n07711232', + 'n07711371', 'n07711569', 'n07711683', 'n07711799', 'n07711907', + 'n07712063', 'n07712267', 'n07712382', 'n07712559', 'n07712748', + 'n07712856', 'n07712959', 'n07713074', 'n07713267', 'n07713395', + 'n07713763', 'n07713895', 'n07714078', 'n07714188', 'n07714287', + 'n07714448', 'n07714571', 'n07714802', 'n07714895', 'n07714990', + 'n07715103', 'n07715221', 'n07715407', 'n07715561', 'n07715721', + 'n07716034', 'n07716203', 'n07716358', 'n07716504', 'n07716649', + 'n07716750', 'n07716906', 'n07717070', 'n07717410', 'n07717556', + 'n07717714', 'n07717858', 'n07718068', 'n07718195', 'n07718329', + 'n07718472', 'n07718671', 'n07718747', 'n07718920', 'n07719058', + 'n07719213', 
'n07719330', 'n07719437', 'n07719616', 'n07719756', + 'n07719839', 'n07719980', 'n07720084', 'n07720185', 'n07720277', + 'n07720442', 'n07720615', 'n07720875', 'n07721018', 'n07721118', + 'n07721195', 'n07721325', 'n07721456', 'n07721678', 'n07721833', + 'n07721942', 'n07722052', 'n07722217', 'n07722390', 'n07722485', + 'n07722666', 'n07722763', 'n07722888', 'n07723039', 'n07723177', + 'n07723330', 'n07723559', 'n07723753', 'n07723968', 'n07724078', + 'n07724173', 'n07724269', 'n07724492', 'n07724654', 'n07724819', + 'n07724943', 'n07725158', 'n07725255', 'n07725376', 'n07725531', + 'n07725663', 'n07725789', 'n07725888', 'n07726009', 'n07726095', + 'n07726230', 'n07726386', 'n07726525', 'n07726672', 'n07726796', + 'n07727048', 'n07727140', 'n07727252', 'n07727377', 'n07727458', + 'n07727578', 'n07727741', 'n07727868', 'n07728053', 'n07728181', + 'n07728284', 'n07728391', 'n07728585', 'n07728708', 'n07728804', + 'n07729000', 'n07729142', 'n07729225', 'n07729384', 'n07729485', + 'n07729828', 'n07729926', 'n07730033', 'n07730207', 'n07730320', + 'n07730406', 'n07730562', 'n07730708', 'n07730855', 'n07731006', + 'n07731122', 'n07731284', 'n07731436', 'n07731587', 'n07731767', + 'n07731952', 'n07732168', 'n07732302', 'n07732433', 'n07732525', + 'n07732636', 'n07732747', 'n07732904', 'n07733005', 'n07733124', + 'n07733217', 'n07733394', 'n07733567', 'n07733712', 'n07733847', + 'n07734017', 'n07734183', 'n07734292', 'n07734417', 'n07734555', + 'n07734744', 'n07734879', 'n07735052', 'n07735179', 'n07735294', + 'n07735404', 'n07735510', 'n07735687', 'n07735803', 'n07735981', + 'n07736087', 'n07736256', 'n07736371', 'n07736527', 'n07736692', + 'n07736813', 'n07736971', 'n07737081', 'n07737594', 'n07737745', + 'n07738105', 'n07738224', 'n07739035', 'n07739125', 'n07739344', + 'n07739506', 'n07739923', 'n07740033', 'n07740115', 'n07740220', + 'n07740342', 'n07740461', 'n07740597', 'n07740744', 'n07740855', + 'n07740954', 'n07741138', 'n07741235', 'n07741357', 'n07741461', + 'n07741623', 'n07741706', 'n07741804', 'n07741888', 'n07742012', + 'n07742224', 'n07742313', 'n07742415', 'n07742513', 'n07742605', + 'n07742704', 'n07743224', 'n07743384', 'n07743544', 'n07743723', + 'n07743902', 'n07744057', 'n07744246', 'n07744430', 'n07744559', + 'n07744682', 'n07744811', 'n07745046', 'n07745197', 'n07745357', + 'n07745466', 'n07745661', 'n07745940', 'n07746038', 'n07746186', + 'n07746334', 'n07746551', 'n07746749', 'n07746910', 'n07747055', + 'n07747607', 'n07747811', 'n07747951', 'n07748157', 'n07748276', + 'n07748416', 'n07748574', 'n07748753', 'n07748912', 'n07749095', + 'n07749192', 'n07749312', 'n07749446', 'n07749582', 'n07749731', + 'n07749870', 'n07749969', 'n07750146', 'n07750299', 'n07750449', + 'n07750586', 'n07750736', 'n07750872', 'n07751004', 'n07751148', + 'n07751280', 'n07751451', 'n07751737', 'n07751858', 'n07751977', + 'n07752109', 'n07752264', 'n07752377', 'n07752514', 'n07752602', + 'n07752664', 'n07752782', 'n07752874', 'n07752966', 'n07753113', + 'n07753275', 'n07753448', 'n07753592', 'n07753743', 'n07753980', + 'n07754155', 'n07754279', 'n07754451', 'n07754684', 'n07754894', + 'n07755089', 'n07755262', 'n07755411', 'n07755619', 'n07755707', + 'n07755929', 'n07756096', 'n07756325', 'n07756499', 'n07756641', + 'n07756838', 'n07756951', 'n07757132', 'n07757312', 'n07757511', + 'n07757602', 'n07757753', 'n07757874', 'n07757990', 'n07758125', + 'n07758260', 'n07758407', 'n07758582', 'n07758680', 'n07758950', + 'n07759194', 'n07759324', 'n07759424', 'n07759576', 'n07759691', + 'n07759816', 
'n07760070', 'n07760153', 'n07760297', 'n07760395', + 'n07760501', 'n07760673', 'n07760755', 'n07760859', 'n07761141', + 'n07761309', 'n07761611', 'n07761777', 'n07761954', 'n07762114', + 'n07762244', 'n07762373', 'n07762534', 'n07762740', 'n07762913', + 'n07763107', 'n07763290', 'n07763483', 'n07763629', 'n07763792', + 'n07763987', 'n07764155', 'n07764315', 'n07764486', 'n07764630', + 'n07764847', 'n07765073', 'n07765208', 'n07765361', 'n07765517', + 'n07765612', 'n07765728', 'n07765862', 'n07765999', 'n07766173', + 'n07766409', 'n07766530', 'n07766723', 'n07766891', 'n07767002', + 'n07767171', 'n07767344', 'n07767549', 'n07767709', 'n07767847', + 'n07768068', 'n07768139', 'n07768230', 'n07768318', 'n07768423', + 'n07768590', 'n07768694', 'n07768858', 'n07769102', 'n07769306', + 'n07769465', 'n07769584', 'n07769731', 'n07769886', 'n07770034', + 'n07770180', 'n07770439', 'n07770571', 'n07770763', 'n07770869', + 'n07771082', 'n07771212', 'n07771405', 'n07771539', 'n07771731', + 'n07771891', 'n07772026', 'n07772147', 'n07772274', 'n07772413', + 'n07772788', 'n07772935', 'n07773428', 'n07774182', 'n07774295', + 'n07774479', 'n07774596', 'n07774719', 'n07774842', 'n07775050', + 'n07775197', 'n07783827', 'n07785487', 'n07800091', 'n07800487', + 'n07800636', 'n07800740', 'n07801007', 'n07801091', 'n07801342', + 'n07801508', 'n07801709', 'n07801779', 'n07801892', 'n07802026', + 'n07802152', 'n07802246', 'n07802417', 'n07802767', 'n07802863', + 'n07802963', 'n07803093', 'n07803213', 'n07803310', 'n07803408', + 'n07803545', 'n07803779', 'n07803895', 'n07803992', 'n07804152', + 'n07804323', 'n07804543', 'n07804657', 'n07804771', 'n07804900', + 'n07805006', 'n07805254', 'n07805389', 'n07805478', 'n07805594', + 'n07805731', 'n07805966', 'n07806043', 'n07806120', 'n07806221', + 'n07806633', 'n07806774', 'n07806879', 'n07807002', 'n07807171', + 'n07807317', 'n07807472', 'n07807594', 'n07807710', 'n07807834', + 'n07807922', 'n07808022', 'n07808166', 'n07808268', 'n07808352', + 'n07808479', 'n07808587', 'n07808675', 'n07808806', 'n07808904', + 'n07809096', 'n07809368', 'n07810531', 'n07810907', 'n07811416', + 'n07812046', 'n07812184', 'n07812662', 'n07812790', 'n07812913', + 'n07813107', 'n07813324', 'n07813495', 'n07813579', 'n07813717', + 'n07813833', 'n07814007', 'n07814203', 'n07814390', 'n07814487', + 'n07814634', 'n07814790', 'n07814925', 'n07815163', 'n07815294', + 'n07815424', 'n07815588', 'n07815839', 'n07815956', 'n07816052', + 'n07816164', 'n07816296', 'n07816398', 'n07816575', 'n07816726', + 'n07816839', 'n07817024', 'n07817160', 'n07817315', 'n07817465', + 'n07817599', 'n07817758', 'n07817871', 'n07818029', 'n07818133', + 'n07818277', 'n07818422', 'n07818572', 'n07818689', 'n07818825', + 'n07818995', 'n07819166', 'n07819303', 'n07819480', 'n07819682', + 'n07819769', 'n07819896', 'n07820036', 'n07820145', 'n07820297', + 'n07820497', 'n07820683', 'n07820814', 'n07820960', 'n07821107', + 'n07821260', 'n07821404', 'n07821610', 'n07821758', 'n07821919', + 'n07822053', 'n07822197', 'n07822323', 'n07822518', 'n07822687', + 'n07822845', 'n07823105', 'n07823280', 'n07823369', 'n07823460', + 'n07823591', 'n07823698', 'n07823814', 'n07823951', 'n07824191', + 'n07824268', 'n07824383', 'n07824502', 'n07824702', 'n07824863', + 'n07824988', 'n07825194', 'n07825399', 'n07825496', 'n07825597', + 'n07825717', 'n07825850', 'n07825972', 'n07826091', 'n07826250', + 'n07826340', 'n07826453', 'n07826544', 'n07826653', 'n07826930', + 'n07827130', 'n07827284', 'n07827410', 'n07827554', 'n07827750', + 'n07827896', 
'n07828041', 'n07828156', 'n07828275', 'n07828378', + 'n07828642', 'n07828987', 'n07829248', 'n07829331', 'n07829412', + 'n07830493', 'n07830593', 'n07830690', 'n07830841', 'n07830986', + 'n07831146', 'n07831267', 'n07831450', 'n07831663', 'n07831821', + 'n07831955', 'n07832099', 'n07832202', 'n07832307', 'n07832416', + 'n07832592', 'n07832741', 'n07832902', 'n07833333', 'n07833535', + 'n07833672', 'n07833816', 'n07833951', 'n07834065', 'n07834160', + 'n07834286', 'n07834507', 'n07834618', 'n07834774', 'n07834872', + 'n07835051', 'n07835173', 'n07835331', 'n07835457', 'n07835547', + 'n07835701', 'n07835823', 'n07835921', 'n07836077', 'n07836269', + 'n07836456', 'n07836600', 'n07836731', 'n07836838', 'n07837002', + 'n07837110', 'n07837234', 'n07837362', 'n07837545', 'n07837630', + 'n07837755', 'n07837912', 'n07838073', 'n07838233', 'n07838441', + 'n07838551', 'n07838659', 'n07838811', 'n07838905', 'n07839055', + 'n07839172', 'n07839312', 'n07839478', 'n07839593', 'n07839730', + 'n07839864', 'n07840027', 'n07840124', 'n07840219', 'n07840304', + 'n07840395', 'n07840520', 'n07840672', 'n07840804', 'n07841037', + 'n07841345', 'n07841495', 'n07841639', 'n07841800', 'n07841907', + 'n07842044', 'n07842130', 'n07842202', 'n07842308', 'n07842433', + 'n07842605', 'n07842753', 'n07842972', 'n07843117', 'n07843220', + 'n07843348', 'n07843464', 'n07843636', 'n07843775', 'n07844042', + 'n07844604', 'n07844786', 'n07844867', 'n07845087', 'n07845166', + 'n07845335', 'n07845421', 'n07845495', 'n07845571', 'n07845702', + 'n07845775', 'n07845863', 'n07846014', 'n07846143', 'n07846274', + 'n07846359', 'n07846471', 'n07846557', 'n07846688', 'n07846802', + 'n07846938', 'n07847047', 'n07847198', 'n07847453', 'n07847585', + 'n07847706', 'n07847827', 'n07847917', 'n07848093', 'n07848196', + 'n07848338', 'n07848771', 'n07848936', 'n07849026', 'n07849186', + 'n07849336', 'n07849506', 'n07849619', 'n07849733', 'n07849912', + 'n07850083', 'n07850219', 'n07850329', 'n07851054', 'n07851298', + 'n07851443', 'n07851554', 'n07851641', 'n07851767', 'n07851926', + 'n07852045', 'n07852229', 'n07852302', 'n07852376', 'n07852452', + 'n07852532', 'n07852614', 'n07852712', 'n07852833', 'n07852919', + 'n07853125', 'n07853232', 'n07853345', 'n07853445', 'n07853560', + 'n07853648', 'n07853762', 'n07853852', 'n07853946', 'n07854066', + 'n07854184', 'n07854266', 'n07854348', 'n07854455', 'n07854614', + 'n07854707', 'n07854813', 'n07854982', 'n07855105', 'n07855188', + 'n07855317', 'n07855413', 'n07855510', 'n07855603', 'n07855721', + 'n07855812', 'n07855907', 'n07856045', 'n07856186', 'n07856270', + 'n07856756', 'n07856895', 'n07856992', 'n07857076', 'n07857170', + 'n07857356', 'n07857598', 'n07857731', 'n07857959', 'n07858114', + 'n07858197', 'n07858336', 'n07858484', 'n07858595', 'n07858841', + 'n07858978', 'n07859142', 'n07859284', 'n07859583', 'n07859796', + 'n07859951', 'n07860103', 'n07860208', 'n07860331', 'n07860447', + 'n07860548', 'n07860629', 'n07860805', 'n07860988', 'n07861158', + 'n07861247', 'n07861334', 'n07861557', 'n07861681', 'n07861813', + 'n07861983', 'n07862095', 'n07862244', 'n07862348', 'n07862461', + 'n07862611', 'n07862770', 'n07862946', 'n07863107', 'n07863229', + 'n07863374', 'n07863547', 'n07863644', 'n07863802', 'n07863935', + 'n07864065', 'n07864198', 'n07864317', 'n07864475', 'n07864638', + 'n07864756', 'n07864934', 'n07865105', 'n07865196', 'n07865484', + 'n07865575', 'n07865700', 'n07865788', 'n07866015', 'n07866151', + 'n07866277', 'n07866409', 'n07866571', 'n07866723', 'n07866868', + 'n07867021', 
'n07867164', 'n07867324', 'n07867421', 'n07867616', + 'n07867751', 'n07867883', 'n07868045', 'n07868200', 'n07868340', + 'n07868508', 'n07868684', 'n07868830', 'n07868955', 'n07869111', + 'n07869291', 'n07869391', 'n07869522', 'n07869611', 'n07869775', + 'n07869937', 'n07870069', 'n07870167', 'n07870313', 'n07870478', + 'n07870620', 'n07870734', 'n07870894', 'n07871065', 'n07871234', + 'n07871335', 'n07871436', 'n07871588', 'n07871720', 'n07871810', + 'n07872593', 'n07872748', 'n07873057', 'n07873198', 'n07873348', + 'n07873464', 'n07873679', 'n07873807', 'n07874063', 'n07874159', + 'n07874259', 'n07874343', 'n07874441', 'n07874531', 'n07874674', + 'n07874780', 'n07874995', 'n07875086', 'n07875152', 'n07875267', + 'n07875436', 'n07875560', 'n07875693', 'n07875835', 'n07875926', + 'n07876026', 'n07876189', 'n07876281', 'n07876460', 'n07876550', + 'n07876651', 'n07876775', 'n07876893', 'n07877187', 'n07877299', + 'n07877675', 'n07877849', 'n07877961', 'n07878145', 'n07878283', + 'n07878479', 'n07878647', 'n07878785', 'n07878926', 'n07879072', + 'n07879174', 'n07879350', 'n07879450', 'n07879560', 'n07879659', + 'n07879821', 'n07879953', 'n07880080', 'n07880213', 'n07880325', + 'n07880458', 'n07880751', 'n07880880', 'n07880968', 'n07881117', + 'n07881205', 'n07881404', 'n07881525', 'n07881625', 'n07881800', + 'n07882420', 'n07882497', 'n07882886', 'n07883031', 'n07883156', + 'n07883251', 'n07883384', 'n07883510', 'n07883661', 'n07884567', + 'n07885705', 'n07886057', 'n07886176', 'n07886317', 'n07886463', + 'n07886572', 'n07886849', 'n07887099', 'n07887192', 'n07887304', + 'n07887461', 'n07887634', 'n07887967', 'n07888058', 'n07888229', + 'n07888378', 'n07888465', 'n07888816', 'n07888909', 'n07889193', + 'n07889274', 'n07889510', 'n07889814', 'n07889990', 'n07890068', + 'n07890226', 'n07890352', 'n07890540', 'n07890617', 'n07890750', + 'n07890890', 'n07890970', 'n07891095', 'n07891189', 'n07891309', + 'n07891433', 'n07891726', 'n07892418', 'n07892512', 'n07892813', + 'n07893253', 'n07893425', 'n07893528', 'n07893642', 'n07893792', + 'n07893891', 'n07894102', 'n07894298', 'n07894451', 'n07894551', + 'n07894703', 'n07894799', 'n07894965', 'n07895100', 'n07895237', + 'n07895435', 'n07895595', 'n07895710', 'n07895839', 'n07895962', + 'n07896060', 'n07896165', 'n07896287', 'n07896422', 'n07896560', + 'n07896661', 'n07896765', 'n07896893', 'n07896994', 'n07897116', + 'n07897200', 'n07897438', 'n07897600', 'n07897750', 'n07897865', + 'n07897975', 'n07898117', 'n07898247', 'n07898333', 'n07898443', + 'n07898617', 'n07898745', 'n07898895', 'n07899003', 'n07899108', + 'n07899292', 'n07899434', 'n07899533', 'n07899660', 'n07899769', + 'n07899899', 'n07899976', 'n07900225', 'n07900406', 'n07900616', + 'n07900734', 'n07900825', 'n07900958', 'n07901355', 'n07901457', + 'n07901587', 'n07902121', 'n07902336', 'n07902443', 'n07902520', + 'n07902698', 'n07902799', 'n07902937', 'n07903101', 'n07903208', + 'n07903543', 'n07903643', 'n07903731', 'n07903841', 'n07903962', + 'n07904072', 'n07904293', 'n07904395', 'n07904637', 'n07904760', + 'n07904865', 'n07904934', 'n07905038', 'n07905296', 'n07905386', + 'n07905474', 'n07905618', 'n07905770', 'n07905979', 'n07906111', + 'n07906284', 'n07906572', 'n07906718', 'n07906877', 'n07907037', + 'n07907161', 'n07907342', 'n07907429', 'n07907548', 'n07907831', + 'n07907943', 'n07908411', 'n07908567', 'n07908647', 'n07908812', + 'n07908923', 'n07909129', 'n07909231', 'n07909362', 'n07909504', + 'n07909593', 'n07909714', 'n07909811', 'n07909954', 'n07910048', + 'n07910152', 
'n07910245', 'n07910379', 'n07910538', 'n07910656', + 'n07910799', 'n07910970', 'n07911061', 'n07911249', 'n07911371', + 'n07911677', 'n07912093', 'n07912211', 'n07913180', 'n07913300', + 'n07913393', 'n07913537', 'n07913644', 'n07913774', 'n07913882', + 'n07914006', 'n07914128', 'n07914271', 'n07914413', 'n07914586', + 'n07914686', 'n07914777', 'n07914887', 'n07914995', 'n07915094', + 'n07915213', 'n07915366', 'n07915491', 'n07915618', 'n07915800', + 'n07915918', 'n07916041', 'n07916183', 'n07916319', 'n07916437', + 'n07916582', 'n07917133', 'n07917272', 'n07917392', 'n07917507', + 'n07917618', 'n07917791', 'n07917874', 'n07917951', 'n07918028', + 'n07918193', 'n07918309', 'n07918706', 'n07918879', 'n07919165', + 'n07919310', 'n07919441', 'n07919572', 'n07919665', 'n07919787', + 'n07919894', 'n07920052', 'n07920222', 'n07920349', 'n07920540', + 'n07920663', 'n07920872', 'n07920989', 'n07921090', 'n07921239', + 'n07921360', 'n07921455', 'n07921615', 'n07921834', 'n07921948', + 'n07922041', 'n07922147', 'n07922512', 'n07922607', 'n07922764', + 'n07922955', 'n07923748', 'n07924033', 'n07924276', 'n07924366', + 'n07924443', 'n07924560', 'n07924655', 'n07924747', 'n07924834', + 'n07924955', 'n07925116', 'n07925229', 'n07925327', 'n07925423', + 'n07925500', 'n07925608', 'n07925708', 'n07925808', 'n07925966', + 'n07926250', 'n07926346', 'n07926442', 'n07926540', 'n07926785', + 'n07926920', 'n07927070', 'n07927197', 'n07927512', 'n07927716', + 'n07927836', 'n07927931', 'n07928163', 'n07928264', 'n07928367', + 'n07928488', 'n07928578', 'n07928696', 'n07928790', 'n07928887', + 'n07928998', 'n07929172', 'n07929351', 'n07929519', 'n07929940', + 'n07930062', 'n07930205', 'n07930315', 'n07930433', 'n07930554', + 'n07930864', 'n07931001', 'n07931096', 'n07931280', 'n07931452', + 'n07931612', 'n07931733', 'n07931870', 'n07932039', 'n07932323', + 'n07932454', 'n07932614', 'n07932762', 'n07932841', 'n07933154', + 'n07933274', 'n07933530', 'n07933652', 'n07933799', 'n07933891', + 'n07934032', 'n07934152', 'n07934282', 'n07934373', 'n07934530', + 'n07934678', 'n07934800', 'n07934908', 'n07935043', 'n07935152', + 'n07935288', 'n07935379', 'n07935504', 'n07935737', 'n07935878', + 'n07936015', 'n07936093', 'n07936263', 'n07936459', 'n07936548', + 'n07936745', 'n07936979', 'n07937069', 'n07937344', 'n07937461', + 'n07937621', 'n07938007', 'n07938149', 'n07938313', 'n07938594', + 'n07942152', 'n07951464', 'n07954211', 'n07977870', 'n08079613', + 'n08182379', 'n08238463', 'n08242223', 'n08249459', 'n08253141', + 'n08256735', 'n08376250', 'n08385989', 'n08492354', 'n08492461', + 'n08494231', 'n08495908', 'n08496334', 'n08500819', 'n08500989', + 'n08501887', 'n08505018', 'n08506347', 'n08511017', 'n08517010', + 'n08517676', 'n08518171', 'n08519299', 'n08521623', 'n08523340', + 'n08524735', 'n08539072', 'n08539276', 'n08540532', 'n08547468', + 'n08547544', 'n08551296', 'n08554440', 'n08555333', 'n08555710', + 'n08558770', 'n08558963', 'n08559155', 'n08560295', 'n08569482', + 'n08571275', 'n08571642', 'n08571898', 'n08573674', 'n08573842', + 'n08578517', 'n08579266', 'n08579352', 'n08580944', 'n08583292', + 'n08583455', 'n08583554', 'n08583682', 'n08584914', 'n08586978', + 'n08589670', 'n08596076', 'n08597579', 'n08598301', 'n08598568', + 'n08599174', 'n08599292', 'n08611339', 'n08611421', 'n08613733', + 'n08614632', 'n08616050', 'n08618831', 'n08619112', 'n08623676', + 'n08628141', 'n08633683', 'n08640531', 'n08640739', 'n08640962', + 'n08643267', 'n08644045', 'n08645104', 'n08645212', 'n08645318', + 'n08647264', 
'n08648917', 'n08649711', 'n08651104', 'n08652376', + 'n08658309', 'n08658918', 'n08659242', 'n08659331', 'n08659446', + 'n08659861', 'n08661878', 'n08662427', 'n08663051', 'n08663703', + 'n08663860', 'n08673039', 'n08674344', 'n08676253', 'n08677424', + 'n08677801', 'n08678783', 'n08679167', 'n08679269', 'n08679562', + 'n08685188', 'n08782627', 'n08896327', 'n09032191', 'n09186592', + 'n09189157', 'n09191635', 'n09193551', 'n09193705', 'n09194227', + 'n09199101', 'n09201998', 'n09203827', 'n09205509', 'n09206896', + 'n09206985', 'n09208496', 'n09209025', 'n09210862', 'n09213434', + 'n09213565', 'n09214060', 'n09214269', 'n09214916', 'n09215023', + 'n09215437', 'n09217230', 'n09218315', 'n09218494', 'n09218641', + 'n09219233', 'n09223487', 'n09224725', 'n09226869', 'n09228055', + 'n09229709', 'n09230041', 'n09230202', 'n09231117', 'n09233446', + 'n09233603', 'n09238926', 'n09239302', 'n09242389', 'n09245515', + 'n09246464', 'n09247410', 'n09248153', 'n09248399', 'n09249034', + 'n09249155', 'n09251407', 'n09255070', 'n09256479', 'n09257843', + 'n09259025', 'n09259219', 'n09260907', 'n09262690', 'n09263912', + 'n09264803', 'n09265620', 'n09266604', 'n09267854', 'n09268007', + 'n09269341', 'n09269472', 'n09269882', 'n09270160', 'n09270657', + 'n09270735', 'n09274152', 'n09274305', 'n09279986', 'n09281252', + 'n09282208', 'n09283193', 'n09283405', 'n09283514', 'n09283767', + 'n09283866', 'n09287415', 'n09287968', 'n09288635', 'n09289331', + 'n09289596', 'n09290350', 'n09290444', 'n09294877', 'n09295210', + 'n09295946', 'n09300306', 'n09300905', 'n09302616', 'n09303008', + 'n09303528', 'n09304750', 'n09305031', 'n09305898', 'n09308572', + 'n09308743', 'n09309046', 'n09309168', 'n09309292', 'n09310616', + 'n09315159', 'n09319604', 'n09325824', 'n09326662', 'n09327077', + 'n09327538', 'n09330378', 'n09331251', 'n09332890', 'n09335693', + 'n09335809', 'n09336555', 'n09337048', 'n09337253', 'n09338013', + 'n09339810', 'n09344198', 'n09344324', 'n09344724', 'n09348460', + 'n09349648', 'n09351905', 'n09352849', 'n09353815', 'n09354511', + 'n09357346', 'n09357447', 'n09359803', 'n09361517', 'n09362316', + 'n09362945', 'n09366017', 'n09366317', 'n09375606', 'n09376198', + 'n09376526', 'n09376786', 'n09381242', 'n09382099', 'n09384106', + 'n09389867', 'n09391386', 'n09391644', 'n09391774', 'n09392402', + 'n09393524', 'n09393605', 'n09396465', 'n09396608', 'n09398076', + 'n09398677', 'n09399592', 'n09400584', 'n09400987', 'n09402944', + 'n09403086', 'n09403211', 'n09403427', 'n09403734', 'n09405078', + 'n09405787', 'n09406793', 'n09409512', 'n09409752', 'n09410224', + 'n09411189', 'n09411295', 'n09415584', 'n09415671', 'n09416076', + 'n09416890', 'n09421031', 'n09421799', 'n09421951', 'n09422190', + 'n09422631', 'n09425019', 'n09425344', 'n09428293', 'n09428628', + 'n09429630', 'n09432283', 'n09432990', 'n09433312', 'n09433442', + 'n09433839', 'n09435739', 'n09436444', 'n09436708', 'n09437454', + 'n09438844', 'n09438940', 'n09439032', 'n09439213', 'n09442595', + 'n09443281', 'n09443641', 'n09444783', 'n09445008', 'n09445289', + 'n09447666', 'n09448690', 'n09450163', 'n09451237', 'n09452291', + 'n09452395', 'n09452760', 'n09453008', 'n09454153', 'n09454412', + 'n09454744', 'n09456207', 'n09457979', 'n09458269', 'n09459979', + 'n09460046', 'n09461069', 'n09462600', 'n09463226', 'n09464486', + 'n09466678', 'n09467696', 'n09468604', 'n09470027', 'n09470222', + 'n09472413', 'n09472597', 'n09474010', 'n09474412', 'n09474765', + 'n09475044', 'n09475179', 'n09475925', 'n09476123', 'n09478210', + 'n09480959', 
'n09481120', 'n09493983', 'n09495962', 'n09505153', + 'n09537660', 'n09556121', 'n09605110', 'n09606009', 'n09606527', + 'n09607630', 'n09607782', 'n09607903', 'n09608709', 'n09610255', + 'n09610405', 'n09611722', 'n09612700', 'n09613118', 'n09613191', + 'n09613690', 'n09615336', 'n09616573', 'n09616922', 'n09617161', + 'n09617435', 'n09617577', 'n09617696', 'n09618760', 'n09618880', + 'n09618957', 'n09619168', 'n09619452', 'n09620078', 'n09620794', + 'n09621232', 'n09622049', 'n09622302', 'n09624168', 'n09624559', + 'n09624899', 'n09625401', 'n09626238', 'n09627807', 'n09627906', + 'n09629065', 'n09629246', 'n09629752', 'n09631129', 'n09632274', + 'n09632518', 'n09633969', 'n09635534', 'n09635635', 'n09635973', + 'n09636339', 'n09637339', 'n09638454', 'n09638875', 'n09639382', + 'n09639919', 'n09640327', 'n09640715', 'n09641002', 'n09641578', + 'n09643799', 'n09644152', 'n09644657', 'n09648743', 'n09648911', + 'n09649067', 'n09650729', 'n09650839', 'n09650989', 'n09651123', + 'n09651968', 'n09652149', 'n09653144', 'n09653438', 'n09654079', + 'n09654518', 'n09654898', 'n09655213', 'n09655466', 'n09656077', + 'n09657206', 'n09657748', 'n09658254', 'n09658398', 'n09658815', + 'n09658921', 'n09659039', 'n09659188', 'n09660010', 'n09660240', + 'n09661873', 'n09662038', 'n09662661', 'n09662951', 'n09663248', + 'n09663786', 'n09663999', 'n09664556', 'n09664908', 'n09665367', + 'n09665545', 'n09666349', 'n09666476', 'n09666883', 'n09667358', + 'n09668199', 'n09668437', 'n09668562', 'n09668988', 'n09669631', + 'n09670280', 'n09670521', 'n09670909', 'n09671089', 'n09672590', + 'n09672725', 'n09672840', 'n09673091', 'n09674412', 'n09674786', + 'n09675045', 'n09675673', 'n09675799', 'n09675922', 'n09676021', + 'n09676247', 'n09676884', 'n09677427', 'n09678747', 'n09679028', + 'n09679170', 'n09679925', 'n09680908', 'n09681107', 'n09681234', + 'n09681973', 'n09683180', 'n09683757', 'n09683924', 'n09684082', + 'n09684901', 'n09685233', 'n09685806', 'n09686262', 'n09686401', + 'n09688233', 'n09688804', 'n09689435', 'n09689958', 'n09690083', + 'n09690208', 'n09690496', 'n09690621', 'n09690864', 'n09691604', + 'n09691729', 'n09691858', 'n09692125', 'n09692915', 'n09693244', + 'n09693982', 'n09694664', 'n09694771', 'n09695019', 'n09695132', + 'n09695514', 'n09695620', 'n09695979', 'n09696456', 'n09696585', + 'n09696763', 'n09697401', 'n09697986', 'n09698644', 'n09699020', + 'n09699642', 'n09700125', 'n09700964', 'n09701148', 'n09701833', + 'n09702134', 'n09702673', 'n09703101', 'n09703344', 'n09703485', + 'n09703708', 'n09703809', 'n09703932', 'n09704057', 'n09704157', + 'n09704283', 'n09705003', 'n09705124', 'n09705671', 'n09705784', + 'n09706029', 'n09706255', 'n09707061', 'n09707289', 'n09707735', + 'n09708750', 'n09708889', 'n09709531', 'n09709673', 'n09710041', + 'n09710164', 'n09710886', 'n09711132', 'n09711435', 'n09712324', + 'n09712448', 'n09712696', 'n09712967', 'n09713108', 'n09714120', + 'n09714694', 'n09715165', 'n09715303', 'n09715427', 'n09716047', + 'n09716933', 'n09717233', 'n09718217', 'n09718811', 'n09718936', + 'n09719309', 'n09719794', 'n09720033', 'n09720256', 'n09720595', + 'n09720702', 'n09720842', 'n09721244', 'n09721444', 'n09722064', + 'n09722658', 'n09722817', 'n09723067', 'n09723819', 'n09723944', + 'n09724234', 'n09724533', 'n09724656', 'n09724785', 'n09725000', + 'n09725229', 'n09725546', 'n09725653', 'n09725772', 'n09725935', + 'n09726621', 'n09726811', 'n09727440', 'n09727826', 'n09728137', + 'n09728285', 'n09729062', 'n09729156', 'n09730077', 'n09730204', + 'n09730824', 
'n09731343', 'n09731436', 'n09731571', 'n09732170', + 'n09733459', 'n09733793', 'n09734185', 'n09734450', 'n09734535', + 'n09734639', 'n09735258', 'n09735654', 'n09736485', 'n09736798', + 'n09736945', 'n09737050', 'n09737161', 'n09737453', 'n09738121', + 'n09738400', 'n09740724', 'n09741074', 'n09741331', 'n09741722', + 'n09741816', 'n09741904', 'n09741999', 'n09742101', 'n09742315', + 'n09742927', 'n09743487', 'n09743601', 'n09743792', 'n09744161', + 'n09744346', 'n09744462', 'n09744679', 'n09744834', 'n09745229', + 'n09745324', 'n09745834', 'n09745933', 'n09746936', 'n09747191', + 'n09747495', 'n09748101', 'n09748408', 'n09748648', 'n09748889', + 'n09749386', 'n09750282', 'n09750641', 'n09750770', 'n09750891', + 'n09751076', 'n09751496', 'n09751622', 'n09751895', 'n09752023', + 'n09752519', 'n09753348', 'n09753792', 'n09754152', 'n09754217', + 'n09754633', 'n09754907', 'n09755086', 'n09755241', 'n09755555', + 'n09755788', 'n09755893', 'n09756049', 'n09756195', 'n09756961', + 'n09757449', 'n09758173', 'n09758885', 'n09759501', 'n09760290', + 'n09760609', 'n09760913', 'n09761068', 'n09761753', 'n09762011', + 'n09762385', 'n09763272', 'n09763784', 'n09764201', 'n09764598', + 'n09764732', 'n09764900', 'n09765118', 'n09765278', 'n09767197', + 'n09769076', 'n09769525', 'n09769929', 'n09770179', 'n09770359', + 'n09771435', 'n09772330', 'n09772746', 'n09772930', 'n09773962', + 'n09774167', 'n09774783', 'n09775907', 'n09776346', 'n09776642', + 'n09776807', 'n09777870', 'n09778266', 'n09778537', 'n09778783', + 'n09778927', 'n09779124', 'n09779280', 'n09779461', 'n09779790', + 'n09780395', 'n09780828', 'n09780984', 'n09781398', 'n09781504', + 'n09781650', 'n09782167', 'n09782397', 'n09782855', 'n09783537', + 'n09783776', 'n09783884', 'n09784043', 'n09784160', 'n09784564', + 'n09785236', 'n09785659', 'n09785891', 'n09786115', 'n09787534', + 'n09787765', 'n09788073', 'n09788237', 'n09789150', 'n09789566', + 'n09789898', 'n09790047', 'n09790482', 'n09791014', 'n09791419', + 'n09791816', 'n09792125', 'n09792555', 'n09792969', 'n09793141', + 'n09793352', 'n09793946', 'n09794550', 'n09794668', 'n09795010', + 'n09795124', 'n09795334', 'n09796809', 'n09796974', 'n09797742', + 'n09797873', 'n09797998', 'n09798096', 'n09800469', 'n09800964', + 'n09801102', 'n09801275', 'n09801533', 'n09802445', 'n09802641', + 'n09802951', 'n09804230', 'n09805151', 'n09805324', 'n09805475', + 'n09806944', 'n09807075', 'n09808080', 'n09808591', 'n09809279', + 'n09809538', 'n09809749', 'n09809925', 'n09810166', 'n09811568', + 'n09811712', 'n09811852', 'n09813219', 'n09814252', 'n09814381', + 'n09814488', 'n09814567', 'n09814660', 'n09815455', 'n09815790', + 'n09816654', 'n09816771', 'n09817174', 'n09817386', 'n09818022', + 'n09819477', 'n09820044', 'n09820263', 'n09821831', 'n09822830', + 'n09823153', 'n09823287', 'n09823502', 'n09823832', 'n09824135', + 'n09824609', 'n09825096', 'n09825750', 'n09826204', 'n09826605', + 'n09826821', 'n09827246', 'n09827363', 'n09828216', 'n09828403', + 'n09828988', 'n09830194', 'n09830400', 'n09830629', 'n09830759', + 'n09830926', 'n09831962', 'n09832456', 'n09832633', 'n09832978', + 'n09833111', 'n09833275', 'n09833441', 'n09833536', 'n09833751', + 'n09833997', 'n09834258', 'n09834378', 'n09834699', 'n09834885', + 'n09835017', 'n09835153', 'n09835230', 'n09835348', 'n09835506', + 'n09836160', 'n09836343', 'n09836519', 'n09836786', 'n09837459', + 'n09837720', 'n09838295', 'n09838370', 'n09838621', 'n09839702', + 'n09840217', 'n09840435', 'n09840520', 'n09841188', 'n09841515', + 'n09841696', 
'n09842047', 'n09842288', 'n09842395', 'n09842528', + 'n09842823', 'n09843443', 'n09843602', 'n09843716', 'n09843824', + 'n09844457', 'n09844898', 'n09845401', 'n09845849', 'n09846142', + 'n09846469', 'n09846586', 'n09846755', 'n09846894', 'n09847267', + 'n09847344', 'n09847543', 'n09848110', 'n09848489', 'n09849167', + 'n09849990', 'n09850760', 'n09850974', 'n09851165', 'n09851575', + 'n09853541', 'n09853645', 'n09853881', 'n09854218', 'n09854421', + 'n09854915', 'n09855433', 'n09856401', 'n09856671', 'n09856827', + 'n09857007', 'n09858165', 'n09858299', 'n09858733', 'n09859152', + 'n09859285', 'n09859975', 'n09861287', 'n09861599', 'n09861863', + 'n09861946', 'n09862183', 'n09862621', 'n09863031', 'n09863339', + 'n09863749', 'n09863936', 'n09864632', 'n09864968', 'n09865068', + 'n09865162', 'n09865398', 'n09865672', 'n09865744', 'n09866115', + 'n09866354', 'n09866559', 'n09866661', 'n09866817', 'n09866922', + 'n09867069', 'n09867154', 'n09867311', 'n09868270', 'n09868782', + 'n09868899', 'n09869317', 'n09869447', 'n09869578', 'n09870096', + 'n09871095', 'n09871229', 'n09871681', 'n09871867', 'n09871952', + 'n09872066', 'n09872557', 'n09873348', 'n09873473', 'n09873769', + 'n09873899', 'n09874428', 'n09874725', 'n09874862', 'n09875025', + 'n09875979', 'n09876701', 'n09877288', 'n09877587', 'n09877750', + 'n09877951', 'n09878921', 'n09879552', 'n09880189', 'n09880741', + 'n09881265', 'n09881358', 'n09881895', 'n09883047', 'n09883452', + 'n09883807', 'n09885059', 'n09885866', 'n09886403', 'n09886540', + 'n09888635', 'n09889065', 'n09889170', 'n09889691', 'n09889941', + 'n09890192', 'n09890749', 'n09891730', 'n09892262', 'n09892513', + 'n09892693', 'n09893191', 'n09893344', 'n09893502', 'n09893600', + 'n09894143', 'n09894445', 'n09894654', 'n09894909', 'n09895222', + 'n09895480', 'n09895561', 'n09895701', 'n09895902', 'n09896170', + 'n09896311', 'n09896401', 'n09896685', 'n09896826', 'n09898020', + 'n09899289', 'n09899671', 'n09899782', 'n09899929', 'n09901337', + 'n09901502', 'n09901642', 'n09901786', 'n09901921', 'n09902128', + 'n09902353', 'n09902731', 'n09902851', 'n09902954', 'n09903153', + 'n09903501', 'n09903639', 'n09903936', 'n09904208', 'n09904837', + 'n09905050', 'n09905185', 'n09905530', 'n09906293', 'n09906449', + 'n09906704', 'n09907804', 'n09908769', 'n09909660', 'n09909929', + 'n09910222', 'n09910374', 'n09910556', 'n09910840', 'n09911226', + 'n09912431', 'n09912681', 'n09912907', 'n09912995', 'n09913329', + 'n09913455', 'n09913593', 'n09915434', 'n09915651', 'n09916348', + 'n09917214', 'n09917345', 'n09917481', 'n09917593', 'n09918248', + 'n09918554', 'n09918867', 'n09919061', 'n09919200', 'n09919451', + 'n09919899', 'n09920106', 'n09920283', 'n09920901', 'n09921034', + 'n09923003', 'n09923186', 'n09923418', 'n09923561', 'n09923673', + 'n09923996', 'n09924106', 'n09924195', 'n09924313', 'n09924437', + 'n09924996', 'n09927089', 'n09927451', 'n09928136', 'n09928451', + 'n09928845', 'n09929202', 'n09929298', 'n09929577', 'n09930257', + 'n09930628', 'n09930876', 'n09931165', 'n09931418', 'n09931640', + 'n09932098', 'n09932336', 'n09932508', 'n09932788', 'n09933020', + 'n09933098', 'n09933842', 'n09933972', 'n09934337', 'n09934488', + 'n09934774', 'n09935107', 'n09935434', 'n09936825', 'n09936892', + 'n09937056', 'n09937688', 'n09937802', 'n09937903', 'n09938080', + 'n09938449', 'n09938991', 'n09940725', 'n09940818', 'n09941089', + 'n09941571', 'n09941787', 'n09941964', 'n09942697', 'n09942970', + 'n09943239', 'n09943811', 'n09944022', 'n09944160', 'n09944430', + 'n09945021', 
'n09945223', 'n09945319', 'n09945603', 'n09945745', + 'n09946814', 'n09947127', 'n09950457', 'n09950728', 'n09951070', + 'n09951274', 'n09951524', 'n09951616', 'n09952163', 'n09953052', + 'n09953350', 'n09953615', 'n09954355', 'n09954639', 'n09955406', + 'n09955944', 'n09956578', 'n09957523', 'n09958133', 'n09958292', + 'n09958447', 'n09958569', 'n09959142', 'n09959658', 'n09960688', + 'n09961198', 'n09961331', 'n09961469', 'n09961605', 'n09961739', + 'n09962966', 'n09964202', 'n09964411', 'n09965515', 'n09965787', + 'n09966470', 'n09966554', 'n09967063', 'n09967406', 'n09967555', + 'n09967816', 'n09967967', 'n09968259', 'n09968652', 'n09968741', + 'n09968845', 'n09970088', 'n09970192', 'n09970402', 'n09970822', + 'n09971273', 'n09971385', 'n09971839', 'n09972010', 'n09972458', + 'n09972587', 'n09974648', 'n09975425', 'n09976024', 'n09976283', + 'n09976429', 'n09976728', 'n09976917', 'n09978442', 'n09979321', + 'n09979913', 'n09980458', 'n09980805', 'n09980985', 'n09981092', + 'n09981278', 'n09981540', 'n09981939', 'n09982152', 'n09982525', + 'n09983314', 'n09983572', 'n09983889', 'n09984960', 'n09985470', + 'n09985809', 'n09985978', 'n09986450', 'n09986700', 'n09986904', + 'n09987045', 'n09987161', 'n09987239', 'n09988063', 'n09988311', + 'n09988493', 'n09988703', 'n09989502', 'n09990415', 'n09990690', + 'n09990777', 'n09991740', 'n09991867', 'n09992538', 'n09992837', + 'n09993252', 'n09993651', 'n09994400', 'n09994673', 'n09994808', + 'n09994878', 'n09995829', 'n09996039', 'n09996304', 'n09996481', + 'n09997622', 'n09998788', 'n09999135', 'n10000294', 'n10000459', + 'n10000787', 'n10001217', 'n10001481', 'n10001764', 'n10002257', + 'n10002760', 'n10003476', 'n10004718', 'n10005006', 'n10005934', + 'n10006177', 'n10006748', 'n10007684', 'n10007809', 'n10007995', + 'n10008123', 'n10008254', 'n10009162', 'n10009276', 'n10009484', + 'n10009671', 'n10010062', 'n10010243', 'n10010632', 'n10010767', + 'n10010864', 'n10011360', 'n10011486', 'n10012484', 'n10013811', + 'n10015215', 'n10015485', 'n10015792', 'n10015897', 'n10017272', + 'n10017422', 'n10018747', 'n10018861', 'n10019072', 'n10019187', + 'n10019406', 'n10020366', 'n10020533', 'n10020670', 'n10020807', + 'n10020890', 'n10022908', 'n10023264', 'n10023506', 'n10023656', + 'n10024025', 'n10024362', 'n10024937', 'n10025060', 'n10025295', + 'n10025391', 'n10025635', 'n10026976', 'n10027246', 'n10027590', + 'n10028402', 'n10028541', 'n10029068', 'n10030277', 'n10032987', + 'n10033412', 'n10033572', 'n10033663', 'n10033888', 'n10034201', + 'n10034614', 'n10035952', 'n10036266', 'n10036444', 'n10036692', + 'n10036929', 'n10037080', 'n10037385', 'n10037588', 'n10037922', + 'n10038119', 'n10038409', 'n10038620', 'n10039271', 'n10039946', + 'n10040240', 'n10040698', 'n10040945', 'n10041373', 'n10041887', + 'n10042690', 'n10042845', 'n10043024', 'n10043491', 'n10043643', + 'n10044682', 'n10044879', 'n10047199', 'n10047459', 'n10048117', + 'n10048367', 'n10048612', 'n10048836', 'n10049363', 'n10050043', + 'n10050880', 'n10051026', 'n10051761', 'n10051861', 'n10051975', + 'n10052694', 'n10053439', 'n10053808', 'n10054657', 'n10055297', + 'n10055410', 'n10055566', 'n10055730', 'n10055847', 'n10056103', + 'n10056611', 'n10056719', 'n10057271', 'n10058411', 'n10058962', + 'n10059067', 'n10060075', 'n10060175', 'n10060352', 'n10061043', + 'n10061195', 'n10061431', 'n10061882', 'n10062042', 'n10062176', + 'n10062275', 'n10062492', 'n10062594', 'n10062716', 'n10062905', + 'n10062996', 'n10063635', 'n10063919', 'n10064831', 'n10064977', + 'n10065758', 
'n10066206', 'n10066314', 'n10067011', 'n10067305', + 'n10067600', 'n10067968', 'n10068234', 'n10068425', 'n10069296', + 'n10069981', 'n10070108', 'n10070377', 'n10070449', 'n10070563', + 'n10070711', 'n10071332', 'n10071557', 'n10072054', 'n10074249', + 'n10074578', 'n10074735', 'n10074841', 'n10075299', 'n10075693', + 'n10076224', 'n10076483', 'n10076604', 'n10076957', 'n10077106', + 'n10077593', 'n10077879', 'n10078131', 'n10078719', 'n10078806', + 'n10079399', 'n10079893', 'n10080117', 'n10080508', 'n10080869', + 'n10081204', 'n10081842', 'n10082043', 'n10082299', 'n10082423', + 'n10082562', 'n10082687', 'n10082997', 'n10083677', 'n10083823', + 'n10084043', 'n10084295', 'n10085101', 'n10085869', 'n10086383', + 'n10086744', 'n10087434', 'n10087736', 'n10088200', 'n10090745', + 'n10091349', 'n10091450', 'n10091564', 'n10091651', 'n10091861', + 'n10091997', 'n10092488', 'n10092643', 'n10092794', 'n10092978', + 'n10093167', 'n10093475', 'n10093818', 'n10094320', 'n10094584', + 'n10094782', 'n10095265', 'n10095420', 'n10095769', 'n10095869', + 'n10096126', 'n10096508', 'n10097262', 'n10097477', 'n10097590', + 'n10097842', 'n10097995', 'n10098245', 'n10098388', 'n10098517', + 'n10098624', 'n10098710', 'n10098862', 'n10099002', 'n10099375', + 'n10101308', 'n10101634', 'n10101981', 'n10102800', 'n10103155', + 'n10103228', 'n10103921', 'n10104064', 'n10104487', 'n10104756', + 'n10104888', 'n10105085', 'n10105733', 'n10105906', 'n10106387', + 'n10106509', 'n10106995', 'n10107173', 'n10107303', 'n10108018', + 'n10108089', 'n10108464', 'n10108832', 'n10109443', 'n10109662', + 'n10109826', 'n10110093', 'n10110731', 'n10110893', 'n10111358', + 'n10111779', 'n10111903', 'n10112129', 'n10113249', 'n10113583', + 'n10113869', 'n10114476', 'n10114550', 'n10114662', 'n10115430', + 'n10115946', 'n10116370', 'n10116478', 'n10116702', 'n10117017', + 'n10117267', 'n10117415', 'n10117739', 'n10117851', 'n10118301', + 'n10118743', 'n10118844', 'n10119609', 'n10120330', 'n10120671', + 'n10121026', 'n10121246', 'n10121714', 'n10121800', 'n10122300', + 'n10122531', 'n10123122', 'n10123844', 'n10126177', 'n10126424', + 'n10126708', 'n10127186', 'n10127689', 'n10128519', 'n10128748', + 'n10129338', 'n10129825', 'n10130686', 'n10130877', 'n10131151', + 'n10131268', 'n10131590', 'n10131815', 'n10132035', 'n10132502', + 'n10134178', 'n10134396', 'n10134760', 'n10134982', 'n10135129', + 'n10135197', 'n10135297', 'n10136615', 'n10136959', 'n10137825', + 'n10138369', 'n10138472', 'n10139077', 'n10139651', 'n10140051', + 'n10140597', 'n10140683', 'n10140783', 'n10140929', 'n10141364', + 'n10141732', 'n10142166', 'n10142391', 'n10142537', 'n10142747', + 'n10142946', 'n10143172', 'n10143595', 'n10143725', 'n10144338', + 'n10145239', 'n10145340', 'n10145480', 'n10145590', 'n10145774', + 'n10145902', 'n10146002', 'n10146104', 'n10146416', 'n10146816', + 'n10146927', 'n10147121', 'n10147262', 'n10147710', 'n10147935', + 'n10148035', 'n10148305', 'n10148825', 'n10149436', 'n10149867', + 'n10150071', 'n10150794', 'n10150940', 'n10151133', 'n10151261', + 'n10151367', 'n10151570', 'n10151760', 'n10152306', 'n10152616', + 'n10152763', 'n10153155', 'n10153414', 'n10153594', 'n10153865', + 'n10154013', 'n10154186', 'n10154601', 'n10155222', 'n10155600', + 'n10155849', 'n10156629', 'n10156831', 'n10157016', 'n10157128', + 'n10157271', 'n10158506', 'n10159045', 'n10159289', 'n10159533', + 'n10160188', 'n10160280', 'n10160412', 'n10161622', 'n10162016', + 'n10162194', 'n10162354', 'n10164025', 'n10164233', 'n10164492', + 'n10165448', 
'n10166189', 'n10166394', 'n10167152', 'n10167361', + 'n10167565', 'n10167838', 'n10168012', 'n10168183', 'n10168584', + 'n10168837', 'n10169147', 'n10169241', 'n10169419', 'n10169796', + 'n10170060', 'n10170681', 'n10170866', 'n10171219', 'n10171456', + 'n10171567', 'n10172080', 'n10173410', 'n10173579', 'n10173665', + 'n10173771', 'n10174253', 'n10174330', 'n10174445', 'n10174589', + 'n10174695', 'n10174971', 'n10175248', 'n10175725', 'n10176913', + 'n10177150', 'n10178077', 'n10178216', 'n10179069', 'n10180580', + 'n10180791', 'n10180923', 'n10181445', 'n10181547', 'n10181799', + 'n10181878', 'n10182190', 'n10182402', 'n10183347', 'n10183931', + 'n10184505', 'n10185148', 'n10185483', 'n10185793', 'n10186068', + 'n10186143', 'n10186216', 'n10186350', 'n10186686', 'n10186774', + 'n10187130', 'n10187491', 'n10187990', 'n10188715', 'n10188856', + 'n10188957', 'n10189278', 'n10189597', 'n10190122', 'n10190516', + 'n10191001', 'n10191388', 'n10191613', 'n10192839', 'n10193650', + 'n10194231', 'n10194775', 'n10195056', 'n10195155', 'n10195261', + 'n10195593', 'n10196404', 'n10196725', 'n10197392', 'n10198437', + 'n10198832', 'n10199251', 'n10200246', 'n10200781', 'n10202225', + 'n10202624', 'n10202763', 'n10203949', 'n10204177', 'n10204833', + 'n10205231', 'n10205344', 'n10205457', 'n10205714', 'n10206173', + 'n10206506', 'n10206629', 'n10207077', 'n10207169', 'n10208189', + 'n10208847', 'n10208950', 'n10209082', 'n10209731', 'n10210137', + 'n10210512', 'n10210648', 'n10210911', 'n10211036', 'n10211666', + 'n10211830', 'n10212231', 'n10212501', 'n10212780', 'n10213034', + 'n10213429', 'n10214062', 'n10214390', 'n10215623', 'n10216106', + 'n10216403', 'n10217208', 'n10218043', 'n10218164', 'n10218292', + 'n10219240', 'n10219453', 'n10219879', 'n10220080', 'n10220924', + 'n10221312', 'n10221520', 'n10222170', 'n10222259', 'n10222497', + 'n10222716', 'n10223069', 'n10223177', 'n10223606', 'n10224578', + 'n10225219', 'n10225931', 'n10226413', 'n10227166', 'n10227266', + 'n10227393', 'n10227490', 'n10227698', 'n10227793', 'n10227985', + 'n10228278', 'n10228468', 'n10228592', 'n10228712', 'n10229883', + 'n10230216', 'n10233248', 'n10235024', 'n10235269', 'n10235385', + 'n10236304', 'n10236521', 'n10236842', 'n10237069', 'n10237196', + 'n10237464', 'n10237556', 'n10237676', 'n10237799', 'n10238272', + 'n10238375', 'n10239928', 'n10240082', 'n10240235', 'n10240417', + 'n10240821', 'n10241024', 'n10241300', 'n10242328', 'n10243137', + 'n10243273', 'n10243483', 'n10243664', 'n10243872', 'n10244108', + 'n10244359', 'n10244913', 'n10245029', 'n10245341', 'n10245507', + 'n10245639', 'n10245863', 'n10246317', 'n10246395', 'n10246703', + 'n10247358', 'n10247880', 'n10248008', 'n10248198', 'n10248377', + 'n10249191', 'n10249270', 'n10249459', 'n10249869', 'n10249950', + 'n10250712', 'n10251329', 'n10251612', 'n10252075', 'n10252222', + 'n10252354', 'n10252547', 'n10253122', 'n10253296', 'n10253479', + 'n10253611', 'n10253703', 'n10255459', 'n10257221', 'n10258602', + 'n10258786', 'n10259348', 'n10259780', 'n10259997', 'n10260473', + 'n10260706', 'n10260800', 'n10261211', 'n10261511', 'n10261624', + 'n10261862', 'n10262343', 'n10262445', 'n10262561', 'n10262655', + 'n10262880', 'n10263146', 'n10263411', 'n10263790', 'n10265281', + 'n10265801', 'n10265891', 'n10266016', 'n10266328', 'n10266848', + 'n10267166', 'n10267311', 'n10267865', 'n10268629', 'n10269199', + 'n10269289', 'n10271677', 'n10272782', 'n10272913', 'n10273064', + 'n10274173', 'n10274318', 'n10274815', 'n10275249', 'n10275395', + 'n10275848', 
'n10276045', 'n10276477', 'n10276942', 'n10277027', + 'n10277638', 'n10277815', 'n10277912', 'n10278456', 'n10279018', + 'n10279778', 'n10280034', 'n10280130', 'n10280598', 'n10280674', + 'n10281546', 'n10281770', 'n10281896', 'n10282482', 'n10282672', + 'n10283170', 'n10283366', 'n10283546', 'n10284064', 'n10284871', + 'n10284965', 'n10286282', 'n10286539', 'n10286749', 'n10288964', + 'n10289039', 'n10289176', 'n10289462', 'n10289766', 'n10290422', + 'n10290541', 'n10290813', 'n10290919', 'n10291110', 'n10291469', + 'n10291822', 'n10291942', 'n10292316', 'n10293332', 'n10293590', + 'n10293861', 'n10294020', 'n10294139', 'n10295371', 'n10295479', + 'n10296176', 'n10296444', 'n10297234', 'n10297367', 'n10297531', + 'n10297841', 'n10298202', 'n10298271', 'n10298647', 'n10298912', + 'n10299125', 'n10299250', 'n10299700', 'n10299875', 'n10300041', + 'n10300154', 'n10300303', 'n10300500', 'n10300654', 'n10300829', + 'n10302576', 'n10302700', 'n10302905', 'n10303037', 'n10303814', + 'n10304086', 'n10304650', 'n10304914', 'n10305635', 'n10305802', + 'n10306004', 'n10306279', 'n10306496', 'n10306595', 'n10306890', + 'n10307114', 'n10308066', 'n10308168', 'n10308275', 'n10308504', + 'n10308653', 'n10308732', 'n10310783', 'n10311506', 'n10311661', + 'n10312287', 'n10312491', 'n10312600', 'n10313000', 'n10313239', + 'n10313441', 'n10313724', 'n10314054', 'n10314182', 'n10314517', + 'n10314836', 'n10315217', 'n10315456', 'n10315561', 'n10315730', + 'n10316360', 'n10316527', 'n10316862', 'n10317007', 'n10317500', + 'n10317963', 'n10318293', 'n10318607', 'n10318686', 'n10319313', + 'n10320484', 'n10320863', 'n10321126', 'n10321340', 'n10321632', + 'n10321882', 'n10322238', 'n10323634', 'n10323752', 'n10323999', + 'n10324560', 'n10325549', 'n10325774', 'n10326776', 'n10327143', + 'n10327987', 'n10328123', 'n10328328', 'n10328437', 'n10328696', + 'n10328941', 'n10329035', 'n10330593', 'n10330931', 'n10331098', + 'n10331167', 'n10331258', 'n10331347', 'n10331841', 'n10332110', + 'n10332385', 'n10332861', 'n10332953', 'n10333044', 'n10333165', + 'n10333317', 'n10333439', 'n10333601', 'n10333838', 'n10334009', + 'n10334461', 'n10334782', 'n10335246', 'n10335801', 'n10335931', + 'n10336411', 'n10336904', 'n10337488', 'n10338231', 'n10338391', + 'n10339179', 'n10339251', 'n10339717', 'n10340312', 'n10341243', + 'n10341343', 'n10341446', 'n10341573', 'n10341955', 'n10342180', + 'n10342367', 'n10342543', 'n10342893', 'n10342992', 'n10343088', + 'n10343355', 'n10343449', 'n10343554', 'n10343869', 'n10344121', + 'n10344203', 'n10344319', 'n10344656', 'n10344774', 'n10345015', + 'n10345100', 'n10345302', 'n10345422', 'n10345659', 'n10346015', + 'n10347204', 'n10347446', 'n10348526', 'n10349243', 'n10349750', + 'n10349836', 'n10350220', 'n10350774', 'n10351064', 'n10353016', + 'n10353355', 'n10353928', 'n10354265', 'n10354754', 'n10355142', + 'n10355306', 'n10355449', 'n10355688', 'n10355806', 'n10356450', + 'n10356877', 'n10357012', 'n10357613', 'n10357737', 'n10358032', + 'n10358124', 'n10358575', 'n10359117', 'n10359422', 'n10359546', + 'n10359659', 'n10360366', 'n10360747', 'n10361060', 'n10361194', + 'n10361296', 'n10361525', 'n10362003', 'n10362319', 'n10362557', + 'n10363445', 'n10363573', 'n10364198', 'n10364502', 'n10365514', + 'n10366145', 'n10366276', 'n10366966', 'n10368291', 'n10368528', + 'n10368624', 'n10368711', 'n10368798', 'n10369095', 'n10369317', + 'n10369417', 'n10369528', 'n10369699', 'n10369955', 'n10370381', + 'n10370955', 'n10371052', 'n10371221', 'n10371330', 'n10371450', + 'n10373390', 
'n10373525', 'n10374541', 'n10374849', 'n10374943', + 'n10375052', 'n10375314', 'n10375402', 'n10376523', 'n10376890', + 'n10377021', 'n10377185', 'n10377291', 'n10377542', 'n10377633', + 'n10378026', 'n10378113', 'n10378780', 'n10379376', 'n10380126', + 'n10380499', 'n10380672', 'n10381804', 'n10381981', 'n10382157', + 'n10382302', 'n10382480', 'n10382710', 'n10382825', 'n10383094', + 'n10383237', 'n10383505', 'n10383816', 'n10384214', 'n10384392', + 'n10384496', 'n10385566', 'n10386196', 'n10386754', 'n10386874', + 'n10386984', 'n10387196', 'n10387324', 'n10387836', 'n10389865', + 'n10389976', 'n10390600', 'n10390698', 'n10390807', 'n10391416', + 'n10393909', 'n10394434', 'n10394786', 'n10395073', 'n10395209', + 'n10395390', 'n10395828', 'n10396106', 'n10396337', 'n10396727', + 'n10396908', 'n10397001', 'n10397142', 'n10397392', 'n10399130', + 'n10400003', 'n10400108', 'n10400205', 'n10400437', 'n10400618', + 'n10400998', 'n10401204', 'n10401331', 'n10401639', 'n10402709', + 'n10402824', 'n10403633', 'n10403876', 'n10404426', 'n10404998', + 'n10405540', 'n10405694', 'n10406266', 'n10406391', 'n10406765', + 'n10407310', 'n10407954', 'n10408809', 'n10409459', 'n10409752', + 'n10410246', 'n10410996', 'n10411356', 'n10411551', 'n10411867', + 'n10414239', 'n10414768', 'n10414865', 'n10415037', 'n10416567', + 'n10417288', 'n10417424', 'n10417551', 'n10417682', 'n10417843', + 'n10417969', 'n10418101', 'n10418735', 'n10419047', 'n10419472', + 'n10419630', 'n10419785', 'n10420031', 'n10420277', 'n10420507', + 'n10420649', 'n10421016', 'n10421470', 'n10421956', 'n10422405', + 'n10425946', 'n10426454', 'n10426630', 'n10427223', 'n10427359', + 'n10427764', 'n10428004', 'n10431122', 'n10431625', 'n10432189', + 'n10432441', 'n10432875', 'n10432957', 'n10433077', 'n10433452', + 'n10433610', 'n10433737', 'n10435169', 'n10435251', 'n10435716', + 'n10435988', 'n10436334', 'n10437014', 'n10437137', 'n10437262', + 'n10437698', 'n10438172', 'n10438619', 'n10438842', 'n10439373', + 'n10439523', 'n10439727', 'n10439851', 'n10441037', 'n10441124', + 'n10441694', 'n10441962', 'n10442093', 'n10442232', 'n10442417', + 'n10442573', 'n10443032', 'n10443659', 'n10443830', 'n10444194', + 'n10448322', 'n10448455', 'n10449664', 'n10450038', 'n10450161', + 'n10450303', 'n10451450', 'n10451590', 'n10451858', 'n10453184', + 'n10455619', 'n10456070', 'n10456138', 'n10456696', 'n10457214', + 'n10457444', 'n10457903', 'n10458111', 'n10458356', 'n10458596', + 'n10459882', 'n10460033', 'n10461060', 'n10462588', 'n10462751', + 'n10462860', 'n10464052', 'n10464542', 'n10464711', 'n10464870', + 'n10465002', 'n10465451', 'n10465831', 'n10466198', 'n10466564', + 'n10466918', 'n10467179', 'n10467395', 'n10468750', 'n10469611', + 'n10469874', 'n10470779', 'n10471640', 'n10471732', 'n10471859', + 'n10472129', 'n10472447', 'n10473453', 'n10473562', 'n10473789', + 'n10473917', 'n10474064', 'n10474343', 'n10474446', 'n10474645', + 'n10475835', 'n10475940', 'n10476467', 'n10477713', 'n10477955', + 'n10478118', 'n10478293', 'n10478462', 'n10478827', 'n10478960', + 'n10479135', 'n10479328', 'n10481167', 'n10481268', 'n10482054', + 'n10482220', 'n10482587', 'n10482921', 'n10483138', 'n10483395', + 'n10483799', 'n10483890', 'n10484858', 'n10485298', 'n10485883', + 'n10486166', 'n10486236', 'n10486561', 'n10487182', 'n10487363', + 'n10487592', 'n10488016', 'n10488309', 'n10488656', 'n10489426', + 'n10490421', 'n10491998', 'n10492086', 'n10492727', 'n10493199', + 'n10493419', 'n10493685', 'n10493835', 'n10493922', 'n10494195', + 'n10494373', 
'n10495167', 'n10495421', 'n10495555', 'n10495756', + 'n10496393', 'n10496489', 'n10497135', 'n10497534', 'n10497645', + 'n10498046', 'n10498699', 'n10498816', 'n10498986', 'n10499110', + 'n10499232', 'n10499355', 'n10499631', 'n10499857', 'n10500217', + 'n10500419', 'n10500603', 'n10500824', 'n10500942', 'n10501453', + 'n10501635', 'n10502046', 'n10502329', 'n10502950', 'n10503818', + 'n10504090', 'n10504206', 'n10505347', 'n10505613', 'n10505732', + 'n10505942', 'n10506336', 'n10506544', 'n10506915', 'n10507070', + 'n10507380', 'n10507482', 'n10507565', 'n10507692', 'n10508141', + 'n10508379', 'n10508710', 'n10509063', 'n10509161', 'n10509810', + 'n10510245', 'n10510974', 'n10511771', 'n10512201', 'n10512372', + 'n10512708', 'n10512859', 'n10513509', 'n10513823', 'n10513938', + 'n10514051', 'n10514121', 'n10514255', 'n10514429', 'n10514784', + 'n10515863', 'n10516527', 'n10517137', 'n10517283', 'n10518349', + 'n10519126', 'n10519494', 'n10519984', 'n10520286', 'n10520544', + 'n10520964', 'n10521100', 'n10521662', 'n10521853', 'n10522035', + 'n10522324', 'n10522759', 'n10523341', 'n10524076', 'n10524223', + 'n10524869', 'n10525134', 'n10525436', 'n10525617', 'n10525878', + 'n10526534', 'n10527147', 'n10527334', 'n10528023', 'n10528148', + 'n10528493', 'n10529231', 'n10530150', 'n10530383', 'n10530571', + 'n10530959', 'n10531109', 'n10531445', 'n10531838', 'n10533874', + 'n10533983', 'n10536134', 'n10536274', 'n10536416', 'n10537708', + 'n10537906', 'n10538629', 'n10538733', 'n10538853', 'n10539015', + 'n10539160', 'n10539278', 'n10540114', 'n10540252', 'n10540656', + 'n10541833', 'n10542608', 'n10542761', 'n10542888', 'n10543161', + 'n10543937', 'n10544232', 'n10544748', 'n10545792', 'n10546428', + 'n10546633', 'n10548419', 'n10548537', 'n10548681', 'n10549510', + 'n10550252', 'n10550369', 'n10550468', 'n10551576', 'n10552393', + 'n10553140', 'n10553235', 'n10554024', 'n10554141', 'n10554846', + 'n10555059', 'n10555430', 'n10556033', 'n10556518', 'n10556704', + 'n10556825', 'n10557246', 'n10557854', 'n10559009', 'n10559288', + 'n10559508', 'n10559683', 'n10559996', 'n10560106', 'n10560637', + 'n10561222', 'n10561320', 'n10561736', 'n10562135', 'n10562283', + 'n10562509', 'n10562968', 'n10563314', 'n10563403', 'n10563711', + 'n10564098', 'n10565502', 'n10565667', 'n10566072', 'n10567613', + 'n10567722', 'n10567848', 'n10568200', 'n10568358', 'n10568443', + 'n10568608', 'n10568915', 'n10569011', 'n10569179', 'n10570019', + 'n10570704', 'n10571907', 'n10572706', 'n10572889', 'n10573957', + 'n10574311', 'n10574538', 'n10574840', 'n10575463', 'n10575594', + 'n10575787', 'n10576223', 'n10576316', 'n10576676', 'n10576818', + 'n10576962', 'n10577182', 'n10577284', 'n10577710', 'n10577820', + 'n10578021', 'n10578162', 'n10578471', 'n10578656', 'n10579062', + 'n10579549', 'n10580030', 'n10580437', 'n10580535', 'n10581648', + 'n10581890', 'n10582604', 'n10582746', 'n10583387', 'n10583790', + 'n10585077', 'n10585217', 'n10585628', 'n10586166', 'n10586265', + 'n10586444', 'n10586903', 'n10586998', 'n10588074', 'n10588357', + 'n10588724', 'n10588965', 'n10589666', 'n10590146', 'n10590239', + 'n10590452', 'n10590903', 'n10591072', 'n10591811', 'n10592049', + 'n10592811', 'n10593521', 'n10594147', 'n10594523', 'n10594857', + 'n10595164', 'n10595647', 'n10596517', 'n10596899', 'n10597505', + 'n10597745', 'n10597889', 'n10598013', 'n10598181', 'n10598459', + 'n10598904', 'n10599215', 'n10599806', 'n10601234', 'n10601362', + 'n10602119', 'n10602470', 'n10602985', 'n10603528', 'n10603851', + 'n10604275', 
'n10604380', 'n10604634', 'n10604880', 'n10604979', + 'n10605253', 'n10605737', 'n10607291', 'n10607478', 'n10609092', + 'n10609198', 'n10610465', 'n10610850', 'n10611267', 'n10611613', + 'n10612210', 'n10612373', 'n10612518', 'n10613996', 'n10614507', + 'n10614629', 'n10615179', 'n10615334', 'n10616578', 'n10617024', + 'n10617193', 'n10617397', 'n10618234', 'n10618342', 'n10618465', + 'n10618685', 'n10618848', 'n10619492', 'n10619642', 'n10619888', + 'n10620212', 'n10620586', 'n10620758', 'n10621294', 'n10621400', + 'n10621514', 'n10622053', 'n10624074', 'n10624310', 'n10624437', + 'n10624540', 'n10625860', 'n10626630', 'n10627252', 'n10628097', + 'n10628644', 'n10629329', 'n10629647', 'n10629939', 'n10630093', + 'n10630188', 'n10631131', 'n10631309', 'n10631654', 'n10632576', + 'n10633298', 'n10633450', 'n10634464', 'n10634849', 'n10634990', + 'n10635788', 'n10636488', 'n10637483', 'n10638922', 'n10639238', + 'n10639359', 'n10639637', 'n10639817', 'n10641223', 'n10642596', + 'n10642705', 'n10643095', 'n10643837', 'n10643937', 'n10644598', + 'n10645017', 'n10645223', 'n10646032', 'n10646140', 'n10646433', + 'n10646641', 'n10646780', 'n10646942', 'n10647745', 'n10648237', + 'n10648696', 'n10649197', 'n10649308', 'n10650162', 'n10652605', + 'n10652703', 'n10654015', 'n10654211', 'n10654321', 'n10654827', + 'n10654932', 'n10655169', 'n10655442', 'n10655594', 'n10655730', + 'n10655986', 'n10656120', 'n10656223', 'n10656969', 'n10657306', + 'n10657556', 'n10657835', 'n10658304', 'n10659042', 'n10659762', + 'n10660128', 'n10660621', 'n10660883', 'n10661002', 'n10661216', + 'n10661563', 'n10661732', 'n10663315', 'n10663549', 'n10665302', + 'n10665587', 'n10665698', 'n10666752', 'n10667477', 'n10667709', + 'n10667863', 'n10668450', 'n10668666', 'n10669991', 'n10671042', + 'n10671613', 'n10671736', 'n10671898', 'n10672371', 'n10672540', + 'n10672662', 'n10673296', 'n10673776', 'n10674130', 'n10674713', + 'n10675010', 'n10675142', 'n10675609', 'n10676018', 'n10676434', + 'n10676569', 'n10678937', 'n10679174', 'n10679503', 'n10679610', + 'n10679723', 'n10680609', 'n10680796', 'n10681194', 'n10681557', + 'n10682713', 'n10682953', 'n10683675', 'n10684146', 'n10684630', + 'n10684827', 'n10685398', 'n10686073', 'n10686517', 'n10686694', + 'n10686885', 'n10688356', 'n10688811', 'n10689306', 'n10690268', + 'n10690421', 'n10690648', 'n10691318', 'n10691937', 'n10692090', + 'n10692482', 'n10692883', 'n10693235', 'n10693334', 'n10693824', + 'n10694258', 'n10694939', 'n10695450', 'n10696101', 'n10696508', + 'n10697135', 'n10697282', 'n10698368', 'n10699558', 'n10699752', + 'n10699981', 'n10700105', 'n10700201', 'n10700640', 'n10700963', + 'n10701180', 'n10701644', 'n10701962', 'n10702167', 'n10702615', + 'n10703221', 'n10703336', 'n10703480', 'n10703692', 'n10704238', + 'n10704712', 'n10704886', 'n10705448', 'n10705615', 'n10706812', + 'n10707134', 'n10707233', 'n10707707', 'n10708292', 'n10708454', + 'n10709529', 'n10710171', 'n10710259', 'n10710778', 'n10710913', + 'n10711483', 'n10711766', 'n10712229', 'n10712374', 'n10712474', + 'n10712690', 'n10712835', 'n10713254', 'n10713686', 'n10713843', + 'n10714195', 'n10715030', 'n10715347', 'n10715789', 'n10716576', + 'n10716864', 'n10717055', 'n10717196', 'n10717337', 'n10718131', + 'n10718349', 'n10718509', 'n10718665', 'n10718952', 'n10719036', + 'n10719132', 'n10719267', 'n10719807', 'n10720197', 'n10720453', + 'n10720964', 'n10721124', 'n10721321', 'n10721612', 'n10721708', + 'n10721819', 'n10722029', 'n10722575', 'n10722965', 'n10723230', + 'n10723597', 
'n10724132', 'n10724372', 'n10724570', 'n10725280', + 'n10726031', 'n10726786', 'n10727016', 'n10727171', 'n10727458', + 'n10728117', 'n10728233', 'n10728624', 'n10728998', 'n10729330', + 'n10730542', 'n10730728', 'n10731013', 'n10731732', 'n10732010', + 'n10732521', 'n10732854', 'n10732967', 'n10733820', 'n10734394', + 'n10734741', 'n10734891', 'n10734963', 'n10735173', 'n10735298', + 'n10735984', 'n10737103', 'n10737264', 'n10738111', 'n10738215', + 'n10738670', 'n10738871', 'n10739135', 'n10739297', 'n10739391', + 'n10740594', 'n10740732', 'n10740868', 'n10741152', 'n10741367', + 'n10741493', 'n10742005', 'n10742111', 'n10742546', 'n10742997', + 'n10743124', 'n10743356', 'n10744078', 'n10744164', 'n10745006', + 'n10745770', 'n10746931', 'n10747119', 'n10747424', 'n10747548', + 'n10747965', 'n10748142', 'n10748506', 'n10748620', 'n10749928', + 'n10750031', 'n10750188', 'n10750640', 'n10751026', 'n10751152', + 'n10751265', 'n10751710', 'n10752480', 'n10753061', 'n10753182', + 'n10753339', 'n10753442', 'n10753989', 'n10754189', 'n10754281', + 'n10754449', 'n10755080', 'n10755164', 'n10755394', 'n10755648', + 'n10756061', 'n10756148', 'n10756261', 'n10756641', 'n10756837', + 'n10757050', 'n10757492', 'n10758337', 'n10758445', 'n10758949', + 'n10759151', 'n10759331', 'n10759982', 'n10760199', 'n10760622', + 'n10760951', 'n10761190', 'n10761326', 'n10761519', 'n10762212', + 'n10762480', 'n10763075', 'n10763245', 'n10763383', 'n10763620', + 'n10764465', 'n10764622', 'n10764719', 'n10765305', 'n10765587', + 'n10765679', 'n10765885', 'n10766260', 'n10768148', 'n10768272', + 'n10768903', 'n10769084', 'n10769188', 'n10769321', 'n10769459', + 'n10771066', 'n10772092', 'n10772580', 'n10772937', 'n10773665', + 'n10773800', 'n10774329', 'n10774756', 'n10775003', 'n10775128', + 'n10776052', 'n10776339', 'n10776887', 'n10777299', 'n10778044', + 'n10778148', 'n10778711', 'n10778999', 'n10779610', 'n10779897', + 'n10779995', 'n10780284', 'n10780632', 'n10781236', 'n10781817', + 'n10782362', 'n10782471', 'n10782791', 'n10782940', 'n10783240', + 'n10783539', 'n10783646', 'n10783734', 'n10784113', 'n10784544', + 'n10784922', 'n10785480', 'n10787470', 'n10788852', 'n10789415', + 'n10789709', 'n10791115', 'n10791221', 'n10791820', 'n10791890', + 'n10792335', 'n10792506', 'n10792856', 'n10793570', 'n10793799', + 'n10794014', 'n10801561', 'n10801802', 'n10802507', 'n10802621', + 'n10802953', 'n10803031', 'n10803282', 'n10803978', 'n10804287', + 'n10804636', 'n10804732', 'n10805501', 'n10806113', 'n10994097', + 'n11100798', 'n11196627', 'n11242849', 'n11318824', 'n11346873', + 'n11448153', 'n11487732', 'n11508382', 'n11511327', 'n11524451', + 'n11530008', 'n11531193', 'n11531334', 'n11532682', 'n11533212', + 'n11533999', 'n11536567', 'n11536673', 'n11537327', 'n11539289', + 'n11542137', 'n11542640', 'n11544015', 'n11545350', 'n11545524', + 'n11545714', 'n11547562', 'n11547855', 'n11548728', 'n11548870', + 'n11549009', 'n11549245', 'n11549779', 'n11549895', 'n11552133', + 'n11552386', 'n11552594', 'n11552806', 'n11552976', 'n11553240', + 'n11553522', 'n11596108', 'n11597657', 'n11598287', 'n11598686', + 'n11598886', 'n11599324', 'n11600372', 'n11601177', 'n11601333', + 'n11601918', 'n11602091', 'n11602478', 'n11602873', 'n11603246', + 'n11603462', 'n11603835', 'n11604046', 'n11608250', 'n11609475', + 'n11609684', 'n11609862', 'n11610047', 'n11610215', 'n11610437', + 'n11610602', 'n11610823', 'n11611087', 'n11611233', 'n11611356', + 'n11611561', 'n11611758', 'n11612018', 'n11612235', 'n11612349', + 'n11612575', 
'n11612923', 'n11613219', 'n11613459', 'n11613692', + 'n11613867', 'n11614039', 'n11614250', 'n11614420', 'n11614713', + 'n11615026', 'n11615259', 'n11615387', 'n11615607', 'n11615812', + 'n11615967', 'n11616260', 'n11616486', 'n11616662', 'n11616852', + 'n11617090', 'n11617272', 'n11617631', 'n11617878', 'n11618079', + 'n11618290', 'n11618525', 'n11618861', 'n11619227', 'n11619455', + 'n11619687', 'n11619845', 'n11620016', 'n11620389', 'n11620673', + 'n11621029', 'n11621281', 'n11621547', 'n11621727', 'n11621950', + 'n11622184', 'n11622368', 'n11622591', 'n11622771', 'n11623105', + 'n11623815', 'n11623967', 'n11624192', 'n11624531', 'n11625003', + 'n11625223', 'n11625391', 'n11625632', 'n11625804', 'n11626010', + 'n11626152', 'n11626409', 'n11626585', 'n11626826', 'n11627168', + 'n11627512', 'n11627714', 'n11627908', 'n11628087', 'n11628456', + 'n11628793', 'n11629047', 'n11629354', 'n11630017', 'n11630489', + 'n11631159', 'n11631405', 'n11631619', 'n11631854', 'n11631985', + 'n11632167', 'n11632376', 'n11632619', 'n11632929', 'n11633284', + 'n11634736', 'n11635152', 'n11635433', 'n11635830', 'n11636204', + 'n11636835', 'n11639084', 'n11639306', 'n11639445', 'n11640132', + 'n11643835', 'n11644046', 'n11644226', 'n11644462', 'n11644872', + 'n11645163', 'n11645590', 'n11645914', 'n11646167', 'n11646344', + 'n11646517', 'n11646694', 'n11646955', 'n11647306', 'n11647703', + 'n11647868', 'n11648039', 'n11648268', 'n11648776', 'n11649150', + 'n11649359', 'n11649878', 'n11650160', 'n11650307', 'n11650430', + 'n11650558', 'n11650759', 'n11652039', 'n11652217', 'n11652376', + 'n11652578', 'n11652753', 'n11652966', 'n11653126', 'n11653570', + 'n11653904', 'n11654293', 'n11654438', 'n11654984', 'n11655152', + 'n11655592', 'n11655974', 'n11656123', 'n11656549', 'n11656771', + 'n11657585', 'n11658331', 'n11658544', 'n11658709', 'n11659248', + 'n11659627', 'n11660300', 'n11661372', 'n11661909', 'n11662128', + 'n11662371', 'n11662585', 'n11662937', 'n11663263', 'n11664418', + 'n11665372', 'n11666854', 'n11668117', 'n11669786', 'n11669921', + 'n11672269', 'n11672400', 'n11674019', 'n11674332', 'n11675025', + 'n11675404', 'n11675738', 'n11676500', 'n11676743', 'n11676850', + 'n11677485', 'n11677902', 'n11678010', 'n11678299', 'n11678377', + 'n11679378', 'n11680457', 'n11680596', 'n11682659', 'n11683216', + 'n11683838', 'n11684264', 'n11684499', 'n11684654', 'n11685091', + 'n11685621', 'n11686195', 'n11686652', 'n11686780', 'n11686912', + 'n11687071', 'n11687432', 'n11687789', 'n11687964', 'n11688069', + 'n11688378', 'n11689197', 'n11689367', 'n11689483', 'n11689678', + 'n11689815', 'n11689957', 'n11690088', 'n11690254', 'n11690455', + 'n11691046', 'n11691857', 'n11692265', 'n11692792', 'n11693981', + 'n11694300', 'n11694469', 'n11694664', 'n11694866', 'n11695085', + 'n11695285', 'n11695599', 'n11695974', 'n11696450', 'n11696935', + 'n11697560', 'n11697802', 'n11698042', 'n11698245', 'n11699442', + 'n11699751', 'n11700058', 'n11700279', 'n11700864', 'n11701066', + 'n11701302', 'n11702713', 'n11703669', 'n11704093', 'n11704620', + 'n11704791', 'n11705171', 'n11705387', 'n11705573', 'n11705776', + 'n11706325', 'n11706761', 'n11706942', 'n11707229', 'n11707827', + 'n11708658', 'n11708857', 'n11709045', 'n11709205', 'n11709674', + 'n11710136', 'n11710393', 'n11710658', 'n11710827', 'n11710987', + 'n11711289', 'n11711537', 'n11711764', 'n11711971', 'n11712282', + 'n11713164', 'n11713370', 'n11713763', 'n11714382', 'n11715430', + 'n11715678', 'n11716698', 'n11717399', 'n11717577', 'n11718296', + 'n11718681', 
'n11719286', 'n11720353', 'n11720643', 'n11720891', + 'n11721337', 'n11721642', 'n11722036', 'n11722342', 'n11722466', + 'n11722621', 'n11722982', 'n11723227', 'n11723452', 'n11723770', + 'n11723986', 'n11724109', 'n11724660', 'n11725015', 'n11725311', + 'n11725480', 'n11725623', 'n11725821', 'n11725973', 'n11726145', + 'n11726269', 'n11726433', 'n11726707', 'n11727091', 'n11727358', + 'n11727540', 'n11727738', 'n11728099', 'n11728769', 'n11728945', + 'n11729142', 'n11729478', 'n11729860', 'n11730015', 'n11730458', + 'n11730602', 'n11730750', 'n11730933', 'n11731157', 'n11731659', + 'n11732052', 'n11732567', 'n11733054', 'n11733312', 'n11733548', + 'n11734493', 'n11734698', 'n11735053', 'n11735570', 'n11735977', + 'n11736362', 'n11736694', 'n11736851', 'n11737009', 'n11737125', + 'n11737534', 'n11738547', 'n11738997', 'n11739365', 'n11739978', + 'n11740414', 'n11741175', 'n11741350', 'n11741575', 'n11741797', + 'n11742310', 'n11742878', 'n11744011', 'n11744108', 'n11744471', + 'n11745817', 'n11746600', 'n11747468', 'n11748002', 'n11748811', + 'n11749112', 'n11749603', 'n11750173', 'n11750508', 'n11750989', + 'n11751765', 'n11751974', 'n11752578', 'n11752798', 'n11752937', + 'n11753143', 'n11753355', 'n11753562', 'n11753700', 'n11754893', + 'n11756092', 'n11756329', 'n11756669', 'n11756870', 'n11757017', + 'n11757190', 'n11757653', 'n11757851', 'n11758122', 'n11758276', + 'n11758483', 'n11758799', 'n11759224', 'n11759404', 'n11759609', + 'n11759853', 'n11760785', 'n11761202', 'n11761650', 'n11761836', + 'n11762018', 'n11762433', 'n11762927', 'n11763142', 'n11763625', + 'n11763874', 'n11764478', 'n11764814', 'n11765568', 'n11766046', + 'n11766189', 'n11766432', 'n11767354', 'n11767877', 'n11768816', + 'n11769176', 'n11769621', 'n11769803', 'n11770256', 'n11771147', + 'n11771539', 'n11771746', 'n11771924', 'n11772408', 'n11772879', + 'n11773408', 'n11773628', 'n11773987', 'n11774513', 'n11774972', + 'n11775340', 'n11775626', 'n11776234', 'n11777080', 'n11778092', + 'n11778257', 'n11779300', 'n11780148', 'n11780424', 'n11781176', + 'n11782036', 'n11782266', 'n11782761', 'n11782878', 'n11783162', + 'n11783920', 'n11784126', 'n11784497', 'n11785276', 'n11785668', + 'n11785875', 'n11786131', 'n11786539', 'n11786843', 'n11787190', + 'n11788039', 'n11788727', 'n11789066', 'n11789438', 'n11789589', + 'n11789962', 'n11790089', 'n11790788', 'n11790936', 'n11791341', + 'n11791569', 'n11792029', 'n11792341', 'n11792742', 'n11793403', + 'n11793779', 'n11794024', 'n11794139', 'n11794519', 'n11795049', + 'n11795216', 'n11795580', 'n11796005', 'n11796188', 'n11797321', + 'n11797508', 'n11797981', 'n11798270', 'n11798496', 'n11798688', + 'n11798978', 'n11799331', 'n11799732', 'n11800236', 'n11800565', + 'n11801392', 'n11801665', 'n11801891', 'n11802410', 'n11802586', + 'n11802800', 'n11802995', 'n11805255', 'n11805544', 'n11805956', + 'n11806219', 'n11806369', 'n11806521', 'n11806679', 'n11806814', + 'n11807108', 'n11807525', 'n11807696', 'n11807979', 'n11808299', + 'n11808468', 'n11808721', 'n11808932', 'n11809094', 'n11809271', + 'n11809437', 'n11809594', 'n11809754', 'n11810030', 'n11810358', + 'n11811059', 'n11811473', 'n11811706', 'n11811921', 'n11812094', + 'n11812910', 'n11813077', 'n11814584', 'n11814996', 'n11815491', + 'n11815721', 'n11815918', 'n11816121', 'n11816336', 'n11816649', + 'n11816829', 'n11817160', 'n11817501', 'n11817914', 'n11818069', + 'n11818636', 'n11819509', 'n11819912', 'n11820965', 'n11821184', + 'n11822300', 'n11823043', 'n11823305', 'n11823436', 'n11823756', + 'n11824146', 
'n11824344', 'n11824747', 'n11825351', 'n11825749', + 'n11826198', 'n11826569', 'n11827541', 'n11828577', 'n11828973', + 'n11829205', 'n11829672', 'n11829922', 'n11830045', 'n11830252', + 'n11830400', 'n11830714', 'n11830906', 'n11831100', 'n11831297', + 'n11831521', 'n11832214', 'n11832480', 'n11832671', 'n11832899', + 'n11833373', 'n11833749', 'n11834272', 'n11834654', 'n11834890', + 'n11835251', 'n11836327', 'n11836722', 'n11837204', 'n11837351', + 'n11837562', 'n11837743', 'n11837970', 'n11838413', 'n11838916', + 'n11839460', 'n11839568', 'n11839823', 'n11840067', 'n11840246', + 'n11840476', 'n11840764', 'n11841247', 'n11843441', 'n11844371', + 'n11844892', 'n11845557', 'n11845793', 'n11845913', 'n11846312', + 'n11846425', 'n11846765', 'n11847169', 'n11848479', 'n11848867', + 'n11849271', 'n11849467', 'n11849871', 'n11849983', 'n11850521', + 'n11850918', 'n11851258', 'n11851578', 'n11851839', 'n11852028', + 'n11852148', 'n11852531', 'n11853079', 'n11853356', 'n11853813', + 'n11854479', 'n11855274', 'n11855435', 'n11855553', 'n11855842', + 'n11856573', 'n11857696', 'n11857875', 'n11858077', 'n11858703', + 'n11858814', 'n11859275', 'n11859472', 'n11859737', 'n11860208', + 'n11860555', 'n11861238', 'n11861487', 'n11861641', 'n11861853', + 'n11862835', 'n11863467', 'n11863877', 'n11865071', 'n11865276', + 'n11865429', 'n11865574', 'n11865874', 'n11866248', 'n11866706', + 'n11867311', 'n11868814', 'n11869351', 'n11869689', 'n11870044', + 'n11870418', 'n11870747', 'n11871059', 'n11871496', 'n11871748', + 'n11872146', 'n11872324', 'n11872658', 'n11873182', 'n11873612', + 'n11874081', 'n11874423', 'n11874878', 'n11875523', 'n11875691', + 'n11875938', 'n11876204', 'n11876432', 'n11876634', 'n11876803', + 'n11877193', 'n11877283', 'n11877473', 'n11877646', 'n11877860', + 'n11878101', 'n11878283', 'n11878633', 'n11879054', 'n11879722', + 'n11879895', 'n11881189', 'n11882074', 'n11882237', 'n11882426', + 'n11882636', 'n11882821', 'n11882972', 'n11883328', 'n11883628', + 'n11883945', 'n11884384', 'n11884967', 'n11885856', 'n11887119', + 'n11887310', 'n11887476', 'n11887750', 'n11888061', 'n11888424', + 'n11888800', 'n11889205', 'n11889619', 'n11890022', 'n11890150', + 'n11890884', 'n11891175', 'n11892029', 'n11892181', 'n11892637', + 'n11892817', 'n11893640', 'n11893916', 'n11894327', 'n11894558', + 'n11894770', 'n11895092', 'n11895472', 'n11895714', 'n11896141', + 'n11896722', 'n11897116', 'n11897466', 'n11898639', 'n11898775', + 'n11899223', 'n11899762', 'n11899921', 'n11900569', 'n11901294', + 'n11901452', 'n11901597', 'n11901759', 'n11901977', 'n11902200', + 'n11902389', 'n11902709', 'n11902982', 'n11903333', 'n11903671', + 'n11904109', 'n11904274', 'n11905392', 'n11905749', 'n11906127', + 'n11906514', 'n11906917', 'n11907100', 'n11907405', 'n11907689', + 'n11908549', 'n11908846', 'n11909864', 'n11910271', 'n11910460', + 'n11910666', 'n11915214', 'n11915658', 'n11915899', 'n11916467', + 'n11916696', 'n11917407', 'n11917835', 'n11918286', 'n11918473', + 'n11918808', 'n11919447', 'n11919761', 'n11919975', 'n11920133', + 'n11920498', 'n11920663', 'n11920998', 'n11921395', 'n11921792', + 'n11922661', 'n11922755', 'n11922839', 'n11922926', 'n11923174', + 'n11923397', 'n11923637', 'n11924014', 'n11924445', 'n11924849', + 'n11925303', 'n11925450', 'n11925898', 'n11926365', 'n11926833', + 'n11926976', 'n11927215', 'n11927740', 'n11928352', 'n11928858', + 'n11929743', 'n11930038', 'n11930203', 'n11930353', 'n11930571', + 'n11930788', 'n11930994', 'n11931135', 'n11931540', 'n11931918', + 'n11932745', 
'n11932927', 'n11933099', 'n11933257', 'n11933387', + 'n11933546', 'n11933728', 'n11933903', 'n11934041', 'n11934239', + 'n11934463', 'n11934616', 'n11934807', 'n11935027', 'n11935187', + 'n11935330', 'n11935469', 'n11935627', 'n11935715', 'n11935794', + 'n11935877', 'n11935953', 'n11936027', 'n11936113', 'n11936199', + 'n11936287', 'n11936369', 'n11936448', 'n11936539', 'n11936624', + 'n11936707', 'n11936782', 'n11936864', 'n11936946', 'n11937023', + 'n11937102', 'n11937195', 'n11937278', 'n11937360', 'n11937446', + 'n11937692', 'n11938556', 'n11939180', 'n11939491', 'n11939699', + 'n11940006', 'n11940349', 'n11940599', 'n11940750', 'n11941094', + 'n11941478', 'n11941924', 'n11942659', 'n11943133', 'n11943407', + 'n11943660', 'n11943992', 'n11944196', 'n11944751', 'n11944954', + 'n11945367', 'n11945514', 'n11945783', 'n11946051', 'n11946313', + 'n11946727', 'n11946918', 'n11947251', 'n11947629', 'n11947802', + 'n11948044', 'n11948264', 'n11948469', 'n11948864', 'n11949015', + 'n11949402', 'n11949857', 'n11950345', 'n11950686', 'n11950877', + 'n11951052', 'n11951511', 'n11951820', 'n11952346', 'n11952541', + 'n11953038', 'n11953339', 'n11953610', 'n11953884', 'n11954161', + 'n11954345', 'n11954484', 'n11954642', 'n11954798', 'n11955040', + 'n11955153', 'n11955532', 'n11955896', 'n11956348', 'n11956850', + 'n11957317', 'n11957514', 'n11957678', 'n11958080', 'n11958499', + 'n11958888', 'n11959259', 'n11959632', 'n11959862', 'n11960245', + 'n11960673', 'n11961100', 'n11961446', 'n11961871', 'n11962272', + 'n11962667', 'n11962994', 'n11963572', 'n11963932', 'n11964446', + 'n11964848', 'n11965218', 'n11965627', 'n11965962', 'n11966083', + 'n11966215', 'n11966385', 'n11966617', 'n11966896', 'n11967142', + 'n11967315', 'n11967744', 'n11967878', 'n11968519', 'n11968704', + 'n11968931', 'n11969166', 'n11969607', 'n11969806', 'n11970101', + 'n11970298', 'n11970586', 'n11971248', 'n11971406', 'n11971783', + 'n11971927', 'n11972291', 'n11972759', 'n11972959', 'n11973341', + 'n11973634', 'n11973749', 'n11974373', 'n11974557', 'n11974888', + 'n11975254', 'n11976170', 'n11976314', 'n11976511', 'n11976933', + 'n11977303', 'n11977660', 'n11977887', 'n11978233', 'n11978551', + 'n11978713', 'n11978961', 'n11979187', 'n11979354', 'n11979527', + 'n11979715', 'n11979964', 'n11980318', 'n11980682', 'n11981192', + 'n11981475', 'n11982115', 'n11982545', 'n11982939', 'n11983375', + 'n11983606', 'n11984144', 'n11984542', 'n11985053', 'n11985321', + 'n11985739', 'n11985903', 'n11986511', 'n11986729', 'n11987126', + 'n11987349', 'n11987511', 'n11988132', 'n11988596', 'n11988893', + 'n11989087', 'n11989393', 'n11989869', 'n11990167', 'n11990313', + 'n11990627', 'n11990920', 'n11991263', 'n11991549', 'n11991777', + 'n11992479', 'n11992806', 'n11993203', 'n11993444', 'n11993675', + 'n11994150', 'n11995092', 'n11995396', 'n11996251', 'n11996677', + 'n11997032', 'n11997160', 'n11997969', 'n11998492', 'n11998888', + 'n11999278', 'n11999656', 'n12000191', 'n12001294', 'n12001707', + 'n12001924', 'n12002428', 'n12002651', 'n12002826', 'n12003167', + 'n12003696', 'n12004120', 'n12004547', 'n12004987', 'n12005656', + 'n12006306', 'n12006766', 'n12006930', 'n12007196', 'n12007406', + 'n12007766', 'n12008252', 'n12008487', 'n12008749', 'n12009047', + 'n12009420', 'n12009792', 'n12010628', 'n12010815', 'n12011370', + 'n12011620', 'n12012111', 'n12012253', 'n12012510', 'n12013035', + 'n12013511', 'n12013701', 'n12014085', 'n12014355', 'n12014923', + 'n12015221', 'n12015525', 'n12015959', 'n12016434', 'n12016567', + 'n12016777', 
'n12016914', 'n12017127', 'n12017326', 'n12017511', + 'n12017664', 'n12017853', 'n12018014', 'n12018100', 'n12018188', + 'n12018271', 'n12018363', 'n12018447', 'n12018530', 'n12018760', + 'n12019035', 'n12019827', 'n12020184', 'n12020507', 'n12020736', + 'n12020941', 'n12022054', 'n12022382', 'n12022821', 'n12023108', + 'n12023407', 'n12023726', 'n12024176', 'n12024445', 'n12024690', + 'n12024805', 'n12025220', 'n12026018', 'n12026476', 'n12026981', + 'n12027222', 'n12027658', 'n12028424', 'n12029039', 'n12029635', + 'n12030092', 'n12030654', 'n12030908', 'n12031139', 'n12031388', + 'n12031547', 'n12031927', 'n12032429', 'n12032686', 'n12033139', + 'n12033504', 'n12033709', 'n12034141', 'n12034384', 'n12034594', + 'n12035631', 'n12035907', 'n12036067', 'n12036226', 'n12036939', + 'n12037499', 'n12037691', 'n12038038', 'n12038208', 'n12038406', + 'n12038585', 'n12038760', 'n12038898', 'n12039317', 'n12041446', + 'n12043444', 'n12043673', 'n12043836', 'n12044041', 'n12044467', + 'n12044784', 'n12045157', 'n12045514', 'n12045860', 'n12046028', + 'n12046428', 'n12046815', 'n12047345', 'n12047884', 'n12048056', + 'n12048399', 'n12048928', 'n12049282', 'n12049562', 'n12050533', + 'n12050959', 'n12051103', 'n12051514', 'n12051792', 'n12052267', + 'n12052447', 'n12052787', 'n12053405', 'n12053690', 'n12053962', + 'n12054195', 'n12055073', 'n12055516', 'n12056099', 'n12056217', + 'n12056601', 'n12056758', 'n12056990', 'n12057211', 'n12057447', + 'n12057660', 'n12057895', 'n12058192', 'n12058630', 'n12058822', + 'n12059314', 'n12059625', 'n12060546', 'n12061104', 'n12061380', + 'n12061614', 'n12062105', 'n12062468', 'n12062626', 'n12062781', + 'n12063211', 'n12063639', 'n12064389', 'n12064591', 'n12065316', + 'n12065649', 'n12065777', 'n12066018', 'n12066261', 'n12066451', + 'n12066630', 'n12066821', 'n12067029', 'n12067193', 'n12067433', + 'n12067672', 'n12067817', 'n12068138', 'n12068432', 'n12068615', + 'n12069009', 'n12069217', 'n12069679', 'n12070016', 'n12070381', + 'n12070583', 'n12070712', 'n12071259', 'n12071477', 'n12071744', + 'n12072210', 'n12072722', 'n12073217', 'n12073554', 'n12073991', + 'n12074408', 'n12074867', 'n12075010', 'n12075151', 'n12075299', + 'n12075830', 'n12076223', 'n12076577', 'n12076852', 'n12077244', + 'n12077944', 'n12078172', 'n12078451', 'n12078747', 'n12079120', + 'n12079523', 'n12079963', 'n12080395', 'n12080588', 'n12080820', + 'n12081215', 'n12081649', 'n12082131', 'n12083113', 'n12083591', + 'n12083847', 'n12084158', 'n12084400', 'n12084555', 'n12084890', + 'n12085267', 'n12085664', 'n12086012', 'n12086192', 'n12086539', + 'n12086778', 'n12087961', 'n12088223', 'n12088327', 'n12088495', + 'n12088909', 'n12089320', 'n12089496', 'n12089846', 'n12090890', + 'n12091213', 'n12091377', 'n12091550', 'n12091697', 'n12091953', + 'n12092262', 'n12092417', 'n12092629', 'n12092930', 'n12093329', + 'n12093600', 'n12093885', 'n12094244', 'n12094401', 'n12094612', + 'n12095020', 'n12095281', 'n12095412', 'n12095543', 'n12095647', + 'n12095934', 'n12096089', 'n12096395', 'n12096563', 'n12096674', + 'n12097396', 'n12097556', 'n12098403', 'n12098524', 'n12098827', + 'n12099342', 'n12100187', 'n12101870', 'n12102133', 'n12103680', + 'n12103894', 'n12104104', 'n12104238', 'n12104501', 'n12104734', + 'n12105125', 'n12105353', 'n12105828', 'n12105981', 'n12106134', + 'n12106323', 'n12107002', 'n12107191', 'n12107710', 'n12107970', + 'n12108432', 'n12108613', 'n12108871', 'n12109365', 'n12109827', + 'n12110085', 'n12110236', 'n12110352', 'n12110475', 'n12110778', + 'n12111238', 
'n12111627', 'n12112008', 'n12112337', 'n12112609', + 'n12112918', 'n12113195', 'n12113323', 'n12113657', 'n12114010', + 'n12114590', 'n12115180', 'n12116058', 'n12116429', 'n12116734', + 'n12117017', 'n12117235', 'n12117326', 'n12117695', 'n12117912', + 'n12118414', 'n12118661', 'n12119099', 'n12119238', 'n12119390', + 'n12119539', 'n12119717', 'n12120347', 'n12120578', 'n12121033', + 'n12121187', 'n12121610', 'n12122442', 'n12122725', 'n12122918', + 'n12123648', 'n12123741', 'n12124172', 'n12124627', 'n12124818', + 'n12125001', 'n12125183', 'n12125584', 'n12126084', 'n12126360', + 'n12126736', 'n12127460', 'n12127575', 'n12127768', 'n12128071', + 'n12128306', 'n12128490', 'n12129134', 'n12129738', 'n12129986', + 'n12130549', 'n12131405', 'n12131550', 'n12132092', 'n12132956', + 'n12133151', 'n12133462', 'n12133682', 'n12134025', 'n12134486', + 'n12134695', 'n12134836', 'n12135049', 'n12135576', 'n12135729', + 'n12135898', 'n12136392', 'n12136581', 'n12136720', 'n12137120', + 'n12137569', 'n12137791', 'n12137954', 'n12138110', 'n12138248', + 'n12138444', 'n12138578', 'n12139196', 'n12139575', 'n12139793', + 'n12139921', 'n12140511', 'n12140759', 'n12140903', 'n12141167', + 'n12141385', 'n12141495', 'n12142085', 'n12142357', 'n12142450', + 'n12143065', 'n12143215', 'n12143405', 'n12143676', 'n12144313', + 'n12144580', 'n12144987', 'n12145148', 'n12145477', 'n12146311', + 'n12146488', 'n12146654', 'n12147226', 'n12147835', 'n12148757', + 'n12150722', 'n12150969', 'n12151170', 'n12151615', 'n12152031', + 'n12152251', 'n12152532', 'n12152722', 'n12153033', 'n12153224', + 'n12153580', 'n12153741', 'n12153914', 'n12154114', 'n12154773', + 'n12155009', 'n12155583', 'n12155773', 'n12156679', 'n12156819', + 'n12157056', 'n12157179', 'n12157769', 'n12158031', 'n12158443', + 'n12158798', 'n12159055', 'n12159388', 'n12159555', 'n12159804', + 'n12159942', 'n12160125', 'n12160303', 'n12160490', 'n12160857', + 'n12161056', 'n12161285', 'n12161577', 'n12161744', 'n12161969', + 'n12162181', 'n12162425', 'n12162758', 'n12163035', 'n12163279', + 'n12164363', 'n12164656', 'n12164881', 'n12165170', 'n12165384', + 'n12165758', 'n12166128', 'n12166424', 'n12166793', 'n12166929', + 'n12167075', 'n12167436', 'n12167602', 'n12168565', 'n12169099', + 'n12170585', 'n12171098', 'n12171316', 'n12171966', 'n12172364', + 'n12172481', 'n12172906', 'n12173069', 'n12173664', 'n12173912', + 'n12174311', 'n12174521', 'n12174926', 'n12175181', 'n12175370', + 'n12175598', 'n12176453', 'n12176709', 'n12176953', 'n12177129', + 'n12177455', 'n12178129', 'n12178780', 'n12178896', 'n12179122', + 'n12179632', 'n12180168', 'n12180456', 'n12180885', 'n12181352', + 'n12181612', 'n12182049', 'n12182276', 'n12183026', 'n12183452', + 'n12183816', 'n12184095', 'n12184468', 'n12184912', 'n12185254', + 'n12185859', 'n12186352', 'n12186554', 'n12186839', 'n12187247', + 'n12187663', 'n12187891', 'n12188289', 'n12188635', 'n12189429', + 'n12189779', 'n12189987', 'n12190410', 'n12190869', 'n12191240', + 'n12192132', 'n12192877', 'n12193334', 'n12193665', 'n12194147', + 'n12194613', 'n12195391', 'n12195533', 'n12195734', 'n12196129', + 'n12196336', 'n12196527', 'n12196694', 'n12196954', 'n12197359', + 'n12197601', 'n12198286', 'n12198793', 'n12199266', 'n12199399', + 'n12199790', 'n12199982', 'n12200143', 'n12200504', 'n12200905', + 'n12201331', 'n12201580', 'n12201938', 'n12202936', 'n12203529', + 'n12203699', 'n12203896', 'n12204032', 'n12204175', 'n12204730', + 'n12205460', 'n12205694', 'n12214789', 'n12215022', 'n12215210', + 'n12215579', 
'n12215824', 'n12216215', 'n12216628', 'n12216968', + 'n12217453', 'n12217851', 'n12218274', 'n12218490', 'n12218868', + 'n12219668', 'n12220019', 'n12220496', 'n12220829', 'n12221191', + 'n12221368', 'n12221522', 'n12221801', 'n12222090', 'n12222493', + 'n12222900', 'n12223160', 'n12223569', 'n12223764', 'n12224978', + 'n12225222', 'n12225349', 'n12225563', 'n12226932', 'n12227658', + 'n12227909', 'n12228229', 'n12228387', 'n12228689', 'n12228886', + 'n12229111', 'n12229651', 'n12229887', 'n12230540', 'n12230794', + 'n12231192', 'n12231709', 'n12232114', 'n12232280', 'n12232851', + 'n12233249', 'n12234318', 'n12234669', 'n12235051', 'n12235479', + 'n12236160', 'n12236546', 'n12236768', 'n12236977', 'n12237152', + 'n12237486', 'n12237641', 'n12237855', 'n12238756', 'n12238913', + 'n12239240', 'n12239647', 'n12239880', 'n12240150', 'n12240477', + 'n12240965', 'n12241192', 'n12241426', 'n12241880', 'n12242123', + 'n12242409', 'n12242850', 'n12243109', 'n12243693', 'n12244153', + 'n12244458', 'n12244650', 'n12244819', 'n12245319', 'n12245695', + 'n12245885', 'n12246037', 'n12246232', 'n12246773', 'n12246941', + 'n12247202', 'n12247407', 'n12247963', 'n12248141', 'n12248359', + 'n12248574', 'n12248780', 'n12248941', 'n12249122', 'n12249294', + 'n12249542', 'n12251001', 'n12251278', 'n12251740', 'n12252168', + 'n12252383', 'n12252866', 'n12253229', 'n12253487', 'n12253664', + 'n12253835', 'n12254168', 'n12255225', 'n12256112', 'n12256325', + 'n12256522', 'n12256708', 'n12256920', 'n12257570', 'n12257725', + 'n12258101', 'n12258885', 'n12259316', 'n12260799', 'n12261359', + 'n12261571', 'n12261808', 'n12262018', 'n12262185', 'n12262553', + 'n12263038', 'n12263204', 'n12263410', 'n12263588', 'n12263738', + 'n12263987', 'n12264512', 'n12264786', 'n12265083', 'n12265394', + 'n12265600', 'n12266217', 'n12266528', 'n12266644', 'n12266796', + 'n12266984', 'n12267133', 'n12267265', 'n12267411', 'n12267534', + 'n12267677', 'n12267931', 'n12268246', 'n12269241', 'n12269406', + 'n12269652', 'n12270027', 'n12270278', 'n12270460', 'n12270741', + 'n12270946', 'n12271187', 'n12271451', 'n12271643', 'n12271933', + 'n12272239', 'n12272432', 'n12272735', 'n12272883', 'n12273114', + 'n12273344', 'n12273515', 'n12273768', 'n12273939', 'n12274151', + 'n12274358', 'n12274630', 'n12274863', 'n12275131', 'n12275317', + 'n12275489', 'n12275675', 'n12275888', 'n12276110', 'n12276314', + 'n12276477', 'n12276628', 'n12276872', 'n12277150', 'n12277334', + 'n12277578', 'n12277800', 'n12278107', 'n12278371', 'n12278650', + 'n12278865', 'n12279060', 'n12279293', 'n12279458', 'n12279772', + 'n12280060', 'n12280364', 'n12281241', 'n12281788', 'n12281974', + 'n12282235', 'n12282527', 'n12282737', 'n12282933', 'n12283147', + 'n12283395', 'n12283542', 'n12283790', 'n12284262', 'n12284821', + 'n12285049', 'n12285195', 'n12285369', 'n12285512', 'n12285705', + 'n12285900', 'n12286068', 'n12286197', 'n12286826', 'n12286988', + 'n12287195', 'n12287642', 'n12287836', 'n12288005', 'n12288823', + 'n12289310', 'n12289433', 'n12289585', 'n12290748', 'n12290975', + 'n12291143', 'n12291459', 'n12291671', 'n12291959', 'n12292463', + 'n12292877', 'n12293723', 'n12294124', 'n12294331', 'n12294542', + 'n12294723', 'n12294871', 'n12295033', 'n12295237', 'n12295429', + 'n12295796', 'n12296045', 'n12296432', 'n12296735', 'n12296929', + 'n12297110', 'n12297280', 'n12297507', 'n12297846', 'n12298165', + 'n12299640', 'n12300840', 'n12301180', 'n12301445', 'n12301613', + 'n12301766', 'n12302071', 'n12302248', 'n12302565', 'n12303083', + 'n12303462', 
'n12304115', 'n12304286', 'n12304420', 'n12304703', + 'n12304899', 'n12305089', 'n12305293', 'n12305475', 'n12305654', + 'n12305819', 'n12305986', 'n12306089', 'n12306270', 'n12306717', + 'n12306938', 'n12307076', 'n12307240', 'n12307756', 'n12308112', + 'n12308447', 'n12308907', 'n12309277', 'n12309630', 'n12310021', + 'n12310349', 'n12310638', 'n12311045', 'n12311224', 'n12311413', + 'n12311579', 'n12312110', 'n12312728', 'n12315060', 'n12315245', + 'n12315598', 'n12315999', 'n12316444', 'n12316572', 'n12317296', + 'n12318378', 'n12318782', 'n12318965', 'n12319204', 'n12319414', + 'n12320010', 'n12320414', 'n12320627', 'n12320806', 'n12321077', + 'n12321395', 'n12321669', 'n12321873', 'n12322099', 'n12322501', + 'n12322699', 'n12323665', 'n12324056', 'n12324222', 'n12324388', + 'n12324558', 'n12324906', 'n12325234', 'n12325787', 'n12327022', + 'n12327528', 'n12327846', 'n12328398', 'n12328567', 'n12328801', + 'n12329260', 'n12329473', 'n12330239', 'n12330469', 'n12330587', + 'n12330891', 'n12331066', 'n12331263', 'n12331655', 'n12331788', + 'n12332030', 'n12332218', 'n12332555', 'n12333053', 'n12333530', + 'n12333771', 'n12333961', 'n12334153', 'n12334293', 'n12334891', + 'n12335483', 'n12335664', 'n12335800', 'n12335937', 'n12336092', + 'n12336224', 'n12336333', 'n12336586', 'n12336727', 'n12336973', + 'n12337131', 'n12337246', 'n12337391', 'n12337617', 'n12337800', + 'n12337922', 'n12338034', 'n12338146', 'n12338258', 'n12338454', + 'n12338655', 'n12338796', 'n12338979', 'n12339526', 'n12339831', + 'n12340383', 'n12340581', 'n12340755', 'n12341542', 'n12341931', + 'n12342299', 'n12342498', 'n12342852', 'n12343480', 'n12343753', + 'n12344283', 'n12344483', 'n12344700', 'n12344837', 'n12345280', + 'n12345899', 'n12346578', 'n12346813', 'n12346986', 'n12347158', + 'n12349315', 'n12349711', 'n12350032', 'n12350758', 'n12351091', + 'n12351790', 'n12352287', 'n12352639', 'n12352844', 'n12352990', + 'n12353203', 'n12353431', 'n12353754', 'n12355760', 'n12356023', + 'n12356395', 'n12356960', 'n12357485', 'n12357968', 'n12358293', + 'n12360108', 'n12360534', 'n12360684', 'n12360817', 'n12360958', + 'n12361135', 'n12361560', 'n12361754', 'n12361946', 'n12362274', + 'n12362514', 'n12362668', 'n12363301', 'n12363768', 'n12364604', + 'n12364940', 'n12365158', 'n12365285', 'n12365462', 'n12365900', + 'n12366053', 'n12366186', 'n12366313', 'n12366675', 'n12366870', + 'n12367611', 'n12368028', 'n12368257', 'n12368451', 'n12369066', + 'n12369309', 'n12369476', 'n12369665', 'n12369845', 'n12370174', + 'n12370549', 'n12371202', 'n12371439', 'n12371704', 'n12372233', + 'n12373100', 'n12373739', 'n12374418', 'n12374705', 'n12374862', + 'n12375769', 'n12377198', 'n12377494', 'n12378249', 'n12378753', + 'n12378963', 'n12379531', 'n12380761', 'n12381511', 'n12382233', + 'n12382875', 'n12383737', 'n12383894', 'n12384037', 'n12384227', + 'n12384375', 'n12384569', 'n12384680', 'n12384839', 'n12385429', + 'n12385566', 'n12385830', 'n12386945', 'n12387103', 'n12387633', + 'n12387839', 'n12388143', 'n12388293', 'n12388858', 'n12388989', + 'n12389130', 'n12389501', 'n12389727', 'n12389932', 'n12390099', + 'n12390314', 'n12392070', 'n12392549', 'n12392765', 'n12393269', + 'n12394118', 'n12394328', 'n12394638', 'n12395068', 'n12395289', + 'n12395463', 'n12395906', 'n12396091', 'n12396924', 'n12397431', + 'n12399132', 'n12399384', 'n12399534', 'n12399656', 'n12399899', + 'n12400489', 'n12400720', 'n12400924', 'n12401335', 'n12401684', + 'n12401893', 'n12402051', 'n12402348', 'n12402596', 'n12402840', + 'n12403075', 
'n12403276', 'n12403513', 'n12403994', 'n12404729', + 'n12405714', 'n12406304', 'n12406488', 'n12406715', 'n12406902', + 'n12407079', 'n12407222', 'n12407396', 'n12407545', 'n12407715', + 'n12407890', 'n12408077', 'n12408280', 'n12408466', 'n12408717', + 'n12408873', 'n12409231', 'n12409470', 'n12409651', 'n12409840', + 'n12411461', 'n12412355', 'n12412606', 'n12412987', 'n12413165', + 'n12413301', 'n12413419', 'n12413642', 'n12413880', 'n12414035', + 'n12414159', 'n12414329', 'n12414449', 'n12414818', 'n12414932', + 'n12415595', 'n12416073', 'n12416423', 'n12416703', 'n12417836', + 'n12418221', 'n12418507', 'n12419037', 'n12419878', 'n12420124', + 'n12420535', 'n12420722', 'n12421137', 'n12421467', 'n12421683', + 'n12421917', 'n12422129', 'n12422559', 'n12425281', 'n12426623', + 'n12426749', 'n12427184', 'n12427391', 'n12427566', 'n12427757', + 'n12427946', 'n12428076', 'n12428242', 'n12428412', 'n12428747', + 'n12429352', 'n12430198', 'n12430471', 'n12430675', 'n12431434', + 'n12432069', 'n12432356', 'n12432574', 'n12432707', 'n12433081', + 'n12433178', 'n12433769', 'n12433952', 'n12434106', 'n12434483', + 'n12434634', 'n12434775', 'n12434985', 'n12435152', 'n12435486', + 'n12435649', 'n12435777', 'n12435965', 'n12436090', 'n12436907', + 'n12437513', 'n12437769', 'n12437930', 'n12439154', 'n12439830', + 'n12441183', 'n12441390', 'n12441552', 'n12441958', 'n12442548', + 'n12443323', 'n12443736', 'n12444095', 'n12444898', 'n12446200', + 'n12446519', 'n12446737', 'n12446908', 'n12447121', 'n12447346', + 'n12447581', 'n12447891', 'n12448136', 'n12448361', 'n12448700', + 'n12449296', 'n12449526', 'n12449784', 'n12449934', 'n12450344', + 'n12450607', 'n12450840', 'n12451070', 'n12451240', 'n12451399', + 'n12451566', 'n12451915', 'n12452256', 'n12452480', 'n12452673', + 'n12452836', 'n12453018', 'n12453186', 'n12453714', 'n12453857', + 'n12454159', 'n12454436', 'n12454556', 'n12454705', 'n12454793', + 'n12454949', 'n12455950', 'n12457091', 'n12458550', 'n12458713', + 'n12458874', 'n12459629', 'n12460146', 'n12460697', 'n12460957', + 'n12461109', 'n12461466', 'n12461673', 'n12462032', 'n12462221', + 'n12462582', 'n12462805', 'n12463134', 'n12463743', 'n12463975', + 'n12464128', 'n12464476', 'n12464649', 'n12465557', 'n12466727', + 'n12467018', 'n12467197', 'n12467433', 'n12467592', 'n12468545', + 'n12468719', 'n12469517', 'n12470092', 'n12470512', 'n12470907', + 'n12472024', 'n12473608', 'n12473840', 'n12474167', 'n12474418', + 'n12475035', 'n12475242', 'n12475774', 'n12476510', 'n12477163', + 'n12477401', 'n12477583', 'n12477747', 'n12477983', 'n12478768', + 'n12479537', 'n12480456', 'n12480895', 'n12481150', 'n12481289', + 'n12481458', 'n12482437', 'n12482668', 'n12482893', 'n12483282', + 'n12483427', 'n12483625', 'n12483841', 'n12484244', 'n12484784', + 'n12485653', 'n12485981', 'n12486574', 'n12487058', 'n12488454', + 'n12488709', 'n12489046', 'n12489676', 'n12489815', 'n12490490', + 'n12491017', 'n12491435', 'n12491826', 'n12492106', 'n12492460', + 'n12492682', 'n12492900', 'n12493208', 'n12493426', 'n12493868', + 'n12494794', 'n12495146', 'n12495670', 'n12495895', 'n12496427', + 'n12496949', 'n12497669', 'n12498055', 'n12498457', 'n12499163', + 'n12499757', 'n12499979', 'n12500309', 'n12500518', 'n12500751', + 'n12501202', 'n12504570', 'n12504783', 'n12505253', 'n12506181', + 'n12506341', 'n12506991', 'n12507379', 'n12507823', 'n12508309', + 'n12508618', 'n12508762', 'n12509109', 'n12509476', 'n12509665', + 'n12509821', 'n12509993', 'n12510343', 'n12510774', 'n12511488', + 'n12511856', 
'n12512095', 'n12512294', 'n12512674', 'n12513172', + 'n12513613', 'n12513933', 'n12514138', 'n12514592', 'n12514992', + 'n12515393', 'n12515711', 'n12515925', 'n12516165', 'n12516584', + 'n12516828', 'n12517077', 'n12517445', 'n12517642', 'n12518013', + 'n12518481', 'n12519089', 'n12519563', 'n12520406', 'n12521186', + 'n12521394', 'n12522188', 'n12522678', 'n12522894', 'n12523141', + 'n12523475', 'n12523850', 'n12524188', 'n12525168', 'n12525513', + 'n12525753', 'n12526178', 'n12526516', 'n12526754', 'n12527081', + 'n12527738', 'n12528109', 'n12528382', 'n12528549', 'n12528768', + 'n12528974', 'n12529220', 'n12529500', 'n12529905', 'n12530629', + 'n12530818', 'n12531328', 'n12531727', 'n12532564', 'n12532886', + 'n12533190', 'n12533437', 'n12534208', 'n12534625', 'n12534862', + 'n12536291', 'n12537253', 'n12537569', 'n12538209', 'n12539074', + 'n12539306', 'n12539832', 'n12540250', 'n12540647', 'n12540966', + 'n12541157', 'n12541403', 'n12542043', 'n12542240', 'n12543186', + 'n12543455', 'n12543639', 'n12543826', 'n12544240', 'n12544539', + 'n12545232', 'n12545635', 'n12545865', 'n12546183', 'n12546420', + 'n12546617', 'n12546962', 'n12547215', 'n12547503', 'n12548280', + 'n12548564', 'n12548804', 'n12549005', 'n12549192', 'n12549420', + 'n12549799', 'n12550210', 'n12550408', 'n12551173', 'n12551457', + 'n12552309', 'n12552893', 'n12553742', 'n12554029', 'n12554526', + 'n12554729', 'n12554911', 'n12555255', 'n12555859', 'n12556656', + 'n12557064', 'n12557438', 'n12557556', 'n12557681', 'n12558230', + 'n12558425', 'n12558680', 'n12559044', 'n12559518', 'n12560282', + 'n12560621', 'n12560775', 'n12561169', 'n12561309', 'n12561594', + 'n12562141', 'n12562577', 'n12562785', 'n12563045', 'n12563702', + 'n12564083', 'n12564613', 'n12565102', 'n12565912', 'n12566331', + 'n12566954', 'n12567950', 'n12568186', 'n12568649', 'n12569037', + 'n12569616', 'n12569851', 'n12570394', 'n12570703', 'n12570972', + 'n12571781', 'n12572546', 'n12572759', 'n12572858', 'n12573256', + 'n12573474', 'n12573647', 'n12573911', 'n12574320', 'n12574470', + 'n12574866', 'n12575322', 'n12575812', 'n12576323', 'n12576451', + 'n12576695', 'n12577362', 'n12577895', 'n12578255', 'n12578626', + 'n12578916', 'n12579038', 'n12579404', 'n12579822', 'n12580012', + 'n12580654', 'n12580786', 'n12580896', 'n12581110', 'n12582231', + 'n12582665', 'n12582846', 'n12583126', 'n12583401', 'n12583681', + 'n12583855', 'n12584191', 'n12584365', 'n12584715', 'n12585137', + 'n12585373', 'n12585629', 'n12586298', 'n12586499', 'n12586725', + 'n12586989', 'n12587132', 'n12587487', 'n12587803', 'n12588320', + 'n12588780', 'n12589142', 'n12589458', 'n12589687', 'n12589841', + 'n12590232', 'n12590499', 'n12590600', 'n12590715', 'n12591017', + 'n12591351', 'n12591702', 'n12592058', 'n12592544', 'n12592839', + 'n12593122', 'n12593341', 'n12593994', 'n12594324', 'n12594989', + 'n12595699', 'n12595964', 'n12596148', 'n12596345', 'n12596709', + 'n12596849', 'n12597134', 'n12597466', 'n12597798', 'n12598027', + 'n12599185', 'n12599435', 'n12599661', 'n12599874', 'n12600095', + 'n12600267', 'n12601494', 'n12601805', 'n12602262', 'n12602434', + 'n12602612', 'n12602980', 'n12603273', 'n12603449', 'n12603672', + 'n12604228', 'n12604460', 'n12604639', 'n12604845', 'n12605683', + 'n12606438', 'n12606545', 'n12607456', 'n12609379', 'n12610328', + 'n12610740', 'n12611640', 'n12612170', 'n12612811', 'n12613706', + 'n12614096', 'n12614477', 'n12614625', 'n12615232', 'n12615710', + 'n12616248', 'n12616630', 'n12616996', 'n12617559', 'n12618146', + 'n12618727', 
'n12620196', 'n12620546', 'n12620969', 'n12621410', + 'n12621619', 'n12621945', 'n12622297', 'n12622875', 'n12623077', + 'n12623211', 'n12623818', 'n12624381', 'n12624568', 'n12625003', + 'n12625383', 'n12625670', 'n12625823', 'n12626674', 'n12626878', + 'n12627119', 'n12627347', 'n12627526', 'n12628356', 'n12628705', + 'n12628986', 'n12629305', 'n12629666', 'n12630763', 'n12630999', + 'n12631331', 'n12631637', 'n12631932', 'n12632335', 'n12632733', + 'n12633061', 'n12633638', 'n12633994', 'n12634211', 'n12634429', + 'n12634734', 'n12634986', 'n12635151', 'n12635359', 'n12635532', + 'n12635744', 'n12635955', 'n12636224', 'n12636885', 'n12637123', + 'n12637485', 'n12638218', 'n12638556', 'n12638753', 'n12638964', + 'n12639168', 'n12639376', 'n12639584', 'n12639736', 'n12639910', + 'n12640081', 'n12640284', 'n12640435', 'n12640607', 'n12640839', + 'n12641007', 'n12641180', 'n12641413', 'n12641931', 'n12642090', + 'n12642200', 'n12642435', 'n12642600', 'n12642964', 'n12643113', + 'n12643313', 'n12643473', 'n12643688', 'n12643877', 'n12644283', + 'n12644902', 'n12645174', 'n12645530', 'n12646072', 'n12646197', + 'n12646397', 'n12646605', 'n12646740', 'n12646950', 'n12647231', + 'n12647376', 'n12647560', 'n12647787', 'n12647893', 'n12648045', + 'n12648196', 'n12648424', 'n12648693', 'n12648888', 'n12649065', + 'n12649317', 'n12649539', 'n12649866', 'n12650038', 'n12650229', + 'n12650379', 'n12650556', 'n12650805', 'n12650915', 'n12651229', + 'n12651611', 'n12651821', 'n12653218', 'n12653436', 'n12653633', + 'n12654227', 'n12654857', 'n12655062', 'n12655245', 'n12655351', + 'n12655498', 'n12655605', 'n12655726', 'n12655869', 'n12656369', + 'n12656528', 'n12656685', 'n12656909', 'n12657082', 'n12657755', + 'n12658118', 'n12658308', 'n12658481', 'n12658603', 'n12658715', + 'n12658846', 'n12659064', 'n12659356', 'n12659539', 'n12660601', + 'n12661045', 'n12661227', 'n12661538', 'n12662074', 'n12662379', + 'n12662772', 'n12663023', 'n12663254', 'n12663359', 'n12663804', + 'n12664005', 'n12664187', 'n12664469', 'n12664710', 'n12665048', + 'n12665271', 'n12665659', 'n12665857', 'n12666050', 'n12666159', + 'n12666369', 'n12666965', 'n12667406', 'n12667582', 'n12667964', + 'n12668131', 'n12669803', 'n12670334', 'n12670758', 'n12670962', + 'n12671651', 'n12672289', 'n12673588', 'n12674120', 'n12674685', + 'n12674895', 'n12675299', 'n12675515', 'n12675876', 'n12676134', + 'n12676370', 'n12676534', 'n12676703', 'n12677120', 'n12677331', + 'n12677612', 'n12677841', 'n12678794', 'n12679023', 'n12679432', + 'n12679593', 'n12679876', 'n12680402', 'n12680652', 'n12680864', + 'n12681376', 'n12681579', 'n12681893', 'n12682411', 'n12682668', + 'n12682882', 'n12683096', 'n12683407', 'n12683571', 'n12683791', + 'n12684379', 'n12685431', 'n12685831', 'n12686077', 'n12686274', + 'n12686496', 'n12686676', 'n12686877', 'n12687044', 'n12687462', + 'n12687698', 'n12687957', 'n12688187', 'n12688372', 'n12688716', + 'n12689305', 'n12690653', 'n12691428', 'n12691661', 'n12692024', + 'n12692160', 'n12692521', 'n12692714', 'n12693244', 'n12693352', + 'n12693865', 'n12694486', 'n12695144', 'n12695975', 'n12696492', + 'n12696830', 'n12697152', 'n12697514', 'n12698027', 'n12698435', + 'n12698598', 'n12698774', 'n12699031', 'n12699301', 'n12699922', + 'n12700088', 'n12700357', 'n12702124', 'n12703190', 'n12703383', + 'n12703557', 'n12703716', 'n12703856', 'n12704041', 'n12704343', + 'n12704513', 'n12705013', 'n12705220', 'n12705458', 'n12705698', + 'n12705978', 'n12706410', 'n12707199', 'n12707781', 'n12708293', + 'n12708654', 
'n12708941', 'n12709103', 'n12709349', 'n12709688', + 'n12709901', 'n12710295', 'n12710415', 'n12710577', 'n12710693', + 'n12710917', 'n12711182', 'n12711398', 'n12711596', 'n12711817', + 'n12711984', 'n12712320', 'n12712626', 'n12713063', 'n12713358', + 'n12713521', 'n12713866', 'n12714254', 'n12714755', 'n12714949', + 'n12715195', 'n12715914', 'n12716400', 'n12716594', 'n12717072', + 'n12717224', 'n12717644', 'n12718074', 'n12718483', 'n12718995', + 'n12719684', 'n12719944', 'n12720200', 'n12720354', 'n12721122', + 'n12721477', 'n12722071', 'n12723062', 'n12723610', 'n12724942', + 'n12725521', 'n12725738', 'n12725940', 'n12726159', 'n12726357', + 'n12726528', 'n12726670', 'n12726902', 'n12727101', 'n12727301', + 'n12727518', 'n12727729', 'n12727960', 'n12728164', 'n12728322', + 'n12728508', 'n12728656', 'n12728864', 'n12729023', 'n12729164', + 'n12729315', 'n12729521', 'n12729729', 'n12729950', 'n12730143', + 'n12730370', 'n12730544', 'n12730776', 'n12731029', 'n12731401', + 'n12731835', 'n12732009', 'n12732252', 'n12732491', 'n12732605', + 'n12732756', 'n12732966', 'n12733218', 'n12733428', 'n12733647', + 'n12733870', 'n12734070', 'n12734215', 'n12735160', 'n12736603', + 'n12736999', 'n12737383', 'n12737898', 'n12738259', 'n12739332', + 'n12739966', 'n12740967', 'n12741222', 'n12741586', 'n12741792', + 'n12742290', 'n12742741', 'n12742878', 'n12743009', 'n12743352', + 'n12743823', 'n12743976', 'n12744142', 'n12744387', 'n12744850', + 'n12745386', 'n12745564', 'n12746884', 'n12747120', 'n12748248', + 'n12749049', 'n12749456', 'n12749679', 'n12749852', 'n12750076', + 'n12750767', 'n12751172', 'n12751675', 'n12752205', 'n12753007', + 'n12753245', 'n12753573', 'n12753762', 'n12754003', 'n12754174', + 'n12754311', 'n12754468', 'n12754648', 'n12754781', 'n12754981', + 'n12755225', 'n12755387', 'n12755559', 'n12755727', 'n12755876', + 'n12756457', 'n12757115', 'n12757303', 'n12757458', 'n12757668', + 'n12757816', 'n12757930', 'n12758014', 'n12758099', 'n12758176', + 'n12758250', 'n12758325', 'n12758399', 'n12758471', 'n12758555', + 'n12759273', 'n12759668', 'n12760539', 'n12760875', 'n12761284', + 'n12761702', 'n12761905', 'n12762049', 'n12762405', 'n12762896', + 'n12763529', 'n12764008', 'n12764202', 'n12764507', 'n12764978', + 'n12765115', 'n12765402', 'n12765846', 'n12766043', 'n12766595', + 'n12766869', 'n12767208', 'n12767423', 'n12767648', 'n12768369', + 'n12768682', 'n12768809', 'n12768933', 'n12769065', 'n12769219', + 'n12769318', 'n12770529', 'n12770892', 'n12771085', 'n12771192', + 'n12771390', 'n12771597', 'n12771890', 'n12772753', 'n12772908', + 'n12773142', 'n12773651', 'n12773917', 'n12774299', 'n12774641', + 'n12775070', 'n12775393', 'n12775717', 'n12775919', 'n12776558', + 'n12776774', 'n12777436', 'n12777680', 'n12777778', 'n12777892', + 'n12778398', 'n12778605', 'n12779603', 'n12779851', 'n12780325', + 'n12780563', 'n12781940', 'n12782530', 'n12782915', 'n12783316', + 'n12783730', 'n12784371', 'n12784889', 'n12785724', 'n12785889', + 'n12786273', 'n12786464', 'n12786836', 'n12787364', 'n12788854', + 'n12789054', 'n12789554', 'n12789977', 'n12790430', 'n12791064', + 'n12791329', 'n12793015', 'n12793284', 'n12793494', 'n12793695', + 'n12793886', 'n12794135', 'n12794367', 'n12794568', 'n12794985', + 'n12795209', 'n12795352', 'n12795555', 'n12796022', 'n12796385', + 'n12796849', 'n12797368', 'n12797860', 'n12798284', 'n12798910', + 'n12799269', 'n12799776', 'n12800049', 'n12800586', 'n12801072', + 'n12801520', 'n12801781', 'n12801966', 'n12803226', 'n12803754', + 'n12803958', 
'n12804352', 'n12805146', 'n12805561', 'n12805762', + 'n12806015', 'n12806732', 'n12807251', 'n12807409', 'n12807624', + 'n12807773', 'n12808007', 'n12809868', 'n12810007', 'n12810151', + 'n12810595', 'n12811027', 'n12811713', 'n12812235', 'n12812478', + 'n12812801', 'n12813189', 'n12814643', 'n12814857', 'n12814960', + 'n12815198', 'n12815668', 'n12815838', 'n12816508', 'n12816942', + 'n12817464', 'n12817694', 'n12817855', 'n12818004', 'n12818346', + 'n12818601', 'n12818966', 'n12819141', 'n12819354', 'n12819728', + 'n12820113', 'n12820669', 'n12820853', 'n12821505', 'n12821895', + 'n12822115', 'n12822466', 'n12822769', 'n12822955', 'n12823717', + 'n12823859', 'n12824053', 'n12824289', 'n12824735', 'n12825497', + 'n12826143', 'n12827270', 'n12827537', 'n12827907', 'n12828220', + 'n12828379', 'n12828520', 'n12828791', 'n12828977', 'n12829582', + 'n12829975', 'n12830222', 'n12830568', 'n12831141', 'n12831535', + 'n12831932', 'n12832315', 'n12832538', 'n12832822', 'n12833149', + 'n12833985', 'n12834190', 'n12834798', 'n12834938', 'n12835331', + 'n12835766', 'n12836212', 'n12836337', 'n12836508', 'n12836862', + 'n12837052', 'n12837259', 'n12837466', 'n12837803', 'n12839574', + 'n12839979', 'n12840168', 'n12840362', 'n12840502', 'n12840749', + 'n12841007', 'n12841193', 'n12841354', 'n12842302', 'n12842519', + 'n12842642', 'n12842887', 'n12843144', 'n12843316', 'n12843557', + 'n12843970', 'n12844409', 'n12844939', 'n12845187', 'n12845413', + 'n12845908', 'n12846335', 'n12846690', 'n12847008', 'n12847374', + 'n12847927', 'n12848499', 'n12849061', 'n12849279', 'n12849416', + 'n12849952', 'n12850168', 'n12850336', 'n12850906', 'n12851094', + 'n12851469', 'n12851860', 'n12852234', 'n12852428', 'n12852570', + 'n12853080', 'n12853287', 'n12853482', 'n12854048', 'n12854193', + 'n12854600', 'n12855365', 'n12855494', 'n12855710', 'n12855886', + 'n12856091', 'n12856287', 'n12856479', 'n12856680', 'n12857204', + 'n12857779', 'n12858150', 'n12858397', 'n12858618', 'n12858871', + 'n12858987', 'n12859153', 'n12859272', 'n12859679', 'n12859986', + 'n12860365', 'n12860978', 'n12861345', 'n12861541', 'n12861892', + 'n12862512', 'n12862828', 'n12863234', 'n12863624', 'n12864160', + 'n12865037', 'n12865562', 'n12865708', 'n12865824', 'n12866002', + 'n12866162', 'n12866333', 'n12866459', 'n12866635', 'n12866968', + 'n12867184', 'n12867449', 'n12867826', 'n12868019', 'n12868880', + 'n12869061', 'n12869478', 'n12869668', 'n12870048', 'n12870225', + 'n12870535', 'n12870682', 'n12870891', 'n12871272', 'n12871696', + 'n12871859', 'n12872458', 'n12872914', 'n12873341', 'n12873984', + 'n12875269', 'n12875697', 'n12875861', 'n12876899', 'n12877244', + 'n12877493', 'n12877637', 'n12877838', 'n12878169', 'n12878325', + 'n12878784', 'n12879068', 'n12879527', 'n12879963', 'n12880244', + 'n12880462', 'n12880638', 'n12880799', 'n12881105', 'n12881913', + 'n12882158', 'n12882779', 'n12882945', 'n12883265', 'n12883628', + 'n12884100', 'n12884260', 'n12885045', 'n12885265', 'n12885510', + 'n12885754', 'n12886185', 'n12886402', 'n12886600', 'n12886831', + 'n12887293', 'n12887532', 'n12887713', 'n12888016', 'n12888234', + 'n12888457', 'n12889219', 'n12889412', 'n12889579', 'n12889713', + 'n12890265', 'n12890490', 'n12890685', 'n12890928', 'n12891093', + 'n12891305', 'n12891469', 'n12891643', 'n12891824', 'n12892013', + 'n12893463', 'n12893993', 'n12895298', 'n12895811', 'n12896615', + 'n12897118', 'n12897788', 'n12897999', 'n12898342', 'n12898774', + 'n12899166', 'n12899537', 'n12899752', 'n12899971', 'n12900783', + 'n12901724', 
'n12902466', 'n12902662', 'n12903014', 'n12903367', + 'n12903503', 'n12903964', 'n12904314', 'n12904562', 'n12904938', + 'n12905135', 'n12905412', 'n12906214', 'n12906498', 'n12906771', + 'n12907057', 'n12907671', 'n12907857', 'n12908093', 'n12908645', + 'n12908854', 'n12909421', 'n12909614', 'n12909759', 'n12909917', + 'n12911079', 'n12911264', 'n12911440', 'n12911673', 'n12911914', + 'n12912274', 'n12912670', 'n12912801', 'n12913144', 'n12913524', + 'n12913791', 'n12914923', 'n12915140', 'n12915568', 'n12915811', + 'n12916179', 'n12916511', 'n12917901', 'n12918609', 'n12918810', + 'n12918991', 'n12919195', 'n12919403', 'n12919646', 'n12919847', + 'n12920043', 'n12920204', 'n12920521', 'n12920719', 'n12920955', + 'n12921315', 'n12921499', 'n12921660', 'n12921868', 'n12922119', + 'n12922458', 'n12922763', 'n12923108', 'n12923257', 'n12924623', + 'n12925179', 'n12925583', 'n12926039', 'n12926480', 'n12926689', + 'n12927013', 'n12927194', 'n12927494', 'n12927758', 'n12928071', + 'n12928307', 'n12928491', 'n12928819', 'n12929403', 'n12929600', + 'n12930778', 'n12930951', 'n12931231', 'n12931542', 'n12931906', + 'n12932173', 'n12932365', 'n12932706', 'n12932966', 'n12933274', + 'n12934036', 'n12934174', 'n12934479', 'n12934685', 'n12934985', + 'n12935166', 'n12935609', 'n12936155', 'n12936826', 'n12937130', + 'n12938081', 'n12938193', 'n12938445', 'n12938667', 'n12939104', + 'n12939282', 'n12939479', 'n12939874', 'n12940226', 'n12940609', + 'n12941220', 'n12941536', 'n12941717', 'n12942025', 'n12942395', + 'n12942572', 'n12942729', 'n12943049', 'n12943443', 'n12943912', + 'n12944095', 'n12945177', 'n12945366', 'n12945549', 'n12946849', + 'n12947313', 'n12947544', 'n12947756', 'n12947895', 'n12948053', + 'n12948251', 'n12948495', 'n12949160', 'n12949361', 'n12950126', + 'n12950314', 'n12950796', 'n12951146', 'n12951835', 'n12952165', + 'n12952469', 'n12952590', 'n12952717', 'n12953206', 'n12953484', + 'n12953712', 'n12954353', 'n12954799', 'n12955414', 'n12955840', + 'n12956170', 'n12956367', 'n12956588', 'n12956922', 'n12957608', + 'n12957803', 'n12957924', 'n12958261', 'n12958615', 'n12959074', + 'n12959538', 'n12960378', 'n12960552', 'n12960863', 'n12961242', + 'n12961393', 'n12961536', 'n12961879', 'n12963628', 'n12964920', + 'n12965626', 'n12965951', 'n12966804', 'n12966945', 'n12968136', + 'n12968309', 'n12969131', 'n12969425', 'n12969670', 'n12969927', + 'n12970193', 'n12970293', 'n12970733', 'n12971400', 'n12971804', + 'n12972136', 'n12973443', 'n12973791', 'n12973937', 'n12974987', + 'n12975804', 'n12976198', 'n12976554', 'n12978076', 'n12979316', + 'n12979829', 'n12980080', 'n12980840', 'n12981086', 'n12981301', + 'n12981443', 'n12981954', 'n12982468', 'n12982590', 'n12982915', + 'n12983048', 'n12983654', 'n12983873', 'n12983961', 'n12984267', + 'n12984489', 'n12984595', 'n12985420', 'n12985773', 'n12985857', + 'n12986227', 'n12987056', 'n12987423', 'n12987535', 'n12988158', + 'n12988341', 'n12988572', 'n12989007', 'n12989938', 'n12990597', + 'n12991184', 'n12991837', 'n12992177', 'n12992868', 'n12994892', + 'n12995601', 'n12997654', 'n12997919', 'n12998815', 'n13000891', + 'n13001041', 'n13001206', 'n13001366', 'n13001529', 'n13001930', + 'n13002209', 'n13002750', 'n13002925', 'n13003061', 'n13003254', + 'n13003522', 'n13003712', 'n13004423', 'n13004640', 'n13004826', + 'n13004992', 'n13005329', 'n13005984', 'n13006171', 'n13006631', + 'n13006894', 'n13007034', 'n13007417', 'n13007629', 'n13008157', + 'n13008315', 'n13008485', 'n13008689', 'n13008839', 'n13009085', + 'n13009244', 
'n13009429', 'n13009656', 'n13010694', 'n13010951', + 'n13011221', 'n13011595', 'n13012253', 'n13012469', 'n13012973', + 'n13013534', 'n13013764', 'n13013965', 'n13014097', 'n13014265', + 'n13014409', 'n13014581', 'n13014741', 'n13014879', 'n13015509', + 'n13015688', 'n13016076', 'n13016289', 'n13017102', 'n13017240', + 'n13017439', 'n13017610', 'n13017789', 'n13017979', 'n13018088', + 'n13018232', 'n13018407', 'n13018906', 'n13019496', 'n13019643', + 'n13019835', 'n13020191', 'n13020481', 'n13020964', 'n13021166', + 'n13021332', 'n13021543', 'n13021689', 'n13021867', 'n13022210', + 'n13022709', 'n13022903', 'n13023134', 'n13024012', 'n13024500', + 'n13024653', 'n13025647', 'n13025854', 'n13026015', 'n13027557', + 'n13027879', 'n13028611', 'n13028937', 'n13029122', 'n13029326', + 'n13029610', 'n13029760', 'n13030337', 'n13030616', 'n13030852', + 'n13031193', 'n13031323', 'n13031474', 'n13032115', 'n13032381', + 'n13032618', 'n13032923', 'n13033134', 'n13033396', 'n13033577', + 'n13033879', 'n13034062', 'n13034555', 'n13034788', 'n13035241', + 'n13035389', 'n13035707', 'n13035925', 'n13036116', 'n13036312', + 'n13036804', 'n13037406', 'n13037585', 'n13037805', 'n13038068', + 'n13038376', 'n13038577', 'n13038744', 'n13039349', 'n13040303', + 'n13040629', 'n13040796', 'n13041312', 'n13041943', 'n13042134', + 'n13042316', 'n13042982', 'n13043926', 'n13044375', 'n13044778', + 'n13045210', 'n13045594', 'n13045975', 'n13046130', 'n13046669', + 'n13047862', 'n13048447', 'n13049953', 'n13050397', 'n13050705', + 'n13050940', 'n13051346', 'n13052014', 'n13052248', 'n13052670', + 'n13052931', 'n13053608', 'n13054073', 'n13054560', 'n13055423', + 'n13055577', 'n13055792', 'n13055949', 'n13056135', 'n13056349', + 'n13056607', 'n13056799', 'n13057054', 'n13057242', 'n13057422', + 'n13057639', 'n13058037', 'n13058272', 'n13058608', 'n13059298', + 'n13059657', 'n13060017', 'n13060190', 'n13061172', 'n13061348', + 'n13061471', 'n13061704', 'n13062421', 'n13063269', 'n13063514', + 'n13064111', 'n13064457', 'n13065089', 'n13065514', 'n13066129', + 'n13066448', 'n13066979', 'n13067191', 'n13067330', 'n13067532', + 'n13067672', 'n13068255', 'n13068434', 'n13068735', 'n13068917', + 'n13069224', 'n13069773', 'n13070308', 'n13070875', 'n13071371', + 'n13071553', 'n13071815', 'n13072031', 'n13072209', 'n13072350', + 'n13072528', 'n13072706', 'n13072863', 'n13073055', 'n13073703', + 'n13074619', 'n13074814', 'n13075020', 'n13075272', 'n13075441', + 'n13075684', 'n13075847', 'n13076041', 'n13076405', 'n13076643', + 'n13076831', 'n13077033', 'n13077295', 'n13078021', 'n13079073', + 'n13079419', 'n13079567', 'n13080306', 'n13080866', 'n13081229', + 'n13081999', 'n13082568', 'n13083023', 'n13083461', 'n13084184', + 'n13084834', 'n13085113', 'n13085747', 'n13090018', 'n13090871', + 'n13091620', 'n13091774', 'n13091982', 'n13092078', 'n13092240', + 'n13092385', 'n13092987', 'n13093275', 'n13093629', 'n13094145', + 'n13094273', 'n13095013', 'n13096779', 'n13098515', 'n13098962', + 'n13099833', 'n13099999', 'n13100156', 'n13100677', 'n13102648', + 'n13102775', 'n13103023', 'n13103660', 'n13103750', 'n13103877', + 'n13104059', 'n13107694', 'n13107807', 'n13107891', 'n13108131', + 'n13108323', 'n13108481', 'n13108545', 'n13108662', 'n13108841', + 'n13109733', 'n13110915', 'n13111174', 'n13111340', 'n13111504', + 'n13111881', 'n13112035', 'n13112201', 'n13118330', 'n13118707', + 'n13119870', 'n13120211', 'n13120958', 'n13121104', 'n13121349', + 'n13122364', 'n13123309', 'n13123431', 'n13123841', 'n13124358', + 'n13124654', 
'n13125117', 'n13126050', 'n13126856', 'n13127001', + 'n13127303', 'n13127666', 'n13127843', 'n13128278', 'n13128582', + 'n13128976', 'n13129078', 'n13130014', 'n13130161', 'n13130726', + 'n13131028', 'n13131618', 'n13132034', 'n13132156', 'n13132338', + 'n13132486', 'n13132656', 'n13132756', 'n13132940', 'n13133140', + 'n13133233', 'n13133316', 'n13133613', 'n13133932', 'n13134302', + 'n13134531', 'n13134844', 'n13134947', 'n13135692', 'n13135832', + 'n13136316', 'n13136556', 'n13136781', 'n13137010', 'n13137225', + 'n13137409', 'n13137672', 'n13137951', 'n13138155', 'n13138308', + 'n13138658', 'n13138842', 'n13139055', 'n13139321', 'n13139482', + 'n13139647', 'n13139837', 'n13140049', 'n13140367', 'n13141141', + 'n13141415', 'n13141564', 'n13141797', 'n13141972', 'n13142182', + 'n13142504', 'n13142907', 'n13143285', 'n13143758', 'n13144084', + 'n13145040', 'n13145250', 'n13145444', 'n13146403', 'n13146583', + 'n13146928', 'n13147153', 'n13147270', 'n13147386', 'n13147532', + 'n13147689', 'n13147918', 'n13148208', 'n13148384', 'n13149296', + 'n13149970', 'n13150378', 'n13150592', 'n13150894', 'n13151082', + 'n13152339', 'n13154388', 'n13154494', 'n13154841', 'n13155095', + 'n13155305', 'n13155611', 'n13156986', 'n13157137', 'n13157346', + 'n13157481', 'n13157684', 'n13157971', 'n13158167', 'n13158512', + 'n13158605', 'n13158714', 'n13158815', 'n13159357', 'n13159691', + 'n13159890', 'n13160116', 'n13160254', 'n13160365', 'n13160604', + 'n13160831', 'n13160938', 'n13161151', 'n13161254', 'n13161904', + 'n13163553', 'n13163649', 'n13163991', 'n13164501', 'n13170840', + 'n13171210', 'n13171797', 'n13172923', 'n13173132', 'n13173259', + 'n13173488', 'n13173697', 'n13173882', 'n13174354', 'n13174670', + 'n13174823', 'n13175682', 'n13176363', 'n13176714', 'n13177048', + 'n13177529', 'n13177768', 'n13177884', 'n13178284', 'n13178707', + 'n13179056', 'n13179804', 'n13180534', 'n13180875', 'n13181055', + 'n13181244', 'n13181406', 'n13181811', 'n13182164', 'n13182338', + 'n13182799', 'n13182937', 'n13183056', 'n13183489', 'n13184394', + 'n13185269', 'n13185658', 'n13186388', 'n13186546', 'n13187367', + 'n13188096', 'n13188268', 'n13188462', 'n13188767', 'n13190060', + 'n13190747', 'n13191148', 'n13191620', 'n13191884', 'n13192625', + 'n13193143', 'n13193269', 'n13193466', 'n13193642', 'n13193856', + 'n13194036', 'n13194212', 'n13194572', 'n13194758', 'n13194918', + 'n13195341', 'n13195761', 'n13196003', 'n13196234', 'n13196369', + 'n13196738', 'n13197274', 'n13197507', 'n13198054', 'n13198482', + 'n13198914', 'n13199717', 'n13199970', 'n13200193', 'n13200542', + 'n13200651', 'n13200986', 'n13201423', 'n13201566', 'n13201969', + 'n13202125', 'n13202355', 'n13202602', 'n13205058', 'n13205249', + 'n13206178', 'n13206817', 'n13207094', 'n13207335', 'n13207572', + 'n13207736', 'n13207923', 'n13208302', 'n13208705', 'n13208965', + 'n13209129', 'n13209270', 'n13209460', 'n13209808', 'n13210350', + 'n13210597', 'n13211020', 'n13211790', 'n13212025', 'n13212175', + 'n13212379', 'n13212559', 'n13213066', 'n13213397', 'n13213577', + 'n13214217', 'n13214340', 'n13214485', 'n13215258', 'n13215586', + 'n13217005', 'n13219422', 'n13219833', 'n13219976', 'n13220122', + 'n13220355', 'n13220525', 'n13220663', 'n13221529', 'n13222877', + 'n13222985', 'n13223090', 'n13223588', 'n13223710', 'n13223843', + 'n13224673', 'n13224922', 'n13225244', 'n13225365', 'n13225617', + 'n13226320', 'n13226871', 'n13228017', 'n13228536', 'n13229543', + 'n13229951', 'n13230190', 'n13230662', 'n13230843', 'n13231078', + 'n13231678', 
'n13231919', 'n13232106', 'n13232363', 'n13232779', + 'n13233727', 'n13234114', 'n13234519', 'n13234678', 'n13234857', + 'n13235011', 'n13235159', 'n13235319', 'n13235503', 'n13235766', + 'n13236100', 'n13237188', 'n13237508', 'n13238375', 'n13238654', + 'n13238988', 'n13239177', 'n13239736', 'n13239921', 'n13240362', + 'n13252672', 'n13354021', 'n13555775', 'n13579829', 'n13650447', + 'n13653902', 'n13862407', 'n13862552', 'n13862780', 'n13863020', + 'n13863186', 'n13863473', 'n13863771', 'n13864035', 'n13864153', + 'n13864965', 'n13865298', 'n13865483', 'n13865904', 'n13866144', + 'n13866626', 'n13866827', 'n13867005', 'n13867492', 'n13868248', + 'n13868371', 'n13868515', 'n13868944', 'n13869045', 'n13869547', + 'n13869788', 'n13869896', 'n13871717', 'n13872592', 'n13872822', + 'n13873361', 'n13873502', 'n13873917', 'n13874073', 'n13874558', + 'n13875392', 'n13875571', 'n13875884', 'n13876561', 'n13877547', + 'n13877667', 'n13878306', 'n13879049', 'n13879320', 'n13879816', + 'n13880199', 'n13880415', 'n13880551', 'n13880704', 'n13880994', + 'n13881512', 'n13881644', 'n13882201', 'n13882276', 'n13882487', + 'n13882563', 'n13882639', 'n13882713', 'n13882961', 'n13883603', + 'n13883763', 'n13884261', 'n13884384', 'n13884930', 'n13885011', + 'n13886260', 'n13888491', 'n13889066', 'n13889331', 'n13891547', + 'n13891937', 'n13893786', 'n13894154', 'n13894434', 'n13895262', + 'n13896100', 'n13896217', 'n13897198', 'n13897528', 'n13897996', + 'n13898207', 'n13898315', 'n13898645', 'n13899735', 'n13900287', + 'n13900422', 'n13901211', 'n13901321', 'n13901423', 'n13901490', + 'n13901858', 'n13902048', 'n13902336', 'n13902793', 'n13903079', + 'n13905121', 'n13905275', 'n13905792', 'n13906484', 'n13906669', + 'n13906767', 'n13906936', 'n13907272', 'n13908201', 'n13908580', + 'n13911045', 'n13912260', 'n13912540', 'n13914141', 'n13914265', + 'n13914608', 'n13915023', 'n13915113', 'n13915209', 'n13915305', + 'n13915999', 'n13916363', 'n13916721', 'n13917690', 'n13917785', + 'n13918274', 'n13918387', 'n13918717', 'n13919547', 'n13919919', + 'n13926786', 'n14131950', 'n14175579', 'n14564779', 'n14582716', + 'n14583400', 'n14585392', 'n14592309', 'n14603798', 'n14633206', + 'n14685296', 'n14696793', 'n14698884', 'n14714645', 'n14720833', + 'n14765422', 'n14785065', 'n14786943', 'n14804958', 'n14810561', + 'n14820180', 'n14821852', 'n14844693', 'n14853210', 'n14858292', + 'n14867545', 'n14891255', 'n14899328', 'n14900184', 'n14900342', + 'n14908027', 'n14909584', 'n14914945', 'n14915184', 'n14919819', + 'n14938389', 'n14941787', 'n14942411', 'n14973585', 'n14974264', + 'n14975598', 'n14976759', 'n14976871', 'n14977188', 'n14977504', + 'n14992287', 'n14993378', 'n15005577', 'n15006012', 'n15019030', + 'n15048888', 'n15060326', 'n15060688', 'n15062057', 'n15067877', + 'n15075141', 'n15086247', 'n15089258', 'n15089472', 'n15089645', + 'n15089803', 'n15090065', 'n15090238', 'n15090742', 'n15091129', + 'n15091304', 'n15091473', 'n15091669', 'n15091846', 'n15092059', + 'n15092227', 'n15092409', 'n15092650', 'n15092751', 'n15092942', + 'n15093049', 'n15093137', 'n15093298', 'n15102359', 'n15102455', + 'n15102894', + ] From 95739b45d7231125185e5c0ff9b1f5eb634976d3 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Thu, 17 Mar 2022 17:57:05 -0700 Subject: [PATCH 57/61] Fix partially removed alt_lable impl from TFDS variant of ImageNet22/12k --- timm/data/parsers/tfds/imagenet22k.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/timm/data/parsers/tfds/imagenet22k.py 
b/timm/data/parsers/tfds/imagenet22k.py
index 9f5dde0b34..760f6466a1 100644
--- a/timm/data/parsers/tfds/imagenet22k.py
+++ b/timm/data/parsers/tfds/imagenet22k.py
@@ -77,12 +77,12 @@ def _split_generators(self, dl_manager: tfds.download.DownloadManager):
             'validation': self._generate_examples(val_records, manual_dir),
         }

-    def _generate_examples(self, records, manual_dir, alt_label=None, resize_short=True, max_img_size=MAX_DIM):
+    def _generate_examples(self, records, manual_dir, resize_short=True, max_img_size=MAX_DIM):
         """Yields examples."""
         for r in records:
             try:
                 filename, output_record = _process_record(
-                    r, manual_dir, alt_label=alt_label, resize_short=resize_short, max_img_size=max_img_size)
+                    r, manual_dir, resize_short=resize_short, max_img_size=max_img_size)
                 yield filename, output_record
             except Exception as e:
                 print('Exception:', e)
@@ -114,8 +114,6 @@ def _load_records(
         train_csv,
         validation_csv,
         labels,
-        alt_labels=None,
-        alt_label_name='',
         min_img_size=MIN_DIM,
 ):
     pd = tfds.core.lazy_imports.pandas
@@ -133,12 +131,10 @@ def _load_records(

     train_record_df['label'] = train_record_df['cls'].map(class_to_idx).astype(int)
     train_record_df = train_record_df[['filename', 'label']]
-    train_record_df = train_record_df.sample(frac=1, random_state=42)
     print('num train records:', len(train_record_df.index))

     val_record_df['label'] = val_record_df['cls'].map(class_to_idx).astype(int)
     val_record_df = val_record_df[['filename', 'label']]
-    val_record_df = val_record_df.sample(frac=1, random_state=42)
     print('num val records:', len(val_record_df.index))

     train_records = train_record_df.to_records(index=False)

From c76d77267005aeb9ac291938195c6ffefd8c0f7c Mon Sep 17 00:00:00 2001
From: Edoardo Debenedetti
Date: Thu, 28 Apr 2022 10:04:03 +0000
Subject: [PATCH 58/61] Add support for different TFDS `BuilderConfig`s

---
 timm/data/parsers/parser_factory.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/timm/data/parsers/parser_factory.py b/timm/data/parsers/parser_factory.py
index 6dedb46782..89383d9b24 100644
--- a/timm/data/parsers/parser_factory.py
+++ b/timm/data/parsers/parser_factory.py
@@ -11,7 +11,7 @@ def create_parser(name, root, split='train', **kwargs):
     prefix = ''
     if len(name) > 1:
         prefix = name[0]
-    name = name[-1]
+    name = "/".join(name[1:])

     # FIXME improve the selection right now just tfds prefix or fallback path, will need options to
     # explicitly select other options shortly

From 6fe01993ad344d5083a54c227a563fe73e3f56ed Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Wed, 13 Jul 2022 09:53:15 -0700
Subject: [PATCH 59/61] versions 0.8.x for bits_and_tpu branch

---
 timm/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/timm/version.py b/timm/version.py
index 06f971e2f1..7556fbd972 100644
--- a/timm/version.py
+++ b/timm/version.py
@@ -1 +1 @@
-__version__ = '0.7.0.dev0'
+__version__ = '0.8.0.dev0'

From 5a40c6a3c48672cf11a1995f9ebdb54f44903f09 Mon Sep 17 00:00:00 2001
From: Edoardo Debenedetti
Date: Wed, 17 Aug 2022 15:10:41 +0200
Subject: [PATCH 60/61] Fix issue with torchvision's ImageNet

---
 timm/data/dataset_factory.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/timm/data/dataset_factory.py b/timm/data/dataset_factory.py
index 647357a98a..c4738ae9b1 100644
--- a/timm/data/dataset_factory.py
+++ b/timm/data/dataset_factory.py
@@ -122,12 +122,14 @@ def create_dataset(
     elif name == 'imagenet':
         if split in _EVAL_SYNONYM:
             split = 'val'
+        torch_kwargs.pop("download")
         ds = ImageNet(split=split, **torch_kwargs)
     elif name == 'image_folder' or name == 'folder':
         # in case torchvision ImageFolder is preferred over timm ImageDataset for some reason
         if search_split and os.path.isdir(root):
             # look for split specific sub-folder in root
             root = _search_split(root, split)
+        torch_kwargs.pop("download")
         ds = ImageFolder(root, **kwargs)
     else:
         assert False, f"Unknown torchvision dataset {name}"

From a25bf974a9a2c67c72fbb780c056c8638c966a83 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Tue, 1 Nov 2022 21:49:10 -0700
Subject: [PATCH 61/61] Update README.md

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index d0f6cd0ef1..5d301b7ad5 100644
--- a/README.md
+++ b/README.md
@@ -15,6 +15,7 @@ Thanks to the following for hardware support:
 * TPU Research Cloud (TRC) (https://sites.research.google/trc/about/)
+  * TPU support can be found on the [`bits_and_tpu`](https://github.com/rwightman/pytorch-image-models/tree/bits_and_tpu/) branch, w/ some setup help [here](https://github.com/rwightman/pytorch-image-models/tree/bits_and_tpu/timm/bits)
 * Nvidia (https://www.nvidia.com/en-us/)

 And a big thanks to all GitHub sponsors who helped with some of my costs before I joined Hugging Face.
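
For context on the parser_factory change in [PATCH 58/61]: keeping the full remainder of the dataset name, rather than only the last path segment, lets a TFDS `BuilderConfig` ride along as part of the name, e.g. `tfds/eurosat/rgb`. The sketch below illustrates the intent only; `resolve_parser_name` is a hypothetical helper, and the exact splitting done inside timm's `create_parser` may differ.

def resolve_parser_name(name: str):
    # Hypothetical helper, not timm code: mimics the post-PATCH-58 name handling.
    parts = name.lower().split('/')
    prefix = parts[0] if len(parts) > 1 else ''
    # Old behaviour: parts[-1] kept only 'rgb' and lost the dataset id.
    # New behaviour: keep everything after the prefix so TFDS sees 'eurosat/rgb'.
    rest = '/'.join(parts[1:]) if len(parts) > 1 else parts[0]
    return prefix, rest

assert resolve_parser_name('tfds/eurosat/rgb') == ('tfds', 'eurosat/rgb')
assert resolve_parser_name('tfds/imagenet2012') == ('tfds', 'imagenet2012')
assert resolve_parser_name('imagenet') == ('', 'imagenet')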
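For context on [PATCH 60/61]: torchvision's `ImageNet` builds from manually prepared archives and, in newer torchvision releases, no longer accepts a `download` argument, so forwarding the generic `download` flag collected by timm's dataset factory can raise an error; the patch drops it before constructing the dataset. A minimal sketch of that idea, assuming torchvision is installed; `build_torchvision_imagenet` is a hypothetical wrapper, not the actual `create_dataset` code.

from torchvision.datasets import ImageNet

def build_torchvision_imagenet(root, split='val', **torch_kwargs):
    # A generic 'download' flag may be collected upstream, but torchvision's
    # ImageNet does not take it, so drop it if present before construction.
    torch_kwargs.pop('download', None)
    return ImageNet(root=root, split=split, **torch_kwargs)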