Commit ad89d98
Parent(s): ac89b4b
Upload 10 files

Files changed:
- attention_mlp.ckpt +3 -0
- classifier.ckpt +3 -0
- custom_interface.py +157 -0
- custom_model.py +100 -0
- discrete_embedding_layer.ckpt +3 -0
- embedding_model.ckpt +3 -0
- example1.wav +0 -0
- example2.flac +0 -0
- hyperparams.yaml +129 -0
- label_encoder.txt +1213 -0
attention_mlp.ckpt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:079123a0377f9c2d9c3efe00032dedf38fe5bbcc88e72bbf6a16f78d8eac2636
size 4204478
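The .ckpt entries in this commit are Git LFS pointer files rather than the checkpoint weights themselves: each records only the spec version, the SHA-256 of the real payload, and its size in bytes (here ~4.2 MB). A minimal sketch of reading such a pointer, assuming the standard three-line key/value layout shown above:

# Minimal sketch: parse a Git LFS pointer file like the one above.
# Assumes the standard "version / oid / size" key-value layout.
def parse_lfs_pointer(path):
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

pointer = parse_lfs_pointer("attention_mlp.ckpt")
print(pointer["oid"], pointer["size"])  # sha256:0791... 4204478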
classifier.ckpt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:09e06f21026255bb11042e450e59e0a4ed8058ba6982a9cc83305a285de65cf9
size 931371
custom_interface.py
ADDED
@@ -0,0 +1,157 @@
import torch
from speechbrain.inference.interfaces import Pretrained


class CustomEncoderClassifier(Pretrained):
    """A ready-to-use class for utterance-level classification (e.g., speaker-id,
    language-id, emotion recognition, keyword spotting, etc.).

    The class assumes that a self-supervised encoder like wav2vec2/hubert and a
    classifier model are defined in the yaml file. If you want to convert the
    predicted index into a corresponding text label, please provide the path of
    the label_encoder in a variable called 'lab_encoder_file' within the yaml.

    The class can be used either to run only the encoder (encode_batch()) to
    extract embeddings or to run a classification step (classify_batch()).

    Example
    -------
    >>> import torchaudio
    >>> from speechbrain.pretrained import EncoderClassifier
    >>> # Model is downloaded from the speechbrain HuggingFace repo
    >>> tmpdir = getfixture("tmpdir")
    >>> classifier = EncoderClassifier.from_hparams(
    ...     source="speechbrain/spkrec-ecapa-voxceleb",
    ...     savedir=tmpdir,
    ... )
    >>> # Compute embeddings
    >>> signal, fs = torchaudio.load("samples/audio_samples/example1.wav")
    >>> embeddings = classifier.encode_batch(signal)
    >>> # Classification
    >>> prediction = classifier.classify_batch(signal)
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.similarity = torch.nn.CosineSimilarity(dim=-1, eps=1e-6)

    def encode_batch(self, wavs, wav_lens=None, normalize=False):
        """Encodes the input audio into a single vector embedding.

        The waveforms should already be in the model's desired format.
        You can call:
        ``normalized = <this>.normalizer(signal, sample_rate)``
        to get a correctly converted signal in most cases.

        Arguments
        ---------
        wavs : torch.tensor
            Batch of waveforms [batch, time, channels] or [batch, time]
            depending on the model. Make sure the sample rate is fs=16000 Hz.
        wav_lens : torch.tensor
            Lengths of the waveforms relative to the longest one in the
            batch, tensor of shape [batch]. The longest one should have
            relative length 1.0 and others len(waveform) / max_length.
            Used for ignoring padding.
        normalize : bool
            If True, it normalizes the embeddings with the statistics
            contained in mean_var_norm_emb.

        Returns
        -------
        torch.tensor
            The encoded batch
        """
        # Manage single waveforms in input
        if len(wavs.shape) == 1:
            wavs = wavs.unsqueeze(0)

        # Assign full length if wav_lens is not assigned
        if wav_lens is None:
            wav_lens = torch.ones(wavs.shape[0], device=self.device)

        # Storing waveform in the specified device
        wavs, wav_lens = wavs.to(self.device), wav_lens.to(self.device)
        wavs = wavs.float()

        with torch.no_grad():
            self.hparams.codec.to(self.device).eval()
            # Tokenize the waveform into discrete SSL units
            tokens, _, _ = self.hparams.codec(
                wavs, wav_lens, **self.hparams.tokenizer_config
            )
            embeddings = self.mods.discrete_embedding_layer(tokens)
            att_w = self.mods.attention_mlp(embeddings)
            # Attention-weighted sum over codebooks:
            # (B, T, 1, C) x (B, T, C, D) -> (B, T, 1, D) -> (B, T, D)
            feats = torch.matmul(att_w.transpose(2, -1), embeddings).squeeze(-2)
            embeddings = self.mods.embedding_model(feats, wav_lens)
        return embeddings.squeeze(1)

    def verify_batch(
        self, wavs1, wavs2, wav1_lens=None, wav2_lens=None, threshold=0.25
    ):
        """Performs speaker verification with cosine distance.

        It returns the score and the decision (0 different speakers,
        1 same speakers).

        Arguments
        ---------
        wavs1 : torch.Tensor
            Tensor containing the speech waveform1 (batch, time).
            Make sure the sample rate is fs=16000 Hz.
        wavs2 : torch.Tensor
            Tensor containing the speech waveform2 (batch, time).
            Make sure the sample rate is fs=16000 Hz.
        wav1_lens : torch.Tensor
            Tensor containing the relative length for each sentence
            in the batch (e.g., [0.8 0.6 1.0]).
        wav2_lens : torch.Tensor
            Tensor containing the relative length for each sentence
            in the batch (e.g., [0.8 0.6 1.0]).
        threshold : float
            Threshold applied to the cosine distance to decide if the
            speaker is different (0) or the same (1).

        Returns
        -------
        score
            The score associated to the binary verification output
            (cosine distance).
        prediction
            The prediction is 1 if the two signals in input are from the same
            speaker and 0 otherwise.
        """
        emb1 = self.encode_batch(wavs1, wav1_lens, normalize=False)
        emb2 = self.encode_batch(wavs2, wav2_lens, normalize=False)
        score = self.similarity(emb1, emb2)
        return score, score > threshold

    def verify_files(self, path_x, path_y, **kwargs):
        """Speaker verification with cosine distance.

        Returns the score and the decision (0 different speakers,
        1 same speakers).

        Arguments
        ---------
        path_x : str
            Path to file x.
        path_y : str
            Path to file y.
        **kwargs : dict
            Arguments to ``load_audio``.

        Returns
        -------
        score
            The score associated to the binary verification output
            (cosine distance).
        prediction
            The prediction is 1 if the two signals in input are from the same
            speaker and 0 otherwise.
        """
        waveform_x = self.load_audio(path_x, **kwargs)
        waveform_y = self.load_audio(path_y, **kwargs)
        # Fake batches:
        batch_x = waveform_x.unsqueeze(0)
        batch_y = waveform_y.unsqueeze(0)
        # Verify:
        score, decision = self.verify_batch(batch_x, batch_y)
        # Squeeze:
        return score[0], decision[0]
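For context, a hypothetical usage sketch for the interface above: SpeechBrain's foreign_class loader can pair custom_interface.py with hyperparams.yaml from a model repo. The source id below is taken from the commented pretrained_path in hyperparams.yaml and may not be the final repo id for this commit; the printed values are illustrative.

# Hypothetical usage sketch for the custom interface defined above.
from speechbrain.inference.interfaces import foreign_class

verifier = foreign_class(
    source="poonehmousavi/discrete_wavlm_spk_rec_ecapatdn",  # assumed repo id
    pymodule_file="custom_interface.py",
    classname="CustomEncoderClassifier",
)
# Speaker verification on the two audio examples bundled in this commit.
score, decision = verifier.verify_files("example1.wav", "example2.flac")
print(float(score), bool(decision))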
custom_model.py
ADDED
@@ -0,0 +1,100 @@
import torch


class AttentionMLP(torch.nn.Module):
    def __init__(self, input_dim, hidden_dim):
        super(AttentionMLP, self).__init__()
        self.layers = torch.nn.Sequential(
            torch.nn.Linear(input_dim, hidden_dim),
            torch.nn.ReLU(),
            torch.nn.Linear(hidden_dim, 1, bias=False),
        )

    def forward(self, x):
        # x: (batch, time, num_codebooks, emb_dim) -> attention weights
        # normalized over the codebook dimension.
        x = self.layers(x)
        att_w = torch.nn.functional.softmax(x, dim=2)
        return att_w


class Discrete_EmbeddingLayer(torch.nn.Module):
    """This class handles embedding layers for discrete tokens.

    Arguments
    ---------
    num_codebooks : int
        Number of codebooks of the tokenizer.
    vocab_size : int
        Size of the dictionary of embeddings.
    emb_dim : int
        The size of each embedding vector.
    pad_index : int (default: 0)
        If specified, the entries at padding_idx do not contribute to the gradient.
    init : boolean (default: False)
        If set to True, init the embedding with the tokenizer embedding, otherwise init randomly.
    freeze : boolean (default: False)
        If True, the embedding is frozen. If False, the model will be trained
        alongside the rest of the pipeline.

    Example
    -------
    >>> from speechbrain.lobes.models.huggingface_transformers.encodec import Encodec
    >>> model_hub = "facebook/encodec_24khz"
    >>> save_path = "savedir"
    >>> model = Encodec(model_hub, save_path)
    >>> audio = torch.randn(4, 1000)
    >>> length = torch.tensor([1.0, .5, .75, 1.0])
    >>> tokens, emb = model.encode(audio, length)
    >>> print(tokens.shape)
    torch.Size([4, 4, 2])
    >>> emb = Discrete_EmbeddingLayer(2, 1024, 1024)
    >>> in_emb = emb(tokens)
    >>> print(in_emb.shape)
    torch.Size([4, 4, 2, 1024])
    """

    def __init__(
        self,
        num_codebooks,
        vocab_size,
        emb_dim,
        pad_index=0,
        init=False,
        freeze=False,
    ):
        super(Discrete_EmbeddingLayer, self).__init__()
        self.vocab_size = vocab_size
        self.num_codebooks = num_codebooks
        self.freeze = freeze
        self.embedding = torch.nn.Embedding(
            num_codebooks * vocab_size, emb_dim
        ).requires_grad_(not self.freeze)
        self.init = init

    def init_embedding(self, weights):
        # Copy pretrained weights into the embedding table without tracking gradients.
        with torch.no_grad():
            self.embedding.weight = torch.nn.Parameter(weights)

    def forward(self, in_tokens):
        """Computes the embedding for the discrete tokens of a sample.

        Arguments
        ---------
        in_tokens : torch.Tensor
            A (Batch x Time x num_codebooks) tensor of discrete token indices.

        Returns
        -------
        in_embs : torch.Tensor
            The corresponding token embeddings.
        """
        with torch.set_grad_enabled(not self.freeze):
            # Make token IDs unique across different codebooks by offsetting
            # codebook k by k * vocab_size (note: this modifies in_tokens in place).
            in_tokens += torch.arange(
                0,
                self.num_codebooks * self.vocab_size,
                self.vocab_size,
                device=in_tokens.device,
            )
            # Forward pass through the shared embedding table.
            in_embs = self.embedding(in_tokens)
            return in_embs
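A standalone sketch (illustrative values only) of the codebook-offset trick in Discrete_EmbeddingLayer.forward: token k from codebook c is remapped to row c * vocab_size + k of the shared embedding table, so identical token ids from different codebooks select distinct embeddings.

# Illustrative sketch of the codebook-offset remapping; values are made up.
import torch

vocab_size, num_codebooks = 1000, 6
tokens = torch.tensor([[[7, 7, 7, 7, 7, 7]]])  # (batch=1, time=1, num_codebooks=6)
offsets = torch.arange(0, num_codebooks * vocab_size, vocab_size)  # [0, 1000, ..., 5000]
print(tokens + offsets)  # tensor([[[   7, 1007, 2007, 3007, 4007, 5007]]])

Since the layer applies this offset in place (`in_tokens +=`), passing the same token tensor through the layer twice would offset it twice; callers should treat the input as consumed.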
discrete_embedding_layer.ckpt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:87fbbc6ab429b57493baba626f7c26584ef559b7608be849f2d09ef53f7077ef
size 24577457
embedding_model.ckpt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:99fff92b110bc038dc81ceee1bab1a61ebbb2e69d10de13929259516ba7f66da
size 102646844
example1.wav
ADDED
Binary file (104 kB)

example2.flac
ADDED
Binary file (39.6 kB)
hyperparams.yaml
ADDED
@@ -0,0 +1,129 @@
# ############################################################################
# Model: ECAPA big for Speaker verification
# ############################################################################

# Feature parameters
n_mels: 80

# Pretrain folder (HuggingFace)
# pretrained_path: poonehmousavi/discrete_wavlm_spk_rec_ecapatdn
pretrained_path: benchmarks/DASB/VoiceCeleb1/speaker_ver/temp
# Output parameters
out_n_neurons: 1211
save_folder: tmp

### Configuration for discrete SSL model
# ssl_model_type: hubert, wavlm, wav2vec2
# ssl_hub: facebook/hubert-large-ll60k, microsoft/wavlm-large, facebook/wav2vec2-large
ssl_model_type: wavlm # hubert, wavlm or wav2vec2
ssl_hub: microsoft/wavlm-large
ssl_folder: !ref <save_folder>/ssl_checkpoint
kmeans_repo_id: speechbrain/SSL_Quantization
kmeans_cache_dir: !ref <save_folder>/kmeans_checkpoint
kmeans_dataset: LibriSpeech-100-360-500
freeze_ssl: True
freeze_feature_extractor: True
num_clusters: 1000

### Config for Tokenizer
# Layer numbers should be among the supported layers for discrete SSL models
# (a kmeans model should be available for that layer).
# ssl_layer_num: [3, 7, 12, 23]
# deduplicate: [False, False, False, False]
# bpe_tokenizer_path: [null, null, null, null]
ssl_layer_num: [1, 3, 7, 12, 18, 23]
num_codebooks: 6
deduplicate: [False, False, False, False, False, False]
bpe_tokenizer_path: [null, null, null, null, null, null]
sample_rate: 16000

# Feature parameters
encoder_dim: 1024
# Modules
tokenizer_config:
    SSL_layers: !ref <ssl_layer_num>
    deduplicates: !ref <deduplicate>
    bpe_tokenizers: !ref <bpe_tokenizer_path>

ssl_model: !apply:speechbrain.utils.hparams.choice
    value: !ref <ssl_model_type>
    choices:
        wavlm: !new:speechbrain.lobes.models.huggingface_transformers.wavlm.WavLM
            source: !ref <ssl_hub>
            output_norm: False
            freeze: !ref <freeze_ssl>
            freeze_feature_extractor: !ref <freeze_feature_extractor>
            output_all_hiddens: True
            save_path: !ref <ssl_folder>
        hubert: !new:speechbrain.lobes.models.huggingface_transformers.hubert.HuBERT
            source: !ref <ssl_hub>
            output_norm: False
            freeze: !ref <freeze_ssl>
            freeze_feature_extractor: !ref <freeze_feature_extractor>
            output_all_hiddens: True
            save_path: !ref <ssl_folder>
        wav2vec2: !new:speechbrain.lobes.models.huggingface_transformers.wav2vec2.Wav2Vec2
            source: !ref <ssl_hub>
            output_norm: False
            freeze: !ref <freeze_ssl>
            freeze_feature_extractor: !ref <freeze_feature_extractor>
            output_all_hiddens: True
            save_path: !ref <ssl_folder>

codec: !new:speechbrain.lobes.models.huggingface_transformers.discrete_ssl.DiscreteSSL
    save_path: !ref <kmeans_cache_dir>
    ssl_model: !ref <ssl_model>
    kmeans_dataset: !ref <kmeans_dataset>
    kmeans_repo_id: !ref <kmeans_repo_id>
    num_clusters: !ref <num_clusters>

discrete_embedding_layer: !new:custom_model.Discrete_EmbeddingLayer
    num_codebooks: !ref <num_codebooks>
    vocab_size: !ref <num_clusters>
    emb_dim: !ref <encoder_dim>

attention_mlp: !new:custom_model.AttentionMLP
    input_dim: !ref <encoder_dim>
    hidden_dim: !ref <encoder_dim>

embedding_model: !new:speechbrain.lobes.models.ECAPA_TDNN.ECAPA_TDNN
    input_size: !ref <encoder_dim>
    channels: [1024, 1024, 1024, 1024, 3072]
    kernel_sizes: [5, 3, 3, 3, 1]
    dilations: [1, 2, 3, 4, 1]
    groups: [1, 1, 1, 1, 1]
    attention_channels: 128
    lin_neurons: 192

classifier: !new:speechbrain.lobes.models.ECAPA_TDNN.Classifier
    input_size: 192
    out_neurons: !ref <out_n_neurons>

modules:
    embedding_model: !ref <embedding_model>
    classifier: !ref <classifier>
    attention_mlp: !ref <attention_mlp>
    codec: !ref <codec>
    discrete_embedding_layer: !ref <discrete_embedding_layer>

label_encoder: !new:speechbrain.dataio.encoder.CategoricalEncoder

pretrainer: !new:speechbrain.utils.parameter_transfer.Pretrainer
    loadables:
        embedding_model: !ref <embedding_model>
        classifier: !ref <classifier>
        attention_mlp: !ref <attention_mlp>
        discrete_embedding_layer: !ref <discrete_embedding_layer>
        label_encoder: !ref <label_encoder>

    paths:
        embedding_model: !ref <pretrained_path>/embedding_model.ckpt
        classifier: !ref <pretrained_path>/classifier.ckpt
        attention_mlp: !ref <pretrained_path>/attention_mlp.ckpt
        label_encoder: !ref <pretrained_path>/label_encoder.txt
        discrete_embedding_layer: !ref <pretrained_path>/discrete_embedding_layer.ckpt
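A minimal sketch of how a HyperPyYAML file like this is typically consumed, assuming the standard hyperpyyaml and SpeechBrain Pretrainer APIs (instantiating this particular file also downloads the WavLM and k-means checkpoints, so it is not a lightweight operation):

# Minimal sketch: load_hyperpyyaml instantiates every !new: object in the
# file; the pretrainer then pulls the .ckpt files listed under paths:.
from hyperpyyaml import load_hyperpyyaml

with open("hyperparams.yaml") as f:
    hparams = load_hyperpyyaml(f)

pretrainer = hparams["pretrainer"]
pretrainer.collect_files()   # fetch/locate each checkpoint
pretrainer.load_collected()  # copy parameters into the instantiated modules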
label_encoder.txt
ADDED
@@ -0,0 +1,1213 @@
'id11129' => 0
'id10248' => 1
'id11225' => 2
'id10977' => 3
'id11142' => 4
'id10715' => 5
'id11250' => 6
'id10892' => 7
'id11239' => 8
'id11168' => 9
'id10711' => 10
'id10571' => 11
'id10197' => 12
'id10129' => 13
'id11008' => 14
'id10783' => 15
'id10944' => 16
'id10639' => 17
'id11195' => 18
'id10790' => 19
'id10780' => 20
'id10805' => 21
'id10145' => 22
'id10716' => 23
'id10664' => 24
'id10086' => 25
'id10840' => 26
'id10928' => 27
'id10447' => 28
'id10510' => 29
'id11105' => 30
'id10569' => 31
'id10398' => 32
'id10047' => 33
'id10184' => 34
'id10706' => 35
'id10959' => 36
'id10480' => 37
'id10631' => 38
'id10617' => 39
'id10543' => 40
'id11027' => 41
'id10225' => 42
'id10786' => 43
'id10611' => 44
'id10608' => 45
'id10035' => 46
'id10488' => 47
'id10428' => 48
'id11204' => 49
'id10491' => 50
'id10730' => 51
'id10979' => 52
'id11183' => 53
'id10784' => 54
'id10090' => 55
'id11041' => 56
'id10449' => 57
'id10419' => 58
'id10536' => 59
'id11188' => 60
'id10036' => 61
'id11057' => 62
'id10921' => 63
'id11148' => 64
'id11056' => 65
'id10573' => 66
'id10472' => 67
'id10125' => 68
'id10610' => 69
'id10534' => 70
'id10943' => 71
'id10367' => 72
'id11076' => 73
'id10883' => 74
'id10853' => 75
'id10260' => 76
'id10549' => 77
'id10919' => 78
'id10994' => 79
'id10756' => 80
'id10325' => 81
'id11112' => 82
'id10396' => 83
'id10471' => 84
'id10329' => 85
'id10144' => 86
'id10850' => 87
'id10640' => 88
'id11165' => 89
'id10430' => 90
'id10792' => 91
'id10798' => 92
'id10317' => 93
'id10103' => 94
'id10149' => 95
'id10981' => 96
'id11182' => 97
'id10732' => 98
'id10425' => 99
'id10411' => 100
'id10520' => 101
'id11152' => 102
'id10930' => 103
'id10636' => 104
'id10986' => 105
'id10130' => 106
'id10063' => 107
'id11224' => 108
'id10113' => 109
'id10477' => 110
'id10925' => 111
'id10383' => 112
'id10517' => 113
'id10237' => 114
'id10957' => 115
'id10576' => 116
'id11130' => 117
'id10956' => 118
'id10831' => 119
'id11144' => 120
'id10604' => 121
'id10799' => 122
'id10508' => 123
'id10148' => 124
'id10583' => 125
'id10906' => 126
'id11121' => 127
'id10938' => 128
'id10243' => 129
'id10949' => 130
'id10426' => 131
'id11004' => 132
'id11223' => 133
'id10466' => 134
'id10006' => 135
'id10537' => 136
'id10975' => 137
'id11158' => 138
'id10423' => 139
'id10659' => 140
'id11015' => 141
'id10627' => 142
'id10931' => 143
'id10031' => 144
'id10817' => 145
'id11101' => 146
'id10879' => 147
'id10913' => 148
'id10399' => 149
'id10239' => 150
'id10708' => 151
'id10025' => 152
'id10912' => 153
'id11210' => 154
'id10891' => 155
'id10267' => 156
'id10584' => 157
'id10450' => 158
'id10755' => 159
'id10193' => 160
'id10993' => 161
'id11149' => 162
'id10045' => 163
'id10055' => 164
'id10062' => 165
'id10095' => 166
'id10686' => 167
'id10712' => 168
'id10002' => 169
'id10106' => 170
'id10094' => 171
'id10505' => 172
'id10421' => 173
'id10587' => 174
'id10231' => 175
'id10072' => 176
'id11154' => 177
'id10689' => 178
'id10665' => 179
'id10950' => 180
'id10586' => 181
'id10108' => 182
'id10690' => 183
'id10335' => 184
'id10704' => 185
'id11181' => 186
'id10860' => 187
'id10465' => 188
'id10199' => 189
'id11039' => 190
'id10539' => 191
'id10245' => 192
'id10191' => 193
'id10168' => 194
'id11002' => 195
'id10679' => 196
'id10165' => 197
'id10945' => 198
'id10454' => 199
'id11044' => 200
'id10380' => 201
'id11177' => 202
'id10996' => 203
'id11169' => 204
'id10155' => 205
'id10691' => 206
'id11184' => 207
'id10810' => 208
'id10941' => 209
'id10482' => 210
'id11089' => 211
'id10579' => 212
'id11113' => 213
'id10462' => 214
'id10186' => 215
'id11136' => 216
'id10628' => 217
'id10102' => 218
'id11199' => 219
'id10707' => 220
'id10980' => 221
'id10633' => 222
'id10328' => 223
'id11035' => 224
'id10397' => 225
'id10175' => 226
'id11011' => 227
'id10356' => 228
'id10886' => 229
'id10416' => 230
'id11058' => 231
'id10779' => 232
'id10240' => 233
'id11030' => 234
'id10180' => 235
'id10096' => 236
'id10061' => 237
'id10116' => 238
'id11123' => 239
'id10811' => 240
'id10092' => 241
'id10354' => 242
'id10601' => 243
'id10709' => 244
'id11193' => 245
'id10042' => 246
'id11176' => 247
'id10578' => 248
'id10032' => 249
'id10439' => 250
'id10988' => 251
'id10143' => 252
'id10147' => 253
'id11111' => 254
'id10258' => 255
'id11059' => 256
'id11221' => 257
'id10935' => 258
'id10929' => 259
'id11095' => 260
'id10166' => 261
'id11003' => 262
'id10825' => 263
'id10405' => 264
'id10226' => 265
'id10839' => 266
'id10768' => 267
'id10794' => 268
'id10512' => 269
'id10259' => 270
'id11205' => 271
'id10652' => 272
'id10552' => 273
'id10351' => 274
'id10997' => 275
'id10268' => 276
'id10448' => 277
'id10353' => 278
'id11093' => 279
'id10942' => 280
'id10672' => 281
'id10519' => 282
'id10662' => 283
'id11097' => 284
'id10334' => 285
'id10843' => 286
'id11220' => 287
'id10645' => 288
'id10410' => 289
'id10342' => 290
'id10582' => 291
'id10720' => 292
'id10341' => 293
'id10503' => 294
'id10650' => 295
'id11166' => 296
'id11019' => 297
'id10597' => 298
'id11010' => 299
'id10596' => 300
'id10053' => 301
'id10753' => 302
'id10360' => 303
'id11202' => 304
'id10207' => 305
'id10394' => 306
'id10738' => 307
'id11235' => 308
'id10775' => 309
'id10209' => 310
'id10098' => 311
'id10431' => 312
'id10079' => 313
'id11190' => 314
'id10826' => 315
'id10812' => 316
'id10995' => 317
'id10922' => 318
'id10084' => 319
'id10638' => 320
'id10829' => 321
'id10513' => 322
'id10479' => 323
'id11024' => 324
'id10702' => 325
'id11107' => 326
'id10402' => 327
'id10206' => 328
'id10404' => 329
'id10761' => 330
'id11094' => 331
'id10787' => 332
'id10412' => 333
'id10171' => 334
'id11020' => 335
'id10244' => 336
'id10012' => 337
'id11209' => 338
'id10141' => 339
'id10764' => 340
'id10769' => 341
'id10157' => 342
'id10983' => 343
'id10310' => 344
'id11203' => 345
'id11174' => 346
'id10313' => 347
'id10971' => 348
'id11077' => 349
'id10355' => 350
'id10043' => 351
'id10970' => 352
'id11236' => 353
'id11122' => 354
'id10218' => 355
'id10326' => 356
'id10156' => 357
'id10845' => 358
'id11206' => 359
'id10311' => 360
'id10851' => 361
'id10744' => 362
'id10040' => 363
'id10444' => 364
'id10976' => 365
'id10859' => 366
'id10663' => 367
'id10588' => 368
'id11173' => 369
'id10065' => 370
'id10343' => 371
'id10417' => 372
'id10345' => 373
'id10403' => 374
'id10347' => 375
'id10414' => 376
'id10333' => 377
'id10377' => 378
'id10160' => 379
'id10630' => 380
'id10782' => 381
'id10801' => 382
'id10257' => 383
'id10642' => 384
'id11099' => 385
'id10954' => 386
'id10557' => 387
'id10881' => 388
'id10685' => 389
'id10100' => 390
'id10727' => 391
'id10776' => 392
'id10797' => 393
'id10624' => 394
'id10562' => 395
'id10188' => 396
'id10214' => 397
'id10232' => 398
'id10901' => 399
'id10701' => 400
'id10349' => 401
'id11062' => 402
'id10135' => 403
'id10110' => 404
'id11048' => 405
'id10201' => 406
'id10140' => 407
'id10682' => 408
'id10122' => 409
'id10391' => 410
'id10612' => 411
'id11201' => 412
'id10312' => 413
'id10870' => 414
'id11009' => 415
'id10039' => 416
'id10131' => 417
'id10330' => 418
'id10082' => 419
'id10884' => 420
'id11064' => 421
'id10252' => 422
'id10365' => 423
'id10729' => 424
'id10064' => 425
'id11109' => 426
'id10803' => 427
'id10736' => 428
'id10069' => 429
'id11014' => 430
'id10241' => 431
'id10909' => 432
'id10902' => 433
'id11090' => 434
'id10568' => 435
'id10777' => 436
'id10265' => 437
'id10920' => 438
'id10731' => 439
'id10757' => 440
'id10083' => 441
'id11140' => 442
'id10564' => 443
'id11189' => 444
'id11248' => 445
'id10813' => 446
'id10392' => 447
'id10337' => 448
'id10492' => 449
'id10680' => 450
'id10933' => 451
'id10540' => 452
'id10443' => 453
'id10915' => 454
'id10874' => 455
'id11081' => 456
'id10560' => 457
'id11157' => 458
'id10229' => 459
'id11139' => 460
'id10992' => 461
'id10256' => 462
'id10542' => 463
'id10644' => 464
'id10233' => 465
'id10484' => 466
'id11246' => 467
'id10204' => 468
'id10895' => 469
'id10442' => 470
'id10434' => 471
'id10725' => 472
'id10139' => 473
'id10828' => 474
'id10676' => 475
'id10570' => 476
'id11171' => 477
'id10220' => 478
'id10649' => 479
'id11047' => 480
'id11180' => 481
'id10170' => 482
'id10060' => 483
'id10771' => 484
'id11117' => 485
'id11211' => 486
'id10152' => 487
'id11025' => 488
'id10580' => 489
'id11150' => 490
'id11043' => 491
'id10619' => 492
'id11147' => 493
'id10019' => 494
'id10500' => 495
'id11218' => 496
'id11249' => 497
'id10528' => 498
'id10743' => 499
'id11185' => 500
'id10044' => 501
'id10246' => 502
'id10497' => 503
'id10594' => 504
'id11082' => 505
'id10618' => 506
'id10236' => 507
'id10982' => 508
'id10017' => 509
'id10408' => 510
'id10056' => 511
'id10028' => 512
'id11120' => 513
'id10531' => 514
'id10393' => 515
'id10059' => 516
'id10374' => 517
'id10016' => 518
'id10172' => 519
'id10376' => 520
'id10616' => 521
'id10363' => 522
'id10473' => 523
'id11016' => 524
'id11066' => 525
'id10200' => 526
'id10718' => 527
'id10710' => 528
'id10388' => 529
'id10934' => 530
'id10614' => 531
'id10014' => 532
'id10752' => 533
'id10415' => 534
'id11108' => 535
'id10625' => 536
'id10020' => 537
'id10362' => 538
'id11013' => 539
'id11127' => 540
'id11038' => 541
'id10858' => 542
'id10952' => 543
'id10483' => 544
'id10386' => 545
'id10456' => 546
'id10767' => 547
'id11069' => 548
'id11073' => 549
'id10085' => 550
'id10486' => 551
'id11212' => 552
'id11000' => 553
'id11159' => 554
'id11234' => 555
'id10759' => 556
'id10834' => 557
'id10511' => 558
'id10847' => 559
'id11119' => 560
'id10893' => 561
'id10550' => 562
'id11012' => 563
'id10556' => 564
'id10071' => 565
'id10849' => 566
'id10607' => 567
'id11233' => 568
'id10077' => 569
'id10058' => 570
'id10089' => 571
'id10008' => 572
'id10118' => 573
'id10501' => 574
'id10504' => 575
'id10250' => 576
'id10190' => 577
'id10238' => 578
'id10198' => 579
'id10899' => 580
'id11106' => 581
'id11178' => 582
'id10348' => 583
'id10331' => 584
'id10490' => 585
'id10762' => 586
'id10489' => 587
'id10809' => 588
'id11032' => 589
'id10951' => 590
'id10936' => 591
'id11018' => 592
'id10529' => 593
'id10262' => 594
'id10474' => 595
'id10460' => 596
'id10852' => 597
'id11128' => 598
'id10223' => 599
'id10872' => 600
'id11021' => 601
'id10592' => 602
'id10822' => 603
'id11046' => 604
'id10748' => 605
'id10321' => 606
'id10808' => 607
'id11049' => 608
'id10029' => 609
'id10873' => 610
'id10048' => 611
'id10499' => 612
'id10916' => 613
'id10074' => 614
'id10269' => 615
'id10469' => 616
'id10076' => 617
'id10918' => 618
'id10478' => 619
'id10567' => 620
'id10506' => 621
'id10989' => 622
'id10544' => 623
'id10352' => 624
'id10158' => 625
'id10216' => 626
'id11072' => 627
'id10136' => 628
'id10509' => 629
'id10816' => 630
'id10203' => 631
'id10559' => 632
'id10862' => 633
'id11240' => 634
'id10179' => 635
'id10889' => 636
'id10527' => 637
'id10195' => 638
'id11060' => 639
'id10661' => 640
'id10683' => 641
'id10835' => 642
'id10955' => 643
'id10751' => 644
'id10115' => 645
'id10024' => 646
'id10137' => 647
'id10338' => 648
'id10038' => 649
'id10485' => 650
'id10795' => 651
'id11054' => 652
'id10390' => 653
'id10327' => 654
'id10760' => 655
'id10009' => 656
'id11087' => 657
'id10609' => 658
'id10553' => 659
'id10361' => 660
'id10263' => 661
'id10167' => 662
'id10177' => 663
'id10742' => 664
'id10457' => 665
'id10178' => 666
'id10364' => 667
'id11230' => 668
'id10463' => 669
'id10900' => 670
'id10885' => 671
'id11229' => 672
'id10242' => 673
'id10904' => 674
'id10694' => 675
'id10819' => 676
'id11194' => 677
'id10120' => 678
'id11098' => 679
'id10739' => 680
'id10590' => 681
'id11238' => 682
'id10138' => 683
'id10961' => 684
'id10081' => 685
'id10162' => 686
'id10734' => 687
'id11084' => 688
'id10948' => 689
'id10078' => 690
'id10344' => 691
'id10635' => 692
'id10524' => 693
'id10440' => 694
'id10818' => 695
'id11161' => 696
'id11017' => 697
'id10545' => 698
'id10964' => 699
'id10535' => 700
'id10001' => 701
'id11170' => 702
'id10646' => 703
'id11198' => 704
'id10806' => 705
'id10495' => 706
'id10427' => 707
'id10379' => 708
'id10114' => 709
'id10577' => 710
'id10453' => 711
'id10796' => 712
'id10656' => 713
'id11065' => 714
'id10123' => 715
'id11100' => 716
'id10888' => 717
'id10525' => 718
'id11080' => 719
'id10395' => 720
'id10212' => 721
'id11086' => 722
'id10437' => 723
'id10677' => 724
'id10668' => 725
'id10820' => 726
'id10643' => 727
'id10714' => 728
'id11231' => 729
'id10107' => 730
'id10173' => 731
'id11075' => 732
'id10903' => 733
'id10251' => 734
'id11052' => 735
'id11088' => 736
'id10224' => 737
'id10073' => 738
'id10823' => 739
'id10978' => 740
'id10905' => 741
'id10692' => 742
'id10530' => 743
'id10595' => 744
'id10400' => 745
'id10432' => 746
'id10634' => 747
'id11071' => 748
'id10932' => 749
'id10856' => 750
'id10066' => 751
'id10523' => 752
'id10104' => 753
'id10318' => 754
'id11151' => 755
'id10371' => 756
'id10022' => 757
'id10254' => 758
'id10004' => 759
'id10667' => 760
'id10041' => 761
'id10464' => 762
'id10211' => 763
'id10522' => 764
'id10161' => 765
'id10908' => 766
'id10368' => 767
'id10696' => 768
'id11132' => 769
'id10515' => 770
'id10003' => 771
'id10261' => 772
'id11131' => 773
'id10433' => 774
'id10966' => 775
'id11007' => 776
'id10541' => 777
'id11242' => 778
'id10183' => 779
'id11232' => 780
'id10037' => 781
'id11026' => 782
'id10773' => 783
'id10868' => 784
'id11110' => 785
'id10735' => 786
'id11116' => 787
'id10322' => 788
'id10678' => 789
'id10623' => 790
'id10336' => 791
'id10435' => 792
'id10937' => 793
'id10747' => 794
'id10346' => 795
'id10629' => 796
'id10196' => 797
'id11167' => 798
'id10369' => 799
'id10654' => 800
'id10698' => 801
'id10632' => 802
'id10867' => 803
'id11022' => 804
'id10227' => 805
'id10067' => 806
'id10723' => 807
'id11197' => 808
'id10605' => 809
'id10210' => 810
'id10965' => 811
'id11079' => 812
'id10791' => 813
'id10724' => 814
'id10222' => 815
'id11244' => 816
'id10051' => 817
'id10070' => 818
'id10985' => 819
'id10555' => 820
'id10615' => 821
'id10458' => 822
'id10132' => 823
'id11063' => 824
'id10424' => 825
'id10758' => 826
'id10846' => 827
'id10827' => 828
'id10861' => 829
'id10866' => 830
'id10027' => 831
'id10159' => 832
'id10647' => 833
'id10194' => 834
'id10185' => 835
'id11137' => 836
'id10091' => 837
'id10726' => 838
'id11200' => 839
'id10153' => 840
'id10088' => 841
'id10378' => 842
'id10498' => 843
'id10461' => 844
'id11092' => 845
'id10911' => 846
'id10719' => 847
'id11133' => 848
'id10963' => 849
'id10991' => 850
'id10476' => 851
'id10021' => 852
'id10101' => 853
'id10713' => 854
'id11006' => 855
'id11164' => 856
'id10054' => 857
'id10688' => 858
'id11023' => 859
'id11160' => 860
'id10745' => 861
'id10842' => 862
'id10804' => 863
'id10007' => 864
'id11216' => 865
'id10968' => 866
'id10566' => 867
'id11196' => 868
'id10316' => 869
'id10722' => 870
'id10880' => 871
'id10973' => 872
'id11163' => 873
'id11138' => 874
'id11104' => 875
'id10532' => 876
'id10967' => 877
'id10593' => 878
'id10192' => 879
'id10109' => 880
'id11186' => 881
'id10324' => 882
'id10603' => 883
'id10740' => 884
'id10574' => 885
'id10359' => 886
'id10551' => 887
'id10516' => 888
'id10917' => 889
'id10057' => 890
'id10841' => 891
'id10887' => 892
'id11068' => 893
'id10898' => 894
'id10687' => 895
'id10674' => 896
'id10781' => 897
'id10864' => 898
'id11228' => 899
'id10452' => 900
'id10785' => 901
'id10622' => 902
'id10018' => 903
'id10766' => 904
'id10789' => 905
'id10660' => 906
'id11050' => 907
'id11114' => 908
'id11208' => 909
'id10481' => 910
'id10572' => 911
'id10824' => 912
'id11055' => 913
'id10637' => 914
'id10370' => 915
'id10833' => 916
'id10350' => 917
'id10589' => 918
'id10387' => 919
'id10445' => 920
'id10052' => 921
'id10878' => 922
'id10854' => 923
'id10697' => 924
'id10521' => 925
'id10247' => 926
'id10671' => 927
'id11045' => 928
'id10011' => 929
'id10877' => 930
'id10438' => 931
'id10733' => 932
'id10585' => 933
'id10750' => 934
'id10418' => 935
'id10939' => 936
'id10670' => 937
'id10844' => 938
'id10548' => 939
'id11187' => 940
'id10163' => 941
'id11226' => 942
'id10010' => 943
'id11214' => 944
'id11028' => 945
'id10234' => 946
'id10049' => 947
'id10914' => 948
'id10863' => 949
'id10182' => 950
'id10737' => 951
'id11213' => 952
'id10754' => 953
'id11245' => 954
'id10857' => 955
'id10599' => 956
'id10215' => 957
'id11179' => 958
'id10728' => 959
'id10940' => 960
'id10778' => 961
'id10406' => 962
'id10999' => 963
'id10897' => 964
'id10372' => 965
'id10451' => 966
'id10653' => 967
'id11155' => 968
'id10127' => 969
'id11103' => 970
'id10765' => 971
'id10332' => 972
'id10871' => 973
'id11070' => 974
'id10974' => 975
'id10969' => 976
'id10230' => 977
'id10800' => 978
'id10357' => 979
'id11215' => 980
'id10561' => 981
'id10705' => 982
'id10436' => 983
'id10838' => 984
'id10146' => 985
'id10323' => 986
'id11078' => 987
'id10358' => 988
'id11172' => 989
'id10927' => 990
'id10314' => 991
'id11227' => 992
'id10987' => 993
'id10124' => 994
'id10187' => 995
'id10600' => 996
'id10496' => 997
'id11067' => 998
'id10249' => 999
'id10023' => 1000
'id10673' => 1001
'id10126' => 1002
'id10563' => 1003
'id10621' => 1004
'id10401' => 1005
'id10869' => 1006
'id10319' => 1007
'id11126' => 1008
'id10097' => 1009
'id11134' => 1010
'id10373' => 1011
'id10422' => 1012
'id10554' => 1013
'id11033' => 1014
'id11125' => 1015
'id10960' => 1016
'id11085' => 1017
'id11061' => 1018
'id10033' => 1019
'id10703' => 1020
'id10907' => 1021
'id11091' => 1022
'id10441' => 1023
'id10830' => 1024
'id10151' => 1025
'id11237' => 1026
'id10005' => 1027
'id10641' => 1028
'id10315' => 1029
'id10875' => 1030
'id10189' => 1031
'id10121' => 1032
'id10112' => 1033
'id11053' => 1034
'id11096' => 1035
'id10420' => 1036
'id10080' => 1037
'id10266' => 1038
'id10150' => 1039
'id10235' => 1040
'id10546' => 1041
'id10575' => 1042
'id11143' => 1043
'id10407' => 1044
'id10793' => 1045
'id10446' => 1046
'id10111' => 1047
'id10581' => 1048
'id10099' => 1049
'id11146' => 1050
'id10695' => 1051
'id10802' => 1052
'id10684' => 1053
'id10890' => 1054
'id10774' => 1055
'id10105' => 1056
'id10026' => 1057
'id10865' => 1058
'id11074' => 1059
'id10117' => 1060
'id10602' => 1061
'id10693' => 1062
'id10923' => 1063
'id10669' => 1064
'id11102' => 1065
'id10821' => 1066
'id10547' => 1067
'id10087' => 1068
'id10651' => 1069
'id10046' => 1070
'id10514' => 1071
'id11219' => 1072
'id10763' => 1073
'id10205' => 1074
'id11124' => 1075
'id10613' => 1076
'id11031' => 1077
'id11145' => 1078
'id10366' => 1079
'id10494' => 1080
'id10699' => 1081
'id11162' => 1082
'id10217' => 1083
'id10493' => 1084
'id10876' => 1085
'id11156' => 1086
'id10648' => 1087
'id11042' => 1088
'id10264' => 1089
'id10533' => 1090
'id10389' => 1091
'id10681' => 1092
'id10788' => 1093
'id10470' => 1094
'id10174' => 1095
'id10340' => 1096
'id11192' => 1097
'id10181' => 1098
'id10606' => 1099
'id10208' => 1100
'id10176' => 1101
'id11083' => 1102
'id10924' => 1103
'id10591' => 1104
'id10894' => 1105
'id10119' => 1106
'id10721' => 1107
'id11029' => 1108
'id11241' => 1109
'id10598' => 1110
'id10998' => 1111
'id10133' => 1112
'id11247' => 1113
'id10526' => 1114
'id10221' => 1115
'id10429' => 1116
'id10030' => 1117
'id10034' => 1118
'id10620' => 1119
'id11005' => 1120
'id11036' => 1121
'id10675' => 1122
'id10518' => 1123
'id11051' => 1124
'id11141' => 1125
'id10666' => 1126
'id10990' => 1127
'id10962' => 1128
'id11118' => 1129
'id11037' => 1130
'id10075' => 1131
'id10154' => 1132
'id10972' => 1133
'id10855' => 1134
'id10487' => 1135
'id10565' => 1136
'id10409' => 1137
'id10741' => 1138
'id10142' => 1139
'id10626' => 1140
'id10700' => 1141
'id10339' => 1142
'id10202' => 1143
'id11222' => 1144
'id10749' => 1145
'id10896' => 1146
'id11243' => 1147
'id10655' => 1148
'id10837' => 1149
'id10717' => 1150
'id10013' => 1151
'id10814' => 1152
'id10169' => 1153
'id10015' => 1154
'id10467' => 1155
'id10384' => 1156
'id11040' => 1157
'id10093' => 1158
'id10836' => 1159
'id10219' => 1160
'id10050' => 1161
'id11175' => 1162
'id10382' => 1163
'id11001' => 1164
'id11034' => 1165
'id10413' => 1166
'id10958' => 1167
'id11153' => 1168
'id10984' => 1169
'id11207' => 1170
'id10910' => 1171
'id10459' => 1172
'id10213' => 1173
'id10558' => 1174
'id10253' => 1175
'id10502' => 1176
'id10947' => 1177
'id10381' => 1178
'id11251' => 1179
'id10815' => 1180
'id11115' => 1181
'id10385' => 1182
'id11217' => 1183
'id10832' => 1184
'id10770' => 1185
'id10134' => 1186
'id10455' => 1187
'id10848' => 1188
'id10807' => 1189
'id10228' => 1190
'id10882' => 1191
'id11191' => 1192
'id10164' => 1193
'id10772' => 1194
'id10538' => 1195
'id10068' => 1196
'id10375' => 1197
'id11135' => 1198
'id10255' => 1199
'id10746' => 1200
'id10657' => 1201
'id10658' => 1202
'id10946' => 1203
'id10926' => 1204
'id10507' => 1205
'id10468' => 1206
'id10128' => 1207
'id10475' => 1208
'id10953' => 1209
'id10320' => 1210
================
'starting_index' => 0
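The mapping above is the saved state of the CategoricalEncoder declared in hyperparams.yaml: 1211 VoxCeleb speaker ids (matching out_n_neurons: 1211) mapped to class indices, followed by a separator line and a 'starting_index' metadata entry. SpeechBrain normally reads this file itself via the pretrainer; the sketch below only documents the on-disk format, and its handling of the metadata section is an assumption.

# Illustrative sketch: parse the label_encoder.txt format shown above into
# a dict. Lines are "'label' => index"; the ================ line separates
# the mapping from extra metadata such as 'starting_index'.
def read_label_map(path):
    lab2ind = {}
    with open(path) as f:
        for line in f:
            line = line.strip()
            if line.startswith("="):  # metadata separator
                break
            label, _, index = line.partition(" => ")
            lab2ind[label.strip("'")] = int(index)
    return lab2ind

lab2ind = read_label_map("label_encoder.txt")
print(len(lab2ind), lab2ind["id11129"])  # 1211 0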